repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values)
---|---|---|---|---|---|
amenonsen/ansible | lib/ansible/modules/cloud/vmware/_vmware_host_config_facts.py | 21 | 3727 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Abhijeet Kasurde <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_host_config_facts
deprecated:
removed_in: '2.13'
why: Deprecated in favour of C(_info) module.
alternative: Use M(vmware_host_config_info) instead.
short_description: Gathers facts about an ESXi host's advanced configuration information
description:
- This module can be used to gather facts about an ESXi host's advanced configuration information when an ESXi hostname or cluster name is given.
version_added: '2.5'
author:
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on vSphere 6.5
requirements:
- python >= 2.6
- PyVmomi
options:
cluster_name:
description:
- Name of the cluster to which the ESXi host belongs.
- If C(esxi_hostname) is not given, this parameter is required.
type: str
esxi_hostname:
description:
- ESXi hostname to gather facts from.
- If C(cluster_name) is not given, this parameter is required.
type: str
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Gather facts about all ESXi hosts in a given cluster
vmware_host_config_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
cluster_name: cluster_name
delegate_to: localhost
- name: Gather facts about ESXi Host
vmware_host_config_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
delegate_to: localhost
'''
RETURN = r'''
hosts_facts:
description:
- dict with hostname as key and a dict of host config facts as value
returned: always
type: dict
sample: {
"10.76.33.226": {
"Annotations.WelcomeMessage": "",
"BufferCache.FlushInterval": 30000,
"BufferCache.HardMaxDirty": 95,
"BufferCache.PerFileHardMaxDirty": 50,
"BufferCache.SoftMaxDirty": 15,
}
}
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import vmware_argument_spec, PyVmomi
class VmwareConfigFactsManager(PyVmomi):
def __init__(self, module):
super(VmwareConfigFactsManager, self).__init__(module)
cluster_name = self.params.get('cluster_name', None)
esxi_host_name = self.params.get('esxi_hostname', None)
self.hosts = self.get_all_host_objs(cluster_name=cluster_name, esxi_host_name=esxi_host_name)
def gather_host_facts(self):
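        """Return a dict keyed by host name, mapping each host's advanced
        option keys to their current values."""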
hosts_facts = {}
for host in self.hosts:
host_facts = {}
for option in host.configManager.advancedOption.QueryOptions():
host_facts[option.key] = option.value
hosts_facts[host.name] = host_facts
return hosts_facts
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(
cluster_name=dict(type='str', required=False),
esxi_hostname=dict(type='str', required=False),
)
module = AnsibleModule(
argument_spec=argument_spec,
required_one_of=[
['cluster_name', 'esxi_hostname'],
],
supports_check_mode=True
)
vmware_host_config = VmwareConfigFactsManager(module)
module.exit_json(changed=False, hosts_facts=vmware_host_config.gather_host_facts())
if __name__ == "__main__":
main()
| gpl-3.0 |
homme/ansible-modules-core | system/authorized_key.py | 55 | 15877 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to add authorized_keys for ssh logins.
(c) 2012, Brad Olson <[email protected]>
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
DOCUMENTATION = '''
---
module: authorized_key
short_description: Adds or removes an SSH authorized key
description:
- Adds or removes an SSH authorized key for a user from a remote host.
version_added: "0.5"
options:
user:
description:
- The username on the remote host whose authorized_keys file will be modified
required: true
default: null
key:
description:
- The SSH public key(s), as a string or (since 1.9) url (https://github.com/username.keys)
required: true
default: null
path:
description:
- Alternate path to the authorized_keys file
required: false
default: "(homedir)+/.ssh/authorized_keys"
version_added: "1.2"
manage_dir:
description:
- Whether this module should manage the directory of the authorized key file. If
set, the module will create the directory, as well as set the owner and permissions
of an existing directory. Be sure to
set C(manage_dir=no) if you are using an alternate directory for
authorized_keys, as set with C(path), since you could lock yourself out of
SSH access. See the example below.
required: false
choices: [ "yes", "no" ]
default: "yes"
version_added: "1.2"
state:
description:
- Whether the given key (with the given key_options) should or should not be in the file
required: false
choices: [ "present", "absent" ]
default: "present"
key_options:
description:
- A string of ssh key options to be prepended to the key in the authorized_keys file
required: false
default: null
version_added: "1.4"
exclusive:
description:
- Whether to remove all other non-specified keys from the authorized_keys file. Multiple keys
can be specified in a single C(key) string value by separating them by newlines.
    - This option is not loop aware, so if you use C(with_), it will be exclusive per iteration
      of the loop; if you want multiple keys in the file you need to pass them all to C(key) in a
      single batch as mentioned above.
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "1.9"
author: "Brad Olson (@bradobro)"
'''
EXAMPLES = '''
# Example using key data from a local file on the management machine
- authorized_key: user=charlie key="{{ lookup('file', '/home/charlie/.ssh/id_rsa.pub') }}"
# Using github url as key source
- authorized_key: user=charlie key=https://github.com/charlie.keys
# Using alternate directory locations:
- authorized_key: user=charlie
key="{{ lookup('file', '/home/charlie/.ssh/id_rsa.pub') }}"
path='/etc/ssh/authorized_keys/charlie'
manage_dir=no
# Using with_file
- name: Set up authorized_keys for the deploy user
authorized_key: user=deploy
key="{{ item }}"
with_file:
- public_keys/doe-jane
- public_keys/doe-john
# Using key_options:
- authorized_key: user=charlie
key="{{ lookup('file', '/home/charlie/.ssh/id_rsa.pub') }}"
key_options='no-port-forwarding,from="10.0.1.1"'
# Set up authorized_keys exclusively with one key
- authorized_key: user=root key="{{ item }}" state=present
exclusive=yes
with_file:
- public_keys/doe-jane
'''
# Makes sure the public key line is present or absent in the user's .ssh/authorized_keys.
#
# Arguments
# =========
# user = username
# key = line to add to authorized_keys for user
# path = path to the user's authorized_keys file (default: ~/.ssh/authorized_keys)
# manage_dir = whether to create, and control ownership of the directory (default: true)
# state = absent|present (default: present)
#
# see example in examples/playbooks
import sys
import os
import pwd
import os.path
import tempfile
import re
import shlex
class keydict(dict):
""" a dictionary that maintains the order of keys as they are added """
# http://stackoverflow.com/questions/2328235/pythonextend-the-dict-class
def __init__(self, *args, **kw):
super(keydict,self).__init__(*args, **kw)
self.itemlist = super(keydict,self).keys()
def __setitem__(self, key, value):
self.itemlist.append(key)
super(keydict,self).__setitem__(key, value)
def __iter__(self):
return iter(self.itemlist)
def keys(self):
return self.itemlist
def values(self):
return [self[key] for key in self]
def itervalues(self):
return (self[key] for key in self)
def keyfile(module, user, write=False, path=None, manage_dir=True):
"""
Calculate name of authorized keys file, optionally creating the
directories and file, properly setting permissions.
:param str user: name of user in passwd file
:param bool write: if True, write changes to authorized_keys file (creating directories if needed)
:param str path: if not None, use provided path rather than default of '~user/.ssh/authorized_keys'
:param bool manage_dir: if True, create and set ownership of the parent dir of the authorized_keys file
:return: full path string to authorized_keys for user
"""
if module.check_mode and path is not None:
keysfile = path
return keysfile
try:
user_entry = pwd.getpwnam(user)
except KeyError, e:
if module.check_mode and path is None:
module.fail_json(msg="Either user must exist or you must provide full path to key file in check mode")
module.fail_json(msg="Failed to lookup user %s: %s" % (user, str(e)))
if path is None:
homedir = user_entry.pw_dir
sshdir = os.path.join(homedir, ".ssh")
keysfile = os.path.join(sshdir, "authorized_keys")
else:
sshdir = os.path.dirname(path)
keysfile = path
if not write:
return keysfile
uid = user_entry.pw_uid
gid = user_entry.pw_gid
if manage_dir:
if not os.path.exists(sshdir):
os.mkdir(sshdir, 0700)
if module.selinux_enabled():
module.set_default_selinux_context(sshdir, False)
os.chown(sshdir, uid, gid)
os.chmod(sshdir, 0700)
if not os.path.exists(keysfile):
basedir = os.path.dirname(keysfile)
if not os.path.exists(basedir):
os.makedirs(basedir)
        f = open(keysfile, "w")  # touches file so we can set ownership and perms
        f.close()
if module.selinux_enabled():
module.set_default_selinux_context(keysfile, False)
try:
os.chown(keysfile, uid, gid)
os.chmod(keysfile, 0600)
except OSError:
pass
return keysfile
def parseoptions(module, options):
'''
reads a string containing ssh-key options
and returns a dictionary of those options
'''
options_dict = keydict() #ordered dict
if options:
try:
# the following regex will split on commas while
# ignoring those commas that fall within quotes
regex = re.compile(r'''((?:[^,"']|"[^"]*"|'[^']*')+)''')
parts = regex.split(options)[1:-1]
for part in parts:
if "=" in part:
(key, value) = part.split("=", 1)
options_dict[key] = value
elif part != ",":
options_dict[part] = None
except:
module.fail_json(msg="invalid option string: %s" % options)
return options_dict
def parsekey(module, raw_key):
'''
parses a key, which may or may not contain a list
of ssh-key options at the beginning
'''
VALID_SSH2_KEY_TYPES = [
'ssh-ed25519',
'ecdsa-sha2-nistp256',
'ecdsa-sha2-nistp384',
'ecdsa-sha2-nistp521',
'ssh-dss',
'ssh-rsa',
]
    options = None      # connection options
    key = None          # the encoded public key string
    key_type = None     # type of ssh key
    type_index = None   # index of keytype in key string|list
    comment = None      # trailing comment after the key, if any
# remove comment yaml escapes
raw_key = raw_key.replace('\#', '#')
# split key safely
lex = shlex.shlex(raw_key)
lex.quotes = []
lex.commenters = '' #keep comment hashes
lex.whitespace_split = True
key_parts = list(lex)
for i in range(0, len(key_parts)):
if key_parts[i] in VALID_SSH2_KEY_TYPES:
type_index = i
key_type = key_parts[i]
break
# check for options
if type_index is None:
return None
elif type_index > 0:
options = " ".join(key_parts[:type_index])
# parse the options (if any)
options = parseoptions(module, options)
# get key after the type index
key = key_parts[(type_index + 1)]
# set comment to everything after the key
if len(key_parts) > (type_index + 1):
comment = " ".join(key_parts[(type_index + 2):])
return (key, key_type, options, comment)
def readkeys(module, filename):
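    """Parse an authorized_keys file into a dict keyed by the key material.
    Lines that cannot be parsed are stored verbatim so they are re-emitted
    unchanged when the file is rewritten."""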
if not os.path.isfile(filename):
return {}
keys = {}
f = open(filename)
for line in f.readlines():
key_data = parsekey(module, line)
if key_data:
# use key as identifier
keys[key_data[0]] = key_data
else:
# for an invalid line, just append the line
# to the array so it will be re-output later
keys[line] = line
f.close()
return keys
def writekeys(module, filename, keys):
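    """Write the keys to a temporary file in the target directory, then
    atomically move it over the existing authorized_keys file."""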
fd, tmp_path = tempfile.mkstemp('', 'tmp', os.path.dirname(filename))
f = open(tmp_path,"w")
try:
for index, key in keys.items():
try:
(keyhash,type,options,comment) = key
option_str = ""
if options:
option_strings = []
for option_key in options.keys():
if options[option_key]:
option_strings.append("%s=%s" % (option_key, options[option_key]))
else:
option_strings.append("%s" % option_key)
option_str = ",".join(option_strings)
option_str += " "
key_line = "%s%s %s %s\n" % (option_str, type, keyhash, comment)
except:
key_line = key
f.writelines(key_line)
except IOError, e:
module.fail_json(msg="Failed to write to file %s: %s" % (tmp_path, str(e)))
f.close()
module.atomic_move(tmp_path, filename)
def enforce_state(module, params):
"""
Add or remove key.
"""
user = params["user"]
key = params["key"]
path = params.get("path", None)
manage_dir = params.get("manage_dir", True)
state = params.get("state", "present")
key_options = params.get("key_options", None)
exclusive = params.get("exclusive", False)
error_msg = "Error getting key from: %s"
# if the key is a url, request it and use it as key source
if key.startswith("http"):
try:
resp, info = fetch_url(module, key)
if info['status'] != 200:
module.fail_json(msg=error_msg % key)
else:
key = resp.read()
except Exception:
module.fail_json(msg=error_msg % key)
# extract individual keys into an array, skipping blank lines and comments
key = [s for s in key.splitlines() if s and not s.startswith('#')]
# check current state -- just get the filename, don't create file
do_write = False
params["keyfile"] = keyfile(module, user, do_write, path, manage_dir)
existing_keys = readkeys(module, params["keyfile"])
    # Add a placeholder for keys that should exist in the state=present and
    # exclusive=true case
keys_to_exist = []
# Check our new keys, if any of them exist we'll continue.
for new_key in key:
parsed_new_key = parsekey(module, new_key)
if not parsed_new_key:
module.fail_json(msg="invalid key specified: %s" % new_key)
if key_options is not None:
parsed_options = parseoptions(module, key_options)
parsed_new_key = (parsed_new_key[0], parsed_new_key[1], parsed_options, parsed_new_key[3])
present = False
matched = False
non_matching_keys = []
if parsed_new_key[0] in existing_keys:
present = True
            # Then we check if everything matches, including the
            # key type and options. If not, we append this
            # existing key to the non-matching list.
            # We only want it to match everything when the state
            # is present.
if parsed_new_key != existing_keys[parsed_new_key[0]] and state == "present":
non_matching_keys.append(existing_keys[parsed_new_key[0]])
else:
matched = True
# handle idempotent state=present
if state=="present":
keys_to_exist.append(parsed_new_key[0])
if len(non_matching_keys) > 0:
for non_matching_key in non_matching_keys:
if non_matching_key[0] in existing_keys:
del existing_keys[non_matching_key[0]]
do_write = True
if not matched:
existing_keys[parsed_new_key[0]] = parsed_new_key
do_write = True
elif state=="absent":
if not matched:
continue
del existing_keys[parsed_new_key[0]]
do_write = True
# remove all other keys to honor exclusive
if state == "present" and exclusive:
to_remove = frozenset(existing_keys).difference(keys_to_exist)
for key in to_remove:
del existing_keys[key]
do_write = True
if do_write:
if module.check_mode:
module.exit_json(changed=True)
writekeys(module, keyfile(module, user, do_write, path, manage_dir), existing_keys)
params['changed'] = True
else:
if module.check_mode:
module.exit_json(changed=False)
return params
def main():
module = AnsibleModule(
argument_spec = dict(
user = dict(required=True, type='str'),
key = dict(required=True, type='str'),
path = dict(required=False, type='str'),
manage_dir = dict(required=False, type='bool', default=True),
state = dict(default='present', choices=['absent','present']),
key_options = dict(required=False, type='str'),
unique = dict(default=False, type='bool'),
exclusive = dict(default=False, type='bool'),
),
supports_check_mode=True
)
results = enforce_state(module, module.params)
module.exit_json(**results)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()
| gpl-3.0 |
dvliman/jaikuengine | .google_appengine/lib/django-1.3/django/contrib/messages/storage/fallback.py | 627 | 2171 | from django.contrib.messages.storage.base import BaseStorage
from django.contrib.messages.storage.cookie import CookieStorage
from django.contrib.messages.storage.session import SessionStorage
class FallbackStorage(BaseStorage):
"""
    Tries to store all messages in the first backend, storing any unstored
    messages in each subsequent backend.
"""
storage_classes = (CookieStorage, SessionStorage)
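    # Messages are written to the cookie backend first; anything that does
    # not fit within the cookie size limit falls back to the session backend.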
def __init__(self, *args, **kwargs):
super(FallbackStorage, self).__init__(*args, **kwargs)
self.storages = [storage_class(*args, **kwargs)
for storage_class in self.storage_classes]
self._used_storages = set()
def _get(self, *args, **kwargs):
"""
Gets a single list of messages from all storage backends.
"""
all_messages = []
for storage in self.storages:
messages, all_retrieved = storage._get()
# If the backend hasn't been used, no more retrieval is necessary.
if messages is None:
break
if messages:
self._used_storages.add(storage)
all_messages.extend(messages)
# If this storage class contained all the messages, no further
# retrieval is necessary
if all_retrieved:
break
return all_messages, all_retrieved
def _store(self, messages, response, *args, **kwargs):
"""
Stores the messages, returning any unstored messages after trying all
backends.
For each storage backend, any messages not stored are passed on to the
next backend.
"""
for storage in self.storages:
if messages:
messages = storage._store(messages, response,
remove_oldest=False)
# Even if there are no more messages, continue iterating to ensure
# storages which contained messages are flushed.
elif storage in self._used_storages:
storage._store([], response)
self._used_storages.remove(storage)
return messages
| apache-2.0 |
mzbenami/pyeapi | pyeapi/api/varp.py | 1 | 7989 | #
# Copyright (c) 2015, Arista Networks, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of Arista Networks nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ARISTA NETWORKS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""Module for managing the VARP configuration in EOS
This module provides an API for configuring VARP resources using
EOS and eAPI.
Arguments:
name (string): The interface name the configuration is in reference
to. The interface name is the full interface identifier
address (string): The interface IP address in the form of
address/len.
mtu (integer): The interface MTU value. The MTU value accepts
integers in the range of 68 to 65535 bytes
"""
import re
from pyeapi.api import EntityCollection
class Varp(EntityCollection):
def __init__(self, *args, **kwargs):
super(Varp, self).__init__(*args, **kwargs)
self._interfaces = None
@property
def interfaces(self):
if self._interfaces is not None:
return self._interfaces
self._interfaces = VarpInterfaces(self.node)
return self._interfaces
def get(self):
"""Returns the current VARP configuration
The Varp resource returns the following:
* mac_address (str): The virtual-router mac address
* interfaces (dict): A list of the interfaces that have a
virtual-router address configured.
Return:
A Python dictionary object of key/value pairs that represents
the current configuration of the node. If the specified
interface does not exist then None is returned::
{
"mac_address": "aa:bb:cc:dd:ee:ff",
"interfaces": {
"Vlan100": {
"addresses": [ "1.1.1.1", "2.2.2.2"]
},
"Vlan200": [...]
}
}
"""
resource = dict()
resource.update(self._parse_mac_address())
resource.update(self._parse_interfaces())
return resource
def _parse_mac_address(self):
mac_address_re = re.compile(r'^ip\svirtual-router\smac-address\s'
r'((?:[a-f0-9]{2}:){5}[a-f0-9]{2})$', re.M)
mac = mac_address_re.search(self.config)
mac = mac.group(1) if mac else None
return dict(mac_address=mac)
def _parse_interfaces(self):
interfaces = VarpInterfaces(self.node).getall()
return dict(interfaces=interfaces)
def set_mac_address(self, mac_address=None, default=False, disable=False):
""" Sets the virtual-router mac address
This method will set the switch virtual-router mac address. If a
virtual-router mac address already exists it will be overwritten.
Args:
mac_address (string): The mac address that will be assigned as
the virtual-router mac address. This should be in the format,
aa:bb:cc:dd:ee:ff.
default (bool): Sets the virtual-router mac address to the system
default (which is to remove the configuration line).
            disable (bool): Negates the virtual-router mac address using
                the system no configuration command
Returns:
True if the set operation succeeds otherwise False.
"""
if not default and not disable:
if mac_address is not None:
# Check to see if mac_address matches expected format
if not re.match(r'(?:[a-f0-9]{2}:){5}[a-f0-9]{2}', mac_address):
raise ValueError('mac_address must be formatted like:'
'aa:bb:cc:dd:ee:ff')
else:
raise ValueError('mac_address must be a properly formatted '
'address string')
commands = self.command_builder('ip virtual-router mac-address',
value=mac_address, default=default,
disable=disable)
return self.configure(commands)
class VarpInterfaces(EntityCollection):
"""The VarpInterfaces class helps manage interfaces with
virtual-router configuration.
"""
def get(self, name):
interface_re = r'interface\s%s' % name
config = self.get_block(interface_re)
if not config:
return None
resource = dict(addresses=dict())
resource.update(self._parse_virtual_addresses(config))
return resource
def getall(self):
resources = dict()
interfaces_re = re.compile(r'^interface\s(Vlan\d+)$', re.M)
for name in interfaces_re.findall(self.config):
interface_detail = self.get(name)
if interface_detail:
resources[name] = interface_detail
return resources
def set_addresses(self, name, addresses=None, default=False, disable=False):
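        """Configures the virtual-router addresses on an interface: default
        and disable remove the configuration, otherwise the running config
        is reconciled against the given address list."""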
commands = list()
commands.append('interface %s' % name)
if default:
commands.append('default ip virtual-router address')
elif disable:
commands.append('no ip virtual-router address')
elif addresses is not None:
try:
current_addresses = self.get(name)['addresses']
except:
current_addresses = []
# remove virtual-router addresses not present in addresses list
for entry in set(current_addresses).difference(addresses):
commands.append('no ip virtual-router address %s' % entry)
            # add any new virtual-router addresses that weren't already present
for entry in set(addresses).difference(current_addresses):
commands.append('ip virtual-router address %s' % entry)
else:
commands.append('no ip virtual-router address')
return self.configure(commands) if commands else True
def _parse_virtual_addresses(self, config):
virt_ip_re = re.compile(r'^\s+ip\svirtual-router\saddress\s(\S+)$',
re.M)
return dict(addresses=virt_ip_re.findall(config))
def instance(node):
"""Returns an instance of Ipinterfaces
This method will create and return an instance of the Varp object
passing the value of node to the instance. This function is required
for the resource to be autoloaded by the Node object
Args:
node (Node): The node argument provides an instance of Node to
the Varp instance
"""
return Varp(node)
| bsd-3-clause |
andrewyoung1991/abjad | abjad/tools/lilypondparsertools/ContextSpeccedMusic.py | 2 | 2353 | # -*- encoding: utf-8 -*-
from abjad.tools import scoretools
from abjad.tools import stringtools
from abjad.tools.lilypondparsertools.Music import Music
class ContextSpeccedMusic(Music):
r'''Abjad model of the LilyPond AST context-specced music node.
'''
### CLASS VARIABLES ###
__slots__ = (
#'context',
'context_name',
'music',
'optional_id',
'optional_context_mod',
)
### INITIALIZER ###
def __init__(
self,
context_name=None,
optional_id=None,
optional_context_mod=None,
music=None,
):
from abjad.tools import lilypondparsertools
context_name = context_name or ''
music = music or lilypondparsertools.SequentialMusic()
assert stringtools.is_string(context_name)
assert isinstance(music, Music)
self.context_name = context_name
self.optional_id = optional_id
self.optional_context_mod = optional_context_mod
self.music = music
### PUBLIC METHODS ###
def construct(self):
r'''Constructs context.
Returns context.
'''
        if self.context_name in self.known_contexts:
            context = self.known_contexts[self.context_name]([])
else:
message = 'context type not supported: {}.'
message = message.format(self.context_name)
raise Exception(message)
if self.optional_id is not None:
context.name = self.optional_id
if self.optional_context_mod is not None:
for x in self.optional_context_mod:
print(x)
# TODO: implement context modifications on contexts
pass
        from abjad.tools import lilypondparsertools
        if isinstance(self.music, lilypondparsertools.SimultaneousMusic):
            context.is_simultaneous = True
        context.extend(self.music.construct())
return context
### PUBLIC PROPERTIES ###
@property
def known_contexts(self):
r'''Known contexts.
Returns dictionary.
'''
return {
'ChoirStaff': scoretools.StaffGroup,
'GrandStaff': scoretools.StaffGroup,
'PianoStaff': scoretools.StaffGroup,
'Score': scoretools.Score,
'Staff': scoretools.Staff,
'StaffGroup': scoretools.StaffGroup,
'Voice': scoretools.Voice,
}
| gpl-3.0 |
nalle/dyndd | dyndd/controller.py | 1 | 9149 | from twisted.internet import reactor, defer
from twisted.python import failure
from twisted.names import client, dns, error, server
import ConfigParser
import logging
import os
import time
import datetime
import socket
import signal
from twisted.internet.address import IPv4Address
import MySQLdb
class DynDDServerFactory(server.DNSServerFactory):
def handleQuery(self, message, protocol, address):
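        """Records the client's source address for both TCP and UDP
        transports and exposes it to each resolver before delegating to the
        base DNSServerFactory implementation."""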
if protocol.transport.socket.type == socket.SOCK_STREAM:
self.peer_address = protocol.transport.getPeer()
elif protocol.transport.socket.type == socket.SOCK_DGRAM:
self.peer_address = IPv4Address('UDP', *address)
else:
print "Unexpected socket type %r" % protocol.transport.socket.type
for resolver in self.resolver.resolvers:
if hasattr(resolver, 'peer_address'):
resolver.peer_address = self.peer_address
        return server.DNSServerFactory.handleQuery(self, message,
                                                   protocol, address)
class DynDDController(object):
def __init__(self):
self.config = ConfigParser.ConfigParser()
self.config.read("/etc/dyndd/dyndd.conf")
if self.config.get("global", "LogLevel").lower() == "fatal":
level = logging.FATAL
elif self.config.get("global", "LogLevel").lower() == "info":
level = logging.INFO
elif self.config.get("global", "LogLevel").lower() == "warn":
level = logging.WARN
elif self.config.get("global", "LogLevel").lower() == "error":
level = logging.ERROR
elif self.config.get("global", "LogLevel").lower() == "debug":
level = logging.DEBUG
else:
level = logging.INFO
logging.basicConfig(format='[%(asctime)s] %(levelname)s: %(message)s',
level=logging.DEBUG,
filename=self.config.get("global",
"ApplicationLog"))
self.connect_mysql()
        signal.signal(signal.SIGHUP, self._reload)
self._peer_address = None
@property
def peer_address(self):
return self._peer_address
@peer_address.setter
def peer_address(self, value):
self._peer_address = value
def _reload(self, signum, handler):
self.config.read("/etc/dyndd/dyndd.conf")
def connect_mysql(self):
try:
self.db = MySQLdb.connect(host=self.config.get("global",
"DBHost"),
user=self.config.get("global",
"DBUser"),
passwd=self.config.get("global",
"DBPass"),
db=self.config.get("global",
"DBSchema"))
self.cursor = self.db.cursor(MySQLdb.cursors.DictCursor)
logging.info("MySQL connection successfully established to %s"
% self.config.get("global", "DBHost"))
except Exception, e:
logging.error("MySQL connection failed to %s, retrying (%s)"
% (self.config.get('global', 'DBHost'), str(e)))
self.connect_mysql()
return True
def ping_mysql(self):
try:
self.db.ping()
except Exception, e:
logging.error("Connection to MySQL %s lost, retrying (%s)"
% (self.config.get('global', 'DBHost'), (str(e))))
self.connect_mysql()
return True
def _type(self, record_type):
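        """Maps a twisted.names record-type constant to its string name."""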
        if record_type == dns.A:
            return "A"
        elif record_type == dns.AAAA:
            return "AAAA"
        elif record_type == dns.NS:
            return "NS"
        elif record_type == dns.SOA:
            return "SOA"
        elif record_type == dns.MX:
            return "MX"
        else:
            return ""
def _dynamicResponseRequired(self, query):
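        """Looks the query up in MySQL; returns the first matching row, or
        an empty string when no record exists."""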
if query.type == dns.NS or query.type == dns.SOA:
self.cursor.execute("SELECT domain, nameservers, contact
FROM domains WHERE domain='%s'"
% query.name.name)
self.db.commit()
else:
self.cursor.execute("SELECT hostname, ip, recordtype
FROM dnsrecords WHERE hostname='%s'
AND recordtype='%s'"
% (str(query.name.name),
self._type(query.type)))
self.db.commit()
self.lookup_result = self.cursor.fetchall()
if self.cursor.rowcount > 0:
self.lookup_result = self.lookup_result[0]
return self.lookup_result
else:
return ""
def _Record_A(self, query):
answers = [dns.RRHeader(
name=query.name.name, type=query.type,
payload=dns.Record_A(address=self.lookup_result['ip'],
ttl=5), auth=True)]
return answers, [], []
def _Record_AAAA(self, query):
answers = [dns.RRHeader(
name=query.name.name, type=query.type,
payload=dns.Record_AAAA(address=self.lookup_result['ip'],
ttl=5), auth=True)]
return answers, [], []
def _Record_NS(self, query):
answers = []
for nameserver in self.lookup_result['nameservers'].split(','):
answers.append(dns.RRHeader(
name=query.name.name, type=query.type,
payload=dns.Record_NS(name=nameserver,
ttl=5), auth=True))
return answers, [], []
def _Record_MX(self, query):
answers = [dns.RRHeader(
name=query.name.name, type=query.type,
payload=dns.Record_MX(10, self.lookup_result['ip'],
ttl=5), auth=True)]
return answers, [], []
def _Record_SOA(self, query):
d = self.lookup_result['domain']
answers = [dns.RRHeader(
name=query.name.name, type=dns.SOA,
payload=dns.Record_SOA(mname=self.lookup_result['domain'],
rname="hostmaster." % d,
serial=int(time.time()),
refresh=3600,
ttl=5), auth=True)]
return answers, [], []
def _Record_Unknown(self, query):
answers = [dns.RRHeader(
name=query.name.name, type=query.type,
payload=dns.UnknownRecord(query.name.name, ttl=5), auth=True)]
return answers, [], []
def _Record_NXDOMAIN(self, query):
return [], [], []
def _FigureOutSOAForQuery(self, query):
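        """Derives the zone from the last two labels of the query name and
        loads the matching domain row from MySQL."""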
tmp = query.name.name.split('.')
domain = tmp[-2]+"."+tmp[-1]
self.cursor.execute("SELECT domain, nameservers, contact
FROM domains WHERE domain='%s'" % domain)
self.db.commit()
self.lookup_result = self.cursor.fetchall()
if self.cursor.rowcount > 0:
self.lookup_result = self.lookup_result[0]
else:
self.lookup_result = ""
def _doDynamicResponse(self, query):
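        """Dispatches to the record builder matching the query type."""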
if query.type == dns.SOA:
return self._Record_SOA(query)
elif query.type == dns.NS:
return self._Record_NS(query)
elif query.type == dns.A:
return self._Record_A(query)
elif query.type == dns.AAAA:
return self._Record_AAAA(query)
elif query.type == dns.MX:
return self._Record_MX(query)
elif query.type == dns.CNAME:
return self._Record_CNAME(query)
else:
return self._Record_Unknown(query)
def query(self, query, timeout=None):
self.ping_mysql()
try:
logging.info(query)
if len(self._dynamicResponseRequired(query)) > 0:
return defer.succeed(self._doDynamicResponse(query))
else:
if query.type == dns.MX:
return defer.succeed(self._Record_NXDOMAIN(query))
return defer.succeed(([], [], []))
except Exception, e:
logging.error(e)
return defer.fail(error.DomainError())
def _src(self, query):
return {'type': query.type,
'src': self.peer_address.host.replace("::ffff:", ""),
'name': query.name.name}
def main(self):
factory = DynDDServerFactory(
clients=[DynDDController(),
client.Resolver(resolv='/etc/resolv.conf')]
)
protocol = dns.DNSDatagramProtocol(controller=factory)
reactor.listenUDP(53, protocol, interface="::0")
reactor.listenTCP(53, factory)
reactor.run()
| gpl-3.0 |
IronLanguages/ironpython2 | Src/StdLib/Lib/distutils/tests/setuptools_build_ext.py | 149 | 11489 | from distutils.command.build_ext import build_ext as _du_build_ext
try:
# Attempt to use Pyrex for building extensions, if available
from Pyrex.Distutils.build_ext import build_ext as _build_ext
except ImportError:
_build_ext = _du_build_ext
import os, sys
from distutils.file_util import copy_file
from distutils.tests.setuptools_extension import Library
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler, get_config_var
get_config_var("LDSHARED") # make sure _config_vars is initialized
from distutils.sysconfig import _config_vars
from distutils import log
from distutils.errors import *
have_rtld = False
use_stubs = False
libtype = 'shared'
if sys.platform == "darwin":
use_stubs = True
elif os.name != 'nt':
try:
from dl import RTLD_NOW
have_rtld = True
use_stubs = True
except ImportError:
pass
def if_dl(s):
if have_rtld:
return s
return ''
class build_ext(_build_ext):
def run(self):
"""Build extensions in build directory, then copy if --inplace"""
old_inplace, self.inplace = self.inplace, 0
_build_ext.run(self)
self.inplace = old_inplace
if old_inplace:
self.copy_extensions_to_source()
def copy_extensions_to_source(self):
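        """Copy freshly built extensions back into the source tree; used
        when building with --inplace."""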
build_py = self.get_finalized_command('build_py')
for ext in self.extensions:
fullname = self.get_ext_fullname(ext.name)
filename = self.get_ext_filename(fullname)
modpath = fullname.split('.')
package = '.'.join(modpath[:-1])
package_dir = build_py.get_package_dir(package)
dest_filename = os.path.join(package_dir,os.path.basename(filename))
src_filename = os.path.join(self.build_lib,filename)
# Always copy, even if source is older than destination, to ensure
# that the right extensions for the current Python/platform are
# used.
copy_file(
src_filename, dest_filename, verbose=self.verbose,
dry_run=self.dry_run
)
if ext._needs_stub:
self.write_stub(package_dir or os.curdir, ext, True)
if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'):
# Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
def swig_sources(self, sources, *otherargs):
# first do any Pyrex processing
sources = _build_ext.swig_sources(self, sources) or sources
# Then do any actual SWIG stuff on the remainder
return _du_build_ext.swig_sources(self, sources, *otherargs)
def get_ext_filename(self, fullname):
filename = _build_ext.get_ext_filename(self,fullname)
ext = self.ext_map[fullname]
if isinstance(ext,Library):
fn, ext = os.path.splitext(filename)
return self.shlib_compiler.library_filename(fn,libtype)
elif use_stubs and ext._links_to_dynamic:
d,fn = os.path.split(filename)
return os.path.join(d,'dl-'+fn)
else:
return filename
def initialize_options(self):
_build_ext.initialize_options(self)
self.shlib_compiler = None
self.shlibs = []
self.ext_map = {}
def finalize_options(self):
_build_ext.finalize_options(self)
self.extensions = self.extensions or []
self.check_extensions_list(self.extensions)
self.shlibs = [ext for ext in self.extensions
if isinstance(ext,Library)]
if self.shlibs:
self.setup_shlib_compiler()
for ext in self.extensions:
ext._full_name = self.get_ext_fullname(ext.name)
for ext in self.extensions:
fullname = ext._full_name
self.ext_map[fullname] = ext
ltd = ext._links_to_dynamic = \
self.shlibs and self.links_to_dynamic(ext) or False
ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
filename = ext._file_name = self.get_ext_filename(fullname)
libdir = os.path.dirname(os.path.join(self.build_lib,filename))
if ltd and libdir not in ext.library_dirs:
ext.library_dirs.append(libdir)
if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
ext.runtime_library_dirs.append(os.curdir)
def setup_shlib_compiler(self):
compiler = self.shlib_compiler = new_compiler(
compiler=self.compiler, dry_run=self.dry_run, force=self.force
)
if sys.platform == "darwin":
tmp = _config_vars.copy()
try:
# XXX Help! I don't have any idea whether these are right...
_config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
_config_vars['CCSHARED'] = " -dynamiclib"
_config_vars['SO'] = ".dylib"
customize_compiler(compiler)
finally:
_config_vars.clear()
_config_vars.update(tmp)
else:
customize_compiler(compiler)
if self.include_dirs is not None:
compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for (name,value) in self.define:
compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
compiler.undefine_macro(macro)
if self.libraries is not None:
compiler.set_libraries(self.libraries)
if self.library_dirs is not None:
compiler.set_library_dirs(self.library_dirs)
if self.rpath is not None:
compiler.set_runtime_library_dirs(self.rpath)
if self.link_objects is not None:
compiler.set_link_objects(self.link_objects)
# hack so distutils' build_extension() builds a library instead
compiler.link_shared_object = link_shared_object.__get__(compiler)
def get_export_symbols(self, ext):
if isinstance(ext,Library):
return ext.export_symbols
return _build_ext.get_export_symbols(self,ext)
def build_extension(self, ext):
_compiler = self.compiler
try:
if isinstance(ext,Library):
self.compiler = self.shlib_compiler
_build_ext.build_extension(self,ext)
if ext._needs_stub:
self.write_stub(
self.get_finalized_command('build_py').build_lib, ext
)
finally:
self.compiler = _compiler
def links_to_dynamic(self, ext):
"""Return true if 'ext' links to a dynamic lib in the same package"""
# XXX this should check to ensure the lib is actually being built
# XXX as dynamic, and not just using a locally-found version or a
# XXX static-compiled version
libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
for libname in ext.libraries:
if pkg+libname in libnames: return True
return False
def get_outputs(self):
outputs = _build_ext.get_outputs(self)
optimize = self.get_finalized_command('build_py').optimize
for ext in self.extensions:
if ext._needs_stub:
base = os.path.join(self.build_lib, *ext._full_name.split('.'))
outputs.append(base+'.py')
outputs.append(base+'.pyc')
if optimize:
outputs.append(base+'.pyo')
return outputs
def write_stub(self, output_dir, ext, compile=False):
log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
if compile and os.path.exists(stub_file):
raise DistutilsError(stub_file+" already exists! Please delete.")
if not self.dry_run:
f = open(stub_file,'w')
f.write('\n'.join([
"def __bootstrap__():",
" global __bootstrap__, __file__, __loader__",
" import sys, os, pkg_resources, imp"+if_dl(", dl"),
" __file__ = pkg_resources.resource_filename(__name__,%r)"
% os.path.basename(ext._file_name),
" del __bootstrap__",
" if '__loader__' in globals():",
" del __loader__",
if_dl(" old_flags = sys.getdlopenflags()"),
" old_dir = os.getcwd()",
" try:",
" os.chdir(os.path.dirname(__file__))",
if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
" imp.load_dynamic(__name__,__file__)",
" finally:",
if_dl(" sys.setdlopenflags(old_flags)"),
" os.chdir(old_dir)",
"__bootstrap__()",
"" # terminal \n
]))
f.close()
if compile:
from distutils.util import byte_compile
byte_compile([stub_file], optimize=0,
force=True, dry_run=self.dry_run)
optimize = self.get_finalized_command('install_lib').optimize
if optimize > 0:
byte_compile([stub_file], optimize=optimize,
force=True, dry_run=self.dry_run)
if os.path.exists(stub_file) and not self.dry_run:
os.unlink(stub_file)
if use_stubs or os.name=='nt':
# Build shared libraries
#
def link_shared_object(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None
    ):
        self.link(
self.SHARED_LIBRARY, objects, output_libname,
output_dir, libraries, library_dirs, runtime_library_dirs,
export_symbols, debug, extra_preargs, extra_postargs,
build_temp, target_lang
)
else:
# Build static libraries everywhere else
libtype = 'static'
def link_shared_object(self, objects, output_libname, output_dir=None,
libraries=None, library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None
):
# XXX we need to either disallow these attrs on Library instances,
# or warn/abort here if set, or something...
#libraries=None, library_dirs=None, runtime_library_dirs=None,
#export_symbols=None, extra_preargs=None, extra_postargs=None,
#build_temp=None
assert output_dir is None # distutils build_ext doesn't pass this
output_dir,filename = os.path.split(output_libname)
basename, ext = os.path.splitext(filename)
if self.library_filename("x").startswith('lib'):
# strip 'lib' prefix; this is kludgy if some platform uses
# a different prefix
basename = basename[3:]
self.create_static_lib(
objects, basename, output_dir, debug, target_lang
)
| apache-2.0 |
ianclegg/requests-ntlm2 | requests_ntlm2/__init__.py | 1 | 1172 | # (c) 2015, Ian Clegg <[email protected]>
#
# ntlmlib is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .requests_ntlm2 import HttpNtlm2Auth
from .exceptions import InvalidCredentialsError, NtlmAuthenticationError
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
__all__ = ['HttpNtlm2Auth', 'NtlmAuthenticationError', 'InvalidCredentialsError']
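# A minimal usage sketch (hypothetical URL and credentials; assumes the same
# constructor signature as requests_ntlm's HttpNtlmAuth):
#
#     import requests
#     from requests_ntlm2 import HttpNtlm2Auth
#     session = requests.Session()
#     session.auth = HttpNtlm2Auth('DOMAIN\\user', 'password')
#     response = session.get('http://internal.example/page')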
| apache-2.0 |
Williams224/davinci-scripts | ksteta3pi/Consideredbkg/MC_12_11102202_MagUp.py | 3 | 17903 | #-- GAUDI jobOptions generated on Fri Jul 17 16:30:35 2015
#-- Contains event types :
#-- 11102202 - 178 files - 3032774 events - 655.11 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass Step-124834
#-- StepId : 124834
#-- StepName : Reco14a for MC
#-- ApplicationName : Brunel
#-- ApplicationVersion : v43r2p7
#-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-124620
#-- StepId : 124620
#-- StepName : Digi13 with G4 dE/dx
#-- ApplicationName : Boole
#-- ApplicationVersion : v26r3
#-- OptionFiles : $APPCONFIGOPTS/Boole/Default.py;$APPCONFIGOPTS/Boole/DataType-2012.py;$APPCONFIGOPTS/Boole/Boole-SiG4EnergyDeposit.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-124632
#-- StepId : 124632
#-- StepName : TCK-0x409f0045 Flagged for Sim08 2012
#-- ApplicationName : Moore
#-- ApplicationVersion : v14r8p1
#-- OptionFiles : $APPCONFIGOPTS/Moore/MooreSimProductionWithL0Emulation.py;$APPCONFIGOPTS/Conditions/TCK-0x409f0045.py;$APPCONFIGOPTS/Moore/DataType-2012.py;$APPCONFIGOPTS/L0/L0TCK-0x0045.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-125999
#-- StepId : 125999
#-- StepName : Sim08d - 2012 - MU - Pythia8
#-- ApplicationName : Gauss
#-- ApplicationVersion : v45r6
#-- OptionFiles : $APPCONFIGOPTS/Gauss/Sim08-Beam4000GeV-mu100-2012-nu2.5.py;$DECFILESROOT/options/@{eventType}.py;$LBPYTHIA8ROOT/options/Pythia8.py;$APPCONFIGOPTS/Gauss/G4PL_FTFP_BERT_EmNoCuts.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : dddb-20130929-1
#-- CONDDB : sim-20130522-1-vc-mu100
#-- ExtraPackages : AppConfig.v3r182;DecFiles.v27r16
#-- Visible : Y
#-- Processing Pass Step-124630
#-- StepId : 124630
#-- StepName : Stripping20-NoPrescalingFlagged for Sim08
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v32r2p1
#-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping20-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
from Gaudi.Configuration import *
from GaudiConf import IOHelper
IOHelper('ROOT').inputFiles(['LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000001_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000002_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000003_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000004_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000005_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000006_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000007_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000008_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000009_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000010_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000011_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000012_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000013_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000014_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000015_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000016_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000017_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000018_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000019_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000020_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000021_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000022_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000023_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000024_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000025_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000026_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000027_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000028_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000029_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000030_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000031_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000032_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000033_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000034_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000035_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000036_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000037_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000038_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000039_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000040_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000041_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000042_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000043_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000044_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000045_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000046_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000047_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000048_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000049_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000050_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000051_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000052_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000053_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000054_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000055_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000056_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000057_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000058_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000059_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000060_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000061_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000062_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000063_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000064_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000065_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000066_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000067_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000068_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000069_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000070_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000071_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000072_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000073_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000074_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000075_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000076_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000077_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000078_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000079_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000080_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000081_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000082_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000083_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000084_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000085_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000086_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000087_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034086/0000/00034086_00000088_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000001_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000002_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000003_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000004_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000005_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000006_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000007_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000008_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000009_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000010_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000011_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000012_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000013_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000014_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000015_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000016_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000017_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000018_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000019_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000020_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000021_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000022_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000023_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000024_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000025_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000026_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000027_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000028_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000029_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000030_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000031_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000032_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000033_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000034_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000035_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000036_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000037_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000038_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000039_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000040_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000041_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000042_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000043_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000044_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000045_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000046_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000047_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000048_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000049_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000050_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000051_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000052_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000053_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000054_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000055_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000056_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000057_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000058_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000059_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000060_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000062_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000063_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000064_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000065_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000066_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000067_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000068_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000069_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000070_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000071_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000072_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000073_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000074_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000075_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000076_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000077_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000078_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000079_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000080_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000081_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000082_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000083_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000084_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000085_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000086_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000087_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000088_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000089_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000090_1.allstreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00034094/0000/00034094_00000091_1.allstreams.dst'
], clear=True)
| mit |
gymnasium/edx-platform | common/test/acceptance/pages/lms/discussion.py | 11 | 29079 | from contextlib import contextmanager
from bok_choy.javascript import wait_for_js
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise
from common.test.acceptance.pages.common.utils import hover
from common.test.acceptance.pages.lms.course_page import CoursePage
from common.test.acceptance.tests.helpers import is_focused_on_element
class DiscussionPageMixin(object):
def is_ajax_finished(self):
return self.browser.execute_script("return jQuery.active") == 0
def find_visible_element(self, selector):
"""
Finds a single visible element with the specified selector.
"""
full_selector = selector
if self.root_selector:
full_selector = self.root_selector + " " + full_selector
elements = self.q(css=full_selector)
return next((element for element in elements if element.is_displayed()), None)
@property
def new_post_button(self):
"""
Returns the new post button if visible, else it returns None.
"""
return self.find_visible_element(".new-post-btn")
@property
def new_post_form(self):
"""
Returns the new post form if visible, else it returns None.
"""
return self.find_visible_element(".forum-new-post-form")
def click_new_post_button(self):
"""
Clicks the 'New Post' button.
"""
self.wait_for(
lambda: self.new_post_button,
description="Waiting for new post button"
)
self.new_post_button.click()
self.wait_for(
lambda: self.new_post_form,
description="Waiting for new post form"
)
def click_cancel_new_post(self):
"""
Clicks the 'Cancel' button from the new post form.
"""
self.click_element(".cancel")
self.wait_for(
lambda: not self.new_post_form,
"Waiting for new post form to close"
)
class DiscussionThreadPage(PageObject, DiscussionPageMixin):
url = None
def __init__(self, browser, thread_selector):
super(DiscussionThreadPage, self).__init__(browser)
self.thread_selector = thread_selector
def _find_within(self, selector):
"""
Returns a query corresponding to the given CSS selector within the scope
of this thread page
"""
return self.q(css=self.thread_selector + " " + selector)
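    # Illustrative sketch (hypothetical selector): with
    # thread_selector=".discussion-article[data-id='t1']", calling
    # _find_within(".response-count") queries the scoped selector
    # ".discussion-article[data-id='t1'] .response-count".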
def is_browser_on_page(self):
return self.q(css=self.thread_selector).visible
def _get_element_text(self, selector):
"""
Returns the text of the first element matching the given selector, or
None if no such element exists
"""
text_list = self._find_within(selector).text
return text_list[0] if text_list else None
def is_element_visible(self, selector):
"""
Returns true if the element matching the specified selector is visible.
Args:
selector (str): The CSS selector that matches the desired element.
Returns:
bool: True if the element is visible.
"""
query = self._find_within(selector)
return query.present and query.visible
@contextmanager
def secondary_action_menu_open(self, ancestor_selector):
"""
Given the selector for an ancestor of a secondary menu, return a context
manager that will open and close the menu
"""
self.wait_for_ajax()
self._find_within(ancestor_selector + " .action-more").click()
EmptyPromise(
lambda: self.is_element_visible(ancestor_selector + " .actions-dropdown"),
"Secondary action menu opened"
).fulfill()
yield
if self.is_element_visible(ancestor_selector + " .actions-dropdown"):
self._find_within(ancestor_selector + " .action-more").click()
EmptyPromise(
lambda: not self.is_element_visible(ancestor_selector + " .actions-dropdown"),
"Secondary action menu closed"
).fulfill()
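    # Illustrative usage sketch (hypothetical selector): the context manager
    # above opens the secondary menu, yields while it is open, and closes it
    # again afterwards, e.g.
    #
    #     with self.secondary_action_menu_open(".response_1 .discussion-response"):
    #         self._find_within(".response_1 .discussion-response .action-edit").first.click()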
def get_group_visibility_label(self):
"""
Returns the group visibility label shown for the thread.
"""
return self._get_element_text(".group-visibility-label")
def get_response_total_text(self):
"""Returns the response count text, or None if not present"""
self.wait_for_ajax()
return self._get_element_text(".response-count")
def get_num_displayed_responses(self):
"""Returns the number of responses actually rendered"""
return len(self._find_within(".discussion-response"))
def get_shown_responses_text(self):
"""Returns the shown response count text, or None if not present"""
return self._get_element_text(".response-display-count")
def get_load_responses_button_text(self):
"""Returns the load more responses button text, or None if not present"""
return self._get_element_text(".load-response-button")
def load_more_responses(self):
"""Clicks the load more responses button and waits for responses to load"""
self._find_within(".load-response-button").click()
EmptyPromise(
self.is_ajax_finished,
"Loading more Responses"
).fulfill()
def has_add_response_button(self):
"""Returns true if the add response button is visible, false otherwise"""
return self.is_element_visible(".add-response-btn")
def has_discussion_reply_editor(self):
"""
        Returns true if the discussion reply editor is visible
"""
return self.is_element_visible(".discussion-reply-new")
def click_add_response_button(self):
"""
Clicks the add response button and ensures that the response text
field receives focus
"""
self._find_within(".add-response-btn").first.click()
EmptyPromise(
lambda: self._find_within(".discussion-reply-new textarea:focus").present,
"Response field received focus"
).fulfill()
@wait_for_js
def is_response_editor_visible(self, response_id):
"""Returns true if the response editor is present, false otherwise"""
return self.is_element_visible(".response_{} .edit-post-body".format(response_id))
@wait_for_js
def is_discussion_body_visible(self):
return self.is_element_visible(".post-body")
def verify_mathjax_preview_available(self):
""" Checks that MathJax Preview css class is present """
self.wait_for(
lambda: len(self.q(css=".MathJax_Preview").text) > 0 and self.q(css=".MathJax_Preview").text[0] == "",
description="MathJax Preview is rendered"
)
def verify_mathjax_rendered(self):
""" Checks that MathJax css class is present """
self.wait_for(
lambda: self.is_element_visible(".MathJax_SVG"),
description="MathJax Preview is rendered"
)
def is_response_visible(self, comment_id):
"""Returns true if the response is viewable onscreen"""
self.wait_for_ajax()
return self.is_element_visible(".response_{} .response-body".format(comment_id))
def is_response_editable(self, response_id):
"""Returns true if the edit response button is present, false otherwise"""
with self.secondary_action_menu_open(".response_{} .discussion-response".format(response_id)):
return self.is_element_visible(".response_{} .discussion-response .action-edit".format(response_id))
def is_response_deletable(self, response_id):
"""
Returns true if the delete response button is present, false otherwise
"""
with self.secondary_action_menu_open(".response_{} .discussion-response".format(response_id)):
return self.is_element_visible(".response_{} .discussion-response .action-delete".format(response_id))
def get_response_body(self, response_id):
return self._get_element_text(".response_{} .response-body".format(response_id))
def start_response_edit(self, response_id):
"""Click the edit button for the response, loading the editing view"""
with self.secondary_action_menu_open(".response_{} .discussion-response".format(response_id)):
self._find_within(".response_{} .discussion-response .action-edit".format(response_id)).first.click()
EmptyPromise(
lambda: self.is_response_editor_visible(response_id),
"Response edit started"
).fulfill()
def get_link_href(self):
"""Extracts href attribute of the referenced link"""
link_href = self._find_within(".post-body p a").attrs('href')
return link_href[0] if link_href else None
def get_response_vote_count(self, response_id):
vote_count_css = '.response_{} .discussion-response .action-vote'.format(response_id)
vote_count_element = self.browser.find_element_by_css_selector(vote_count_css)
# To get the vote count, one must hover over the element first.
hover(self.browser, vote_count_element)
return self._get_element_text(".response_{} .discussion-response .action-vote .vote-count".format(response_id))
def vote_response(self, response_id):
current_count = self.get_response_vote_count(response_id)
self._find_within(".response_{} .discussion-response .action-vote".format(response_id)).first.click()
self.wait_for(
lambda: current_count != self.get_response_vote_count(response_id),
description="Vote updated for {response_id}".format(response_id=response_id)
)
def cannot_vote_response(self, response_id):
"""Assert that the voting button is not visible on this response"""
return not self.is_element_visible(".response_{} .discussion-response .action-vote".format(response_id))
def is_response_reported(self, response_id):
return self.is_element_visible(".response_{} .discussion-response .post-label-reported".format(response_id))
def report_response(self, response_id):
with self.secondary_action_menu_open(".response_{} .discussion-response".format(response_id)):
self._find_within(".response_{} .discussion-response .action-report".format(response_id)).first.click()
self.wait_for_ajax()
EmptyPromise(
lambda: self.is_response_reported(response_id),
"Response is reported"
).fulfill()
def cannot_report_response(self, response_id):
"""Assert that the reporting button is not visible on this response"""
return not self.is_element_visible(".response_{} .discussion-response .action-report".format(response_id))
def is_response_endorsed(self, response_id):
return "endorsed" in self._get_element_text(".response_{} .discussion-response .posted-details".format(response_id))
def endorse_response(self, response_id):
self._find_within(".response_{} .discussion-response .action-endorse".format(response_id)).first.click()
self.wait_for_ajax()
EmptyPromise(
lambda: self.is_response_endorsed(response_id),
"Response edit started"
).fulfill()
def set_response_editor_value(self, response_id, new_body):
"""Replace the contents of the response editor"""
self._find_within(".response_{} .discussion-response .wmd-input".format(response_id)).fill(new_body)
def verify_link_editor_error_messages_shown(self):
"""
Confirm that the error messages are displayed in the editor.
"""
def errors_visible():
"""
Returns True if both errors are visible, False otherwise.
"""
return (
self.q(css="#new-url-input-field-message.has-error").visible and
self.q(css="#new-url-desc-input-field-message.has-error").visible
)
self.wait_for(errors_visible, "Form errors should be visible.")
def add_content_via_editor_button(self, content_type, response_id, url, description, is_decorative=False):
"""Replace the contents of the response editor"""
self._find_within(
"#wmd-{}-button-edit-post-body-{}".format(
content_type,
response_id,
)
).click()
self.q(css='#new-url-input').fill(url)
self.q(css='#new-url-desc-input').fill(description)
if is_decorative:
self.q(css='#img-is-decorative').click()
self.q(css='input[value="OK"]').click()
def submit_response_edit(self, response_id, new_response_body):
"""Click the submit button on the response editor"""
def submit_response_check_func():
"""
Tries to click "Update post" and returns True if the post
was successfully updated, False otherwise.
"""
self._find_within(
".response_{} .discussion-response .post-update".format(
response_id
)
).first.click()
return (
not self.is_response_editor_visible(response_id) and
self.is_response_visible(response_id) and
self.get_response_body(response_id) == new_response_body
)
        self.wait_for(submit_response_check_func, "Response edit succeeded")
def is_show_comments_visible(self, response_id):
"""Returns true if the "show comments" link is visible for a response"""
return self.is_element_visible(".response_{} .action-show-comments".format(response_id))
def show_comments(self, response_id):
"""Click the "show comments" link for a response"""
self._find_within(".response_{} .action-show-comments".format(response_id)).first.click()
EmptyPromise(
lambda: self.is_element_visible(".response_{} .comments".format(response_id)),
"Comments shown"
).fulfill()
def is_add_comment_visible(self, response_id):
"""Returns true if the "add comment" form is visible for a response"""
return self.is_element_visible("#wmd-input-comment-body-{}".format(response_id))
def is_comment_visible(self, comment_id):
"""Returns true if the comment is viewable onscreen"""
return self.is_element_visible("#comment_{} .response-body".format(comment_id))
def get_comment_body(self, comment_id):
return self._get_element_text("#comment_{} .response-body".format(comment_id))
def is_comment_deletable(self, comment_id):
"""Returns true if the delete comment button is present, false otherwise"""
with self.secondary_action_menu_open("#comment_{}".format(comment_id)):
return self.is_element_visible("#comment_{} .action-delete".format(comment_id))
def delete_comment(self, comment_id):
with self.handle_alert():
with self.secondary_action_menu_open("#comment_{}".format(comment_id)):
self._find_within("#comment_{} .action-delete".format(comment_id)).first.click()
EmptyPromise(
lambda: not self.is_comment_visible(comment_id),
"Deleted comment was removed"
).fulfill()
def is_comment_editable(self, comment_id):
"""Returns true if the edit comment button is present, false otherwise"""
with self.secondary_action_menu_open("#comment_{}".format(comment_id)):
return self.is_element_visible("#comment_{} .action-edit".format(comment_id))
def is_comment_editor_visible(self, comment_id):
"""Returns true if the comment editor is present, false otherwise"""
return self.is_element_visible(".edit-comment-body[data-id='{}']".format(comment_id))
def _get_comment_editor_value(self, comment_id):
return self._find_within("#wmd-input-edit-comment-body-{}".format(comment_id)).text[0]
def start_comment_edit(self, comment_id):
"""Click the edit button for the comment, loading the editing view"""
old_body = self.get_comment_body(comment_id)
with self.secondary_action_menu_open("#comment_{}".format(comment_id)):
self._find_within("#comment_{} .action-edit".format(comment_id)).first.click()
EmptyPromise(
lambda: (
self.is_comment_editor_visible(comment_id) and
not self.is_comment_visible(comment_id) and
self._get_comment_editor_value(comment_id) == old_body
),
"Comment edit started"
).fulfill()
def set_comment_editor_value(self, comment_id, new_body):
"""Replace the contents of the comment editor"""
self._find_within("#comment_{} .wmd-input".format(comment_id)).fill(new_body)
def submit_comment_edit(self, comment_id, new_comment_body):
"""Click the submit button on the comment editor"""
self._find_within("#comment_{} .post-update".format(comment_id)).first.click()
self.wait_for_ajax()
EmptyPromise(
lambda: (
not self.is_comment_editor_visible(comment_id) and
self.is_comment_visible(comment_id) and
self.get_comment_body(comment_id) == new_comment_body
),
"Comment edit succeeded"
).fulfill()
def cancel_comment_edit(self, comment_id, original_body):
"""Click the cancel button on the comment editor"""
self._find_within("#comment_{} .post-cancel".format(comment_id)).first.click()
EmptyPromise(
lambda: (
not self.is_comment_editor_visible(comment_id) and
self.is_comment_visible(comment_id) and
self.get_comment_body(comment_id) == original_body
),
"Comment edit was canceled"
).fulfill()
class DiscussionSortPreferencePage(CoursePage):
"""
Page that contain the discussion board with sorting options
"""
def __init__(self, browser, course_id):
super(DiscussionSortPreferencePage, self).__init__(browser, course_id)
self.url_path = "discussion/forum"
def is_browser_on_page(self):
"""
Return true if the browser is on the right page else false.
"""
return self.q(css="body.discussion .forum-nav-sort-control").present
def show_all_discussions(self):
""" Show the list of all discussions. """
self.q(css=".all-topics").click()
def get_selected_sort_preference(self):
"""
Return the text of option that is selected for sorting.
"""
        # Use execute_script as a workaround to make this test work with the Chrome browser
selected_value = self.browser.execute_script(
'var selected_value = $(".forum-nav-sort-control").val(); return selected_value')
return selected_value
def change_sort_preference(self, sort_by):
"""
Change the option of sorting by clicking on new option.
"""
self.q(css=".forum-nav-sort-control option[value='{0}']".format(sort_by)).click()
# Click initiates an ajax call, waiting for it to complete
self.wait_for_ajax()
def refresh_page(self):
"""
Reload the page.
"""
self.browser.refresh()
class DiscussionTabSingleThreadPage(CoursePage):
def __init__(self, browser, course_id, discussion_id, thread_id):
super(DiscussionTabSingleThreadPage, self).__init__(browser, course_id)
self.thread_page = DiscussionThreadPage(
browser,
"body.discussion .discussion-article[data-id='{thread_id}']".format(thread_id=thread_id)
)
self.url_path = "discussion/forum/{discussion_id}/threads/{thread_id}".format(
discussion_id=discussion_id, thread_id=thread_id
)
def is_browser_on_page(self):
return self.thread_page.is_browser_on_page()
def __getattr__(self, name):
return getattr(self.thread_page, name)
def show_all_discussions(self):
""" Show the list of all discussions. """
self.q(css=".all-topics").click()
def close_open_thread(self):
with self.thread_page.secondary_action_menu_open(".thread-main-wrapper"):
self._find_within(".thread-main-wrapper .action-close").first.click()
def _thread_is_rendered_successfully(self, thread_id):
return self.q(css=".discussion-article[data-id='{}']".format(thread_id)).visible
def click_and_open_thread(self, thread_id):
"""
Click specific thread on the list.
"""
thread_selector = "li[data-id='{}']".format(thread_id)
self.show_all_discussions()
self.q(css=thread_selector).first.click()
EmptyPromise(
lambda: self._thread_is_rendered_successfully(thread_id),
"Thread has been rendered"
).fulfill()
def check_threads_rendered_successfully(self, thread_count):
"""
Count the number of threads available on page.
"""
return len(self.q(css=".forum-nav-thread").results) == thread_count
class InlineDiscussionPage(PageObject, DiscussionPageMixin):
"""
Acceptance tests for inline discussions.
"""
url = None
def __init__(self, browser, discussion_id):
super(InlineDiscussionPage, self).__init__(browser)
self.root_selector = (
".discussion-module[data-discussion-id='{discussion_id}'] ".format(
discussion_id=discussion_id
)
)
def _find_within(self, selector):
"""
Returns a query corresponding to the given CSS selector within the scope
of this discussion page
"""
return self.q(css=self.root_selector + " " + selector)
def is_browser_on_page(self):
self.wait_for_ajax()
return self.q(css=self.root_selector).present
def is_discussion_expanded(self):
return self._find_within(".discussion").present
def expand_discussion(self):
"""Click the link to expand the discussion"""
self._find_within(".discussion-show").first.click()
EmptyPromise(
self.is_discussion_expanded,
"Discussion expanded"
).fulfill()
def get_num_displayed_threads(self):
return len(self._find_within(".forum-nav-thread"))
def element_exists(self, selector):
return self.q(css=self.root_selector + " " + selector).present
def click_element(self, selector):
self.wait_for_element_presence(
"{discussion} {selector}".format(discussion=self.root_selector, selector=selector),
"{selector} is visible".format(selector=selector)
)
self._find_within(selector).click()
def is_new_post_button_visible(self):
"""
        Check if the new post button is present and visible
"""
return self._is_element_visible('.new-post-btn')
@wait_for_js
def _is_element_visible(self, selector):
query = self._find_within(selector)
return query.present and query.visible
def show_thread(self, thread_id):
"""
Clicks the link for the specified thread to show the detailed view.
"""
self.wait_for_element_presence('.forum-nav-thread-link', 'Thread list has loaded')
thread_selector = ".forum-nav-thread[data-id='{thread_id}'] .forum-nav-thread-link".format(thread_id=thread_id)
self._find_within(thread_selector).first.click()
self.thread_page = InlineDiscussionThreadPage(self.browser, thread_id) # pylint: disable=attribute-defined-outside-init
self.thread_page.wait_for_page()
class InlineDiscussionThreadPage(DiscussionThreadPage):
"""
Page object to manipulate an individual thread view in an inline discussion.
"""
def __init__(self, browser, thread_id):
super(InlineDiscussionThreadPage, self).__init__(
browser,
".discussion-module .discussion-article[data-id='{thread_id}']".format(thread_id=thread_id)
)
def is_thread_anonymous(self):
return not self.q(css=".posted-details > .username").present
@wait_for_js
def check_if_selector_is_focused(self, selector):
"""
Check if selector is focused
"""
return is_focused_on_element(self.browser, selector)
class DiscussionUserProfilePage(CoursePage):
TEXT_NEXT = u'Next >'
TEXT_PREV = u'< Previous'
PAGING_SELECTOR = ".discussion-pagination[data-page-number]"
def __init__(self, browser, course_id, user_id, username, page=1):
super(DiscussionUserProfilePage, self).__init__(browser, course_id)
self.url_path = "discussion/forum/dummy/users/{}?page={}".format(user_id, page)
self.username = username
def is_browser_on_page(self):
return (
self.q(css='.discussion-user-threads[data-course-id="{}"]'.format(self.course_id)).present
and
self.q(css='.user-name').present
and
self.q(css='.user-name').text[0] == self.username
)
@wait_for_js
def is_window_on_top(self):
return self.browser.execute_script("return $('html, body').offset().top") == 0
def get_shown_thread_ids(self):
elems = self.q(css="li.forum-nav-thread")
return [elem.get_attribute("data-id") for elem in elems]
def click_on_sidebar_username(self):
self.wait_for_page()
self.q(css='.user-name').first.click()
def get_user_roles(self):
"""Get user roles"""
return self.q(css='.user-roles').text[0]
class DiscussionTabHomePage(CoursePage, DiscussionPageMixin):
ALERT_SELECTOR = ".discussion-body .forum-nav .search-alert"
def __init__(self, browser, course_id):
super(DiscussionTabHomePage, self).__init__(browser, course_id)
self.url_path = "discussion/forum/"
self.root_selector = None
def is_browser_on_page(self):
return self.q(css=".discussion-body section.home-header").present
def perform_search(self, text="dummy"):
self.q(css=".search-input").fill(text + chr(10))
EmptyPromise(
self.is_ajax_finished,
"waiting for server to return result"
).fulfill()
def is_element_visible(self, selector):
"""
Returns true if the element matching the specified selector is visible.
"""
query = self.q(css=selector)
return query.present and query.visible
def is_checkbox_selected(self, selector):
"""
        Returns true or false depending on whether the matching checkbox is checked.
"""
return self.q(css=selector).selected
def refresh_and_wait_for_load(self):
"""
Refresh the page and wait for all resources to load.
"""
self.browser.refresh()
self.wait_for_page()
def get_search_alert_messages(self):
return self.q(css=self.ALERT_SELECTOR + " .message").text
def get_search_alert_links(self):
return self.q(css=self.ALERT_SELECTOR + " .link-jump")
def dismiss_alert_message(self, text):
"""
        Dismiss any search alert message containing the specified text.
"""
def _match_messages(text):
return self.q(css=".search-alert").filter(lambda elem: text in elem.text)
for alert_id in _match_messages(text).attrs("id"):
self.q(css="{}#{} .dismiss".format(self.ALERT_SELECTOR, alert_id)).click()
EmptyPromise(
lambda: _match_messages(text).results == [],
"waiting for dismissed alerts to disappear"
).fulfill()
def click_element(self, selector):
"""
Clicks the element specified by selector
"""
element = self.q(css=selector)
return element.click()
def set_new_post_editor_value(self, new_body):
"""
Set the Discussions new post editor (wmd) with the content in new_body
"""
self.q(css=".wmd-input").fill(new_body)
def get_new_post_preview_value(self, selector=".wmd-preview > *"):
"""
Get the rendered preview of the contents of the Discussions new post editor
Waits for content to appear, as the preview is triggered on debounced/delayed onchange
"""
self.wait_for_element_visibility(selector, "WMD preview pane has contents", timeout=10)
return self.q(css=".wmd-preview").html[0]
def get_new_post_preview_text(self):
"""
Get the rendered preview of the contents of the Discussions new post editor
Waits for content to appear, as the preview is triggered on debounced/delayed onchange
"""
self.wait_for_element_visibility(".wmd-preview > div", "WMD preview pane has contents", timeout=10)
return self.q(css=".wmd-preview").text[0]
| agpl-3.0 |
utds3lab/pemu | scripts/tracetool/backend/dtrace.py | 94 | 2484 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
DTrace/SystemTAP backend.
"""
__author__ = "Lluís Vilanova <[email protected]>"
__copyright__ = "Copyright 2012, Lluís Vilanova <[email protected]>"
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "[email protected]"
from tracetool import out
PUBLIC = True
PROBEPREFIX = None
def _probeprefix():
if PROBEPREFIX is None:
raise ValueError("you must set PROBEPREFIX")
return PROBEPREFIX
BINARY = None
def _binary():
if BINARY is None:
raise ValueError("you must set BINARY")
return BINARY
def c(events):
pass
def h(events):
out('#include "trace/generated-tracers-dtrace.h"',
'')
for e in events:
out('static inline void trace_%(name)s(%(args)s) {',
' QEMU_%(uppername)s(%(argnames)s);',
'}',
name = e.name,
args = e.args,
uppername = e.name.upper(),
argnames = ", ".join(e.args.names()),
)
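# Illustrative output sketch (assumed, for a hypothetical event
# "vcpu_start(int id)"): the h() backend above would emit roughly
#
#     static inline void trace_vcpu_start(int id) {
#         QEMU_VCPU_START(id);
#     }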
def d(events):
out('provider qemu {')
for e in events:
args = str(e.args)
# DTrace provider syntax expects foo() for empty
# params, not foo(void)
if args == 'void':
args = ''
# Define prototype for probe arguments
out('',
'probe %(name)s(%(args)s);',
name = e.name,
args = args,
)
out('',
'};')
# Technically 'self' is not used by systemtap yet, but
# they recommended we keep it in the reserved list anyway
RESERVED_WORDS = (
'break', 'catch', 'continue', 'delete', 'else', 'for',
'foreach', 'function', 'global', 'if', 'in', 'limit',
'long', 'next', 'probe', 'return', 'self', 'string',
'try', 'while'
)
def stap(events):
for e in events:
# Define prototype for probe arguments
out('probe %(probeprefix)s.%(name)s = process("%(binary)s").mark("%(name)s")',
'{',
probeprefix = _probeprefix(),
name = e.name,
binary = _binary(),
)
i = 1
if len(e.args) > 0:
for name in e.args.names():
# Append underscore to reserved keywords
if name in RESERVED_WORDS:
name += '_'
out(' %s = $arg%d;' % (name, i))
i += 1
out('}')
out()
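# Illustrative output sketch (assumed PROBEPREFIX="qemu.system" and
# BINARY="/usr/bin/qemu", for the same hypothetical "vcpu_start(int id)"
# event): stap() would emit roughly
#
#     probe qemu.system.vcpu_start = process("/usr/bin/qemu").mark("vcpu_start")
#     {
#       id = $arg1;
#     }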
| gpl-2.0 |
cbertinato/pandas | pandas/tests/io/test_packers.py | 1 | 33090 | import datetime
from distutils.version import LooseVersion
import glob
from io import BytesIO
import os
from warnings import catch_warnings
import numpy as np
import pytest
from pandas._libs.tslib import iNaT
from pandas.errors import PerformanceWarning
import pandas
from pandas import (
Categorical, DataFrame, Index, Interval, MultiIndex, NaT, Period, Series,
Timestamp, bdate_range, date_range, period_range)
import pandas.util.testing as tm
from pandas.util.testing import (
assert_categorical_equal, assert_frame_equal, assert_index_equal,
assert_series_equal, ensure_clean)
from pandas.io.packers import read_msgpack, to_msgpack
nan = np.nan
try:
import blosc # NOQA
except ImportError:
_BLOSC_INSTALLED = False
else:
_BLOSC_INSTALLED = True
try:
import zlib # NOQA
except ImportError:
_ZLIB_INSTALLED = False
else:
_ZLIB_INSTALLED = True
@pytest.fixture(scope='module')
def current_packers_data():
# our current version packers data
from pandas.tests.io.generate_legacy_storage_files import (
create_msgpack_data)
return create_msgpack_data()
@pytest.fixture(scope='module')
def all_packers_data():
    # all of our current version packers data
from pandas.tests.io.generate_legacy_storage_files import (
create_data)
return create_data()
def check_arbitrary(a, b):
if isinstance(a, (list, tuple)) and isinstance(b, (list, tuple)):
assert(len(a) == len(b))
for a_, b_ in zip(a, b):
check_arbitrary(a_, b_)
elif isinstance(a, DataFrame):
assert_frame_equal(a, b)
elif isinstance(a, Series):
assert_series_equal(a, b)
elif isinstance(a, Index):
assert_index_equal(a, b)
elif isinstance(a, Categorical):
        # Temporary workaround:
        # Categorical.categories is changed from str to bytes in PY3;
        # maybe the same issue as GH 13591
if b.categories.inferred_type == 'string':
pass
else:
tm.assert_categorical_equal(a, b)
elif a is NaT:
assert b is NaT
elif isinstance(a, Timestamp):
assert a == b
assert a.freq == b.freq
else:
assert(a == b)
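# Illustrative sketch (hypothetical round-tripped objects): check_arbitrary
# recurses through lists/tuples and dispatches to the pandas comparators
# above, e.g.
#
#     check_arbitrary((df, df['A'], None), (df_rec, s_rec, None))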
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
class TestPackers:
def setup_method(self, method):
self.path = '__%s__.msg' % tm.rands(10)
def teardown_method(self, method):
pass
def encode_decode(self, x, compress=None, **kwargs):
with ensure_clean(self.path) as p:
to_msgpack(p, x, compress=compress, **kwargs)
return read_msgpack(p, **kwargs)
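    # Illustrative round-trip sketch (assumed input, mirroring the tests
    # below): serialize an object to a temporary msgpack file, read it back,
    # and assert equality.
    #
    #     df = DataFrame(np.random.randn(10, 2))
    #     assert_frame_equal(df, self.encode_decode(df))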
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
class TestAPI(TestPackers):
def test_string_io(self):
df = DataFrame(np.random.randn(10, 2))
s = df.to_msgpack(None)
result = read_msgpack(s)
tm.assert_frame_equal(result, df)
s = df.to_msgpack()
result = read_msgpack(s)
tm.assert_frame_equal(result, df)
s = df.to_msgpack()
result = read_msgpack(BytesIO(s))
tm.assert_frame_equal(result, df)
s = to_msgpack(None, df)
result = read_msgpack(s)
tm.assert_frame_equal(result, df)
with ensure_clean(self.path) as p:
s = df.to_msgpack()
with open(p, 'wb') as fh:
fh.write(s)
result = read_msgpack(p)
tm.assert_frame_equal(result, df)
def test_path_pathlib(self):
df = tm.makeDataFrame()
result = tm.round_trip_pathlib(df.to_msgpack, read_msgpack)
tm.assert_frame_equal(df, result)
def test_path_localpath(self):
df = tm.makeDataFrame()
result = tm.round_trip_localpath(df.to_msgpack, read_msgpack)
tm.assert_frame_equal(df, result)
def test_iterator_with_string_io(self):
dfs = [DataFrame(np.random.randn(10, 2)) for i in range(5)]
s = to_msgpack(None, *dfs)
for i, result in enumerate(read_msgpack(s, iterator=True)):
tm.assert_frame_equal(result, dfs[i])
def test_invalid_arg(self):
# GH10369
class A:
def __init__(self):
self.read = 0
msg = "Invalid file path or buffer object type: <class '{}'>"
with pytest.raises(ValueError, match=msg.format('NoneType')):
read_msgpack(path_or_buf=None)
with pytest.raises(ValueError, match=msg.format('dict')):
read_msgpack(path_or_buf={})
with pytest.raises(ValueError, match=msg.format(r'.*\.A')):
read_msgpack(path_or_buf=A())
class TestNumpy(TestPackers):
def test_numpy_scalar_float(self):
x = np.float32(np.random.rand())
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
def test_numpy_scalar_complex(self):
x = np.complex64(np.random.rand() + 1j * np.random.rand())
x_rec = self.encode_decode(x)
assert np.allclose(x, x_rec)
def test_scalar_float(self):
x = np.random.rand()
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
def test_scalar_bool(self):
x = np.bool_(1)
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
x = np.bool_(0)
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
def test_scalar_complex(self):
x = np.random.rand() + 1j * np.random.rand()
x_rec = self.encode_decode(x)
assert np.allclose(x, x_rec)
def test_list_numpy_float(self):
x = [np.float32(np.random.rand()) for i in range(5)]
x_rec = self.encode_decode(x)
# current msgpack cannot distinguish list/tuple
tm.assert_almost_equal(tuple(x), x_rec)
x_rec = self.encode_decode(tuple(x))
tm.assert_almost_equal(tuple(x), x_rec)
def test_list_numpy_float_complex(self):
if not hasattr(np, 'complex128'):
pytest.skip('numpy can not handle complex128')
x = [np.float32(np.random.rand()) for i in range(5)] + \
[np.complex128(np.random.rand() + 1j * np.random.rand())
for i in range(5)]
x_rec = self.encode_decode(x)
assert np.allclose(x, x_rec)
def test_list_float(self):
x = [np.random.rand() for i in range(5)]
x_rec = self.encode_decode(x)
# current msgpack cannot distinguish list/tuple
tm.assert_almost_equal(tuple(x), x_rec)
x_rec = self.encode_decode(tuple(x))
tm.assert_almost_equal(tuple(x), x_rec)
def test_list_float_complex(self):
x = [np.random.rand() for i in range(5)] + \
[(np.random.rand() + 1j * np.random.rand()) for i in range(5)]
x_rec = self.encode_decode(x)
assert np.allclose(x, x_rec)
def test_dict_float(self):
x = {'foo': 1.0, 'bar': 2.0}
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
def test_dict_complex(self):
x = {'foo': 1.0 + 1.0j, 'bar': 2.0 + 2.0j}
x_rec = self.encode_decode(x)
tm.assert_dict_equal(x, x_rec)
for key in x:
tm.assert_class_equal(x[key], x_rec[key], obj="complex value")
def test_dict_numpy_float(self):
x = {'foo': np.float32(1.0), 'bar': np.float32(2.0)}
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
def test_dict_numpy_complex(self):
x = {'foo': np.complex128(1.0 + 1.0j),
'bar': np.complex128(2.0 + 2.0j)}
x_rec = self.encode_decode(x)
tm.assert_dict_equal(x, x_rec)
for key in x:
tm.assert_class_equal(x[key], x_rec[key], obj="numpy complex128")
def test_numpy_array_float(self):
# run multiple times
for n in range(10):
x = np.random.rand(10)
for dtype in ['float32', 'float64']:
x = x.astype(dtype)
x_rec = self.encode_decode(x)
tm.assert_almost_equal(x, x_rec)
def test_numpy_array_complex(self):
x = (np.random.rand(5) + 1j * np.random.rand(5)).astype(np.complex128)
x_rec = self.encode_decode(x)
assert (all(map(lambda x, y: x == y, x, x_rec)) and
x.dtype == x_rec.dtype)
def test_list_mixed(self):
x = [1.0, np.float32(3.5), np.complex128(4.25), 'foo', np.bool_(1)]
x_rec = self.encode_decode(x)
# current msgpack cannot distinguish list/tuple
tm.assert_almost_equal(tuple(x), x_rec)
x_rec = self.encode_decode(tuple(x))
tm.assert_almost_equal(tuple(x), x_rec)
class TestBasic(TestPackers):
def test_timestamp(self):
for i in [Timestamp(
'20130101'), Timestamp('20130101', tz='US/Eastern'),
Timestamp('201301010501')]:
i_rec = self.encode_decode(i)
assert i == i_rec
def test_nat(self):
nat_rec = self.encode_decode(NaT)
assert NaT is nat_rec
def test_datetimes(self):
for i in [datetime.datetime(2013, 1, 1),
datetime.datetime(2013, 1, 1, 5, 1),
datetime.date(2013, 1, 1),
np.datetime64(datetime.datetime(2013, 1, 5, 2, 15))]:
i_rec = self.encode_decode(i)
assert i == i_rec
def test_timedeltas(self):
for i in [datetime.timedelta(days=1),
datetime.timedelta(days=1, seconds=10),
np.timedelta64(1000000)]:
i_rec = self.encode_decode(i)
assert i == i_rec
def test_periods(self):
# 13463
for i in [Period('2010-09', 'M'), Period('2014-Q1', 'Q')]:
i_rec = self.encode_decode(i)
assert i == i_rec
def test_intervals(self):
# 19967
for i in [Interval(0, 1), Interval(0, 1, 'left'),
Interval(10, 25., 'right')]:
i_rec = self.encode_decode(i)
assert i == i_rec
class TestIndex(TestPackers):
def setup_method(self, method):
super().setup_method(method)
self.d = {
'string': tm.makeStringIndex(100),
'date': tm.makeDateIndex(100),
'int': tm.makeIntIndex(100),
'rng': tm.makeRangeIndex(100),
'float': tm.makeFloatIndex(100),
'empty': Index([]),
'tuple': Index(zip(['foo', 'bar', 'baz'], [1, 2, 3])),
'period': Index(period_range('2012-1-1', freq='M', periods=3)),
'date2': Index(date_range('2013-01-1', periods=10)),
'bdate': Index(bdate_range('2013-01-02', periods=10)),
'cat': tm.makeCategoricalIndex(100),
'interval': tm.makeIntervalIndex(100),
'timedelta': tm.makeTimedeltaIndex(100, 'H')
}
self.mi = {
'reg': MultiIndex.from_tuples([('bar', 'one'), ('baz', 'two'),
('foo', 'two'),
('qux', 'one'), ('qux', 'two')],
names=['first', 'second']),
}
def test_basic_index(self):
for s, i in self.d.items():
i_rec = self.encode_decode(i)
tm.assert_index_equal(i, i_rec)
# datetime with no freq (GH5506)
i = Index([Timestamp('20130101'), Timestamp('20130103')])
i_rec = self.encode_decode(i)
tm.assert_index_equal(i, i_rec)
# datetime with timezone
i = Index([Timestamp('20130101 9:00:00'), Timestamp(
'20130103 11:00:00')]).tz_localize('US/Eastern')
i_rec = self.encode_decode(i)
tm.assert_index_equal(i, i_rec)
def test_multi_index(self):
for s, i in self.mi.items():
i_rec = self.encode_decode(i)
tm.assert_index_equal(i, i_rec)
def test_unicode(self):
i = tm.makeUnicodeIndex(100)
i_rec = self.encode_decode(i)
tm.assert_index_equal(i, i_rec)
    def test_categorical_index(self):
# GH15487
df = DataFrame(np.random.randn(10, 2))
df = df.astype({0: 'category'}).set_index(0)
result = self.encode_decode(df)
tm.assert_frame_equal(result, df)
class TestSeries(TestPackers):
def setup_method(self, method):
super().setup_method(method)
self.d = {}
s = tm.makeStringSeries()
s.name = 'string'
self.d['string'] = s
s = tm.makeObjectSeries()
s.name = 'object'
self.d['object'] = s
s = Series(iNaT, dtype='M8[ns]', index=range(5))
self.d['date'] = s
data = {
'A': [0., 1., 2., 3., np.nan],
'B': [0, 1, 0, 1, 0],
'C': ['foo1', 'foo2', 'foo3', 'foo4', 'foo5'],
'D': date_range('1/1/2009', periods=5),
'E': [0., 1, Timestamp('20100101'), 'foo', 2.],
'F': [Timestamp('20130102', tz='US/Eastern')] * 2 +
[Timestamp('20130603', tz='CET')] * 3,
'G': [Timestamp('20130102', tz='US/Eastern')] * 5,
'H': Categorical([1, 2, 3, 4, 5]),
'I': Categorical([1, 2, 3, 4, 5], ordered=True),
'J': (np.bool_(1), 2, 3, 4, 5),
}
self.d['float'] = Series(data['A'])
self.d['int'] = Series(data['B'])
self.d['mixed'] = Series(data['E'])
self.d['dt_tz_mixed'] = Series(data['F'])
self.d['dt_tz'] = Series(data['G'])
self.d['cat_ordered'] = Series(data['H'])
self.d['cat_unordered'] = Series(data['I'])
self.d['numpy_bool_mixed'] = Series(data['J'])
def test_basic(self):
# run multiple times here
for n in range(10):
for s, i in self.d.items():
i_rec = self.encode_decode(i)
assert_series_equal(i, i_rec)
class TestCategorical(TestPackers):
def setup_method(self, method):
super().setup_method(method)
self.d = {}
self.d['plain_str'] = Categorical(['a', 'b', 'c', 'd', 'e'])
self.d['plain_str_ordered'] = Categorical(['a', 'b', 'c', 'd', 'e'],
ordered=True)
self.d['plain_int'] = Categorical([5, 6, 7, 8])
self.d['plain_int_ordered'] = Categorical([5, 6, 7, 8], ordered=True)
def test_basic(self):
# run multiple times here
for n in range(10):
for s, i in self.d.items():
i_rec = self.encode_decode(i)
assert_categorical_equal(i, i_rec)
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
class TestNDFrame(TestPackers):
def setup_method(self, method):
super().setup_method(method)
data = {
'A': [0., 1., 2., 3., np.nan],
'B': [0, 1, 0, 1, 0],
'C': ['foo1', 'foo2', 'foo3', 'foo4', 'foo5'],
'D': date_range('1/1/2009', periods=5),
'E': [0., 1, Timestamp('20100101'), 'foo', 2.],
'F': [Timestamp('20130102', tz='US/Eastern')] * 5,
'G': [Timestamp('20130603', tz='CET')] * 5,
'H': Categorical(['a', 'b', 'c', 'd', 'e']),
'I': Categorical(['a', 'b', 'c', 'd', 'e'], ordered=True),
}
self.frame = {
'float': DataFrame(dict(A=data['A'], B=Series(data['A']) + 1)),
'int': DataFrame(dict(A=data['B'], B=Series(data['B']) + 1)),
'mixed': DataFrame(data)}
def test_basic_frame(self):
for s, i in self.frame.items():
i_rec = self.encode_decode(i)
assert_frame_equal(i, i_rec)
def test_multi(self):
i_rec = self.encode_decode(self.frame)
for k in self.frame.keys():
assert_frame_equal(self.frame[k], i_rec[k])
packed_items = tuple([self.frame['float'], self.frame['float'].A,
self.frame['float'].B, None])
l_rec = self.encode_decode(packed_items)
check_arbitrary(packed_items, l_rec)
# this is an oddity in that packed lists will be returned as tuples
packed_items = [self.frame['float'], self.frame['float'].A,
self.frame['float'].B, None]
l_rec = self.encode_decode(packed_items)
assert isinstance(l_rec, tuple)
check_arbitrary(packed_items, l_rec)
def test_iterator(self):
packed_items = [self.frame['float'], self.frame['float'].A,
self.frame['float'].B, None]
with ensure_clean(self.path) as path:
to_msgpack(path, *packed_items)
for i, packed in enumerate(read_msgpack(path, iterator=True)):
check_arbitrary(packed, packed_items[i])
def tests_datetimeindex_freq_issue(self):
# GH 5947
# inferring freq on the datetimeindex
df = DataFrame([1, 2, 3], index=date_range('1/1/2013', '1/3/2013'))
result = self.encode_decode(df)
assert_frame_equal(result, df)
df = DataFrame([1, 2], index=date_range('1/1/2013', '1/2/2013'))
result = self.encode_decode(df)
assert_frame_equal(result, df)
def test_dataframe_duplicate_column_names(self):
# GH 9618
expected_1 = DataFrame(columns=['a', 'a'])
expected_2 = DataFrame(columns=[1] * 100)
expected_2.loc[0] = np.random.randn(100)
expected_3 = DataFrame(columns=[1, 1])
expected_3.loc[0] = ['abc', np.nan]
result_1 = self.encode_decode(expected_1)
result_2 = self.encode_decode(expected_2)
result_3 = self.encode_decode(expected_3)
assert_frame_equal(result_1, expected_1)
assert_frame_equal(result_2, expected_2)
assert_frame_equal(result_3, expected_3)
@pytest.mark.filterwarnings("ignore:Sparse:FutureWarning")
@pytest.mark.filterwarnings("ignore:Series.to_sparse:FutureWarning")
@pytest.mark.filterwarnings("ignore:DataFrame.to_sparse:FutureWarning")
class TestSparse(TestPackers):
def _check_roundtrip(self, obj, comparator, **kwargs):
        # currently these are not implemented
# i_rec = self.encode_decode(obj)
# comparator(obj, i_rec, **kwargs)
msg = r"msgpack sparse (series|frame) is not implemented"
with pytest.raises(NotImplementedError, match=msg):
self.encode_decode(obj)
def test_sparse_series(self):
s = tm.makeStringSeries()
s[3:5] = np.nan
ss = s.to_sparse()
self._check_roundtrip(ss, tm.assert_series_equal,
check_series_type=True)
ss2 = s.to_sparse(kind='integer')
self._check_roundtrip(ss2, tm.assert_series_equal,
check_series_type=True)
ss3 = s.to_sparse(fill_value=0)
self._check_roundtrip(ss3, tm.assert_series_equal,
check_series_type=True)
def test_sparse_frame(self):
s = tm.makeDataFrame()
s.loc[3:5, 1:3] = np.nan
s.loc[8:10, -2] = np.nan
ss = s.to_sparse()
self._check_roundtrip(ss, tm.assert_frame_equal,
check_frame_type=True)
ss2 = s.to_sparse(kind='integer')
self._check_roundtrip(ss2, tm.assert_frame_equal,
check_frame_type=True)
ss3 = s.to_sparse(fill_value=0)
self._check_roundtrip(ss3, tm.assert_frame_equal,
check_frame_type=True)
class TestCompression(TestPackers):
"""See https://github.com/pandas-dev/pandas/pull/9783
"""
def setup_method(self, method):
try:
from sqlalchemy import create_engine
self._create_sql_engine = create_engine
except ImportError:
self._SQLALCHEMY_INSTALLED = False
else:
self._SQLALCHEMY_INSTALLED = True
super().setup_method(method)
data = {
'A': np.arange(1000, dtype=np.float64),
'B': np.arange(1000, dtype=np.int32),
'C': list(100 * 'abcdefghij'),
'D': date_range(datetime.datetime(2015, 4, 1), periods=1000),
'E': [datetime.timedelta(days=x) for x in range(1000)],
}
self.frame = {
'float': DataFrame({k: data[k] for k in ['A', 'A']}),
'int': DataFrame({k: data[k] for k in ['B', 'B']}),
'mixed': DataFrame(data),
}
def test_plain(self):
i_rec = self.encode_decode(self.frame)
for k in self.frame.keys():
assert_frame_equal(self.frame[k], i_rec[k])
def _test_compression(self, compress):
i_rec = self.encode_decode(self.frame, compress=compress)
for k in self.frame.keys():
value = i_rec[k]
expected = self.frame[k]
assert_frame_equal(value, expected)
# make sure that we can write to the new frames
for block in value._data.blocks:
assert block.values.flags.writeable
def test_compression_zlib(self):
if not _ZLIB_INSTALLED:
pytest.skip('no zlib')
self._test_compression('zlib')
def test_compression_blosc(self):
if not _BLOSC_INSTALLED:
pytest.skip('no blosc')
self._test_compression('blosc')
def _test_compression_warns_when_decompress_caches(
self, monkeypatch, compress):
not_garbage = []
control = [] # copied data
compress_module = globals()[compress]
real_decompress = compress_module.decompress
def decompress(ob):
"""mock decompress function that delegates to the real
decompress but caches the result and a copy of the result.
"""
res = real_decompress(ob)
not_garbage.append(res) # hold a reference to this bytes object
control.append(bytearray(res)) # copy the data here to check later
return res
# types mapped to values to add in place.
rhs = {
np.dtype('float64'): 1.0,
np.dtype('int32'): 1,
np.dtype('object'): 'a',
np.dtype('datetime64[ns]'): np.timedelta64(1, 'ns'),
np.dtype('timedelta64[ns]'): np.timedelta64(1, 'ns'),
}
with monkeypatch.context() as m, \
tm.assert_produces_warning(PerformanceWarning) as ws:
m.setattr(compress_module, 'decompress', decompress)
i_rec = self.encode_decode(self.frame, compress=compress)
for k in self.frame.keys():
value = i_rec[k]
expected = self.frame[k]
assert_frame_equal(value, expected)
# make sure that we can write to the new frames even though
# we needed to copy the data
for block in value._data.blocks:
assert block.values.flags.writeable
# mutate the data in some way
block.values[0] += rhs[block.dtype]
for w in ws:
# check the messages from our warnings
assert str(w.message) == ('copying data after decompressing; '
'this may mean that decompress is '
'caching its result')
for buf, control_buf in zip(not_garbage, control):
# make sure none of our mutations above affected the
# original buffers
assert buf == control_buf
def test_compression_warns_when_decompress_caches_zlib(self, monkeypatch):
if not _ZLIB_INSTALLED:
pytest.skip('no zlib')
self._test_compression_warns_when_decompress_caches(
monkeypatch, 'zlib')
def test_compression_warns_when_decompress_caches_blosc(self, monkeypatch):
if not _BLOSC_INSTALLED:
pytest.skip('no blosc')
self._test_compression_warns_when_decompress_caches(
monkeypatch, 'blosc')
def _test_small_strings_no_warn(self, compress):
empty = np.array([], dtype='uint8')
with tm.assert_produces_warning(None):
empty_unpacked = self.encode_decode(empty, compress=compress)
tm.assert_numpy_array_equal(empty_unpacked, empty)
assert empty_unpacked.flags.writeable
char = np.array([ord(b'a')], dtype='uint8')
with tm.assert_produces_warning(None):
char_unpacked = self.encode_decode(char, compress=compress)
tm.assert_numpy_array_equal(char_unpacked, char)
assert char_unpacked.flags.writeable
# if this test fails I am sorry because the interpreter is now in a
# bad state where b'a' points to 98 == ord(b'b').
char_unpacked[0] = ord(b'b')
        # we compare the ord of bytes b'a' with unicode 'a' because they should
        # always be the same (unless we were able to mutate the shared
        # character singleton, in which case ord(b'a') == ord(b'b')).
assert ord(b'a') == ord('a')
tm.assert_numpy_array_equal(
char_unpacked,
np.array([ord(b'b')], dtype='uint8'),
)
def test_small_strings_no_warn_zlib(self):
if not _ZLIB_INSTALLED:
pytest.skip('no zlib')
self._test_small_strings_no_warn('zlib')
def test_small_strings_no_warn_blosc(self):
if not _BLOSC_INSTALLED:
pytest.skip('no blosc')
self._test_small_strings_no_warn('blosc')
def test_readonly_axis_blosc(self):
# GH11880
if not _BLOSC_INSTALLED:
pytest.skip('no blosc')
df1 = DataFrame({'A': list('abcd')})
df2 = DataFrame(df1, index=[1., 2., 3., 4.])
assert 1 in self.encode_decode(df1['A'], compress='blosc')
assert 1. in self.encode_decode(df2['A'], compress='blosc')
def test_readonly_axis_zlib(self):
# GH11880
df1 = DataFrame({'A': list('abcd')})
df2 = DataFrame(df1, index=[1., 2., 3., 4.])
assert 1 in self.encode_decode(df1['A'], compress='zlib')
assert 1. in self.encode_decode(df2['A'], compress='zlib')
def test_readonly_axis_blosc_to_sql(self):
# GH11880
if not _BLOSC_INSTALLED:
pytest.skip('no blosc')
if not self._SQLALCHEMY_INSTALLED:
pytest.skip('no sqlalchemy')
expected = DataFrame({'A': list('abcd')})
df = self.encode_decode(expected, compress='blosc')
eng = self._create_sql_engine("sqlite:///:memory:")
df.to_sql('test', eng, if_exists='append')
result = pandas.read_sql_table('test', eng, index_col='index')
result.index.names = [None]
assert_frame_equal(expected, result)
def test_readonly_axis_zlib_to_sql(self):
# GH11880
if not _ZLIB_INSTALLED:
pytest.skip('no zlib')
if not self._SQLALCHEMY_INSTALLED:
pytest.skip('no sqlalchemy')
expected = DataFrame({'A': list('abcd')})
df = self.encode_decode(expected, compress='zlib')
eng = self._create_sql_engine("sqlite:///:memory:")
df.to_sql('test', eng, if_exists='append')
result = pandas.read_sql_table('test', eng, index_col='index')
result.index.names = [None]
assert_frame_equal(expected, result)
class TestEncoding(TestPackers):
def setup_method(self, method):
super().setup_method(method)
data = {
'A': ['\u2019'] * 1000,
'B': np.arange(1000, dtype=np.int32),
'C': list(100 * 'abcdefghij'),
'D': date_range(datetime.datetime(2015, 4, 1), periods=1000),
'E': [datetime.timedelta(days=x) for x in range(1000)],
'G': [400] * 1000
}
self.frame = {
'float': DataFrame({k: data[k] for k in ['A', 'A']}),
'int': DataFrame({k: data[k] for k in ['B', 'B']}),
'mixed': DataFrame(data),
}
self.utf_encodings = ['utf8', 'utf16', 'utf32']
def test_utf(self):
# GH10581
for encoding in self.utf_encodings:
for frame in self.frame.values():
result = self.encode_decode(frame, encoding=encoding)
assert_frame_equal(result, frame)
def test_default_encoding(self):
for frame in self.frame.values():
result = frame.to_msgpack()
expected = frame.to_msgpack(encoding='utf8')
assert result == expected
result = self.encode_decode(frame)
assert_frame_equal(result, frame)
files = glob.glob(os.path.join(os.path.dirname(__file__), "data",
"legacy_msgpack", "*", "*.msgpack"))
@pytest.fixture(params=files)
def legacy_packer(request, datapath):
return datapath(request.param)
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
@pytest.mark.filterwarnings("ignore:Sparse:FutureWarning")
class TestMsgpack:
"""
How to add msgpack tests:
1. Install pandas version intended to output the msgpack.
2. Execute "generate_legacy_storage_files.py" to create the msgpack.
$ python generate_legacy_storage_files.py <output_dir> msgpack
    3. Move the created msgpack to the "data/legacy_msgpack/<version>" directory.
"""
minimum_structure = {'series': ['float', 'int', 'mixed',
'ts', 'mi', 'dup'],
'frame': ['float', 'int', 'mixed', 'mi'],
'panel': ['float'],
'index': ['int', 'date', 'period'],
'mi': ['reg2']}
def check_min_structure(self, data, version):
for typ, v in self.minimum_structure.items():
if typ == "panel":
# FIXME: kludge; get this key out of the legacy file
continue
assert typ in data, '"{0}" not found in unpacked data'.format(typ)
for kind in v:
msg = '"{0}" not found in data["{1}"]'.format(kind, typ)
assert kind in data[typ], msg
def compare(self, current_data, all_data, vf, version):
# GH12277 encoding default used to be latin-1, now utf-8
if LooseVersion(version) < LooseVersion('0.18.0'):
data = read_msgpack(vf, encoding='latin-1')
else:
data = read_msgpack(vf)
if "panel" in data:
# FIXME: kludge; get the key out of the stored file
del data["panel"]
self.check_min_structure(data, version)
for typ, dv in data.items():
assert typ in all_data, ('unpacked data contains '
'extra key "{0}"'
.format(typ))
for dt, result in dv.items():
assert dt in current_data[typ], ('data["{0}"] contains extra '
'key "{1}"'.format(typ, dt))
try:
expected = current_data[typ][dt]
except KeyError:
continue
# use a specific comparator
# if available
comp_method = "compare_{typ}_{dt}".format(typ=typ, dt=dt)
comparator = getattr(self, comp_method, None)
if comparator is not None:
comparator(result, expected, typ, version)
else:
check_arbitrary(result, expected)
return data
def compare_series_dt_tz(self, result, expected, typ, version):
# 8260
# dtype is object < 0.17.0
if LooseVersion(version) < LooseVersion('0.17.0'):
expected = expected.astype(object)
tm.assert_series_equal(result, expected)
else:
tm.assert_series_equal(result, expected)
def compare_frame_dt_mixed_tzs(self, result, expected, typ, version):
# 8260
# dtype is object < 0.17.0
if LooseVersion(version) < LooseVersion('0.17.0'):
expected = expected.astype(object)
tm.assert_frame_equal(result, expected)
else:
tm.assert_frame_equal(result, expected)
def test_msgpacks_legacy(self, current_packers_data, all_packers_data,
legacy_packer, datapath):
version = os.path.basename(os.path.dirname(legacy_packer))
# GH12142 0.17 files packed in P2 can't be read in P3
if (version.startswith('0.17.') and
legacy_packer.split('.')[-4][-1] == '2'):
msg = "Files packed in Py2 can't be read in Py3 ({})"
pytest.skip(msg.format(version))
try:
with catch_warnings(record=True):
self.compare(current_packers_data, all_packers_data,
legacy_packer, version)
except ImportError:
# blosc not installed
pass
def test_msgpack_period_freq(self):
# https://github.com/pandas-dev/pandas/issues/24135
s = Series(np.random.rand(5), index=date_range('20130101', periods=5))
r = read_msgpack(s.to_msgpack())
repr(r)
| bsd-3-clause |
maleficarium/youtube-dl | youtube_dl/extractor/cbsnews.py | 3 | 3855 | # encoding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from .cbs import CBSBaseIE
from ..utils import (
parse_duration,
)
class CBSNewsIE(CBSBaseIE):
IE_DESC = 'CBS News'
_VALID_URL = r'https?://(?:www\.)?cbsnews\.com/(?:news|videos)/(?P<id>[\da-z_-]+)'
_TESTS = [
{
'url': 'http://www.cbsnews.com/news/tesla-and-spacex-elon-musks-industrial-empire/',
'info_dict': {
'id': 'tesla-and-spacex-elon-musks-industrial-empire',
'ext': 'flv',
'title': 'Tesla and SpaceX: Elon Musk\'s industrial empire',
'thumbnail': 'http://beta.img.cbsnews.com/i/2014/03/30/60147937-2f53-4565-ad64-1bdd6eb64679/60-0330-pelley-640x360.jpg',
'duration': 791,
},
'params': {
# rtmp download
'skip_download': True,
},
},
{
'url': 'http://www.cbsnews.com/videos/fort-hood-shooting-army-downplays-mental-illness-as-cause-of-attack/',
'info_dict': {
'id': 'SNJBOYzXiWBOvaLsdzwH8fmtP1SCd91Y',
'ext': 'mp4',
'title': 'Fort Hood shooting: Army downplays mental illness as cause of attack',
'description': 'md5:4a6983e480542d8b333a947bfc64ddc7',
'upload_date': '19700101',
'uploader': 'CBSI-NEW',
'thumbnail': 're:^https?://.*\.jpg$',
'duration': 205,
'subtitles': {
'en': [{
'ext': 'ttml',
}],
},
},
'params': {
# m3u8 download
'skip_download': True,
},
},
]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_info = self._parse_json(self._html_search_regex(
r'(?:<ul class="media-list items" id="media-related-items"><li data-video-info|<div id="cbsNewsVideoPlayer" data-video-player-options)=\'({.+?})\'',
webpage, 'video JSON info'), video_id)
item = video_info['item'] if 'item' in video_info else video_info
guid = item['mpxRefId']
return self._extract_video_info('byGuid=%s' % guid, guid)
class CBSNewsLiveVideoIE(InfoExtractor):
IE_DESC = 'CBS News Live Videos'
_VALID_URL = r'https?://(?:www\.)?cbsnews\.com/live/video/(?P<id>[\da-z_-]+)'
_TEST = {
'url': 'http://www.cbsnews.com/live/video/clinton-sanders-prepare-to-face-off-in-nh/',
'info_dict': {
'id': 'clinton-sanders-prepare-to-face-off-in-nh',
'ext': 'flv',
'title': 'Clinton, Sanders Prepare To Face Off In NH',
'duration': 334,
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_info = self._parse_json(self._html_search_regex(
r'data-story-obj=\'({.+?})\'', webpage, 'video JSON info'), video_id)['story']
hdcore_sign = 'hdcore=3.3.1'
f4m_formats = self._extract_f4m_formats(video_info['url'] + '&' + hdcore_sign, video_id)
if f4m_formats:
for entry in f4m_formats:
                # URLs without the extra param induce a 404 error
entry.update({'extra_param_to_segment_url': hdcore_sign})
self._sort_formats(f4m_formats)
return {
'id': video_id,
'title': video_info['headline'],
'thumbnail': video_info.get('thumbnail_url_hd') or video_info.get('thumbnail_url_sd'),
'duration': parse_duration(video_info.get('segmentDur')),
'formats': f4m_formats,
}
| unlicense |
wengyian/project-manager-system | node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py | 1283 | 65086 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
import copy
import gyp.common
import os
import os.path
import re
import shlex
import subprocess
import sys
import tempfile
from gyp.common import GypError
# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
# "xcodebuild" is called too quickly (it has been found to return an incorrect
# version number).
XCODE_VERSION_CACHE = None
# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance
# corresponding to the installed version of Xcode.
XCODE_ARCHS_DEFAULT_CACHE = None
def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
"""Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable,
and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
mapping = {'$(ARCHS_STANDARD)': archs}
if archs_including_64_bit:
mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit
return mapping
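# Illustrative example (not part of the original source): calling
#   XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64'])
# returns
#   {'$(ARCHS_STANDARD)': ['i386'],
#    '$(ARCHS_STANDARD_INCLUDING_64_BIT)': ['i386', 'x86_64']}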
class XcodeArchsDefault(object):
"""A class to resolve ARCHS variable from xcode_settings, resolving Xcode
macros and implementing filtering by VALID_ARCHS. The expansion of macros
depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
on the version of Xcode.
"""
# Match variable like $(ARCHS_STANDARD).
variable_pattern = re.compile(r'\$\([a-zA-Z_][a-zA-Z0-9_]*\)$')
def __init__(self, default, mac, iphonesimulator, iphoneos):
self._default = (default,)
self._archs = {'mac': mac, 'ios': iphoneos, 'iossim': iphonesimulator}
def _VariableMapping(self, sdkroot):
"""Returns the dictionary of variable mapping depending on the SDKROOT."""
sdkroot = sdkroot.lower()
if 'iphoneos' in sdkroot:
return self._archs['ios']
elif 'iphonesimulator' in sdkroot:
return self._archs['iossim']
else:
return self._archs['mac']
def _ExpandArchs(self, archs, sdkroot):
"""Expands variables references in ARCHS, and remove duplicates."""
variable_mapping = self._VariableMapping(sdkroot)
expanded_archs = []
for arch in archs:
if self.variable_pattern.match(arch):
variable = arch
try:
variable_expansion = variable_mapping[variable]
for arch in variable_expansion:
if arch not in expanded_archs:
expanded_archs.append(arch)
except KeyError as e:
print 'Warning: Ignoring unsupported variable "%s".' % variable
elif arch not in expanded_archs:
expanded_archs.append(arch)
return expanded_archs
def ActiveArchs(self, archs, valid_archs, sdkroot):
"""Expands variables references in ARCHS, and filter by VALID_ARCHS if it
is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
values present in VALID_ARCHS are kept)."""
expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or '')
if valid_archs:
filtered_archs = []
for arch in expanded_archs:
if arch in valid_archs:
filtered_archs.append(arch)
expanded_archs = filtered_archs
return expanded_archs
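  # Illustrative example (not part of the original source): with
  #   archs=['$(ARCHS_STANDARD)', 'armv6'], valid_archs=['armv7'],
  #   sdkroot='iphoneos9.0'
  # $(ARCHS_STANDARD) is expanded through the 'ios' mapping, 'armv6' is kept
  # as a literal, and the combined list is then filtered against VALID_ARCHS,
  # leaving only 'armv7' (assuming the mapping expands to include it).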
def GetXcodeArchsDefault():
"""Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
installed version of Xcode. The default values used by Xcode for ARCHS
and the expansion of the variables depends on the version of Xcode used.
  For all versions before Xcode 5.0, and for Xcode 5.1 and later, Xcode uses
  $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses
  $(ARCHS_STANDARD_INCLUDING_64_BIT). That variable was added in Xcode 5.0
  and deprecated with Xcode 5.1.
  For the "macosx" SDKROOT, all versions starting with Xcode 5.0 include the
  64-bit architecture as part of $(ARCHS_STANDARD) and default to only
  building it.
  For the "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are
  part of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1,
  they are also part of $(ARCHS_STANDARD).
  All those rules are encoded in the construction of the |XcodeArchsDefault|
  object to use depending on the version of Xcode detected. The object is
  cached for performance reasons."""
global XCODE_ARCHS_DEFAULT_CACHE
if XCODE_ARCHS_DEFAULT_CACHE:
return XCODE_ARCHS_DEFAULT_CACHE
xcode_version, _ = XcodeVersion()
if xcode_version < '0500':
XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
'$(ARCHS_STANDARD)',
XcodeArchsVariableMapping(['i386']),
XcodeArchsVariableMapping(['i386']),
XcodeArchsVariableMapping(['armv7']))
elif xcode_version < '0510':
XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
'$(ARCHS_STANDARD_INCLUDING_64_BIT)',
XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']),
XcodeArchsVariableMapping(
['armv7', 'armv7s'],
['armv7', 'armv7s', 'arm64']))
else:
XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
'$(ARCHS_STANDARD)',
XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
XcodeArchsVariableMapping(['i386', 'x86_64'], ['i386', 'x86_64']),
XcodeArchsVariableMapping(
['armv7', 'armv7s', 'arm64'],
['armv7', 'armv7s', 'arm64']))
return XCODE_ARCHS_DEFAULT_CACHE
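# Illustrative summary (not part of the original source) for the "macosx"
# SDKROOT: Xcode < 5.0 defaults ARCHS to $(ARCHS_STANDARD) -> ['i386'],
# Xcode 5.0.x defaults to $(ARCHS_STANDARD_INCLUDING_64_BIT) -> ['x86_64'],
# and Xcode >= 5.1 defaults back to $(ARCHS_STANDARD) -> ['x86_64'].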
class XcodeSettings(object):
"""A class that understands the gyp 'xcode_settings' object."""
# Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
# at class-level for efficiency.
_sdk_path_cache = {}
_sdk_root_cache = {}
# Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
# cached at class-level for efficiency.
_plist_cache = {}
# Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so
# cached at class-level for efficiency.
_codesigning_key_cache = {}
def __init__(self, spec):
self.spec = spec
self.isIOS = False
# Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
# This means self.xcode_settings[config] always contains all settings
# for that config -- the per-target settings as well. Settings that are
# the same for all configs are implicitly per-target settings.
self.xcode_settings = {}
configs = spec['configurations']
for configname, config in configs.iteritems():
self.xcode_settings[configname] = config.get('xcode_settings', {})
self._ConvertConditionalKeys(configname)
if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
None):
self.isIOS = True
# This is only non-None temporarily during the execution of some methods.
self.configname = None
# Used by _AdjustLibrary to match .a and .dylib entries in libraries.
self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
def _ConvertConditionalKeys(self, configname):
"""Converts or warns on conditional keys. Xcode supports conditional keys,
such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
with some keys converted while the rest force a warning."""
settings = self.xcode_settings[configname]
conditional_keys = [key for key in settings if key.endswith(']')]
for key in conditional_keys:
# If you need more, speak up at http://crbug.com/122592
if key.endswith("[sdk=iphoneos*]"):
if configname.endswith("iphoneos"):
new_key = key.split("[")[0]
settings[new_key] = settings[key]
else:
print 'Warning: Conditional keys not implemented, ignoring:', \
' '.join(conditional_keys)
del settings[key]
def _Settings(self):
assert self.configname
return self.xcode_settings[self.configname]
def _Test(self, test_key, cond_key, default):
return self._Settings().get(test_key, default) == cond_key
def _Appendf(self, lst, test_key, format_str, default=None):
if test_key in self._Settings():
lst.append(format_str % str(self._Settings()[test_key]))
elif default:
lst.append(format_str % str(default))
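  # Illustrative example (not part of the original source): with
  # xcode_settings = {'GCC_OPTIMIZATION_LEVEL': '0'},
  #   self._Test('GCC_OPTIMIZATION_LEVEL', '0', default='s')  -> True
  #   self._Appendf(lst, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')
  # appends '-O0' to lst (the explicit setting wins over the default).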
def _WarnUnimplemented(self, test_key):
if test_key in self._Settings():
print 'Warning: Ignoring not yet implemented key "%s".' % test_key
def IsBinaryOutputFormat(self, configname):
default = "binary" if self.isIOS else "xml"
format = self.xcode_settings[configname].get('INFOPLIST_OUTPUT_FORMAT',
default)
return format == "binary"
def _IsBundle(self):
return int(self.spec.get('mac_bundle', 0)) != 0
def _IsIosAppExtension(self):
return int(self.spec.get('ios_app_extension', 0)) != 0
def _IsIosWatchKitExtension(self):
return int(self.spec.get('ios_watchkit_extension', 0)) != 0
def _IsIosWatchApp(self):
return int(self.spec.get('ios_watch_app', 0)) != 0
def GetFrameworkVersion(self):
"""Returns the framework version of the current target. Only valid for
bundles."""
assert self._IsBundle()
return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')
def GetWrapperExtension(self):
"""Returns the bundle extension (.app, .framework, .plugin, etc). Only
valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('loadable_module', 'shared_library'):
default_wrapper_extension = {
'loadable_module': 'bundle',
'shared_library': 'framework',
}[self.spec['type']]
wrapper_extension = self.GetPerTargetSetting(
'WRAPPER_EXTENSION', default=default_wrapper_extension)
return '.' + self.spec.get('product_extension', wrapper_extension)
elif self.spec['type'] == 'executable':
if self._IsIosAppExtension() or self._IsIosWatchKitExtension():
return '.' + self.spec.get('product_extension', 'appex')
else:
return '.' + self.spec.get('product_extension', 'app')
else:
assert False, "Don't know extension for '%s', target '%s'" % (
self.spec['type'], self.spec['target_name'])
def GetProductName(self):
"""Returns PRODUCT_NAME."""
return self.spec.get('product_name', self.spec['target_name'])
def GetFullProductName(self):
"""Returns FULL_PRODUCT_NAME."""
if self._IsBundle():
return self.GetWrapperName()
else:
return self._GetStandaloneBinaryPath()
def GetWrapperName(self):
"""Returns the directory name of the bundle represented by this target.
Only valid for bundles."""
assert self._IsBundle()
return self.GetProductName() + self.GetWrapperExtension()
def GetBundleContentsFolderPath(self):
"""Returns the qualified path to the bundle's contents folder. E.g.
Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
if self.isIOS:
return self.GetWrapperName()
assert self._IsBundle()
if self.spec['type'] == 'shared_library':
return os.path.join(
self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
else:
# loadable_modules have a 'Contents' folder like executables.
return os.path.join(self.GetWrapperName(), 'Contents')
def GetBundleResourceFolder(self):
"""Returns the qualified path to the bundle's resource folder. E.g.
Chromium.app/Contents/Resources. Only valid for bundles."""
assert self._IsBundle()
if self.isIOS:
return self.GetBundleContentsFolderPath()
return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
def GetBundlePlistPath(self):
"""Returns the qualified path to the bundle's plist file. E.g.
Chromium.app/Contents/Info.plist. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('executable', 'loadable_module'):
return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
else:
return os.path.join(self.GetBundleContentsFolderPath(),
'Resources', 'Info.plist')
def GetProductType(self):
"""Returns the PRODUCT_TYPE of this target."""
if self._IsIosAppExtension():
assert self._IsBundle(), ('ios_app_extension flag requires mac_bundle '
'(target %s)' % self.spec['target_name'])
return 'com.apple.product-type.app-extension'
if self._IsIosWatchKitExtension():
assert self._IsBundle(), ('ios_watchkit_extension flag requires '
'mac_bundle (target %s)' % self.spec['target_name'])
return 'com.apple.product-type.watchkit-extension'
if self._IsIosWatchApp():
assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle '
'(target %s)' % self.spec['target_name'])
return 'com.apple.product-type.application.watchapp'
if self._IsBundle():
return {
'executable': 'com.apple.product-type.application',
'loadable_module': 'com.apple.product-type.bundle',
'shared_library': 'com.apple.product-type.framework',
}[self.spec['type']]
else:
return {
'executable': 'com.apple.product-type.tool',
'loadable_module': 'com.apple.product-type.library.dynamic',
'shared_library': 'com.apple.product-type.library.dynamic',
'static_library': 'com.apple.product-type.library.static',
}[self.spec['type']]
def GetMachOType(self):
"""Returns the MACH_O_TYPE of this target."""
# Weird, but matches Xcode.
if not self._IsBundle() and self.spec['type'] == 'executable':
return ''
return {
'executable': 'mh_execute',
'static_library': 'staticlib',
'shared_library': 'mh_dylib',
'loadable_module': 'mh_bundle',
}[self.spec['type']]
def _GetBundleBinaryPath(self):
"""Returns the name of the bundle binary of by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
    if self.spec['type'] in ('shared_library',) or self.isIOS:
path = self.GetBundleContentsFolderPath()
elif self.spec['type'] in ('executable', 'loadable_module'):
path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
return os.path.join(path, self.GetExecutableName())
def _GetStandaloneExecutableSuffix(self):
if 'product_extension' in self.spec:
return '.' + self.spec['product_extension']
return {
'executable': '',
'static_library': '.a',
'shared_library': '.dylib',
'loadable_module': '.so',
}[self.spec['type']]
def _GetStandaloneExecutablePrefix(self):
return self.spec.get('product_prefix', {
'executable': '',
'static_library': 'lib',
'shared_library': 'lib',
# Non-bundled loadable_modules are called foo.so for some reason
# (that is, .so and no prefix) with the xcode build -- match that.
'loadable_module': '',
}[self.spec['type']])
def _GetStandaloneBinaryPath(self):
"""Returns the name of the non-bundle binary represented by this target.
E.g. hello_world. Only valid for non-bundles."""
assert not self._IsBundle()
assert self.spec['type'] in (
'executable', 'shared_library', 'static_library', 'loadable_module'), (
'Unexpected type %s' % self.spec['type'])
target = self.spec['target_name']
if self.spec['type'] == 'static_library':
if target[:3] == 'lib':
target = target[3:]
elif self.spec['type'] in ('loadable_module', 'shared_library'):
if target[:3] == 'lib':
target = target[3:]
target_prefix = self._GetStandaloneExecutablePrefix()
target = self.spec.get('product_name', target)
target_ext = self._GetStandaloneExecutableSuffix()
return target_prefix + target + target_ext
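  # Illustrative example (not part of the original source): a 'static_library'
  # target named 'libfoo' is stripped to 'foo', then prefixed with 'lib' and
  # suffixed with '.a' again, yielding 'libfoo.a'.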
def GetExecutableName(self):
"""Returns the executable name of the bundle represented by this target.
E.g. Chromium."""
if self._IsBundle():
return self.spec.get('product_name', self.spec['target_name'])
else:
return self._GetStandaloneBinaryPath()
def GetExecutablePath(self):
"""Returns the directory name of the bundle represented by this target. E.g.
Chromium.app/Contents/MacOS/Chromium."""
if self._IsBundle():
return self._GetBundleBinaryPath()
else:
return self._GetStandaloneBinaryPath()
def GetActiveArchs(self, configname):
"""Returns the architectures this target should be built for."""
config_settings = self.xcode_settings[configname]
xcode_archs_default = GetXcodeArchsDefault()
return xcode_archs_default.ActiveArchs(
config_settings.get('ARCHS'),
config_settings.get('VALID_ARCHS'),
config_settings.get('SDKROOT'))
def _GetSdkVersionInfoItem(self, sdk, infoitem):
# xcodebuild requires Xcode and can't run on Command Line Tools-only
# systems from 10.7 onward.
# Since the CLT has no SDK paths anyway, returning None is the
# most sensible route and should still do the right thing.
try:
return GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
except:
pass
def _SdkRoot(self, configname):
if configname is None:
configname = self.configname
return self.GetPerConfigSetting('SDKROOT', configname, default='')
def _SdkPath(self, configname=None):
sdk_root = self._SdkRoot(configname)
if sdk_root.startswith('/'):
return sdk_root
return self._XcodeSdkPath(sdk_root)
def _XcodeSdkPath(self, sdk_root):
if sdk_root not in XcodeSettings._sdk_path_cache:
sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
if sdk_root:
XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
return XcodeSettings._sdk_path_cache[sdk_root]
def _AppendPlatformVersionMinFlags(self, lst):
self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
# TODO: Implement this better?
sdk_path_basename = os.path.basename(self._SdkPath())
if sdk_path_basename.lower().startswith('iphonesimulator'):
self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
'-mios-simulator-version-min=%s')
else:
self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
'-miphoneos-version-min=%s')
def GetCflags(self, configname, arch=None):
"""Returns flags that need to be added to .c, .cc, .m, and .mm
compilations."""
    # These functions (and the similar ones below) do not offer complete
    # emulation of all xcode_settings keys. They're implemented on demand.
self.configname = configname
cflags = []
sdk_root = self._SdkPath()
if 'SDKROOT' in self._Settings() and sdk_root:
cflags.append('-isysroot %s' % sdk_root)
if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
cflags.append('-Wconstant-conversion')
if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
cflags.append('-funsigned-char')
if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'):
cflags.append('-fasm-blocks')
if 'GCC_DYNAMIC_NO_PIC' in self._Settings():
if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES':
cflags.append('-mdynamic-no-pic')
else:
pass
# TODO: In this case, it depends on the target. xcode passes
# mdynamic-no-pic by default for executable and possibly static lib
# according to mento
if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'):
cflags.append('-mpascal-strings')
self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')
if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'):
dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf')
if dbg_format == 'dwarf':
cflags.append('-gdwarf-2')
elif dbg_format == 'stabs':
raise NotImplementedError('stabs debug format is not supported yet.')
elif dbg_format == 'dwarf-with-dsym':
cflags.append('-gdwarf-2')
else:
raise NotImplementedError('Unknown debug format %s' % dbg_format)
if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
cflags.append('-fstrict-aliasing')
elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
cflags.append('-fno-strict-aliasing')
if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
cflags.append('-fvisibility=hidden')
if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'):
cflags.append('-Werror')
if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
cflags.append('-Wnewline-eof')
# In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or
# llvm-gcc. It also requires a fairly recent libtool, and
# if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the
# path to the libLTO.dylib that matches the used clang.
if self._Test('LLVM_LTO', 'YES', default='NO'):
cflags.append('-flto')
self._AppendPlatformVersionMinFlags(cflags)
# TODO:
if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
self._WarnUnimplemented('COPY_PHASE_STRIP')
self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')
# TODO: This is exported correctly, but assigning to it is not supported.
self._WarnUnimplemented('MACH_O_TYPE')
self._WarnUnimplemented('PRODUCT_TYPE')
if arch is not None:
archs = [arch]
else:
assert self.configname
archs = self.GetActiveArchs(self.configname)
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
archs = ['i386']
cflags.append('-arch ' + archs[0])
if archs[0] in ('i386', 'x86_64'):
if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse3')
if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES',
default='NO'):
cflags.append('-mssse3') # Note 3rd 's'.
if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse4.1')
if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse4.2')
cflags += self._Settings().get('WARNING_CFLAGS', [])
if sdk_root:
framework_root = sdk_root
else:
framework_root = ''
config = self.spec['configurations'][self.configname]
framework_dirs = config.get('mac_framework_dirs', [])
for directory in framework_dirs:
cflags.append('-F' + directory.replace('$(SDKROOT)', framework_root))
self.configname = None
return cflags
def GetCflagsC(self, configname):
"""Returns flags that need to be added to .c, and .m compilations."""
self.configname = configname
cflags_c = []
if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
cflags_c.append('-ansi')
else:
self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
cflags_c += self._Settings().get('OTHER_CFLAGS', [])
self.configname = None
return cflags_c
def GetCflagsCC(self, configname):
"""Returns flags that need to be added to .cc, and .mm compilations."""
self.configname = configname
cflags_cc = []
clang_cxx_language_standard = self._Settings().get(
'CLANG_CXX_LANGUAGE_STANDARD')
    # Note: Don't map c++0x to c++11, so that c++0x can still be used with
    # older clangs that don't understand c++11 yet (like Xcode 4.2's).
if clang_cxx_language_standard:
cflags_cc.append('-std=%s' % clang_cxx_language_standard)
self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
cflags_cc.append('-fno-rtti')
if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
cflags_cc.append('-fno-exceptions')
if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
cflags_cc.append('-fvisibility-inlines-hidden')
if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
cflags_cc.append('-fno-threadsafe-statics')
# Note: This flag is a no-op for clang, it only has an effect for gcc.
if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
cflags_cc.append('-Wno-invalid-offsetof')
other_ccflags = []
for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
# TODO: More general variable expansion. Missing in many other places too.
if flag in ('$inherited', '$(inherited)', '${inherited}'):
flag = '$OTHER_CFLAGS'
if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
else:
other_ccflags.append(flag)
cflags_cc += other_ccflags
self.configname = None
return cflags_cc
def _AddObjectiveCGarbageCollectionFlags(self, flags):
gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
if gc_policy == 'supported':
flags.append('-fobjc-gc')
elif gc_policy == 'required':
flags.append('-fobjc-gc-only')
def _AddObjectiveCARCFlags(self, flags):
if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
flags.append('-fobjc-arc')
def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
'YES', default='NO'):
flags.append('-Wobjc-missing-property-synthesis')
def GetCflagsObjC(self, configname):
"""Returns flags that need to be added to .m compilations."""
self.configname = configname
cflags_objc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
self._AddObjectiveCARCFlags(cflags_objc)
self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
self.configname = None
return cflags_objc
def GetCflagsObjCC(self, configname):
"""Returns flags that need to be added to .mm compilations."""
self.configname = configname
cflags_objcc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
self._AddObjectiveCARCFlags(cflags_objcc)
self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
cflags_objcc.append('-fobjc-call-cxx-cdtors')
self.configname = None
return cflags_objcc
def GetInstallNameBase(self):
"""Return DYLIB_INSTALL_NAME_BASE for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
install_base = self.GetPerTargetSetting(
'DYLIB_INSTALL_NAME_BASE',
default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
return install_base
def _StandardizePath(self, path):
"""Do :standardizepath processing for path."""
# I'm not quite sure what :standardizepath does. Just call normpath(),
# but don't let @executable_path/../foo collapse to foo.
if '/' in path:
prefix, rest = '', path
if path.startswith('@'):
prefix, rest = path.split('/', 1)
rest = os.path.normpath(rest) # :standardizepath
path = os.path.join(prefix, rest)
return path
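  # Illustrative example (not part of the original source):
  #   self._StandardizePath('@executable_path/a/../Frameworks')
  # normalizes only the part after the '@' prefix and returns
  # '@executable_path/Frameworks'.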
def GetInstallName(self):
"""Return LD_DYLIB_INSTALL_NAME for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
default_install_name = \
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
install_name = self.GetPerTargetSetting(
'LD_DYLIB_INSTALL_NAME', default=default_install_name)
# Hardcode support for the variables used in chromium for now, to
# unblock people using the make build.
if '$' in install_name:
assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
'$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
'yet in target \'%s\' (got \'%s\')' %
(self.spec['target_name'], install_name))
install_name = install_name.replace(
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
self._StandardizePath(self.GetInstallNameBase()))
if self._IsBundle():
# These are only valid for bundles, hence the |if|.
install_name = install_name.replace(
'$(WRAPPER_NAME)', self.GetWrapperName())
install_name = install_name.replace(
'$(PRODUCT_NAME)', self.GetProductName())
else:
assert '$(WRAPPER_NAME)' not in install_name
assert '$(PRODUCT_NAME)' not in install_name
install_name = install_name.replace(
'$(EXECUTABLE_PATH)', self.GetExecutablePath())
return install_name
def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
"""Checks if ldflag contains a filename and if so remaps it from
gyp-directory-relative to build-directory-relative."""
# This list is expanded on demand.
# They get matched as:
# -exported_symbols_list file
# -Wl,exported_symbols_list file
# -Wl,exported_symbols_list,file
LINKER_FILE = r'(\S+)'
WORD = r'\S+'
linker_flags = [
['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
['-unexported_symbols_list', LINKER_FILE],
['-reexported_symbols_list', LINKER_FILE],
['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting.
]
for flag_pattern in linker_flags:
regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
m = regex.match(ldflag)
if m:
ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
ldflag[m.end(1):]
# Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
# TODO(thakis): Update ffmpeg.gyp):
if ldflag.startswith('-L'):
ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
return ldflag
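  # Illustrative example (not part of the original source):
  #   self._MapLinkerFlagFilename('-Wl,-exported_symbols_list,foo.syms',
  #                               gyp_to_build_path)
  # matches the third form listed above and returns
  # '-Wl,-exported_symbols_list,' + gyp_to_build_path('foo.syms').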
def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
"""Returns flags that need to be passed to the linker.
Args:
configname: The name of the configuration to get ld flags for.
      product_dir: The directory where products such as static and dynamic
          libraries are placed. This is added to the library search path.
      gyp_to_build_path: A function that converts paths relative to the
          current gyp file to paths relative to the build directory.
"""
self.configname = configname
ldflags = []
# The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
# can contain entries that depend on this. Explicitly absolutify these.
for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
ldflags.append('-Wl,-dead_strip')
if self._Test('PREBINDING', 'YES', default='NO'):
ldflags.append('-Wl,-prebind')
self._Appendf(
ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
self._Appendf(
ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
self._AppendPlatformVersionMinFlags(ldflags)
if 'SDKROOT' in self._Settings() and self._SdkPath():
ldflags.append('-isysroot ' + self._SdkPath())
for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
ldflags.append('-L' + gyp_to_build_path(library_path))
if 'ORDER_FILE' in self._Settings():
ldflags.append('-Wl,-order_file ' +
'-Wl,' + gyp_to_build_path(
self._Settings()['ORDER_FILE']))
if arch is not None:
archs = [arch]
else:
assert self.configname
archs = self.GetActiveArchs(self.configname)
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
archs = ['i386']
ldflags.append('-arch ' + archs[0])
# Xcode adds the product directory by default.
ldflags.append('-L' + product_dir)
install_name = self.GetInstallName()
if install_name and self.spec['type'] != 'loadable_module':
ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
ldflags.append('-Wl,-rpath,' + rpath)
sdk_root = self._SdkPath()
if not sdk_root:
sdk_root = ''
config = self.spec['configurations'][self.configname]
framework_dirs = config.get('mac_framework_dirs', [])
for directory in framework_dirs:
ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
if sdk_root and is_extension:
# Adds the link flags for extensions. These flags are common for all
# extensions and provide loader and main function.
# These flags reflect the compilation options used by xcode to compile
# extensions.
ldflags.append('-lpkstart')
if XcodeVersion() < '0900':
ldflags.append(sdk_root +
'/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit')
ldflags.append('-fapplication-extension')
ldflags.append('-Xlinker -rpath '
'-Xlinker @executable_path/../../Frameworks')
self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
self.configname = None
return ldflags
def GetLibtoolflags(self, configname):
"""Returns flags that need to be passed to the static linker.
Args:
configname: The name of the configuration to get ld flags for.
"""
self.configname = configname
libtoolflags = []
for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []):
libtoolflags.append(libtoolflag)
# TODO(thakis): ARCHS?
self.configname = None
return libtoolflags
def GetPerTargetSettings(self):
"""Gets a list of all the per-target settings. This will only fetch keys
whose values are the same across all configurations."""
first_pass = True
result = {}
for configname in sorted(self.xcode_settings.keys()):
if first_pass:
result = dict(self.xcode_settings[configname])
first_pass = False
else:
for key, value in self.xcode_settings[configname].iteritems():
if key not in result:
continue
elif result[key] != value:
del result[key]
return result
def GetPerConfigSetting(self, setting, configname, default=None):
if configname in self.xcode_settings:
return self.xcode_settings[configname].get(setting, default)
else:
return self.GetPerTargetSetting(setting, default)
def GetPerTargetSetting(self, setting, default=None):
"""Tries to get xcode_settings.setting from spec. Assumes that the setting
has the same value in all configurations and throws otherwise."""
is_first_pass = True
result = None
for configname in sorted(self.xcode_settings.keys()):
if is_first_pass:
result = self.xcode_settings[configname].get(setting, None)
is_first_pass = False
else:
assert result == self.xcode_settings[configname].get(setting, None), (
"Expected per-target setting for '%s', got per-config setting "
"(target %s)" % (setting, self.spec['target_name']))
if result is None:
return default
return result
def _GetStripPostbuilds(self, configname, output_binary, quiet):
"""Returns a list of shell commands that contain the shell commands
neccessary to strip this target's binary. These should be run as postbuilds
before the actual postbuilds run."""
self.configname = configname
result = []
if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
default_strip_style = 'debugging'
if self.spec['type'] == 'loadable_module' and self._IsBundle():
default_strip_style = 'non-global'
elif self.spec['type'] == 'executable':
default_strip_style = 'all'
strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
strip_flags = {
'all': '',
'non-global': '-x',
'debugging': '-S',
}[strip_style]
explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
if explicit_strip_flags:
strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)
if not quiet:
result.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
result.append('strip %s %s' % (strip_flags, output_binary))
self.configname = None
return result
def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
"""Returns a list of shell commands that contain the shell commands
neccessary to massage this target's debug information. These should be run
as postbuilds before the actual postbuilds run."""
self.configname = configname
# For static libraries, no dSYMs are created.
result = []
if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
self._Test(
'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
self.spec['type'] != 'static_library'):
if not quiet:
result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
self.configname = None
return result
def _GetTargetPostbuilds(self, configname, output, output_binary,
quiet=False):
"""Returns a list of shell commands that contain the shell commands
to run as postbuilds for this target, before the actual postbuilds."""
# dSYMs need to build before stripping happens.
return (
self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
self._GetStripPostbuilds(configname, output_binary, quiet))
def _GetIOSPostbuilds(self, configname, output_binary):
"""Return a shell command to codesign the iOS output binary so it can
be deployed to a device. This should be run as the very last step of the
build."""
if not (self.isIOS and self.spec['type'] == 'executable'):
return []
settings = self.xcode_settings[configname]
key = self._GetIOSCodeSignIdentityKey(settings)
if not key:
return []
# Warn for any unimplemented signing xcode keys.
unimpl = ['OTHER_CODE_SIGN_FLAGS']
unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
if unimpl:
print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
', '.join(sorted(unimpl)))
return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
settings.get('CODE_SIGN_ENTITLEMENTS', ''),
settings.get('PROVISIONING_PROFILE', ''))
]
def _GetIOSCodeSignIdentityKey(self, settings):
identity = settings.get('CODE_SIGN_IDENTITY')
if not identity:
return None
if identity not in XcodeSettings._codesigning_key_cache:
output = subprocess.check_output(
['security', 'find-identity', '-p', 'codesigning', '-v'])
for line in output.splitlines():
if identity in line:
fingerprint = line.split()[1]
cache = XcodeSettings._codesigning_key_cache
assert identity not in cache or fingerprint == cache[identity], (
"Multiple codesigning fingerprints for identity: %s" % identity)
XcodeSettings._codesigning_key_cache[identity] = fingerprint
return XcodeSettings._codesigning_key_cache.get(identity, '')
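  # Illustrative 'security find-identity -p codesigning -v' output line
  # (assumed format, not taken from the original source):
  #   1) 1FDA12BC34DE56F78901A2B34CD56EF7890A1B2C "iPhone Developer: ..."
  # line.split()[1] then extracts the hex fingerprint that is used as the
  # signing key.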
def AddImplicitPostbuilds(self, configname, output, output_binary,
postbuilds=[], quiet=False):
"""Returns a list of shell commands that should run before and after
|postbuilds|."""
assert output_binary is not None
pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
post = self._GetIOSPostbuilds(configname, output_binary)
return pre + postbuilds + post
def _AdjustLibrary(self, library, config_name=None):
if library.endswith('.framework'):
l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
else:
m = self.library_re.match(library)
if m:
l = '-l' + m.group(1)
else:
l = library
sdk_root = self._SdkPath(config_name)
if not sdk_root:
sdk_root = ''
    # Xcode 7 started shipping with ".tbd" (text-based stub) files instead of
    # ".dylib" files, without providing real support for them. What it does for
    # "/usr/lib" libraries is emit "-L/usr/lib -lname", which is dependent on
    # the library order and causes collisions when building Chrome.
    #
    # Instead, substitute ".tbd" for ".dylib" in the generated project when the
    # following conditions are both true:
    # - the library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib",
    # - the ".dylib" file does not exist but a ".tbd" file does.
library = l.replace('$(SDKROOT)', sdk_root)
if l.startswith('$(SDKROOT)'):
basename, ext = os.path.splitext(library)
if ext == '.dylib' and not os.path.exists(library):
tbd_library = basename + '.tbd'
if os.path.exists(tbd_library):
library = tbd_library
return library
def AdjustLibraries(self, libraries, config_name=None):
"""Transforms entries like 'Cocoa.framework' in libraries into entries like
'-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
"""
libraries = [self._AdjustLibrary(library, config_name)
for library in libraries]
return libraries
def _BuildMachineOSBuild(self):
return GetStdout(['sw_vers', '-buildVersion'])
def _XcodeIOSDeviceFamily(self, configname):
family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
return [int(x) for x in family.split(',')]
def GetExtraPlistItems(self, configname=None):
"""Returns a dictionary with extra items to insert into Info.plist."""
if configname not in XcodeSettings._plist_cache:
cache = {}
cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
xcode, xcode_build = XcodeVersion()
cache['DTXcode'] = xcode
cache['DTXcodeBuild'] = xcode_build
sdk_root = self._SdkRoot(configname)
if not sdk_root:
sdk_root = self._DefaultSdkRoot()
cache['DTSDKName'] = sdk_root
if xcode >= '0430':
cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
sdk_root, 'ProductBuildVersion')
else:
cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
if self.isIOS:
cache['DTPlatformName'] = cache['DTSDKName']
if configname.endswith("iphoneos"):
cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
sdk_root, 'ProductVersion')
cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
else:
cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
XcodeSettings._plist_cache[configname] = cache
# Include extra plist items that are per-target, not per global
# XcodeSettings.
items = dict(XcodeSettings._plist_cache[configname])
if self.isIOS:
items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
return items
def _DefaultSdkRoot(self):
"""Returns the default SDKROOT to use.
    Prior to Xcode 5.0.0, if SDKROOT was not explicitly set in the Xcode
    project, then the SDKROOT environment variable was empty. Starting with
    that version, Xcode sets it to the name of the newest SDK installed.
"""
xcode_version, xcode_build = XcodeVersion()
if xcode_version < '0500':
return ''
default_sdk_path = self._XcodeSdkPath('')
default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
if default_sdk_root:
return default_sdk_root
try:
all_sdks = GetStdout(['xcodebuild', '-showsdks'])
except:
# If xcodebuild fails, there will be no valid SDKs
return ''
for line in all_sdks.splitlines():
items = line.split()
if len(items) >= 3 and items[-2] == '-sdk':
sdk_root = items[-1]
sdk_path = self._XcodeSdkPath(sdk_root)
if sdk_path == default_sdk_path:
return sdk_root
return ''
class MacPrefixHeader(object):
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
This feature consists of several pieces:
* If GCC_PREFIX_HEADER is present, all compilations in that project get an
additional |-include path_to_prefix_header| cflag.
* If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
instead compiled, and all other compilations in the project get an
additional |-include path_to_compiled_header| instead.
+ Compiled prefix headers have the extension gch. There is one gch file for
every language used in the project (c, cc, m, mm), since gch files for
different languages aren't compatible.
+ gch files themselves are built with the target's normal cflags, but they
obviously don't get the |-include| flag. Instead, they need a -x flag that
describes their language.
+ All o files in the target need to depend on the gch file, to make sure
it's built before any o file is built.
This class helps with some of these tasks, but it needs help from the build
system for writing dependencies to the gch files, for writing build commands
for the gch files, and for figuring out the location of the gch files.
"""
def __init__(self, xcode_settings,
gyp_path_to_build_path, gyp_path_to_build_output):
"""If xcode_settings is None, all methods on this class are no-ops.
Args:
gyp_path_to_build_path: A function that takes a gyp-relative path,
and returns a path relative to the build directory.
gyp_path_to_build_output: A function that takes a gyp-relative path and
a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
to where the output of precompiling that path for that language
should be placed (without the trailing '.gch').
"""
# This doesn't support per-configuration prefix headers. Good enough
# for now.
self.header = None
self.compile_headers = False
if xcode_settings:
self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER')
self.compile_headers = xcode_settings.GetPerTargetSetting(
'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO'
self.compiled_headers = {}
if self.header:
if self.compile_headers:
for lang in ['c', 'cc', 'm', 'mm']:
self.compiled_headers[lang] = gyp_path_to_build_output(
self.header, lang)
self.header = gyp_path_to_build_path(self.header)
def _CompiledHeader(self, lang, arch):
assert self.compile_headers
h = self.compiled_headers[lang]
if arch:
h += '.' + arch
return h
def GetInclude(self, lang, arch=None):
"""Gets the cflags to include the prefix header for language |lang|."""
if self.compile_headers and lang in self.compiled_headers:
return '-include %s' % self._CompiledHeader(lang, arch)
elif self.header:
return '-include %s' % self.header
else:
return ''
def _Gch(self, lang, arch):
"""Returns the actual file name of the prefix header for language |lang|."""
assert self.compile_headers
return self._CompiledHeader(lang, arch) + '.gch'
def GetObjDependencies(self, sources, objs, arch=None):
"""Given a list of source files and the corresponding object files, returns
a list of (source, object, gch) tuples, where |gch| is the build-directory
relative path to the gch file each object file depends on. |compilable[i]|
has to be the source file belonging to |objs[i]|."""
if not self.header or not self.compile_headers:
return []
result = []
for source, obj in zip(sources, objs):
ext = os.path.splitext(source)[1]
lang = {
'.c': 'c',
'.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
'.m': 'm',
'.mm': 'mm',
}.get(ext, None)
if lang:
result.append((source, obj, self._Gch(lang, arch)))
return result
def GetPchBuildCommands(self, arch=None):
"""Returns [(path_to_gch, language_flag, language, header)].
|path_to_gch| and |header| are relative to the build directory.
"""
if not self.header or not self.compile_headers:
return []
return [
(self._Gch('c', arch), '-x c-header', 'c', self.header),
(self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
(self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
(self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
]
def XcodeVersion():
"""Returns a tuple of version and build version of installed Xcode."""
# `xcodebuild -version` output looks like
# Xcode 4.6.3
# Build version 4H1503
# or like
# Xcode 3.2.6
# Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
# BuildVersion: 10M2518
# Convert that to '0463', '4H1503'.
global XCODE_VERSION_CACHE
if XCODE_VERSION_CACHE:
return XCODE_VERSION_CACHE
try:
version_list = GetStdout(['xcodebuild', '-version']).splitlines()
    # In some circumstances xcodebuild exits 0 but doesn't return
    # the right results; for example, a user on 10.7 or 10.8 with
    # a bogus path set via xcode-select.
    # In that case this may be a CLT-only install, so fall back to
    # checking that version.
if len(version_list) < 2:
raise GypError("xcodebuild returned unexpected results")
except:
version = CLTVersion()
if version:
version = re.match(r'(\d\.\d\.?\d*)', version).groups()[0]
else:
raise GypError("No Xcode or CLT version detected!")
# The CLT has no build information, so we return an empty string.
version_list = [version, '']
version = version_list[0]
build = version_list[-1]
# Be careful to convert "4.2" to "0420":
version = version.split()[-1].replace('.', '')
version = (version + '0' * (3 - len(version))).zfill(4)
if build:
build = build.split()[-1]
XCODE_VERSION_CACHE = (version, build)
return XCODE_VERSION_CACHE
# This function ported from the logic in Homebrew's CLT version check
def CLTVersion():
"""Returns the version of command-line tools from pkgutil."""
# pkgutil output looks like
# package-id: com.apple.pkg.CLTools_Executables
# version: 5.0.1.0.1.1382131676
# volume: /
# location: /
# install-time: 1382544035
# groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
regex = re.compile('version: (?P<version>.+)')
for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
try:
output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
return re.search(regex, output).groupdict()['version']
except:
continue
def GetStdout(cmdlist):
"""Returns the content of standard output returned by invoking |cmdlist|.
  Raises |GypError| if the command exits with a non-zero return code."""
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
out = job.communicate()[0]
if job.returncode != 0:
sys.stderr.write(out + '\n')
raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
return out.rstrip('\n')
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
"""Merges the global xcode_settings dictionary into each configuration of the
target represented by spec. For keys that are both in the global and the local
  xcode_settings dict, the local key gets precedence.
"""
# The xcode generator special-cases global xcode_settings and does something
# that amounts to merging in the global xcode_settings into each local
# xcode_settings dict.
global_xcode_settings = global_dict.get('xcode_settings', {})
for config in spec['configurations'].values():
if 'xcode_settings' in config:
new_settings = global_xcode_settings.copy()
new_settings.update(config['xcode_settings'])
config['xcode_settings'] = new_settings
def IsMacBundle(flavor, spec):
"""Returns if |spec| should be treated as a bundle.
Bundles are directories with a certain subdirectory structure, instead of
just a single file. Bundle rules do not produce a binary but also package
resources into that directory."""
is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
if is_mac_bundle:
assert spec['type'] != 'none', (
'mac_bundle targets cannot have type none (target "%s")' %
spec['target_name'])
return is_mac_bundle
def GetMacBundleResources(product_dir, xcode_settings, resources):
"""Yields (output, resource) pairs for every resource in |resources|.
Only call this for mac bundle targets.
Args:
product_dir: Path to the directory containing the output bundle,
relative to the build directory.
xcode_settings: The XcodeSettings of the current target.
resources: A list of bundle resources, relative to the build directory.
"""
dest = os.path.join(product_dir,
xcode_settings.GetBundleResourceFolder())
for res in resources:
output = dest
    # The make generator doesn't support spaces in filenames, so forbid them
    # everywhere to keep the generators more interchangeable.
assert ' ' not in res, (
"Spaces in resource filenames not supported (%s)" % res)
# Split into (path,file).
res_parts = os.path.split(res)
# Now split the path into (prefix,maybe.lproj).
lproj_parts = os.path.split(res_parts[0])
# If the resource lives in a .lproj bundle, add that to the destination.
if lproj_parts[1].endswith('.lproj'):
output = os.path.join(output, lproj_parts[1])
output = os.path.join(output, res_parts[1])
# Compiled XIB files are referred to by .nib.
if output.endswith('.xib'):
output = os.path.splitext(output)[0] + '.nib'
# Compiled storyboard files are referred to by .storyboardc.
if output.endswith('.storyboard'):
output = os.path.splitext(output)[0] + '.storyboardc'
yield output, res
def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
"""Returns (info_plist, dest_plist, defines, extra_env), where:
* |info_plist| is the source plist path, relative to the
build directory,
* |dest_plist| is the destination plist path, relative to the
build directory,
* |defines| is a list of preprocessor defines (empty if the plist
    shouldn't be preprocessed),
* |extra_env| is a dict of env variables that should be exported when
invoking |mac_tool copy-info-plist|.
Only call this for mac bundle targets.
Args:
product_dir: Path to the directory containing the output bundle,
relative to the build directory.
xcode_settings: The XcodeSettings of the current target.
    gyp_path_to_build_path: A function that converts paths relative to the
        current gyp file to paths relative to the build directory.
"""
info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE')
if not info_plist:
return None, None, [], {}
  # The make generator doesn't support spaces in filenames, so forbid them
  # everywhere to keep the generators more interchangeable.
assert ' ' not in info_plist, (
"Spaces in Info.plist filenames not supported (%s)" % info_plist)
info_plist = gyp_path_to_build_path(info_plist)
# If explicitly set to preprocess the plist, invoke the C preprocessor and
# specify any defines as -D flags.
if xcode_settings.GetPerTargetSetting(
'INFOPLIST_PREPROCESS', default='NO') == 'YES':
# Create an intermediate file based on the path.
defines = shlex.split(xcode_settings.GetPerTargetSetting(
'INFOPLIST_PREPROCESSOR_DEFINITIONS', default=''))
else:
defines = []
dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
extra_env = xcode_settings.GetPerTargetSettings()
return info_plist, dest_plist, defines, extra_env
def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings=None):
"""Return the environment variables that Xcode would set. See
http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
for a full list.
Args:
xcode_settings: An XcodeSettings object. If this is None, this function
returns an empty dict.
built_products_dir: Absolute path to the built products dir.
srcroot: Absolute path to the source root.
configuration: The build configuration name.
additional_settings: An optional dict with more values to add to the
result.
"""
if not xcode_settings: return {}
# This function is considered a friend of XcodeSettings, so let it reach into
# its implementation details.
spec = xcode_settings.spec
  # These are filled in on an as-needed basis.
env = {
'BUILT_FRAMEWORKS_DIR' : built_products_dir,
'BUILT_PRODUCTS_DIR' : built_products_dir,
'CONFIGURATION' : configuration,
'PRODUCT_NAME' : xcode_settings.GetProductName(),
# See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
'SRCROOT' : srcroot,
'SOURCE_ROOT': '${SRCROOT}',
# This is not true for static libraries, but currently the env is only
# written for bundles:
'TARGET_BUILD_DIR' : built_products_dir,
'TEMP_DIR' : '${TMPDIR}',
}
if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
env['SDKROOT'] = xcode_settings._SdkPath(configuration)
else:
env['SDKROOT'] = ''
if spec['type'] in (
'executable', 'static_library', 'shared_library', 'loadable_module'):
env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath()
env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName()
mach_o_type = xcode_settings.GetMachOType()
if mach_o_type:
env['MACH_O_TYPE'] = mach_o_type
env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
if xcode_settings._IsBundle():
env['CONTENTS_FOLDER_PATH'] = \
xcode_settings.GetBundleContentsFolderPath()
env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
xcode_settings.GetBundleResourceFolder()
env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()
install_name = xcode_settings.GetInstallName()
if install_name:
env['LD_DYLIB_INSTALL_NAME'] = install_name
install_name_base = xcode_settings.GetInstallNameBase()
if install_name_base:
env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
if XcodeVersion() >= '0500' and not env.get('SDKROOT'):
sdk_root = xcode_settings._SdkRoot(configuration)
if not sdk_root:
sdk_root = xcode_settings._XcodeSdkPath('')
if sdk_root is None:
sdk_root = ''
env['SDKROOT'] = sdk_root
if not additional_settings:
additional_settings = {}
else:
# Flatten lists to strings.
for k in additional_settings:
if not isinstance(additional_settings[k], str):
additional_settings[k] = ' '.join(additional_settings[k])
additional_settings.update(env)
for k in additional_settings:
additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])
return additional_settings
def _NormalizeEnvVarReferences(str):
"""Takes a string containing variable references in the form ${FOO}, $(FOO),
or $FOO, and returns a string with all variable references in the form ${FOO}.
"""
# $FOO -> ${FOO}
str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str)
# $(FOO) -> ${FOO}
matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str)
for match in matches:
to_replace, variable = match
    assert '$(' not in variable, '$($(FOO)) variables not supported: ' + to_replace
str = str.replace(to_replace, '${' + variable + '}')
return str
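# Illustrative sketch, not from the upstream gyp source: all three reference
# spellings collapse to the ${FOO} form. Wrapped in a function so merely
# importing the module does not run it.
def _example_normalize_env_var_references():
  normalized = _NormalizeEnvVarReferences('$FOO and $(BAR) and ${BAZ}')
  assert normalized == '${FOO} and ${BAR} and ${BAZ}'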
def ExpandEnvVars(string, expansions):
"""Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
expansions list. If the variable expands to something that references
another variable, this variable is expanded as well if it's in env --
until no variables present in env are left."""
for k, v in reversed(expansions):
string = string.replace('${' + k + '}', v)
string = string.replace('$(' + k + ')', v)
string = string.replace('$' + k, v)
return string
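# Illustrative sketch, not from the upstream gyp source: because the
# expansions are applied in reverse order, a value that references an
# earlier key is resolved transitively in a single pass.
def _example_expand_env_vars():
  expansions = [('SRCROOT', '/src'), ('SOURCE_ROOT', '${SRCROOT}')]
  assert ExpandEnvVars('$(SOURCE_ROOT)/foo', expansions) == '/src/foo'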
def _TopologicallySortedEnvVarKeys(env):
"""Takes a dict |env| whose values are strings that can refer to other keys,
for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
env such that key2 is after key1 in L if env[key2] refers to env[key1].
Throws an Exception in case of dependency cycles.
"""
# Since environment variables can refer to other variables, the evaluation
# order is important. Below is the logic to compute the dependency graph
# and sort it.
regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
def GetEdges(node):
    # Use a definition of edges such that user_of_variable -> used_variable.
# This happens to be easier in this case, since a variable's
# definition contains all variables it references in a single string.
# We can then reverse the result of the topological sort at the end.
# Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
matches = set([v for v in regex.findall(env[node]) if v in env])
for dependee in matches:
assert '${' not in dependee, 'Nested variables not supported: ' + dependee
return matches
try:
# Topologically sort, and then reverse, because we used an edge definition
# that's inverted from the expected result of this function (see comment
# above).
order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
order.reverse()
return order
except gyp.common.CycleError, e:
raise GypError(
'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
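# Illustrative sketch, not from the upstream gyp source: the returned order
# is dependency-first, i.e. a key always precedes any key whose value
# refers to it.
def _example_topologically_sorted_env():
  env = {'A': 'plain', 'B': '${A}/sub'}
  order = _TopologicallySortedEnvVarKeys(env)
  assert order.index('A') < order.index('B')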
def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
configuration, additional_settings=None):
env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings)
return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
def GetSpecPostbuildCommands(spec, quiet=False):
"""Returns the list of postbuilds explicitly defined on |spec|, in a form
executable by a shell."""
postbuilds = []
for postbuild in spec.get('postbuilds', []):
if not quiet:
postbuilds.append('echo POSTBUILD\\(%s\\) %s' % (
spec['target_name'], postbuild['postbuild_name']))
postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
return postbuilds
def _HasIOSTarget(targets):
"""Returns true if any target contains the iOS specific key
IPHONEOS_DEPLOYMENT_TARGET."""
for target_dict in targets.values():
for config in target_dict['configurations'].values():
if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
return True
return False
def _AddIOSDeviceConfigurations(targets):
"""Clone all targets and append -iphoneos to the name. Configure these targets
to build for iOS devices and use correct architectures for those builds."""
for target_dict in targets.itervalues():
toolset = target_dict['toolset']
configs = target_dict['configurations']
for config_name, config_dict in dict(configs).iteritems():
iphoneos_config_dict = copy.deepcopy(config_dict)
configs[config_name + '-iphoneos'] = iphoneos_config_dict
configs[config_name + '-iphonesimulator'] = config_dict
if toolset == 'target':
iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
return targets
def CloneConfigurationForDeviceAndEmulator(target_dicts):
"""If |target_dicts| contains any iOS targets, automatically create -iphoneos
targets for iOS device builds."""
if _HasIOSTarget(target_dicts):
return _AddIOSDeviceConfigurations(target_dicts)
return target_dicts
| mit |
yiheng/BigDL | spark/dl/src/test/resources/tf/models/rnn.py | 9 | 1652 | #
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tensorflow as tf
import numpy as np
from sys import argv
from tensorflow.contrib import rnn
from util import run_model
def main():
"""
Run this command to generate the pb file
1. mkdir model
2. python rnn.py
"""
tf.set_random_seed(1)
n_steps = 2
n_input = 10
n_hidden = 20
n_output = 5
xs = tf.Variable(tf.random_uniform([4, n_steps, n_input]), name='input', dtype=tf.float32)
xs = tf.identity(xs, "input_node")
weight = tf.Variable(tf.random_uniform([n_hidden, n_output]), name="weight", dtype=tf.float32)
bias = tf.Variable(tf.random_uniform([n_output]), name="bias", dtype=tf.float32)
x = tf.unstack(xs, n_steps, 1)
cell = rnn.BasicRNNCell(n_hidden)
output, states = rnn.static_rnn(cell, x, dtype=tf.float32)
final = tf.nn.bias_add(tf.matmul(output[-1], weight), bias, name='output')
net_outputs = map(lambda x: tf.get_default_graph().get_tensor_by_name(x), argv[2].split(','))
run_model(net_outputs, argv[1], 'rnn', argv[3] == 'True')
if __name__ == "__main__":
main()
| apache-2.0 |
poo12138/gem5-stable | src/arch/x86/isa/insts/general_purpose/control_transfer/__init__.py | 91 | 2404 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
categories = ["call",
"conditional_jump",
"interrupts_and_exceptions",
"jump",
"loop",
"xreturn"]
microcode = ""
for category in categories:
exec "import %s as cat" % category
microcode += cat.microcode
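# Illustrative note, not from the upstream gem5 source: the string-based
# ``exec "import ..."`` above is Python 2 syntax. A hypothetical Python 3
# port of the same loop could use importlib instead:
#
#     import importlib
#     for category in categories:
#         cat = importlib.import_module('.' + category, package=__name__)
#         microcode += cat.microcode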
| bsd-3-clause |
xzturn/tensorflow | tensorflow/lite/micro/examples/magic_wand/train/data_prepare.py | 19 | 5911 | # Lint as: python3
# coding=utf-8
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Prepare data for further process.
Read data from "/slope", "/ring", "/wing", "/negative" and save them
in "/data/complete_data" in python dict format.
It will generate a new file with the following structure:
├── data
│ └── complete_data
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import json
import os
import random
LABEL_NAME = "gesture"
DATA_NAME = "accel_ms2_xyz"
folders = ["wing", "ring", "slope"]
names = [
"hyw", "shiyun", "tangsy", "dengyl", "zhangxy", "pengxl", "liucx",
"jiangyh", "xunkai"
]
def prepare_original_data(folder, name, data, file_to_read): # pylint: disable=redefined-outer-name
"""Read collected data from files."""
if folder != "negative":
with open(file_to_read, "r") as f:
lines = csv.reader(f)
data_new = {}
data_new[LABEL_NAME] = folder
data_new[DATA_NAME] = []
data_new["name"] = name
for idx, line in enumerate(lines): # pylint: disable=unused-variable,redefined-outer-name
if len(line) == 3:
if line[2] == "-" and data_new[DATA_NAME]:
data.append(data_new)
data_new = {}
data_new[LABEL_NAME] = folder
data_new[DATA_NAME] = []
data_new["name"] = name
elif line[2] != "-":
data_new[DATA_NAME].append([float(i) for i in line[0:3]])
data.append(data_new)
else:
with open(file_to_read, "r") as f:
lines = csv.reader(f)
data_new = {}
data_new[LABEL_NAME] = folder
data_new[DATA_NAME] = []
data_new["name"] = name
for idx, line in enumerate(lines):
if len(line) == 3 and line[2] != "-":
if len(data_new[DATA_NAME]) == 120:
data.append(data_new)
data_new = {}
data_new[LABEL_NAME] = folder
data_new[DATA_NAME] = []
data_new["name"] = name
else:
data_new[DATA_NAME].append([float(i) for i in line[0:3]])
data.append(data_new)
def generate_negative_data(data): # pylint: disable=redefined-outer-name
"""Generate negative data labeled as 'negative6~8'."""
# Big movement -> around straight line
for i in range(100):
if i > 80:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative8"}
elif i > 60:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative7"}
else:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative6"}
start_x = (random.random() - 0.5) * 2000
start_y = (random.random() - 0.5) * 2000
start_z = (random.random() - 0.5) * 2000
x_increase = (random.random() - 0.5) * 10
y_increase = (random.random() - 0.5) * 10
z_increase = (random.random() - 0.5) * 10
for j in range(128):
dic[DATA_NAME].append([
start_x + j * x_increase + (random.random() - 0.5) * 6,
start_y + j * y_increase + (random.random() - 0.5) * 6,
start_z + j * z_increase + (random.random() - 0.5) * 6
])
data.append(dic)
# Random
for i in range(100):
if i > 80:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative8"}
elif i > 60:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative7"}
else:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative6"}
for j in range(128):
dic[DATA_NAME].append([(random.random() - 0.5) * 1000,
(random.random() - 0.5) * 1000,
(random.random() - 0.5) * 1000])
data.append(dic)
# Stay still
for i in range(100):
if i > 80:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative8"}
elif i > 60:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative7"}
else:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative6"}
start_x = (random.random() - 0.5) * 2000
start_y = (random.random() - 0.5) * 2000
start_z = (random.random() - 0.5) * 2000
for j in range(128):
dic[DATA_NAME].append([
start_x + (random.random() - 0.5) * 40,
start_y + (random.random() - 0.5) * 40,
start_z + (random.random() - 0.5) * 40
])
data.append(dic)
# Write data to file
def write_data(data_to_write, path):
with open(path, "w") as f:
for idx, item in enumerate(data_to_write): # pylint: disable=unused-variable,redefined-outer-name
dic = json.dumps(item, ensure_ascii=False)
f.write(dic)
f.write("\n")
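# Illustrative sketch, not part of the upstream example: write_data emits
# one JSON object per line ("JSON Lines"), so the output can be read back
# by decoding each line independently. The helper name is hypothetical.
def read_data(path):
  """Inverse of write_data: load one sample dict per line."""
  loaded = []
  with open(path, "r") as f:
    for line in f:
      loaded.append(json.loads(line))
  return loaded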
if __name__ == "__main__":
data = [] # pylint: disable=redefined-outer-name
for idx1, folder in enumerate(folders):
for idx2, name in enumerate(names):
prepare_original_data(folder, name, data,
"./%s/output_%s_%s.txt" % (folder, folder, name))
for idx in range(5):
prepare_original_data("negative", "negative%d" % (idx + 1), data,
"./negative/output_negative_%d.txt" % (idx + 1))
generate_negative_data(data)
print("data_length: " + str(len(data)))
if not os.path.exists("./data"):
os.makedirs("./data")
write_data(data, "./data/complete_data")
| apache-2.0 |
edx/lettuce | tests/integration/lib/Django-1.3/tests/regressiontests/test_client_regress/views.py | 50 | 4115 | from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseRedirect
from django.core.exceptions import SuspiciousOperation
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.utils.encoding import smart_str
from django.core.serializers.json import DjangoJSONEncoder
from django.test.client import CONTENT_TYPE_RE
from django.template import RequestContext
def no_template_view(request):
"A simple view that expects a GET request, and returns a rendered template"
return HttpResponse("No template used. Sample content: twice once twice. Content ends.")
def staff_only_view(request):
"A view that can only be visited by staff. Non staff members get an exception"
if request.user.is_staff:
return HttpResponse('')
else:
raise SuspiciousOperation()
def get_view(request):
"A simple login protected view"
return HttpResponse("Hello world")
get_view = login_required(get_view)
def request_data(request, template='base.html', data='sausage'):
"A simple view that returns the request data in the context"
return render_to_response(template, {
'get-foo':request.GET.get('foo',None),
'get-bar':request.GET.get('bar',None),
'post-foo':request.POST.get('foo',None),
'post-bar':request.POST.get('bar',None),
'request-foo':request.REQUEST.get('foo',None),
'request-bar':request.REQUEST.get('bar',None),
'data': data,
})
def view_with_argument(request, name):
"""A view that takes a string argument
The purpose of this view is to check that if a space is provided in
the argument, the test framework unescapes the %20 before passing
the value to the view.
"""
if name == 'Arthur Dent':
return HttpResponse('Hi, Arthur')
else:
return HttpResponse('Howdy, %s' % name)
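# Illustrative sketch, not part of the upstream suite: the view above is
# meant to be exercised through the test client with an escaped space, e.g.
#
#     response = self.client.get('/test_client_regress/arg_view/Arthur%20Dent/')
#     self.assertEqual(response.content, 'Hi, Arthur')
#
# (the URL pattern shown is hypothetical; the real urlconf lives elsewhere
# in the test suite).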
def login_protected_redirect_view(request):
"A view that redirects all requests to the GET view"
return HttpResponseRedirect('/test_client_regress/get_view/')
login_protected_redirect_view = login_required(login_protected_redirect_view)
def set_session_view(request):
"A view that sets a session variable"
request.session['session_var'] = 'YES'
return HttpResponse('set_session')
def check_session_view(request):
"A view that reads a session variable"
return HttpResponse(request.session.get('session_var', 'NO'))
def request_methods_view(request):
"A view that responds with the request method"
return HttpResponse('request method: %s' % request.method)
def return_unicode(request):
return render_to_response('unicode.html')
def return_json_file(request):
"A view that parses and returns a JSON string as a file."
match = CONTENT_TYPE_RE.match(request.META['CONTENT_TYPE'])
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
# This just checks that the uploaded data is JSON
obj_dict = simplejson.loads(request.raw_post_data.decode(charset))
obj_json = simplejson.dumps(obj_dict, encoding=charset,
cls=DjangoJSONEncoder,
ensure_ascii=False)
response = HttpResponse(smart_str(obj_json, encoding=charset), status=200,
mimetype='application/json; charset=' + charset)
response['Content-Disposition'] = 'attachment; filename=testfile.json'
return response
def check_headers(request):
"A view that responds with value of the X-ARG-CHECK header"
return HttpResponse('HTTP_X_ARG_CHECK: %s' % request.META.get('HTTP_X_ARG_CHECK', 'Undefined'))
def raw_post_data(request):
"A view that is requested with GET and accesses request.raw_post_data. Refs #14753."
return HttpResponse(request.raw_post_data)
def request_context_view(request):
# Special attribute that won't be present on a plain HttpRequest
request.special_path = request.path
return render_to_response('request_context.html', context_instance=RequestContext(request, {}))
| gpl-3.0 |
jjhelmus/scipy | scipy/stats/tests/test_discrete_basic.py | 38 | 8770 | from __future__ import division, print_function, absolute_import
import numpy.testing as npt
import numpy as np
from scipy._lib.six import xrange
from scipy import stats
from common_tests import (check_normalization, check_moment, check_mean_expect,
check_var_expect, check_skew_expect,
check_kurt_expect, check_entropy,
check_private_entropy, check_edge_support,
check_named_args, check_random_state_property,
check_pickling, check_rvs_broadcast)
from scipy.stats._distr_params import distdiscrete
knf = npt.dec.knownfailureif
vals = ([1, 2, 3, 4], [0.1, 0.2, 0.3, 0.4])
distdiscrete += [[stats.rv_discrete(values=vals), ()]]
def test_discrete_basic():
for distname, arg in distdiscrete:
try:
distfn = getattr(stats, distname)
except TypeError:
distfn = distname
distname = 'sample distribution'
np.random.seed(9765456)
rvs = distfn.rvs(size=2000, *arg)
supp = np.unique(rvs)
m, v = distfn.stats(*arg)
yield check_cdf_ppf, distfn, arg, supp, distname + ' cdf_ppf'
yield check_pmf_cdf, distfn, arg, distname
yield check_oth, distfn, arg, supp, distname + ' oth'
yield check_edge_support, distfn, arg
alpha = 0.01
yield (check_discrete_chisquare, distfn, arg, rvs, alpha,
distname + ' chisquare')
seen = set()
for distname, arg in distdiscrete:
if distname in seen:
continue
seen.add(distname)
try:
distfn = getattr(stats, distname)
except TypeError:
distfn = distname
distname = 'sample distribution'
locscale_defaults = (0,)
meths = [distfn.pmf, distfn.logpmf, distfn.cdf, distfn.logcdf,
distfn.logsf]
# make sure arguments are within support
spec_k = {'randint': 11, 'hypergeom': 4, 'bernoulli': 0, }
k = spec_k.get(distname, 1)
yield check_named_args, distfn, k, arg, locscale_defaults, meths
if distname != 'sample distribution':
yield check_scale_docstring, distfn
yield check_random_state_property, distfn, arg
yield check_pickling, distfn, arg
# Entropy
yield check_entropy, distfn, arg, distname
if distfn.__class__._entropy != stats.rv_discrete._entropy:
yield check_private_entropy, distfn, arg, stats.rv_discrete
def test_moments():
for distname, arg in distdiscrete:
try:
distfn = getattr(stats, distname)
except TypeError:
distfn = distname
distname = 'sample distribution'
m, v, s, k = distfn.stats(*arg, moments='mvsk')
yield check_normalization, distfn, arg, distname
# compare `stats` and `moment` methods
yield check_moment, distfn, arg, m, v, distname
yield check_mean_expect, distfn, arg, m, distname
yield check_var_expect, distfn, arg, m, v, distname
yield check_skew_expect, distfn, arg, m, v, s, distname
cond = distname in ['zipf']
msg = distname + ' fails kurtosis'
yield knf(cond, msg)(check_kurt_expect), distfn, arg, m, v, k, distname
# frozen distr moments
yield check_moment_frozen, distfn, arg, m, 1
yield check_moment_frozen, distfn, arg, v+m*m, 2
def test_rvs_broadcast():
for dist, shape_args in distdiscrete:
# If shape_only is True, it means the _rvs method of the
# distribution uses more than one random number to generate a random
# variate. That means the result of using rvs with broadcasting or
# with a nontrivial size will not necessarily be the same as using the
# numpy.vectorize'd version of rvs(), so we can only compare the shapes
# of the results, not the values.
# Whether or not a distribution is in the following list is an
# implementation detail of the distribution, not a requirement. If
# the implementation the rvs() method of a distribution changes, this
# test might also have to be changed.
shape_only = dist in ['skellam']
try:
distfunc = getattr(stats, dist)
except TypeError:
distfunc = dist
dist = 'rv_discrete(values=(%r, %r))' % (dist.xk, dist.pk)
loc = np.zeros(2)
nargs = distfunc.numargs
allargs = []
bshape = []
# Generate shape parameter arguments...
for k in range(nargs):
shp = (k + 3,) + (1,)*(k + 1)
param_val = shape_args[k]
allargs.append(param_val*np.ones(shp, dtype=np.array(param_val).dtype))
bshape.insert(0, shp[0])
allargs.append(loc)
bshape.append(loc.size)
# bshape holds the expected shape when loc, scale, and the shape
# parameters are all broadcast together.
yield check_rvs_broadcast, distfunc, dist, allargs, bshape, shape_only, [np.int_]
def check_cdf_ppf(distfn, arg, supp, msg):
# cdf is a step function, and ppf(q) = min{k : cdf(k) >= q, k integer}
npt.assert_array_equal(distfn.ppf(distfn.cdf(supp, *arg), *arg),
supp, msg + '-roundtrip')
npt.assert_array_equal(distfn.ppf(distfn.cdf(supp, *arg) - 1e-8, *arg),
supp, msg + '-roundtrip')
if not hasattr(distfn, 'xk'):
supp1 = supp[supp < distfn.b]
npt.assert_array_equal(distfn.ppf(distfn.cdf(supp1, *arg) + 1e-8, *arg),
supp1 + distfn.inc, msg + ' ppf-cdf-next')
# -1e-8 could cause an error if pmf < 1e-8
def check_pmf_cdf(distfn, arg, distname):
if hasattr(distfn, 'xk'):
index = distfn.xk
else:
startind = int(distfn.ppf(0.01, *arg) - 1)
index = list(range(startind, startind + 10))
cdfs = distfn.cdf(index, *arg)
pmfs_cum = distfn.pmf(index, *arg).cumsum()
atol, rtol = 1e-10, 1e-10
if distname == 'skellam': # ncx2 accuracy
atol, rtol = 1e-5, 1e-5
npt.assert_allclose(cdfs - cdfs[0], pmfs_cum - pmfs_cum[0],
atol=atol, rtol=rtol)
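# Illustrative sketch, not part of the scipy suite: for a discrete
# distribution whose support starts at zero, the cdf equals the running sum
# of the pmf -- the same identity check_pmf_cdf verifies above for shifted
# supports.
def _example_pmf_cdf_identity():
    k = np.arange(10)
    npt.assert_allclose(stats.poisson.cdf(k, 3.0),
                        stats.poisson.pmf(k, 3.0).cumsum())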
def check_moment_frozen(distfn, arg, m, k):
npt.assert_allclose(distfn(*arg).moment(k), m,
atol=1e-10, rtol=1e-10)
def check_oth(distfn, arg, supp, msg):
# checking other methods of distfn
npt.assert_allclose(distfn.sf(supp, *arg), 1. - distfn.cdf(supp, *arg),
atol=1e-10, rtol=1e-10)
q = np.linspace(0.01, 0.99, 20)
npt.assert_allclose(distfn.isf(q, *arg), distfn.ppf(1. - q, *arg),
atol=1e-10, rtol=1e-10)
median_sf = distfn.isf(0.5, *arg)
npt.assert_(distfn.sf(median_sf - 1, *arg) > 0.5)
npt.assert_(distfn.cdf(median_sf + 1, *arg) > 0.5)
def check_discrete_chisquare(distfn, arg, rvs, alpha, msg):
"""Perform chisquare test for random sample of a discrete distribution
Parameters
----------
    distfn : distribution object
        discrete distribution to be tested
    arg : sequence
        parameters of the distribution
    rvs : ndarray
        random sample drawn from ``distfn``
    alpha : float
        significance level, threshold for p-value
    msg : string
        identifier used in the assertion message
    Raises
    ------
    AssertionError
        if the chisquare p-value does not exceed ``alpha``
"""
wsupp = 0.05
# construct intervals with minimum mass `wsupp`.
# intervals are left-half-open as in a cdf difference
lo = int(max(distfn.a, -1000))
distsupport = xrange(lo, int(min(distfn.b, 1000)) + 1)
last = 0
distsupp = [lo]
distmass = []
for ii in distsupport:
current = distfn.cdf(ii, *arg)
if current - last >= wsupp - 1e-14:
distsupp.append(ii)
distmass.append(current - last)
last = current
if current > (1 - wsupp):
break
if distsupp[-1] < distfn.b:
distsupp.append(distfn.b)
distmass.append(1 - last)
distsupp = np.array(distsupp)
distmass = np.array(distmass)
# convert intervals to right-half-open as required by histogram
histsupp = distsupp + 1e-8
histsupp[0] = distfn.a
# find sample frequencies and perform chisquare test
freq, hsupp = np.histogram(rvs, histsupp)
chis, pval = stats.chisquare(np.array(freq), len(rvs)*distmass)
npt.assert_(pval > alpha,
'chisquare - test for %s at arg = %s with pval = %s' %
(msg, str(arg), str(pval)))
def check_scale_docstring(distfn):
if distfn.__doc__ is not None:
# Docstrings can be stripped if interpreter is run with -OO
npt.assert_('scale' not in distfn.__doc__)
if __name__ == "__main__":
npt.run_module_suite()
| bsd-3-clause |
4shadoww/hakkuframework | core/lib/whois/__init__.py | 3 | 2563 | from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import division
from future import standard_library
standard_library.install_aliases()
from builtins import *
import re
import sys
import os
import subprocess
import socket
from .parser import WhoisEntry
from .whois import NICClient
def whois(url, command=False):
# clean domain to expose netloc
ip_match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", url)
if ip_match:
domain = url
try:
result = socket.gethostbyaddr(url)
except socket.herror as e:
pass
else:
domain = result[0]
else:
domain = extract_domain(url)
if command:
# try native whois command
r = subprocess.Popen(['whois', domain], stdout=subprocess.PIPE)
text = r.stdout.read()
else:
# try builtin client
nic_client = NICClient()
text = nic_client.whois_lookup(None, domain, 0)
return WhoisEntry.load(domain, text)
def extract_domain(url):
"""Extract the domain from the given URL
>>> extract_domain('http://www.google.com.au/tos.html')
'google.com.au'
>>> extract_domain('www.webscraping.com')
'webscraping.com'
>>> extract_domain('198.252.206.140')
'stackoverflow.com'
>>> extract_domain('102.112.2O7.net')
'2o7.net'
>>> extract_domain('1-0-1-1-1-0-1-1-1-1-1-1-1-.0-0-0-0-0-0-0-0-0-0-0-0-0-10-0-0-0-0-0-0-0-0-0-0-0-0-0.info')
'0-0-0-0-0-0-0-0-0-0-0-0-0-10-0-0-0-0-0-0-0-0-0-0-0-0-0.info'
"""
if re.match(r'\d+\.\d+\.\d+\.\d+', url):
# this is an IP address
return socket.gethostbyaddr(url)[0]
tlds_path = os.path.join(os.getcwd(), os.path.dirname(__file__), 'data', 'tlds.txt')
with open(tlds_path) as tlds_fil:
suffixes = [line.lower().encode('utf-8')
for line in (x.strip() for x in tlds_fil)
if not line.startswith('#')]
suff = 'xn--p1ai'
if not isinstance(url, str):
url = url.decode('utf-8')
url = re.sub('^.*://', '', url)
url = url.split('/')[0].lower().encode('idna')
domain = []
for section in url.split(b'.'):
if section in suffixes:
domain.append(section)
else:
domain = [section]
return b'.'.join(domain).decode('idna')
if __name__ == '__main__':
try:
url = sys.argv[1]
except IndexError:
print('Usage: %s url' % sys.argv[0])
else:
print(whois(url))
| mit |
atcoin/atcoin | share/qt/extract_strings_qt.py | 2945 | 1844 | #!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
import operator
OUT_CPP="src/qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
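def _demo_parse_po():
    # Illustrative sketch, not part of the upstream script: each msgid is
    # paired with the msgstr that follows it, with quoted fragments kept as
    # lists of lines.
    demo = 'msgid "hello"\nmsgstr "bonjour"\nmsgid "bye"\nmsgstr ""'
    assert parse_po(demo) == [(['"hello"'], ['"bonjour"']),
                              (['"bye"'], ['""'])]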
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')
# xgettext -n --keyword=_ $FILES
child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out)
f = open(OUT_CPP, 'w')
f.write("""#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};')
f.close()
| mit |
Funky7Monkey/WargamingAPI.py | WargamingAPI/enums.py | 1 | 1356 | """
The MIT License (MIT)
Copyright (c) 2017 Funky7Monkey
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from enum import Enum
class Region(Enum):
EU = 1
NA = 2
RU = 3
ASIA = 4
def domain(self):
if self.name == 'NA':
return 'com'
else:
return self.name.lower()
class Platform(Enum):
XBOX = 1
PS4 = 2
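# Illustrative usage sketch, not part of the upstream module: domain()
# yields the top-level domain used in Wargaming API hostnames, with NA
# mapped to the special case 'com'.
def _demo_region_domain():
    assert Region.NA.domain() == 'com'
    assert Region.EU.domain() == 'eu'
    assert Region.ASIA.domain() == 'asia'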
| mit |
hosseinmh/jango_learning | .venv/lib/python2.7/site-packages/django/conf/locale/zh_Hans/formats.py | 1008 | 1810 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y年n月j日' # 2016年9月5日
TIME_FORMAT = 'H:i' # 20:45
DATETIME_FORMAT = 'Y年n月j日 H:i' # 2016年9月5日 20:45
YEAR_MONTH_FORMAT = 'Y年n月' # 2016年9月
MONTH_DAY_FORMAT = 'm月j日' # 9月5日
SHORT_DATE_FORMAT = 'Y年n月j日' # 2016年9月5日
SHORT_DATETIME_FORMAT = 'Y年n月j日 H:i' # 2016年9月5日 20:45
FIRST_DAY_OF_WEEK = 1 # 星期一 (Monday)
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%Y/%m/%d', # '2016/09/05'
'%Y-%m-%d', # '2016-09-05'
'%Y年%n月%j日', # '2016年9月5日'
]
TIME_INPUT_FORMATS = [
'%H:%M', # '20:45'
'%H:%M:%S', # '20:45:29'
'%H:%M:%S.%f', # '20:45:29.000200'
]
DATETIME_INPUT_FORMATS = [
'%Y/%m/%d %H:%M', # '2016/09/05 20:45'
'%Y-%m-%d %H:%M', # '2016-09-05 20:45'
'%Y年%n月%j日 %H:%M', # '2016年9月5日 14:45'
'%Y/%m/%d %H:%M:%S', # '2016/09/05 20:45:29'
'%Y-%m-%d %H:%M:%S', # '2016-09-05 20:45:29'
'%Y年%n月%j日 %H:%M:%S', # '2016年9月5日 20:45:29'
'%Y/%m/%d %H:%M:%S.%f', # '2016/09/05 20:45:29.000200'
'%Y-%m-%d %H:%M:%S.%f', # '2016-09-05 20:45:29.000200'
'%Y年%n月%j日 %H:%n:%S.%f', # '2016年9月5日 20:45:29.000200'
]
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ''
NUMBER_GROUPING = 4
| mit |
kenshay/ImageScripter | ProgramData/SystemFiles/Python/Lib/site-packages/OpenGL/GL/APPLE/ycbcr_422.py | 9 | 3045 | '''OpenGL extension APPLE.ycbcr_422
This module customises the behaviour of the
OpenGL.raw.GL.APPLE.ycbcr_422 to provide a more
Python-friendly API
Overview (from the spec)
This extension provides a method for GL to read, store and optionally
process textures that are defined in Y'CbCr 422 video formats. This
extension supports the two common Y'CbCr 422 video formats (known by
QuickTime FourCC as '2vuy' and 'yuvs'). These formats represent one of the
most common 16 bit Y'CbCr formats in both standard and reverse byte
ordering. From a client stand point these can be assumed to be decoded
immediately (even though the implementation is free to optimize the data
storage and keep it in the native format) and otherwise function as any
other texture format. The texture command <internalformat> parameter
normally be should be specified as RGB, since Y'CbCr is just a form of RGB
data. This extension can be supported with either hardware or software
decoding and it is up to the specific implementation to determine which is
used.
A new <format> is added, YCBCR_422_APPLE. Additionally, to handle the
difference in pixel size and byte ordering for 422 video, the pixel storage
operations treat YCBCR_422_APPLE as a 2 component format using
the UNSIGNED_SHORT_8_8_APPLE or UNSIGNED_SHORT_8_8_REV_APPLE <type>.
The '2vuy' or k2vuyPixelFormat pixel format is an 8-bit 4:2:2 Component
Y'CbCr format. Each 16 bit pixel is represented by an unsigned eight bit
luminance component and two unsigned eight bit chroma components. Each pair
of pixels shares a common set of chroma values. The components are ordered
in memory; Cb, Y0, Cr, Y1. The luminance components have a range of [16,
235], while the chroma value has a range of [16, 240]. This is consistent
with the CCIR601 spec. This format is fairly prevalent on both Mac and Win32
platforms. The equivalent Microsoft fourCC is 'UYVY'. This format is
supported with the UNSIGNED_SHORT_8_8_REV_APPLE type for pixel storage
operations.
The 'yuvs' or kYUVSPixelFormat is an 8-bit 4:2:2 Component Y'CbCr format.
Identical to the k2vuyPixelFormat except each 16 bit word has been byte
swapped. This results in a component ordering of; Y0, Cb, Y1, Cr. This is
most prevalent yuv 4:2:2 format on both Mac and Win32 platforms. The
equivalent Microsoft fourCC is 'YUY2'. This format is supported with the
UNSIGNED_SHORT_8_8_APPLE type for pixel storage operations.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/APPLE/ycbcr_422.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.APPLE.ycbcr_422 import *
from OpenGL.raw.GL.APPLE.ycbcr_422 import _EXTENSION_NAME
def glInitYcbcr422APPLE():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
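# Illustrative sketch, not part of the generated wrapper: the '2vuy' layout
# described above packs two pixels into four bytes ordered Cb, Y0, Cr, Y1,
# so both pixels share one chroma pair. The sample values are arbitrary.
def _demo_2vuy_layout():
    cb, y0, cr, y1 = bytearray(b'\x80\x10\x80\xeb')  # Cb, Y0, Cr, Y1
    assert (y0, y1) == (16, 235)   # luma range [16, 235]
    assert (cb, cr) == (128, 128)  # neutral chroma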
### END AUTOGENERATED SECTION | gpl-3.0 |
abhattad4/Digi-Menu | digimenu2/tests/migrations/models.py | 386 | 1780 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps.registry import Apps
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class CustomModelBase(models.base.ModelBase):
pass
class ModelWithCustomBase(six.with_metaclass(CustomModelBase, models.Model)):
pass
@python_2_unicode_compatible
class UnicodeModel(models.Model):
title = models.CharField('ÚÑÍ¢ÓÐÉ', max_length=20, default='“Ðjáñgó”')
class Meta:
# Disable auto loading of this model as we load it on our own
apps = Apps()
verbose_name = 'úñí©óðé µóðéø'
verbose_name_plural = 'úñí©óðé µóðéøß'
def __str__(self):
return self.title
class Unserializable(object):
"""
An object that migration doesn't know how to serialize.
"""
pass
class UnserializableModel(models.Model):
title = models.CharField(max_length=20, default=Unserializable())
class Meta:
# Disable auto loading of this model as we load it on our own
apps = Apps()
class UnmigratedModel(models.Model):
"""
A model that is in a migration-less app (which this app is
if its migrations directory has not been repointed)
"""
pass
class EmptyManager(models.Manager):
use_in_migrations = True
class FoodQuerySet(models.query.QuerySet):
pass
class BaseFoodManager(models.Manager):
def __init__(self, a, b, c=1, d=2):
super(BaseFoodManager, self).__init__()
self.args = (a, b, c, d)
class FoodManager(BaseFoodManager.from_queryset(FoodQuerySet)):
use_in_migrations = True
class NoMigrationFoodManager(BaseFoodManager.from_queryset(FoodQuerySet)):
pass
| bsd-3-clause |
jmagnusson/flask-admin | examples/geo_alchemy/app.py | 39 | 2044 | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import flask_admin as admin
from geoalchemy2.types import Geometry
from flask_admin.contrib.geoa import ModelView
# Create application
app = Flask(__name__)
app.config.from_pyfile('config.py')
db = SQLAlchemy(app)
class Point(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("POINT"))
class MultiPoint(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("MULTIPOINT"))
class Polygon(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("POLYGON"))
class MultiPolygon(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("MULTIPOLYGON"))
class LineString(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("LINESTRING"))
class MultiLineString(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("MULTILINESTRING"))
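# Illustrative sketch, not part of the upstream example: GeoAlchemy2
# geometry columns accept WKT strings on insert, so a demo row could be
# seeded like this (the helper name is hypothetical):
def add_demo_point():
    db.session.add(Point(name='origin', point='POINT(0 0)'))
    db.session.commit()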
# Flask views
@app.route('/')
def index():
return '<a href="/admin/">Click me to get to Admin!</a>'
# Create admin
admin = admin.Admin(app, name='Example: GeoAlchemy', template_mode='bootstrap3')
# Add views
admin.add_view(ModelView(Point, db.session, category='Points'))
admin.add_view(ModelView(MultiPoint, db.session, category='Points'))
admin.add_view(ModelView(Polygon, db.session, category='Polygons'))
admin.add_view(ModelView(MultiPolygon, db.session, category='Polygons'))
admin.add_view(ModelView(LineString, db.session, category='Lines'))
admin.add_view(ModelView(MultiLineString, db.session, category='Lines'))
if __name__ == '__main__':
db.create_all()
# Start app
app.run(debug=True)
| bsd-3-clause |
offlinehacker/pyfilesystem | fs/osfs/watch_win32.py | 10 | 16983 | """
fs.osfs.watch_win32
===================
Change watcher support for OSFS, using ReadDirectoryChangesW on win32.
"""
import os
import sys
import errno
import threading
import Queue
import stat
import struct
import ctypes
import ctypes.wintypes
import traceback
import weakref
try:
LPVOID = ctypes.wintypes.LPVOID
except AttributeError:
# LPVOID wasn't defined in Py2.5, guess it was introduced in Py2.6
LPVOID = ctypes.c_void_p
from fs.errors import *
from fs.path import *
from fs.watch import *
INVALID_HANDLE_VALUE = 0xFFFFFFFF
FILE_NOTIFY_CHANGE_FILE_NAME = 0x01
FILE_NOTIFY_CHANGE_DIR_NAME = 0x02
FILE_NOTIFY_CHANGE_ATTRIBUTES = 0x04
FILE_NOTIFY_CHANGE_SIZE = 0x08
FILE_NOTIFY_CHANGE_LAST_WRITE = 0x010
FILE_NOTIFY_CHANGE_LAST_ACCESS = 0x020
FILE_NOTIFY_CHANGE_CREATION = 0x040
FILE_NOTIFY_CHANGE_SECURITY = 0x0100
FILE_LIST_DIRECTORY = 0x01
FILE_SHARE_READ = 0x01
FILE_SHARE_WRITE = 0x02
OPEN_EXISTING = 3
FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
FILE_FLAG_OVERLAPPED = 0x40000000
THREAD_TERMINATE = 0x0001
FILE_ACTION_ADDED = 1
FILE_ACTION_REMOVED = 2
FILE_ACTION_MODIFIED = 3
FILE_ACTION_RENAMED_OLD_NAME = 4
FILE_ACTION_RENAMED_NEW_NAME = 5
FILE_ACTION_OVERFLOW = 0xFFFF
WAIT_ABANDONED = 0x00000080
WAIT_IO_COMPLETION = 0x000000C0
WAIT_OBJECT_0 = 0x00000000
WAIT_TIMEOUT = 0x00000102
def _errcheck_bool(value,func,args):
if not value:
raise ctypes.WinError()
return args
def _errcheck_handle(value,func,args):
if not value:
raise ctypes.WinError()
if value == INVALID_HANDLE_VALUE:
raise ctypes.WinError()
return args
def _errcheck_dword(value,func,args):
if value == 0xFFFFFFFF:
raise ctypes.WinError()
return args
class OVERLAPPED(ctypes.Structure):
_fields_ = [('Internal', LPVOID),
('InternalHigh', LPVOID),
('Offset', ctypes.wintypes.DWORD),
('OffsetHigh', ctypes.wintypes.DWORD),
('Pointer', LPVOID),
('hEvent', ctypes.wintypes.HANDLE),
]
try:
ReadDirectoryChangesW = ctypes.windll.kernel32.ReadDirectoryChangesW
except AttributeError:
raise ImportError("ReadDirectoryChangesW is not available")
ReadDirectoryChangesW.restype = ctypes.wintypes.BOOL
ReadDirectoryChangesW.errcheck = _errcheck_bool
ReadDirectoryChangesW.argtypes = (
ctypes.wintypes.HANDLE, # hDirectory
LPVOID, # lpBuffer
ctypes.wintypes.DWORD, # nBufferLength
ctypes.wintypes.BOOL, # bWatchSubtree
ctypes.wintypes.DWORD, # dwNotifyFilter
ctypes.POINTER(ctypes.wintypes.DWORD), # lpBytesReturned
ctypes.POINTER(OVERLAPPED), # lpOverlapped
LPVOID #FileIOCompletionRoutine # lpCompletionRoutine
)
CreateFileW = ctypes.windll.kernel32.CreateFileW
CreateFileW.restype = ctypes.wintypes.HANDLE
CreateFileW.errcheck = _errcheck_handle
CreateFileW.argtypes = (
ctypes.wintypes.LPCWSTR, # lpFileName
ctypes.wintypes.DWORD, # dwDesiredAccess
ctypes.wintypes.DWORD, # dwShareMode
LPVOID, # lpSecurityAttributes
ctypes.wintypes.DWORD, # dwCreationDisposition
ctypes.wintypes.DWORD, # dwFlagsAndAttributes
ctypes.wintypes.HANDLE # hTemplateFile
)
CloseHandle = ctypes.windll.kernel32.CloseHandle
CloseHandle.restype = ctypes.wintypes.BOOL
CloseHandle.argtypes = (
ctypes.wintypes.HANDLE, # hObject
)
CreateEvent = ctypes.windll.kernel32.CreateEventW
CreateEvent.restype = ctypes.wintypes.HANDLE
CreateEvent.errcheck = _errcheck_handle
CreateEvent.argtypes = (
LPVOID, # lpEventAttributes
ctypes.wintypes.BOOL, # bManualReset
ctypes.wintypes.BOOL, # bInitialState
ctypes.wintypes.LPCWSTR, #lpName
)
SetEvent = ctypes.windll.kernel32.SetEvent
SetEvent.restype = ctypes.wintypes.BOOL
SetEvent.errcheck = _errcheck_bool
SetEvent.argtypes = (
ctypes.wintypes.HANDLE, # hEvent
)
WaitForSingleObjectEx = ctypes.windll.kernel32.WaitForSingleObjectEx
WaitForSingleObjectEx.restype = ctypes.wintypes.DWORD
WaitForSingleObjectEx.errcheck = _errcheck_dword
WaitForSingleObjectEx.argtypes = (
ctypes.wintypes.HANDLE, # hObject
ctypes.wintypes.DWORD, # dwMilliseconds
ctypes.wintypes.BOOL, # bAlertable
)
CreateIoCompletionPort = ctypes.windll.kernel32.CreateIoCompletionPort
CreateIoCompletionPort.restype = ctypes.wintypes.HANDLE
CreateIoCompletionPort.errcheck = _errcheck_handle
CreateIoCompletionPort.argtypes = (
ctypes.wintypes.HANDLE, # FileHandle
ctypes.wintypes.HANDLE, # ExistingCompletionPort
LPVOID, # CompletionKey
ctypes.wintypes.DWORD, # NumberOfConcurrentThreads
)
GetQueuedCompletionStatus = ctypes.windll.kernel32.GetQueuedCompletionStatus
GetQueuedCompletionStatus.restype = ctypes.wintypes.BOOL
GetQueuedCompletionStatus.errcheck = _errcheck_bool
GetQueuedCompletionStatus.argtypes = (
ctypes.wintypes.HANDLE, # CompletionPort
LPVOID, # lpNumberOfBytesTransferred
LPVOID, # lpCompletionKey
ctypes.POINTER(OVERLAPPED), # lpOverlapped
ctypes.wintypes.DWORD, # dwMilliseconds
)
PostQueuedCompletionStatus = ctypes.windll.kernel32.PostQueuedCompletionStatus
PostQueuedCompletionStatus.restype = ctypes.wintypes.BOOL
PostQueuedCompletionStatus.errcheck = _errcheck_bool
PostQueuedCompletionStatus.argtypes = (
ctypes.wintypes.HANDLE, # CompletionPort
ctypes.wintypes.DWORD, # lpNumberOfBytesTransferred
ctypes.wintypes.DWORD, # lpCompletionKey
ctypes.POINTER(OVERLAPPED), # lpOverlapped
)
class WatchedDirectory(object):
def __init__(self,callback,path,flags,recursive=True):
self.path = path
self.flags = flags
self.callback = callback
self.recursive = recursive
self.handle = None
self.error = None
self.handle = CreateFileW(path,
FILE_LIST_DIRECTORY,
FILE_SHARE_READ | FILE_SHARE_WRITE,
None,
OPEN_EXISTING,
FILE_FLAG_BACKUP_SEMANTICS|FILE_FLAG_OVERLAPPED,
None)
self.result = ctypes.create_string_buffer(1024)
self.overlapped = overlapped = OVERLAPPED()
self.ready = threading.Event()
def __del__(self):
self.close()
def close(self):
if self.handle is not None:
CloseHandle(self.handle)
self.handle = None
def post(self):
overlapped = self.overlapped
overlapped.Internal = 0
overlapped.InternalHigh = 0
overlapped.Offset = 0
overlapped.OffsetHigh = 0
overlapped.Pointer = 0
overlapped.hEvent = 0
try:
ReadDirectoryChangesW(self.handle,
ctypes.byref(self.result),len(self.result),
self.recursive,self.flags,None,
overlapped,None)
except WindowsError, e:
self.error = e
self.close()
def complete(self,nbytes):
if nbytes == 0:
self.callback(None,0)
else:
res = self.result.raw[:nbytes]
for (name,action) in self._extract_change_info(res):
if self.callback:
self.callback(os.path.join(self.path,name),action)
def _extract_change_info(self,buffer):
"""Extract the information out of a FILE_NOTIFY_INFORMATION structure."""
pos = 0
while pos < len(buffer):
jump, action, namelen = struct.unpack("iii",buffer[pos:pos+12])
# TODO: this may return a shortname or a longname, with no way
# to tell which. Normalise them somehow?
name = buffer[pos+12:pos+12+namelen].decode("utf16")
yield (name,action)
if not jump:
break
pos += jump
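# Illustrative sketch, not part of the upstream module: a
# FILE_NOTIFY_INFORMATION record is a next-entry offset, an action code and
# a name length in bytes, followed by the UTF-16 file name. Building one by
# hand shows what _extract_change_info yields; __new__ is used to skip the
# Windows-only CreateFileW call in __init__.
def _demo_extract_change_info():
    name = u"spam.txt".encode("utf16")[2:]  # drop the BOM
    record = struct.pack("iii", 0, FILE_ACTION_ADDED, len(name)) + name
    wd = WatchedDirectory.__new__(WatchedDirectory)
    wd.handle = None  # keep __del__/close happy without a real handle
    result = list(wd._extract_change_info(record))
    assert result == [(u"spam.txt", FILE_ACTION_ADDED)]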
class WatchThread(threading.Thread):
"""Thread for watching filesystem changes."""
def __init__(self):
super(WatchThread,self).__init__()
self.closed = False
self.watched_directories = {}
self.ready = threading.Event()
self._iocp = None
self._new_watches = Queue.Queue()
def close(self):
if not self.closed:
self.closed = True
if self._iocp:
PostQueuedCompletionStatus(self._iocp,0,1,None)
def add_watcher(self,callback,path,events,recursive):
if os.path.isfile(path):
path = os.path.dirname(path)
watched_dirs = []
for w in self._get_watched_dirs(callback,path,events,recursive):
self.attach_watched_directory(w)
watched_dirs.append(w)
return watched_dirs
def del_watcher(self,w):
w = self.watched_directories.pop(hash(w))
w.callback = None
w.close()
def _get_watched_dirs(self,callback,path,events,recursive):
do_access = False
do_change = False
flags = 0
for evt in events:
if issubclass(ACCESSED,evt):
do_access = True
if issubclass(MODIFIED,evt):
do_change = True
flags |= FILE_NOTIFY_CHANGE_ATTRIBUTES
flags |= FILE_NOTIFY_CHANGE_CREATION
flags |= FILE_NOTIFY_CHANGE_SECURITY
if issubclass(CREATED,evt):
flags |= FILE_NOTIFY_CHANGE_FILE_NAME
flags |= FILE_NOTIFY_CHANGE_DIR_NAME
if issubclass(REMOVED,evt):
flags |= FILE_NOTIFY_CHANGE_FILE_NAME
flags |= FILE_NOTIFY_CHANGE_DIR_NAME
if issubclass(MOVED_SRC,evt):
flags |= FILE_NOTIFY_CHANGE_FILE_NAME
flags |= FILE_NOTIFY_CHANGE_DIR_NAME
if issubclass(MOVED_DST,evt):
flags |= FILE_NOTIFY_CHANGE_FILE_NAME
flags |= FILE_NOTIFY_CHANGE_DIR_NAME
if do_access:
# Separately capture FILE_NOTIFY_CHANGE_LAST_ACCESS events
# so we can reliably generate ACCESSED events.
def on_access_event(path,action):
if action == FILE_ACTION_OVERFLOW:
callback(OVERFLOW,path)
else:
callback(ACCESSED,path)
yield WatchedDirectory(on_access_event,path,
FILE_NOTIFY_CHANGE_LAST_ACCESS,recursive)
if do_change:
# Separately capture FILE_NOTIFY_CHANGE_LAST_WRITE events
# so we can generate MODIFIED(data_changed=True) events.
cflags = FILE_NOTIFY_CHANGE_LAST_WRITE | FILE_NOTIFY_CHANGE_SIZE
def on_change_event(path,action):
if action == FILE_ACTION_OVERFLOW:
callback(OVERFLOW,path)
else:
callback(MODIFIED,path,True)
yield WatchedDirectory(on_change_event,path,cflags,recursive)
if flags:
# All other events we can route through a common handler.
old_name = [None]
def on_misc_event(path,action):
if action == FILE_ACTION_OVERFLOW:
callback(OVERFLOW,path)
elif action == FILE_ACTION_ADDED:
callback(CREATED,path)
elif action == FILE_ACTION_REMOVED:
callback(REMOVED,path)
elif action == FILE_ACTION_MODIFIED:
callback(MODIFIED,path)
elif action == FILE_ACTION_RENAMED_OLD_NAME:
old_name[0] = path
elif action == FILE_ACTION_RENAMED_NEW_NAME:
callback(MOVED_DST,path,old_name[0])
callback(MOVED_SRC,old_name[0],path)
old_name[0] = None
yield WatchedDirectory(on_misc_event,path,flags,recursive)
def run(self):
try:
self._iocp = CreateIoCompletionPort(INVALID_HANDLE_VALUE,None,0,1)
self.ready.set()
nbytes = ctypes.wintypes.DWORD()
iocpkey = ctypes.wintypes.DWORD()
overlapped = OVERLAPPED()
while not self.closed:
try:
GetQueuedCompletionStatus(self._iocp,
ctypes.byref(nbytes),
ctypes.byref(iocpkey),
ctypes.byref(overlapped),
-1)
except WindowsError:
traceback.print_exc()
else:
if iocpkey.value > 1:
try:
w = self.watched_directories[iocpkey.value]
except KeyError:
pass
else:
w.complete(nbytes.value)
w.post()
elif not self.closed:
try:
while True:
w = self._new_watches.get_nowait()
if w.handle is not None:
CreateIoCompletionPort(w.handle,
self._iocp,
hash(w),0)
w.post()
w.ready.set()
except Queue.Empty:
pass
finally:
self.ready.set()
for w in self.watched_directories.itervalues():
w.close()
if self._iocp:
CloseHandle(self._iocp)
def attach_watched_directory(self,w):
self.watched_directories[hash(w)] = w
self._new_watches.put(w)
PostQueuedCompletionStatus(self._iocp,0,1,None)
w.ready.wait()
class OSFSWatchMixin(WatchableFSMixin):
"""Mixin providing change-watcher support via pyinotify."""
__watch_lock = threading.Lock()
__watch_thread = None
def close(self):
super(OSFSWatchMixin,self).close()
self.__shutdown_watch_thread(force=True)
self.notify_watchers(CLOSED)
@convert_os_errors
def add_watcher(self,callback,path="/",events=None,recursive=True):
w = super(OSFSWatchMixin,self).add_watcher(callback,path,events,recursive)
syspath = self.getsyspath(path)
wt = self.__get_watch_thread()
# Careful not to create a reference cycle here.
weak_self = weakref.ref(self)
def handle_event(event_class,path,*args,**kwds):
selfref = weak_self()
if selfref is None:
return
try:
path = selfref.unsyspath(path)
except ValueError:
pass
else:
if event_class in (MOVED_SRC,MOVED_DST) and args and args[0]:
args = (selfref.unsyspath(args[0]),) + args[1:]
event = event_class(selfref,path,*args,**kwds)
w.handle_event(event)
w._watch_objs = wt.add_watcher(handle_event,syspath,w.events,w.recursive)
for wd in w._watch_objs:
if wd.error is not None:
self.del_watcher(w)
raise wd.error
return w
@convert_os_errors
def del_watcher(self,watcher_or_callback):
wt = self.__get_watch_thread()
if isinstance(watcher_or_callback,Watcher):
watchers = [watcher_or_callback]
else:
watchers = self._find_watchers(watcher_or_callback)
for watcher in watchers:
for wobj in watcher._watch_objs:
wt.del_watcher(wobj)
super(OSFSWatchMixin,self).del_watcher(watcher)
if not wt.watched_directories:
self.__shutdown_watch_thread()
def __get_watch_thread(self):
"""Get the shared watch thread, initializing if necessary."""
if self.__watch_thread is None:
self.__watch_lock.acquire()
try:
if self.__watch_thread is None:
wt = WatchThread()
wt.start()
wt.ready.wait()
OSFSWatchMixin.__watch_thread = wt
finally:
self.__watch_lock.release()
return self.__watch_thread
def __shutdown_watch_thread(self,force=False):
"""Stop the shared watch manager, if there are no watches left."""
self.__watch_lock.acquire()
try:
if OSFSWatchMixin.__watch_thread is None:
return
if not force and OSFSWatchMixin.__watch_thread.watched_directories:
return
try:
OSFSWatchMixin.__watch_thread.close()
except EnvironmentError:
pass
else:
OSFSWatchMixin.__watch_thread.join()
OSFSWatchMixin.__watch_thread = None
finally:
self.__watch_lock.release()
| bsd-3-clause |
40223209/test | static/Brython3.1.3-20150514-095342/Lib/unittest/case.py | 743 | 48873 | """Test case implementation"""
import sys
import functools
import difflib
import pprint
import re
import warnings
import collections
from . import result
from .util import (strclass, safe_repr, _count_diff_all_purpose,
_count_diff_hashable)
__unittest = True
DIFF_OMITTED = ('\nDiff is %s characters long. '
'Set self.maxDiff to None to see it.')
class SkipTest(Exception):
"""
Raise this exception in a test to skip it.
Usually you can use TestCase.skipTest() or one of the skipping decorators
instead of raising this directly.
"""
class _ExpectedFailure(Exception):
"""
Raise this when a test is expected to fail.
This is an implementation detail.
"""
def __init__(self, exc_info):
super(_ExpectedFailure, self).__init__()
self.exc_info = exc_info
class _UnexpectedSuccess(Exception):
"""
The test was supposed to fail, but it didn't!
"""
class _Outcome(object):
def __init__(self):
self.success = True
self.skipped = None
self.unexpectedSuccess = None
self.expectedFailure = None
self.errors = []
self.failures = []
def _id(obj):
return obj
def skip(reason):
"""
Unconditionally skip a test.
"""
def decorator(test_item):
if not isinstance(test_item, type):
@functools.wraps(test_item)
def skip_wrapper(*args, **kwargs):
raise SkipTest(reason)
test_item = skip_wrapper
test_item.__unittest_skip__ = True
test_item.__unittest_skip_why__ = reason
return test_item
return decorator
def skipIf(condition, reason):
"""
Skip a test if the condition is true.
"""
if condition:
return skip(reason)
return _id
def skipUnless(condition, reason):
"""
Skip a test unless the condition is true.
"""
if not condition:
return skip(reason)
return _id
def expectedFailure(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception:
raise _ExpectedFailure(sys.exc_info())
raise _UnexpectedSuccess
return wrapper
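# Illustrative usage sketch, not part of the stdlib module: the decorators
# above either tag the test item or raise control-flow exceptions that the
# runner interprets, e.g.
#
#     class MathTest(TestCase):
#         @skipUnless(sys.platform.startswith("linux"), "requires Linux")
#         def test_linux_only(self):
#             ...
#         @expectedFailure
#         def test_known_bug(self):
#             self.assertEqual(1, 0)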
class _AssertRaisesBaseContext(object):
def __init__(self, expected, test_case, callable_obj=None,
expected_regex=None):
self.expected = expected
self.test_case = test_case
if callable_obj is not None:
try:
self.obj_name = callable_obj.__name__
except AttributeError:
self.obj_name = str(callable_obj)
else:
self.obj_name = None
if isinstance(expected_regex, (bytes, str)):
expected_regex = re.compile(expected_regex)
self.expected_regex = expected_regex
self.msg = None
def _raiseFailure(self, standardMsg):
msg = self.test_case._formatMessage(self.msg, standardMsg)
raise self.test_case.failureException(msg)
def handle(self, name, callable_obj, args, kwargs):
"""
If callable_obj is None, assertRaises/Warns is being used as a
context manager, so check for a 'msg' kwarg and return self.
If callable_obj is not None, call it passing args and kwargs.
"""
if callable_obj is None:
self.msg = kwargs.pop('msg', None)
return self
with self:
callable_obj(*args, **kwargs)
class _AssertRaisesContext(_AssertRaisesBaseContext):
"""A context manager used to implement TestCase.assertRaises* methods."""
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
if self.obj_name:
self._raiseFailure("{} not raised by {}".format(exc_name,
self.obj_name))
else:
self._raiseFailure("{} not raised".format(exc_name))
if not issubclass(exc_type, self.expected):
# let unexpected exceptions pass through
return False
# store exception, without traceback, for later retrieval
self.exception = exc_value.with_traceback(None)
if self.expected_regex is None:
return True
expected_regex = self.expected_regex
if not expected_regex.search(str(exc_value)):
self._raiseFailure('"{}" does not match "{}"'.format(
expected_regex.pattern, str(exc_value)))
return True
class _AssertWarnsContext(_AssertRaisesBaseContext):
"""A context manager used to implement TestCase.assertWarns* methods."""
def __enter__(self):
# The __warningregistry__'s need to be in a pristine state for tests
# to work properly.
for v in sys.modules.values():
if getattr(v, '__warningregistry__', None):
v.__warningregistry__ = {}
self.warnings_manager = warnings.catch_warnings(record=True)
self.warnings = self.warnings_manager.__enter__()
warnings.simplefilter("always", self.expected)
return self
def __exit__(self, exc_type, exc_value, tb):
self.warnings_manager.__exit__(exc_type, exc_value, tb)
if exc_type is not None:
# let unexpected exceptions pass through
return
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
first_matching = None
for m in self.warnings:
w = m.message
if not isinstance(w, self.expected):
continue
if first_matching is None:
first_matching = w
if (self.expected_regex is not None and
not self.expected_regex.search(str(w))):
continue
# store warning for later retrieval
self.warning = w
self.filename = m.filename
self.lineno = m.lineno
return
# Now we simply try to choose a helpful failure message
if first_matching is not None:
self._raiseFailure('"{}" does not match "{}"'.format(
self.expected_regex.pattern, str(first_matching)))
if self.obj_name:
self._raiseFailure("{} not triggered by {}".format(exc_name,
self.obj_name))
else:
self._raiseFailure("{} not triggered".format(exc_name))
class TestCase(object):
"""A class whose instances are single test cases.
By default, the test code itself should be placed in a method named
'runTest'.
If the fixture may be used for many test cases, create as
many test methods as are needed. When instantiating such a TestCase
subclass, specify in the constructor arguments the name of the test method
that the instance is to execute.
Test authors should subclass TestCase for their own tests. Construction
and deconstruction of the test's environment ('fixture') can be
implemented by overriding the 'setUp' and 'tearDown' methods respectively.
If it is necessary to override the __init__ method, the base class
__init__ method must always be called. It is important that subclasses
should not change the signature of their __init__ method, since instances
of the classes are instantiated automatically by parts of the framework
in order to be run.
When subclassing TestCase, you can set these attributes:
* failureException: determines which exception will be raised when
the instance's assertion methods fail; test methods raising this
exception will be deemed to have 'failed' rather than 'errored'.
* longMessage: determines whether long messages (including repr of
objects used in assert methods) will be printed on failure in *addition*
to any explicit message passed.
* maxDiff: sets the maximum length of a diff in failure messages
by assert methods using difflib. It is looked up as an instance
attribute so can be configured by individual tests if required.
"""
failureException = AssertionError
longMessage = True
maxDiff = 80*8
# If a string is longer than _diffThreshold, use normal comparison instead
# of difflib. See #11763.
_diffThreshold = 2**16
# Attribute used by TestSuite for classSetUp
_classSetupFailed = False
def __init__(self, methodName='runTest'):
"""Create an instance of the class that will use the named test
method when executed. Raises a ValueError if the instance does
not have a method with the specified name.
"""
self._testMethodName = methodName
self._outcomeForDoCleanups = None
self._testMethodDoc = 'No test'
try:
testMethod = getattr(self, methodName)
except AttributeError:
if methodName != 'runTest':
# we allow instantiation with no explicit method name
# but not an *incorrect* or missing method name
raise ValueError("no such test method in %s: %s" %
(self.__class__, methodName))
else:
self._testMethodDoc = testMethod.__doc__
self._cleanups = []
# Map types to custom assertEqual functions that will compare
# instances of said type in more detail to generate a more useful
# error message.
self._type_equality_funcs = {}
self.addTypeEqualityFunc(dict, 'assertDictEqual')
self.addTypeEqualityFunc(list, 'assertListEqual')
self.addTypeEqualityFunc(tuple, 'assertTupleEqual')
self.addTypeEqualityFunc(set, 'assertSetEqual')
self.addTypeEqualityFunc(frozenset, 'assertSetEqual')
self.addTypeEqualityFunc(str, 'assertMultiLineEqual')
def addTypeEqualityFunc(self, typeobj, function):
"""Add a type specific assertEqual style function to compare a type.
This method is for use by TestCase subclasses that need to register
their own type equality functions to provide nicer error messages.
Args:
typeobj: The data type to call this function on when both values
are of the same type in assertEqual().
function: The callable taking two arguments and an optional
msg= argument that raises self.failureException with a
useful error message when the two arguments are not equal.
"""
self._type_equality_funcs[typeobj] = function
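    # Illustrative sketch (not part of the original module): registering a
    # type-specific comparator for a hypothetical `Point` class so that
    # assertEqual(Point, Point) produces a domain-specific failure message.
    #
    #     class PointTest(TestCase):
    #         def setUp(self):
    #             self.addTypeEqualityFunc(Point, self.assertPointEqual)
    #         def assertPointEqual(self, a, b, msg=None):
    #             if (a.x, a.y) != (b.x, b.y):
    #                 standardMsg = 'Point%r != Point%r' % ((a.x, a.y), (b.x, b.y))
    #                 raise self.failureException(self._formatMessage(msg, standardMsg))
    #         def test_points(self):
    #             self.assertEqual(Point(1, 2), Point(1, 2))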
def addCleanup(self, function, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
Cleanup items are called even if setUp fails (unlike tearDown)."""
self._cleanups.append((function, args, kwargs))
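    # Illustrative sketch (not part of the original module): cleanups run in
    # LIFO order after tearDown, and run even when setUp fails partway. The
    # helpers `open_connection` and `make_tempdir` are hypothetical.
    #
    #     class ResourceTest(TestCase):
    #         def setUp(self):
    #             self.conn = open_connection()
    #             self.addCleanup(self.conn.close)  # runs even if the next line raises
    #             self.tmp = make_tempdir()
    #             self.addCleanup(shutil.rmtree, self.tmp)
    #         def test_something(self):
    #             ...  # afterwards rmtree runs first, then conn.close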
def setUp(self):
"Hook method for setting up the test fixture before exercising it."
pass
def tearDown(self):
"Hook method for deconstructing the test fixture after testing it."
pass
@classmethod
def setUpClass(cls):
"Hook method for setting up class fixture before running tests in the class."
@classmethod
def tearDownClass(cls):
"Hook method for deconstructing the class fixture after running all tests in the class."
def countTestCases(self):
return 1
def defaultTestResult(self):
return result.TestResult()
def shortDescription(self):
"""Returns a one-line description of the test, or None if no
description has been provided.
The default implementation of this method returns the first line of
the specified test method's docstring.
"""
doc = self._testMethodDoc
return doc and doc.split("\n")[0].strip() or None
def id(self):
return "%s.%s" % (strclass(self.__class__), self._testMethodName)
def __eq__(self, other):
if type(self) is not type(other):
return NotImplemented
return self._testMethodName == other._testMethodName
def __hash__(self):
return hash((type(self), self._testMethodName))
def __str__(self):
return "%s (%s)" % (self._testMethodName, strclass(self.__class__))
def __repr__(self):
return "<%s testMethod=%s>" % \
(strclass(self.__class__), self._testMethodName)
def _addSkip(self, result, reason):
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None:
addSkip(self, reason)
else:
warnings.warn("TestResult has no addSkip method, skips not reported",
RuntimeWarning, 2)
result.addSuccess(self)
def _executeTestPart(self, function, outcome, isTest=False):
try:
function()
except KeyboardInterrupt:
raise
except SkipTest as e:
outcome.success = False
outcome.skipped = str(e)
except _UnexpectedSuccess:
exc_info = sys.exc_info()
outcome.success = False
if isTest:
outcome.unexpectedSuccess = exc_info
else:
outcome.errors.append(exc_info)
except _ExpectedFailure:
outcome.success = False
exc_info = sys.exc_info()
if isTest:
outcome.expectedFailure = exc_info
else:
outcome.errors.append(exc_info)
except self.failureException:
outcome.success = False
outcome.failures.append(sys.exc_info())
except:
outcome.success = False
outcome.errors.append(sys.exc_info())
def run(self, result=None):
orig_result = result
if result is None:
result = self.defaultTestResult()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
result.startTest(self)
testMethod = getattr(self, self._testMethodName)
if (getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False)):
# If the class or method was skipped.
try:
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
self._addSkip(result, skip_why)
finally:
result.stopTest(self)
return
try:
outcome = _Outcome()
self._outcomeForDoCleanups = outcome
self._executeTestPart(self.setUp, outcome)
if outcome.success:
self._executeTestPart(testMethod, outcome, isTest=True)
self._executeTestPart(self.tearDown, outcome)
self.doCleanups()
if outcome.success:
result.addSuccess(self)
else:
if outcome.skipped is not None:
self._addSkip(result, outcome.skipped)
for exc_info in outcome.errors:
result.addError(self, exc_info)
for exc_info in outcome.failures:
result.addFailure(self, exc_info)
if outcome.unexpectedSuccess is not None:
addUnexpectedSuccess = getattr(result, 'addUnexpectedSuccess', None)
if addUnexpectedSuccess is not None:
addUnexpectedSuccess(self)
else:
warnings.warn("TestResult has no addUnexpectedSuccess method, reporting as failures",
RuntimeWarning)
result.addFailure(self, outcome.unexpectedSuccess)
if outcome.expectedFailure is not None:
addExpectedFailure = getattr(result, 'addExpectedFailure', None)
if addExpectedFailure is not None:
addExpectedFailure(self, outcome.expectedFailure)
else:
warnings.warn("TestResult has no addExpectedFailure method, reporting as passes",
RuntimeWarning)
result.addSuccess(self)
return result
finally:
result.stopTest(self)
if orig_result is None:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun()
def doCleanups(self):
"""Execute all cleanup functions. Normally called for you after
tearDown."""
outcome = self._outcomeForDoCleanups or _Outcome()
while self._cleanups:
function, args, kwargs = self._cleanups.pop()
part = lambda: function(*args, **kwargs)
self._executeTestPart(part, outcome)
# return this for backwards compatibility
        # even though we no longer use it internally
return outcome.success
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the test without collecting errors in a TestResult"""
self.setUp()
getattr(self, self._testMethodName)()
self.tearDown()
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
function(*args, **kwargs)
def skipTest(self, reason):
"""Skip this test."""
raise SkipTest(reason)
def fail(self, msg=None):
"""Fail immediately, with the given message."""
raise self.failureException(msg)
def assertFalse(self, expr, msg=None):
"""Check that the expression is false."""
if expr:
msg = self._formatMessage(msg, "%s is not false" % safe_repr(expr))
raise self.failureException(msg)
def assertTrue(self, expr, msg=None):
"""Check that the expression is true."""
if not expr:
msg = self._formatMessage(msg, "%s is not true" % safe_repr(expr))
raise self.failureException(msg)
def _formatMessage(self, msg, standardMsg):
"""Honour the longMessage attribute when generating failure messages.
If longMessage is False this means:
* Use only an explicit message if it is provided
* Otherwise use the standard message for the assert
If longMessage is True:
* Use the standard message
* If an explicit message is provided, plus ' : ' and the explicit message
"""
if not self.longMessage:
return msg or standardMsg
if msg is None:
return standardMsg
try:
# don't switch to '{}' formatting in Python 2.X
# it changes the way unicode input is handled
return '%s : %s' % (standardMsg, msg)
except UnicodeDecodeError:
return '%s : %s' % (safe_repr(standardMsg), safe_repr(msg))
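    # Illustrative sketch (not part of the original module): how longMessage
    # changes the failure text produced by _formatMessage.
    #
    #     self.longMessage = True
    #     self.assertEqual(1, 2, 'counter drifted')
    #     # AssertionError: 1 != 2 : counter drifted
    #
    #     self.longMessage = False
    #     self.assertEqual(1, 2, 'counter drifted')
    #     # AssertionError: counter drifted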
def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
"""Fail unless an exception of class excClass is raised
by callableObj when invoked with arguments args and keyword
arguments kwargs. If a different type of exception is
raised, it will not be caught, and the test case will be
deemed to have suffered an error, exactly as for an
unexpected exception.
If called with callableObj omitted or None, will return a
context object used like this::
with self.assertRaises(SomeException):
do_something()
An optional keyword argument 'msg' can be provided when assertRaises
is used as a context object.
The context manager keeps a reference to the exception as
the 'exception' attribute. This allows you to inspect the
exception after the assertion::
with self.assertRaises(SomeException) as cm:
do_something()
the_exception = cm.exception
self.assertEqual(the_exception.error_code, 3)
"""
context = _AssertRaisesContext(excClass, self, callableObj)
return context.handle('assertRaises', callableObj, args, kwargs)
def assertWarns(self, expected_warning, callable_obj=None, *args, **kwargs):
"""Fail unless a warning of class warnClass is triggered
by callable_obj when invoked with arguments args and keyword
arguments kwargs. If a different type of warning is
triggered, it will not be handled: depending on the other
warning filtering rules in effect, it might be silenced, printed
out, or raised as an exception.
If called with callable_obj omitted or None, will return a
context object used like this::
with self.assertWarns(SomeWarning):
do_something()
An optional keyword argument 'msg' can be provided when assertWarns
is used as a context object.
The context manager keeps a reference to the first matching
warning as the 'warning' attribute; similarly, the 'filename'
and 'lineno' attributes give you information about the line
of Python code from which the warning was triggered.
This allows you to inspect the warning after the assertion::
with self.assertWarns(SomeWarning) as cm:
do_something()
the_warning = cm.warning
self.assertEqual(the_warning.some_attribute, 147)
"""
context = _AssertWarnsContext(expected_warning, self, callable_obj)
return context.handle('assertWarns', callable_obj, args, kwargs)
def _getAssertEqualityFunc(self, first, second):
"""Get a detailed comparison function for the types of the two args.
Returns: A callable accepting (first, second, msg=None) that will
raise a failure exception if first != second with a useful human
readable error message for those types.
"""
#
# NOTE(gregory.p.smith): I considered isinstance(first, type(second))
# and vice versa. I opted for the conservative approach in case
# subclasses are not intended to be compared in detail to their super
# class instances using a type equality func. This means testing
# subtypes won't automagically use the detailed comparison. Callers
# should use their type specific assertSpamEqual method to compare
# subclasses if the detailed comparison is desired and appropriate.
# See the discussion in http://bugs.python.org/issue2578.
#
if type(first) is type(second):
asserter = self._type_equality_funcs.get(type(first))
if asserter is not None:
if isinstance(asserter, str):
asserter = getattr(self, asserter)
return asserter
return self._baseAssertEqual
def _baseAssertEqual(self, first, second, msg=None):
"""The default assertEqual implementation, not type specific."""
if not first == second:
standardMsg = '%s != %s' % (safe_repr(first), safe_repr(second))
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertEqual(self, first, second, msg=None):
"""Fail if the two objects are unequal as determined by the '=='
operator.
"""
assertion_func = self._getAssertEqualityFunc(first, second)
assertion_func(first, second, msg=msg)
def assertNotEqual(self, first, second, msg=None):
"""Fail if the two objects are equal as determined by the '!='
operator.
"""
if not first != second:
msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first),
safe_repr(second)))
raise self.failureException(msg)
def assertAlmostEqual(self, first, second, places=None, msg=None,
delta=None):
"""Fail if the two objects are unequal as determined by their
difference rounded to the given number of decimal places
           (default 7) and comparing to zero, or by comparing that the
           difference between the two objects is more than the given delta.
           Note that decimal places (from zero) are usually not the same
           as significant digits (measured from the most significant digit).
If the two objects compare equal then they will automatically
compare almost equal.
"""
if first == second:
# shortcut
return
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if abs(first - second) <= delta:
return
standardMsg = '%s != %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if round(abs(second-first), places) == 0:
return
standardMsg = '%s != %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
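    # Illustrative sketch (not part of the original module): `places` rounds
    # the difference to zero, while `delta` bounds it directly; supplying both
    # is an error.
    #
    #     self.assertAlmostEqual(1.00000001, 1.0)            # passes (places=7)
    #     self.assertAlmostEqual(0.1 + 0.2, 0.3, places=7)   # passes
    #     self.assertAlmostEqual(100.0, 101.0, delta=1.5)    # passes
    #     self.assertAlmostEqual(100.0, 102.0, delta=1.5)    # fails
    #     self.assertAlmostEqual(1.0, 1.1, places=1, delta=0.2)  # TypeError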
def assertNotAlmostEqual(self, first, second, places=None, msg=None,
delta=None):
"""Fail if the two objects are equal as determined by their
difference rounded to the given number of decimal places
           (default 7) and comparing to zero, or by comparing that the
           difference between the two objects is less than the given delta.
           Note that decimal places (from zero) are usually not the same
           as significant digits (measured from the most significant digit).
Objects that are equal automatically fail.
"""
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if not (first == second) and abs(first - second) > delta:
return
standardMsg = '%s == %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if not (first == second) and round(abs(second-first), places) != 0:
return
standardMsg = '%s == %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertSequenceEqual(self, seq1, seq2, msg=None, seq_type=None):
"""An equality assertion for ordered sequences (like lists and tuples).
For the purposes of this function, a valid ordered sequence type is one
which can be indexed, has a length, and has an equality operator.
Args:
seq1: The first sequence to compare.
seq2: The second sequence to compare.
seq_type: The expected datatype of the sequences, or None if no
datatype should be enforced.
msg: Optional message to use on failure instead of a list of
differences.
"""
if seq_type is not None:
seq_type_name = seq_type.__name__
if not isinstance(seq1, seq_type):
raise self.failureException('First sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq1)))
if not isinstance(seq2, seq_type):
raise self.failureException('Second sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq2)))
else:
seq_type_name = "sequence"
differing = None
try:
len1 = len(seq1)
except (TypeError, NotImplementedError):
differing = 'First %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
try:
len2 = len(seq2)
except (TypeError, NotImplementedError):
differing = 'Second %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
if seq1 == seq2:
return
seq1_repr = safe_repr(seq1)
seq2_repr = safe_repr(seq2)
if len(seq1_repr) > 30:
seq1_repr = seq1_repr[:30] + '...'
if len(seq2_repr) > 30:
seq2_repr = seq2_repr[:30] + '...'
elements = (seq_type_name.capitalize(), seq1_repr, seq2_repr)
differing = '%ss differ: %s != %s\n' % elements
for i in range(min(len1, len2)):
try:
item1 = seq1[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of first %s\n' %
(i, seq_type_name))
break
try:
item2 = seq2[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of second %s\n' %
(i, seq_type_name))
break
if item1 != item2:
differing += ('\nFirst differing element %d:\n%s\n%s\n' %
(i, item1, item2))
break
else:
if (len1 == len2 and seq_type is None and
type(seq1) != type(seq2)):
# The sequences are the same, but have differing types.
return
if len1 > len2:
differing += ('\nFirst %s contains %d additional '
'elements.\n' % (seq_type_name, len1 - len2))
try:
differing += ('First extra element %d:\n%s\n' %
(len2, seq1[len2]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of first %s\n' % (len2, seq_type_name))
elif len1 < len2:
differing += ('\nSecond %s contains %d additional '
'elements.\n' % (seq_type_name, len2 - len1))
try:
differing += ('First extra element %d:\n%s\n' %
(len1, seq2[len1]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of second %s\n' % (len1, seq_type_name))
standardMsg = differing
diffMsg = '\n' + '\n'.join(
difflib.ndiff(pprint.pformat(seq1).splitlines(),
pprint.pformat(seq2).splitlines()))
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def _truncateMessage(self, message, diff):
max_diff = self.maxDiff
if max_diff is None or len(diff) <= max_diff:
return message + diff
return message + (DIFF_OMITTED % len(diff))
def assertListEqual(self, list1, list2, msg=None):
"""A list-specific equality assertion.
Args:
list1: The first list to compare.
list2: The second list to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(list1, list2, msg, seq_type=list)
def assertTupleEqual(self, tuple1, tuple2, msg=None):
"""A tuple-specific equality assertion.
Args:
tuple1: The first tuple to compare.
tuple2: The second tuple to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(tuple1, tuple2, msg, seq_type=tuple)
def assertSetEqual(self, set1, set2, msg=None):
"""A set-specific equality assertion.
Args:
set1: The first set to compare.
set2: The second set to compare.
msg: Optional message to use on failure instead of a list of
differences.
        assertSetEqual uses duck typing to support different types of sets, and
is optimized for sets specifically (parameters must support a
difference method).
"""
try:
difference1 = set1.difference(set2)
except TypeError as e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError as e:
self.fail('first argument does not support set difference: %s' % e)
try:
difference2 = set2.difference(set1)
except TypeError as e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError as e:
self.fail('second argument does not support set difference: %s' % e)
if not (difference1 or difference2):
return
lines = []
if difference1:
lines.append('Items in the first set but not the second:')
for item in difference1:
lines.append(repr(item))
if difference2:
lines.append('Items in the second set but not the first:')
for item in difference2:
lines.append(repr(item))
standardMsg = '\n'.join(lines)
self.fail(self._formatMessage(msg, standardMsg))
def assertIn(self, member, container, msg=None):
"""Just like self.assertTrue(a in b), but with a nicer default message."""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIn(self, member, container, msg=None):
"""Just like self.assertTrue(a not in b), but with a nicer default message."""
if member in container:
standardMsg = '%s unexpectedly found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertIs(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is b), but with a nicer default message."""
if expr1 is not expr2:
standardMsg = '%s is not %s' % (safe_repr(expr1),
safe_repr(expr2))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNot(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is not b), but with a nicer default message."""
if expr1 is expr2:
standardMsg = 'unexpectedly identical: %s' % (safe_repr(expr1),)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictEqual(self, d1, d2, msg=None):
self.assertIsInstance(d1, dict, 'First argument is not a dictionary')
self.assertIsInstance(d2, dict, 'Second argument is not a dictionary')
if d1 != d2:
standardMsg = '%s != %s' % (safe_repr(d1, True), safe_repr(d2, True))
diff = ('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(d1).splitlines(),
pprint.pformat(d2).splitlines())))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictContainsSubset(self, subset, dictionary, msg=None):
"""Checks whether dictionary is a superset of subset."""
warnings.warn('assertDictContainsSubset is deprecated',
DeprecationWarning)
missing = []
mismatched = []
for key, value in subset.items():
if key not in dictionary:
missing.append(key)
elif value != dictionary[key]:
mismatched.append('%s, expected: %s, actual: %s' %
(safe_repr(key), safe_repr(value),
safe_repr(dictionary[key])))
if not (missing or mismatched):
return
standardMsg = ''
if missing:
standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
missing)
if mismatched:
if standardMsg:
standardMsg += '; '
standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
self.fail(self._formatMessage(msg, standardMsg))
def assertCountEqual(self, first, second, msg=None):
"""An unordered sequence comparison asserting that the same elements,
regardless of order. If the same element occurs more than once,
it verifies that the elements occur the same number of times.
self.assertEqual(Counter(list(first)),
Counter(list(second)))
Example:
- [0, 1, 1] and [1, 0, 1] compare equal.
- [0, 0, 1] and [0, 1] compare unequal.
"""
first_seq, second_seq = list(first), list(second)
try:
first = collections.Counter(first_seq)
second = collections.Counter(second_seq)
except TypeError:
# Handle case with unhashable elements
differences = _count_diff_all_purpose(first_seq, second_seq)
else:
if first == second:
return
differences = _count_diff_hashable(first_seq, second_seq)
if differences:
standardMsg = 'Element counts were not equal:\n'
lines = ['First has %d, Second has %d: %r' % diff for diff in differences]
diffMsg = '\n'.join(lines)
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def assertMultiLineEqual(self, first, second, msg=None):
"""Assert that two multi-line strings are equal."""
self.assertIsInstance(first, str, 'First argument is not a string')
self.assertIsInstance(second, str, 'Second argument is not a string')
if first != second:
# don't use difflib if the strings are too long
if (len(first) > self._diffThreshold or
len(second) > self._diffThreshold):
self._baseAssertEqual(first, second, msg)
firstlines = first.splitlines(keepends=True)
secondlines = second.splitlines(keepends=True)
if len(firstlines) == 1 and first.strip('\r\n') == first:
firstlines = [first + '\n']
secondlines = [second + '\n']
standardMsg = '%s != %s' % (safe_repr(first, True),
safe_repr(second, True))
diff = '\n' + ''.join(difflib.ndiff(firstlines, secondlines))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertLess(self, a, b, msg=None):
"""Just like self.assertTrue(a < b), but with a nicer default message."""
if not a < b:
standardMsg = '%s not less than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertLessEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a <= b), but with a nicer default message."""
if not a <= b:
standardMsg = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreater(self, a, b, msg=None):
"""Just like self.assertTrue(a > b), but with a nicer default message."""
if not a > b:
standardMsg = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreaterEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a >= b), but with a nicer default message."""
if not a >= b:
standardMsg = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNone(self, obj, msg=None):
"""Same as self.assertTrue(obj is None), with a nicer default message."""
if obj is not None:
standardMsg = '%s is not None' % (safe_repr(obj),)
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNotNone(self, obj, msg=None):
"""Included for symmetry with assertIsNone."""
if obj is None:
standardMsg = 'unexpectedly None'
self.fail(self._formatMessage(msg, standardMsg))
def assertIsInstance(self, obj, cls, msg=None):
"""Same as self.assertTrue(isinstance(obj, cls)), with a nicer
default message."""
if not isinstance(obj, cls):
standardMsg = '%s is not an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIsInstance(self, obj, cls, msg=None):
"""Included for symmetry with assertIsInstance."""
if isinstance(obj, cls):
standardMsg = '%s is an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertRaisesRegex(self, expected_exception, expected_regex,
callable_obj=None, *args, **kwargs):
"""Asserts that the message in a raised exception matches a regex.
Args:
expected_exception: Exception class expected to be raised.
expected_regex: Regex (re pattern object or string) expected
to be found in error message.
callable_obj: Function to be called.
msg: Optional message used in case of failure. Can only be used
when assertRaisesRegex is used as a context manager.
args: Extra args.
kwargs: Extra kwargs.
"""
context = _AssertRaisesContext(expected_exception, self, callable_obj,
expected_regex)
return context.handle('assertRaisesRegex', callable_obj, args, kwargs)
def assertWarnsRegex(self, expected_warning, expected_regex,
callable_obj=None, *args, **kwargs):
"""Asserts that the message in a triggered warning matches a regexp.
Basic functioning is similar to assertWarns() with the addition
that only warnings whose messages also match the regular expression
are considered successful matches.
Args:
expected_warning: Warning class expected to be triggered.
expected_regex: Regex (re pattern object or string) expected
to be found in error message.
callable_obj: Function to be called.
msg: Optional message used in case of failure. Can only be used
when assertWarnsRegex is used as a context manager.
args: Extra args.
kwargs: Extra kwargs.
"""
context = _AssertWarnsContext(expected_warning, self, callable_obj,
expected_regex)
return context.handle('assertWarnsRegex', callable_obj, args, kwargs)
def assertRegex(self, text, expected_regex, msg=None):
"""Fail the test unless the text matches the regular expression."""
if isinstance(expected_regex, (str, bytes)):
assert expected_regex, "expected_regex must not be empty."
expected_regex = re.compile(expected_regex)
if not expected_regex.search(text):
msg = msg or "Regex didn't match"
msg = '%s: %r not found in %r' % (msg, expected_regex.pattern, text)
raise self.failureException(msg)
def assertNotRegex(self, text, unexpected_regex, msg=None):
"""Fail the test if the text matches the regular expression."""
if isinstance(unexpected_regex, (str, bytes)):
unexpected_regex = re.compile(unexpected_regex)
match = unexpected_regex.search(text)
if match:
msg = msg or "Regex matched"
msg = '%s: %r matches %r in %r' % (msg,
text[match.start():match.end()],
unexpected_regex.pattern,
text)
raise self.failureException(msg)
def _deprecate(original_func):
def deprecated_func(*args, **kwargs):
warnings.warn(
'Please use {0} instead.'.format(original_func.__name__),
DeprecationWarning, 2)
return original_func(*args, **kwargs)
return deprecated_func
# see #9424
failUnlessEqual = assertEquals = _deprecate(assertEqual)
failIfEqual = assertNotEquals = _deprecate(assertNotEqual)
failUnlessAlmostEqual = assertAlmostEquals = _deprecate(assertAlmostEqual)
failIfAlmostEqual = assertNotAlmostEquals = _deprecate(assertNotAlmostEqual)
failUnless = assert_ = _deprecate(assertTrue)
failUnlessRaises = _deprecate(assertRaises)
failIf = _deprecate(assertFalse)
assertRaisesRegexp = _deprecate(assertRaisesRegex)
assertRegexpMatches = _deprecate(assertRegex)
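    # Illustrative sketch (not part of the original module): calling a
    # deprecated alias still works but emits a DeprecationWarning naming the
    # modern spelling. `case` is a hypothetical TestCase instance.
    #
    #     import warnings
    #     with warnings.catch_warnings(record=True) as caught:
    #         warnings.simplefilter('always')
    #         case.assertEquals(1, 1)   # deprecated spelling of assertEqual
    #     assert 'Please use assertEqual instead.' in str(caught[0].message)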
class FunctionTestCase(TestCase):
"""A test case that wraps a test function.
This is useful for slipping pre-existing test functions into the
unittest framework. Optionally, set-up and tidy-up functions can be
supplied. As with TestCase, the tidy-up ('tearDown') function will
always be called if the set-up ('setUp') function ran successfully.
"""
def __init__(self, testFunc, setUp=None, tearDown=None, description=None):
super(FunctionTestCase, self).__init__()
self._setUpFunc = setUp
self._tearDownFunc = tearDown
self._testFunc = testFunc
self._description = description
def setUp(self):
if self._setUpFunc is not None:
self._setUpFunc()
def tearDown(self):
if self._tearDownFunc is not None:
self._tearDownFunc()
def runTest(self):
self._testFunc()
def id(self):
return self._testFunc.__name__
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return self._setUpFunc == other._setUpFunc and \
self._tearDownFunc == other._tearDownFunc and \
self._testFunc == other._testFunc and \
self._description == other._description
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._setUpFunc, self._tearDownFunc,
self._testFunc, self._description))
def __str__(self):
return "%s (%s)" % (strclass(self.__class__),
self._testFunc.__name__)
def __repr__(self):
return "<%s tec=%s>" % (strclass(self.__class__),
self._testFunc)
def shortDescription(self):
if self._description is not None:
return self._description
doc = self._testFunc.__doc__
return doc and doc.split("\n")[0].strip() or None
| agpl-3.0 |
kdwink/intellij-community | python/lib/Lib/site-packages/django/contrib/gis/db/backends/mysql/introspection.py | 624 | 1426 | from MySQLdb.constants import FIELD_TYPE
from django.contrib.gis.gdal import OGRGeomType
from django.db.backends.mysql.introspection import DatabaseIntrospection
class MySQLIntrospection(DatabaseIntrospection):
# Updating the data_types_reverse dictionary with the appropriate
# type for Geometry fields.
data_types_reverse = DatabaseIntrospection.data_types_reverse.copy()
data_types_reverse[FIELD_TYPE.GEOMETRY] = 'GeometryField'
def get_geometry_type(self, table_name, geo_col):
cursor = self.connection.cursor()
try:
# In order to get the specific geometry type of the field,
# we introspect on the table definition using `DESCRIBE`.
cursor.execute('DESCRIBE %s' %
self.connection.ops.quote_name(table_name))
            # Iterate over the description info until we get to the geometry
            # column.
for column, typ, null, key, default, extra in cursor.fetchall():
if column == geo_col:
# Using OGRGeomType to convert from OGC name to Django field.
# MySQL does not support 3D or SRIDs, so the field params
# are empty.
field_type = OGRGeomType(typ).django
field_params = {}
break
finally:
cursor.close()
return field_type, field_params
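    # Illustrative sketch (not part of the original module): given a table
    # `geo_places` with a POINT column `location`, the DESCRIBE-based
    # introspection above maps the MySQL type back to a Django field name with
    # empty params (MySQL carries no 3D/SRID info). Names are hypothetical.
    #
    #     introspection = MySQLIntrospection(connection)
    #     introspection.get_geometry_type('geo_places', 'location')
    #     # -> ('PointField', {})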
| apache-2.0 |
franky88/emperioanimesta | env/Lib/site-packages/django/contrib/admin/options.py | 27 | 83212 | from __future__ import unicode_literals
import copy
import json
import operator
from collections import OrderedDict
from functools import partial, reduce, update_wrapper
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.admin import helpers, widgets
from django.contrib.admin.checks import (
BaseModelAdminChecks, InlineModelAdminChecks, ModelAdminChecks,
)
from django.contrib.admin.exceptions import DisallowedModelAdminToField
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.utils import (
NestedObjects, flatten_fieldsets, get_deleted_objects,
lookup_needs_distinct, model_format_dict, quote, unquote,
)
from django.contrib.auth import get_permission_codename
from django.core.exceptions import (
FieldDoesNotExist, FieldError, PermissionDenied, ValidationError,
)
from django.core.paginator import Paginator
from django.db import models, router, transaction
from django.db.models.constants import LOOKUP_SEP
from django.db.models.fields import BLANK_CHOICE_DASH
from django.forms.formsets import DELETION_FIELD_NAME, all_valid
from django.forms.models import (
BaseInlineFormSet, inlineformset_factory, modelform_defines_fields,
modelform_factory, modelformset_factory,
)
from django.forms.widgets import CheckboxSelectMultiple, SelectMultiple
from django.http import Http404, HttpResponseRedirect
from django.http.response import HttpResponseBase
from django.template.response import SimpleTemplateResponse, TemplateResponse
from django.urls import reverse
from django.utils import six
from django.utils.decorators import method_decorator
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.html import escape, format_html
from django.utils.http import urlencode, urlquote
from django.utils.safestring import mark_safe
from django.utils.text import capfirst, get_text_list
from django.utils.translation import (
override as translation_override, string_concat, ugettext as _, ungettext,
)
from django.views.decorators.csrf import csrf_protect
from django.views.generic import RedirectView
IS_POPUP_VAR = '_popup'
TO_FIELD_VAR = '_to_field'
HORIZONTAL, VERTICAL = 1, 2
def get_content_type_for_model(obj):
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level.
from django.contrib.contenttypes.models import ContentType
return ContentType.objects.get_for_model(obj, for_concrete_model=False)
def get_ul_class(radio_style):
return 'radiolist' if radio_style == VERTICAL else 'radiolist inline'
class IncorrectLookupParameters(Exception):
pass
# Defaults for formfield_overrides. ModelAdmin subclasses can change this
# by adding to ModelAdmin.formfield_overrides.
FORMFIELD_FOR_DBFIELD_DEFAULTS = {
models.DateTimeField: {
'form_class': forms.SplitDateTimeField,
'widget': widgets.AdminSplitDateTime
},
models.DateField: {'widget': widgets.AdminDateWidget},
models.TimeField: {'widget': widgets.AdminTimeWidget},
models.TextField: {'widget': widgets.AdminTextareaWidget},
models.URLField: {'widget': widgets.AdminURLFieldWidget},
models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget},
models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget},
models.CharField: {'widget': widgets.AdminTextInputWidget},
models.ImageField: {'widget': widgets.AdminFileWidget},
models.FileField: {'widget': widgets.AdminFileWidget},
models.EmailField: {'widget': widgets.AdminEmailInputWidget},
}
csrf_protect_m = method_decorator(csrf_protect)
class BaseModelAdmin(six.with_metaclass(forms.MediaDefiningClass)):
"""Functionality common to both ModelAdmin and InlineAdmin."""
raw_id_fields = ()
fields = None
exclude = None
fieldsets = None
form = forms.ModelForm
filter_vertical = ()
filter_horizontal = ()
radio_fields = {}
prepopulated_fields = {}
formfield_overrides = {}
readonly_fields = ()
ordering = None
view_on_site = True
show_full_result_count = True
checks_class = BaseModelAdminChecks
def check(self, **kwargs):
return self.checks_class().check(self, **kwargs)
def __init__(self):
# Merge FORMFIELD_FOR_DBFIELD_DEFAULTS with the formfield_overrides
# rather than simply overwriting.
overrides = copy.deepcopy(FORMFIELD_FOR_DBFIELD_DEFAULTS)
for k, v in self.formfield_overrides.items():
overrides.setdefault(k, {}).update(v)
self.formfield_overrides = overrides
def formfield_for_dbfield(self, db_field, request, **kwargs):
"""
Hook for specifying the form Field instance for a given database Field
instance.
If kwargs are given, they're passed to the form Field's constructor.
"""
# If the field specifies choices, we don't need to look for special
# admin widgets - we just need to use a select widget of some kind.
if db_field.choices:
return self.formfield_for_choice_field(db_field, request, **kwargs)
# ForeignKey or ManyToManyFields
if isinstance(db_field, models.ManyToManyField) or isinstance(db_field, models.ForeignKey):
# Combine the field kwargs with any options for formfield_overrides.
# Make sure the passed in **kwargs override anything in
# formfield_overrides because **kwargs is more specific, and should
# always win.
if db_field.__class__ in self.formfield_overrides:
kwargs = dict(self.formfield_overrides[db_field.__class__], **kwargs)
# Get the correct formfield.
if isinstance(db_field, models.ForeignKey):
formfield = self.formfield_for_foreignkey(db_field, request, **kwargs)
elif isinstance(db_field, models.ManyToManyField):
formfield = self.formfield_for_manytomany(db_field, request, **kwargs)
# For non-raw_id fields, wrap the widget with a wrapper that adds
# extra HTML -- the "add other" interface -- to the end of the
# rendered output. formfield can be None if it came from a
# OneToOneField with parent_link=True or a M2M intermediary.
if formfield and db_field.name not in self.raw_id_fields:
related_modeladmin = self.admin_site._registry.get(db_field.remote_field.model)
wrapper_kwargs = {}
if related_modeladmin:
wrapper_kwargs.update(
can_add_related=related_modeladmin.has_add_permission(request),
can_change_related=related_modeladmin.has_change_permission(request),
can_delete_related=related_modeladmin.has_delete_permission(request),
)
formfield.widget = widgets.RelatedFieldWidgetWrapper(
formfield.widget, db_field.remote_field, self.admin_site, **wrapper_kwargs
)
return formfield
# If we've got overrides for the formfield defined, use 'em. **kwargs
# passed to formfield_for_dbfield override the defaults.
for klass in db_field.__class__.mro():
if klass in self.formfield_overrides:
kwargs = dict(copy.deepcopy(self.formfield_overrides[klass]), **kwargs)
return db_field.formfield(**kwargs)
# For any other type of field, just call its formfield() method.
return db_field.formfield(**kwargs)
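    # Illustrative sketch (not part of the original module): because of the
    # MRO walk above, an override registered for a base field class also
    # applies to its subclasses unless a more specific entry exists.
    #
    #     class ArticleAdmin(ModelAdmin):
    #         formfield_overrides = {
    #             # applies to TextField and any subclass of it
    #             models.TextField: {'widget': forms.Textarea(attrs={'rows': 4})},
    #         }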
def formfield_for_choice_field(self, db_field, request, **kwargs):
"""
Get a form Field for a database Field that has declared choices.
"""
# If the field is named as a radio_field, use a RadioSelect
if db_field.name in self.radio_fields:
# Avoid stomping on custom widget/choices arguments.
if 'widget' not in kwargs:
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
if 'choices' not in kwargs:
kwargs['choices'] = db_field.get_choices(
include_blank=db_field.blank,
blank_choice=[('', _('None'))]
)
return db_field.formfield(**kwargs)
def get_field_queryset(self, db, db_field, request):
"""
If the ModelAdmin specifies ordering, the queryset should respect that
ordering. Otherwise don't specify the queryset, let the field decide
(returns None in that case).
"""
related_admin = self.admin_site._registry.get(db_field.remote_field.model)
if related_admin is not None:
ordering = related_admin.get_ordering(request)
if ordering is not None and ordering != ():
return db_field.remote_field.model._default_manager.using(db).order_by(*ordering)
return None
def formfield_for_foreignkey(self, db_field, request, **kwargs):
"""
Get a form Field for a ForeignKey.
"""
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.remote_field, self.admin_site, using=db)
elif db_field.name in self.radio_fields:
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
kwargs['empty_label'] = _('None') if db_field.blank else None
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
return db_field.formfield(**kwargs)
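    # Illustrative sketch (not part of the original module): a common override
    # pattern, restricting the queryset before delegating back to this
    # implementation. `Car.owner` and the staff filter are hypothetical.
    #
    #     class CarAdmin(ModelAdmin):
    #         def formfield_for_foreignkey(self, db_field, request, **kwargs):
    #             if db_field.name == 'owner':
    #                 kwargs['queryset'] = User.objects.filter(is_staff=True)
    #             return super(CarAdmin, self).formfield_for_foreignkey(
    #                 db_field, request, **kwargs)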
def formfield_for_manytomany(self, db_field, request, **kwargs):
"""
Get a form Field for a ManyToManyField.
"""
# If it uses an intermediary model that isn't auto created, don't show
# a field in admin.
if not db_field.remote_field.through._meta.auto_created:
return None
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = widgets.ManyToManyRawIdWidget(db_field.remote_field, self.admin_site, using=db)
elif db_field.name in (list(self.filter_vertical) + list(self.filter_horizontal)):
kwargs['widget'] = widgets.FilteredSelectMultiple(
db_field.verbose_name,
db_field.name in self.filter_vertical
)
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
form_field = db_field.formfield(**kwargs)
if isinstance(form_field.widget, SelectMultiple) and not isinstance(form_field.widget, CheckboxSelectMultiple):
msg = _('Hold down "Control", or "Command" on a Mac, to select more than one.')
help_text = form_field.help_text
form_field.help_text = string_concat(help_text, ' ', msg) if help_text else msg
return form_field
def get_view_on_site_url(self, obj=None):
if obj is None or not self.view_on_site:
return None
if callable(self.view_on_site):
return self.view_on_site(obj)
elif self.view_on_site and hasattr(obj, 'get_absolute_url'):
# use the ContentType lookup if view_on_site is True
return reverse('admin:view_on_site', kwargs={
'content_type_id': get_content_type_for_model(obj).pk,
'object_id': obj.pk
})
def get_empty_value_display(self):
"""
Return the empty_value_display set on ModelAdmin or AdminSite.
"""
try:
return mark_safe(self.empty_value_display)
except AttributeError:
return mark_safe(self.admin_site.empty_value_display)
def get_fields(self, request, obj=None):
"""
Hook for specifying fields.
"""
return self.fields
def get_fieldsets(self, request, obj=None):
"""
Hook for specifying fieldsets.
"""
if self.fieldsets:
return self.fieldsets
return [(None, {'fields': self.get_fields(request, obj)})]
def get_ordering(self, request):
"""
Hook for specifying field ordering.
"""
return self.ordering or () # otherwise we might try to *None, which is bad ;)
def get_readonly_fields(self, request, obj=None):
"""
Hook for specifying custom readonly fields.
"""
return self.readonly_fields
def get_prepopulated_fields(self, request, obj=None):
"""
Hook for specifying custom prepopulated fields.
"""
return self.prepopulated_fields
def get_queryset(self, request):
"""
Returns a QuerySet of all model instances that can be edited by the
admin site. This is used by changelist_view.
"""
qs = self.model._default_manager.get_queryset()
# TODO: this should be handled by some parameter to the ChangeList.
ordering = self.get_ordering(request)
if ordering:
qs = qs.order_by(*ordering)
return qs
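    # Illustrative sketch (not part of the original module): a common
    # get_queryset override that narrows the changelist to rows owned by the
    # requesting user. `owner` is a hypothetical model field.
    #
    #     class MyModelAdmin(ModelAdmin):
    #         def get_queryset(self, request):
    #             qs = super(MyModelAdmin, self).get_queryset(request)
    #             if request.user.is_superuser:
    #                 return qs
    #             return qs.filter(owner=request.user)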
def lookup_allowed(self, lookup, value):
from django.contrib.admin.filters import SimpleListFilter
model = self.model
# Check FKey lookups that are allowed, so that popups produced by
# ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to,
# are allowed to work.
for l in model._meta.related_fkey_lookups:
# As ``limit_choices_to`` can be a callable, invoke it here.
if callable(l):
l = l()
for k, v in widgets.url_params_from_lookup_dict(l).items():
if k == lookup and v == value:
return True
relation_parts = []
prev_field = None
for part in lookup.split(LOOKUP_SEP):
try:
field = model._meta.get_field(part)
except FieldDoesNotExist:
# Lookups on non-existent fields are ok, since they're ignored
# later.
break
# It is allowed to filter on values that would be found from local
# model anyways. For example, if you filter on employee__department__id,
# then the id value would be found already from employee__department_id.
if not prev_field or (prev_field.concrete and
field not in prev_field.get_path_info()[-1].target_fields):
relation_parts.append(part)
if not getattr(field, 'get_path_info', None):
# This is not a relational field, so further parts
# must be transforms.
break
prev_field = field
model = field.get_path_info()[-1].to_opts.model
if len(relation_parts) <= 1:
# Either a local field filter, or no fields at all.
return True
clean_lookup = LOOKUP_SEP.join(relation_parts)
valid_lookups = [self.date_hierarchy]
for filter_item in self.list_filter:
if isinstance(filter_item, type) and issubclass(filter_item, SimpleListFilter):
valid_lookups.append(filter_item.parameter_name)
elif isinstance(filter_item, (list, tuple)):
valid_lookups.append(filter_item[0])
else:
valid_lookups.append(filter_item)
return clean_lookup in valid_lookups
def to_field_allowed(self, request, to_field):
"""
Returns True if the model associated with this admin should be
allowed to be referenced by the specified field.
"""
opts = self.model._meta
try:
field = opts.get_field(to_field)
except FieldDoesNotExist:
return False
# Always allow referencing the primary key since it's already possible
# to get this information from the change view URL.
if field.primary_key:
return True
# Allow reverse relationships to models defining m2m fields if they
# target the specified field.
for many_to_many in opts.many_to_many:
if many_to_many.m2m_target_field_name() == to_field:
return True
# Make sure at least one of the models registered for this site
# references this field through a FK or a M2M relationship.
registered_models = set()
for model, admin in self.admin_site._registry.items():
registered_models.add(model)
for inline in admin.inlines:
registered_models.add(inline.model)
related_objects = (
f for f in opts.get_fields(include_hidden=True)
if (f.auto_created and not f.concrete)
)
for related_object in related_objects:
related_model = related_object.related_model
remote_field = related_object.field.remote_field
if (any(issubclass(model, related_model) for model in registered_models) and
hasattr(remote_field, 'get_related_field') and
remote_field.get_related_field() == field):
return True
return False
def has_add_permission(self, request):
"""
Returns True if the given request has permission to add an object.
Can be overridden by the user in subclasses.
"""
opts = self.opts
codename = get_permission_codename('add', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_change_permission(self, request, obj=None):
"""
        Returns True if the given request has permission to change the given
        Django model instance; the default implementation doesn't examine the
`obj` parameter.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to change the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to change *any* object of the given type.
"""
opts = self.opts
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_delete_permission(self, request, obj=None):
"""
        Returns True if the given request has permission to delete the given
        Django model instance; the default implementation doesn't examine the
`obj` parameter.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to delete the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to delete *any* object of the given type.
"""
opts = self.opts
codename = get_permission_codename('delete', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_module_permission(self, request):
"""
Returns True if the given request has any permission in the given
app label.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to view the module on
the admin index page and access the module's index page. Overriding it
does not restrict access to the add, change or delete views. Use
`ModelAdmin.has_(add|change|delete)_permission` for that.
"""
return request.user.has_module_perms(self.opts.app_label)
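    # Illustrative sketch (not part of the original module): the permission
    # hooks compose, so overriding one tightens a single operation without
    # touching the others.
    #
    #     class ReadOnlyAdmin(ModelAdmin):
    #         def has_add_permission(self, request):
    #             return False
    #         def has_delete_permission(self, request, obj=None):
    #             return False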
@python_2_unicode_compatible
class ModelAdmin(BaseModelAdmin):
"Encapsulates all admin options and functionality for a given model."
list_display = ('__str__',)
list_display_links = ()
list_filter = ()
list_select_related = False
list_per_page = 100
list_max_show_all = 200
list_editable = ()
search_fields = ()
date_hierarchy = None
save_as = False
save_as_continue = True
save_on_top = False
paginator = Paginator
preserve_filters = True
inlines = []
    # Custom templates (designed to be overridden in subclasses)
add_form_template = None
change_form_template = None
change_list_template = None
delete_confirmation_template = None
delete_selected_confirmation_template = None
object_history_template = None
# Actions
actions = []
action_form = helpers.ActionForm
actions_on_top = True
actions_on_bottom = False
actions_selection_counter = True
checks_class = ModelAdminChecks
def __init__(self, model, admin_site):
self.model = model
self.opts = model._meta
self.admin_site = admin_site
super(ModelAdmin, self).__init__()
def __str__(self):
return "%s.%s" % (self.model._meta.app_label, self.__class__.__name__)
def get_inline_instances(self, request, obj=None):
inline_instances = []
for inline_class in self.inlines:
inline = inline_class(self.model, self.admin_site)
if request:
if not (inline.has_add_permission(request) or
inline.has_change_permission(request, obj) or
inline.has_delete_permission(request, obj)):
continue
if not inline.has_add_permission(request):
inline.max_num = 0
inline_instances.append(inline)
return inline_instances
def get_urls(self):
from django.conf.urls import url
def wrap(view):
def wrapper(*args, **kwargs):
return self.admin_site.admin_view(view)(*args, **kwargs)
wrapper.model_admin = self
return update_wrapper(wrapper, view)
info = self.model._meta.app_label, self.model._meta.model_name
urlpatterns = [
url(r'^$', wrap(self.changelist_view), name='%s_%s_changelist' % info),
url(r'^add/$', wrap(self.add_view), name='%s_%s_add' % info),
url(r'^(.+)/history/$', wrap(self.history_view), name='%s_%s_history' % info),
url(r'^(.+)/delete/$', wrap(self.delete_view), name='%s_%s_delete' % info),
url(r'^(.+)/change/$', wrap(self.change_view), name='%s_%s_change' % info),
# For backwards compatibility (was the change url before 1.9)
url(r'^(.+)/$', wrap(RedirectView.as_view(
pattern_name='%s:%s_%s_change' % ((self.admin_site.name,) + info)
))),
]
return urlpatterns
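    # Illustrative sketch (not part of the original module): extending
    # get_urls with a custom admin view. Custom patterns go first so the
    # trailing `^(.+)/$` change redirect above cannot shadow the new route;
    # `report_view` is a hypothetical method.
    #
    #     class MyModelAdmin(ModelAdmin):
    #         def get_urls(self):
    #             from django.conf.urls import url
    #             custom = [
    #                 url(r'^report/$',
    #                     self.admin_site.admin_view(self.report_view),
    #                     name='mymodel_report'),
    #             ]
    #             return custom + super(MyModelAdmin, self).get_urls()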
def urls(self):
return self.get_urls()
urls = property(urls)
@property
def media(self):
extra = '' if settings.DEBUG else '.min'
js = [
'core.js',
'vendor/jquery/jquery%s.js' % extra,
'jquery.init.js',
'admin/RelatedObjectLookups.js',
'actions%s.js' % extra,
'urlify.js',
'prepopulate%s.js' % extra,
'vendor/xregexp/xregexp%s.js' % extra,
]
return forms.Media(js=['admin/js/%s' % url for url in js])
def get_model_perms(self, request):
"""
Returns a dict of all perms for this model. This dict has the keys
``add``, ``change``, and ``delete`` mapping to the True/False for each
of those actions.
"""
return {
'add': self.has_add_permission(request),
'change': self.has_change_permission(request),
'delete': self.has_delete_permission(request),
}
def get_fields(self, request, obj=None):
if self.fields:
return self.fields
form = self.get_form(request, obj, fields=None)
return list(form.base_fields) + list(self.get_readonly_fields(request, obj))
def get_form(self, request, obj=None, **kwargs):
"""
Returns a Form class for use in the admin add view. This is used by
add_view and change_view.
"""
if 'fields' in kwargs:
fields = kwargs.pop('fields')
else:
fields = flatten_fieldsets(self.get_fieldsets(request, obj))
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
readonly_fields = self.get_readonly_fields(request, obj)
exclude.extend(readonly_fields)
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# ModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# if exclude is an empty list we pass None to be consistent with the
# default on modelform_factory
exclude = exclude or None
# Remove declared form fields which are in readonly_fields.
new_attrs = OrderedDict(
(f, None) for f in readonly_fields
if f in self.form.declared_fields
)
form = type(self.form.__name__, (self.form,), new_attrs)
defaults = {
"form": form,
"fields": fields,
"exclude": exclude,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
try:
return modelform_factory(self.model, **defaults)
except FieldError as e:
raise FieldError(
'%s. Check fields/fieldsets/exclude attributes of class %s.'
% (e, self.__class__.__name__)
)
def get_changelist(self, request, **kwargs):
"""
Returns the ChangeList class for use on the changelist page.
"""
from django.contrib.admin.views.main import ChangeList
return ChangeList
def get_object(self, request, object_id, from_field=None):
"""
        Returns an instance matching the field and value provided; the primary
key is used if no field is provided. Returns ``None`` if no match is
found or the object_id fails validation.
"""
queryset = self.get_queryset(request)
model = queryset.model
field = model._meta.pk if from_field is None else model._meta.get_field(from_field)
try:
object_id = field.to_python(object_id)
return queryset.get(**{field.name: object_id})
except (model.DoesNotExist, ValidationError, ValueError):
return None
def get_changelist_form(self, request, **kwargs):
"""
Returns a Form class for use in the Formset on the changelist page.
"""
defaults = {
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
if defaults.get('fields') is None and not modelform_defines_fields(defaults.get('form')):
defaults['fields'] = forms.ALL_FIELDS
return modelform_factory(self.model, **defaults)
def get_changelist_formset(self, request, **kwargs):
"""
Returns a FormSet class for use on the changelist page if list_editable
is used.
"""
defaults = {
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
return modelformset_factory(
self.model, self.get_changelist_form(request), extra=0,
fields=self.list_editable, **defaults
)
def get_formsets_with_inlines(self, request, obj=None):
"""
Yields formsets and the corresponding inlines.
"""
for inline in self.get_inline_instances(request, obj):
yield inline.get_formset(request, obj), inline
def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True):
return self.paginator(queryset, per_page, orphans, allow_empty_first_page)
def log_addition(self, request, object, message):
"""
Log that an object has been successfully added.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, ADDITION
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=ADDITION,
change_message=message,
)
def log_change(self, request, object, message):
"""
Log that an object has been successfully changed.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, CHANGE
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=CHANGE,
change_message=message,
)
def log_deletion(self, request, object, object_repr):
"""
Log that an object will be deleted. Note that this method must be
called before the deletion.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, DELETION
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=object_repr,
action_flag=DELETION,
)
def action_checkbox(self, obj):
"""
A list_display column containing a checkbox widget.
"""
return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, force_text(obj.pk))
action_checkbox.short_description = mark_safe('<input type="checkbox" id="action-toggle" />')
def get_actions(self, request):
"""
Return a dictionary mapping the names of all actions for this
ModelAdmin to a tuple of (callable, name, description) for each action.
"""
# If self.actions is explicitly set to None that means that we don't
# want *any* actions enabled on this page.
if self.actions is None or IS_POPUP_VAR in request.GET:
return OrderedDict()
actions = []
# Gather actions from the admin site first
for (name, func) in self.admin_site.actions:
description = getattr(func, 'short_description', name.replace('_', ' '))
actions.append((func, name, description))
# Then gather them from the model admin and all parent classes,
# starting with self and working back up.
for klass in self.__class__.mro()[::-1]:
class_actions = getattr(klass, 'actions', [])
# Avoid trying to iterate over None
if not class_actions:
continue
actions.extend(self.get_action(action) for action in class_actions)
# get_action might have returned None, so filter any of those out.
actions = filter(None, actions)
# Convert the actions into an OrderedDict keyed by name.
actions = OrderedDict(
(name, (func, name, desc))
for func, name, desc in actions
)
return actions
def get_action_choices(self, request, default_choices=BLANK_CHOICE_DASH):
"""
Return a list of choices for use in a form object. Each choice is a
tuple (name, description).
"""
choices = [] + default_choices
for func, name, description in six.itervalues(self.get_actions(request)):
choice = (name, description % model_format_dict(self.opts))
choices.append(choice)
return choices
def get_action(self, action):
"""
Return a given action from a parameter, which can either be a callable,
or the name of a method on the ModelAdmin. Return is a tuple of
(callable, name, description).
"""
# If the action is a callable, just use it.
if callable(action):
func = action
action = action.__name__
# Next, look for a method. Grab it off self.__class__ to get an unbound
# method instead of a bound one; this ensures that the calling
# conventions are the same for functions and methods.
elif hasattr(self.__class__, action):
func = getattr(self.__class__, action)
# Finally, look for a named method on the admin site
else:
try:
func = self.admin_site.get_action(action)
except KeyError:
return None
if hasattr(func, 'short_description'):
description = func.short_description
else:
description = capfirst(action.replace('_', ' '))
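        # Example (illustrative, editor's note): for a standalone function
        #   def make_published(modeladmin, request, queryset): ...
        # with make_published.short_description = 'Mark selected published',
        # this method returns the tuple
        #   (make_published, 'make_published', 'Mark selected published').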
return func, action, description
def get_list_display(self, request):
"""
Return a sequence containing the fields to be displayed on the
changelist.
"""
return self.list_display
def get_list_display_links(self, request, list_display):
"""
Return a sequence containing the fields to be displayed as links
on the changelist. The list_display parameter is the list of fields
returned by get_list_display().
"""
if self.list_display_links or self.list_display_links is None or not list_display:
return self.list_display_links
else:
# Use only the first item in list_display as link
return list(list_display)[:1]
def get_list_filter(self, request):
"""
Returns a sequence containing the fields to be displayed as filters in
the right sidebar of the changelist page.
"""
return self.list_filter
def get_list_select_related(self, request):
"""
Returns a list of fields to add to the select_related() part of the
changelist items query.
"""
return self.list_select_related
def get_search_fields(self, request):
"""
Returns a sequence containing the fields to be searched whenever
somebody submits a search query.
"""
return self.search_fields
def get_search_results(self, request, queryset, search_term):
"""
Returns a tuple containing a queryset to implement the search,
and a boolean indicating if the results may contain duplicates.
"""
# Apply keyword searches.
def construct_search(field_name):
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
use_distinct = False
search_fields = self.get_search_fields(request)
if search_fields and search_term:
orm_lookups = [construct_search(str(search_field))
for search_field in search_fields]
for bit in search_term.split():
or_queries = [models.Q(**{orm_lookup: bit})
for orm_lookup in orm_lookups]
queryset = queryset.filter(reduce(operator.or_, or_queries))
if not use_distinct:
for search_spec in orm_lookups:
if lookup_needs_distinct(self.opts, search_spec):
use_distinct = True
break
return queryset, use_distinct
def get_preserved_filters(self, request):
"""
Returns the preserved filters querystring.
"""
match = request.resolver_match
if self.preserve_filters and match:
opts = self.model._meta
current_url = '%s:%s' % (match.app_name, match.url_name)
changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name)
if current_url == changelist_url:
preserved_filters = request.GET.urlencode()
else:
preserved_filters = request.GET.get('_changelist_filters')
if preserved_filters:
return urlencode({'_changelist_filters': preserved_filters})
return ''
def construct_change_message(self, request, form, formsets, add=False):
"""
Construct a JSON structure describing changes from a changed object.
Translations are deactivated so that strings are stored untranslated.
Translation happens later on LogEntry access.
"""
change_message = []
if add:
change_message.append({'added': {}})
elif form.changed_data:
change_message.append({'changed': {'fields': form.changed_data}})
if formsets:
with translation_override(None):
for formset in formsets:
for added_object in formset.new_objects:
change_message.append({
'added': {
'name': force_text(added_object._meta.verbose_name),
'object': force_text(added_object),
}
})
for changed_object, changed_fields in formset.changed_objects:
change_message.append({
'changed': {
'name': force_text(changed_object._meta.verbose_name),
'object': force_text(changed_object),
'fields': changed_fields,
}
})
for deleted_object in formset.deleted_objects:
change_message.append({
'deleted': {
'name': force_text(deleted_object._meta.verbose_name),
'object': force_text(deleted_object),
}
})
return change_message
def message_user(self, request, message, level=messages.INFO, extra_tags='',
fail_silently=False):
"""
Send a message to the user. The default implementation
posts a message using the django.contrib.messages backend.
Exposes almost the same API as messages.add_message(), but accepts the
positional arguments in a different order to maintain backwards
compatibility. For convenience, it accepts the `level` argument as
a string rather than the usual level number.
"""
if not isinstance(level, int):
# attempt to get the level if passed a string
try:
level = getattr(messages.constants, level.upper())
except AttributeError:
levels = messages.constants.DEFAULT_TAGS.values()
levels_repr = ', '.join('`%s`' % l for l in levels)
raise ValueError(
'Bad message level string: `%s`. Possible values are: %s'
% (level, levels_repr)
)
messages.add_message(request, level, message, extra_tags=extra_tags, fail_silently=fail_silently)
def save_form(self, request, form, change):
"""
Given a ModelForm return an unsaved instance. ``change`` is True if
the object is being changed, and False if it's being added.
"""
return form.save(commit=False)
def save_model(self, request, obj, form, change):
"""
        Given a model instance, save it to the database.
"""
obj.save()
def delete_model(self, request, obj):
"""
        Given a model instance, delete it from the database.
"""
obj.delete()
def save_formset(self, request, form, formset, change):
"""
        Given an inline formset, save it to the database.
"""
formset.save()
def save_related(self, request, form, formsets, change):
"""
Given the ``HttpRequest``, the parent ``ModelForm`` instance, the
list of inline formsets and a boolean value based on whether the
parent is being added or changed, save the related objects to the
database. Note that at this point save_form() and save_model() have
already been called.
"""
form.save_m2m()
for formset in formsets:
self.save_formset(request, form, formset, change=change)
def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
opts = self.model._meta
app_label = opts.app_label
preserved_filters = self.get_preserved_filters(request)
form_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, form_url)
view_on_site_url = self.get_view_on_site_url(obj)
context.update({
'add': add,
'change': change,
'has_add_permission': self.has_add_permission(request),
'has_change_permission': self.has_change_permission(request, obj),
'has_delete_permission': self.has_delete_permission(request, obj),
'has_file_field': True, # FIXME - this should check if form or formsets have a FileField,
'has_absolute_url': view_on_site_url is not None,
'absolute_url': view_on_site_url,
'form_url': form_url,
'opts': opts,
'content_type_id': get_content_type_for_model(self.model).pk,
'save_as': self.save_as,
'save_on_top': self.save_on_top,
'to_field_var': TO_FIELD_VAR,
'is_popup_var': IS_POPUP_VAR,
'app_label': app_label,
})
if add and self.add_form_template is not None:
form_template = self.add_form_template
else:
form_template = self.change_form_template
request.current_app = self.admin_site.name
return TemplateResponse(request, form_template or [
"admin/%s/%s/change_form.html" % (app_label, opts.model_name),
"admin/%s/change_form.html" % app_label,
"admin/change_form.html"
], context)
def response_add(self, request, obj, post_url_continue=None):
"""
Determines the HttpResponse for the add_view stage.
"""
opts = obj._meta
pk_value = obj._get_pk_val()
preserved_filters = self.get_preserved_filters(request)
obj_url = reverse(
'admin:%s_%s_change' % (opts.app_label, opts.model_name),
args=(quote(pk_value),),
current_app=self.admin_site.name,
)
# Add a link to the object's change form if the user can edit the obj.
if self.has_change_permission(request, obj):
obj_repr = format_html('<a href="{}">{}</a>', urlquote(obj_url), obj)
else:
obj_repr = force_text(obj)
msg_dict = {
'name': force_text(opts.verbose_name),
'obj': obj_repr,
}
# Here, we distinguish between different save types by checking for
# the presence of keys in request.POST.
if IS_POPUP_VAR in request.POST:
to_field = request.POST.get(TO_FIELD_VAR)
if to_field:
attr = str(to_field)
else:
attr = obj._meta.pk.attname
value = obj.serializable_value(attr)
popup_response_data = json.dumps({
'value': six.text_type(value),
'obj': six.text_type(obj),
})
return SimpleTemplateResponse('admin/popup_response.html', {
'popup_response_data': popup_response_data,
})
elif "_continue" in request.POST or (
# Redirecting after "Save as new".
"_saveasnew" in request.POST and self.save_as_continue and
self.has_change_permission(request, obj)
):
msg = format_html(
_('The {name} "{obj}" was added successfully. You may edit it again below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
if post_url_continue is None:
post_url_continue = obj_url
post_url_continue = add_preserved_filters(
{'preserved_filters': preserved_filters, 'opts': opts},
post_url_continue
)
return HttpResponseRedirect(post_url_continue)
elif "_addanother" in request.POST:
msg = format_html(
_('The {name} "{obj}" was added successfully. You may add another {name} below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
redirect_url = request.path
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
else:
msg = format_html(
_('The {name} "{obj}" was added successfully.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
return self.response_post_save_add(request, obj)
def response_change(self, request, obj):
"""
Determines the HttpResponse for the change_view stage.
"""
if IS_POPUP_VAR in request.POST:
to_field = request.POST.get(TO_FIELD_VAR)
attr = str(to_field) if to_field else obj._meta.pk.attname
# Retrieve the `object_id` from the resolved pattern arguments.
value = request.resolver_match.args[0]
new_value = obj.serializable_value(attr)
popup_response_data = json.dumps({
'action': 'change',
'value': six.text_type(value),
'obj': six.text_type(obj),
'new_value': six.text_type(new_value),
})
return SimpleTemplateResponse('admin/popup_response.html', {
'popup_response_data': popup_response_data,
})
opts = self.model._meta
pk_value = obj._get_pk_val()
preserved_filters = self.get_preserved_filters(request)
msg_dict = {
'name': force_text(opts.verbose_name),
'obj': format_html('<a href="{}">{}</a>', urlquote(request.path), obj),
}
if "_continue" in request.POST:
msg = format_html(
_('The {name} "{obj}" was changed successfully. You may edit it again below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
redirect_url = request.path
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
elif "_saveasnew" in request.POST:
msg = format_html(
_('The {name} "{obj}" was added successfully. You may edit it again below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
redirect_url = reverse('admin:%s_%s_change' %
(opts.app_label, opts.model_name),
args=(pk_value,),
current_app=self.admin_site.name)
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
elif "_addanother" in request.POST:
msg = format_html(
_('The {name} "{obj}" was changed successfully. You may add another {name} below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
redirect_url = reverse('admin:%s_%s_add' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
else:
msg = format_html(
_('The {name} "{obj}" was changed successfully.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
return self.response_post_save_change(request, obj)
def response_post_save_add(self, request, obj):
"""
Figure out where to redirect after the 'Save' button has been pressed
when adding a new object.
"""
opts = self.model._meta
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def response_post_save_change(self, request, obj):
"""
Figure out where to redirect after the 'Save' button has been pressed
when editing an existing object.
"""
opts = self.model._meta
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def response_action(self, request, queryset):
"""
Handle an admin action. This is called if a request is POSTed to the
changelist; it returns an HttpResponse if the action was handled, and
None otherwise.
"""
# There can be multiple action forms on the page (at the top
# and bottom of the change list, for example). Get the action
# whose button was pushed.
try:
action_index = int(request.POST.get('index', 0))
except ValueError:
action_index = 0
# Construct the action form.
data = request.POST.copy()
data.pop(helpers.ACTION_CHECKBOX_NAME, None)
data.pop("index", None)
# Use the action whose button was pushed
try:
data.update({'action': data.getlist('action')[action_index]})
except IndexError:
                # If we didn't get an action from the chosen form, the POST
                # data is invalid; the missing action will fail the validation
                # check below, so there is nothing to do here.
pass
action_form = self.action_form(data, auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
# If the form's valid we can handle the action.
if action_form.is_valid():
action = action_form.cleaned_data['action']
select_across = action_form.cleaned_data['select_across']
func = self.get_actions(request)[action][0]
            # Get the list of selected PKs. If nothing's selected, we can't
            # perform an action on it, so bail, unless 'select_across' asks
            # for the action to be performed on all objects anyway.
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
if not selected and not select_across:
# Reminder that something needs to be selected or nothing will happen
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg, messages.WARNING)
return None
if not select_across:
# Perform the action only on the selected objects
queryset = queryset.filter(pk__in=selected)
response = func(self, request, queryset)
# Actions may return an HttpResponse-like object, which will be
# used as the response from the POST. If not, we'll be a good
# little HTTP citizen and redirect back to the changelist page.
if isinstance(response, HttpResponseBase):
return response
else:
return HttpResponseRedirect(request.get_full_path())
else:
msg = _("No action selected.")
self.message_user(request, msg, messages.WARNING)
return None
def response_delete(self, request, obj_display, obj_id):
"""
Determines the HttpResponse for the delete_view stage.
"""
opts = self.model._meta
if IS_POPUP_VAR in request.POST:
popup_response_data = json.dumps({
'action': 'delete',
'value': str(obj_id),
})
return SimpleTemplateResponse('admin/popup_response.html', {
'popup_response_data': popup_response_data,
})
self.message_user(
request,
_('The %(name)s "%(obj)s" was deleted successfully.') % {
'name': force_text(opts.verbose_name),
'obj': force_text(obj_display),
},
messages.SUCCESS,
)
if self.has_change_permission(request, None):
post_url = reverse(
'admin:%s_%s_changelist' % (opts.app_label, opts.model_name),
current_app=self.admin_site.name,
)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters(
{'preserved_filters': preserved_filters, 'opts': opts}, post_url
)
else:
post_url = reverse('admin:index', current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def render_delete_form(self, request, context):
opts = self.model._meta
app_label = opts.app_label
request.current_app = self.admin_site.name
context.update(
to_field_var=TO_FIELD_VAR,
is_popup_var=IS_POPUP_VAR,
media=self.media,
)
return TemplateResponse(
request,
self.delete_confirmation_template or [
"admin/{}/{}/delete_confirmation.html".format(app_label, opts.model_name),
"admin/{}/delete_confirmation.html".format(app_label),
"admin/delete_confirmation.html",
],
context,
)
def get_inline_formsets(self, request, formsets, inline_instances, obj=None):
inline_admin_formsets = []
for inline, formset in zip(inline_instances, formsets):
fieldsets = list(inline.get_fieldsets(request, obj))
readonly = list(inline.get_readonly_fields(request, obj))
prepopulated = dict(inline.get_prepopulated_fields(request, obj))
inline_admin_formset = helpers.InlineAdminFormSet(
inline, formset, fieldsets, prepopulated, readonly,
model_admin=self,
)
inline_admin_formsets.append(inline_admin_formset)
return inline_admin_formsets
def get_changeform_initial_data(self, request):
"""
Get the initial form data.
Unless overridden, this populates from the GET params.
"""
initial = dict(request.GET.items())
for k in initial:
try:
f = self.model._meta.get_field(k)
except FieldDoesNotExist:
continue
# We have to special-case M2Ms as a list of comma-separated PKs.
if isinstance(f, models.ManyToManyField):
initial[k] = initial[k].split(",")
return initial
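    # Editor's note (illustrative) on get_changeform_initial_data above: for a
    # hypothetical M2M field 'authors', a querystring of ?authors=1,2,3 yields
    # initial['authors'] == ['1', '2', '3'] via the comma split.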
@csrf_protect_m
@transaction.atomic
def changeform_view(self, request, object_id=None, form_url='', extra_context=None):
to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))
if to_field and not self.to_field_allowed(request, to_field):
raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)
model = self.model
opts = model._meta
if request.method == 'POST' and '_saveasnew' in request.POST:
object_id = None
add = object_id is None
if add:
if not self.has_add_permission(request):
raise PermissionDenied
obj = None
else:
obj = self.get_object(request, unquote(object_id), to_field)
if not self.has_change_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(opts.verbose_name), 'key': escape(object_id)})
ModelForm = self.get_form(request, obj)
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES, instance=obj)
if form.is_valid():
form_validated = True
new_object = self.save_form(request, form, change=not add)
else:
form_validated = False
new_object = form.instance
formsets, inline_instances = self._create_formsets(request, new_object, change=not add)
if all_valid(formsets) and form_validated:
self.save_model(request, new_object, form, not add)
self.save_related(request, form, formsets, not add)
change_message = self.construct_change_message(request, form, formsets, add)
if add:
self.log_addition(request, new_object, change_message)
return self.response_add(request, new_object)
else:
self.log_change(request, new_object, change_message)
return self.response_change(request, new_object)
else:
form_validated = False
else:
if add:
initial = self.get_changeform_initial_data(request)
form = ModelForm(initial=initial)
formsets, inline_instances = self._create_formsets(request, form.instance, change=False)
else:
form = ModelForm(instance=obj)
formsets, inline_instances = self._create_formsets(request, obj, change=True)
adminForm = helpers.AdminForm(
form,
list(self.get_fieldsets(request, obj)),
self.get_prepopulated_fields(request, obj),
self.get_readonly_fields(request, obj),
model_admin=self)
media = self.media + adminForm.media
inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj)
for inline_formset in inline_formsets:
media = media + inline_formset.media
context = dict(
self.admin_site.each_context(request),
title=(_('Add %s') if add else _('Change %s')) % force_text(opts.verbose_name),
adminform=adminForm,
object_id=object_id,
original=obj,
is_popup=(IS_POPUP_VAR in request.POST or
IS_POPUP_VAR in request.GET),
to_field=to_field,
media=media,
inline_admin_formsets=inline_formsets,
errors=helpers.AdminErrorList(form, formsets),
preserved_filters=self.get_preserved_filters(request),
)
# Hide the "Save" and "Save and continue" buttons if "Save as New" was
# previously chosen to prevent the interface from getting confusing.
if request.method == 'POST' and not form_validated and "_saveasnew" in request.POST:
context['show_save'] = False
context['show_save_and_continue'] = False
# Use the change template instead of the add template.
add = False
context.update(extra_context or {})
return self.render_change_form(request, context, add=add, change=not add, obj=obj, form_url=form_url)
def add_view(self, request, form_url='', extra_context=None):
return self.changeform_view(request, None, form_url, extra_context)
def change_view(self, request, object_id, form_url='', extra_context=None):
return self.changeform_view(request, object_id, form_url, extra_context)
@csrf_protect_m
def changelist_view(self, request, extra_context=None):
"""
The 'change list' admin view for this model.
"""
from django.contrib.admin.views.main import ERROR_FLAG
opts = self.model._meta
app_label = opts.app_label
if not self.has_change_permission(request, None):
raise PermissionDenied
list_display = self.get_list_display(request)
list_display_links = self.get_list_display_links(request, list_display)
list_filter = self.get_list_filter(request)
search_fields = self.get_search_fields(request)
list_select_related = self.get_list_select_related(request)
# Check actions to see if any are available on this changelist
actions = self.get_actions(request)
if actions:
# Add the action checkboxes if there are any actions available.
list_display = ['action_checkbox'] + list(list_display)
ChangeList = self.get_changelist(request)
try:
cl = ChangeList(
request, self.model, list_display,
list_display_links, list_filter, self.date_hierarchy,
search_fields, list_select_related, self.list_per_page,
self.list_max_show_all, self.list_editable, self,
)
except IncorrectLookupParameters:
# Wacky lookup parameters were given, so redirect to the main
# changelist page, without parameters, and pass an 'invalid=1'
# parameter via the query string. If wacky parameters were given
# and the 'invalid=1' parameter was already in the query string,
# something is screwed up with the database, so display an error
# page.
if ERROR_FLAG in request.GET.keys():
return SimpleTemplateResponse('admin/invalid_setup.html', {
'title': _('Database error'),
})
return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1')
# If the request was POSTed, this might be a bulk action or a bulk
# edit. Try to look up an action or confirmation first, but if this
# isn't an action the POST will fall through to the bulk edit check,
# below.
action_failed = False
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
# Actions with no confirmation
if (actions and request.method == 'POST' and
'index' in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, queryset=cl.get_queryset(request))
if response:
return response
else:
action_failed = True
else:
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg, messages.WARNING)
action_failed = True
# Actions with confirmation
if (actions and request.method == 'POST' and
helpers.ACTION_CHECKBOX_NAME in request.POST and
'index' not in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, queryset=cl.get_queryset(request))
if response:
return response
else:
action_failed = True
# If we're allowing changelist editing, we need to construct a formset
# for the changelist given all the fields to be edited. Then we'll
# use the formset to validate/process POSTed data.
formset = cl.formset = None
# Handle POSTed bulk-edit data.
if (request.method == "POST" and cl.list_editable and
'_save' in request.POST and not action_failed):
FormSet = self.get_changelist_formset(request)
formset = cl.formset = FormSet(request.POST, request.FILES, queryset=self.get_queryset(request))
if formset.is_valid():
changecount = 0
for form in formset.forms:
if form.has_changed():
obj = self.save_form(request, form, change=True)
self.save_model(request, obj, form, change=True)
self.save_related(request, form, formsets=[], change=True)
change_msg = self.construct_change_message(request, form, None)
self.log_change(request, obj, change_msg)
changecount += 1
if changecount:
if changecount == 1:
name = force_text(opts.verbose_name)
else:
name = force_text(opts.verbose_name_plural)
msg = ungettext(
"%(count)s %(name)s was changed successfully.",
"%(count)s %(name)s were changed successfully.",
changecount
) % {
'count': changecount,
'name': name,
'obj': force_text(obj),
}
self.message_user(request, msg, messages.SUCCESS)
return HttpResponseRedirect(request.get_full_path())
# Handle GET -- construct a formset for display.
elif cl.list_editable:
FormSet = self.get_changelist_formset(request)
formset = cl.formset = FormSet(queryset=cl.result_list)
# Build the list of media to be used by the formset.
if formset:
media = self.media + formset.media
else:
media = self.media
# Build the action form and populate it with available actions.
if actions:
action_form = self.action_form(auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
else:
action_form = None
selection_note_all = ungettext(
'%(total_count)s selected',
'All %(total_count)s selected',
cl.result_count
)
context = dict(
self.admin_site.each_context(request),
module_name=force_text(opts.verbose_name_plural),
selection_note=_('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)},
selection_note_all=selection_note_all % {'total_count': cl.result_count},
title=cl.title,
is_popup=cl.is_popup,
to_field=cl.to_field,
cl=cl,
media=media,
has_add_permission=self.has_add_permission(request),
opts=cl.opts,
action_form=action_form,
actions_on_top=self.actions_on_top,
actions_on_bottom=self.actions_on_bottom,
actions_selection_counter=self.actions_selection_counter,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
request.current_app = self.admin_site.name
return TemplateResponse(request, self.change_list_template or [
'admin/%s/%s/change_list.html' % (app_label, opts.model_name),
'admin/%s/change_list.html' % app_label,
'admin/change_list.html'
], context)
@csrf_protect_m
@transaction.atomic
def delete_view(self, request, object_id, extra_context=None):
"The 'delete' admin view for this model."
opts = self.model._meta
app_label = opts.app_label
to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))
if to_field and not self.to_field_allowed(request, to_field):
raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)
obj = self.get_object(request, unquote(object_id), to_field)
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') %
{'name': force_text(opts.verbose_name), 'key': escape(object_id)}
)
using = router.db_for_write(self.model)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, model_count, perms_needed, protected) = get_deleted_objects(
[obj], opts, request.user, self.admin_site, using)
if request.POST and not protected: # The user has confirmed the deletion.
if perms_needed:
raise PermissionDenied
obj_display = force_text(obj)
attr = str(to_field) if to_field else opts.pk.attname
obj_id = obj.serializable_value(attr)
self.log_deletion(request, obj, obj_display)
self.delete_model(request, obj)
return self.response_delete(request, obj_display, obj_id)
object_name = force_text(opts.verbose_name)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": object_name}
else:
title = _("Are you sure?")
context = dict(
self.admin_site.each_context(request),
title=title,
object_name=object_name,
object=obj,
deleted_objects=deleted_objects,
model_count=dict(model_count).items(),
perms_lacking=perms_needed,
protected=protected,
opts=opts,
app_label=app_label,
preserved_filters=self.get_preserved_filters(request),
is_popup=(IS_POPUP_VAR in request.POST or
IS_POPUP_VAR in request.GET),
to_field=to_field,
)
context.update(extra_context or {})
return self.render_delete_form(request, context)
def history_view(self, request, object_id, extra_context=None):
"The 'history' admin view for this model."
from django.contrib.admin.models import LogEntry
# First check if the user can see this history.
model = self.model
obj = self.get_object(request, unquote(object_id))
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(model._meta.verbose_name),
'key': escape(object_id),
})
if not self.has_change_permission(request, obj):
raise PermissionDenied
# Then get the history for this object.
opts = model._meta
app_label = opts.app_label
action_list = LogEntry.objects.filter(
object_id=unquote(object_id),
content_type=get_content_type_for_model(model)
).select_related().order_by('action_time')
context = dict(
self.admin_site.each_context(request),
title=_('Change history: %s') % force_text(obj),
action_list=action_list,
module_name=capfirst(force_text(opts.verbose_name_plural)),
object=obj,
opts=opts,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
request.current_app = self.admin_site.name
return TemplateResponse(request, self.object_history_template or [
"admin/%s/%s/object_history.html" % (app_label, opts.model_name),
"admin/%s/object_history.html" % app_label,
"admin/object_history.html"
], context)
def _create_formsets(self, request, obj, change):
"Helper function to generate formsets for add/change_view."
formsets = []
inline_instances = []
prefixes = {}
get_formsets_args = [request]
if change:
get_formsets_args.append(obj)
for FormSet, inline in self.get_formsets_with_inlines(*get_formsets_args):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1 or not prefix:
prefix = "%s-%s" % (prefix, prefixes[prefix])
formset_params = {
'instance': obj,
'prefix': prefix,
'queryset': inline.get_queryset(request),
}
if request.method == 'POST':
formset_params.update({
'data': request.POST,
'files': request.FILES,
'save_as_new': '_saveasnew' in request.POST
})
formsets.append(FormSet(**formset_params))
inline_instances.append(inline)
return formsets, inline_instances
class InlineModelAdmin(BaseModelAdmin):
"""
Options for inline editing of ``model`` instances.
Provide ``fk_name`` to specify the attribute name of the ``ForeignKey``
from ``model`` to its parent. This is required if ``model`` has more than
one ``ForeignKey`` to its parent.
"""
model = None
fk_name = None
formset = BaseInlineFormSet
extra = 3
min_num = None
max_num = None
template = None
verbose_name = None
verbose_name_plural = None
can_delete = True
show_change_link = False
checks_class = InlineModelAdminChecks
classes = None
def __init__(self, parent_model, admin_site):
self.admin_site = admin_site
self.parent_model = parent_model
self.opts = self.model._meta
self.has_registered_model = admin_site.is_registered(self.model)
super(InlineModelAdmin, self).__init__()
if self.verbose_name is None:
self.verbose_name = self.model._meta.verbose_name
if self.verbose_name_plural is None:
self.verbose_name_plural = self.model._meta.verbose_name_plural
@property
def media(self):
extra = '' if settings.DEBUG else '.min'
js = ['vendor/jquery/jquery%s.js' % extra, 'jquery.init.js',
'inlines%s.js' % extra]
if self.filter_vertical or self.filter_horizontal:
js.extend(['SelectBox.js', 'SelectFilter2.js'])
if self.classes and 'collapse' in self.classes:
js.append('collapse%s.js' % extra)
return forms.Media(js=['admin/js/%s' % url for url in js])
def get_extra(self, request, obj=None, **kwargs):
"""Hook for customizing the number of extra inline forms."""
return self.extra
def get_min_num(self, request, obj=None, **kwargs):
"""Hook for customizing the min number of inline forms."""
return self.min_num
def get_max_num(self, request, obj=None, **kwargs):
"""Hook for customizing the max number of extra inline forms."""
return self.max_num
def get_formset(self, request, obj=None, **kwargs):
"""Returns a BaseInlineFormSet class for use in admin add/change views."""
if 'fields' in kwargs:
fields = kwargs.pop('fields')
else:
fields = flatten_fieldsets(self.get_fieldsets(request, obj))
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
exclude.extend(self.get_readonly_fields(request, obj))
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# InlineModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# If exclude is an empty list we use None, since that's the actual
# default.
exclude = exclude or None
can_delete = self.can_delete and self.has_delete_permission(request, obj)
defaults = {
"form": self.form,
"formset": self.formset,
"fk_name": self.fk_name,
"fields": fields,
"exclude": exclude,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
"extra": self.get_extra(request, obj, **kwargs),
"min_num": self.get_min_num(request, obj, **kwargs),
"max_num": self.get_max_num(request, obj, **kwargs),
"can_delete": can_delete,
}
defaults.update(kwargs)
base_model_form = defaults['form']
class DeleteProtectedModelForm(base_model_form):
def hand_clean_DELETE(self):
"""
                We don't validate the 'DELETE' field itself because on the
                template it isn't rendered from the field definition but from
                a generic "deletion_field" of the InlineModelAdmin.
"""
if self.cleaned_data.get(DELETION_FIELD_NAME, False):
using = router.db_for_write(self._meta.model)
collector = NestedObjects(using=using)
if self.instance.pk is None:
return
collector.collect([self.instance])
if collector.protected:
objs = []
for p in collector.protected:
objs.append(
# Translators: Model verbose name and instance representation,
# suitable to be an item in a list.
_('%(class_name)s %(instance)s') % {
'class_name': p._meta.verbose_name,
'instance': p}
)
params = {'class_name': self._meta.model._meta.verbose_name,
'instance': self.instance,
'related_objects': get_text_list(objs, _('and'))}
msg = _("Deleting %(class_name)s %(instance)s would require "
"deleting the following protected related objects: "
"%(related_objects)s")
raise ValidationError(msg, code='deleting_protected', params=params)
def is_valid(self):
result = super(DeleteProtectedModelForm, self).is_valid()
self.hand_clean_DELETE()
return result
defaults['form'] = DeleteProtectedModelForm
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
return inlineformset_factory(self.parent_model, self.model, **defaults)
def get_fields(self, request, obj=None):
if self.fields:
return self.fields
form = self.get_formset(request, obj, fields=None).form
return list(form.base_fields) + list(self.get_readonly_fields(request, obj))
def get_queryset(self, request):
queryset = super(InlineModelAdmin, self).get_queryset(request)
if not self.has_change_permission(request):
queryset = queryset.none()
return queryset
def has_add_permission(self, request):
if self.opts.auto_created:
# We're checking the rights to an auto-created intermediate model,
# which doesn't have its own individual permissions. The user needs
# to have the change permission for the related model in order to
# be able to do anything with the intermediate model.
return self.has_change_permission(request)
return super(InlineModelAdmin, self).has_add_permission(request)
def has_change_permission(self, request, obj=None):
opts = self.opts
if opts.auto_created:
            # The model was auto-created as an intermediary for a
            # ManyToMany relationship; find the target model.
for field in opts.fields:
if field.remote_field and field.remote_field.model != self.parent_model:
opts = field.remote_field.model._meta
break
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_delete_permission(self, request, obj=None):
if self.opts.auto_created:
# We're checking the rights to an auto-created intermediate model,
# which doesn't have its own individual permissions. The user needs
# to have the change permission for the related model in order to
# be able to do anything with the intermediate model.
return self.has_change_permission(request, obj)
return super(InlineModelAdmin, self).has_delete_permission(request, obj)
class StackedInline(InlineModelAdmin):
template = 'admin/edit_inline/stacked.html'
class TabularInline(InlineModelAdmin):
template = 'admin/edit_inline/tabular.html'
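# Illustrative usage sketch (editor's addition, not part of this module).
# ``Author`` and ``Book`` are hypothetical models standing in for a project's
# own; the wiring shown follows the ModelAdmin/InlineModelAdmin API above:
#
#     from django.contrib import admin
#
#     class BookInline(TabularInline):
#         model = Book        # hypothetical child model with a FK to Author
#         extra = 1           # one blank inline form instead of the default 3
#
#     class AuthorAdmin(admin.ModelAdmin):
#         inlines = [BookInline]
#         search_fields = ['^name', '=email']  # istartswith / iexact lookups
#
#     admin.site.register(Author, AuthorAdmin)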
| gpl-3.0 |
kamyu104/LeetCode | Python/design-phone-directory.py | 2 | 1688 | # init: Time: O(n), Space: O(n)
# get: Time: O(1), Space: O(1)
# check: Time: O(1), Space: O(1)
# release: Time: O(1), Space: O(1)
class PhoneDirectory(object):
def __init__(self, maxNumbers):
"""
Initialize your data structure here
@param maxNumbers - The maximum numbers that can be stored in the phone directory.
:type maxNumbers: int
"""
self.__curr = 0
self.__numbers = range(maxNumbers)
self.__used = [False] * maxNumbers
def get(self):
"""
Provide a number which is not assigned to anyone.
@return - Return an available number. Return -1 if none is available.
:rtype: int
"""
if self.__curr == len(self.__numbers):
return -1
number = self.__numbers[self.__curr]
self.__curr += 1
self.__used[number] = True
return number
def check(self, number):
"""
Check if a number is available or not.
:type number: int
:rtype: bool
"""
return 0 <= number < len(self.__numbers) and \
not self.__used[number]
def release(self, number):
"""
Recycle or release a number.
:type number: int
:rtype: void
"""
if not 0 <= number < len(self.__numbers) or \
not self.__used[number]:
return
self.__used[number] = False
self.__curr -= 1
self.__numbers[self.__curr] = number
# Your PhoneDirectory object will be instantiated and called as such:
# obj = PhoneDirectory(maxNumbers)
# param_1 = obj.get()
# param_2 = obj.check(number)
# obj.release(number)
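# Minimal smoke test (editor's addition, illustrative only). It exercises the
# class above under Python 2, where range() returns a mutable list:
if __name__ == "__main__":
    directory = PhoneDirectory(2)
    assert directory.get() == 0   # first free number handed out
    assert directory.check(1)     # 1 is still available
    assert directory.get() == 1
    assert directory.get() == -1  # pool exhausted
    directory.release(0)
    assert directory.check(0)     # 0 was recycled and is available again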
| mit |
nafex/pyload | module/network/Browser.py | 40 | 4190 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from logging import getLogger
from HTTPRequest import HTTPRequest
from HTTPDownload import HTTPDownload
class Browser(object):
__slots__ = ("log", "options", "bucket", "cj", "_size", "http", "dl")
def __init__(self, bucket=None, options={}):
self.log = getLogger("log")
        self.options = options # holds pycurl options
        self.bucket = bucket
        self.cj = None # needs to be set later
self._size = 0
self.renewHTTPRequest()
self.dl = None
def renewHTTPRequest(self):
if hasattr(self, "http"): self.http.close()
self.http = HTTPRequest(self.cj, self.options)
def setLastURL(self, val):
self.http.lastURL = val
# tunnel some attributes from HTTP Request to Browser
lastEffectiveURL = property(lambda self: self.http.lastEffectiveURL)
lastURL = property(lambda self: self.http.lastURL, setLastURL)
code = property(lambda self: self.http.code)
cookieJar = property(lambda self: self.cj)
def setCookieJar(self, cj):
self.cj = cj
self.http.cj = cj
@property
def speed(self):
if self.dl:
return self.dl.speed
return 0
@property
def size(self):
if self._size:
return self._size
if self.dl:
return self.dl.size
return 0
@property
def arrived(self):
if self.dl:
return self.dl.arrived
return 0
@property
def percent(self):
if not self.size: return 0
return (self.arrived * 100) / self.size
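    # Editor's note (illustrative) on the percent property above:
    # arrived=512000 with size=1024000 gives (512000 * 100) / 1024000 == 50;
    # under Python 2, integer division truncates any remainder.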
def clearCookies(self):
if self.cj:
self.cj.clear()
self.http.clearCookies()
def clearReferer(self):
self.http.lastURL = None
def abortDownloads(self):
self.http.abort = True
if self.dl:
self._size = self.dl.size
self.dl.abort = True
def httpDownload(self, url, filename, get={}, post={}, ref=True, cookies=True, chunks=1, resume=False,
progressNotify=None, disposition=False):
""" this can also download ftp """
self._size = 0
self.dl = HTTPDownload(url, filename, get, post, self.lastEffectiveURL if ref else None,
self.cj if cookies else None, self.bucket, self.options, progressNotify, disposition)
name = self.dl.download(chunks, resume)
self._size = self.dl.size
self.dl = None
return name
def load(self, *args, **kwargs):
""" retrieves page """
return self.http.load(*args, **kwargs)
def putHeader(self, name, value):
""" add a header to the request """
self.http.putHeader(name, value)
def addAuth(self, pwd):
"""Adds user and pw for http auth
:param pwd: string, user:password
"""
self.options["auth"] = pwd
self.renewHTTPRequest() #we need a new request
def removeAuth(self):
if "auth" in self.options: del self.options["auth"]
self.renewHTTPRequest()
def setOption(self, name, value):
"""Adds an option to the request, see HTTPRequest for existing ones"""
self.options[name] = value
def deleteOption(self, name):
if name in self.options: del self.options[name]
def clearHeaders(self):
self.http.clearHeaders()
def close(self):
""" cleanup """
if hasattr(self, "http"):
self.http.close()
del self.http
if hasattr(self, "dl"):
del self.dl
if hasattr(self, "cj"):
del self.cj
if __name__ == "__main__":
browser = Browser()#proxies={"socks5": "localhost:5000"})
ip = "http://www.whatismyip.com/automation/n09230945.asp"
#browser.getPage("http://google.com/search?q=bar")
#browser.getPage("https://encrypted.google.com/")
#print browser.getPage(ip)
#print browser.getRedirectLocation("http://google.com/")
#browser.getPage("https://encrypted.google.com/")
#browser.getPage("http://google.com/search?q=bar")
browser.httpDownload("http://speedtest.netcologne.de/test_10mb.bin", "test_10mb.bin")
| gpl-3.0 |
qrealka/skia-hc | third_party/externals/shaderc2/utils/remove-file-by-suffix.py | 16 | 1028 | #!/usr/bin/env python
# Copyright 2015 The Shaderc Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Removes all files with a certain suffix in a given path recursively.
# Arguments: path suffix
import os
import sys
def main():
path = sys.argv[1]
suffix = sys.argv[2]
for root, _, filenames in os.walk(path):
for filename in filenames:
if filename.endswith(suffix):
os.remove(os.path.join(root, filename))
if __name__ == '__main__':
main()
| apache-2.0 |
trevor/calendarserver | txdav/carddav/datastore/query/test/test_filter.py | 1 | 3060 | ##
# Copyright (c) 2011-2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from twext.enterprise.dal.syntax import SQLFragment
from twisted.trial.unittest import TestCase
from twistedcaldav import carddavxml
from txdav.carddav.datastore.query.filter import Filter, FilterBase
from txdav.common.datastore.sql_tables import schema
from txdav.carddav.datastore.query.builder import buildExpression
from txdav.common.datastore.query.generator import SQLQueryGenerator
from txdav.carddav.datastore.index_file import sqladdressbookquery
class TestQueryFilter(TestCase):
_objectSchema = schema.ADDRESSBOOK_OBJECT
_queryFields = {
"UID": _objectSchema.UID
}
def test_query(self):
"""
Basic query test - single term.
Only UID can be queried via sql.
"""
filter = carddavxml.Filter(
*[carddavxml.PropertyFilter(
carddavxml.TextMatch.fromString("Example"),
**{"name":"UID"}
)]
)
filter = Filter(filter)
expression = buildExpression(filter, self._queryFields)
sql = SQLQueryGenerator(expression, self, 1234)
select, args = sql.generate()
self.assertEqual(select.toSQL(), SQLFragment("select distinct RESOURCE_NAME, VCARD_UID from ADDRESSBOOK_OBJECT where ADDRESSBOOK_HOME_RESOURCE_ID = ? and VCARD_UID like (? || (? || ?))", [1234, "%", "Example", "%"]))
self.assertEqual(args, {})
def test_sqllite_query(self):
"""
Basic query test - single term.
Only UID can be queried via sql.
"""
filter = carddavxml.Filter(
*[carddavxml.PropertyFilter(
carddavxml.TextMatch.fromString("Example"),
**{"name":"UID"}
)]
)
filter = Filter(filter)
sql, args = sqladdressbookquery(filter, 1234)
self.assertEqual(sql, " from RESOURCE where RESOURCE.UID GLOB :1")
self.assertEqual(args, ["*Example*"])
class TestQueryFilterSerialize(TestCase):
def test_query(self):
"""
        Basic serialization test - round-trip a filter through serialize()
        and deserialize().
"""
filter = carddavxml.Filter(
*[carddavxml.PropertyFilter(
carddavxml.TextMatch.fromString("Example"),
**{"name":"UID"}
)]
)
filter = Filter(filter)
j = filter.serialize()
self.assertEqual(j["type"], "Filter")
f = FilterBase.deserialize(j)
self.assertTrue(isinstance(f, Filter))
| apache-2.0 |
guerrerocarlos/fiware-IoTAgent-Cplusplus | third_party/gmock-1.7.0/test/gmock_test_utils.py | 769 | 3684 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test utilities for Google C++ Mocking Framework."""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import sys
# Determines path to gtest_test_utils and imports it.
SCRIPT_DIR = os.path.dirname(__file__) or '.'
# isdir resolves symbolic links.
gtest_tests_util_dir = os.path.join(SCRIPT_DIR, '../gtest/test')
if os.path.isdir(gtest_tests_util_dir):
GTEST_TESTS_UTIL_DIR = gtest_tests_util_dir
else:
GTEST_TESTS_UTIL_DIR = os.path.join(SCRIPT_DIR, '../../gtest/test')
sys.path.append(GTEST_TESTS_UTIL_DIR)
import gtest_test_utils # pylint: disable-msg=C6204
def GetSourceDir():
"""Returns the absolute path of the directory where the .py files are."""
return gtest_test_utils.GetSourceDir()
def GetTestExecutablePath(executable_name):
"""Returns the absolute path of the test binary given its name.
The function will print a message and abort the program if the resulting file
doesn't exist.
Args:
executable_name: name of the test binary that the test script runs.
Returns:
The absolute path of the test binary.
"""
return gtest_test_utils.GetTestExecutablePath(executable_name)
def GetExitStatus(exit_code):
"""Returns the argument to exit(), or -1 if exit() wasn't called.
Args:
exit_code: the result value of os.system(command).
"""
if os.name == 'nt':
# On Windows, os.WEXITSTATUS() doesn't work and os.system() returns
# the argument to exit() directly.
return exit_code
else:
# On Unix, os.WEXITSTATUS() must be used to extract the exit status
# from the result of os.system().
if os.WIFEXITED(exit_code):
return os.WEXITSTATUS(exit_code)
else:
return -1
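# Example (illustrative): on POSIX, os.system('exit 3') returns the raw wait
# status 768 (3 << 8); os.WIFEXITED(768) is true and os.WEXITSTATUS(768) == 3,
# so GetExitStatus(768) == 3. On Windows the value is the exit status itself.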
# Suppresses the "Invalid const name" lint complaint
# pylint: disable-msg=C6409
# Exposes utilities from gtest_test_utils.
Subprocess = gtest_test_utils.Subprocess
TestCase = gtest_test_utils.TestCase
environ = gtest_test_utils.environ
SetEnvVar = gtest_test_utils.SetEnvVar
PREMATURE_EXIT_FILE_ENV_VAR = gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR
# pylint: enable-msg=C6409
def Main():
"""Runs the unit test."""
gtest_test_utils.Main()
| agpl-3.0 |
Alexoner/web-crawlers | scripts/12306/12306.py | 1 | 63121 | #!/usr/bin/env python2
# -*- encoding: utf-8 -*-
# standard library
import argparse
import ConfigParser
import datetime
import sys
import random
import re
import time
import urllib
# third party library
import requests
# custom library
from utils import hasKeys, dumpObj
# set default encoding to UTF-8
reload(sys)
sys.setdefaultencoding('utf-8')
# global variables
RET_OK = 0
RET_ERR = -1
MAX_TRIES = 2
MAX_DAYS = 60
stations = []
cards = {
'1': u'二代身份证',
'2': u'一代身份证',
'C': u'港澳通行证',
'G': u'台湾通行证',
'B': u'护照'
}
Tickets = {
'1': u'成人票',
'2': u'儿童票',
'3': u'学生票',
'4': u'残军票'
}
seatMaps = [
    ('1', u'硬座'), # hard seat / standing (no seat)
('3', u'硬卧'),
('4', u'软卧'),
('7', u'一等软座'),
('8', u'二等软座'),
('9', u'商务座'),
('M', u'一等座'),
('O', u'二等座'),
('B', u'混编硬座'),
('P', u'特等座')
]
# global functions
def printDelimiter():
print('-' * 64)
def getTime():
# convert a time tuple to a string according to a format
# specification
return time.strftime('%Y-%m-%d %X', time.localtime()) # 2014-01-01 12:00:00
def date2UTC(d):
# Convert '2014-01-01' to 'Wed Jan 01 00:00:00 UTC+0800 2014'
# parse a string to a time tuple according to a format specification
t = time.strptime(d, '%Y-%m-%d')
asc = time.asctime(t) # 'Wed Jan 01 00:00:00 2014'
# 'Wed Jan 01 00:00:00 UTC+0800 2014'
return (asc[0:-4] + 'UTC+0800 ' + asc[-4:])
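# Example (illustrative): date2UTC('2014-01-01') returns
# 'Wed Jan 01 00:00:00 UTC+0800 2014'. time.asctime() yields
# 'Wed Jan 01 00:00:00 2014', and the slices splice 'UTC+0800 ' in before
# the trailing year.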
def getCardType(key):
return cards[key] if key in cards else u'未知证件类型'
def getTicketType(key):
return Tickets[key] if key in Tickets else u'未知票种'
def getSeatType(key):
d = dict(seatMaps)
return d[key] if key in d else u'未知席别'
def selectSeatType():
    key = '1' # default to hard seat
while True:
print(u'请选择席别编码(即左边第一个英文字母):')
for xb in seatMaps:
print(u'%s: %s' % (xb[0], xb[1]))
key = raw_input().upper()
d = dict(seatMaps)
if key in d:
return key
else:
print(u'无效的席别类型!')
def checkDate(date):
m = re.match(r'^\d{4}-\d{2}-\d{2}$', date) # 2014-01-01
if m:
today = datetime.datetime.now()
fmt = '%Y-%m-%d'
today = datetime.datetime.strptime(today.strftime(fmt), fmt)
train_date = datetime.datetime.strptime(m.group(0), fmt)
delta = train_date - today
if delta.days < 0:
print(u'乘车日期%s无效, 只能预订%s以后的车票' % (
train_date.strftime(fmt),
today.strftime(fmt)))
return False
else:
return True
else:
return False
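# Example (illustrative): checkDate('2014-1-1') fails the format check, a
# well-formed date earlier than today returns False with a message, and only
# well-formed dates from today onward return True.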
def selectDate():
fmt = '%Y-%m-%d'
week_days = [u'星期一', u'星期二', u'星期三', u'星期四', u'星期五', u'星期六', u'星期天']
now = datetime.datetime.now()
available_date = [(now + datetime.timedelta(days=i)) for i in xrange(MAX_DAYS)]
for i in xrange(0, MAX_DAYS, 2):
print(u'第%2d天: %s(%s)' % (
i + 1, available_date[i].strftime(fmt), week_days[available_date[i].weekday()])),
if i + 1 < MAX_DAYS:
print(u'\t\t第%2d天: %s(%s)' % (
i + 2, available_date[i + 1].strftime(fmt), week_days[available_date[i + 1].weekday()]))
else:
print('')
while True:
print(u'请选择乘车日期(1~%d)' % (MAX_DAYS))
index = raw_input()
if not index.isdigit():
print(u'只能输入数字序号, 请重新选择乘车日期(1~%d)' % (MAX_DAYS))
continue
index = int(index)
if index < 1 or index > MAX_DAYS:
print(u'输入的序号无效, 请重新选择乘车日期(1~%d)' % (MAX_DAYS))
continue
index -= 1
train_date = available_date[index].strftime(fmt)
return train_date
def getStationByName(name):
matched_stations = []
for station in stations:
if (
station['name'] == name
or station['abbr'].find(name.lower()) != -1
or station['pinyin'].find(name.lower()) != -1
or station['pyabbr'].find(name.lower()) != -1):
matched_stations.append(station)
count = len(matched_stations)
if count <= 0:
return None
elif count == 1:
return matched_stations[0]
else:
for i in xrange(0, count):
print(u'%d:\t%s' % (i + 1, matched_stations[i]['name']))
print(u'请选择站点(1~%d)' % (count))
index = raw_input()
if not index.isdigit():
print(u'只能输入数字序号(1~%d)' % (count))
return None
index = int(index)
if index < 1 or index > count:
print(u'输入的序号无效(1~%d)' % (count))
return None
else:
return matched_stations[index - 1]
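# Example (illustrative; the field values are hypothetical): given a station
# record like {'name': u'北京', 'abbr': 'bjbj', 'pinyin': 'beijing',
# 'pyabbr': 'bj'}, any of getStationByName(u'北京'), getStationByName('beijing')
# or getStationByName('bj') matches it; multiple hits fall through to the
# numbered interactive prompt above.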
def inputStation():
while True:
print(u'支持中文, 拼音和拼音缩写(如: 北京,beijing,bj)')
name = raw_input().decode('gb2312', 'ignore')
station = getStationByName(name)
if station:
return station
else:
print(u'站点错误, 没有站点"%s", 请重新输入.' % (name))
def selectTrain(trains):
trains_num = len(trains)
index = 0
while True: # 必须选择有效的车次
index = raw_input()
if not index.isdigit():
print(u'只能输入数字序号,请重新选择车次(1~%d)' % (trains_num))
continue
index = int(index)
if index < 1 or index > trains_num:
print(u'输入的序号无效,请重新选择车次(1~%d)' % (trains_num))
continue
if trains[index - 1]['queryLeftNewDTO']['canWebBuy'] != 'Y':
print(u'您选择的车次%s没票啦,请重新选择车次' % (
trains[index - 1]['queryLeftNewDTO']['station_train_code']))
continue
else:
break
return index
class TicketOrder(object):
'''docstring for TicketOrder'''
def __init__(
self,
username='',
password='',
train_date='',
from_city_name='',
to_city_name=''):
super(TicketOrder, self).__init__()
self.username = username # 账号
self.password = password # 密码
self.train_date = train_date # 乘车日期[2014-01-01]
today = datetime.datetime.now()
self.back_train_date = today.strftime('%Y-%m-%d') # 返程日期[2014-01-01]
self.tour_flag = 'dc' # 单程dc/往返wf
self.purpose_code = 'ADULT' # 成人票
self.from_city_name = from_city_name # 对应查询界面'出发地'输入框中的内容
self.to_city_name = to_city_name # 对应查询界面'目的地'输入框中的内容
self.from_station_telecode = '' # 出发站编码
self.to_station_telecode = '' # 目的站编码
self.passengers = [] # 乘车人列表,最多5人
self.normal_passengers = [] # 我的联系人列表
self.trains = [] # 列车列表, 查询余票后自动更新
self.current_train_index = 0 # 当前选中的列车索引序号
self.captcha = '' # 图片验证码
self.orderId = '' # 订单流水号
self.canWebBuy = False # 可预订
self.notify = {
'mail_enable': 0,
'mail_username': '',
'mail_password': '',
'mail_server': '',
'mail_to': [],
'dates': [],
'trains': [],
'xb': [],
'focus': {}
}
def initSession(self):
self.session = requests.Session()
self.session.headers = {
'Accept': 'application/x-ms-application, image/jpeg, application/xaml+xml, image/gif, image/pjpeg, application/x-ms-xbap, */*',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'zh-CN',
'User-Agent': 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C)',
'Referer': 'https://kyfw.12306.cn/otn/index/init',
'Host': 'kyfw.12306.cn',
'Connection': 'Keep-Alive'
}
def updateHeaders(self, url):
d = {
'https://kyfw.12306.cn/otn/resources/js/framework/station_name.js': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/'
},
'https://kyfw.12306.cn/otn/login/init': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/'
},
'https://kyfw.12306.cn/otn/passcodeNew/getPassCodeNew?module=login&rand=sjrand&': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/login/init'
},
'https://kyfw.12306.cn/otn/passcodeNew/getPassCodeNew?module=passenger&rand=randp&': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc'
},
'https://kyfw.12306.cn/otn/passcodeNew/checkRandCodeAnsyn': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/login/init',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/login/loginAysnSuggest': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/login/init',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/login/userLogin': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/login/init'
},
'https://kyfw.12306.cn/otn/index/init': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/login/init'
},
'https://kyfw.12306.cn/otn/leftTicket/init': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/index/init',
'Content-Type': 'application/x-www-form-urlencoded'
},
'https://kyfw.12306.cn/otn/leftTicket/log?': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/leftTicket/init',
'x-requested-with': 'XMLHttpRequest',
'Cache-Control': 'no-cache',
'If-Modified-Since': '0'
},
'https://kyfw.12306.cn/otn/leftTicket/query?': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/leftTicket/init',
'x-requested-with': 'XMLHttpRequest',
'Cache-Control': 'no-cache',
'If-Modified-Since': '0'
},
'https://kyfw.12306.cn/otn/leftTicket/queryT?': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/leftTicket/init',
'x-requested-with': 'XMLHttpRequest',
'Cache-Control': 'no-cache',
'If-Modified-Since': '0'
},
'https://kyfw.12306.cn/otn/login/checkUser': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/leftTicket/init',
'Cache-Control': 'no-cache',
'If-Modified-Since': '0',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/leftTicket/submitOrderRequest': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/confirmPassenger/initDc': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/leftTicket/init',
'Content-Type': 'application/x-www-form-urlencoded',
'Cache-Control': 'no-cache'
},
'https://kyfw.12306.cn/otn/confirmPassenger/getPassengerDTOs': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/confirmPassenger/checkOrderInfo': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/confirmPassenger/getQueueCount': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/confirmPassenger/confirmSingleForQueue': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn/confirmPassenger/queryOrderWaitTime?': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'x-requested-with': 'XMLHttpRequest'
},
'https://kyfw.12306.cn/otn/confirmPassenger/resultOrderForDcQueue': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
},
'https://kyfw.12306.cn/otn//payOrder/init?': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc',
'Cache-Control': 'no-cache',
'Content-Type': 'application/x-www-form-urlencoded'
},
'https://kyfw.12306.cn/otn/queryOrder/initNoComplete': {
'method': 'GET',
'Referer': 'https://kyfw.12306.cn/otn//payOrder/init?random=1417862054369'
},
'https://kyfw.12306.cn/otn/queryOrder/queryTicketOrderNoComplete': {
'method': 'POST',
'Referer': 'https://kyfw.12306.cn/otn/queryOrder/initNoComplete',
'Cache-Control': 'no-cache',
'x-requested-with': 'XMLHttpRequest',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
}
}
l = [
'https://kyfw.12306.cn/otn/passcodeNew/getPassCodeNew?module=login&rand=sjrand&',
'https://kyfw.12306.cn/otn/passcodeNew/getPassCodeNew?module=passenger&rand=randp&',
'https://kyfw.12306.cn/otn/leftTicket/log?',
'https://kyfw.12306.cn/otn/leftTicket/query?',
'https://kyfw.12306.cn/otn/leftTicket/queryT?',
'https://kyfw.12306.cn/otn/confirmPassenger/queryOrderWaitTime?',
'https://kyfw.12306.cn/otn//payOrder/init?'
]
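        # Prefix-normalization sketch (the query string below is illustrative):
        # a request such as
        #   'https://kyfw.12306.cn/otn/leftTicket/query?leftTicketDTO.train_date=2014-01-01&...'
        # begins with an entry of l, so it is collapsed to that bare prefix
        # before its headers are looked up in d.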
for s in l:
if url.find(s) == 0:
url = s
        if url not in d:
print(u'未知 url: %s' % url)
return RET_ERR
self.session.headers.update({'Referer': d[url]['Referer']})
keys = [
'Referer',
'Cache-Control',
'x-requested-with',
'Content-Type'
]
for key in keys:
if key in d[url]:
self.session.headers.update({key: d[url][key]})
else:
self.session.headers.update({key: None})
def get(self, url):
self.updateHeaders(url)
tries = 0
while tries < MAX_TRIES:
tries += 1
try:
r = self.session.get(url, verify=False, timeout=16)
except requests.exceptions.ConnectionError as e:
print('ConnectionError(%s): e=%s' % (url, e))
continue
except requests.exceptions.Timeout as e:
print('Timeout(%s): e=%s' % (url, e))
continue
except requests.exceptions.TooManyRedirects as e:
print('TooManyRedirects(%s): e=%s' % (url, e))
continue
except requests.exceptions.HTTPError as e:
print('HTTPError(%s): e=%s' % (url, e))
continue
except requests.exceptions.RequestException as e:
print('RequestException(%s): e=%s' % (url, e))
continue
except:
print('Unknown exception(%s)' % (url))
continue
if r.status_code != 200:
print('Request %s failed %d times, status_code=%d' % (
url,
tries,
r.status_code))
else:
return r
else:
return None
def post(self, url, payload):
self.updateHeaders(url)
if url == 'https://kyfw.12306.cn/otn/passcodeNew/checkRandCodeAnsyn':
if payload.find('REPEAT_SUBMIT_TOKEN') != -1:
self.session.headers.update({'Referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc'})
else:
self.session.headers.update({'Referer': 'https://kyfw.12306.cn/otn/login/init'})
tries = 0
while tries < MAX_TRIES:
tries += 1
try:
r = self.session.post(url, data=payload, verify=False, timeout=16)
except requests.exceptions.ConnectionError as e:
print('ConnectionError(%s): e=%s' % (url, e))
continue
except requests.exceptions.Timeout as e:
print('Timeout(%s): e=%s' % (url, e))
continue
except requests.exceptions.TooManyRedirects as e:
print('TooManyRedirects(%s): e=%s' % (url, e))
continue
except requests.exceptions.HTTPError as e:
print('HTTPError(%s): e=%s' % (url, e))
continue
except requests.exceptions.RequestException as e:
print('RequestException(%s): e=%s' % (url, e))
continue
except:
print('Unknown exception(%s)' % (url))
continue
if r.status_code != 200:
print('Request %s failed %d times, status_code=%d' % (
url,
tries,
r.status_code))
else:
return r
else:
return None
def getCaptcha(self, url):
self.updateHeaders(url)
r = self.session.get(url, verify=False, stream=True, timeout=16)
with open('captcha.png', 'wb') as fd:
for chunk in r.iter_content():
fd.write(chunk)
print(u'请输入4位图片验证码(回车刷新验证码):')
captcha = raw_input()
if len(captcha) == 4:
return captcha
elif len(captcha) != 0:
print(u'%s是无效的图片验证码, 必须是4位' % (captcha))
return None
else:
return 1 # 刷新
def initStation(self):
url = 'https://kyfw.12306.cn/otn/resources/js/framework/station_name.js'
r = self.get(url)
if not r:
print(u'站点数据库初始化失败, 请求异常')
return None
data = r.text
# tuples are split by '@'
station_list = data.split('@')
if len(station_list) < 1:
print(u'站点数据库初始化失败, 数据异常')
return None
station_list = station_list[1:]
for station in station_list:
# items in each tuple are separated by '|'
items = station.split('|') # bji|北京|BJP|beijing|bj|2
if len(items) < 5:
print(u'忽略无效站点: %s' % (items))
continue
stations.append({'abbr': items[0],
'name': items[1],
'telecode': items[2],
'pinyin': items[3],
'pyabbr': items[4]})
return stations
def readConfig(self, config_file='config.ini'):
cp = ConfigParser.ConfigParser()
try:
cp.readfp(open(config_file, 'r'))
except IOError as e:
print(u'打开配置文件"%s"失败啦, 请先创建或者拷贝一份配置文件config.ini' % (config_file))
raw_input('Press any key to continue')
sys.exit()
self.username = cp.get('login', 'username')
self.password = cp.get('login', 'password')
self.train_date = cp.get('train', 'date')
self.from_city_name = cp.get('train', 'from')
self.to_city_name = cp.get('train', 'to')
self.notify['mail_enable'] = int(cp.get('notify', 'mail_enable'))
self.notify['mail_username'] = cp.get('notify', 'mail_username')
self.notify['mail_password'] = cp.get('notify', 'mail_password')
self.notify['mail_server'] = cp.get('notify', 'mail_server')
self.notify['mail_to'] = cp.get('notify', 'mail_to').split(',')
self.notify['dates'] = cp.get('notify', 'dates').split(',')
self.notify['trains'] = cp.get('notify', 'trains').split(',')
self.notify['xb'] = cp.get('notify', 'xb').split(',')
for t in self.notify['trains']:
self.notify['focus'][t] = self.notify['xb']
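        # Resulting focus map for assumed config values (illustrative only):
        #   trains = ['G101', 'all'], xb = ['yw', 'wz']
        #   -> self.notify['focus'] == {'G101': ['yw', 'wz'], 'all': ['yw', 'wz']}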
# 检查出发站
station = getStationByName(self.from_city_name)
if not station:
print(u'出发站错误, 请重新输入')
station = inputStation()
self.from_city_name = station['name']
self.from_station_telecode = station['telecode']
# 检查目的站
station = getStationByName(self.to_city_name)
if not station:
print(u'目的站错误,请重新输入')
station = inputStation()
self.to_city_name = station['name']
self.to_station_telecode = station['telecode']
# 检查乘车日期
if not checkDate(self.train_date):
print(u'乘车日期无效, 请重新选择')
self.train_date = selectDate()
# 分析乘客信息
self.passengers = []
index = 1
passenger_sections = ['passenger%d' % (i) for i in xrange(1, 6)]
sections = cp.sections()
for section in passenger_sections:
if section in sections:
passenger = {}
passenger['index'] = index
passenger['name'] = cp.get(section, 'name') # 必选参数
                passenger['cardtype'] = cp.get(section, 'cardtype') \
                    if cp.has_option(section, 'cardtype') else '1'  # 证件类型: 可选参数, 默认值1, 即二代身份证
                passenger['id'] = cp.get(section, 'id')  # 必选参数
                passenger['phone'] = cp.get(section, 'phone') \
                    if cp.has_option(section, 'phone') else '13800138000'  # 手机号码
                passenger['seattype'] = cp.get(section, 'seattype') \
                    if cp.has_option(section, 'seattype') else '1'  # 席别: 可选参数, 默认值1, 即硬座
                passenger['tickettype'] = cp.get(section, 'tickettype') \
                    if cp.has_option(section, 'tickettype') else '1'  # 票种: 可选参数, 默认值1, 即成人票
self.passengers.append(passenger)
index += 1
def printConfig(self):
printDelimiter()
print(u'订票信息:\n%s\t%s\t%s--->%s' % (
self.username,
self.train_date,
self.from_city_name,
self.to_city_name))
printDelimiter()
th = [u'序号', u'姓名', u'证件类型', u'证件号码', u'席别', u'票种']
print(u'%s\t%s\t%s\t%s\t%s\t%s' % (
th[0].ljust(2), th[1].ljust(4), th[2].ljust(5),
th[3].ljust(12), th[4].ljust(2), th[5].ljust(3)))
for p in self.passengers:
print(u'%s\t%s\t%s\t%s\t%s\t%s' % (
p['index'],
p['name'].decode('utf-8', 'ignore').ljust(4),
getCardType(p['cardtype']).ljust(5),
p['id'].ljust(20),
getSeatType(p['seattype']).ljust(2),
getTicketType(p['tickettype']).ljust(3)))
def checkRandCodeAnsyn(self, module):
d = {
'login': { # 登陆验证码
'rand': 'sjrand',
'referer': 'https://kyfw.12306.cn/otn/login/init'
},
'passenger': { # 订单验证码
'rand': 'randp',
'referer': 'https://kyfw.12306.cn/otn/confirmPassenger/initDc'
}
}
        if module not in d:
print(u'无效的 module: %s' % (module))
return RET_ERR
tries = 0
while tries < MAX_TRIES:
tries += 1
url = 'https://kyfw.12306.cn/otn/passcodeNew/getPassCodeNew?module=%s&rand=%s&' % (module, d[module]['rand'])
if tries > 1:
url = '%s%1.16f' % (url, random.random())
print(u'正在等待验证码...')
self.captcha = self.getCaptcha(url)
if not self.captcha:
continue
if self.captcha == 1: # 刷新不计数
tries -= 1
continue
url = 'https://kyfw.12306.cn/otn/passcodeNew/checkRandCodeAnsyn'
parameters = [
('randCode', self.captcha),
('rand', d[module]['rand'])
]
if module == 'login':
parameters.append(('randCode_validate', ''))
else:
parameters.append(('_json_att', ''))
parameters.append(('REPEAT_SUBMIT_TOKEN', self.repeatSubmitToken))
payload = urllib.urlencode(parameters)
print(u'正在校验验证码...')
r = self.post(url, payload)
if not r:
print(u'校验验证码异常')
continue
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"result":"1","msg":"randCodeRight"},"messages":[],"validateMessages":{}}
obj = r.json()
if (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['result', 'msg'])
and (obj['data']['result'] == '1')):
print(u'校验验证码成功')
return RET_OK
else:
print(u'校验验证码失败')
dumpObj(obj)
continue
else:
return RET_ERR
def login(self):
url = 'https://kyfw.12306.cn/otn/login/init'
r = self.get(url)
if not r:
print(u'登录失败, 请求异常')
return RET_ERR
if self.session.cookies:
cookies = requests.utils.dict_from_cookiejar(self.session.cookies)
            if cookies.get('JSESSIONID'):
                self.jsessionid = cookies['JSESSIONID']
if self.checkRandCodeAnsyn('login') == RET_ERR:
return RET_ERR
print(u'正在登录...')
url = 'https://kyfw.12306.cn/otn/login/loginAysnSuggest'
parameters = [
('loginUserDTO.user_name', self.username),
('userDTO.password', self.password),
('randCode', self.captcha),
('randCode_validate', ''),
#('ODg3NzQ0', 'OTIyNmFhNmQwNmI5ZmQ2OA%3D%3D'),
            ('NDU4MjQ0', 'YzM1ZjE5NTBiZmE4YzBiNw=='),
            ('myversion', 'undefined')
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'登录失败, 请求异常')
return RET_ERR
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"loginCheck":"Y"},"messages":[],"validateMessages":{}}
obj = r.json()
if (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['loginCheck'])
and (obj['data']['loginCheck'] == 'Y')):
print(u'登陆成功^_^')
url = 'https://kyfw.12306.cn/otn/login/userLogin'
parameters = [
('_json_att', ''),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
return RET_OK
else:
print(u'登陆失败啦!重新登陆...')
dumpObj(obj)
return RET_ERR
def getPassengerDTOs(self):
url = 'https://kyfw.12306.cn/otn/confirmPassenger/getPassengerDTOs'
parameters = [
('', ''),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'获取乘客信息异常')
return RET_ERR
obj = r.json()
if (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['normal_passengers'])
and obj['data']['normal_passengers']):
self.normal_passengers = obj['data']['normal_passengers']
return RET_OK
else:
print(u'获取乘客信息失败')
if hasKeys(obj, ['messages']):
dumpObj(obj['messages'])
if hasKeys(obj, ['data']) and hasKeys(obj['data'], ['exMsg']):
dumpObj(obj['data']['exMsg'])
return RET_ERR
def selectPassengers(self, prompt):
if prompt == 1:
print(u'是否重新选择乘客?(如需选择请输入y或者yes, 默认使用配置文件提供的乘客信息)')
act = raw_input()
act = act.lower()
if act != 'y' and act != 'yes':
self.printConfig()
return RET_OK
if not (self.normal_passengers and len(self.normal_passengers)):
tries = 0
while tries < MAX_TRIES:
tries += 1
if self.getPassengerDTOs() == RET_OK:
break
else:
print(u'获取乘客信息失败次数太多, 使用配置文件提供的乘客信息')
return RET_ERR
num = len(self.normal_passengers)
for i in xrange(0, num):
p = self.normal_passengers[i]
print(u'%d.%s \t' % (i + 1, p['passenger_name'])),
if (i + 1) % 5 == 0:
print('')
while True:
print(u'\n请选择乘车人(最多选择5个, 以逗号隔开, 如:1,2,3,4,5, 直接回车不选择, 使用配置文件中的乘客信息)')
buf = raw_input()
if not buf:
return RET_ERR
pattern = re.compile(r'^[0-9,]*\d$') # 只能输入数字和逗号, 并且必须以数字结束
if pattern.match(buf):
break
else:
print(u'输入格式错误, 只能输入数字和逗号, 并且必须以数字结束, 如:1,2,3,4,5')
ids = buf.split(',')
if not (ids and 1 <= len(ids) <= 5):
return RET_ERR
seattype = selectSeatType()
ids = [int(id) for id in ids]
del self.passengers[:]
for id in ids:
if id < 1 or id > num:
print(u'不存在的联系人, 忽略')
else:
passenger = {}
id = id - 1
passenger['index'] = len(self.passengers) + 1
passenger['name'] = self.normal_passengers[id]['passenger_name']
passenger['cardtype'] = self.normal_passengers[id]['passenger_id_type_code']
passenger['id'] = self.normal_passengers[id]['passenger_id_no']
passenger['phone'] = self.normal_passengers[id]['mobile_no']
passenger['seattype'] = seattype
passenger['tickettype'] = self.normal_passengers[id]['passenger_type']
self.passengers.append(passenger)
self.printConfig()
return RET_OK
def queryTickets(self):
self.canWebBuy = False
url = 'https://kyfw.12306.cn/otn/leftTicket/init'
parameters = [
('_json_att', ''),
('leftTicketDTO.from_station_name', self.from_city_name),
('leftTicketDTO.to_station_name', self.to_city_name),
('leftTicketDTO.from_station', self.from_station_telecode),
('leftTicketDTO.to_station', self.to_station_telecode),
('leftTicketDTO.train_date', self.train_date),
('back_train_date', self.back_train_date),
('purpose_codes', self.purpose_code),
('pre_step_flag', 'index')
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'查询车票异常')
url = 'https://kyfw.12306.cn/otn/leftTicket/log?'
parameters = [
('leftTicketDTO.train_date', self.train_date),
('leftTicketDTO.from_station', self.from_station_telecode),
('leftTicketDTO.to_station', self.to_station_telecode),
('purpose_codes', self.purpose_code),
]
url += urllib.urlencode(parameters)
r = self.get(url)
if not r:
print(u'查询车票异常')
url = 'https://kyfw.12306.cn/otn/leftTicket/queryT?'
parameters = [
('leftTicketDTO.train_date', self.train_date),
('leftTicketDTO.from_station', self.from_station_telecode),
('leftTicketDTO.to_station', self.to_station_telecode),
('purpose_codes', self.purpose_code),
]
url += urllib.urlencode(parameters)
r = self.get(url)
if not r:
print(u'查询车票异常')
return RET_ERR
obj = r.json()
if (hasKeys(obj, ['status', 'httpstatus', 'data']) and len(obj['data'])):
self.trains = obj['data']
return RET_OK
else:
print(u'查询车票失败')
if hasKeys(obj, ['messages']):
dumpObj(obj['messages'])
return RET_ERR
def sendMailNotification(self):
print(u'正在发送邮件提醒...')
me = u'订票提醒<%s>' % (self.notify['mail_username'])
msg = MIMEText(
self.notify['mail_content'],
_subtype='plain',
_charset='gb2312')
msg['Subject'] = u'余票信息'
msg['From'] = me
msg['To'] = ';'.join(self.notify['mail_to'])
try:
server = smtplib.SMTP()
server.connect(self.notify['mail_server'])
server.login(
self.notify['mail_username'],
self.notify['mail_password'])
server.sendmail(me, self.notify['mail_to'], msg.as_string())
server.close()
print(u'发送邮件提醒成功')
return True
except Exception as e:
print(u'发送邮件提醒失败, %s' % str(e))
return False
def printTrains(self):
printDelimiter()
print(u'余票查询结果如下:')
print(u"%s\t%s--->%s\n'有':票源充足 '无':票已售完 '*':未到起售时间 '--':无此席别" % (
self.train_date,
self.from_city_name,
self.to_city_name))
printDelimiter()
print(u'序号/车次\t乘车站\t目的站\t一等\t二等\t软卧\t硬卧\t硬座\t无座')
seatTypeCode = {
'swz': '商务座',
'tz': '特等座',
'zy': '一等座',
'ze': '二等座',
'gr': '高级软卧',
'rw': '软卧',
'yw': '硬卧',
'rz': '软座',
'yz': '硬座',
'wz': '无座',
'qt': '其它',
}
# TODO 余票数量和票价 https://kyfw.12306.cn/otn/leftTicket/queryTicketPrice?train_no=770000K77505&from_station_no=09&to_station_no=13&seat_types=1431&train_date=2014-01-01
# yp_info=4022300000301440004610078033421007800536 代表
# 4022300000 软卧0
# 3014400046 硬卧46
# 1007803342 无座342
# 1007800536 硬座536
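        # Worked example for one assumed 10-char chunk, tmp = '3014400046':
        #   tmp[0]   == '3'    -> 硬卧 (hard sleeper)
        #   tmp[1:5] == '0144' -> price 144
        #   tmp[-3:] == '046'  -> 46 tickets left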
index = 1
self.notify['mail_content'] = ''
for train in self.trains:
t = train['queryLeftNewDTO']
status = '售完' if t['canWebBuy'] == 'N' else '预定'
i = 0
ypInfo = {
'wz': { # 无座
'price': 0,
'left': 0
},
'yz': { # 硬座
'price': 0,
'left': 0
},
'yw': { # 硬卧
'price': 0,
'left': 0
},
'rw': { # 软卧
'price': 0,
'left': 0
},
}
# 分析票价和余票数量
while i < (len(t['yp_info']) / 10):
tmp = t['yp_info'][i * 10:(i + 1) * 10]
price = int(tmp[1:5])
left = int(tmp[-3:])
if tmp[0] == '1':
if tmp[6] == '3':
ypInfo['wz']['price'] = price
ypInfo['wz']['left'] = left
else:
ypInfo['yz']['price'] = price
ypInfo['yz']['left'] = left
elif tmp[0] == '3':
ypInfo['yw']['price'] = price
ypInfo['yw']['left'] = left
elif tmp[0] == '4':
ypInfo['rw']['price'] = price
ypInfo['rw']['left'] = left
i = i + 1
yz_price = u'硬座%s' % (
ypInfo['yz']['price']) if ypInfo['yz']['price'] else ''
yw_price = u'硬卧%s' % (
ypInfo['yw']['price']) if ypInfo['yw']['price'] else ''
print(u'(%d) %s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s' % (
index,
t['station_train_code'],
t['from_station_name'][0:3], # 最多保留3个中文
t['to_station_name'][0:3], # 最多保留3个中文
t['zy_num'],
t['ze_num'],
ypInfo['rw']['left'] if ypInfo['rw']['left'] else t['rw_num'],
ypInfo['yw']['left'] if ypInfo['yw']['left'] else t['yw_num'],
#t['rz_num'],
ypInfo['yz']['left'] if ypInfo['yz']['left'] else t['yz_num'],
ypInfo['wz']['left'] if ypInfo['wz']['left'] else t['wz_num'],
#yz_price,
#yw_price
))
if t['canWebBuy'] == 'Y':
self.canWebBuy = True
index += 1
if self.notify['mail_enable'] == 1 and t['canWebBuy'] == 'Y':
msg = ''
prefix = u'[%s]车次%s[%s/%s->%s/%s, 历时%s]现在有票啦\n' % (
t['start_train_date'],
t['station_train_code'],
t['from_station_name'],
t['start_time'],
t['to_station_name'],
t['arrive_time'],
t['lishi'])
if 'all' in self.notify['focus']: # 任意车次
if self.notify['focus']['all'][0] == 'all': # 任意席位
msg = prefix
else: # 指定席位
for seat in self.notify['focus']['all']:
if seat in ypInfo and ypInfo[seat]['left']:
msg += u'座位类型:%s, 剩余车票数量:%s, 票价:%s \n' % (
seat if seat not in seatTypeCode else seatTypeCode[seat],
ypInfo[seat]['left'],
ypInfo[seat]['price'])
if msg:
msg = prefix + msg + u'\n'
elif t['station_train_code'] in self.notify['focus']: # 指定车次
# 任意席位
if self.notify['focus'][t['station_train_code']][0] == 'all':
msg = prefix
else: # 指定席位
for seat in self.notify['focus'][t['station_train_code']]:
if seat in ypInfo and ypInfo[seat]['left']:
msg += u'座位类型:%s, 剩余车票数量:%s, 票价:%s \n' % (
seat if seat not in seatTypeCode else seatTypeCode[seat],
ypInfo[seat]['left'],
ypInfo[seat]['price'])
if msg:
msg = prefix + msg + u'\n'
self.notify['mail_content'] += msg
printDelimiter()
if self.notify['mail_enable'] == 1:
if self.notify['mail_content']:
self.sendMailNotification()
return RET_OK
else:
length = len(self.notify['dates'])
if length > 1:
                    self.train_date = self.notify['dates'][random.randint(0, length - 1)]
return RET_ERR
else:
return RET_OK
# -1->重新查询/0->退出程序/1~len->车次序号
def selectAction(self):
ret = -1
self.current_train_index = 0
trains_num = len(self.trains)
print(u'您可以选择:')
if self.canWebBuy:
print(u'1~%d.选择车次开始订票' % (trains_num))
print(u'p.更换乘车人')
print(u's.更改席别')
print(u'd.更改乘车日期')
print(u'f.更改出发站')
print(u't.更改目的站')
print(u'a.同时更改乘车日期,出发站和目的站')
print(u'u.查询未完成订单')
print(u'c.查看订票信息')
print(u'r.刷票模式')
print(u'n.普通模式')
print(u'q.退出')
print(u'刷新车票请直接回车')
printDelimiter()
select = raw_input()
select = select.lower()
if select.isdigit():
if not self.canWebBuy:
print(u'没有可预订的车次, 请刷新车票或者更改查询条件')
return -1
index = int(select)
if index < 1 or index > trains_num:
print(u'输入的序号无效,请重新选择车次(1~%d)' % (trains_num))
index = selectTrain(self.trains)
if self.trains[index - 1]['queryLeftNewDTO']['canWebBuy'] != 'Y':
print(u'您选择的车次%s没票啦,请重新选择车次' % (self.trains[index - 1]['queryLeftNewDTO']['station_train_code']))
index = selectTrain(self.trains)
ret = index
self.current_train_index = index - 1
elif select == 'p':
self.selectPassengers(0)
elif select == 's':
seattype = selectSeatType()
for p in self.passengers:
p['seattype'] = seattype
self.printConfig()
elif select == 'd':
self.train_date = selectDate()
elif select == 'f':
print(u'请输入出发站:')
station = inputStation()
self.from_city_name = station['name']
self.from_station_telecode = station['telecode']
elif select == 't':
print(u'请输入目的站:')
station = inputStation()
self.to_city_name = station['name']
self.to_station_telecode = station['telecode']
elif select == 'a':
self.train_date = selectDate()
print(u'请输入出发站:')
station = inputStation()
self.from_city_name = station['name']
self.from_station_telecode = station['telecode']
print(u'请输入目的站:')
station = inputStation()
self.to_city_name = station['name']
self.to_station_telecode = station['telecode']
elif select == 'u':
ret = self.queryTicketOrderNotComplete()
ret = self.selectAction()
elif select == 'c':
ret = self.printConfig()
ret = self.selectAction()
elif select == 'r':
self.notify['mail_enable'] = 1
ret = -1
elif select == 'n':
self.notify['mail_enable'] = 0
ret = -1
elif select == 'q':
ret = 0
return ret
def initOrder(self):
url = 'https://kyfw.12306.cn/otn/login/checkUser'
parameters = [
('_json_att', ''),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'初始化订单异常')
print(u'准备下单喽')
url = 'https://kyfw.12306.cn/otn/leftTicket/submitOrderRequest'
parameters = [
#('ODA4NzIx', 'MTU0MTczYmQ2N2I3MjJkOA%3D%3D'),
('myversion', 'undefined'),
('secretStr', self.trains[self.current_train_index]['secretStr']),
('train_date', self.train_date),
('back_train_date', self.back_train_date),
('tour_flag', self.tour_flag),
('purpose_codes', self.purpose_code),
('query_from_station_name', self.from_city_name),
('query_to_station_name', self.to_city_name),
('undefined', '')
]
# TODO 注意:此处post不需要做urlencode, 比较奇怪, 不能用urllib.urlencode(parameters)
payload = ''
length = len(parameters)
for i in range(0, length):
payload += parameters[i][0] + '=' + parameters[i][1]
if i < (length - 1):
payload += '&'
r = self.post(url, payload)
if not r:
print(u'下单异常')
return RET_ERR
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"messages":[],"validateMessages":{}}
obj = r.json()
if not (hasKeys(obj, ['status', 'httpstatus'])
and obj['status']):
print(u'下单失败啦')
dumpObj(obj)
return RET_ERR
print(u'订单初始化...')
self.session.close() # TODO
url = 'https://kyfw.12306.cn/otn/confirmPassenger/initDc'
parameters = [
('_json_att', ''),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'订单初始化异常')
return RET_ERR
data = r.text
s = data.find('globalRepeatSubmitToken') # TODO
e = data.find('global_lang')
if s == -1 or e == -1:
print(u'找不到 globalRepeatSubmitToken')
return RET_ERR
buf = data[s:e]
s = buf.find("'")
e = buf.find("';")
if s == -1 or e == -1:
print(u'很遗憾, 找不到 globalRepeatSubmitToken')
return RET_ERR
self.repeatSubmitToken = buf[s + 1:e]
s = data.find('key_check_isChange')
e = data.find('leftDetails')
if s == -1 or e == -1:
print(u'找不到 key_check_isChange')
return RET_ERR
self.keyCheckIsChange = data[s + len('key_check_isChange') + 3:e - 3]
return RET_OK
def checkOrderInfo(self):
if self.checkRandCodeAnsyn('passenger') == RET_ERR:
return RET_ERR
passengerTicketStr = ''
oldPassengerStr = ''
passenger_seat_detail = '0' # TODO [0->随机][1->下铺][2->中铺][3->上铺]
for p in self.passengers:
if p['index'] != 1:
passengerTicketStr += 'N_'
oldPassengerStr += '1_'
passengerTicketStr += '%s,%s,%s,%s,%s,%s,%s,' % (
p['seattype'],
passenger_seat_detail,
p['tickettype'],
p['name'],
p['cardtype'],
p['id'],
p['phone'])
oldPassengerStr += '%s,%s,%s,' % (
p['name'],
p['cardtype'],
p['id'])
passengerTicketStr += 'N'
oldPassengerStr += '1_'
self.passengerTicketStr = passengerTicketStr
self.oldPassengerStr = oldPassengerStr
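        # Assembled-string sketch for two assumed passengers (all values are
        # placeholders, not real data):
        #   passengerTicketStr == '1,0,1,NAME1,1,ID1,TEL1,N_1,0,1,NAME2,1,ID2,TEL2,N'
        #   oldPassengerStr    == 'NAME1,1,ID1,1_NAME2,1,ID2,1_'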
print(u'检查订单...')
url = 'https://kyfw.12306.cn/otn/confirmPassenger/checkOrderInfo'
parameters = [
('cancel_flag', '2'), # TODO
('bed_level_order_num', '000000000000000000000000000000'), # TODO
('passengerTicketStr', self.passengerTicketStr),
('oldPassengerStr', self.oldPassengerStr),
('tour_flag', self.tour_flag),
('randCode', self.captcha),
#('NzA4MTc1', 'NmYyYzZkYWY2OWZkNzg2YQ%3D%3D'), # TODO
('_json_att', ''),
('REPEAT_SUBMIT_TOKEN', self.repeatSubmitToken),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'检查订单异常')
return RET_ERR
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"submitStatus":true},"messages":[],"validateMessages":{}}
obj = r.json()
if (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['submitStatus'])
and obj['status']
and obj['data']['submitStatus']):
print(u'检查订单成功')
return RET_OK
else:
print(u'检查订单失败')
dumpObj(obj)
return RET_ERR
def getQueueCount(self):
print(u'查询排队情况...')
url = 'https://kyfw.12306.cn/otn/confirmPassenger/getQueueCount'
t = self.trains[self.current_train_index]['queryLeftNewDTO']
parameters = [
('train_date', date2UTC(self.train_date)),
('train_no', t['train_no']),
('stationTrainCode', t['station_train_code']),
('seatType', '1'), # TODO
('fromStationTelecode', t['from_station_telecode']),
('toStationTelecode', t['to_station_telecode']),
('leftTicket', t['yp_info']),
('purpose_codes', '00'), # TODO
('_json_att', ''),
('REPEAT_SUBMIT_TOKEN', self.repeatSubmitToken)
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'查询排队情况异常')
return RET_ERR
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"count":"0","ticket":"100985109710098535003021350212","op_2":"false","countT":"0","op_1":"false"},"messages":[],"validateMessages":{}}
obj = r.json()
if not (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['op_1', 'op_2'])
and obj['status']):
print(u'查询排队情况失败')
dumpObj(obj)
return RET_ERR
if obj['data']['op_1'] != 'false':
print(u'已有人先于您提交相同的购票需求, 到处理您的需求时可能已无票, 建议根据当前余票确定是否排队.')
if obj['data']['op_2'] != 'false':
print(u'目前排队人数已经超过余票张数,请您选择其他席别或车次,特此提醒。')
if 'ticket' in obj['data']:
print(u'排队详情:%s' % (obj['data']['ticket'])) # TODO
return RET_OK
def confirmSingleForQueue(self):
print(u'提交订单排队...')
url = 'https://kyfw.12306.cn/otn/confirmPassenger/confirmSingleForQueue'
t = self.trains[self.current_train_index]['queryLeftNewDTO']
parameters = [
('passengerTicketStr', self.passengerTicketStr),
('oldPassengerStr', self.oldPassengerStr),
('randCode', self.captcha),
('purpose_codes', '00'), # TODO
('key_check_isChange', self.keyCheckIsChange),
('leftTicketStr', t['yp_info']),
('train_location', t['location_code']),
('_json_att', ''),
('REPEAT_SUBMIT_TOKEN', self.repeatSubmitToken),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'提交订单排队异常')
return RET_ERR
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"submitStatus":true},"messages":[],"validateMessages":{}}
obj = r.json()
if (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['submitStatus'])
and obj['status'] and obj['data']['submitStatus']):
print(u'订单排队中...')
return RET_OK
else:
print(u'提交订单排队失败')
dumpObj(obj)
return RET_ERR
def queryOrderWaitTime(self):
print(u'等待订单流水号...')
url = 'https://kyfw.12306.cn/otn/confirmPassenger/queryOrderWaitTime?random=%13d&tourFlag=dc&_json_att=&REPEAT_SUBMIT_TOKEN=%s' % (
random.randint(1000000000000, 1999999999999), self.repeatSubmitToken)
r = self.get(url)
if not r:
print(u'等待订单流水号异常')
return RET_ERR
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"queryOrderWaitTimeStatus":true,"count":0,"waitTime":4,"requestId":5944637152210732219,"waitCount":2,"tourFlag":"dc","orderId":null},"messages":[],"validateMessages":{}}
# {"validateMessagesShowId":"_validatorMessage","status":true,"httpstatus":200,"data":{"queryOrderWaitTimeStatus":true,"count":0,"waitTime":-1,"requestId":5944637152210732219,"waitCount":0,"tourFlag":"dc","orderId":"E739900792"},"messages":[],"validateMessages":{}}
obj = r.json()
if not (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['orderId'])
and obj['status']
and obj['data']['orderId']):
print(u'等待订单流水号失败')
dumpObj(obj)
return RET_ERR
self.orderId = obj['data']['orderId']
if (self.orderId and self.orderId != 'null'):
print(u'订单流水号为:')
print(self.orderId)
return RET_OK
else:
print(u'等待订单流水号失败')
return RET_ERR
def payOrder(self):
print(u'等待订票结果...')
url = 'https://kyfw.12306.cn/otn/confirmPassenger/resultOrderForDcQueue'
parameters = [
('orderSequence_no', self.orderId),
('_json_att', ''),
('REPEAT_SUBMIT_TOKEN', self.repeatSubmitToken),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'等待订票结果异常')
return RET_ERR
# {'validateMessagesShowId':'_validatorMessage','status':true,'httpstatus':200,'data':{'submitStatus':true},'messages':[],'validateMessages':{}}
# {'validateMessagesShowId':'_validatorMessage','status':true,'httpstatus':200,'data':{'errMsg':'获取订单信息失败,请查看未完成订单,继续支付!','submitStatus':false},'messages':[],'validateMessages':{}}
obj = r.json()
if not (
hasKeys(obj, ['status', 'httpstatus', 'data'])
and hasKeys(obj['data'], ['submitStatus'])
and obj['status']
and obj['data']['submitStatus']):
print(u'等待订票结果失败')
dumpObj(obj)
return RET_ERR
url = 'https://kyfw.12306.cn/otn//payOrder/init?random=%13d' % (
random.randint(1000000000000, 1999999999999))
parameters = [
('_json_att', ''),
('REPEAT_SUBMIT_TOKEN', self.repeatSubmitToken),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'请求异常')
return RET_ERR
data = r.text
if data.find(u'席位已锁定') != -1:
print(u'订票成功^_^请在45分钟内完成网上支付,否则系统将自动取消')
return RET_OK
else:
return RET_ERR
def queryTicketOrderNotComplete(self):
print(u'正在查询未完成订单...')
url = 'https://kyfw.12306.cn/otn/queryOrder/queryTicketOrderNoComplete'
parameters = [
('_json_att', ''),
]
payload = urllib.urlencode(parameters)
r = self.post(url, payload)
if not r:
print(u'查询未完成订单异常')
return RET_ERR
obj = r.json()
if not (hasKeys(obj, ['status', 'httpstatus', 'data']) and obj['status']):
print(u'查询未完成订单失败')
dumpObj(obj)
return RET_ERR
if (hasKeys(obj['data'], ['orderDBList']) and len(obj['data']['orderDBList'])):
print(u'查询到有未完成订单,请先处理')
return RET_OK
if (
hasKeys(obj['data'], ['orderCacheDTO'])
and obj['data']['orderCacheDTO']
and hasKeys(obj['data']['orderCacheDTO'], ['status'])):
if obj['data']['orderCacheDTO']['status'] == 0:
print(u'查询到cache有未完成订单,请先处理')
return RET_OK
else:
if (hasKeys(obj['data']['orderCacheDTO'], ['message'])):
dumpObj(obj['data']['orderCacheDTO']['message'])
return RET_ERR
def main():
print(getTime())
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config', help='Specify config file')
parser.add_argument('-u', '--username', help='Specify username to login')
parser.add_argument('-p', '--password', help='Specify password to login')
parser.add_argument('-d', '--date', help='Specify train date, 2014-01-01')
parser.add_argument('-m', '--mail', help='Send email notification')
args = parser.parse_args()
order = TicketOrder()
order.initSession()
order.initStation()
if args.config:
order.readConfig(args.config) # 使用指定的配置文件
else:
order.readConfig() # 使用默认的配置文件config.ini
if args.username:
order.username = args.username # 使用指定的账号代替配置文件中的账号
if args.password:
order.password = args.password # 使用指定的密码代替配置文件中的密码
if args.date:
if checkDate(args.date):
order.train_date = args.date # 使用指定的乘车日期代替配置文件中的乘车日期
else:
print(u'乘车日期无效, 请重新选择')
order.train_date = selectDate()
if args.mail:
# 有票时自动发送邮件通知
order.notify['mail_enable'] = 1 if args.mail == '1' else 0
tries = 0
while tries < MAX_TRIES:
tries += 1
if order.login() == RET_OK:
break
else:
#print(u'失败次数太多,自动退出程序')
#sys.exit()
print(u'登陆失败,但是你仍然可以进行查票操作!')
order.selectPassengers(1)
while True:
time.sleep(1)
# 查询车票
if order.queryTickets() != RET_OK:
continue
# 显示查询结果
if order.printTrains() != RET_OK:
continue
# 选择菜单列举的动作之一
action = order.selectAction()
if action == -1:
continue
elif action == 0:
break
# 订单初始化
if order.initOrder() != RET_OK:
continue
# 检查订单信息
if order.checkOrderInfo() != RET_OK:
continue
# 查询排队和余票情况
# if order.getQueueCount() != RET_OK:
# continue
# 提交订单到队里中
tries = 0
while tries < 2:
tries += 1
if order.confirmSingleForQueue() == RET_OK:
break
# 获取orderId
tries = 0
while tries < 2:
tries += 1
if order.queryOrderWaitTime() == RET_OK:
break
# 正式提交订单
if order.payOrder() == RET_OK:
break
# 访问未完成订单页面检查是否订票成功
if order.queryTicketOrderNotComplete() == RET_OK:
print(u'订票成功^_^请在45分钟内完成网上支付,否则系统将自动取消')
break
print(getTime())
raw_input('Press any key to continue')
if __name__ == '__main__':
main()
# EOF
| gpl-2.0 |
msleal/amspy | amspy/examples/create_streaming_endpoint.py | 2 | 2523 | """
Copyright (c) 2016, Marcelo Leal
Description: Simple Azure Media Services Python library
License: MIT (see LICENSE.txt file for details)
"""
import os
import sys
import json
import amspy
import time
import logging
import datetime
###########################################################################################
##### DISCLAIMER ##### ##### DISCLAIMER ##### ##### DISCLAIMER ##### ##### DISCLAIMER #####
###########################################################################################
# ALL CODE IN THIS DIRECTOY (INCLUDING THIS FILE) ARE EXAMPLE CODES THAT WILL ACT ON YOUR
# AMS ACCOUNT. IT ASSUMES THAT THE AMS ACCOUNT IS CLEAN (e.g.: BRAND NEW), WITH NO DATA OR
# PRODUCTION CODE ON IT. DO NOT, AGAIN: DO NOT RUN ANY EXAMPLE CODE AGAINST PRODUCTION AMS
# ACCOUNT! IF YOU RUN ANY EXAMPLE CODE AGAINST YOUR PRODUCTION AMS ACCOUNT, YOU CAN LOSE
# DATA, AND/OR PUT YOUR AMS SERVICES IN A DEGRADED OR UNAVAILABLE STATE. BE WARNED!
###########################################################################################
##### DISCLAIMER ##### ##### DISCLAIMER ##### ##### DISCLAIMER ##### ##### DISCLAIMER #####
###########################################################################################
# Load Azure app defaults
try:
with open('config.json') as configFile:
configData = json.load(configFile)
except FileNotFoundError:
print("ERROR: Expecting config.json in current folder")
sys.exit()
account_name = configData['accountName']
account_key = configData['accountKey']
# Get the access token...
response = amspy.get_access_token(account_name, account_key)
resjson = response.json()
access_token = resjson["access_token"]
#Initialization...
print ("\n-----------------------= AMS Py =----------------------");
print ("Simple Python Library for Azure Media Services REST API");
print ("-------------------------------------------------------\n");
### create a streaming endpoint
print ("\n001 >>> Creating a Streaming Endpoint")
response = amspy.create_streaming_endpoint(access_token, "NewStreamingEndpoint", description="HLS Streaming Endpoint")
if (response.status_code == 202):
resjson = response.json()
streamingendpoint_id = str(resjson['d']['Id'])
print("POST Status.............................: " + str(response.status_code))
print("Streaming Endpoint Id..........................: " + streamingendpoint_id)
else:
print("POST Status.............................: " + str(response.status_code) + " - Streaming Endpoint Creation ERROR." + str(response.content))
| mit |
oudalab/phyllo | phyllo/extractors/hydatiusDB.py | 1 | 5254 | import sqlite3
import urllib
import re
from urllib.request import urlopen
from bs4 import BeautifulSoup
from phyllo.phyllo_logger import logger
def getBooks(soup):
siteURL = 'http://www.thelatinlibrary.com'
textsURL = []
# get links to books in the collection
for a in soup.find_all('a', href=True):
link = a['href']
        textsURL.append("{}/{}".format(siteURL, link))
    # remove navigation URLs that are not texts; guard each removal so a
    # missing entry cannot raise ValueError
    for unwanted in ("http://www.thelatinlibrary.com/index.html",
                     "http://www.thelatinlibrary.com/classics.html",
                     "http://www.thelatinlibrary.com/christian.html"):
        while unwanted in textsURL:
            textsURL.remove(unwanted)
logger.info("\n".join(textsURL))
return textsURL
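# Hedged usage sketch for getBooks (the collection URL is the one main() uses):
#   soup = BeautifulSoup(urllib.request.urlopen('http://www.thelatinlibrary.com/hydatius.html'), 'html5lib')
#   book_urls = getBooks(soup)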
def main():
# The collection URL below.
collURL = 'http://www.thelatinlibrary.com/hydatius.html'
collOpen = urllib.request.urlopen(collURL)
collSOUP = BeautifulSoup(collOpen, 'html5lib')
author = collSOUP.title.string.strip()
colltitle = collSOUP.title.string.strip()
date = collSOUP.span.string.replace('(', '').replace(')', '').replace(u"\u2013", '-').strip()
textsURL = getBooks(collSOUP)
with sqlite3.connect('texts.db') as db:
c = db.cursor()
c.execute(
'CREATE TABLE IF NOT EXISTS texts (id INTEGER PRIMARY KEY, title TEXT, book TEXT,'
' language TEXT, author TEXT, date TEXT, chapter TEXT, verse TEXT, passage TEXT,'
' link TEXT, documentType TEXT)')
c.execute("DELETE FROM texts WHERE author = 'Hydatius'")
for url in textsURL:
openurl = urllib.request.urlopen(url)
textsoup = BeautifulSoup(openurl, 'html5lib')
title = textsoup.title.string.split(":")[1].strip()
print(title)
chapter = -1
verse = 0
# this just numbers paragraphs as verses
# does not split chapters b/c I wasn't sure how to split them
if title.startswith("Chronicon"):
getp = textsoup.find_all('p')
for p in getp:
try:
if p['class'][0].lower() in ['border', 'pagehead', 'shortborder', 'smallborder', 'margin',
'internal_navigation']: # these are not part of the main text
continue
except:
pass
verses = []
text = p.get_text()
text = text.strip()
verses.append(text.strip())
for v in verses:
if v.startswith('Christian'):
continue
if v is None or v == '' or v.isspace():
continue
# verse number assignment.
verse += 1
c.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
(None, colltitle, title, 'Latin', author, date, chapter,
verse, v.strip(), url, 'prose'))
# leaves internal numbers as part of text and numbers each line
# poetry or prose?
else:
chapter = "-1"
verse = 0
getp = textsoup.find_all('p')
for p in getp:
try:
if p['class'][0].lower() in ['border', 'pagehead', 'shortborder', 'smallborder', 'margin',
'internal_navigation']: # these are not part of the main text
continue
except:
pass
txtstr = p.get_text().strip()
if txtstr.startswith("Christian"):
continue
brtags = p.findAll('br')
verses = []
try:
firstline = brtags[0].previous_sibling.previous_sibling.strip()
except:
firstline = brtags[0].previous_sibling.strip()
verses.append(firstline)
for br in brtags:
try:
text = br.next_sibling.next_sibling.strip()
except:
text = br.next_sibling.strip()
if text is None or text == '' or text.isspace():
continue
verses.append(text)
for v in verses:
if v.startswith('Christian'):
continue
if v is None or v == '' or v.isspace():
continue
# verse number assignment.
verse += 1
c.execute("INSERT INTO texts VALUES (?,?,?,?,?,?,?, ?, ?, ?, ?)",
(None, colltitle, title, 'Latin', author, date, chapter,
verse, v.strip(), url, 'prose'))
if __name__ == '__main__':
main()
| apache-2.0 |
fheinle/Photoblog | gdata/tlslite/TLSConnection.py | 278 | 70347 | """
MAIN CLASS FOR TLS LITE (START HERE!).
"""
from __future__ import generators
import socket
from utils.compat import formatExceptionTrace
from TLSRecordLayer import TLSRecordLayer
from Session import Session
from constants import *
from utils.cryptomath import getRandomBytes
from errors import *
from messages import *
from mathtls import *
from HandshakeSettings import HandshakeSettings
class TLSConnection(TLSRecordLayer):
"""
This class wraps a socket and provides TLS handshaking and data
transfer.
To use this class, create a new instance, passing a connected
socket into the constructor. Then call some handshake function.
If the handshake completes without raising an exception, then a TLS
connection has been negotiated. You can transfer data over this
connection as if it were a socket.
This class provides both synchronous and asynchronous versions of
its key functions. The synchronous versions should be used when
    writing single- or multi-threaded code using blocking sockets. The
asynchronous versions should be used when performing asynchronous,
event-based I/O with non-blocking sockets.
Asynchronous I/O is a complicated subject; typically, you should
not use the asynchronous functions directly, but should use some
framework like asyncore or Twisted which TLS Lite integrates with
(see
L{tlslite.integration.TLSAsyncDispatcherMixIn.TLSAsyncDispatcherMixIn} or
L{tlslite.integration.TLSTwistedProtocolWrapper.TLSTwistedProtocolWrapper}).
"""
def __init__(self, sock):
"""Create a new TLSConnection instance.
@param sock: The socket data will be transmitted on. The
socket should already be connected. It may be in blocking or
non-blocking mode.
@type sock: L{socket.socket}
"""
TLSRecordLayer.__init__(self, sock)
def handshakeClientSRP(self, username, password, session=None,
settings=None, checker=None, async=False):
"""Perform an SRP handshake in the role of client.
This function performs a TLS/SRP handshake. SRP mutually
authenticates both parties to each other using only a
username and password. This function may also perform a
combined SRP and server-certificate handshake, if the server
chooses to authenticate itself with a certificate chain in
addition to doing SRP.
TLS/SRP is non-standard. Most TLS implementations don't
support it. See
U{http://www.ietf.org/html.charters/tls-charter.html} or
U{http://trevp.net/tlssrp/} for the latest information on
TLS/SRP.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type username: str
@param username: The SRP username.
@type password: str
@param password: The SRP password.
@type session: L{tlslite.Session.Session}
@param session: A TLS session to attempt to resume. This
session must be an SRP session performed with the same username
and password as were passed in. If the resumption does not
succeed, a full SRP handshake will be performed.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(srpParams=(username, password),
session=session, settings=settings, checker=checker)
if async:
return handshaker
for result in handshaker:
pass
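    # Hedged usage sketch for the synchronous form above (host, credentials,
    # and buffer size are placeholders, not part of tlslite):
    #   sock = socket.create_connection(('srp.example.com', 443))
    #   connection = TLSConnection(sock)
    #   connection.handshakeClientSRP('alice', 'correct horse battery staple')
    #   connection.write('ping')
    #   reply = connection.read(max=1024)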
def handshakeClientCert(self, certChain=None, privateKey=None,
session=None, settings=None, checker=None,
async=False):
"""Perform a certificate-based handshake in the role of client.
This function performs an SSL or TLS handshake. The server
will authenticate itself using an X.509 or cryptoID certificate
chain. If the handshake succeeds, the server's certificate
chain will be stored in the session's serverCertChain attribute.
Unless a checker object is passed in, this function does no
validation or checking of the server's certificate chain.
If the server requests client authentication, the
client will send the passed-in certificate chain, and use the
passed-in private key to authenticate itself. If no
certificate chain and private key were passed in, the client
will attempt to proceed without client authentication. The
server may or may not allow this.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: The certificate chain to be used if the
server requests client authentication.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: The private key to be used if the server
requests client authentication.
@type session: L{tlslite.Session.Session}
@param session: A TLS session to attempt to resume. If the
resumption does not succeed, a full handshake will be
performed.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(certParams=(certChain,
privateKey), session=session, settings=settings,
checker=checker)
if async:
return handshaker
for result in handshaker:
pass
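    # Hedged sketch of driving the async form with select, per the generator
    # contract described above (0 -> wait to read, 1 -> wait to write):
    #   for state in connection.handshakeClientCert(async=True):
    #       if state == 0:
    #           select.select([sock], [], [])
    #       elif state == 1:
    #           select.select([], [sock], [])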
def handshakeClientUnknown(self, srpCallback=None, certCallback=None,
session=None, settings=None, checker=None,
async=False):
"""Perform a to-be-determined type of handshake in the role of client.
This function performs an SSL or TLS handshake. If the server
requests client certificate authentication, the
certCallback will be invoked and should return a (certChain,
privateKey) pair. If the callback returns None, the library
will attempt to proceed without client authentication. The
server may or may not allow this.
If the server requests SRP authentication, the srpCallback
will be invoked and should return a (username, password) pair.
If the callback returns None, the local implementation will
signal a user_canceled error alert.
After the handshake completes, the client can inspect the
connection's session attribute to determine what type of
authentication was performed.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type srpCallback: callable
@param srpCallback: The callback to be used if the server
requests SRP authentication. If None, the client will not
offer support for SRP ciphersuites.
@type certCallback: callable
@param certCallback: The callback to be used if the server
requests client certificate authentication.
@type session: L{tlslite.Session.Session}
@param session: A TLS session to attempt to resume. If the
resumption does not succeed, a full handshake will be
performed.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(unknownParams=(srpCallback,
certCallback), session=session, settings=settings,
checker=checker)
if async:
return handshaker
for result in handshaker:
pass
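    # Hedged callback sketch (return values follow the docstring contract;
    # the concrete credentials shown are placeholders):
    #   def srpCallback():
    #       return ('alice', 'secret')            # or None to signal user_canceled
    #   def certCallback():
    #       return (clientCertChain, privateKey)  # or None to skip client auth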
def handshakeClientSharedKey(self, username, sharedKey, settings=None,
checker=None, async=False):
"""Perform a shared-key handshake in the role of client.
This function performs a shared-key handshake. Using shared
symmetric keys of high entropy (128 bits or greater) mutually
authenticates both parties to each other.
TLS with shared-keys is non-standard. Most TLS
implementations don't support it. See
U{http://www.ietf.org/html.charters/tls-charter.html} for the
latest information on TLS with shared-keys. If the shared-keys
        Internet-Draft changes or is superseded, TLS Lite will track
those changes, so the shared-key support in later versions of
TLS Lite may become incompatible with this version.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type username: str
@param username: The shared-key username.
@type sharedKey: str
@param sharedKey: The shared key.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(sharedKeyParams=(username,
sharedKey), settings=settings, checker=checker)
if async:
return handshaker
for result in handshaker:
pass
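    # A minimal blocking usage sketch (username and key are assumed
    # example values):
    #
    #   connection.handshakeClientSharedKey(username="user",
    #                                       sharedKey="0123456789abcdef")
    #   # blocks until the handshake completes; on return the connection
    #   # is open and mutually authenticated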
def _handshakeClientAsync(self, srpParams=(), certParams=(),
unknownParams=(), sharedKeyParams=(),
session=None, settings=None, checker=None,
recursive=False):
handshaker = self._handshakeClientAsyncHelper(srpParams=srpParams,
certParams=certParams, unknownParams=unknownParams,
sharedKeyParams=sharedKeyParams, session=session,
settings=settings, recursive=recursive)
for result in self._handshakeWrapperAsync(handshaker, checker):
yield result
def _handshakeClientAsyncHelper(self, srpParams, certParams, unknownParams,
sharedKeyParams, session, settings, recursive):
if not recursive:
self._handshakeStart(client=True)
#Unpack parameters
srpUsername = None # srpParams
password = None # srpParams
clientCertChain = None # certParams
privateKey = None # certParams
srpCallback = None # unknownParams
certCallback = None # unknownParams
#session # sharedKeyParams (or session)
#settings # settings
if srpParams:
srpUsername, password = srpParams
elif certParams:
clientCertChain, privateKey = certParams
elif unknownParams:
srpCallback, certCallback = unknownParams
elif sharedKeyParams:
session = Session()._createSharedKey(*sharedKeyParams)
if not settings:
settings = HandshakeSettings()
settings = settings._filter()
#Validate parameters
if srpUsername and not password:
raise ValueError("Caller passed a username but no password")
if password and not srpUsername:
raise ValueError("Caller passed a password but no username")
if clientCertChain and not privateKey:
raise ValueError("Caller passed a certChain but no privateKey")
if privateKey and not clientCertChain:
raise ValueError("Caller passed a privateKey but no certChain")
if clientCertChain:
foundType = False
try:
import cryptoIDlib.CertChain
if isinstance(clientCertChain, cryptoIDlib.CertChain.CertChain):
if "cryptoID" not in settings.certificateTypes:
raise ValueError("Client certificate doesn't "\
"match Handshake Settings")
settings.certificateTypes = ["cryptoID"]
foundType = True
except ImportError:
pass
if not foundType and isinstance(clientCertChain,
X509CertChain):
if "x509" not in settings.certificateTypes:
raise ValueError("Client certificate doesn't match "\
"Handshake Settings")
settings.certificateTypes = ["x509"]
foundType = True
if not foundType:
raise ValueError("Unrecognized certificate type")
if session:
if not session.valid():
session = None #ignore non-resumable sessions...
elif session.resumable and \
(session.srpUsername != srpUsername):
raise ValueError("Session username doesn't match")
#Add Faults to parameters
if srpUsername and self.fault == Fault.badUsername:
srpUsername += "GARBAGE"
if password and self.fault == Fault.badPassword:
password += "GARBAGE"
if sharedKeyParams:
identifier = sharedKeyParams[0]
sharedKey = sharedKeyParams[1]
if self.fault == Fault.badIdentifier:
identifier += "GARBAGE"
session = Session()._createSharedKey(identifier, sharedKey)
elif self.fault == Fault.badSharedKey:
sharedKey += "GARBAGE"
session = Session()._createSharedKey(identifier, sharedKey)
#Initialize locals
serverCertChain = None
cipherSuite = 0
certificateType = CertificateType.x509
premasterSecret = None
#Get client nonce
clientRandom = getRandomBytes(32)
#Initialize acceptable ciphersuites
cipherSuites = []
if srpParams:
cipherSuites += CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += CipherSuite.getSrpSuites(settings.cipherNames)
elif certParams:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
elif unknownParams:
if srpCallback:
cipherSuites += \
CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += \
CipherSuite.getSrpSuites(settings.cipherNames)
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
elif sharedKeyParams:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
else:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
#Initialize acceptable certificate types
certificateTypes = settings._getCertificateTypes()
        #Tentatively set the version to the client's maximum version.
#We'll use this for the ClientHello, and if an error occurs
#parsing the Server Hello, we'll use this version for the response
self.version = settings.maxVersion
#Either send ClientHello (with a resumable session)...
if session:
            #If it's a resumable (i.e. not a shared-key) session, then its
#ciphersuite must be one of the acceptable ciphersuites
if (not sharedKeyParams) and \
session.cipherSuite not in cipherSuites:
raise ValueError("Session's cipher suite not consistent "\
"with parameters")
else:
clientHello = ClientHello()
clientHello.create(settings.maxVersion, clientRandom,
session.sessionID, cipherSuites,
certificateTypes, session.srpUsername)
#Or send ClientHello (without)
else:
clientHello = ClientHello()
clientHello.create(settings.maxVersion, clientRandom,
createByteArraySequence([]), cipherSuites,
certificateTypes, srpUsername)
for result in self._sendMsg(clientHello):
yield result
#Get ServerHello (or missing_srp_username)
for result in self._getMsg((ContentType.handshake,
ContentType.alert),
HandshakeType.server_hello):
if result in (0,1):
yield result
else:
break
msg = result
if isinstance(msg, ServerHello):
serverHello = msg
elif isinstance(msg, Alert):
alert = msg
#If it's not a missing_srp_username, re-raise
if alert.description != AlertDescription.missing_srp_username:
self._shutdown(False)
raise TLSRemoteAlert(alert)
#If we're not in SRP callback mode, we won't have offered SRP
#without a username, so we shouldn't get this alert
if not srpCallback:
for result in self._sendError(\
AlertDescription.unexpected_message):
yield result
srpParams = srpCallback()
#If the callback returns None, cancel the handshake
            if srpParams is None:
for result in self._sendError(AlertDescription.user_canceled):
yield result
#Recursively perform handshake
for result in self._handshakeClientAsyncHelper(srpParams,
None, None, None, None, settings, True):
yield result
return
#Get the server version. Do this before anything else, so any
#error alerts will use the server's version
self.version = serverHello.server_version
#Future responses from server must use this version
self._versionCheck = True
#Check ServerHello
if serverHello.server_version < settings.minVersion:
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(serverHello.server_version)):
yield result
if serverHello.server_version > settings.maxVersion:
for result in self._sendError(\
AlertDescription.protocol_version,
"Too new version: %s" % str(serverHello.server_version)):
yield result
if serverHello.cipher_suite not in cipherSuites:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect ciphersuite"):
yield result
if serverHello.certificate_type not in certificateTypes:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect certificate type"):
yield result
if serverHello.compression_method != 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect compression method"):
yield result
#Get the server nonce
serverRandom = serverHello.random
#If the server agrees to resume
if session and session.sessionID and \
serverHello.session_id == session.sessionID:
#If a shared-key, we're flexible about suites; otherwise the
#server-chosen suite has to match the session's suite
if sharedKeyParams:
session.cipherSuite = serverHello.cipher_suite
elif serverHello.cipher_suite != session.cipherSuite:
for result in self._sendError(\
AlertDescription.illegal_parameter,\
"Server's ciphersuite doesn't match session"):
yield result
#Set the session for this connection
self.session = session
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._getFinished():
yield result
for result in self._sendFinished():
yield result
#Mark the connection as open
self._handshakeDone(resumed=True)
#If server DOES NOT agree to resume
else:
if sharedKeyParams:
for result in self._sendError(\
AlertDescription.user_canceled,
"Was expecting a shared-key resumption"):
yield result
#We've already validated these
cipherSuite = serverHello.cipher_suite
certificateType = serverHello.certificate_type
#If the server chose an SRP suite...
if cipherSuite in CipherSuite.srpSuites:
#Get ServerKeyExchange, ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_key_exchange, cipherSuite):
if result in (0,1):
yield result
else:
break
serverKeyExchange = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
#If the server chose an SRP+RSA suite...
elif cipherSuite in CipherSuite.srpRsaSuites:
#Get Certificate, ServerKeyExchange, ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate, certificateType):
if result in (0,1):
yield result
else:
break
serverCertificate = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_key_exchange, cipherSuite):
if result in (0,1):
yield result
else:
break
serverKeyExchange = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
#If the server chose an RSA suite...
elif cipherSuite in CipherSuite.rsaSuites:
#Get Certificate[, CertificateRequest], ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate, certificateType):
if result in (0,1):
yield result
else:
break
serverCertificate = result
for result in self._getMsg(ContentType.handshake,
(HandshakeType.server_hello_done,
HandshakeType.certificate_request)):
if result in (0,1):
yield result
else:
break
msg = result
certificateRequest = None
if isinstance(msg, CertificateRequest):
certificateRequest = msg
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
elif isinstance(msg, ServerHelloDone):
serverHelloDone = msg
else:
raise AssertionError()
#Calculate SRP premaster secret, if server chose an SRP or
#SRP+RSA suite
if cipherSuite in CipherSuite.srpSuites + \
CipherSuite.srpRsaSuites:
#Get and check the server's group parameters and B value
N = serverKeyExchange.srp_N
g = serverKeyExchange.srp_g
s = serverKeyExchange.srp_s
B = serverKeyExchange.srp_B
if (g,N) not in goodGroupParameters:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"Unknown group parameters"):
yield result
if numBits(N) < settings.minKeySize:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"N value is too small: %d" % numBits(N)):
yield result
if numBits(N) > settings.maxKeySize:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"N value is too large: %d" % numBits(N)):
yield result
if B % N == 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Suspicious B value"):
yield result
#Check the server's signature, if server chose an
#SRP+RSA suite
if cipherSuite in CipherSuite.srpRsaSuites:
#Hash ServerKeyExchange/ServerSRPParams
hashBytes = serverKeyExchange.hash(clientRandom,
serverRandom)
#Extract signature bytes from ServerKeyExchange
sigBytes = serverKeyExchange.signature
if len(sigBytes) == 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server sent an SRP ServerKeyExchange "\
"message without a signature"):
yield result
#Get server's public key from the Certificate message
for result in self._getKeyFromChain(serverCertificate,
settings):
if result in (0,1):
yield result
else:
break
publicKey, serverCertChain = result
#Verify signature
if not publicKey.verify(sigBytes, hashBytes):
for result in self._sendError(\
AlertDescription.decrypt_error,
"Signature failed to verify"):
yield result
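                #A sketch of the SRP-6 math the lines below implement (per
                #the SRP ciphersuite draft): the client picks a random
                #ephemeral a and sends A = g^a mod N; with x derived from
                #the salt, username, and password (and v = g^x mod N), both
                #sides arrive at the same premaster secret
                #    S = (B - k*v)^(a + u*x) mod N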
#Calculate client's ephemeral DH values (a, A)
a = bytesToNumber(getRandomBytes(32))
A = powMod(g, a, N)
#Calculate client's static DH values (x, v)
x = makeX(bytesToString(s), srpUsername, password)
v = powMod(g, x, N)
#Calculate u
u = makeU(N, A, B)
#Calculate premaster secret
k = makeK(N, g)
S = powMod((B - (k*v)) % N, a+(u*x), N)
if self.fault == Fault.badA:
A = N
S = 0
premasterSecret = numberToBytes(S)
#Send ClientKeyExchange
for result in self._sendMsg(\
ClientKeyExchange(cipherSuite).createSRP(A)):
yield result
#Calculate RSA premaster secret, if server chose an RSA suite
elif cipherSuite in CipherSuite.rsaSuites:
#Handle the presence of a CertificateRequest
if certificateRequest:
if unknownParams and certCallback:
certParamsNew = certCallback()
if certParamsNew:
clientCertChain, privateKey = certParamsNew
#Get server's public key from the Certificate message
for result in self._getKeyFromChain(serverCertificate,
settings):
if result in (0,1):
yield result
else:
break
publicKey, serverCertChain = result
#Calculate premaster secret
premasterSecret = getRandomBytes(48)
premasterSecret[0] = settings.maxVersion[0]
premasterSecret[1] = settings.maxVersion[1]
if self.fault == Fault.badPremasterPadding:
premasterSecret[0] = 5
if self.fault == Fault.shortPremasterSecret:
premasterSecret = premasterSecret[:-1]
#Encrypt premaster secret to server's public key
encryptedPreMasterSecret = publicKey.encrypt(premasterSecret)
#If client authentication was requested, send Certificate
#message, either with certificates or empty
if certificateRequest:
clientCertificate = Certificate(certificateType)
if clientCertChain:
#Check to make sure we have the same type of
#certificates the server requested
wrongType = False
if certificateType == CertificateType.x509:
if not isinstance(clientCertChain, X509CertChain):
wrongType = True
elif certificateType == CertificateType.cryptoID:
if not isinstance(clientCertChain,
cryptoIDlib.CertChain.CertChain):
wrongType = True
if wrongType:
for result in self._sendError(\
AlertDescription.handshake_failure,
"Client certificate is of wrong type"):
yield result
clientCertificate.create(clientCertChain)
for result in self._sendMsg(clientCertificate):
yield result
else:
#The server didn't request client auth, so we
#zeroize these so the clientCertChain won't be
#stored in the session.
privateKey = None
clientCertChain = None
#Send ClientKeyExchange
clientKeyExchange = ClientKeyExchange(cipherSuite,
self.version)
clientKeyExchange.createRSA(encryptedPreMasterSecret)
for result in self._sendMsg(clientKeyExchange):
yield result
#If client authentication was requested and we have a
#private key, send CertificateVerify
if certificateRequest and privateKey:
if self.version == (3,0):
#Create a temporary session object, just for the
#purpose of creating the CertificateVerify
session = Session()
session._calcMasterSecret(self.version,
premasterSecret,
clientRandom,
serverRandom)
verifyBytes = self._calcSSLHandshakeHash(\
session.masterSecret, "")
elif self.version in ((3,1), (3,2)):
verifyBytes = stringToBytes(\
self._handshake_md5.digest() + \
self._handshake_sha.digest())
if self.fault == Fault.badVerifyMessage:
verifyBytes[0] = ((verifyBytes[0]+1) % 256)
signedBytes = privateKey.sign(verifyBytes)
certificateVerify = CertificateVerify()
certificateVerify.create(signedBytes)
for result in self._sendMsg(certificateVerify):
yield result
#Create the session object
self.session = Session()
self.session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
self.session.sessionID = serverHello.session_id
self.session.cipherSuite = cipherSuite
self.session.srpUsername = srpUsername
self.session.clientCertChain = clientCertChain
self.session.serverCertChain = serverCertChain
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._sendFinished():
yield result
for result in self._getFinished():
yield result
#Mark the connection as open
self.session._setResumable(True)
self._handshakeDone(resumed=False)
def handshakeServer(self, sharedKeyDB=None, verifierDB=None,
certChain=None, privateKey=None, reqCert=False,
sessionCache=None, settings=None, checker=None):
"""Perform a handshake in the role of server.
This function performs an SSL or TLS handshake. Depending on
the arguments and the behavior of the client, this function can
perform a shared-key, SRP, or certificate-based handshake. It
can also perform a combined SRP and server-certificate
handshake.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
This function does not send a Hello Request message before
performing the handshake, so if re-handshaking is required,
the server must signal the client to begin the re-handshake
through some other means.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type sharedKeyDB: L{tlslite.SharedKeyDB.SharedKeyDB}
@param sharedKeyDB: A database of shared symmetric keys
associated with usernames. If the client performs a
shared-key handshake, the session's sharedKeyUsername
attribute will be set.
@type verifierDB: L{tlslite.VerifierDB.VerifierDB}
@param verifierDB: A database of SRP password verifiers
associated with usernames. If the client performs an SRP
handshake, the session's srpUsername attribute will be set.
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: The certificate chain to be used if the
client requests server certificate authentication.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: The private key to be used if the client
requests server certificate authentication.
@type reqCert: bool
@param reqCert: Whether to request client certificate
authentication. This only applies if the client chooses server
certificate authentication; if the client chooses SRP or
shared-key authentication, this will be ignored. If the client
performs a client certificate authentication, the sessions's
clientCertChain attribute will be set.
@type sessionCache: L{tlslite.SessionCache.SessionCache}
@param sessionCache: An in-memory cache of resumable sessions.
The client can resume sessions from this cache. Alternatively,
if the client performs a full handshake, a new session will be
added to the cache.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites and SSL/TLS version chosen by the server.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
for result in self.handshakeServerAsync(sharedKeyDB, verifierDB,
certChain, privateKey, reqCert, sessionCache, settings,
checker):
pass
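    # A minimal server-side sketch (the chain, key, and databases are
    # assumed objects, not defined here):
    #
    #   connection = TLSConnection(acceptedSocket)
    #   connection.handshakeServer(certChain=serverCertChain,
    #                              privateKey=serverPrivateKey,
    #                              verifierDB=srpVerifiers,
    #                              sessionCache=sessionCache)
    #   # blocks until done; connection.session then describes the
    #   # negotiated ciphersuite and authentication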
def handshakeServerAsync(self, sharedKeyDB=None, verifierDB=None,
certChain=None, privateKey=None, reqCert=False,
sessionCache=None, settings=None, checker=None):
"""Start a server handshake operation on the TLS connection.
This function returns a generator which behaves similarly to
handshakeServer(). Successive invocations of the generator
will return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or it will raise StopIteration
if the handshake operation is complete.
@rtype: iterable
@return: A generator; see above for details.
"""
handshaker = self._handshakeServerAsyncHelper(\
sharedKeyDB=sharedKeyDB,
verifierDB=verifierDB, certChain=certChain,
privateKey=privateKey, reqCert=reqCert,
sessionCache=sessionCache, settings=settings)
for result in self._handshakeWrapperAsync(handshaker, checker):
yield result
def _handshakeServerAsyncHelper(self, sharedKeyDB, verifierDB,
certChain, privateKey, reqCert, sessionCache,
settings):
self._handshakeStart(client=False)
if (not sharedKeyDB) and (not verifierDB) and (not certChain):
raise ValueError("Caller passed no authentication credentials")
if certChain and not privateKey:
raise ValueError("Caller passed a certChain but no privateKey")
if privateKey and not certChain:
raise ValueError("Caller passed a privateKey but no certChain")
if not settings:
settings = HandshakeSettings()
settings = settings._filter()
#Initialize acceptable cipher suites
cipherSuites = []
if verifierDB:
if certChain:
cipherSuites += \
CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += CipherSuite.getSrpSuites(settings.cipherNames)
if sharedKeyDB or certChain:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
#Initialize acceptable certificate type
certificateType = None
if certChain:
try:
import cryptoIDlib.CertChain
if isinstance(certChain, cryptoIDlib.CertChain.CertChain):
certificateType = CertificateType.cryptoID
except ImportError:
pass
if isinstance(certChain, X509CertChain):
certificateType = CertificateType.x509
        if certificateType is None:
raise ValueError("Unrecognized certificate type")
#Initialize locals
clientCertChain = None
        serverCertChain = None #We may set this to certChain later
postFinishedError = None
#Tentatively set version to most-desirable version, so if an error
#occurs parsing the ClientHello, this is what we'll use for the
#error alert
self.version = settings.maxVersion
#Get ClientHello
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_hello):
if result in (0,1):
yield result
else:
break
clientHello = result
#If client's version is too low, reject it
if clientHello.client_version < settings.minVersion:
self.version = settings.minVersion
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(clientHello.client_version)):
yield result
#If client's version is too high, propose my highest version
elif clientHello.client_version > settings.maxVersion:
self.version = settings.maxVersion
else:
#Set the version to the client's version
self.version = clientHello.client_version
#Get the client nonce; create server nonce
clientRandom = clientHello.random
serverRandom = getRandomBytes(32)
#Calculate the first cipher suite intersection.
#This is the 'privileged' ciphersuite. We'll use it if we're
#doing a shared-key resumption or a new negotiation. In fact,
#the only time we won't use it is if we're resuming a non-sharedkey
#session, in which case we use the ciphersuite from the session.
#
#Given the current ciphersuite ordering, this means we prefer SRP
#over non-SRP.
for cipherSuite in cipherSuites:
if cipherSuite in clientHello.cipher_suites:
break
else:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#If resumption was requested...
if clientHello.session_id and (sharedKeyDB or sessionCache):
session = None
#Check in the sharedKeys container
if sharedKeyDB and len(clientHello.session_id)==16:
try:
                    #Trim off zero padding, if any; the for/else keeps the
                    #full 16-byte identifier when no zero byte is present
                    for x in range(16):
                        if clientHello.session_id[x]==0:
                            break
                    else:
                        x = 16
                    self.allegedSharedKeyUsername = bytesToString(\
                        clientHello.session_id[:x])
session = sharedKeyDB[self.allegedSharedKeyUsername]
if not session.sharedKey:
raise AssertionError()
#use privileged ciphersuite
session.cipherSuite = cipherSuite
except KeyError:
pass
#Then check in the session cache
if sessionCache and not session:
try:
session = sessionCache[bytesToString(\
clientHello.session_id)]
if session.sharedKey:
raise AssertionError()
if not session.resumable:
raise AssertionError()
#Check for consistency with ClientHello
if session.cipherSuite not in cipherSuites:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
if session.cipherSuite not in clientHello.cipher_suites:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
if clientHello.srp_username:
if clientHello.srp_username != session.srpUsername:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
except KeyError:
pass
            #If a session is found...
if session:
#Set the session
self.session = session
#Send ServerHello
serverHello = ServerHello()
serverHello.create(self.version, serverRandom,
session.sessionID, session.cipherSuite,
certificateType)
for result in self._sendMsg(serverHello):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._sendFinished():
yield result
for result in self._getFinished():
yield result
#Mark the connection as open
self._handshakeDone(resumed=True)
return
#If not a resumption...
#TRICKY: we might have chosen an RSA suite that was only deemed
#acceptable because of the shared-key resumption. If the shared-
#key resumption failed, because the identifier wasn't recognized,
#we might fall through to here, where we have an RSA suite
#chosen, but no certificate.
if cipherSuite in CipherSuite.rsaSuites and not certChain:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#If an RSA suite is chosen, check for certificate type intersection
#(We do this check down here because if the mismatch occurs but the
# client is using a shared-key session, it's okay)
if cipherSuite in CipherSuite.rsaSuites + \
CipherSuite.srpRsaSuites:
if certificateType not in clientHello.certificate_types:
for result in self._sendError(\
AlertDescription.handshake_failure,
"the client doesn't support my certificate type"):
yield result
#Move certChain -> serverCertChain, now that we're using it
serverCertChain = certChain
#Create sessionID
if sessionCache:
sessionID = getRandomBytes(32)
else:
sessionID = createByteArraySequence([])
#If we've selected an SRP suite, exchange keys and calculate
#premaster secret:
if cipherSuite in CipherSuite.srpSuites + CipherSuite.srpRsaSuites:
#If there's no SRP username...
if not clientHello.srp_username:
#Ask the client to re-send ClientHello with one
for result in self._sendMsg(Alert().create(\
AlertDescription.missing_srp_username,
AlertLevel.warning)):
yield result
#Get ClientHello
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_hello):
if result in (0,1):
yield result
else:
break
clientHello = result
#Check ClientHello
#If client's version is too low, reject it (COPIED CODE; BAD!)
if clientHello.client_version < settings.minVersion:
self.version = settings.minVersion
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(clientHello.client_version)):
yield result
#If client's version is too high, propose my highest version
elif clientHello.client_version > settings.maxVersion:
self.version = settings.maxVersion
else:
#Set the version to the client's version
self.version = clientHello.client_version
#Recalculate the privileged cipher suite, making sure to
#pick an SRP suite
cipherSuites = [c for c in cipherSuites if c in \
CipherSuite.srpSuites + \
CipherSuite.srpRsaSuites]
for cipherSuite in cipherSuites:
if cipherSuite in clientHello.cipher_suites:
break
else:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#Get the client nonce; create server nonce
clientRandom = clientHello.random
serverRandom = getRandomBytes(32)
#The username better be there, this time
if not clientHello.srp_username:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Client resent a hello, but without the SRP"\
" username"):
yield result
#Get username
self.allegedSrpUsername = clientHello.srp_username
#Get parameters from username
try:
entry = verifierDB[self.allegedSrpUsername]
except KeyError:
for result in self._sendError(\
AlertDescription.unknown_srp_username):
yield result
(N, g, s, v) = entry
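            #A sketch of the SRP-6 server-side math implemented below: the
            #database stores the verifier v = g^x mod N; the server picks a
            #random ephemeral b and sends B = (k*v + g^b) mod N, and both
            #sides later derive the same premaster secret, computed here as
            #    S = (A * v^u)^b mod N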
#Calculate server's ephemeral DH values (b, B)
b = bytesToNumber(getRandomBytes(32))
k = makeK(N, g)
B = (powMod(g, b, N) + (k*v)) % N
#Create ServerKeyExchange, signing it if necessary
serverKeyExchange = ServerKeyExchange(cipherSuite)
serverKeyExchange.createSRP(N, g, stringToBytes(s), B)
if cipherSuite in CipherSuite.srpRsaSuites:
hashBytes = serverKeyExchange.hash(clientRandom,
serverRandom)
serverKeyExchange.signature = privateKey.sign(hashBytes)
#Send ServerHello[, Certificate], ServerKeyExchange,
#ServerHelloDone
msgs = []
serverHello = ServerHello()
serverHello.create(self.version, serverRandom, sessionID,
cipherSuite, certificateType)
msgs.append(serverHello)
if cipherSuite in CipherSuite.srpRsaSuites:
certificateMsg = Certificate(certificateType)
certificateMsg.create(serverCertChain)
msgs.append(certificateMsg)
msgs.append(serverKeyExchange)
msgs.append(ServerHelloDone())
for result in self._sendMsgs(msgs):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Get and check ClientKeyExchange
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_key_exchange,
cipherSuite):
if result in (0,1):
yield result
else:
break
clientKeyExchange = result
A = clientKeyExchange.srp_A
if A % N == 0:
postFinishedError = (AlertDescription.illegal_parameter,
"Suspicious A value")
#Calculate u
u = makeU(N, A, B)
#Calculate premaster secret
S = powMod((A * powMod(v,u,N)) % N, b, N)
premasterSecret = numberToBytes(S)
#If we've selected an RSA suite, exchange keys and calculate
#premaster secret:
elif cipherSuite in CipherSuite.rsaSuites:
#Send ServerHello, Certificate[, CertificateRequest],
#ServerHelloDone
msgs = []
msgs.append(ServerHello().create(self.version, serverRandom,
sessionID, cipherSuite, certificateType))
msgs.append(Certificate(certificateType).create(serverCertChain))
if reqCert:
msgs.append(CertificateRequest())
msgs.append(ServerHelloDone())
for result in self._sendMsgs(msgs):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Get [Certificate,] (if was requested)
if reqCert:
if self.version == (3,0):
for result in self._getMsg((ContentType.handshake,
ContentType.alert),
HandshakeType.certificate,
certificateType):
if result in (0,1):
yield result
else:
break
msg = result
if isinstance(msg, Alert):
#If it's not a no_certificate alert, re-raise
alert = msg
if alert.description != \
AlertDescription.no_certificate:
self._shutdown(False)
raise TLSRemoteAlert(alert)
elif isinstance(msg, Certificate):
clientCertificate = msg
if clientCertificate.certChain and \
clientCertificate.certChain.getNumCerts()!=0:
clientCertChain = clientCertificate.certChain
else:
raise AssertionError()
elif self.version in ((3,1), (3,2)):
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate,
certificateType):
if result in (0,1):
yield result
else:
break
clientCertificate = result
if clientCertificate.certChain and \
clientCertificate.certChain.getNumCerts()!=0:
clientCertChain = clientCertificate.certChain
else:
raise AssertionError()
#Get ClientKeyExchange
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_key_exchange,
cipherSuite):
if result in (0,1):
yield result
else:
break
clientKeyExchange = result
#Decrypt ClientKeyExchange
premasterSecret = privateKey.decrypt(\
clientKeyExchange.encryptedPreMasterSecret)
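            #Countermeasure against the Bleichenbacher attack (RFC 2246,
            #section 7.4.7.1): if decryption fails or the premaster secret
            #is malformed, fall back to a random premaster secret and keep
            #going, so the handshake fails uniformly at the Finished
            #exchange without leaking anything about the PKCS#1 padding.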
            randomPreMasterSecret = getRandomBytes(48)
            if not premasterSecret:
                premasterSecret = randomPreMasterSecret
            elif len(premasterSecret)!=48:
                premasterSecret = randomPreMasterSecret
            else:
                #Only index into the premaster secret once we know
                #decryption produced a well-formed 48-byte value
                versionCheck = (premasterSecret[0], premasterSecret[1])
                if versionCheck != clientHello.client_version:
                    if versionCheck != self.version: #Tolerate buggy IE clients
                        premasterSecret = randomPreMasterSecret
#Get and check CertificateVerify, if relevant
if clientCertChain:
if self.version == (3,0):
#Create a temporary session object, just for the purpose
#of checking the CertificateVerify
session = Session()
session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
verifyBytes = self._calcSSLHandshakeHash(\
session.masterSecret, "")
elif self.version in ((3,1), (3,2)):
verifyBytes = stringToBytes(self._handshake_md5.digest() +\
self._handshake_sha.digest())
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate_verify):
if result in (0,1):
yield result
else:
break
certificateVerify = result
publicKey = clientCertChain.getEndEntityPublicKey()
if len(publicKey) < settings.minKeySize:
postFinishedError = (AlertDescription.handshake_failure,
"Client's public key too small: %d" % len(publicKey))
if len(publicKey) > settings.maxKeySize:
postFinishedError = (AlertDescription.handshake_failure,
"Client's public key too large: %d" % len(publicKey))
if not publicKey.verify(certificateVerify.signature,
verifyBytes):
postFinishedError = (AlertDescription.decrypt_error,
"Signature failed to verify")
#Create the session object
self.session = Session()
self.session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
self.session.sessionID = sessionID
self.session.cipherSuite = cipherSuite
self.session.srpUsername = self.allegedSrpUsername
self.session.clientCertChain = clientCertChain
self.session.serverCertChain = serverCertChain
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._getFinished():
yield result
        #If we were holding a post-finished error until receiving the
        #client's Finished message, send it now. We delay the call until
        #this point because calling _sendError() raises an exception, and
        #our caller might shut down the socket upon receiving that
        #exception. If it did, and the client was still sending its
        #ChangeCipherSpec or Finished messages, it would cause a socket
        #error on the client side. This is a lot of consideration to show
        #to misbehaving clients, but sending the error prematurely would
        #also break fault-testing.
if postFinishedError:
for result in self._sendError(*postFinishedError):
yield result
for result in self._sendFinished():
yield result
#Add the session object to the session cache
if sessionCache and sessionID:
sessionCache[bytesToString(sessionID)] = self.session
#Mark the connection as open
self.session._setResumable(True)
self._handshakeDone(resumed=False)
def _handshakeWrapperAsync(self, handshaker, checker):
if not self.fault:
try:
for result in handshaker:
yield result
if checker:
try:
checker(self)
except TLSAuthenticationError:
alert = Alert().create(AlertDescription.close_notify,
AlertLevel.fatal)
for result in self._sendMsg(alert):
yield result
raise
except:
self._shutdown(False)
raise
else:
try:
for result in handshaker:
yield result
if checker:
try:
checker(self)
except TLSAuthenticationError:
alert = Alert().create(AlertDescription.close_notify,
AlertLevel.fatal)
for result in self._sendMsg(alert):
yield result
raise
except socket.error, e:
raise TLSFaultError("socket error!")
except TLSAbruptCloseError, e:
raise TLSFaultError("abrupt close error!")
except TLSAlert, alert:
if alert.description not in Fault.faultAlerts[self.fault]:
raise TLSFaultError(str(alert))
else:
pass
except:
self._shutdown(False)
raise
else:
raise TLSFaultError("No error!")
def _getKeyFromChain(self, certificate, settings):
#Get and check cert chain from the Certificate message
certChain = certificate.certChain
if not certChain or certChain.getNumCerts() == 0:
for result in self._sendError(AlertDescription.illegal_parameter,
"Other party sent a Certificate message without "\
"certificates"):
yield result
#Get and check public key from the cert chain
publicKey = certChain.getEndEntityPublicKey()
if len(publicKey) < settings.minKeySize:
for result in self._sendError(AlertDescription.handshake_failure,
"Other party's public key too small: %d" % len(publicKey)):
yield result
if len(publicKey) > settings.maxKeySize:
for result in self._sendError(AlertDescription.handshake_failure,
"Other party's public key too large: %d" % len(publicKey)):
yield result
yield publicKey, certChain
| gpl-3.0 |
nmercier/linux-cross-gcc | linux/lib/python2.7/compiler/transformer.py | 176 | 53108 | """Parse tree transformation module.
Transforms Python source code into an abstract syntax tree (AST)
defined in the ast module.
The simplest ways to invoke this module are via parse and parseFile.
parse(buf) -> AST
parseFile(path) -> AST
"""
# Original version written by Greg Stein ([email protected])
# and Bill Tutt ([email protected])
# February 1997.
#
# Modifications and improvements for Python 2.0 by Jeremy Hylton and
# Mark Hammond
#
# Some fixes to try to have correct line number on almost all nodes
# (except Module, Discard and Stmt) added by Sylvain Thenault
#
# Portions of this file are:
# Copyright (C) 1997-1998 Greg Stein. All Rights Reserved.
#
# This module is provided under a BSD-ish license. See
# http://www.opensource.org/licenses/bsd-license.html
# and replace OWNER, ORGANIZATION, and YEAR as appropriate.
from compiler.ast import *
import parser
import symbol
import token
class WalkerError(StandardError):
pass
from compiler.consts import CO_VARARGS, CO_VARKEYWORDS
from compiler.consts import OP_ASSIGN, OP_DELETE, OP_APPLY
def parseFile(path):
f = open(path, "U")
# XXX The parser API tolerates files without a trailing newline,
# but not strings without a trailing newline. Always add an extra
# newline to the file contents, since we're going through the string
# version of the API.
src = f.read() + "\n"
f.close()
return parse(src)
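# A minimal usage sketch of the two entry points (the source string is an
# assumed example):
#
#   tree = parse("a = 1 + 2")
#   # -> Module(None, Stmt([Assign([AssName('a', 'OP_ASSIGN')],
#   #                              Add((Const(1), Const(2))))]))
#   tree = parseFile("/path/to/module.py")   # same, reading from a file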
def parse(buf, mode="exec"):
if mode == "exec" or mode == "single":
return Transformer().parsesuite(buf)
elif mode == "eval":
return Transformer().parseexpr(buf)
else:
raise ValueError("compile() arg 3 must be"
" 'exec' or 'eval' or 'single'")
def asList(nodes):
l = []
for item in nodes:
if hasattr(item, "asList"):
l.append(item.asList())
else:
if type(item) is type( (None, None) ):
l.append(tuple(asList(item)))
elif type(item) is type( [] ):
l.append(asList(item))
else:
l.append(item)
return l
def extractLineNo(ast):
if not isinstance(ast[1], tuple):
# get a terminal node
return ast[2]
for child in ast[1:]:
if isinstance(child, tuple):
lineno = extractLineNo(child)
if lineno is not None:
return lineno
def Node(*args):
kind = args[0]
if kind in nodes:
try:
return nodes[kind](*args[1:])
except TypeError:
print nodes[kind], len(args), args
raise
else:
raise WalkerError, "Can't find appropriate Node type: %s" % str(args)
#return apply(ast.Node, args)
class Transformer:
"""Utility object for transforming Python parse trees.
Exposes the following methods:
tree = transform(ast_tree)
tree = parsesuite(text)
tree = parseexpr(text)
tree = parsefile(fileob | filename)
"""
def __init__(self):
self._dispatch = {}
for value, name in symbol.sym_name.items():
if hasattr(self, name):
self._dispatch[value] = getattr(self, name)
self._dispatch[token.NEWLINE] = self.com_NEWLINE
self._atom_dispatch = {token.LPAR: self.atom_lpar,
token.LSQB: self.atom_lsqb,
token.LBRACE: self.atom_lbrace,
token.BACKQUOTE: self.atom_backquote,
token.NUMBER: self.atom_number,
token.STRING: self.atom_string,
token.NAME: self.atom_name,
}
self.encoding = None
def transform(self, tree):
"""Transform an AST into a modified parse tree."""
if not (isinstance(tree, tuple) or isinstance(tree, list)):
tree = parser.st2tuple(tree, line_info=1)
return self.compile_node(tree)
def parsesuite(self, text):
"""Return a modified parse tree for the given suite text."""
return self.transform(parser.suite(text))
def parseexpr(self, text):
"""Return a modified parse tree for the given expression text."""
return self.transform(parser.expr(text))
def parsefile(self, file):
"""Return a modified parse tree for the contents of the given file."""
if type(file) == type(''):
file = open(file)
return self.parsesuite(file.read())
# --------------------------------------------------------------
#
# PRIVATE METHODS
#
def compile_node(self, node):
### emit a line-number node?
n = node[0]
if n == symbol.encoding_decl:
self.encoding = node[2]
node = node[1]
n = node[0]
if n == symbol.single_input:
return self.single_input(node[1:])
if n == symbol.file_input:
return self.file_input(node[1:])
if n == symbol.eval_input:
return self.eval_input(node[1:])
if n == symbol.lambdef:
return self.lambdef(node[1:])
if n == symbol.funcdef:
return self.funcdef(node[1:])
if n == symbol.classdef:
return self.classdef(node[1:])
raise WalkerError, ('unexpected node type', n)
def single_input(self, node):
### do we want to do anything about being "interactive" ?
# NEWLINE | simple_stmt | compound_stmt NEWLINE
n = node[0][0]
if n != token.NEWLINE:
return self.com_stmt(node[0])
return Pass()
def file_input(self, nodelist):
doc = self.get_docstring(nodelist, symbol.file_input)
if doc is not None:
i = 1
else:
i = 0
stmts = []
for node in nodelist[i:]:
if node[0] != token.ENDMARKER and node[0] != token.NEWLINE:
self.com_append_stmt(stmts, node)
return Module(doc, Stmt(stmts))
def eval_input(self, nodelist):
# from the built-in function input()
### is this sufficient?
return Expression(self.com_node(nodelist[0]))
def decorator_name(self, nodelist):
listlen = len(nodelist)
assert listlen >= 1 and listlen % 2 == 1
item = self.atom_name(nodelist)
i = 1
while i < listlen:
assert nodelist[i][0] == token.DOT
assert nodelist[i + 1][0] == token.NAME
item = Getattr(item, nodelist[i + 1][1])
i += 2
return item
def decorator(self, nodelist):
# '@' dotted_name [ '(' [arglist] ')' ]
assert len(nodelist) in (3, 5, 6)
assert nodelist[0][0] == token.AT
assert nodelist[-1][0] == token.NEWLINE
assert nodelist[1][0] == symbol.dotted_name
funcname = self.decorator_name(nodelist[1][1:])
if len(nodelist) > 3:
assert nodelist[2][0] == token.LPAR
expr = self.com_call_function(funcname, nodelist[3])
else:
expr = funcname
return expr
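    # e.g. the decorator line "@pkg.mod.deco(arg)" comes out as
    # CallFunc(Getattr(Getattr(Name('pkg'), 'mod'), 'deco'), [Name('arg')])
    # (illustrative names; built via decorator_name and com_call_function)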
def decorators(self, nodelist):
# decorators: decorator ([NEWLINE] decorator)* NEWLINE
items = []
for dec_nodelist in nodelist:
assert dec_nodelist[0] == symbol.decorator
items.append(self.decorator(dec_nodelist[1:]))
return Decorators(items)
def decorated(self, nodelist):
assert nodelist[0][0] == symbol.decorators
if nodelist[1][0] == symbol.funcdef:
n = [nodelist[0]] + list(nodelist[1][1:])
return self.funcdef(n)
elif nodelist[1][0] == symbol.classdef:
decorators = self.decorators(nodelist[0][1:])
cls = self.classdef(nodelist[1][1:])
cls.decorators = decorators
return cls
raise WalkerError()
def funcdef(self, nodelist):
# -6 -5 -4 -3 -2 -1
# funcdef: [decorators] 'def' NAME parameters ':' suite
# parameters: '(' [varargslist] ')'
if len(nodelist) == 6:
assert nodelist[0][0] == symbol.decorators
decorators = self.decorators(nodelist[0][1:])
else:
assert len(nodelist) == 5
decorators = None
lineno = nodelist[-4][2]
name = nodelist[-4][1]
args = nodelist[-3][2]
if args[0] == symbol.varargslist:
names, defaults, flags = self.com_arglist(args[1:])
else:
names = defaults = ()
flags = 0
doc = self.get_docstring(nodelist[-1])
# code for function
code = self.com_node(nodelist[-1])
if doc is not None:
assert isinstance(code, Stmt)
assert isinstance(code.nodes[0], Discard)
del code.nodes[0]
return Function(decorators, name, names, defaults, flags, doc, code,
lineno=lineno)
def lambdef(self, nodelist):
# lambdef: 'lambda' [varargslist] ':' test
if nodelist[2][0] == symbol.varargslist:
names, defaults, flags = self.com_arglist(nodelist[2][1:])
else:
names = defaults = ()
flags = 0
# code for lambda
code = self.com_node(nodelist[-1])
return Lambda(names, defaults, flags, code, lineno=nodelist[1][2])
old_lambdef = lambdef
def classdef(self, nodelist):
# classdef: 'class' NAME ['(' [testlist] ')'] ':' suite
name = nodelist[1][1]
doc = self.get_docstring(nodelist[-1])
if nodelist[2][0] == token.COLON:
bases = []
elif nodelist[3][0] == token.RPAR:
bases = []
else:
bases = self.com_bases(nodelist[3])
# code for class
code = self.com_node(nodelist[-1])
if doc is not None:
assert isinstance(code, Stmt)
assert isinstance(code.nodes[0], Discard)
del code.nodes[0]
return Class(name, bases, doc, code, lineno=nodelist[1][2])
def stmt(self, nodelist):
return self.com_stmt(nodelist[0])
small_stmt = stmt
flow_stmt = stmt
compound_stmt = stmt
def simple_stmt(self, nodelist):
# small_stmt (';' small_stmt)* [';'] NEWLINE
stmts = []
for i in range(0, len(nodelist), 2):
self.com_append_stmt(stmts, nodelist[i])
return Stmt(stmts)
def parameters(self, nodelist):
raise WalkerError
def varargslist(self, nodelist):
raise WalkerError
def fpdef(self, nodelist):
raise WalkerError
def fplist(self, nodelist):
raise WalkerError
def dotted_name(self, nodelist):
raise WalkerError
def comp_op(self, nodelist):
raise WalkerError
def trailer(self, nodelist):
raise WalkerError
def sliceop(self, nodelist):
raise WalkerError
def argument(self, nodelist):
raise WalkerError
# --------------------------------------------------------------
#
# STATEMENT NODES (invoked by com_node())
#
def expr_stmt(self, nodelist):
# augassign testlist | testlist ('=' testlist)*
en = nodelist[-1]
exprNode = self.lookup_node(en)(en[1:])
if len(nodelist) == 1:
return Discard(exprNode, lineno=exprNode.lineno)
if nodelist[1][0] == token.EQUAL:
nodesl = []
for i in range(0, len(nodelist) - 2, 2):
nodesl.append(self.com_assign(nodelist[i], OP_ASSIGN))
return Assign(nodesl, exprNode, lineno=nodelist[1][2])
else:
lval = self.com_augassign(nodelist[0])
op = self.com_augassign_op(nodelist[1])
return AugAssign(lval, op[1], exprNode, lineno=op[2])
raise WalkerError, "can't get here"
def print_stmt(self, nodelist):
# print ([ test (',' test)* [','] ] | '>>' test [ (',' test)+ [','] ])
items = []
if len(nodelist) == 1:
start = 1
dest = None
elif nodelist[1][0] == token.RIGHTSHIFT:
assert len(nodelist) == 3 \
or nodelist[3][0] == token.COMMA
dest = self.com_node(nodelist[2])
start = 4
else:
dest = None
start = 1
for i in range(start, len(nodelist), 2):
items.append(self.com_node(nodelist[i]))
if nodelist[-1][0] == token.COMMA:
return Print(items, dest, lineno=nodelist[0][2])
return Printnl(items, dest, lineno=nodelist[0][2])
def del_stmt(self, nodelist):
return self.com_assign(nodelist[1], OP_DELETE)
def pass_stmt(self, nodelist):
return Pass(lineno=nodelist[0][2])
def break_stmt(self, nodelist):
return Break(lineno=nodelist[0][2])
def continue_stmt(self, nodelist):
return Continue(lineno=nodelist[0][2])
def return_stmt(self, nodelist):
# return: [testlist]
if len(nodelist) < 2:
return Return(Const(None), lineno=nodelist[0][2])
return Return(self.com_node(nodelist[1]), lineno=nodelist[0][2])
def yield_stmt(self, nodelist):
expr = self.com_node(nodelist[0])
return Discard(expr, lineno=expr.lineno)
def yield_expr(self, nodelist):
if len(nodelist) > 1:
value = self.com_node(nodelist[1])
else:
value = Const(None)
return Yield(value, lineno=nodelist[0][2])
def raise_stmt(self, nodelist):
# raise: [test [',' test [',' test]]]
if len(nodelist) > 5:
expr3 = self.com_node(nodelist[5])
else:
expr3 = None
if len(nodelist) > 3:
expr2 = self.com_node(nodelist[3])
else:
expr2 = None
if len(nodelist) > 1:
expr1 = self.com_node(nodelist[1])
else:
expr1 = None
return Raise(expr1, expr2, expr3, lineno=nodelist[0][2])
def import_stmt(self, nodelist):
# import_stmt: import_name | import_from
assert len(nodelist) == 1
return self.com_node(nodelist[0])
def import_name(self, nodelist):
# import_name: 'import' dotted_as_names
return Import(self.com_dotted_as_names(nodelist[1]),
lineno=nodelist[0][2])
def import_from(self, nodelist):
# import_from: 'from' ('.'* dotted_name | '.') 'import' ('*' |
# '(' import_as_names ')' | import_as_names)
assert nodelist[0][1] == 'from'
idx = 1
while nodelist[idx][1] == '.':
idx += 1
level = idx - 1
if nodelist[idx][0] == symbol.dotted_name:
fromname = self.com_dotted_name(nodelist[idx])
idx += 1
else:
fromname = ""
assert nodelist[idx][1] == 'import'
if nodelist[idx + 1][0] == token.STAR:
return From(fromname, [('*', None)], level,
lineno=nodelist[0][2])
else:
node = nodelist[idx + 1 + (nodelist[idx + 1][0] == token.LPAR)]
return From(fromname, self.com_import_as_names(node), level,
lineno=nodelist[0][2])
def global_stmt(self, nodelist):
# global: NAME (',' NAME)*
names = []
for i in range(1, len(nodelist), 2):
names.append(nodelist[i][1])
return Global(names, lineno=nodelist[0][2])
def exec_stmt(self, nodelist):
# exec_stmt: 'exec' expr ['in' expr [',' expr]]
expr1 = self.com_node(nodelist[1])
if len(nodelist) >= 4:
expr2 = self.com_node(nodelist[3])
if len(nodelist) >= 6:
expr3 = self.com_node(nodelist[5])
else:
expr3 = None
else:
expr2 = expr3 = None
return Exec(expr1, expr2, expr3, lineno=nodelist[0][2])
def assert_stmt(self, nodelist):
# 'assert': test, [',' test]
expr1 = self.com_node(nodelist[1])
if (len(nodelist) == 4):
expr2 = self.com_node(nodelist[3])
else:
expr2 = None
return Assert(expr1, expr2, lineno=nodelist[0][2])
def if_stmt(self, nodelist):
# if: test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
tests = []
for i in range(0, len(nodelist) - 3, 4):
testNode = self.com_node(nodelist[i + 1])
suiteNode = self.com_node(nodelist[i + 3])
tests.append((testNode, suiteNode))
if len(nodelist) % 4 == 3:
elseNode = self.com_node(nodelist[-1])
## elseNode.lineno = nodelist[-1][1][2]
else:
elseNode = None
return If(tests, elseNode, lineno=nodelist[0][2])
def while_stmt(self, nodelist):
# 'while' test ':' suite ['else' ':' suite]
testNode = self.com_node(nodelist[1])
bodyNode = self.com_node(nodelist[3])
if len(nodelist) > 4:
elseNode = self.com_node(nodelist[6])
else:
elseNode = None
return While(testNode, bodyNode, elseNode, lineno=nodelist[0][2])
def for_stmt(self, nodelist):
# 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
assignNode = self.com_assign(nodelist[1], OP_ASSIGN)
listNode = self.com_node(nodelist[3])
bodyNode = self.com_node(nodelist[5])
if len(nodelist) > 8:
elseNode = self.com_node(nodelist[8])
else:
elseNode = None
return For(assignNode, listNode, bodyNode, elseNode,
lineno=nodelist[0][2])
def try_stmt(self, nodelist):
return self.com_try_except_finally(nodelist)
def with_stmt(self, nodelist):
return self.com_with(nodelist)
def with_var(self, nodelist):
return self.com_with_var(nodelist)
def suite(self, nodelist):
# simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
if len(nodelist) == 1:
return self.com_stmt(nodelist[0])
stmts = []
for node in nodelist:
if node[0] == symbol.stmt:
self.com_append_stmt(stmts, node)
return Stmt(stmts)
# --------------------------------------------------------------
#
# EXPRESSION NODES (invoked by com_node())
#
def testlist(self, nodelist):
# testlist: expr (',' expr)* [',']
# testlist_safe: test [(',' test)+ [',']]
# exprlist: expr (',' expr)* [',']
return self.com_binary(Tuple, nodelist)
testlist_safe = testlist # XXX
testlist1 = testlist
exprlist = testlist
def testlist_comp(self, nodelist):
# test ( comp_for | (',' test)* [','] )
assert nodelist[0][0] == symbol.test
if len(nodelist) == 2 and nodelist[1][0] == symbol.comp_for:
test = self.com_node(nodelist[0])
return self.com_generator_expression(test, nodelist[1])
return self.testlist(nodelist)
def test(self, nodelist):
# or_test ['if' or_test 'else' test] | lambdef
if len(nodelist) == 1 and nodelist[0][0] == symbol.lambdef:
return self.lambdef(nodelist[0])
then = self.com_node(nodelist[0])
if len(nodelist) > 1:
assert len(nodelist) == 5
assert nodelist[1][1] == 'if'
assert nodelist[3][1] == 'else'
test = self.com_node(nodelist[2])
else_ = self.com_node(nodelist[4])
return IfExp(test, then, else_, lineno=nodelist[1][2])
return then
def or_test(self, nodelist):
# and_test ('or' and_test)* | lambdef
if len(nodelist) == 1 and nodelist[0][0] == symbol.lambdef:
return self.lambdef(nodelist[0])
return self.com_binary(Or, nodelist)
old_test = or_test
def and_test(self, nodelist):
# not_test ('and' not_test)*
return self.com_binary(And, nodelist)
def not_test(self, nodelist):
# 'not' not_test | comparison
result = self.com_node(nodelist[-1])
if len(nodelist) == 2:
return Not(result, lineno=nodelist[0][2])
return result
def comparison(self, nodelist):
# comparison: expr (comp_op expr)*
node = self.com_node(nodelist[0])
if len(nodelist) == 1:
return node
results = []
for i in range(2, len(nodelist), 2):
nl = nodelist[i-1]
            # comp_op: '<' | '>' | '==' | '>=' | '<=' | '<>' | '!='
            #          | 'in' | 'not' 'in' | 'is' | 'is' 'not'
n = nl[1]
if n[0] == token.NAME:
type = n[1]
if len(nl) == 3:
if type == 'not':
type = 'not in'
else:
type = 'is not'
else:
type = _cmp_types[n[0]]
lineno = nl[1][2]
results.append((type, self.com_node(nodelist[i])))
# we need a special "compare" node so that we can distinguish
# 3 < x < 5 from (3 < x) < 5
# the two have very different semantics and results (note that the
# latter form is always true)
return Compare(node, results, lineno=lineno)
def expr(self, nodelist):
# xor_expr ('|' xor_expr)*
return self.com_binary(Bitor, nodelist)
def xor_expr(self, nodelist):
        # and_expr ('^' and_expr)*
return self.com_binary(Bitxor, nodelist)
def and_expr(self, nodelist):
        # shift_expr ('&' shift_expr)*
return self.com_binary(Bitand, nodelist)
def shift_expr(self, nodelist):
        # arith_expr (('<<'|'>>') arith_expr)*
node = self.com_node(nodelist[0])
for i in range(2, len(nodelist), 2):
right = self.com_node(nodelist[i])
if nodelist[i-1][0] == token.LEFTSHIFT:
node = LeftShift([node, right], lineno=nodelist[1][2])
elif nodelist[i-1][0] == token.RIGHTSHIFT:
node = RightShift([node, right], lineno=nodelist[1][2])
else:
raise ValueError, "unexpected token: %s" % nodelist[i-1][0]
return node
def arith_expr(self, nodelist):
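        # term (('+'|'-') term)*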
node = self.com_node(nodelist[0])
for i in range(2, len(nodelist), 2):
right = self.com_node(nodelist[i])
if nodelist[i-1][0] == token.PLUS:
node = Add([node, right], lineno=nodelist[1][2])
elif nodelist[i-1][0] == token.MINUS:
node = Sub([node, right], lineno=nodelist[1][2])
else:
raise ValueError, "unexpected token: %s" % nodelist[i-1][0]
return node
def term(self, nodelist):
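        # factor (('*'|'/'|'%'|'//') factor)*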
node = self.com_node(nodelist[0])
for i in range(2, len(nodelist), 2):
right = self.com_node(nodelist[i])
t = nodelist[i-1][0]
if t == token.STAR:
node = Mul([node, right])
elif t == token.SLASH:
node = Div([node, right])
elif t == token.PERCENT:
node = Mod([node, right])
elif t == token.DOUBLESLASH:
node = FloorDiv([node, right])
else:
raise ValueError, "unexpected token: %s" % t
node.lineno = nodelist[1][2]
return node
def factor(self, nodelist):
elt = nodelist[0]
t = elt[0]
node = self.lookup_node(nodelist[-1])(nodelist[-1][1:])
# need to handle (unary op)constant here...
if t == token.PLUS:
return UnaryAdd(node, lineno=elt[2])
elif t == token.MINUS:
return UnarySub(node, lineno=elt[2])
elif t == token.TILDE:
node = Invert(node, lineno=elt[2])
return node
def power(self, nodelist):
# power: atom trailer* ('**' factor)*
node = self.com_node(nodelist[0])
for i in range(1, len(nodelist)):
elt = nodelist[i]
if elt[0] == token.DOUBLESTAR:
return Power([node, self.com_node(nodelist[i+1])],
lineno=elt[2])
node = self.com_apply_trailer(node, elt)
return node
def atom(self, nodelist):
return self._atom_dispatch[nodelist[0][0]](nodelist)
def atom_lpar(self, nodelist):
if nodelist[1][0] == token.RPAR:
return Tuple((), lineno=nodelist[0][2])
return self.com_node(nodelist[1])
def atom_lsqb(self, nodelist):
if nodelist[1][0] == token.RSQB:
return List((), lineno=nodelist[0][2])
return self.com_list_constructor(nodelist[1])
def atom_lbrace(self, nodelist):
if nodelist[1][0] == token.RBRACE:
return Dict((), lineno=nodelist[0][2])
return self.com_dictorsetmaker(nodelist[1])
def atom_backquote(self, nodelist):
return Backquote(self.com_node(nodelist[1]))
def atom_number(self, nodelist):
### need to verify this matches compile.c
k = eval(nodelist[0][1])
return Const(k, lineno=nodelist[0][2])
def decode_literal(self, lit):
if self.encoding:
# this is particularly fragile & a bit of a
# hack... changes in compile.c:parsestr and
# tokenizer.c must be reflected here.
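# Illustrative: the eval() below compiles the literal under a coding
# cookie, e.g. eval("# coding: iso-8859-1\n'caf\xe9'"), so the string
# constant is decoded the same way compile.c would decode it.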
if self.encoding not in ['utf-8', 'iso-8859-1']:
lit = unicode(lit, 'utf-8').encode(self.encoding)
return eval("# coding: %s\n%s" % (self.encoding, lit))
else:
return eval(lit)
def atom_string(self, nodelist):
k = ''
for node in nodelist:
k += self.decode_literal(node[1])
return Const(k, lineno=nodelist[0][2])
def atom_name(self, nodelist):
return Name(nodelist[0][1], lineno=nodelist[0][2])
# --------------------------------------------------------------
#
# INTERNAL PARSING UTILITIES
#
# The use of com_node() introduces a lot of extra stack frames,
# enough to cause a stack overflow compiling test.test_parser with
# the standard interpreter recursionlimit. The com_node() is a
# convenience function that hides the dispatch details, but comes
# at a very high cost. It is more efficient to dispatch directly
# in the callers. In these cases, use lookup_node() and call the
# dispatched node directly.
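# A sketch of the direct-dispatch pattern described above, assuming a
# child node tuple `child` (saves one Python stack frame per call):
#
#     handler = self.lookup_node(child)   # same table com_node() uses
#     result = handler(child[1:])         # equivalent to self.com_node(child)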
def lookup_node(self, node):
return self._dispatch[node[0]]
def com_node(self, node):
# Note: compile.c has handling in com_node for del_stmt, pass_stmt,
# break_stmt, stmt, small_stmt, flow_stmt, simple_stmt,
# and compound_stmt.
# We'll just dispatch them.
return self._dispatch[node[0]](node[1:])
def com_NEWLINE(self, *args):
# A ';' at the end of a line can make a NEWLINE token appear
# here. Render it harmless. (genc discards ('discard',
# ('const', xxxx)) nodes.)
return Discard(Const(None))
def com_arglist(self, nodelist):
# varargslist:
# (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] | '**' NAME)
# | fpdef ['=' test] (',' fpdef ['=' test])* [',']
# fpdef: NAME | '(' fplist ')'
# fplist: fpdef (',' fpdef)* [',']
names = []
defaults = []
flags = 0
i = 0
while i < len(nodelist):
node = nodelist[i]
if node[0] == token.STAR or node[0] == token.DOUBLESTAR:
if node[0] == token.STAR:
node = nodelist[i+1]
if node[0] == token.NAME:
names.append(node[1])
flags = flags | CO_VARARGS
i = i + 3
if i < len(nodelist):
# should be DOUBLESTAR
t = nodelist[i][0]
if t == token.DOUBLESTAR:
node = nodelist[i+1]
else:
raise ValueError, "unexpected token: %s" % t
names.append(node[1])
flags = flags | CO_VARKEYWORDS
break
# fpdef: NAME | '(' fplist ')'
names.append(self.com_fpdef(node))
i = i + 1
if i < len(nodelist) and nodelist[i][0] == token.EQUAL:
defaults.append(self.com_node(nodelist[i + 1]))
i = i + 2
elif len(defaults):
# we have already seen an argument with default, but here
# came one without
raise SyntaxError, "non-default argument follows default argument"
# skip the comma
i = i + 1
return names, defaults, flags
def com_fpdef(self, node):
# fpdef: NAME | '(' fplist ')'
if node[1][0] == token.LPAR:
return self.com_fplist(node[2])
return node[1][1]
def com_fplist(self, node):
# fplist: fpdef (',' fpdef)* [',']
if len(node) == 2:
return self.com_fpdef(node[1])
list = []
for i in range(1, len(node), 2):
list.append(self.com_fpdef(node[i]))
return tuple(list)
def com_dotted_name(self, node):
# String together the dotted names and return the string
name = ""
for n in node:
if isinstance(n, tuple) and n[0] == token.NAME:
name = name + n[1] + '.'
return name[:-1]
def com_dotted_as_name(self, node):
assert node[0] == symbol.dotted_as_name
node = node[1:]
dot = self.com_dotted_name(node[0][1:])
if len(node) == 1:
return dot, None
assert node[1][1] == 'as'
assert node[2][0] == token.NAME
return dot, node[2][1]
def com_dotted_as_names(self, node):
assert node[0] == symbol.dotted_as_names
node = node[1:]
names = [self.com_dotted_as_name(node[0])]
for i in range(2, len(node), 2):
names.append(self.com_dotted_as_name(node[i]))
return names
def com_import_as_name(self, node):
assert node[0] == symbol.import_as_name
node = node[1:]
assert node[0][0] == token.NAME
if len(node) == 1:
return node[0][1], None
assert node[1][1] == 'as', node
assert node[2][0] == token.NAME
return node[0][1], node[2][1]
def com_import_as_names(self, node):
assert node[0] == symbol.import_as_names
node = node[1:]
names = [self.com_import_as_name(node[0])]
for i in range(2, len(node), 2):
names.append(self.com_import_as_name(node[i]))
return names
def com_bases(self, node):
bases = []
for i in range(1, len(node), 2):
bases.append(self.com_node(node[i]))
return bases
def com_try_except_finally(self, nodelist):
# ('try' ':' suite
# ((except_clause ':' suite)+ ['else' ':' suite] ['finally' ':' suite]
# | 'finally' ':' suite))
if nodelist[3][0] == token.NAME:
# first clause is a finally clause: only try-finally
return TryFinally(self.com_node(nodelist[2]),
self.com_node(nodelist[5]),
lineno=nodelist[0][2])
#tryexcept: [TryNode, [except_clauses], elseNode)]
clauses = []
elseNode = None
finallyNode = None
for i in range(3, len(nodelist), 3):
node = nodelist[i]
if node[0] == symbol.except_clause:
# except_clause: 'except' [expr [(',' | 'as') expr]] */
if len(node) > 2:
expr1 = self.com_node(node[2])
if len(node) > 4:
expr2 = self.com_assign(node[4], OP_ASSIGN)
else:
expr2 = None
else:
expr1 = expr2 = None
clauses.append((expr1, expr2, self.com_node(nodelist[i+2])))
if node[0] == token.NAME:
if node[1] == 'else':
elseNode = self.com_node(nodelist[i+2])
elif node[1] == 'finally':
finallyNode = self.com_node(nodelist[i+2])
try_except = TryExcept(self.com_node(nodelist[2]), clauses, elseNode,
lineno=nodelist[0][2])
if finallyNode:
return TryFinally(try_except, finallyNode, lineno=nodelist[0][2])
else:
return try_except
def com_with(self, nodelist):
# with_stmt: 'with' with_item (',' with_item)* ':' suite
body = self.com_node(nodelist[-1])
for i in range(len(nodelist) - 3, 0, -2):
ret = self.com_with_item(nodelist[i], body, nodelist[0][2])
if i == 1:
return ret
body = ret
def com_with_item(self, nodelist, body, lineno):
# with_item: test ['as' expr]
if len(nodelist) == 4:
var = self.com_assign(nodelist[3], OP_ASSIGN)
else:
var = None
expr = self.com_node(nodelist[1])
return With(expr, var, body, lineno=lineno)
def com_augassign_op(self, node):
assert node[0] == symbol.augassign
return node[1]
def com_augassign(self, node):
"""Return node suitable for lvalue of augmented assignment
Names, slices, and attributes are the only allowable nodes.
"""
l = self.com_node(node)
if l.__class__ in (Name, Slice, Subscript, Getattr):
return l
raise SyntaxError, "can't assign to %s" % l.__class__.__name__
def com_assign(self, node, assigning):
# return a node suitable for use as an "lvalue"
# loop to avoid trivial recursion
while 1:
t = node[0]
if t in (symbol.exprlist, symbol.testlist, symbol.testlist_safe, symbol.testlist_comp):
if len(node) > 2:
return self.com_assign_tuple(node, assigning)
node = node[1]
elif t in _assign_types:
if len(node) > 2:
raise SyntaxError, "can't assign to operator"
node = node[1]
elif t == symbol.power:
if node[1][0] != symbol.atom:
raise SyntaxError, "can't assign to operator"
if len(node) > 2:
primary = self.com_node(node[1])
for i in range(2, len(node)-1):
ch = node[i]
if ch[0] == token.DOUBLESTAR:
raise SyntaxError, "can't assign to operator"
primary = self.com_apply_trailer(primary, ch)
return self.com_assign_trailer(primary, node[-1],
assigning)
node = node[1]
elif t == symbol.atom:
t = node[1][0]
if t == token.LPAR:
node = node[2]
if node[0] == token.RPAR:
raise SyntaxError, "can't assign to ()"
elif t == token.LSQB:
node = node[2]
if node[0] == token.RSQB:
raise SyntaxError, "can't assign to []"
return self.com_assign_list(node, assigning)
elif t == token.NAME:
return self.com_assign_name(node[1], assigning)
else:
raise SyntaxError, "can't assign to literal"
else:
raise SyntaxError, "bad assignment (%s)" % t
def com_assign_tuple(self, node, assigning):
assigns = []
for i in range(1, len(node), 2):
assigns.append(self.com_assign(node[i], assigning))
return AssTuple(assigns, lineno=extractLineNo(node))
def com_assign_list(self, node, assigning):
assigns = []
for i in range(1, len(node), 2):
if i + 1 < len(node):
if node[i + 1][0] == symbol.list_for:
raise SyntaxError, "can't assign to list comprehension"
assert node[i + 1][0] == token.COMMA, node[i + 1]
assigns.append(self.com_assign(node[i], assigning))
return AssList(assigns, lineno=extractLineNo(node))
def com_assign_name(self, node, assigning):
return AssName(node[1], assigning, lineno=node[2])
def com_assign_trailer(self, primary, node, assigning):
t = node[1][0]
if t == token.DOT:
return self.com_assign_attr(primary, node[2], assigning)
if t == token.LSQB:
return self.com_subscriptlist(primary, node[2], assigning)
if t == token.LPAR:
raise SyntaxError, "can't assign to function call"
raise SyntaxError, "unknown trailer type: %s" % t
def com_assign_attr(self, primary, node, assigning):
return AssAttr(primary, node[1], assigning, lineno=node[-1])
def com_binary(self, constructor, nodelist):
"Compile 'NODE (OP NODE)*' into (type, [ node1, ..., nodeN ])."
l = len(nodelist)
if l == 1:
n = nodelist[0]
return self.lookup_node(n)(n[1:])
items = []
for i in range(0, l, 2):
n = nodelist[i]
items.append(self.lookup_node(n)(n[1:]))
return constructor(items, lineno=extractLineNo(nodelist))
def com_stmt(self, node):
result = self.lookup_node(node)(node[1:])
assert result is not None
if isinstance(result, Stmt):
return result
return Stmt([result])
def com_append_stmt(self, stmts, node):
result = self.lookup_node(node)(node[1:])
assert result is not None
if isinstance(result, Stmt):
stmts.extend(result.nodes)
else:
stmts.append(result)
def com_list_constructor(self, nodelist):
# listmaker: test ( list_for | (',' test)* [','] )
values = []
for i in range(1, len(nodelist)):
if nodelist[i][0] == symbol.list_for:
assert len(nodelist[i:]) == 1
return self.com_list_comprehension(values[0],
nodelist[i])
elif nodelist[i][0] == token.COMMA:
continue
values.append(self.com_node(nodelist[i]))
return List(values, lineno=values[0].lineno)
def com_list_comprehension(self, expr, node):
return self.com_comprehension(expr, None, node, 'list')
def com_comprehension(self, expr1, expr2, node, type):
# list_iter: list_for | list_if
# list_for: 'for' exprlist 'in' testlist [list_iter]
# list_if: 'if' test [list_iter]
# XXX should raise SyntaxError for assignment
# XXX(avassalotti) Set and dict comprehensions should have generator
# semantics. In other words, they shouldn't leak
# variables outside of the comprehension's scope.
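# Illustrative shape (hypothetical input): '[x for y in z if y]'
# yields roughly
#     ListComp(Name('x'),
#              [ListCompFor(AssName('y', ...), Name('z'),
#                           [ListCompIf(Name('y'))])])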
lineno = node[1][2]
fors = []
while node:
t = node[1][1]
if t == 'for':
assignNode = self.com_assign(node[2], OP_ASSIGN)
compNode = self.com_node(node[4])
newfor = ListCompFor(assignNode, compNode, [])
newfor.lineno = node[1][2]
fors.append(newfor)
if len(node) == 5:
node = None
elif type == 'list':
node = self.com_list_iter(node[5])
else:
node = self.com_comp_iter(node[5])
elif t == 'if':
test = self.com_node(node[2])
newif = ListCompIf(test, lineno=node[1][2])
newfor.ifs.append(newif)
if len(node) == 3:
node = None
elif type == 'list':
node = self.com_list_iter(node[3])
else:
node = self.com_comp_iter(node[3])
else:
raise SyntaxError, \
("unexpected comprehension element: %s %d"
% (node, lineno))
if type == 'list':
return ListComp(expr1, fors, lineno=lineno)
elif type == 'set':
return SetComp(expr1, fors, lineno=lineno)
elif type == 'dict':
return DictComp(expr1, expr2, fors, lineno=lineno)
else:
raise ValueError("unexpected comprehension type: " + repr(type))
def com_list_iter(self, node):
assert node[0] == symbol.list_iter
return node[1]
def com_comp_iter(self, node):
assert node[0] == symbol.comp_iter
return node[1]
def com_generator_expression(self, expr, node):
# comp_iter: comp_for | comp_if
# comp_for: 'for' exprlist 'in' test [comp_iter]
# comp_if: 'if' test [comp_iter]
lineno = node[1][2]
fors = []
while node:
t = node[1][1]
if t == 'for':
assignNode = self.com_assign(node[2], OP_ASSIGN)
genNode = self.com_node(node[4])
newfor = GenExprFor(assignNode, genNode, [],
lineno=node[1][2])
fors.append(newfor)
if len(node) == 5:
node = None
else:
node = self.com_comp_iter(node[5])
elif t == 'if':
test = self.com_node(node[2])
newif = GenExprIf(test, lineno=node[1][2])
newfor.ifs.append(newif)
if len(node) == 3:
node = None
else:
node = self.com_comp_iter(node[3])
else:
raise SyntaxError, \
("unexpected generator expression element: %s %d"
% (node, lineno))
fors[0].is_outmost = True
return GenExpr(GenExprInner(expr, fors), lineno=lineno)
def com_dictorsetmaker(self, nodelist):
# dictorsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) |
# (test (comp_for | (',' test)* [','])) )
assert nodelist[0] == symbol.dictorsetmaker
nodelist = nodelist[1:]
if len(nodelist) == 1 or nodelist[1][0] == token.COMMA:
# set literal
items = []
for i in range(0, len(nodelist), 2):
items.append(self.com_node(nodelist[i]))
return Set(items, lineno=items[0].lineno)
elif nodelist[1][0] == symbol.comp_for:
# set comprehension
expr = self.com_node(nodelist[0])
return self.com_comprehension(expr, None, nodelist[1], 'set')
elif len(nodelist) > 3 and nodelist[3][0] == symbol.comp_for:
# dict comprehension
assert nodelist[1][0] == token.COLON
key = self.com_node(nodelist[0])
value = self.com_node(nodelist[2])
return self.com_comprehension(key, value, nodelist[3], 'dict')
else:
# dict literal
items = []
for i in range(0, len(nodelist), 4):
items.append((self.com_node(nodelist[i]),
self.com_node(nodelist[i+2])))
return Dict(items, lineno=items[0][0].lineno)
def com_apply_trailer(self, primaryNode, nodelist):
t = nodelist[1][0]
if t == token.LPAR:
return self.com_call_function(primaryNode, nodelist[2])
if t == token.DOT:
return self.com_select_member(primaryNode, nodelist[2])
if t == token.LSQB:
return self.com_subscriptlist(primaryNode, nodelist[2], OP_APPLY)
raise SyntaxError, 'unknown node type: %s' % t
def com_select_member(self, primaryNode, nodelist):
if nodelist[0] != token.NAME:
raise SyntaxError, "member must be a name"
return Getattr(primaryNode, nodelist[1], lineno=nodelist[2])
def com_call_function(self, primaryNode, nodelist):
if nodelist[0] == token.RPAR:
return CallFunc(primaryNode, [], lineno=extractLineNo(nodelist))
args = []
kw = 0
star_node = dstar_node = None
len_nodelist = len(nodelist)
i = 1
while i < len_nodelist:
node = nodelist[i]
if node[0]==token.STAR:
if star_node is not None:
raise SyntaxError, 'already have the varargs identifier'
star_node = self.com_node(nodelist[i+1])
i = i + 3
continue
elif node[0]==token.DOUBLESTAR:
if dstar_node is not None:
raise SyntaxError, 'already have the kwargs identifier'
dstar_node = self.com_node(nodelist[i+1])
i = i + 3
continue
# positional or named parameters
kw, result = self.com_argument(node, kw, star_node)
if len_nodelist != 2 and isinstance(result, GenExpr) \
and len(node) == 3 and node[2][0] == symbol.comp_for:
# allow f(x for x in y), but reject f(x for x in y, 1)
# should use f((x for x in y), 1) instead of f(x for x in y, 1)
raise SyntaxError, 'generator expression needs parenthesis'
args.append(result)
i = i + 2
return CallFunc(primaryNode, args, star_node, dstar_node,
lineno=extractLineNo(nodelist))
def com_argument(self, nodelist, kw, star_node):
if len(nodelist) == 3 and nodelist[2][0] == symbol.comp_for:
test = self.com_node(nodelist[1])
return 0, self.com_generator_expression(test, nodelist[2])
if len(nodelist) == 2:
if kw:
raise SyntaxError, "non-keyword arg after keyword arg"
if star_node:
raise SyntaxError, "only named arguments may follow *expression"
return 0, self.com_node(nodelist[1])
result = self.com_node(nodelist[3])
n = nodelist[1]
while len(n) == 2 and n[0] != token.NAME:
n = n[1]
if n[0] != token.NAME:
raise SyntaxError, "keyword can't be an expression (%s)"%n[0]
node = Keyword(n[1], result, lineno=n[2])
return 1, node
def com_subscriptlist(self, primary, nodelist, assigning):
# slicing: simple_slicing | extended_slicing
# simple_slicing: primary "[" short_slice "]"
# extended_slicing: primary "[" slice_list "]"
# slice_list: slice_item ("," slice_item)* [","]
# backwards compat slice for '[i:j]'
if len(nodelist) == 2:
sub = nodelist[1]
if (sub[1][0] == token.COLON or \
(len(sub) > 2 and sub[2][0] == token.COLON)) and \
sub[-1][0] != symbol.sliceop:
return self.com_slice(primary, sub, assigning)
subscripts = []
for i in range(1, len(nodelist), 2):
subscripts.append(self.com_subscript(nodelist[i]))
return Subscript(primary, assigning, subscripts,
lineno=extractLineNo(nodelist))
def com_subscript(self, node):
# slice_item: expression | proper_slice | ellipsis
ch = node[1]
t = ch[0]
if t == token.DOT and node[2][0] == token.DOT:
return Ellipsis()
if t == token.COLON or len(node) > 2:
return self.com_sliceobj(node)
return self.com_node(ch)
def com_sliceobj(self, node):
# proper_slice: short_slice | long_slice
# short_slice: [lower_bound] ":" [upper_bound]
# long_slice: short_slice ":" [stride]
# lower_bound: expression
# upper_bound: expression
# stride: expression
#
# Note: a stride may be further slicing...
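# Illustrative (hypothetical input): 'x[1::2]' yields
# Sliceobj([Const(1), Const(None), Const(2)]) -- the omitted upper
# bound is filled in with Const(None) below.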
items = []
if node[1][0] == token.COLON:
items.append(Const(None))
i = 2
else:
items.append(self.com_node(node[1]))
# i == 2 is a COLON
i = 3
if i < len(node) and node[i][0] == symbol.test:
items.append(self.com_node(node[i]))
i = i + 1
else:
items.append(Const(None))
# a short_slice has been built. look for long_slice now by looking
# for strides...
for j in range(i, len(node)):
ch = node[j]
if len(ch) == 2:
items.append(Const(None))
else:
items.append(self.com_node(ch[2]))
return Sliceobj(items, lineno=extractLineNo(node))
def com_slice(self, primary, node, assigning):
# short_slice: [lower_bound] ":" [upper_bound]
lower = upper = None
if len(node) == 3:
if node[1][0] == token.COLON:
upper = self.com_node(node[2])
else:
lower = self.com_node(node[1])
elif len(node) == 4:
lower = self.com_node(node[1])
upper = self.com_node(node[3])
return Slice(primary, assigning, lower, upper,
lineno=extractLineNo(node))
def get_docstring(self, node, n=None):
if n is None:
n = node[0]
node = node[1:]
if n == symbol.suite:
if len(node) == 1:
return self.get_docstring(node[0])
for sub in node:
if sub[0] == symbol.stmt:
return self.get_docstring(sub)
return None
if n == symbol.file_input:
for sub in node:
if sub[0] == symbol.stmt:
return self.get_docstring(sub)
return None
if n == symbol.atom:
if node[0][0] == token.STRING:
s = ''
for t in node:
s = s + eval(t[1])
return s
return None
if n == symbol.stmt or n == symbol.simple_stmt \
or n == symbol.small_stmt:
return self.get_docstring(node[0])
if n in _doc_nodes and len(node) == 1:
return self.get_docstring(node[0])
return None
_doc_nodes = [
symbol.expr_stmt,
symbol.testlist,
symbol.testlist_safe,
symbol.test,
symbol.or_test,
symbol.and_test,
symbol.not_test,
symbol.comparison,
symbol.expr,
symbol.xor_expr,
symbol.and_expr,
symbol.shift_expr,
symbol.arith_expr,
symbol.term,
symbol.factor,
symbol.power,
]
# comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '=='
# | 'in' | 'not' 'in' | 'is' | 'is' 'not'
_cmp_types = {
token.LESS : '<',
token.GREATER : '>',
token.EQEQUAL : '==',
token.EQUAL : '==',
token.LESSEQUAL : '<=',
token.GREATEREQUAL : '>=',
token.NOTEQUAL : '!=',
}
_legal_node_types = [
symbol.funcdef,
symbol.classdef,
symbol.stmt,
symbol.small_stmt,
symbol.flow_stmt,
symbol.simple_stmt,
symbol.compound_stmt,
symbol.expr_stmt,
symbol.print_stmt,
symbol.del_stmt,
symbol.pass_stmt,
symbol.break_stmt,
symbol.continue_stmt,
symbol.return_stmt,
symbol.raise_stmt,
symbol.import_stmt,
symbol.global_stmt,
symbol.exec_stmt,
symbol.assert_stmt,
symbol.if_stmt,
symbol.while_stmt,
symbol.for_stmt,
symbol.try_stmt,
symbol.with_stmt,
symbol.suite,
symbol.testlist,
symbol.testlist_safe,
symbol.test,
symbol.and_test,
symbol.not_test,
symbol.comparison,
symbol.exprlist,
symbol.expr,
symbol.xor_expr,
symbol.and_expr,
symbol.shift_expr,
symbol.arith_expr,
symbol.term,
symbol.factor,
symbol.power,
symbol.atom,
]
if hasattr(symbol, 'yield_stmt'):
_legal_node_types.append(symbol.yield_stmt)
if hasattr(symbol, 'yield_expr'):
_legal_node_types.append(symbol.yield_expr)
_assign_types = [
symbol.test,
symbol.or_test,
symbol.and_test,
symbol.not_test,
symbol.comparison,
symbol.expr,
symbol.xor_expr,
symbol.and_expr,
symbol.shift_expr,
symbol.arith_expr,
symbol.term,
symbol.factor,
]
_names = {}
for k, v in symbol.sym_name.items():
_names[k] = v
for k, v in token.tok_name.items():
_names[k] = v
def debug_tree(tree):
l = []
for elt in tree:
if isinstance(elt, int):
l.append(_names.get(elt, elt))
elif isinstance(elt, str):
l.append(elt)
else:
l.append(debug_tree(elt))
return l
| bsd-3-clause |
buuck/root | interpreter/cling/tools/Jupyter/kernel/clingkernel.py | 9 | 11312 | #!/usr/bin/env python
#------------------------------------------------------------------------------
# CLING - the C++ LLVM-based InterpreterG :)
# author: Min RK
# Copyright (c) Min RK
#
# This file is dual-licensed: you can choose to license it under the University
# of Illinois Open Source License or the GNU Lesser General Public License. See
# LICENSE.TXT for details.
#------------------------------------------------------------------------------
"""
Cling Kernel for Jupyter
Talks to Cling via ctypes
"""
from __future__ import print_function
__version__ = '0.0.2'
import ctypes
from contextlib import contextmanager
from fcntl import fcntl, F_GETFL, F_SETFL
import os
import shutil
import select
import struct
import sys
import threading
from traitlets import Unicode, Float, Dict, List, CaselessStrEnum
from ipykernel.kernelbase import Kernel
from ipykernel.kernelapp import kernel_aliases, kernel_flags, IPKernelApp
from ipykernel.ipkernel import IPythonKernel
from ipykernel.zmqshell import ZMQInteractiveShell
from IPython.core.profiledir import ProfileDir
from jupyter_client.session import Session
class my_void_p(ctypes.c_void_p):
pass
libc = ctypes.CDLL(None)
try:
c_stdout_p = ctypes.c_void_p.in_dll(libc, 'stdout')
c_stderr_p = ctypes.c_void_p.in_dll(libc, 'stderr')
except ValueError:
# libc's stdout has a funny name on OS X
c_stdout_p = ctypes.c_void_p.in_dll(libc, '__stdoutp')
c_stderr_p = ctypes.c_void_p.in_dll(libc, '__stderrp')
class ClingKernel(Kernel):
"""Cling Kernel for Jupyter"""
implementation = 'cling_kernel'
implementation_version = __version__
language_version = 'X'
banner = Unicode()
def _banner_default(self):
return 'cling-%s' % self.language_version
# codemirror_mode='clike' *should* work but doesn't, using the mimetype instead
language_info = {'name': 'c++',
'codemirror_mode': 'text/x-c++src',
'mimetype': 'text/x-c++src',
'file_extension': '.c++'}
flush_interval = Float(0.25, config=True)
std = CaselessStrEnum(default_value='c++11',
values = ['c++11', 'c++14', 'c++17'],
help="C++ standard to use, either c++17, c++14 or c++11").tag(config=True);
def __init__(self, **kwargs):
super(ClingKernel, self).__init__(**kwargs)
try:
whichCling = os.readlink(shutil.which('cling'))
except OSError as e:
#If cling is not a symlink try a regular file
#readlink returns POSIX error EINVAL (22) if the
#argument is not a symlink
if e.args[0] == 22:
whichCling = shutil.which('cling')
else:
raise e
except AttributeError:
from distutils.spawn import find_executable
whichCling = find_executable('cling')
if whichCling:
clingInstDir = os.path.dirname(os.path.dirname(whichCling))
llvmResourceDir = clingInstDir
else:
raise RuntimeError('Cannot find cling in $PATH. No cling, no fun.')
for ext in ['so', 'dylib', 'dll']:
libFilename = clingInstDir + "/lib/libclingJupyter." + ext
if os.access(libFilename, os.R_OK):
self.libclingJupyter = ctypes.CDLL(clingInstDir + "/lib/libclingJupyter." + ext,
mode = ctypes.RTLD_GLOBAL)
break
if not getattr(self, 'libclingJupyter', None):
raise RuntimeError('Cannot find ' + clingInstDir + '/lib/libclingJupyter.{so,dylib,dll}')
self.libclingJupyter.cling_create.restype = my_void_p
self.libclingJupyter.cling_eval.restype = my_void_p
#build -std=c++11 or -std=c++14 option
stdopt = ("-std=" + self.std).encode('utf-8')
self.log.info("Using {}".format(stdopt.decode('utf-8')))
#from IPython.utils import io
#io.rprint("DBG: Using {}".format(stdopt.decode('utf-8')))
strarr = ctypes.c_char_p*5
argv = strarr(b"clingJupyter",stdopt, b"-I" + clingInstDir.encode('utf-8') + b"/include/",b"",b"")
llvmResourceDirCP = ctypes.c_char_p(llvmResourceDir.encode('utf8'))
self.output_pipe, pipe_in = os.pipe()
self.interp = self.libclingJupyter.cling_create(5, argv, llvmResourceDirCP, pipe_in)
self.libclingJupyter.cling_complete_start.restype = my_void_p
self.libclingJupyter.cling_complete_next.restype = my_void_p #c_char_p
self.output_thread = threading.Thread(target=self.publish_pipe_output)
self.output_thread.daemon = True
self.output_thread.start()
def _recv_dict(self, pipe):
"""Receive a serialized dict on a pipe
Returns the dictionary.
"""
# Wire format:
# // Pipe sees (all numbers are longs, except for the first):
# // - num bytes in a long (sent as a single unsigned char!)
# // - num elements of the MIME dictionary; Jupyter selects one to display.
# // For each MIME dictionary element:
# // - length of MIME type key
# // - MIME type key
# // - size of MIME data buffer (including the terminating 0 for
# // 0-terminated strings)
# // - MIME data buffer
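# Hypothetical example of the wire format on a 64-bit platform, for a
# single {'text/plain': 'hi'} entry ('Q' = 8-byte unsigned long):
#
#     b'\x08'                  # sizeof(long), sent as one unsigned char
#     + struct.pack('Q', 1)    # one dictionary element
#     + struct.pack('Q', 10)   # len('text/plain')
#     + b'text/plain'
#     + struct.pack('Q', 3)    # len('hi') + terminating 0
#     + b'hi\x00'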
data = {}
b1 = os.read(pipe, 1)
sizeof_long = struct.unpack('B', b1)[0]
if sizeof_long == 8:
fmt = 'Q'
else:
fmt = 'L'
buf = os.read(pipe, sizeof_long)
num_elements = struct.unpack(fmt, buf)[0]
for i in range(num_elements):
buf = os.read(pipe, sizeof_long)
len_key = struct.unpack(fmt, buf)[0]
key = os.read(pipe, len_key).decode('utf8')
buf = os.read(pipe, sizeof_long)
len_value = struct.unpack(fmt, buf)[0]
value = os.read(pipe, len_value).decode('utf8')
data[key] = value
return data
def publish_pipe_output(self):
"""Watch output_pipe for display-data messages
and publish them on IOPub when they arrive
"""
while True:
select.select([self.output_pipe], [], [])
data = self._recv_dict(self.output_pipe)
self.session.send(self.iopub_socket, 'display_data',
content={
'data': data,
'metadata': {},
},
parent=self._parent_header,
)
@contextmanager
def forward_stream(self, name):
"""Capture stdout and forward it as stream messages"""
# create pipe for stdout
if name == 'stdout':
c_flush_p = c_stdout_p
elif name == 'stderr':
c_flush_p = c_stderr_p
else:
raise ValueError("Name must be stdout or stderr, not %r" % name)
real_fd = getattr(sys, '__%s__' % name).fileno()
save_fd = os.dup(real_fd)
pipe_out, pipe_in = os.pipe()
os.dup2(pipe_in, real_fd)
os.close(pipe_in)
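# (The dup/dup2 dance above swaps the pipe in at the *C level*, so
# output written through libc by cling itself -- not just Python's
# sys.stdout/sys.stderr -- ends up in pipe_out.)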
# make pipe_out non-blocking
flags = fcntl(pipe_out, F_GETFL)
fcntl(pipe_out, F_SETFL, flags|os.O_NONBLOCK)
def forwarder(pipe):
"""Forward bytes on a pipe to stream messages"""
while True:
r, w, x = select.select([pipe], [], [], self.flush_interval)
if not r:
# nothing to read, flush libc's stdout and check again
libc.fflush(c_flush_p)
continue
data = os.read(pipe, 1024)
if not data:
# pipe closed, we are done
break
# send output
self.session.send(self.iopub_socket, 'stream', {
'name': name,
'text': data.decode('utf8', 'replace'),
}, parent=self._parent_header)
t = threading.Thread(target=forwarder, args=(pipe_out,))
t.start()
try:
yield
finally:
# flush the pipe
libc.fflush(c_flush_p)
os.close(real_fd)
t.join()
# and restore original stdout
os.close(pipe_out)
os.dup2(save_fd, real_fd)
os.close(save_fd)
def run_cell(self, code, silent=False):
return self.libclingJupyter.cling_eval(self.interp, ctypes.c_char_p(code.encode('utf8')))
def do_execute(self, code, silent, store_history=True,
user_expressions=None, allow_stdin=False):
if not code.strip():
return {
'status': 'ok',
'execution_count': self.execution_count,
'payload': [],
'user_expressions': {},
}
status = 'ok'
with self.forward_stream('stdout'), self.forward_stream('stderr'):
stringResult = self.run_cell(code, silent)
if not stringResult:
status = 'error'
else:
self.session.send(
self.iopub_socket,
'execute_result',
content={
'data': {
'text/plain': ctypes.cast(stringResult, ctypes.c_char_p).value.decode('utf8', 'replace'),
},
'metadata': {},
'execution_count': self.execution_count,
},
parent=self._parent_header
)
self.libclingJupyter.cling_eval_free(stringResult)
reply = {
'status': status,
'execution_count': self.execution_count,
}
if status == 'error':
err = {
'ename': 'ename',
'evalue': 'evalue',
'traceback': [],
}
self.send_response(self.iopub_socket, 'error', err)
reply.update(err)
elif status == 'ok':
reply.update({
'THIS DOES NOT WORK: payload': [{
'source': 'set_next_input',
'replace': True,
'text':'//THIS IS MAGIC\n' + code
}],
'user_expressions': {},
})
else:
raise ValueError("Invalid status: %r" % status)
return reply
def do_complete(self, code, cursor_pos):
"""Provide completions here"""
# if cursor_pos = cursor_start = cursor_end,
# matches should be a list of strings to be appended after the cursor
return {'matches' : [],
'cursor_end' : cursor_pos,
'cursor_start' : cursor_pos,
'metadata' : {},
'status' : 'ok'}
cling_flags = kernel_flags
class ClingKernelApp(IPKernelApp):
name='cling-kernel'
cling_aliases = kernel_aliases.copy()
cling_aliases['std']='ClingKernel.std'
aliases = Dict(cling_aliases)
flags = Dict(cling_flags)
classes = List([ ClingKernel, IPythonKernel, ZMQInteractiveShell, ProfileDir, Session ])
kernel_class = ClingKernel
def main():
"""launch a cling kernel"""
ClingKernelApp.launch_instance()
if __name__ == '__main__':
main()
| lgpl-2.1 |
rds0751/colinkers | env/Lib/site-packages/django/conf/locale/pt_BR/formats.py | 504 | 1434 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
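# For example (illustrative): with the formats below, the date
# 2006-10-25 14:30 renders as '25 de Outubro de 2006' (DATE_FORMAT)
# and '25/10/2006 14:30' (SHORT_DATETIME_FORMAT).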
DATE_FORMAT = r'j \d\e F \d\e Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = r'j \d\e F \d\e Y à\s H:i'
YEAR_MONTH_FORMAT = r'F \d\e Y'
MONTH_DAY_FORMAT = r'j \d\e F'
SHORT_DATE_FORMAT = 'd/m/Y'
SHORT_DATETIME_FORMAT = 'd/m/Y H:i'
FIRST_DAY_OF_WEEK = 0 # Sunday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06'
# '%d de %b de %Y', '%d de %b, %Y', # '25 de Out de 2006', '25 Out, 2006'
# '%d de %B de %Y', '%d de %B, %Y', # '25 de Outubro de 2006', '25 de Outubro, 2006'
]
DATETIME_INPUT_FORMATS = [
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
'%d/%m/%Y %H:%M:%S.%f', # '25/10/2006 14:30:59.000200'
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
'%d/%m/%Y', # '25/10/2006'
'%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59'
'%d/%m/%y %H:%M:%S.%f', # '25/10/06 14:30:59.000200'
'%d/%m/%y %H:%M', # '25/10/06 14:30'
'%d/%m/%y', # '25/10/06'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| agpl-3.0 |
kangfend/django | django/contrib/staticfiles/management/commands/findstatic.py | 463 | 1745 | from __future__ import unicode_literals
import os
from django.contrib.staticfiles import finders
from django.core.management.base import LabelCommand
from django.utils.encoding import force_text
class Command(LabelCommand):
help = "Finds the absolute paths for the given static file(s)."
label = 'static file'
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument('--first', action='store_false', dest='all',
default=True,
help="Only return the first match for each static file.")
def handle_label(self, path, **options):
verbosity = options['verbosity']
result = finders.find(path, all=options['all'])
path = force_text(path)
if verbosity >= 2:
searched_locations = ("Looking in the following locations:\n %s" %
"\n ".join(force_text(location)
for location in finders.searched_locations))
else:
searched_locations = ''
if result:
if not isinstance(result, (list, tuple)):
result = [result]
result = (force_text(os.path.realpath(path)) for path in result)
if verbosity >= 1:
file_list = '\n '.join(result)
return ("Found '%s' here:\n %s\n%s" %
(path, file_list, searched_locations))
else:
return '\n'.join(result)
else:
message = ["No matching file found for '%s'." % path]
if verbosity >= 2:
message.append(searched_locations)
if verbosity >= 1:
self.stderr.write('\n'.join(message))
| bsd-3-clause |
cjmayo/xbmc | tools/EventClients/Clients/Kodi Send/kodi-send.py | 10 | 2592 | #!/usr/bin/python
#
# XBMC Media Center
# XBMC Send
# Copyright (c) 2009 team-xbmc
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import sys, os
import getopt
from socket import *
try:
from kodi.xbmcclient import *
except:
sys.path.append(os.path.join(os.path.realpath(os.path.dirname(__file__)), '../../lib/python'))
from xbmcclient import *
def usage():
print("kodi-send [OPTION] --action=ACTION")
print('Example')
print('\tkodi-send --host=192.168.0.1 --port=9777 --action="Quit"')
print("Options")
print("\t-?, --help\t\t\tWill bring up this message")
print("\t--host=HOST\t\t\tChoose what HOST to connect to (default=localhost)")
print("\t--port=PORT\t\t\tChoose what PORT to connect to (default=9777)")
print('\t--action=ACTION\t\t\tSends an action to Kodi, this option can be added multiple times to create a macro')
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], "?pa:v", ["help", "host=", "port=", "action="])
except getopt.GetoptError as err:
# print help information and exit:
print(str(err)) # will print something like "option -a not recognized"
usage()
sys.exit(2)
ip = "localhost"
port = 9777
actions = []
verbose = False
for o, a in opts:
if o in ("-?", "--help"):
usage()
sys.exit()
elif o == "--host":
ip = a
elif o == "--port":
port = int(a)
elif o in ("-a", "--action"):
actions.append(a)
else:
assert False, "unhandled option"
addr = (ip, port)
sock = socket(AF_INET, SOCK_DGRAM)
if not actions:
usage()
sys.exit(0)
for action in actions:
print('Sending action: %s' % action)
packet = PacketACTION(actionmessage=action, actiontype=ACTION_BUTTON)
packet.send(sock, addr)
if __name__=="__main__":
main()
| gpl-2.0 |
SanPen/GridCal | src/GridCal/Gui/TowerBuilder/LineBuilderDialogue.py | 1 | 9842 | # This file is part of GridCal.
#
# GridCal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GridCal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GridCal. If not, see <http://www.gnu.org/licenses/>.
import sys
from PySide2.QtWidgets import *
from GridCal.Gui.TowerBuilder.gui import *
from GridCal.Engine.Devices import *
from GridCal.Gui.TowerBuilder.tower_model import *
from GridCal.Gui.GuiFunctions import PandasModel
from GridCal.Gui.GeneralDialogues import LogsDialogue
class TowerBuilderGUI(QtWidgets.QDialog):
def __init__(self, parent=None, tower=None, wires_catalogue=list()):
"""
Constructor
Args:
parent:
"""
QtWidgets.QDialog.__init__(self, parent)
self.ui = Ui_Dialog()
self.ui.setupUi(self)
self.setWindowTitle('Line builder')
# 10:1
self.ui.main_splitter.setStretchFactor(0, 8)
self.ui.main_splitter.setStretchFactor(1, 2)
# create wire collection from the catalogue
self.wires_table = WiresTable(self)
for wire in wires_catalogue:
self.wires_table.add(wire)
# was there a tower passed? else create one
if tower is None:
self.tower_driver = TowerModel(self, edit_callback=self.plot)
else:
self.tower_driver = TowerModel(self, edit_callback=self.plot, tower=tower)
self.ui.name_lineEdit.setText(self.tower_driver.tower.name)
self.ui.rho_doubleSpinBox.setValue(self.tower_driver.tower.earth_resistivity)
# set models
self.ui.wires_tableView.setModel(self.wires_table)
self.ui.tower_tableView.setModel(self.tower_driver)
# button clicks
# self.ui.add_wire_pushButton.clicked.connect(self.add_wire_to_collection)
# self.ui.delete_wire_pushButton.clicked.connect(self.delete_wire_from_collection)
self.ui.add_to_tower_pushButton.clicked.connect(self.add_wire_to_tower)
self.ui.delete_from_tower_pushButton.clicked.connect(self.delete_wire_from_tower)
self.ui.compute_pushButton.clicked.connect(self.compute)
self.ui.name_lineEdit.textChanged.connect(self.name_changed)
def msg(self, text, title="Warning"):
"""
Message box
:param text: Text to display
:param title: Name of the window
"""
msg = QMessageBox()
msg.setIcon(QMessageBox.Information)
msg.setText(text)
# msg.setInformativeText("This is additional information")
msg.setWindowTitle(title)
# msg.setDetailedText("The details are as follows:")
msg.setStandardButtons(QMessageBox.Ok)
retval = msg.exec_()
def name_changed(self):
"""
Change name
:return:
"""
self.tower_driver.tower.tower_name = self.ui.name_lineEdit.text()
def add_wire_to_collection(self):
"""
Add new wire to collection
:return:
"""
name = 'Wire_' + str(len(self.wires_table.wires) + 1)
wire = Wire(name=name, gmr=0.01, r=0.01, x=0)
self.wires_table.add(wire)
def delete_wire_from_collection(self):
"""
Delete wire from the collection
:return:
"""
idx = self.ui.wires_tableView.currentIndex()
sel_idx = idx.row()
if sel_idx > -1:
# delete all the wires from the tower too
self.tower_driver.delete_by_name(self.wires_table.wires[sel_idx])
# delete from the catalogue
self.wires_table.delete(sel_idx)
self.plot()
else:
self.msg('Select a wire in the wires catalogue')
def add_wire_to_tower(self):
"""
Add wire to tower
:return:
"""
idx = self.ui.wires_tableView.currentIndex()
sel_idx = idx.row()
if sel_idx > -1:
selected_wire = self.wires_table.wires[sel_idx].copy()
self.tower_driver.add(WireInTower(selected_wire))
else:
self.msg('Select a wire in the wires catalogue')
def delete_wire_from_tower(self):
"""
Delete wire from the tower
:return:
"""
idx = self.ui.tower_tableView.currentIndex()
sel_idx = idx.row()
if sel_idx > -1:
self.tower_driver.delete(sel_idx)
self.plot()
else:
self.msg('Select a wire from the tower')
def compute(self):
"""
:return:
"""
self.tower_driver.tower.frequency = self.ui.frequency_doubleSpinBox.value()
self.tower_driver.tower.earth_resistivity = self.ui.rho_doubleSpinBox.value()
# check the wires configuration
logs = Logger()
all_ok = self.tower_driver.tower.check(logs)
if not all_ok:
logger_diag = LogsDialogue(name='Tower computation', logs=logs)
logger_diag.exec_()
else:
try:
# compute the matrices
self.tower_driver.tower.compute()
# Impedances in Ohm/km
cols = ['Phase' + str(i) for i in self.tower_driver.tower.z_phases_abcn]
z_df = pd.DataFrame(data=self.tower_driver.tower.z_abcn, columns=cols, index=cols)
self.ui.z_tableView_abcn.setModel(PandasModel(z_df))
cols = ['Phase' + str(i) for i in self.tower_driver.tower.z_phases_abc]
z_df = pd.DataFrame(data=self.tower_driver.tower.z_abc, columns=cols, index=cols)
self.ui.z_tableView_abc.setModel(PandasModel(z_df))
cols = ['Sequence ' + str(i) for i in range(3)]
z_df = pd.DataFrame(data=self.tower_driver.tower.z_seq, columns=cols, index=cols)
self.ui.z_tableView_seq.setModel(PandasModel(z_df))
# Admittances in uS/km
cols = ['Phase' + str(i) for i in self.tower_driver.tower.y_phases_abcn]
z_df = pd.DataFrame(data=self.tower_driver.tower.y_abcn * 1e6, columns=cols, index=cols)
self.ui.y_tableView_abcn.setModel(PandasModel(z_df))
cols = ['Phase' + str(i) for i in self.tower_driver.tower.y_phases_abc]
z_df = pd.DataFrame(data=self.tower_driver.tower.y_abc * 1e6, columns=cols, index=cols)
self.ui.y_tableView_abc.setModel(PandasModel(z_df))
cols = ['Sequence ' + str(i) for i in range(3)]
z_df = pd.DataFrame(data=self.tower_driver.tower.y_seq * 1e6, columns=cols, index=cols)
self.ui.y_tableView_seq.setModel(PandasModel(z_df))
# plot
self.plot()
except Exception:
exc_type, exc_value, exc_traceback = sys.exc_info()
self.msg(str(exc_traceback) + '\n' + str(exc_value), 'Tower calculation')
def plot(self):
"""
PLot the tower distribution
"""
self.ui.plotwidget.clear()
fig = self.ui.plotwidget.get_figure()
fig.set_facecolor('white')
ax = self.ui.plotwidget.get_axis()
self.tower_driver.tower.plot(ax=ax)
self.ui.plotwidget.redraw()
def example_1(self):
name = '4/0 6/1 ACSR'
r = 0.367851632 # ohm / km
x = 0 # ohm / km
gmr = 0.002481072 # m
wire = Wire(name=name, gmr=gmr, r=r, x=x)
self.tower_driver.add(WireInTower(wire=wire, xpos=0, ypos=8.8392, phase=0))
self.tower_driver.add(WireInTower(wire=wire, xpos=0.762, ypos=8.8392, phase=1))
self.tower_driver.add(WireInTower(wire=wire, xpos=2.1336, ypos=8.8392, phase=2))
self.tower_driver.add(WireInTower(wire=wire, xpos=1.2192, ypos=7.62, phase=3))
self.wires_table.add(wire)
def example_2(self):
name = '4/0 6/1 ACSR'
r = 0.367851632 # ohm / km
x = 0 # ohm / km
gmr = 0.002481072 # m
incx = 0.1
incy = 0.1
wire = Wire(name=name, gmr=gmr, r=r, x=x)
self.tower_driver.add(WireInTower(wire, xpos=0, ypos=8.8392, phase=1))
self.tower_driver.add(WireInTower(wire, xpos=0.762, ypos=8.8392, phase=2))
self.tower_driver.add(WireInTower(wire, xpos=2.1336, ypos=8.8392, phase=3))
self.tower_driver.add(WireInTower(wire, xpos=1.2192, ypos=7.62, phase=0))
self.tower_driver.add(WireInTower(wire, xpos=incx + 0, ypos=8.8392, phase=1))
self.tower_driver.add(WireInTower(wire, xpos=incx + 0.762, ypos=8.8392, phase=2))
self.tower_driver.add(WireInTower(wire, xpos=incx + 2.1336, ypos=8.8392, phase=3))
# self.tower.add(Wire(name, xpos=incx+1.2192, ypos=7.62, gmr=gmr, r=r, x=x, phase=0))
self.tower_driver.add(WireInTower(wire, xpos=incx / 2 + 0, ypos=incy + 8.8392, phase=1))
self.tower_driver.add(WireInTower(wire, xpos=incx / 2 + 0.762, ypos=incy + 8.8392, phase=2))
self.tower_driver.add(WireInTower(wire, xpos=incx / 2 + 2.1336, ypos=incy + 8.8392, phase=3))
# self.tower.add(Wire(name, xpos=incx/2 + 1.2192, ypos=incy+7.62, gmr=gmr, r=r, x=x, phase=0))
self.wires_table.add(wire)
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
window = TowerBuilderGUI()
window.example_2()
window.compute()
window.resize(int(1.618 * 700.0), 700)  # golden ratio
window.show()
sys.exit(app.exec_())
| gpl-3.0 |
rbalda/neural_ocr | env/lib/python2.7/site-packages/django/contrib/admin/sites.py | 120 | 21146 | from functools import update_wrapper
from django.apps import apps
from django.conf import settings
from django.contrib.admin import ModelAdmin, actions
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.core.urlresolvers import NoReverseMatch, reverse
from django.db.models.base import ModelBase
from django.http import Http404, HttpResponseRedirect
from django.template.engine import Engine
from django.template.response import TemplateResponse
from django.utils import six
from django.utils.text import capfirst
from django.utils.translation import ugettext as _, ugettext_lazy
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
system_check_errors = []
class AlreadyRegistered(Exception):
pass
class NotRegistered(Exception):
pass
class AdminSite(object):
"""
An AdminSite object encapsulates an instance of the Django admin application, ready
to be hooked in to your URLconf. Models are registered with the AdminSite using the
register() method, and the get_urls() method can then be used to access Django view
functions that present a full admin interface for the collection of registered
models.
"""
# Text to put at the end of each page's <title>.
site_title = ugettext_lazy('Django site admin')
# Text to put in each page's <h1>.
site_header = ugettext_lazy('Django administration')
# Text to put at the top of the admin index page.
index_title = ugettext_lazy('Site administration')
# URL for the "View site" link at the top of each admin page.
site_url = '/'
_empty_value_display = '-'
login_form = None
index_template = None
app_index_template = None
login_template = None
logout_template = None
password_change_template = None
password_change_done_template = None
def __init__(self, name='admin'):
self._registry = {} # model_class class -> admin_class instance
self.name = name
self._actions = {'delete_selected': actions.delete_selected}
self._global_actions = self._actions.copy()
def register(self, model_or_iterable, admin_class=None, **options):
"""
Registers the given model(s) with the given admin class.
The model(s) should be Model classes, not instances.
If an admin class isn't given, it will use ModelAdmin (the default
admin options). If keyword arguments are given -- e.g., list_display --
they'll be applied as options to the admin class.
If a model is already registered, this will raise AlreadyRegistered.
If a model is abstract, this will raise ImproperlyConfigured.
"""
if not admin_class:
admin_class = ModelAdmin
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model._meta.abstract:
raise ImproperlyConfigured('The model %s is abstract, so it '
'cannot be registered with admin.' % model.__name__)
if model in self._registry:
raise AlreadyRegistered('The model %s is already registered' % model.__name__)
# Ignore the registration if the model has been
# swapped out.
if not model._meta.swapped:
# If we got **options then dynamically construct a subclass of
# admin_class with those **options.
if options:
# For reasons I don't quite understand, without a __module__
# the created class appears to "live" in the wrong place,
# which causes issues later on.
options['__module__'] = __name__
admin_class = type("%sAdmin" % model.__name__, (admin_class,), options)
# Instantiate the admin class to save in the registry
admin_obj = admin_class(model, self)
if admin_class is not ModelAdmin and settings.DEBUG:
system_check_errors.extend(admin_obj.check())
self._registry[model] = admin_obj
def unregister(self, model_or_iterable):
"""
Unregisters the given model(s).
If a model isn't already registered, this will raise NotRegistered.
"""
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model not in self._registry:
raise NotRegistered('The model %s is not registered' % model.__name__)
del self._registry[model]
def is_registered(self, model):
"""
Check if a model class is registered with this `AdminSite`.
"""
return model in self._registry
def add_action(self, action, name=None):
"""
Register an action to be available globally.
"""
name = name or action.__name__
self._actions[name] = action
self._global_actions[name] = action
def disable_action(self, name):
"""
Disable a globally-registered action. Raises KeyError for invalid names.
"""
del self._actions[name]
def get_action(self, name):
"""
Explicitly get a registered global action whether it's enabled or
not. Raises KeyError for invalid names.
"""
return self._global_actions[name]
@property
def actions(self):
"""
Get all the enabled actions as an iterable of (name, func).
"""
return six.iteritems(self._actions)
@property
def empty_value_display(self):
return self._empty_value_display
@empty_value_display.setter
def empty_value_display(self, empty_value_display):
self._empty_value_display = empty_value_display
def has_permission(self, request):
"""
Returns True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
return request.user.is_active and request.user.is_staff
def check_dependencies(self):
"""
Check that all things needed to run the admin have been correctly installed.
The default implementation checks that admin and contenttypes apps are
installed, as well as the auth context processor.
"""
if not apps.is_installed('django.contrib.admin'):
raise ImproperlyConfigured(
"Put 'django.contrib.admin' in your INSTALLED_APPS "
"setting in order to use the admin application.")
if not apps.is_installed('django.contrib.contenttypes'):
raise ImproperlyConfigured(
"Put 'django.contrib.contenttypes' in your INSTALLED_APPS "
"setting in order to use the admin application.")
try:
default_template_engine = Engine.get_default()
except Exception:
# Skip this non-critical check:
# 1. if the user has a non-trivial TEMPLATES setting and Django
# can't find a default template engine
# 2. if anything goes wrong while loading template engines, in
# order to avoid raising an exception from a confusing location
# Catching ImproperlyConfigured suffices for 1. but 2. requires
# catching all exceptions.
pass
else:
if ('django.contrib.auth.context_processors.auth'
not in default_template_engine.context_processors):
raise ImproperlyConfigured(
"Enable 'django.contrib.auth.context_processors.auth' "
"in your TEMPLATES setting in order to use the admin "
"application.")
def admin_view(self, view, cacheable=False):
"""
Decorator to create an admin view attached to this ``AdminSite``. This
wraps the view and provides permission checking by calling
``self.has_permission``.
You'll want to use this from within ``AdminSite.get_urls()``:
class MyAdminSite(AdminSite):
def get_urls(self):
from django.conf.urls import url
urls = super(MyAdminSite, self).get_urls()
urls += [
url(r'^my_view/$', self.admin_view(some_view))
]
return urls
By default, admin_views are marked non-cacheable using the
``never_cache`` decorator. If the view can be safely cached, set
cacheable=True.
"""
def inner(request, *args, **kwargs):
if not self.has_permission(request):
if request.path == reverse('admin:logout', current_app=self.name):
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
# Inner import to prevent django.contrib.admin (app) from
# importing django.contrib.auth.models.User (unrelated model).
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(
request.get_full_path(),
reverse('admin:login', current_app=self.name)
)
return view(request, *args, **kwargs)
if not cacheable:
inner = never_cache(inner)
# We add csrf_protect here so this function can be used as a utility
# function for any view, without having to repeat 'csrf_protect'.
if not getattr(view, 'csrf_exempt', False):
inner = csrf_protect(inner)
return update_wrapper(inner, view)
def get_urls(self):
from django.conf.urls import url, include
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level,
# and django.contrib.contenttypes.views imports ContentType.
from django.contrib.contenttypes import views as contenttype_views
if settings.DEBUG:
self.check_dependencies()
def wrap(view, cacheable=False):
def wrapper(*args, **kwargs):
return self.admin_view(view, cacheable)(*args, **kwargs)
wrapper.admin_site = self
return update_wrapper(wrapper, view)
# Admin-site-wide views.
urlpatterns = [
url(r'^$', wrap(self.index), name='index'),
url(r'^login/$', self.login, name='login'),
url(r'^logout/$', wrap(self.logout), name='logout'),
url(r'^password_change/$', wrap(self.password_change, cacheable=True), name='password_change'),
url(r'^password_change/done/$', wrap(self.password_change_done, cacheable=True),
name='password_change_done'),
url(r'^jsi18n/$', wrap(self.i18n_javascript, cacheable=True), name='jsi18n'),
url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$', wrap(contenttype_views.shortcut),
name='view_on_site'),
]
# Add in each model's views, and create a list of valid URLS for the
# app_index
valid_app_labels = []
for model, model_admin in self._registry.items():
urlpatterns += [
url(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name), include(model_admin.urls)),
]
if model._meta.app_label not in valid_app_labels:
valid_app_labels.append(model._meta.app_label)
# If there were ModelAdmins registered, we should have a list of app
# labels for which we need to allow access to the app_index view,
if valid_app_labels:
regex = r'^(?P<app_label>' + '|'.join(valid_app_labels) + ')/$'
urlpatterns += [
url(regex, wrap(self.app_index), name='app_list'),
]
return urlpatterns
@property
def urls(self):
return self.get_urls(), 'admin', self.name
def each_context(self, request):
"""
Returns a dictionary of variables to put in the template context for
*every* page in the admin site.
"""
return {
'site_title': self.site_title,
'site_header': self.site_header,
'site_url': self.site_url,
'has_permission': self.has_permission(request),
'available_apps': self.get_app_list(request),
}
def password_change(self, request, extra_context=None):
"""
Handles the "change password" task -- both form display and validation.
"""
from django.contrib.admin.forms import AdminPasswordChangeForm
from django.contrib.auth.views import password_change
url = reverse('admin:password_change_done', current_app=self.name)
defaults = {
'password_change_form': AdminPasswordChangeForm,
'post_change_redirect': url,
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.password_change_template is not None:
defaults['template_name'] = self.password_change_template
request.current_app = self.name
return password_change(request, **defaults)
def password_change_done(self, request, extra_context=None):
"""
Displays the "success" page after a password change.
"""
from django.contrib.auth.views import password_change_done
defaults = {
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.password_change_done_template is not None:
defaults['template_name'] = self.password_change_done_template
request.current_app = self.name
return password_change_done(request, **defaults)
def i18n_javascript(self, request):
"""
Displays the i18n JavaScript that the Django admin requires.
This takes into account the USE_I18N setting. If it's set to False, the
generated JavaScript will be leaner and faster.
"""
if settings.USE_I18N:
from django.views.i18n import javascript_catalog
else:
from django.views.i18n import null_javascript_catalog as javascript_catalog
return javascript_catalog(request, packages=['django.conf', 'django.contrib.admin'])
@never_cache
def logout(self, request, extra_context=None):
"""
Logs out the user for the given HttpRequest.
This should *not* assume the user is already logged in.
"""
from django.contrib.auth.views import logout
defaults = {
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.logout_template is not None:
defaults['template_name'] = self.logout_template
request.current_app = self.name
return logout(request, **defaults)
@never_cache
def login(self, request, extra_context=None):
"""
Displays the login form for the given HttpRequest.
"""
if request.method == 'GET' and self.has_permission(request):
# Already logged-in, redirect to admin index
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
from django.contrib.auth.views import login
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level,
# and django.contrib.admin.forms eventually imports User.
from django.contrib.admin.forms import AdminAuthenticationForm
context = dict(self.each_context(request),
title=_('Log in'),
app_path=request.get_full_path(),
)
if (REDIRECT_FIELD_NAME not in request.GET and
REDIRECT_FIELD_NAME not in request.POST):
context[REDIRECT_FIELD_NAME] = reverse('admin:index', current_app=self.name)
context.update(extra_context or {})
defaults = {
'extra_context': context,
'authentication_form': self.login_form or AdminAuthenticationForm,
'template_name': self.login_template or 'admin/login.html',
}
request.current_app = self.name
return login(request, **defaults)
def _build_app_dict(self, request, label=None):
"""
Builds the app dictionary. Takes an optional label parameters to filter
models of a specific app.
"""
app_dict = {}
if label:
models = {
m: m_a for m, m_a in self._registry.items()
if m._meta.app_label == label
}
else:
models = self._registry
for model, model_admin in models.items():
app_label = model._meta.app_label
has_module_perms = model_admin.has_module_permission(request)
if not has_module_perms:
if label:
raise PermissionDenied
continue
perms = model_admin.get_model_perms(request)
# Check whether user has any perm for this module.
# If so, add the module to the model_list.
if True not in perms.values():
continue
info = (app_label, model._meta.model_name)
model_dict = {
'name': capfirst(model._meta.verbose_name_plural),
'object_name': model._meta.object_name,
'perms': perms,
}
if perms.get('change'):
try:
model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name)
except NoReverseMatch:
pass
if perms.get('add'):
try:
model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name)
except NoReverseMatch:
pass
if app_label in app_dict:
app_dict[app_label]['models'].append(model_dict)
else:
app_dict[app_label] = {
'name': apps.get_app_config(app_label).verbose_name,
'app_label': app_label,
'app_url': reverse(
'admin:app_list',
kwargs={'app_label': app_label},
current_app=self.name,
),
'has_module_perms': has_module_perms,
'models': [model_dict],
}
if label:
return app_dict.get(label)
return app_dict
def get_app_list(self, request):
"""
Returns a sorted list of all the installed apps that have been
registered in this site.
"""
app_dict = self._build_app_dict(request)
# Sort the apps alphabetically.
app_list = sorted(app_dict.values(), key=lambda x: x['name'].lower())
# Sort the models alphabetically within each app.
for app in app_list:
app['models'].sort(key=lambda x: x['name'])
return app_list
@never_cache
def index(self, request, extra_context=None):
"""
Displays the main admin index page, which lists all of the installed
apps that have been registered in this site.
"""
app_list = self.get_app_list(request)
context = dict(
self.each_context(request),
title=self.index_title,
app_list=app_list,
)
context.update(extra_context or {})
request.current_app = self.name
return TemplateResponse(request, self.index_template or
'admin/index.html', context)
def app_index(self, request, app_label, extra_context=None):
app_dict = self._build_app_dict(request, app_label)
if not app_dict:
raise Http404('The requested admin page does not exist.')
# Sort the models alphabetically within each app.
app_dict['models'].sort(key=lambda x: x['name'])
app_name = apps.get_app_config(app_label).verbose_name
context = dict(self.each_context(request),
title=_('%(app)s administration') % {'app': app_name},
app_list=[app_dict],
app_label=app_label,
)
context.update(extra_context or {})
request.current_app = self.name
return TemplateResponse(request, self.app_index_template or [
'admin/%s/app_index.html' % app_label,
'admin/app_index.html'
], context)
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
site = AdminSite()
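# Illustrative example (not part of the original module): a project can
# subclass AdminSite to swap in its own templates; the attributes below
# (login_template, index_template) are the ones consulted by login() and
# index() above. The names ProjectAdminSite/project_site are hypothetical.
class ProjectAdminSite(AdminSite):
    login_template = 'myproject/admin_login.html'
    index_template = 'myproject/admin_index.html'

project_site = ProjectAdminSite(name='project_admin')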
| mit |
yawd/django-sphinxdoc | sphinxdoc/management/commands/updatedoc.py | 1 | 4830 | # encoding: utf-8
"""
Management command for updating the documentation of one or more projects.
"""
import json
import optparse
import os
import os.path
import subprocess
from django.core.management import call_command
from django.core.management.base import BaseCommand, CommandError
from sphinxdoc.models import Project, Document
BUILDDIR = '_build'
EXTENSION = '.fjson'
SPECIAL_TITLES = {
'genindex': 'General Index',
'py-modindex': 'Module Index',
'np-modindex': 'Module Index',
'search': 'Search',
}
class Command(BaseCommand):
"""
Update (and optionally build) the *Sphinx* documentation for one or more
projects.
You need to pass the slug of at least one project. If you pass the optional
parameter ``-b``, the command ``sphinx-build`` will be run for each project
before their files are read. If your project(s) are located in a different
*virtualenv* than your django site, you can provide a path to its
interpreter with ``--virtualenv path/to/env/bin/``
"""
args = '[-b [--virtualenv <path/to/bin/>]] <project_slug project_slug ...>'
help = ('Updates the documentation and the search index for the specified '
'projects.')
option_list = BaseCommand.option_list + (
optparse.make_option('-b', '--build',
action='store_true',
dest='build',
default=False,
help='Run "sphinx-build" for each project before updating it.'),
optparse.make_option('--virtualenv',
dest='virtualenv',
default='',
help='Use this virtualenv to build project docs.',
)
)
def handle(self, *args, **options):
"""
Updates (and optionally builds) the documentation for all projects in
``args``.
"""
build = options['build']
virtualenv = options['virtualenv']
for slug in args:
try:
project = Project.objects.get(slug=slug)
except Project.DoesNotExist:
raise CommandError('Project "%s" does not exist' % slug)
if build:
print 'Running "sphinx-build" for "%s" ...' % slug
self.build(project, virtualenv)
print 'Deleting old entries from database ...'
self.delete_documents(project)
print 'Importing JSON files for "%s" ...' % slug
self.import_files(project)
print 'Updating search index for "%s" ...' % slug
self.update_haystack()
print 'Done'
def build(self, project, virtualenv=''):
"""
Runs ``sphinx-build`` for ``project``. You can also specify a path to
the bin-directory of a ``virtualenv``, if your project requires it.
"""
cmd = 'sphinx-build'
if virtualenv:
cmd = os.path.expanduser(os.path.join(virtualenv, cmd))
cmd = [
cmd,
'-b',
'json',
'-d',
os.path.join(project.path, BUILDDIR, 'doctrees'),
project.path,
os.path.join(project.path, BUILDDIR, 'json'),
]
print 'Executing %s' % ' '.join(cmd)
subprocess.call(cmd)
def delete_documents(self, project):
"""Deletes all documents for ``project``."""
Document.objects.filter(project=project).delete()
def import_files(self, project):
"""
Creates a :class:`~sphinxdoc.models.Document` instance for each JSON
file of ``project``.
"""
path = os.path.join(project.path, BUILDDIR, 'json')
for dirpath, dirnames, filenames in os.walk(path):
for name in filter(lambda x: x.endswith(EXTENSION), filenames):
# Full path to the json file
filepath = os.path.join(dirpath, name)
# Get path relative to the build dir w/o file extension
relpath = os.path.relpath(filepath, path)[:-len(EXTENSION)]
# Some files have no title or body attribute
with open(filepath, 'rb') as fp:
    doc = json.load(fp)
if 'title' not in doc:
page_name = os.path.basename(relpath)
doc['title'] = SPECIAL_TITLES[page_name]
if 'body' not in doc:
doc['body'] = ''
# Finally create the Document
d = Document(
project=project,
path=relpath,
content=json.dumps(doc),
title=doc['title'],
body=doc['body'],
)
d.full_clean()
d.save()
def update_haystack(self):
"""Updates Haystack’s search index."""
call_command('rebuild_index', interactive=False)
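# Example invocation (assumes a Project with slug "myproject" exists; the
# virtualenv path is hypothetical):
#
#     python manage.py updatedoc -b --virtualenv ~/venvs/docs/bin myproject
#
# With -b, sphinx-build runs first; the generated .fjson files are then
# imported and the Haystack index is rebuilt.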
| bsd-3-clause |
rasata/ansible | lib/ansible/plugins/connections/jail.py | 131 | 7291 | # Based on local.py (c) 2012, Michael DeHaan <[email protected]>
# and chroot.py (c) 2013, Maykel Moya <[email protected]>
# (c) 2013, Michael Scherer <[email protected]>
# (c) 2015, Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import distutils.spawn
import traceback
import os
import shlex
import subprocess
from ansible import errors
from ansible.utils.unicode import to_bytes
from ansible.callbacks import vvv
import ansible.constants as C
BUFSIZE = 65536
class Connection(object):
''' Local BSD Jail based connections '''
def _search_executable(self, executable):
cmd = distutils.spawn.find_executable(executable)
if not cmd:
raise errors.AnsibleError("%s command not found in PATH" % executable)
return cmd
def list_jails(self):
p = subprocess.Popen([self.jls_cmd, '-q', 'name'],
cwd=self.runner.basedir,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
return stdout.split()
def get_jail_path(self):
p = subprocess.Popen([self.jls_cmd, '-j', self.jail, '-q', 'path'],
cwd=self.runner.basedir,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
# remove \n
return stdout[:-1]
def __init__(self, runner, host, port, *args, **kwargs):
self.jail = host
self.runner = runner
self.host = host
self.has_pipelining = False
self.become_methods_supported=C.BECOME_METHODS
if os.geteuid() != 0:
raise errors.AnsibleError("jail connection requires running as root")
self.jls_cmd = self._search_executable('jls')
self.jexec_cmd = self._search_executable('jexec')
if self.jail not in self.list_jails():
raise errors.AnsibleError("incorrect jail name %s" % self.jail)
self.host = host
# port is unused, since this is local
self.port = port
def connect(self, port=None):
''' connect to the jail; nothing to do here '''
vvv("THIS IS A LOCAL JAIL DIR", host=self.jail)
return self
# Build the argv list that will be run inside the jail.
def _generate_cmd(self, executable, cmd):
if executable:
local_cmd = [self.jexec_cmd, self.jail, executable, '-c', cmd]
else:
# Prior to Python 2.7.3, shlex couldn't handle unicode strings
cmd = to_bytes(cmd)
cmd = shlex.split(cmd)
local_cmd = [self.jexec_cmd, self.jail]
local_cmd += cmd
return local_cmd
def _buffered_exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None, stdin=subprocess.PIPE):
''' run a command on the jail. This is only needed for implementing
put_file() and fetch_file() so that we don't have to read the whole file
into memory.
Compared to exec_command() it loses some niceties like being able to
return the process's exit code immediately.
'''
if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported:
raise errors.AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method)
if in_data:
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
# We enter the jail as root, so privilege escalation is ignored (this may need fixing if we ever have to become a specific user, e.g. a postgres admin).
local_cmd = self._generate_cmd(executable, cmd)
vvv("EXEC %s" % (local_cmd), host=self.jail)
p = subprocess.Popen(local_cmd, shell=False,
cwd=self.runner.basedir,
stdin=stdin,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p
def exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
''' run a command on the jail '''
p = self._buffered_exec_command(cmd, tmp_path, become_user, sudoable, executable, in_data)
stdout, stderr = p.communicate()
return (p.returncode, '', stdout, stderr)
def put_file(self, in_path, out_path):
''' transfer a file from local to jail '''
vvv("PUT %s TO %s" % (in_path, out_path), host=self.jail)
try:
with open(in_path, 'rb') as in_file:
try:
p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), None, stdin=in_file)
except OSError:
raise errors.AnsibleError("jail connection requires dd command in the jail")
try:
stdout, stderr = p.communicate()
except:
traceback.print_exc()
raise errors.AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
if p.returncode != 0:
raise errors.AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
except IOError:
raise errors.AnsibleError("file or module does not exist at: %s" % in_path)
def fetch_file(self, in_path, out_path):
''' fetch a file from jail to local '''
vvv("FETCH %s TO %s" % (in_path, out_path), host=self.jail)
try:
p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE), None)
except OSError:
raise errors.AnsibleError("jail connection requires dd command in the jail")
with open(out_path, 'wb+') as out_file:
try:
chunk = p.stdout.read(BUFSIZE)
while chunk:
out_file.write(chunk)
chunk = p.stdout.read(BUFSIZE)
except:
traceback.print_exc()
raise errors.AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
stdout, stderr = p.communicate()
if p.returncode != 0:
raise errors.AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
def close(self):
''' terminate the connection; nothing to do here '''
pass
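# Example (illustrative): with this plugin, the inventory host name is taken
# to be the jail name, so an ad-hoc run against an existing jail "webjail"
# (with Ansible itself running as root) might look like:
#
#     ansible webjail -c jail -m raw -a 'uname -a'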
| gpl-3.0 |
Phuehvk/gyp | test/mac/gyptest-depend-on-bundle.py | 34 | 1270 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that a dependency on a bundle causes the whole bundle to be built.
"""
import TestGyp
import sys
if sys.platform == 'darwin':
print "This test is currently disabled: https://crbug.com/483696."
sys.exit(0)
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
test.run_gyp('test.gyp', chdir='depend-on-bundle')
test.build('test.gyp', 'dependent_on_bundle', chdir='depend-on-bundle')
# Binary itself.
test.built_file_must_exist('dependent_on_bundle', chdir='depend-on-bundle')
# Bundle dependency.
test.built_file_must_exist(
'my_bundle.framework/Versions/A/my_bundle',
chdir='depend-on-bundle')
test.built_file_must_exist( # package_framework
'my_bundle.framework/my_bundle',
chdir='depend-on-bundle')
test.built_file_must_exist( # plist
'my_bundle.framework/Versions/A/Resources/Info.plist',
chdir='depend-on-bundle')
test.built_file_must_exist(
'my_bundle.framework/Versions/A/Resources/English.lproj/' # Resources
'InfoPlist.strings',
chdir='depend-on-bundle')
test.pass_test()
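# Example (assumed workflow): gyp tests like this one are normally run through
# the repository's gyptest.py harness, e.g.:
#
#     python gyptest.py test/mac/gyptest-depend-on-bundle.py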
| bsd-3-clause |
krintoxi/NoobSec-Toolkit | NoobSecToolkit - MAC OSX/tools/inject/thirdparty/colorama/initialise.py | 49 | 1222 | import atexit
import sys
from .ansitowin32 import AnsiToWin32
orig_stdout = sys.stdout
orig_stderr = sys.stderr
wrapped_stdout = sys.stdout
wrapped_stderr = sys.stderr
atexit_done = False
def reset_all():
AnsiToWin32(orig_stdout).reset_all()
def init(autoreset=False, convert=None, strip=None, wrap=True):
if not wrap and any([autoreset, convert, strip]):
raise ValueError('wrap=False conflicts with any other arg=True')
global wrapped_stdout, wrapped_stderr
sys.stdout = wrapped_stdout = \
wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
sys.stderr = wrapped_stderr = \
wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
global atexit_done
if not atexit_done:
atexit.register(reset_all)
atexit_done = True
def deinit():
sys.stdout = orig_stdout
sys.stderr = orig_stderr
def reinit():
sys.stdout = wrapped_stdout
sys.stderr = wrapped_stderr
def wrap_stream(stream, convert, strip, autoreset, wrap):
if wrap:
wrapper = AnsiToWin32(stream,
convert=convert, strip=strip, autoreset=autoreset)
if wrapper.should_wrap():
stream = wrapper.stream
return stream
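# Example (illustrative) of the intended call pattern, assuming the package's
# usual Fore constants are importable alongside these helpers:
#
#     from colorama import init, deinit, Fore
#     init(autoreset=True)
#     print(Fore.RED + 'error text')   # style is reset after each print
#     deinit()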
| gpl-2.0 |
vipmunot/Data-Analysis-using-Python | Exploratory Data Visualization/Multiple plots-216.py | 1 | 2691 | ## 1. Recap ##
import pandas as pd
import matplotlib.pyplot as plt
unrate = pd.read_csv('unrate.csv')
unrate['DATE'] = pd.to_datetime(unrate['DATE'])
plt.plot(unrate['DATE'].head(12),unrate['VALUE'].head(12))
plt.xticks(rotation=90)
plt.xlabel('Month')
plt.ylabel('Unemployment Rate')
plt.title('Monthly Unemployment Trends, 1948')
## 2. Matplotlib Classes ##
import matplotlib.pyplot as plt
fig = plt.figure()
ax1 = fig.add_subplot(2,1,1)
ax2 = fig.add_subplot(2,1,2)
plt.show()
## 4. Adding Data ##
fig = plt.figure()
ax1 = fig.add_subplot(2,1,1)
ax2 = fig.add_subplot(2,1,2)
ax1.plot(unrate['DATE'].head(12),unrate['VALUE'].head(12))
ax2.plot(unrate['DATE'].iloc[12:24],unrate['VALUE'].iloc[12:24])
plt.show()
## 5. Formatting And Spacing ##
fig = plt.figure(figsize=(12,6))
ax1 = fig.add_subplot(2,1,1)
ax2 = fig.add_subplot(2,1,2)
ax1.plot(unrate[0:12]['DATE'], unrate[0:12]['VALUE'])
ax1.set_title('Monthly Unemployment Rate, 1948')
ax2.plot(unrate[12:24]['DATE'], unrate[12:24]['VALUE'])
ax2.set_title('Monthly Unemployment Rate, 1949')
plt.show()
## 6. Comparing Across More Years ##
fig = plt.figure(figsize=(12,12))
x = [0,12,24,36,48]
y = [12,24,36,48,60]
for i in range(5):
ax = fig.add_subplot(5,1,(i+1))
ax.plot(unrate[x[i]:y[i]]['DATE'],unrate[x[i]:y[i]]['VALUE'])
plt.show()
## 7. Overlaying Line Charts ##
unrate['MONTH'] = unrate['DATE'].dt.month
fig = plt.figure(figsize=(6,3))
plt.plot(unrate[0:12]['MONTH'], unrate[0:12]['VALUE'],c='red')
plt.plot(unrate[12:24]['MONTH'], unrate[12:24]['VALUE'],c='blue')
plt.show()
## 8. Adding More Lines ##
fig = plt.figure(figsize=(10,6))
x = [0,12,24,36,48]
y = [12,24,36,48,60]
color = ['red','blue','green','orange','black']
for i in range(5):
plt.plot(unrate[x[i]:y[i]]['MONTH'],unrate[x[i]:y[i]]['VALUE'],c = color[i])
plt.show()
## 9. Adding A Legend ##
fig = plt.figure(figsize=(10,6))
colors = ['red', 'blue', 'green', 'orange', 'black']
for i in range(5):
start_index = i*12
end_index = (i+1)*12
label = str(1948 + i)
subset = unrate[start_index:end_index]
plt.plot(subset['MONTH'], subset['VALUE'], c=colors[i],label=label)
plt.legend(loc='upper left')
plt.show()
## 10. Final Tweaks ##
fig = plt.figure(figsize=(10,6))
colors = ['red', 'blue', 'green', 'orange', 'black']
for i in range(5):
start_index = i*12
end_index = (i+1)*12
subset = unrate[start_index:end_index]
label = str(1948 + i)
plt.plot(subset['MONTH'], subset['VALUE'], c=colors[i], label=label)
plt.legend(loc='upper left')
plt.title("Monthly Unemployment Trends, 1948-1952")
plt.xlabel('Month, Integer')
plt.ylabel('Unemployment Rate, Percent')
plt.show() | mit |
openstack/glance | glance/tests/unit/test_notifier.py | 1 | 34473 | # Copyright 2011 OpenStack Foundation
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from unittest import mock
import glance_store
from oslo_config import cfg
import oslo_messaging
import webob
import glance.async_
from glance.common import exception
from glance.common import timeutils
import glance.context
from glance import notifier
import glance.tests.unit.utils as unit_test_utils
from glance.tests import utils
DATETIME = datetime.datetime(2012, 5, 16, 15, 27, 36, 325355)
UUID1 = 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d'
USER1 = '54492ba0-f4df-4e4e-be62-27f4d76b29cf'
TENANT1 = '6838eb7b-6ded-434a-882c-b344c77fe8df'
TENANT2 = '2c014f32-55eb-467d-8fcb-4bd706012f81'
class ImageStub(glance.domain.Image):
def get_data(self, offset=0, chunk_size=None):
return ['01234', '56789']
def set_data(self, data, size, backend=None, set_active=True):
for chunk in data:
pass
class ImageRepoStub(object):
def remove(self, *args, **kwargs):
return 'image_from_get'
def save(self, *args, **kwargs):
return 'image_from_save'
def add(self, *args, **kwargs):
return 'image_from_add'
def get(self, *args, **kwargs):
return 'image_from_get'
def list(self, *args, **kwargs):
return ['images_from_list']
class ImageMemberRepoStub(object):
def remove(self, *args, **kwargs):
return 'image_member_from_remove'
def save(self, *args, **kwargs):
return 'image_member_from_save'
def add(self, *args, **kwargs):
return 'image_member_from_add'
def get(self, *args, **kwargs):
return 'image_member_from_get'
def list(self, *args, **kwargs):
return ['image_members_from_list']
class TaskStub(glance.domain.TaskStub):
def run(self, executor):
pass
class Task(glance.domain.Task):
def succeed(self, result):
pass
def fail(self, message):
pass
class TaskRepoStub(object):
def remove(self, *args, **kwargs):
return 'task_from_remove'
def save(self, *args, **kwargs):
return 'task_from_save'
def add(self, *args, **kwargs):
return 'task_from_add'
def get_task(self, *args, **kwargs):
return 'task_from_get'
def list(self, *args, **kwargs):
return ['tasks_from_list']
class TestNotifier(utils.BaseTestCase):
@mock.patch.object(oslo_messaging, 'Notifier')
@mock.patch.object(oslo_messaging, 'get_notification_transport')
def _test_load_strategy(self,
mock_get_transport, mock_notifier,
url, driver):
nfier = notifier.Notifier()
mock_get_transport.assert_called_with(cfg.CONF)
self.assertIsNotNone(nfier._transport)
mock_notifier.assert_called_with(nfier._transport,
publisher_id='image.localhost')
self.assertIsNotNone(nfier._notifier)
def test_notifier_load(self):
self._test_load_strategy(url=None, driver=None)
@mock.patch.object(oslo_messaging, 'set_transport_defaults')
def test_set_defaults(self, mock_set_trans_defaults):
notifier.set_defaults(control_exchange='foo')
mock_set_trans_defaults.assert_called_with('foo')
notifier.set_defaults()
mock_set_trans_defaults.assert_called_with('glance')
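# For orientation (reconstructed from the assertions in the tests below, not
# produced by this file): each entry captured by FakeNotifier has roughly the
# shape
#
#     {'notification_type': 'INFO',
#      'event_type': 'image.update',
#      'payload': {'id': '<image uuid>', ...}}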
class TestImageNotifications(utils.BaseTestCase):
"""Test Image Notifications work"""
def setUp(self):
super(TestImageNotifications, self).setUp()
self.image = ImageStub(
image_id=UUID1, name='image-1', status='active', size=1024,
created_at=DATETIME, updated_at=DATETIME, owner=TENANT1,
visibility='public', container_format='ami', virtual_size=2048,
tags=['one', 'two'], disk_format='ami', min_ram=128,
min_disk=10, checksum='ca425b88f047ce8ec45ee90e813ada91',
locations=['http://127.0.0.1'])
self.context = glance.context.RequestContext(tenant=TENANT2,
user=USER1)
self.image_repo_stub = ImageRepoStub()
self.notifier = unit_test_utils.FakeNotifier()
self.image_repo_proxy = glance.notifier.ImageRepoProxy(
self.image_repo_stub, self.context, self.notifier)
self.image_proxy = glance.notifier.ImageProxy(
self.image, self.context, self.notifier)
def test_image_save_notification(self):
self.image_repo_proxy.save(self.image_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.update', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
if 'location' in output_log['payload']:
self.fail('Notification contained location field.')
def test_image_save_notification_disabled(self):
self.config(disabled_notifications=["image.update"])
self.image_repo_proxy.save(self.image_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_add_notification(self):
self.image_repo_proxy.add(self.image_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.create', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
if 'location' in output_log['payload']:
self.fail('Notification contained location field.')
def test_image_add_notification_disabled(self):
self.config(disabled_notifications=["image.create"])
self.image_repo_proxy.add(self.image_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_delete_notification(self):
self.image_repo_proxy.remove(self.image_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.delete', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
self.assertTrue(output_log['payload']['deleted'])
if 'location' in output_log['payload']:
self.fail('Notification contained location field.')
def test_image_delete_notification_disabled(self):
self.config(disabled_notifications=['image.delete'])
self.image_repo_proxy.remove(self.image_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_get(self):
image = self.image_repo_proxy.get(UUID1)
self.assertIsInstance(image, glance.notifier.ImageProxy)
self.assertEqual('image_from_get', image.repo)
def test_image_list(self):
images = self.image_repo_proxy.list()
self.assertIsInstance(images[0], glance.notifier.ImageProxy)
self.assertEqual('images_from_list', images[0].repo)
def test_image_get_data_should_call_next_image_get_data(self):
with mock.patch.object(self.image, 'get_data') as get_data_mock:
self.image_proxy.get_data()
self.assertTrue(get_data_mock.called)
def test_image_get_data_notification(self):
self.image_proxy.size = 10
data = ''.join(self.image_proxy.get_data())
self.assertEqual('0123456789', data)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.send', output_log['event_type'])
self.assertEqual(self.image.image_id,
output_log['payload']['image_id'])
self.assertEqual(TENANT2, output_log['payload']['receiver_tenant_id'])
self.assertEqual(USER1, output_log['payload']['receiver_user_id'])
self.assertEqual(10, output_log['payload']['bytes_sent'])
self.assertEqual(TENANT1, output_log['payload']['owner_id'])
def test_image_get_data_notification_disabled(self):
self.config(disabled_notifications=['image.send'])
self.image_proxy.size = 10
data = ''.join(self.image_proxy.get_data())
self.assertEqual('0123456789', data)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_get_data_size_mismatch(self):
self.image_proxy.size = 11
list(self.image_proxy.get_data())
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.send', output_log['event_type'])
self.assertEqual(self.image.image_id,
output_log['payload']['image_id'])
def test_image_set_data_prepare_notification(self):
insurance = {'called': False}
def data_iterator():
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.prepare', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
self.assertEqual(['store1', 'store2'], output_log['payload'][
'os_glance_importing_to_stores'])
self.assertEqual([],
output_log['payload']['os_glance_failed_import'])
yield 'abcd'
yield 'efgh'
insurance['called'] = True
self.image_proxy.extra_properties[
'os_glance_importing_to_stores'] = 'store1,store2'
self.image_proxy.extra_properties['os_glance_failed_import'] = ''
self.image_proxy.set_data(data_iterator(), 8)
self.assertTrue(insurance['called'])
def test_image_set_data_prepare_notification_disabled(self):
insurance = {'called': False}
def data_iterator():
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
yield 'abcd'
yield 'efgh'
insurance['called'] = True
self.config(disabled_notifications=['image.prepare'])
self.image_proxy.set_data(data_iterator(), 8)
self.assertTrue(insurance['called'])
def test_image_set_data_upload_and_activate_notification(self):
image = ImageStub(image_id=UUID1, name='image-1', status='queued',
created_at=DATETIME, updated_at=DATETIME,
owner=TENANT1, visibility='public')
context = glance.context.RequestContext(tenant=TENANT2, user=USER1)
fake_notifier = unit_test_utils.FakeNotifier()
image_proxy = glance.notifier.ImageProxy(image, context, fake_notifier)
def data_iterator():
fake_notifier.log = []
yield 'abcde'
yield 'fghij'
image_proxy.extra_properties[
'os_glance_importing_to_stores'] = 'store2'
image_proxy.extra_properties[
'os_glance_importing_to_stores'] = 'store1,store2'
image_proxy.extra_properties['os_glance_failed_import'] = ''
image_proxy.set_data(data_iterator(), 10)
output_logs = fake_notifier.get_logs()
self.assertEqual(2, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
self.assertEqual(['store2'], output_log['payload'][
'os_glance_importing_to_stores'])
self.assertEqual([],
output_log['payload']['os_glance_failed_import'])
output_log = output_logs[1]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.activate', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
def test_image_set_data_upload_and_not_activate_notification(self):
insurance = {'called': False}
def data_iterator():
self.notifier.log = []
yield 'abcde'
yield 'fghij'
self.image_proxy.extra_properties[
'os_glance_importing_to_stores'] = 'store2'
insurance['called'] = True
self.image_proxy.set_data(data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertEqual(self.image.image_id, output_log['payload']['id'])
self.assertTrue(insurance['called'])
def test_image_set_data_upload_and_activate_notification_disabled(self):
insurance = {'called': False}
image = ImageStub(image_id=UUID1, name='image-1', status='queued',
created_at=DATETIME, updated_at=DATETIME,
owner=TENANT1, visibility='public')
context = glance.context.RequestContext(tenant=TENANT2, user=USER1)
fake_notifier = unit_test_utils.FakeNotifier()
image_proxy = glance.notifier.ImageProxy(image, context, fake_notifier)
def data_iterator():
fake_notifier.log = []
yield 'abcde'
yield 'fghij'
insurance['called'] = True
self.config(disabled_notifications=['image.activate', 'image.upload'])
image_proxy.set_data(data_iterator(), 10)
self.assertTrue(insurance['called'])
output_logs = fake_notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_set_data_storage_full(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise glance_store.StorageFull(message='Modern Major General')
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('Modern Major General', output_log['payload'])
def test_image_set_data_value_error(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise ValueError('value wrong')
self.assertRaises(webob.exc.HTTPBadRequest,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('value wrong', output_log['payload'])
def test_image_set_data_duplicate(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise exception.Duplicate('Cant have duplicates')
self.assertRaises(webob.exc.HTTPConflict,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('Cant have duplicates', output_log['payload'])
def test_image_set_data_storage_write_denied(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise glance_store.StorageWriteDenied(message='The Very Model')
self.assertRaises(webob.exc.HTTPServiceUnavailable,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('The Very Model', output_log['payload'])
def test_image_set_data_forbidden(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise exception.Forbidden('Not allowed')
self.assertRaises(webob.exc.HTTPForbidden,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('Not allowed', output_log['payload'])
def test_image_set_data_not_found(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise exception.NotFound('Not found')
self.assertRaises(webob.exc.HTTPNotFound,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('Not found', output_log['payload'])
def test_image_set_data_HTTP_error(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise webob.exc.HTTPError('Http issue')
self.assertRaises(webob.exc.HTTPError,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('Http issue', output_log['payload'])
def test_image_set_data_error(self):
def data_iterator():
self.notifier.log = []
yield 'abcde'
raise exception.GlanceException('Failed')
self.assertRaises(exception.GlanceException,
self.image_proxy.set_data, data_iterator(), 10)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('ERROR', output_log['notification_type'])
self.assertEqual('image.upload', output_log['event_type'])
self.assertIn('Failed', output_log['payload'])
class TestImageMemberNotifications(utils.BaseTestCase):
"""Test Image Member Notifications work"""
def setUp(self):
super(TestImageMemberNotifications, self).setUp()
self.context = glance.context.RequestContext(tenant=TENANT2,
user=USER1)
self.notifier = unit_test_utils.FakeNotifier()
self.image = ImageStub(
image_id=UUID1, name='image-1', status='active', size=1024,
created_at=DATETIME, updated_at=DATETIME, owner=TENANT1,
visibility='public', container_format='ami',
tags=['one', 'two'], disk_format='ami', min_ram=128,
min_disk=10, checksum='ca425b88f047ce8ec45ee90e813ada91',
locations=['http://127.0.0.1'])
self.image_member = glance.domain.ImageMembership(
id=1, image_id=UUID1, member_id=TENANT1, created_at=DATETIME,
updated_at=DATETIME, status='accepted')
self.image_member_repo_stub = ImageMemberRepoStub()
self.image_member_repo_proxy = glance.notifier.ImageMemberRepoProxy(
self.image_member_repo_stub, self.image,
self.context, self.notifier)
self.image_member_proxy = glance.notifier.ImageMemberProxy(
self.image_member, self.context, self.notifier)
def _assert_image_member_with_notifier(self, output_log, deleted=False):
self.assertEqual(self.image_member.member_id,
output_log['payload']['member_id'])
self.assertEqual(self.image_member.image_id,
output_log['payload']['image_id'])
self.assertEqual(self.image_member.status,
output_log['payload']['status'])
self.assertEqual(timeutils.isotime(self.image_member.created_at),
output_log['payload']['created_at'])
self.assertEqual(timeutils.isotime(self.image_member.updated_at),
output_log['payload']['updated_at'])
if deleted:
self.assertTrue(output_log['payload']['deleted'])
self.assertIsNotNone(output_log['payload']['deleted_at'])
else:
self.assertFalse(output_log['payload']['deleted'])
self.assertIsNone(output_log['payload']['deleted_at'])
def test_image_member_add_notification(self):
self.image_member_repo_proxy.add(self.image_member_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.member.create', output_log['event_type'])
self._assert_image_member_with_notifier(output_log)
def test_image_member_add_notification_disabled(self):
self.config(disabled_notifications=['image.member.create'])
self.image_member_repo_proxy.add(self.image_member_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_member_save_notification(self):
self.image_member_repo_proxy.save(self.image_member_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.member.update', output_log['event_type'])
self._assert_image_member_with_notifier(output_log)
def test_image_member_save_notification_disabled(self):
self.config(disabled_notifications=['image.member.update'])
self.image_member_repo_proxy.save(self.image_member_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_member_delete_notification(self):
self.image_member_repo_proxy.remove(self.image_member_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('image.member.delete', output_log['event_type'])
self._assert_image_member_with_notifier(output_log, deleted=True)
def test_image_member_delete_notification_disabled(self):
self.config(disabled_notifications=['image.member.delete'])
self.image_member_repo_proxy.remove(self.image_member_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_image_member_get(self):
image_member = self.image_member_repo_proxy.get(TENANT1)
self.assertIsInstance(image_member, glance.notifier.ImageMemberProxy)
self.assertEqual('image_member_from_get', image_member.repo)
def test_image_member_list(self):
image_members = self.image_member_repo_proxy.list()
self.assertIsInstance(image_members[0],
glance.notifier.ImageMemberProxy)
self.assertEqual('image_members_from_list', image_members[0].repo)
class TestTaskNotifications(utils.BaseTestCase):
"""Test Task Notifications work"""
def setUp(self):
super(TestTaskNotifications, self).setUp()
task_input = {"loc": "fake"}
self.task_stub = TaskStub(
task_id='aaa',
task_type='import',
status='pending',
owner=TENANT2,
expires_at=None,
created_at=DATETIME,
updated_at=DATETIME,
image_id='fake_image_id',
user_id='fake_user',
request_id='fake_request_id',
)
self.task = Task(
task_id='aaa',
task_type='import',
status='pending',
owner=TENANT2,
expires_at=None,
created_at=DATETIME,
updated_at=DATETIME,
task_input=task_input,
result='res',
message='blah',
image_id='fake_image_id',
user_id='fake_user',
request_id='fake_request_id',
)
self.context = glance.context.RequestContext(
tenant=TENANT2,
user=USER1
)
self.task_repo_stub = TaskRepoStub()
self.notifier = unit_test_utils.FakeNotifier()
self.task_repo_proxy = glance.notifier.TaskRepoProxy(
self.task_repo_stub,
self.context,
self.notifier
)
self.task_proxy = glance.notifier.TaskProxy(
self.task,
self.context,
self.notifier
)
self.task_stub_proxy = glance.notifier.TaskStubProxy(
self.task_stub,
self.context,
self.notifier
)
self.patcher = mock.patch.object(timeutils, 'utcnow')
mock_utcnow = self.patcher.start()
mock_utcnow.return_value = datetime.datetime.utcnow()
def tearDown(self):
super(TestTaskNotifications, self).tearDown()
self.patcher.stop()
def test_task_create_notification(self):
self.task_repo_proxy.add(self.task_stub_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('task.create', output_log['event_type'])
self.assertEqual(self.task.task_id, output_log['payload']['id'])
self.assertEqual(
timeutils.isotime(self.task.updated_at),
output_log['payload']['updated_at']
)
self.assertEqual(
timeutils.isotime(self.task.created_at),
output_log['payload']['created_at']
)
if 'location' in output_log['payload']:
self.fail('Notification contained location field.')
# Verify newly added fields 'image_id', 'user_id' and
# 'request_id' are not part of notification yet
self.assertTrue('image_id' not in output_log['payload'])
self.assertTrue('user_id' not in output_log['payload'])
self.assertTrue('request_id' not in output_log['payload'])
def test_task_create_notification_disabled(self):
self.config(disabled_notifications=['task.create'])
self.task_repo_proxy.add(self.task_stub_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_task_delete_notification(self):
now = timeutils.isotime()
self.task_repo_proxy.remove(self.task_stub_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('task.delete', output_log['event_type'])
self.assertEqual(self.task.task_id, output_log['payload']['id'])
self.assertEqual(
timeutils.isotime(self.task.updated_at),
output_log['payload']['updated_at']
)
self.assertEqual(
timeutils.isotime(self.task.created_at),
output_log['payload']['created_at']
)
self.assertEqual(
now,
output_log['payload']['deleted_at']
)
if 'location' in output_log['payload']:
self.fail('Notification contained location field.')
# Verify newly added fields 'image_id', 'user_id' and
# 'request_id' are not part of notification yet
self.assertTrue('image_id' not in output_log['payload'])
self.assertTrue('user_id' not in output_log['payload'])
self.assertTrue('request_id' not in output_log['payload'])
def test_task_delete_notification_disabled(self):
self.config(disabled_notifications=['task.delete'])
self.task_repo_proxy.remove(self.task_stub_proxy)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_task_run_notification(self):
with mock.patch('glance.async_.TaskExecutor') as mock_executor:
executor = mock_executor.return_value
executor._run.return_value = mock.Mock()
self.task_proxy.run(executor=mock_executor)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('task.run', output_log['event_type'])
self.assertEqual(self.task.task_id, output_log['payload']['id'])
self.assertFalse(
self.task.image_id in output_log['payload']
)
self.assertFalse(
self.task.user_id in output_log['payload']
)
self.assertFalse(
self.task.request_id in output_log['payload']
)
def test_task_run_notification_disabled(self):
self.config(disabled_notifications=['task.run'])
with mock.patch('glance.async_.TaskExecutor') as mock_executor:
executor = mock_executor.return_value
executor._run.return_value = mock.Mock()
self.task_proxy.run(executor=mock_executor)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_task_processing_notification(self):
self.task_proxy.begin_processing()
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('task.processing', output_log['event_type'])
self.assertEqual(self.task.task_id, output_log['payload']['id'])
# Verify newly added fields 'image_id', 'user_id' and
# 'request_id' are not part of notification yet
self.assertTrue('image_id' not in output_log['payload'])
self.assertTrue('user_id' not in output_log['payload'])
self.assertTrue('request_id' not in output_log['payload'])
def test_task_processing_notification_disabled(self):
self.config(disabled_notifications=['task.processing'])
self.task_proxy.begin_processing()
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_task_success_notification(self):
self.task_proxy.begin_processing()
self.task_proxy.succeed(result=None)
output_logs = self.notifier.get_logs()
self.assertEqual(2, len(output_logs))
output_log = output_logs[1]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('task.success', output_log['event_type'])
self.assertEqual(self.task.task_id, output_log['payload']['id'])
# Verify newly added fields 'image_id', 'user_id' and
# 'request_id' are not part of notification yet
self.assertTrue('image_id' not in output_log['payload'])
self.assertTrue('user_id' not in output_log['payload'])
self.assertTrue('request_id' not in output_log['payload'])
def test_task_success_notification_disabled(self):
self.config(disabled_notifications=['task.processing', 'task.success'])
self.task_proxy.begin_processing()
self.task_proxy.succeed(result=None)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
def test_task_failure_notification(self):
self.task_proxy.fail(message=None)
output_logs = self.notifier.get_logs()
self.assertEqual(1, len(output_logs))
output_log = output_logs[0]
self.assertEqual('INFO', output_log['notification_type'])
self.assertEqual('task.failure', output_log['event_type'])
self.assertEqual(self.task.task_id, output_log['payload']['id'])
# Verify newly added fields 'image_id', 'user_id' and
# 'request_id' are not part of notification yet
self.assertTrue('image_id' not in output_log['payload'])
self.assertTrue('user_id' not in output_log['payload'])
self.assertTrue('request_id' not in output_log['payload'])
def test_task_failure_notification_disabled(self):
self.config(disabled_notifications=['task.failure'])
self.task_proxy.fail(message=None)
output_logs = self.notifier.get_logs()
self.assertEqual(0, len(output_logs))
| apache-2.0 |
ramadhane/odoo | addons/decimal_precision/__openerp__.py | 261 | 1628 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Decimal Precision Configuration',
'description': """
Configure the price accuracy you need for different kinds of usage: accounting, sales, purchases.
=================================================================================================
The decimal precision is configured per company.
""",
'author': 'OpenERP SA',
'version': '0.1',
'depends': ['base'],
'category' : 'Hidden/Dependency',
'data': [
'decimal_precision_view.xml',
'security/ir.model.access.csv',
],
'demo': [],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
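# Example (illustrative): other modules typically consume these precision
# settings through the decimal_precision helper, e.g. in a field definition:
#
#     import openerp.addons.decimal_precision as dp
#     'list_price': fields.float('Price', digits_compute=dp.get_precision('Product Price')),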
| agpl-3.0 |
serverdensity/sd-agent-core-plugins | gunicorn/check.py | 1 | 4768 | # (C) Datadog, Inc. 2010-2017
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
"""
Collects metrics from the gunicorn web server.
http://gunicorn.org/
"""
# stdlib
import time
# 3rd party
import psutil
# project
from checks import AgentCheck
class GUnicornCheck(AgentCheck):
# Config
PROC_NAME = 'proc_name'
# Number of seconds to sleep between cpu time checks.
CPU_SLEEP_SECS = 0.1
# Worker state tags.
IDLE_TAGS = ["state:idle"]
WORKING_TAGS = ["state:working"]
SVC_NAME = "gunicorn.is_running"
def get_library_versions(self):
return {"psutil": psutil.__version__}
def check(self, instance):
""" Collect metrics for the given gunicorn instance. """
self.log.debug("Running instance: %s", instance)
# Validate the config.
if not instance or self.PROC_NAME not in instance:
raise GUnicornCheckError("instance must specify: %s" % self.PROC_NAME)
# Find the gunicorn master process.
proc_name = instance.get(self.PROC_NAME)
master_proc = self._get_master_proc_by_name(proc_name)
# Fetch the worker procs and count their states.
worker_procs = master_proc.children()
working, idle = self._count_workers(worker_procs)
# if no workers are running, alert CRITICAL, otherwise OK
msg = "%s working and %s idle workers for %s" % (working, idle, proc_name)
status = AgentCheck.CRITICAL if working == 0 and idle == 0 else AgentCheck.OK
tags = ['app:' + proc_name]
self.service_check(self.SVC_NAME, status, tags=tags, message=msg)
# Submit the data.
self.log.debug("instance %s procs - working:%s idle:%s" % (proc_name, working, idle))
self.gauge("gunicorn.workers", working, tags + self.WORKING_TAGS)
self.gauge("gunicorn.workers", idle, tags + self.IDLE_TAGS)
def _count_workers(self, worker_procs):
working = 0
idle = 0
if not worker_procs:
return working, idle
# Count how much sleep time is used by the workers.
cpu_time_by_pid = {}
for proc in worker_procs:
# cpu time is the sum of user + system time.
try:
cpu_time_by_pid[proc.pid] = sum(proc.cpu_times())
except psutil.NoSuchProcess:
self.warning('Process %s disappeared while scanning' % proc.pid)
continue
# Let them do a little bit more work.
time.sleep(self.CPU_SLEEP_SECS)
# Processes which have used more CPU are considered active (this is a very
# naive check, but gunicorn exposes no stats API)
for proc in worker_procs:
if proc.pid not in cpu_time_by_pid:
# The process is not running anymore, we didn't collect initial cpu times
continue
try:
cpu_time = sum(proc.cpu_times())
except Exception:
# couldn't collect cpu time. assume it's dead.
self.log.debug("Couldn't collect cpu time for %s" % proc)
continue
if cpu_time == cpu_time_by_pid[proc.pid]:
idle += 1
else:
working += 1
return working, idle
def _get_master_proc_by_name(self, name):
""" Return a psutil process for the master gunicorn process with the given name. """
master_name = GUnicornCheck._get_master_proc_name(name)
master_procs = [p for p in psutil.process_iter() if p.cmdline() and p.cmdline()[0] == master_name]
if len(master_procs) == 0:
# process not found, it's dead.
self.service_check(self.SVC_NAME, AgentCheck.CRITICAL, tags=['app:' + name],
message="No gunicorn process with name %s found" % name)
raise GUnicornCheckError("Found no master process with name: %s" % master_name)
elif len(master_procs) > 1:
raise GUnicornCheckError("Found more than one master process with name: %s" % master_name)
else:
return master_procs[0]
@staticmethod
def _get_master_proc_name(name):
""" Return the name of the master gunicorn process for the given proc name. """
# Here's an example of a process list for a gunicorn box with name web1
# root 22976 0.1 0.1 60364 13424 ? Ss 19:30 0:00 gunicorn: master [web1]
# web 22984 20.7 2.3 521924 176136 ? Sl 19:30 1:58 gunicorn: worker [web1]
# web 22985 26.4 6.1 795288 449596 ? Sl 19:30 2:32 gunicorn: worker [web1]
return "gunicorn: master [%s]" % name
class GUnicornCheckError(Exception):
pass
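# Example agent configuration for this check (YAML sketch; proc_name must
# match the name embedded in gunicorn's master process title, e.g.
# "gunicorn: master [web1]"):
#
#     init_config:
#
#     instances:
#       - proc_name: web1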
| bsd-3-clause |
stahnma/rpmdistro-gitoverlay | rdgo/utils.py | 2 | 2141 | #!/usr/bin/env python
#
# Copyright (C) 2015 Colin Walters <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import sys
import stat
import shutil
import errno
import subprocess
import os
from gi.repository import GLib, Gio
def fatal(msg):
print >>sys.stderr, msg
sys.exit(1)
def log(msg):
"Print to standard output and flush it"
sys.stdout.write(msg)
sys.stdout.write('\n')
sys.stdout.flush()
def run_sync(args, **kwargs):
"""Wraps subprocess.check_call(), logging the command line too."""
if isinstance(args, str) or isinstance(args, unicode):
argstr = args
else:
argstr = subprocess.list2cmdline(args)
log("Running: {0}".format(argstr))
subprocess.check_call(args, **kwargs)
def rmrf(path):
try:
stbuf = os.lstat(path)
except OSError:
    return
if stat.S_ISDIR(stbuf.st_mode):
shutil.rmtree(path)
else:
try:
os.unlink(path)
except OSError as e:
if e.errno != errno.ENOENT:
raise
def hardlink_or_copy(src, dest):
try:
os.link(src, dest)
except OSError as e:
if e.errno != errno.EXDEV:
raise
shutil.copy(src, dest)
def ensuredir(path, with_parents=False):
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def ensure_clean_dir(path):
rmrf(path)
ensuredir(path)
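# Example (illustrative) of combining these helpers in a build-prep step;
# the paths below are hypothetical:
#
#     ensure_clean_dir('_build/tmp')
#     hardlink_or_copy('sources/foo.spec', '_build/tmp/foo.spec')
#     run_sync(['rpmbuild', '-bs', '_build/tmp/foo.spec'])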
| lgpl-2.1 |
d40223223/2015cdbg6team0622 | static/Brython3.1.1-20150328-091302/Lib/heapq.py | 628 | 18065 | """Heap queue algorithm (a.k.a. priority queue).
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
all k, counting elements from 0. For the sake of comparison,
non-existing elements are considered to be infinite. The interesting
property of a heap is that a[0] is always its smallest element.
Usage:
heap = [] # creates an empty heap
heappush(heap, item) # pushes a new item on the heap
item = heappop(heap) # pops the smallest item from the heap
item = heap[0] # smallest item on the heap without popping it
heapify(x) # transforms list into a heap, in-place, in linear time
item = heapreplace(heap, item) # pops and returns smallest item, and adds
# new item; the heap size is unchanged
Our API differs from textbook heap algorithms as follows:
- We use 0-based indexing. This makes the relationship between the
index for a node and the indexes for its children slightly less
obvious, but is more suitable since Python uses 0-based indexing.
- Our heappop() method returns the smallest item, not the largest.
These two make it possible to view the heap as a regular Python list
without surprises: heap[0] is the smallest item, and heap.sort()
maintains the heap invariant!
"""
# Original code by Kevin O'Connor, augmented by Tim Peters and Raymond Hettinger
__about__ = """Heap queues
[explanation by François Pinard]
Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
all k, counting elements from 0. For the sake of comparison,
non-existing elements are considered to be infinite. The interesting
property of a heap is that a[0] is always its smallest element.
The strange invariant above is meant to be an efficient memory
representation for a tournament. The numbers below are `k', not a[k]:
0
1 2
3 4 5 6
7 8 9 10 11 12 13 14
15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
In the tree above, each cell `k' is topping `2*k+1' and `2*k+2'. In
the usual binary tournament we see in sports, each cell is the winner
over the two cells it tops, and we can trace the winner down the tree
to see all opponents s/he had. However, in many computer applications
of such tournaments, we do not need to trace the history of a winner.
To be more memory efficient, when a winner is promoted, we try to
replace it by something else at a lower level, and the rule becomes
that a cell and the two cells it tops contain three different items,
but the top cell "wins" over the two topped cells.
If this heap invariant is protected at all time, index 0 is clearly
the overall winner. The simplest algorithmic way to remove it and
find the "next" winner is to move some loser (let's say cell 30 in the
diagram above) into the 0 position, and then percolate this new 0 down
the tree, exchanging values, until the invariant is re-established.
This is clearly logarithmic on the total number of items in the tree.
By iterating over all items, you get an O(n ln n) sort.
A nice feature of this sort is that you can efficiently insert new
items while the sort is going on, provided that the inserted items are
not "better" than the last 0'th element you extracted. This is
especially useful in simulation contexts, where the tree holds all
incoming events, and the "win" condition means the smallest scheduled
time. When an event schedules other events for execution, they are
scheduled into the future, so they can easily go into the heap. So, a
heap is a good structure for implementing schedulers (this is what I
used for my MIDI sequencer :-).
Various structures for implementing schedulers have been extensively
studied, and heaps are good for this, as they are reasonably speedy,
the speed is almost constant, and the worst case is not much different
from the average case. However, there are other representations which
are more efficient overall, yet the worst cases might be terrible.
Heaps are also very useful in big disk sorts. You most probably all
know that a big sort implies producing "runs" (which are pre-sorted
sequences, whose size is usually related to the amount of CPU memory),
followed by merging passes for these runs, a merging that is often
very cleverly organised[1]. It is very important that the initial
sort produces the longest runs possible. Tournaments are a good way to
achieve that. If, using all the memory available to hold a tournament, you
replace and percolate items that happen to fit the current run, you'll
produce runs which are twice the size of the memory for random input,
and much better for input fuzzily ordered.
Moreover, if you output the 0'th item on disk and get an input which
may not fit in the current tournament (because the value "wins" over
the last output value), it cannot fit in the heap, so the size of the
heap decreases. The freed memory could be cleverly reused immediately
for progressively building a second heap, which grows at exactly the
same rate the first heap is melting. When the first heap completely
vanishes, you switch heaps and start a new run. Clever and quite
effective!
In a word, heaps are useful memory structures to know. I use them in
a few applications, and I think it is good to keep a `heap' module
around. :-)
--------------------
[1] The disk balancing algorithms which are current, nowadays, are
more annoying than clever, and this is a consequence of the seeking
capabilities of the disks. On devices which cannot seek, like big
tape drives, the story was quite different, and one had to be very
clever to ensure (far in advance) that each tape movement will be the
most effective possible (that is, will best participate in
"progressing" the merge). Some tapes were even able to read
backwards, and this was also used to avoid the rewinding time.
Believe me, real good tape sorts were quite spectacular to watch!
From all times, sorting has always been a Great Art! :-)
"""
__all__ = ['heappush', 'heappop', 'heapify', 'heapreplace', 'merge',
'nlargest', 'nsmallest', 'heappushpop']
from itertools import islice, count, tee, chain
def heappush(heap, item):
"""Push item onto heap, maintaining the heap invariant."""
heap.append(item)
_siftdown(heap, 0, len(heap)-1)
def heappop(heap):
"""Pop the smallest item off the heap, maintaining the heap invariant."""
lastelt = heap.pop() # raises appropriate IndexError if heap is empty
if heap:
returnitem = heap[0]
heap[0] = lastelt
_siftup(heap, 0)
else:
returnitem = lastelt
return returnitem
def heapreplace(heap, item):
"""Pop and return the current smallest value, and add the new item.
This is more efficient than heappop() followed by heappush(), and can be
more appropriate when using a fixed-size heap. Note that the value
returned may be larger than item! That constrains reasonable uses of
this routine unless written as part of a conditional replacement:
if item > heap[0]:
item = heapreplace(heap, item)
"""
returnitem = heap[0] # raises appropriate IndexError if heap is empty
heap[0] = item
_siftup(heap, 0)
return returnitem
def heappushpop(heap, item):
"""Fast version of a heappush followed by a heappop."""
if heap and heap[0] < item:
item, heap[0] = heap[0], item
_siftup(heap, 0)
return item
def heapify(x):
"""Transform list into a heap, in-place, in O(len(x)) time."""
n = len(x)
# Transform bottom-up. The largest index there's any point to looking at
# is the largest with a child index in-range, so must have 2*i + 1 < n,
# or i < (n-1)/2. If n is even = 2*j, this is (2*j-1)/2 = j-1/2 so
# j-1 is the largest, which is n//2 - 1. If n is odd = 2*j+1, this is
# (2*j+1-1)/2 = j so j-1 is the largest, and that's again n//2-1.
for i in reversed(range(n//2)):
_siftup(x, i)
def _heappushpop_max(heap, item):
"""Maxheap version of a heappush followed by a heappop."""
if heap and item < heap[0]:
item, heap[0] = heap[0], item
_siftup_max(heap, 0)
return item
def _heapify_max(x):
"""Transform list into a maxheap, in-place, in O(len(x)) time."""
n = len(x)
for i in reversed(range(n//2)):
_siftup_max(x, i)
def nlargest(n, iterable):
"""Find the n largest elements in a dataset.
Equivalent to: sorted(iterable, reverse=True)[:n]
"""
if n < 0:
return []
it = iter(iterable)
result = list(islice(it, n))
if not result:
return result
heapify(result)
_heappushpop = heappushpop
for elem in it:
_heappushpop(result, elem)
result.sort(reverse=True)
return result
def nsmallest(n, iterable):
"""Find the n smallest elements in a dataset.
Equivalent to: sorted(iterable)[:n]
"""
if n < 0:
return []
it = iter(iterable)
result = list(islice(it, n))
if not result:
return result
_heapify_max(result)
_heappushpop = _heappushpop_max
for elem in it:
_heappushpop(result, elem)
result.sort()
return result
# 'heap' is a heap at all indices >= startpos, except possibly for pos. pos
# is the index of a leaf with a possibly out-of-order value. Restore the
# heap invariant.
def _siftdown(heap, startpos, pos):
newitem = heap[pos]
# Follow the path to the root, moving parents down until finding a place
# newitem fits.
while pos > startpos:
parentpos = (pos - 1) >> 1
parent = heap[parentpos]
if newitem < parent:
heap[pos] = parent
pos = parentpos
continue
break
heap[pos] = newitem
# The child indices of heap index pos are already heaps, and we want to make
# a heap at index pos too. We do this by bubbling the smaller child of
# pos up (and so on with that child's children, etc) until hitting a leaf,
# then using _siftdown to move the oddball originally at index pos into place.
#
# We *could* break out of the loop as soon as we find a pos where newitem <=
# both its children, but turns out that's not a good idea, and despite that
# many books write the algorithm that way. During a heap pop, the last array
# element is sifted in, and that tends to be large, so that comparing it
# against values starting from the root usually doesn't pay (= usually doesn't
# get us out of the loop early). See Knuth, Volume 3, where this is
# explained and quantified in an exercise.
#
# Cutting the # of comparisons is important, since these routines have no
# way to extract "the priority" from an array element, so that intelligence
# is likely to be hiding in custom comparison methods, or in array elements
# storing (priority, record) tuples. Comparisons are thus potentially
# expensive.
#
# On random arrays of length 1000, making this change cut the number of
# comparisons made by heapify() a little, and those made by exhaustive
# heappop() a lot, in accord with theory. Here are typical results from 3
# runs (3 just to demonstrate how small the variance is):
#
# Compares needed by heapify Compares needed by 1000 heappops
# -------------------------- --------------------------------
# 1837 cut to 1663 14996 cut to 8680
# 1855 cut to 1659 14966 cut to 8678
# 1847 cut to 1660 15024 cut to 8703
#
# Building the heap by using heappush() 1000 times instead required
# 2198, 2148, and 2219 compares: heapify() is more efficient, when
# you can use it.
#
# The total compares needed by list.sort() on the same lists were 8627,
# 8627, and 8632 (this should be compared to the sum of heapify() and
# heappop() compares): list.sort() is (unsurprisingly!) more efficient
# for sorting.
def _siftup(heap, pos):
endpos = len(heap)
startpos = pos
newitem = heap[pos]
# Bubble up the smaller child until hitting a leaf.
childpos = 2*pos + 1 # leftmost child position
while childpos < endpos:
# Set childpos to index of smaller child.
rightpos = childpos + 1
if rightpos < endpos and not heap[childpos] < heap[rightpos]:
childpos = rightpos
# Move the smaller child up.
heap[pos] = heap[childpos]
pos = childpos
childpos = 2*pos + 1
# The leaf at pos is empty now. Put newitem there, and bubble it up
# to its final resting place (by sifting its parents down).
heap[pos] = newitem
_siftdown(heap, startpos, pos)
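# Illustrative sketch (not part of the original module): the comparison
# counts quoted in the block comment above can be reproduced by wrapping
# values in a class whose __lt__ increments a shared counter; exact
# numbers vary with the random seed, only the rough ratios matter.
def _demo_count_compares(n=1000):
    import random
    class _Counted:
        calls = 0
        def __init__(self, v):
            self.v = v
        def __lt__(self, other):
            _Counted.calls += 1
            return self.v < other.v
    data = [_Counted(random.random()) for _ in range(n)]
    heapify(data)
    built = _Counted.calls                 # compares used by heapify()
    while data:
        heappop(data)
    return built, _Counted.calls - built   # ... and by the n heappops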
def _siftdown_max(heap, startpos, pos):
'Maxheap variant of _siftdown'
newitem = heap[pos]
# Follow the path to the root, moving parents down until finding a place
# newitem fits.
while pos > startpos:
parentpos = (pos - 1) >> 1
parent = heap[parentpos]
if parent < newitem:
heap[pos] = parent
pos = parentpos
continue
break
heap[pos] = newitem
def _siftup_max(heap, pos):
'Maxheap variant of _siftup'
endpos = len(heap)
startpos = pos
newitem = heap[pos]
# Bubble up the larger child until hitting a leaf.
childpos = 2*pos + 1 # leftmost child position
while childpos < endpos:
# Set childpos to index of larger child.
rightpos = childpos + 1
if rightpos < endpos and not heap[rightpos] < heap[childpos]:
childpos = rightpos
# Move the larger child up.
heap[pos] = heap[childpos]
pos = childpos
childpos = 2*pos + 1
# The leaf at pos is empty now. Put newitem there, and bubble it up
# to its final resting place (by sifting its parents down).
heap[pos] = newitem
_siftdown_max(heap, startpos, pos)
# If available, use C implementation
# _heapq does not exist in Brython, so let's just comment it out.
#try:
# from _heapq import *
#except ImportError:
# pass
def merge(*iterables):
'''Merge multiple sorted inputs into a single sorted output.
Similar to sorted(itertools.chain(*iterables)) but returns a generator,
does not pull the data into memory all at once, and assumes that each of
the input streams is already sorted (smallest to largest).
>>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25]))
[0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25]
'''
_heappop, _heapreplace, _StopIteration = heappop, heapreplace, StopIteration
_len = len
h = []
h_append = h.append
for itnum, it in enumerate(map(iter, iterables)):
try:
next = it.__next__
h_append([next(), itnum, next])
except _StopIteration:
pass
heapify(h)
while _len(h) > 1:
try:
while True:
v, itnum, next = s = h[0]
yield v
s[0] = next() # raises StopIteration when exhausted
_heapreplace(h, s) # restore heap condition
except _StopIteration:
_heappop(h) # remove empty iterator
if h:
# fast case when only a single iterator remains
v, itnum, next = h[0]
yield v
yield from next.__self__
# Extend the implementations of nsmallest and nlargest to use a key= argument
_nsmallest = nsmallest
def nsmallest(n, iterable, key=None):
"""Find the n smallest elements in a dataset.
Equivalent to: sorted(iterable, key=key)[:n]
"""
# Short-cut for n==1 is to use min() when len(iterable)>0
if n == 1:
it = iter(iterable)
head = list(islice(it, 1))
if not head:
return []
if key is None:
return [min(chain(head, it))]
return [min(chain(head, it), key=key)]
# When n>=size, it's faster to use sorted()
try:
size = len(iterable)
except (TypeError, AttributeError):
pass
else:
if n >= size:
return sorted(iterable, key=key)[:n]
    # When key is None, use simpler decoration
if key is None:
it = zip(iterable, count()) # decorate
result = _nsmallest(n, it)
return [r[0] for r in result] # undecorate
# General case, slowest method
in1, in2 = tee(iterable)
it = zip(map(key, in1), count(), in2) # decorate
result = _nsmallest(n, it)
return [r[2] for r in result] # undecorate
_nlargest = nlargest
def nlargest(n, iterable, key=None):
"""Find the n largest elements in a dataset.
Equivalent to: sorted(iterable, key=key, reverse=True)[:n]
"""
# Short-cut for n==1 is to use max() when len(iterable)>0
if n == 1:
it = iter(iterable)
head = list(islice(it, 1))
if not head:
return []
if key is None:
return [max(chain(head, it))]
return [max(chain(head, it), key=key)]
# When n>=size, it's faster to use sorted()
try:
size = len(iterable)
except (TypeError, AttributeError):
pass
else:
if n >= size:
return sorted(iterable, key=key, reverse=True)[:n]
    # When key is None, use simpler decoration
if key is None:
it = zip(iterable, count(0,-1)) # decorate
result = _nlargest(n, it)
return [r[0] for r in result] # undecorate
# General case, slowest method
in1, in2 = tee(iterable)
it = zip(map(key, in1), count(0,-1), in2) # decorate
result = _nlargest(n, it)
return [r[2] for r in result] # undecorate
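# Usage sketch (illustrative, not part of the original module): with the
# key= wrappers above, these behave like sorted() restricted to n items:
#     nsmallest(2, ['apple', 'fig', 'banana'], key=len)  # -> ['fig', 'apple']
#     nlargest(1, ['apple', 'fig', 'banana'], key=len)   # -> ['banana']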
if __name__ == "__main__":
# Simple sanity test
heap = []
data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 0]
for item in data:
heappush(heap, item)
sort = []
while heap:
sort.append(heappop(heap))
print(sort)
import doctest
doctest.testmod()
| gpl-3.0 |
theochem/horton | horton/meanfield/test/test_scf_cdiis.py | 4 | 2038 | # -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2017 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
import numpy as np
from nose.tools import assert_raises
from horton import * # pylint: disable=wildcard-import,unused-wildcard-import
from horton.meanfield.test.common import check_hf_cs_hf, check_lih_os_hf, \
check_water_cs_hfs, check_n2_cs_hfs, check_h3_os_hfs, check_h3_os_pbe, \
check_co_cs_pbe, check_vanadium_sc_hf, check_water_cs_m05, \
check_methyl_os_tpss
def test_hf_cs_hf():
check_hf_cs_hf(CDIISSCFSolver(threshold=1e-7))
def test_lih_os_hf():
check_lih_os_hf(CDIISSCFSolver(threshold=1e-7))
def test_water_cs_hfs():
check_water_cs_hfs(CDIISSCFSolver(threshold=1e-6))
def test_n2_cs_hfs():
check_n2_cs_hfs(CDIISSCFSolver(threshold=1e-6))
def test_h3_os_hfs():
check_h3_os_hfs(CDIISSCFSolver(threshold=1e-6))
def test_co_cs_pbe():
check_co_cs_pbe(CDIISSCFSolver(threshold=1e-5))
def test_h3_os_pbe():
check_h3_os_pbe(CDIISSCFSolver(threshold=1e-6))
def test_vanadium_sc_hf():
with assert_raises(NoSCFConvergence):
check_vanadium_sc_hf(CDIISSCFSolver(threshold=1e-10, maxiter=10))
def test_water_cs_m05():
check_water_cs_m05(CDIISSCFSolver(threshold=1e-6))
def test_methyl_os_tpss():
check_methyl_os_tpss(CDIISSCFSolver(threshold=1e-5))
| gpl-3.0 |
neilpelow/wmap-django | venv/lib/python3.5/site-packages/pip/_vendor/requests/packages/chardet/charsetprober.py | 3127 | 1902 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import re
class CharSetProber:
def __init__(self):
pass
def reset(self):
self._mState = constants.eDetecting
def get_charset_name(self):
return None
def feed(self, aBuf):
pass
def get_state(self):
return self._mState
def get_confidence(self):
return 0.0
def filter_high_bit_only(self, aBuf):
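        # Collapse every run of ASCII bytes (0x00-0x7F) to a single space,
        # keeping only the high-bit bytes that carry encoding information.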
aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf)
return aBuf
def filter_without_english_letters(self, aBuf):
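        # Collapse every run of ASCII letters to a single space; digits,
        # punctuation and high-bit bytes are left in place.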
aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf)
return aBuf
    def filter_with_english_letters(self, aBuf):
        # TODO: not implemented in this vendored copy; the buffer is
        # returned unchanged. Later chardet releases filter out markup
        # and non-letter ASCII bytes here before frequency analysis.
        return aBuf
| gpl-3.0 |
MQQiang/kbengine | kbe/res/scripts/common/Lib/xml/sax/xmlreader.py | 824 | 12612 | """An XML Reader is the SAX 2 name for an XML parser. XML Parsers
should be based on this code. """
from . import handler
from ._exceptions import SAXNotSupportedException, SAXNotRecognizedException
# ===== XMLREADER =====
class XMLReader:
"""Interface for reading an XML document using callbacks.
XMLReader is the interface that an XML parser's SAX2 driver must
implement. This interface allows an application to set and query
features and properties in the parser, to register event handlers
for document processing, and to initiate a document parse.
All SAX interfaces are assumed to be synchronous: the parse
methods must not return until parsing is complete, and readers
must wait for an event-handler callback to return before reporting
the next event."""
def __init__(self):
self._cont_handler = handler.ContentHandler()
self._dtd_handler = handler.DTDHandler()
self._ent_handler = handler.EntityResolver()
self._err_handler = handler.ErrorHandler()
def parse(self, source):
"Parse an XML document from a system identifier or an InputSource."
raise NotImplementedError("This method must be implemented!")
def getContentHandler(self):
"Returns the current ContentHandler."
return self._cont_handler
def setContentHandler(self, handler):
"Registers a new object to receive document content events."
self._cont_handler = handler
def getDTDHandler(self):
"Returns the current DTD handler."
return self._dtd_handler
def setDTDHandler(self, handler):
"Register an object to receive basic DTD-related events."
self._dtd_handler = handler
def getEntityResolver(self):
"Returns the current EntityResolver."
return self._ent_handler
def setEntityResolver(self, resolver):
"Register an object to resolve external entities."
self._ent_handler = resolver
def getErrorHandler(self):
"Returns the current ErrorHandler."
return self._err_handler
def setErrorHandler(self, handler):
"Register an object to receive error-message events."
self._err_handler = handler
def setLocale(self, locale):
"""Allow an application to set the locale for errors and warnings.
SAX parsers are not required to provide localization for errors
and warnings; if they cannot support the requested locale,
however, they must raise a SAX exception. Applications may
request a locale change in the middle of a parse."""
raise SAXNotSupportedException("Locale support not implemented")
def getFeature(self, name):
"Looks up and returns the state of a SAX2 feature."
raise SAXNotRecognizedException("Feature '%s' not recognized" % name)
def setFeature(self, name, state):
"Sets the state of a SAX2 feature."
raise SAXNotRecognizedException("Feature '%s' not recognized" % name)
def getProperty(self, name):
"Looks up and returns the value of a SAX2 property."
raise SAXNotRecognizedException("Property '%s' not recognized" % name)
def setProperty(self, name, value):
"Sets the value of a SAX2 property."
raise SAXNotRecognizedException("Property '%s' not recognized" % name)
class IncrementalParser(XMLReader):
"""This interface adds three extra methods to the XMLReader
interface that allow XML parsers to support incremental
parsing. Support for this interface is optional, since not all
underlying XML parsers support this functionality.
When the parser is instantiated it is ready to begin accepting
data from the feed method immediately. After parsing has been
finished with a call to close the reset method must be called to
make the parser ready to accept new data, either from feed or
using the parse method.
Note that these methods must _not_ be called during parsing, that
is, after parse has been called and before it returns.
By default, the class also implements the parse method of the XMLReader
interface using the feed, close and reset methods of the
IncrementalParser interface as a convenience to SAX 2.0 driver
writers."""
def __init__(self, bufsize=2**16):
self._bufsize = bufsize
XMLReader.__init__(self)
def parse(self, source):
from . import saxutils
source = saxutils.prepare_input_source(source)
self.prepareParser(source)
file = source.getByteStream()
buffer = file.read(self._bufsize)
while buffer:
self.feed(buffer)
buffer = file.read(self._bufsize)
self.close()
def feed(self, data):
"""This method gives the raw XML data in the data parameter to
the parser and makes it parse the data, emitting the
corresponding events. It is allowed for XML constructs to be
split across several calls to feed.
feed may raise SAXException."""
raise NotImplementedError("This method must be implemented!")
def prepareParser(self, source):
"""This method is called by the parse implementation to allow
the SAX 2.0 driver to prepare itself for parsing."""
raise NotImplementedError("prepareParser must be overridden!")
def close(self):
"""This method is called when the entire XML document has been
passed to the parser through the feed method, to notify the
parser that there are no more data. This allows the parser to
do the final checks on the document and empty the internal
data buffer.
The parser will not be ready to parse another document until
the reset method has been called.
close may raise SAXException."""
raise NotImplementedError("This method must be implemented!")
def reset(self):
"""This method is called after close has been called to reset
the parser so that it is ready to parse new documents. The
results of calling parse or feed after close without calling
reset are undefined."""
raise NotImplementedError("This method must be implemented!")
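# Usage sketch (illustrative; 'SomeDriver' stands for any concrete
# IncrementalParser subclass and is not defined in this module):
#     parser = SomeDriver()
#     for chunk in chunks:      # feed raw XML as it arrives
#         parser.feed(chunk)
#     parser.close()            # final checks, flush remaining events
#     parser.reset()            # required before parsing another document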
# ===== LOCATOR =====
class Locator:
"""Interface for associating a SAX event with a document
location. A locator object will return valid results only during
calls to DocumentHandler methods; at any other time, the
results are unpredictable."""
def getColumnNumber(self):
"Return the column number where the current event ends."
return -1
def getLineNumber(self):
"Return the line number where the current event ends."
return -1
def getPublicId(self):
"Return the public identifier for the current event."
return None
def getSystemId(self):
"Return the system identifier for the current event."
return None
# ===== INPUTSOURCE =====
class InputSource:
"""Encapsulation of the information needed by the XMLReader to
read entities.
This class may include information about the public identifier,
system identifier, byte stream (possibly with character encoding
information) and/or the character stream of an entity.
Applications will create objects of this class for use in the
XMLReader.parse method and for returning from
EntityResolver.resolveEntity.
An InputSource belongs to the application, the XMLReader is not
allowed to modify InputSource objects passed to it from the
application, although it may make copies and modify those."""
def __init__(self, system_id = None):
self.__system_id = system_id
self.__public_id = None
self.__encoding = None
self.__bytefile = None
self.__charfile = None
def setPublicId(self, public_id):
"Sets the public identifier of this InputSource."
self.__public_id = public_id
def getPublicId(self):
"Returns the public identifier of this InputSource."
return self.__public_id
def setSystemId(self, system_id):
"Sets the system identifier of this InputSource."
self.__system_id = system_id
def getSystemId(self):
"Returns the system identifier of this InputSource."
return self.__system_id
def setEncoding(self, encoding):
"""Sets the character encoding of this InputSource.
The encoding must be a string acceptable for an XML encoding
declaration (see section 4.3.3 of the XML recommendation).
The encoding attribute of the InputSource is ignored if the
InputSource also contains a character stream."""
self.__encoding = encoding
def getEncoding(self):
"Get the character encoding of this InputSource."
return self.__encoding
def setByteStream(self, bytefile):
"""Set the byte stream (a Python file-like object which does
not perform byte-to-character conversion) for this input
source.
The SAX parser will ignore this if there is also a character
stream specified, but it will use a byte stream in preference
to opening a URI connection itself.
If the application knows the character encoding of the byte
stream, it should set it with the setEncoding method."""
self.__bytefile = bytefile
def getByteStream(self):
"""Get the byte stream for this input source.
The getEncoding method will return the character encoding for
this byte stream, or None if unknown."""
return self.__bytefile
def setCharacterStream(self, charfile):
"""Set the character stream for this input source. (The stream
must be a Python 2.0 Unicode-wrapped file-like that performs
conversion to Unicode strings.)
If there is a character stream specified, the SAX parser will
ignore any byte stream and will not attempt to open a URI
connection to the system identifier."""
self.__charfile = charfile
def getCharacterStream(self):
"Get the character stream for this input source."
return self.__charfile
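# Usage sketch (illustrative, not part of the original module): a parser
# prefers the character stream, then the byte stream, and only then opens
# the system identifier itself.
#     source = InputSource('http://example.com/doc.xml')
#     source.setByteStream(open('doc.xml', 'rb'))
#     source.setEncoding('utf-8')   # ignored if a character stream is set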
# ===== ATTRIBUTESIMPL =====
class AttributesImpl:
def __init__(self, attrs):
"""Non-NS-aware implementation.
attrs should be of the form {name : value}."""
self._attrs = attrs
def getLength(self):
return len(self._attrs)
def getType(self, name):
return "CDATA"
def getValue(self, name):
return self._attrs[name]
def getValueByQName(self, name):
return self._attrs[name]
def getNameByQName(self, name):
if name not in self._attrs:
raise KeyError(name)
return name
def getQNameByName(self, name):
if name not in self._attrs:
raise KeyError(name)
return name
def getNames(self):
return list(self._attrs.keys())
def getQNames(self):
return list(self._attrs.keys())
def __len__(self):
return len(self._attrs)
def __getitem__(self, name):
return self._attrs[name]
def keys(self):
return list(self._attrs.keys())
def __contains__(self, name):
return name in self._attrs
def get(self, name, alternative=None):
return self._attrs.get(name, alternative)
def copy(self):
return self.__class__(self._attrs)
def items(self):
return list(self._attrs.items())
def values(self):
return list(self._attrs.values())
# ===== ATTRIBUTESNSIMPL =====
class AttributesNSImpl(AttributesImpl):
def __init__(self, attrs, qnames):
"""NS-aware implementation.
attrs should be of the form {(ns_uri, lname): value, ...}.
qnames of the form {(ns_uri, lname): qname, ...}."""
self._attrs = attrs
self._qnames = qnames
def getValueByQName(self, name):
for (nsname, qname) in self._qnames.items():
if qname == name:
return self._attrs[nsname]
raise KeyError(name)
def getNameByQName(self, name):
for (nsname, qname) in self._qnames.items():
if qname == name:
return nsname
raise KeyError(name)
def getQNameByName(self, name):
return self._qnames[name]
def getQNames(self):
return list(self._qnames.values())
def copy(self):
return self.__class__(self._attrs, self._qnames)
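# Usage sketch (illustrative, not part of the original module), following
# the {(ns_uri, lname): ...} forms documented in __init__:
#     XMLNS = 'http://www.w3.org/XML/1998/namespace'
#     attrs = AttributesNSImpl({(XMLNS, 'lang'): 'en'},
#                              {(XMLNS, 'lang'): 'xml:lang'})
#     attrs.getValueByQName('xml:lang')   # -> 'en'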
def _test():
XMLReader()
IncrementalParser()
Locator()
if __name__ == "__main__":
_test()
| lgpl-3.0 |
techdragon/django | tests/project_template/test_settings.py | 23 | 1644 | import os
import shutil
import unittest
from django import conf
from django.test import TestCase
from django.utils import six
from django.utils._os import upath
@unittest.skipIf(
six.PY2,
'Python 2 cannot import the project template because '
'django/conf/project_template doesn\'t have an __init__.py file.'
)
class TestStartProjectSettings(TestCase):
def setUp(self):
# Ensure settings.py exists
project_dir = os.path.join(
os.path.dirname(upath(conf.__file__)),
'project_template',
'project_name',
)
template_settings_py = os.path.join(project_dir, 'settings.py-tpl')
test_settings_py = os.path.join(project_dir, 'settings.py')
shutil.copyfile(template_settings_py, test_settings_py)
self.addCleanup(os.remove, test_settings_py)
def test_middleware_headers(self):
"""
Ensure headers sent by the default MIDDLEWARE don't inadvertently
change. For example, we never want "Vary: Cookie" to appear in the list
since it prevents the caching of responses.
"""
from django.conf.project_template.project_name.settings import MIDDLEWARE
with self.settings(
MIDDLEWARE=MIDDLEWARE,
ROOT_URLCONF='project_template.urls',
):
response = self.client.get('/empty/')
headers = sorted(response.serialize_headers().split(b'\r\n'))
self.assertEqual(headers, [
b'Content-Length: 0',
b'Content-Type: text/html; charset=utf-8',
b'X-Frame-Options: SAMEORIGIN',
])
| bsd-3-clause |
t-tran/libcloud | libcloud/test/loadbalancer/test_dimensiondata_v2_3.py | 11 | 26689 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.dimensiondata import DimensionDataVIPNode, DimensionDataPool
from libcloud.common.dimensiondata import DimensionDataPoolMember
from libcloud.loadbalancer.base import LoadBalancer, Member, Algorithm
from libcloud.loadbalancer.drivers.dimensiondata \
import DimensionDataLBDriver as DimensionData
from libcloud.loadbalancer.types import State
from libcloud.test import MockHttp, unittest
from libcloud.test.file_fixtures import LoadBalancerFileFixtures
from libcloud.test.secrets import DIMENSIONDATA_PARAMS
class DimensionData_v2_3_Tests(unittest.TestCase):
def setUp(self):
DimensionData.connectionCls.active_api_version = '2.3'
DimensionData.connectionCls.conn_class = DimensionDataMockHttp
DimensionDataMockHttp.type = None
self.driver = DimensionData(*DIMENSIONDATA_PARAMS)
def test_invalid_region(self):
with self.assertRaises(ValueError):
self.driver = DimensionData(*DIMENSIONDATA_PARAMS, region='blah')
def test_invalid_creds(self):
DimensionDataMockHttp.type = 'UNAUTHORIZED'
with self.assertRaises(InvalidCredsError):
self.driver.list_balancers()
def test_create_balancer(self):
self.driver.ex_set_current_network_domain('1234')
members = []
members.append(Member(
id=None,
ip='1.2.3.4',
port=80))
balancer = self.driver.create_balancer(
name='test',
port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members,
ex_listener_ip_address='5.6.7.8')
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '165.180.12.22')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
self.assertEqual(balancer.extra['listener_ip_address'], '5.6.7.8')
def test_create_balancer_with_defaults(self):
self.driver.ex_set_current_network_domain('1234')
balancer = self.driver.create_balancer(
name='test',
port=None,
protocol=None,
algorithm=None,
members=None)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '165.180.12.22')
self.assertEqual(balancer.port, None)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_create_balancer_no_members(self):
self.driver.ex_set_current_network_domain('1234')
members = None
balancer = self.driver.create_balancer(
name='test',
port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '165.180.12.22')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_create_balancer_empty_members(self):
self.driver.ex_set_current_network_domain('1234')
members = []
balancer = self.driver.create_balancer(
name='test',
port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '165.180.12.22')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_list_balancers(self):
bal = self.driver.list_balancers()
self.assertEqual(bal[0].name, 'myProduction.Virtual.Listener')
self.assertEqual(bal[0].id, '6115469d-a8bb-445b-bb23-d23b5283f2b9')
self.assertEqual(bal[0].port, '8899')
self.assertEqual(bal[0].ip, '165.180.12.22')
self.assertEqual(bal[0].state, State.RUNNING)
def test_balancer_list_members(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
members = self.driver.balancer_list_members(balancer)
self.assertEqual(2, len(members))
self.assertEqual(members[0].ip, '10.0.3.13')
self.assertEqual(members[0].id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(members[0].port, 9889)
def test_balancer_attach_member(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
member = Member(
id=None,
ip='112.12.2.2',
port=80,
balancer=balancer,
extra=None)
member = self.driver.balancer_attach_member(balancer, member)
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
def test_balancer_attach_member_without_port(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
member = Member(
id=None,
ip='112.12.2.2',
port=None,
balancer=balancer,
extra=None)
member = self.driver.balancer_attach_member(balancer, member)
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.port, None)
def test_balancer_detach_member(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
member = Member(
id='3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0',
ip='112.12.2.2',
port=80,
balancer=balancer,
extra=None)
result = self.driver.balancer_detach_member(balancer, member)
self.assertEqual(result, True)
def test_destroy_balancer(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
response = self.driver.destroy_balancer(balancer)
self.assertEqual(response, True)
def test_set_get_network_domain_id(self):
self.driver.ex_set_current_network_domain('1234')
nwd = self.driver.ex_get_current_network_domain()
self.assertEqual(nwd, '1234')
def test_ex_create_pool_member(self):
pool = DimensionDataPool(
id='4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
)
node = DimensionDataVIPNode(
id='2344',
name='test',
status=State.RUNNING,
ip='123.23.3.2'
)
member = self.driver.ex_create_pool_member(
pool=pool,
node=node,
port=80
)
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.name, '10.0.3.13')
self.assertEqual(member.ip, '123.23.3.2')
def test_ex_create_node(self):
node = self.driver.ex_create_node(
network_domain_id='12345',
name='test',
ip='123.12.32.2',
ex_description='',
connection_limit=25000,
connection_rate_limit=2000)
self.assertEqual(node.name, 'myProductionNode.1')
self.assertEqual(node.id, '9e6b496d-5261-4542-91aa-b50c7f569c54')
    def test_ex_create_pool(self):
pool = self.driver.ex_create_pool(
network_domain_id='1234',
name='test',
balancer_method='ROUND_ROBIN',
ex_description='test',
service_down_action='NONE',
slow_ramp_time=30)
self.assertEqual(pool.id, '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(pool.name, 'test')
self.assertEqual(pool.status, State.RUNNING)
def test_ex_create_virtual_listener(self):
listener = self.driver.ex_create_virtual_listener(
network_domain_id='12345',
name='test',
ex_description='test',
port=80,
pool=DimensionDataPool(
id='1234',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
))
self.assertEqual(listener.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(listener.name, 'test')
def test_ex_create_virtual_listener_unusual_port(self):
listener = self.driver.ex_create_virtual_listener(
network_domain_id='12345',
name='test',
ex_description='test',
port=8900,
pool=DimensionDataPool(
id='1234',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
))
self.assertEqual(listener.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(listener.name, 'test')
def test_ex_create_virtual_listener_without_port(self):
listener = self.driver.ex_create_virtual_listener(
network_domain_id='12345',
name='test',
ex_description='test',
pool=DimensionDataPool(
id='1234',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
))
self.assertEqual(listener.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(listener.name, 'test')
def test_ex_create_virtual_listener_without_pool(self):
listener = self.driver.ex_create_virtual_listener(
network_domain_id='12345',
name='test',
ex_description='test')
self.assertEqual(listener.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(listener.name, 'test')
def test_get_balancer(self):
bal = self.driver.get_balancer('6115469d-a8bb-445b-bb23-d23b5283f2b9')
self.assertEqual(bal.name, 'myProduction.Virtual.Listener')
self.assertEqual(bal.id, '6115469d-a8bb-445b-bb23-d23b5283f2b9')
self.assertEqual(bal.port, '8899')
self.assertEqual(bal.ip, '165.180.12.22')
self.assertEqual(bal.state, State.RUNNING)
def test_list_protocols(self):
protocols = self.driver.list_protocols()
self.assertNotEqual(0, len(protocols))
def test_ex_get_nodes(self):
nodes = self.driver.ex_get_nodes()
self.assertEqual(2, len(nodes))
self.assertEqual(nodes[0].name, 'ProductionNode.1')
self.assertEqual(nodes[0].id, '34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertEqual(nodes[0].ip, '10.10.10.101')
def test_ex_get_node(self):
node = self.driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertEqual(node.name, 'ProductionNode.2')
self.assertEqual(node.id, '34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertEqual(node.ip, '10.10.10.101')
def test_ex_update_node(self):
node = self.driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
node.connection_limit = '100'
result = self.driver.ex_update_node(node)
self.assertEqual(result.connection_limit, '100')
def test_ex_destroy_node(self):
result = self.driver.ex_destroy_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertTrue(result)
def test_ex_set_node_state(self):
node = self.driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
result = self.driver.ex_set_node_state(node, False)
self.assertEqual(result.connection_limit, '10000')
def test_ex_get_pools(self):
pools = self.driver.ex_get_pools()
self.assertNotEqual(0, len(pools))
self.assertEqual(pools[0].name, 'myDevelopmentPool.1')
self.assertEqual(pools[0].id, '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
def test_ex_get_pool(self):
pool = self.driver.ex_get_pool('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
self.assertEqual(pool.name, 'myDevelopmentPool.1')
self.assertEqual(pool.id, '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
def test_ex_update_pool(self):
pool = self.driver.ex_get_pool('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
pool.slow_ramp_time = '120'
result = self.driver.ex_update_pool(pool)
self.assertTrue(result)
def test_ex_destroy_pool(self):
response = self.driver.ex_destroy_pool(
pool=DimensionDataPool(
id='4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None))
self.assertTrue(response)
def test_get_pool_members(self):
members = self.driver.ex_get_pool_members('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
self.assertEqual(2, len(members))
self.assertEqual(members[0].id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(members[0].name, '10.0.3.13')
self.assertEqual(members[0].status, 'NORMAL')
self.assertEqual(members[0].ip, '10.0.3.13')
self.assertEqual(members[0].port, 9889)
self.assertEqual(members[0].node_id, '3c207269-e75e-11e4-811f-005056806999')
def test_get_pool_member(self):
member = self.driver.ex_get_pool_member('3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.name, '10.0.3.13')
self.assertEqual(member.status, 'NORMAL')
self.assertEqual(member.ip, '10.0.3.13')
self.assertEqual(member.port, 9889)
def test_set_pool_member_state(self):
member = self.driver.ex_get_pool_member('3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
result = self.driver.ex_set_pool_member_state(member, True)
self.assertTrue(result)
def test_ex_destroy_pool_member(self):
response = self.driver.ex_destroy_pool_member(
member=DimensionDataPoolMember(
id='',
name='test',
status=State.RUNNING,
ip='1.2.3.4',
port=80,
node_id='3c207269-e75e-11e4-811f-005056806999'),
destroy_node=False)
self.assertTrue(response)
def test_ex_destroy_pool_member_with_node(self):
response = self.driver.ex_destroy_pool_member(
member=DimensionDataPoolMember(
id='',
name='test',
status=State.RUNNING,
ip='1.2.3.4',
port=80,
node_id='34de6ed6-46a4-4dae-a753-2f8d3840c6f9'),
destroy_node=True)
self.assertTrue(response)
def test_ex_get_default_health_monitors(self):
monitors = self.driver.ex_get_default_health_monitors(
'4d360b1f-bc2c-4ab7-9884-1f03ba2768f7'
)
self.assertEqual(len(monitors), 6)
self.assertEqual(monitors[0].id, '01683574-d487-11e4-811f-005056806999')
self.assertEqual(monitors[0].name, 'CCDEFAULT.Http')
self.assertFalse(monitors[0].node_compatible)
self.assertTrue(monitors[0].pool_compatible)
def test_ex_get_default_persistence_profiles(self):
profiles = self.driver.ex_get_default_persistence_profiles(
'4d360b1f-bc2c-4ab7-9884-1f03ba2768f7'
)
self.assertEqual(len(profiles), 4)
self.assertEqual(profiles[0].id, 'a34ca024-f3db-11e4-b010-005056806999')
self.assertEqual(profiles[0].name, 'CCDEFAULT.Cookie')
self.assertEqual(profiles[0].fallback_compatible, False)
self.assertEqual(len(profiles[0].compatible_listeners), 1)
self.assertEqual(profiles[0].compatible_listeners[0].type, 'PERFORMANCE_LAYER_4')
def test_ex_get_default_irules(self):
irules = self.driver.ex_get_default_irules(
'4d360b1f-bc2c-4ab7-9884-1f03ba2768f7'
)
self.assertEqual(len(irules), 4)
self.assertEqual(irules[0].id, '2b20cb2c-ffdc-11e4-b010-005056806999')
self.assertEqual(irules[0].name, 'CCDEFAULT.HttpsRedirect')
self.assertEqual(len(irules[0].compatible_listeners), 1)
self.assertEqual(irules[0].compatible_listeners[0].type, 'PERFORMANCE_LAYER_4')
class DimensionDataMockHttp(MockHttp):
fixtures = LoadBalancerFileFixtures('dimensiondata')
def _oec_0_9_myaccount_UNAUTHORIZED(self, method, url, body, headers):
return (httplib.UNAUTHORIZED, "", {}, httplib.responses[httplib.UNAUTHORIZED])
def _oec_0_9_myaccount(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_INPROGRESS(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_virtualListener(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_virtualListener.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_virtualListener_6115469d_a8bb_445b_bb23_d23b5283f2b9(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_virtualListener_6115469d_a8bb_445b_bb23_d23b5283f2b9.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_pool(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_pool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_pool_4d360b1f_bc2c_4ab7_9884_1f03ba2768f7(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_pool_4d360b1f_bc2c_4ab7_9884_1f03ba2768f7.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_poolMember(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_poolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_poolMember_3dd806a2_c2c8_4c0c_9a4f_5219ea9266c0(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_poolMember_3dd806a2_c2c8_4c0c_9a4f_5219ea9266c0.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createPool(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_createPool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createNode(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_createNode.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_addPoolMember(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_addPoolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createVirtualListener(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_createVirtualListener.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_removePoolMember(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_removePoolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deleteVirtualListener(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_deleteVirtualListener.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deletePool(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_deletePool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deleteNode(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_deleteNode.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_node(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_node.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_node_34de6ed6_46a4_4dae_a753_2f8d3840c6f9(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_node_34de6ed6_46a4_4dae_a753_2f8d3840c6f9.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editNode(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_editNode.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editPool(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_editPool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editPoolMember(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_editPoolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultHealthMonitor(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_defaultHealthMonitor.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultPersistenceProfile(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_defaultPersistenceProfile.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_3_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultIrule(self, method, url, body, headers):
body = self.fixtures.load(
'networkDomainVip_defaultIrule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
if __name__ == '__main__':
sys.exit(unittest.main())
| apache-2.0 |
NLeSC/cptm | cptm/utils/topics.py | 1 | 1882 | import pandas as pd
def get_top_topic_words(topics, opinions, t, top=10):
"""Return dataframe containing top topics and opinions.
    Parameters
    topics : pandas.DataFrame - topic/word weights (one column per topic)
    opinions : dict of pandas.DataFrame - opinion/word weights per perspective
    t : str or int - index of the topic (cast to str internally)
    top : int - the number of words to store in the dataframe
    Returns Pandas DataFrame
    The DataFrame contains top topic words, weights of topic words and,
    for each perspective, opinion words and weights of opinion words.
"""
t = str(t)
topic = topics[t].copy()
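    # NB: in-place Series.sort() from older pandas; with newer pandas this
    # would be topic.sort_values(ascending=False, inplace=True).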
topic.sort(ascending=False)
topic = topic[0:top]
df_t = pd.DataFrame(topic)
df_t.reset_index(level=0, inplace=True)
df_t.columns = ['topic', 'weights_topic']
dfs = [df_t]
for p, o in opinions.iteritems():
opinion = o[t].copy()
opinion.sort(ascending=False)
opinion = opinion[0:top]
df_o = pd.DataFrame(opinion)
df_o.reset_index(level=0, inplace=True)
df_o.columns = ['{}'.format(p),
'weights_{}'.format(p)]
dfs.append(df_o)
return pd.concat(dfs, axis=1)
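# Usage sketch (illustrative; 'topics' and 'opinions' are assumed to be the
# topic/word weight table and the per-perspective opinion/word weight tables
# produced elsewhere in cptm):
#     df = get_top_topic_words(topics, opinions, t=0, top=5)
#     print(topic_str(df, single_line=True, weights=True))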
def topic_str(df, single_line=False, weights=False, opinions=True):
if opinions:
opinion_labels = [l for l in df.columns if not l.startswith('weights')]
else:
opinion_labels = [l for l in df.columns if l.startswith('topic')]
if not single_line:
if not weights:
return str(df[opinion_labels])
else:
return str(df)
else:
lines = []
if not weights:
for l in opinion_labels:
lines.append(u'{}:\t'.format(l)+' '.join(df[l]))
else:
for l in opinion_labels:
zipped = zip(df[l], df['weights_{}'.format(l)])
line = [u'{}*{:.4f}'.format(wo, we) for wo, we in zipped]
lines.append(' '.join([u'{}:\t'.format(l)]+line))
return u'\n'.join(lines)
| apache-2.0 |
Oxygem/canaryd | canaryd_packages/requests/packages/chardet/latin1prober.py | 1778 | 5232 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe
from .compat import wrap_ord
FREQ_CAT_NUM = 4
UDF = 0 # undefined
OTH = 1 # other
ASC = 2 # ascii capital letter
ASS = 3 # ascii small letter
ACV = 4 # accent capital vowel
ACO = 5 # accent capital other
ASV = 6 # accent small vowel
ASO = 7 # accent small other
CLASS_NUM = 8 # total classes
Latin1_CharToClass = (
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F
OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47
ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F
ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57
ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F
OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67
ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F
ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77
ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F
OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87
OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F
UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97
OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF
ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7
ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF
ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7
ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF
ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7
ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF
ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7
ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF
)
# 0 : illegal
# 1 : very unlikely
# 2 : normal
# 3 : very likely
Latin1ClassModel = (
# UDF OTH ASC ASS ACV ACO ASV ASO
0, 0, 0, 0, 0, 0, 0, 0, # UDF
0, 3, 3, 3, 3, 3, 3, 3, # OTH
0, 3, 3, 3, 3, 3, 3, 3, # ASC
0, 3, 3, 3, 1, 1, 3, 3, # ASS
0, 3, 3, 3, 1, 2, 1, 2, # ACV
0, 3, 3, 3, 3, 3, 3, 3, # ACO
0, 3, 1, 3, 1, 1, 1, 3, # ASV
0, 3, 1, 3, 1, 1, 3, 3, # ASO
)
class Latin1Prober(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self.reset()
def reset(self):
self._mLastCharClass = OTH
self._mFreqCounter = [0] * FREQ_CAT_NUM
CharSetProber.reset(self)
def get_charset_name(self):
return "windows-1252"
def feed(self, aBuf):
aBuf = self.filter_with_english_letters(aBuf)
for c in aBuf:
charClass = Latin1_CharToClass[wrap_ord(c)]
freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM)
+ charClass]
if freq == 0:
self._mState = eNotMe
break
self._mFreqCounter[freq] += 1
self._mLastCharClass = charClass
return self.get_state()
def get_confidence(self):
if self.get_state() == eNotMe:
return 0.01
total = sum(self._mFreqCounter)
if total < 0.01:
confidence = 0.0
else:
confidence = ((self._mFreqCounter[3] - self._mFreqCounter[1] * 20.0)
/ total)
if confidence < 0.0:
confidence = 0.0
# lower the confidence of latin1 so that other more accurate
# detector can take priority.
confidence = confidence * 0.73
return confidence
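# A minimal usage sketch (illustrative only; assumes Python 2 and that the
# rest of the chardet package is importable alongside this module):
#
#     prober = Latin1Prober()
#     prober.feed('caf\xe9 au lait')   # '\xe9' is an accented small vowel
#     print prober.get_charset_name(), prober.get_confidence()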
| mit |
tund/kaggle-galaxy-zoo | avg_result.py | 1 | 2774 | # Copyright (c) 2014, Tu Dinh Nguyen ([email protected])
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Average several Kaggle submission CSVs into a single ensemble submission.
"""
import sys
import os
import csv
import cPickle as pickle
import numpy as np
import gzip
from numpy import isnan
RESULT_FILE = ["./RUN/avg_res/0.07889.csv",
"./RUN/avg_res/0.07895.csv",
"./RUN/avg_res/0.07911.csv",
"./RUN/avg_res/0.07939.csv"]
OUTPUT_FILE = "./RUN/avg_res/final_submission.csv"
def refine_result(res):
    # clip all values > 1 down to 1
    res[res > 1] = 1
    # clip all values < 0 up to 0
    res[res < 0] = 0
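# Example of the in-place clipping: a row of [-0.2, 0.5, 1.3] becomes
# [0.0, 0.5, 1.0], keeping every predicted probability inside [0, 1].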
def main():
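    # accumulator shaped like the Galaxy Zoo submission:
    # 79975 test galaxies x 37 answer probabilities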
res_total = np.zeros((79975, 37))
for i in xrange(len(RESULT_FILE)):
result = np.genfromtxt(RESULT_FILE[i], dtype=np.float32, delimiter=',', skip_header=1)
result = result[:, 1:]
res_total += result
res_total /= len(RESULT_FILE)
first_col = np.genfromtxt("./raw_data/kaggle_submission.csv",
dtype=np.int32, delimiter=',', skip_header=1, usecols=0)
first_col = first_col.reshape(len(first_col), 1)
r = csv.reader(open("./raw_data/kaggle_submission.csv", 'rb'), delimiter=",")
h = r.next()
refine_result(res_total)
with open(OUTPUT_FILE, 'wb') as f_out:
w = csv.writer(f_out, delimiter=",")
w.writerow(h)
for i in range(res_total.shape[0]):
w.writerow(np.hstack([first_col[i, 0], res_total[i, :]]).astype(np.single))
if __name__ == '__main__':
main()
| bsd-3-clause |
BAMitUp/Fantasy-Football-Shuffler | ENV/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/langgreekmodel.py | 2763 | 12628 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbols (punctuation) that do not belong to a word
# 252: 0 - 9
# Character Mapping Table:
Latin7_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40
79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50
253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60
78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90
253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0
253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0
110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0
35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0
124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0
9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
)
win1253_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40
79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50
253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60
78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90
253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0
253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0
110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0
35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0
124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0
9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 98.2851%
# first 1024 sequences: 1.7001%
# rest sequences: 0.0359%
# negative sequences: 0.0148%
GreekLangModel = (
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0,
3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0,
2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0,
0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0,
2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0,
2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0,
0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0,
2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0,
0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0,
3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0,
3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0,
2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0,
2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0,
0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0,
0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0,
0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2,
0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0,
0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2,
0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0,
0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2,
0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2,
0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,
0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2,
0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0,
0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0,
0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,
0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0,
0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2,
0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2,
0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2,
0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2,
0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,
0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1,
0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2,
0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2,
0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2,
0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,
0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0,
0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,
0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0,
0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0,
0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)
Latin7GreekModel = {
'charToOrderMap': Latin7_CharToOrderMap,
'precedenceMatrix': GreekLangModel,
'mTypicalPositiveRatio': 0.982851,
'keepEnglishLetter': False,
'charsetName': "ISO-8859-7"
}
Win1253GreekModel = {
'charToOrderMap': win1253_CharToOrderMap,
'precedenceMatrix': GreekLangModel,
'mTypicalPositiveRatio': 0.982851,
'keepEnglishLetter': False,
'charsetName': "windows-1253"
}
# flake8: noqa
| gpl-3.0 |
chirilo/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/config/urls.py | 117 | 3095 | # Copyright (c) 2010, Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
def view_source_url(local_path):
return "http://trac.webkit.org/browser/trunk/%s" % local_path
def view_revision_url(revision_number):
return "http://trac.webkit.org/changeset/%s" % revision_number
contribution_guidelines = "http://webkit.org/coding/contributing.html"
bug_server_domain = "webkit.org"
bug_server_host = "bugs." + bug_server_domain
_bug_server_regex = "https?://%s/" % re.sub('\.', '\\.', bug_server_host)
bug_server_url = "https://%s/" % bug_server_host
bug_url_long = _bug_server_regex + r"show_bug\.cgi\?id=(?P<bug_id>\d+)(&ctype=xml|&excludefield=attachmentdata)*"
bug_url_short = r"https?\://%s/b/(?P<bug_id>\d+)" % bug_server_domain
attachment_url = _bug_server_regex + r"attachment\.cgi\?id=(?P<attachment_id>\d+)(&action=(?P<action>\w+))?"
direct_attachment_url = r"https?://bug-(?P<bug_id>\d+)-attachments.%s/attachment\.cgi\?id=(?P<attachment_id>\d+)" % bug_server_domain
buildbot_url = "http://build.webkit.org"
def parse_bug_id(string):
if not string:
return None
match = re.search(bug_url_short, string)
if match:
return int(match.group('bug_id'))
match = re.search(bug_url_long, string)
if match:
return int(match.group('bug_id'))
return None
def parse_attachment_id(string):
if not string:
return None
match = re.search(attachment_url, string)
if match:
return int(match.group('attachment_id'))
match = re.search(direct_attachment_url, string)
if match:
return int(match.group('attachment_id'))
return None
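# Illustrative round-trips (the URLs below are examples only):
#
#     parse_bug_id('https://bugs.webkit.org/show_bug.cgi?id=12345')   # -> 12345
#     parse_bug_id('http://webkit.org/b/12345')                       # -> 12345
#     parse_attachment_id('https://bugs.webkit.org/attachment.cgi?id=678&action=review')  # -> 678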
| bsd-3-clause |
arxcruz/tempest-tool | tempestmail/tests/test_bugs.py | 1 | 2571 | import mock
import unittest
import tempestmail.bugs as bugs
class TestBugs(unittest.TestCase):
def setUp(self):
self.bz_url = 'https://bugzilla.redhat.com/show_bug.cgi?id=1386421'
self.lp_url = 'https://bugs.launchpad.net/tripleo/+bug/1634824'
self.error_url = 'https://www.google.com'
super(TestBugs, self).setUp()
def test_bug_factory_launchpad(self):
connector = bugs.bug_factory(self.lp_url)
self.assertIsInstance(connector, bugs.Launchpad)
def test_bug_factory_bugzilla(self):
connector = bugs.bug_factory(self.bz_url)
self.assertIsInstance(connector, bugs.Bugzilla)
def test_bug_factory_error(self):
with self.assertRaises(ValueError):
bugs.bug_factory(self.error_url)
@mock.patch('tempestmail.utils.get_html')
def test_bug_status_bugzilla(self, get_html_mock):
title = ('<title>Bug 1386421 – Tempest fail: tempest.api.'
'compute.servers.test_server_actions.ServerActionsTestJSON'
'</title>')
returned_title = (u'Bug 1386421 \u2013 Tempest fail: tempest.api.'
'compute.servers.test_server_actions.'
'ServerActionsTestJSON')
expected_return = (returned_title, None)
get_html_mock.return_value.content.decode.return_value = title
connector = bugs.connect_to_bug_system(self.bz_url)
name = connector.bug_status()
self.assertEquals(name, expected_return)
get_html_mock.return_value = None
connector = bugs.connect_to_bug_system(self.bz_url)
name = connector.bug_status()
self.assertEquals(name, ('', None))
get_html_mock.assert_called()
@mock.patch('tempestmail.utils.get_html')
def test_bug_status_launchpad(self, get_html_mock):
title = ('<title>Bug #1633713 "puppet-ntp is breaking ci" : Bugs : '
'tripleo</title>')
returned_title = ('Bug #1633713 "puppet-ntp is breaking ci" : Bugs : '
'tripleo')
expected_return = (returned_title, None)
get_html_mock.return_value.content.decode.return_value = title
connector = bugs.connect_to_bug_system(self.lp_url)
name = connector.bug_status()
self.assertEquals(name, expected_return)
get_html_mock.return_value = None
connector = bugs.connect_to_bug_system(self.lp_url)
name = connector.bug_status()
self.assertEquals(name, ('', None))
get_html_mock.assert_called()
| gpl-3.0 |
victorbriz/sumatrapdf | scripts/trans_langs.py | 16 | 5002 | # List of languages we support, their iso codes and id as understood
# by Windows SDK (LANG_* and SUBLANG_*_*).
# See http://msdn.microsoft.com/en-us/library/dd318693.aspx for the full list.
g_langs = [
('af', 'Afrikaans', '_LANGID(LANG_AFRIKAANS)'),
('am', 'Armenian (Հայերեն)', '_LANGID(LANG_ARMENIAN)'),
('ar', 'Arabic (الْعَرَبيّة)', '_LANGID(LANG_ARABIC)', 'RTL'),
('az', 'Azerbaijani (Azərbaycanca)', '_LANGID(LANG_AZERI)'),
('bg', 'Bulgarian (Български)', '_LANGID(LANG_BULGARIAN)'),
('bn', 'Bengali (বাংলা)', '_LANGID(LANG_BENGALI)'),
('br', 'Portuguese - Brazil (Português)', 'MAKELANGID(LANG_PORTUGUESE, SUBLANG_PORTUGUESE_BRAZILIAN)'),
('bs', 'Bosnian (Bosanski)', 'MAKELANGID(LANG_BOSNIAN, SUBLANG_BOSNIAN_BOSNIA_HERZEGOVINA_LATIN)'),
('by', 'Belarusian (Беларуская)', '_LANGID(LANG_BELARUSIAN)'),
('ca', 'Catalan (Català)', '_LANGID(LANG_CATALAN)'),
('ca-xv', 'Catalan-Valencian (Català-Valencià)', '(LANGID)-1'),
('cn', 'Chinese Simplified (简体中文)', 'MAKELANGID(LANG_CHINESE, SUBLANG_CHINESE_SIMPLIFIED)'),
('cy', 'Welsh (Cymraeg)', '_LANGID(LANG_WELSH)'),
('cz', 'Czech (Čeština)', '_LANGID(LANG_CZECH)'),
('de', 'German (Deutsch)', '_LANGID(LANG_GERMAN)'),
('dk', 'Danish (Dansk)', '_LANGID(LANG_DANISH)'),
('el', 'Greek (Ελληνικά)', '_LANGID(LANG_GREEK)'),
('en', 'English', '_LANGID(LANG_ENGLISH)'),
('es', 'Spanish (Español)', '_LANGID(LANG_SPANISH)'),
('et', 'Estonian (Eesti)', '_LANGID(LANG_ESTONIAN)'),
('eu', 'Basque (Euskara)', '_LANGID(LANG_BASQUE)'),
('fa', 'Persian (فارسی)', '_LANGID(LANG_FARSI)', 'RTL'),
('fi', 'Finnish (Suomi)', '_LANGID(LANG_FINNISH)'),
('fr', 'French (Français)', '_LANGID(LANG_FRENCH)'),
('fy-nl', 'Frisian (Frysk)', '_LANGID(LANG_FRISIAN)'),
('ga', 'Irish (Gaeilge)', '_LANGID(LANG_IRISH)'),
('gl', 'Galician (Galego)', '_LANGID(LANG_GALICIAN)'),
('he', 'Hebrew (עברית)', '_LANGID(LANG_HEBREW)', 'RTL'),
('hi', 'Hindi (हिंदी)', '_LANGID(LANG_HINDI)'),
('hr', 'Croatian (Hrvatski)', '_LANGID(LANG_CROATIAN)'),
('hu', 'Hungarian (Magyar)', '_LANGID(LANG_HUNGARIAN)'),
('id', 'Indonesian (Bahasa Indonesia)', '_LANGID(LANG_INDONESIAN)'),
('it', 'Italian (Italiano)', '_LANGID(LANG_ITALIAN)'),
('ja', 'Japanese (日本語)', '_LANGID(LANG_JAPANESE)'),
('jv', 'Javanese (ꦧꦱꦗꦮ)', '(LANGID)-1'),
('ka', 'Georgian (ქართული)', '_LANGID(LANG_GEORGIAN)'),
('kr', 'Korean (한국어)', '_LANGID(LANG_KOREAN)'),
('ku', 'Kurdish (كوردی)', 'MAKELANGID(LANG_CENTRAL_KURDISH, SUBLANG_CENTRAL_KURDISH_CENTRAL_KURDISH_IRAQ)', 'RTL'),
('kw', 'Cornish (Kernewek)', '(LANGID)-1'),
('lt', 'Lithuanian (Lietuvių)', '_LANGID(LANG_LITHUANIAN)'),
('lv', 'Latvian (latviešu valoda)', '_LANGID(LANG_LATVIAN)'),
('mk', 'Macedonian (македонски)', '_LANGID(LANG_MACEDONIAN)'),
('ml', 'Malayalam (മലയാളം)', '_LANGID(LANG_MALAYALAM)'),
('mm', 'Burmese (ဗမာ စာ)', '(LANGID)-1'),
('my', 'Malaysian (Bahasa Melayu)', '_LANGID(LANG_MALAY)'),
('ne', 'Nepali (नेपाली)', '_LANGID(LANG_NEPALI)'),
('nl', 'Dutch (Nederlands)', '_LANGID(LANG_DUTCH)'),
('nn', 'Norwegian Neo-Norwegian (Norsk nynorsk)', 'MAKELANGID(LANG_NORWEGIAN, SUBLANG_NORWEGIAN_NYNORSK)'),
('no', 'Norwegian (Norsk)', 'MAKELANGID(LANG_NORWEGIAN, SUBLANG_NORWEGIAN_BOKMAL)'),
('pa', 'Punjabi (ਪੰਜਾਬੀ)', '_LANGID(LANG_PUNJABI)'),
('pl', 'Polish (Polski)', '_LANGID(LANG_POLISH)'),
('pt', 'Portuguese - Portugal (Português)', '_LANGID(LANG_PORTUGUESE)'),
('ro', 'Romanian (Română)', '_LANGID(LANG_ROMANIAN)'),
('ru', 'Russian (Русский)', '_LANGID(LANG_RUSSIAN)'),
('si', 'Sinhala (සිංහල)', '_LANGID(LANG_SINHALESE)'),
('sk', 'Slovak (Slovenčina)', '_LANGID(LANG_SLOVAK)'),
('sl', 'Slovenian (Slovenščina)', '_LANGID(LANG_SLOVENIAN)'),
('sn', 'Shona (Shona)', '(LANGID)-1'),
('sp-rs', 'Serbian (Latin)', 'MAKELANGID(LANG_SERBIAN, SUBLANG_SERBIAN_LATIN)'),
('sq', 'Albanian (Shqip)', '_LANGID(LANG_ALBANIAN)'),
('sr-rs', 'Serbian (Cyrillic)', 'MAKELANGID(LANG_SERBIAN, SUBLANG_SERBIAN_CYRILLIC)'),
('sv', 'Swedish (Svenska)', '_LANGID(LANG_SWEDISH)'),
('ta', 'Tamil (தமிழ்)', '_LANGID(LANG_TAMIL)'),
('th', 'Thai (ภาษาไทย)', '_LANGID(LANG_THAI)'),
('tl', 'Tagalog (Tagalog)', '_LANGID(LANG_FILIPINO)'),
('tr', 'Turkish (Türkçe)', '_LANGID(LANG_TURKISH)'),
('tw', 'Chinese Traditional (繁體中文)', 'MAKELANGID(LANG_CHINESE, SUBLANG_CHINESE_TRADITIONAL)'),
('uk', 'Ukrainian (Українська)', '_LANGID(LANG_UKRAINIAN)'),
('uz', 'Uzbek (O\'zbek)', '_LANGID(LANG_UZBEK)'),
('vn', 'Vietnamese (Việt Nam)', '_LANGID(LANG_VIETNAMESE)'),
]
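# Hypothetical helpers (not part of the original script) showing how the
# table above can be consumed, e.g. a code -> name lookup and the list of
# right-to-left languages:
#
#     lang_name_by_code = dict((l[0], l[1]) for l in g_langs)
#     rtl_codes = [l[0] for l in g_langs if len(l) > 3 and l[3] == 'RTL']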
| gpl-3.0 |
greenlion/mysql-server | storage/ndb/mcc/tst/unittest2/__init__.py | 155 | 2406 | """
unittest2
unittest2 is a backport of the new features added to the unittest testing
framework in Python 2.7. It is tested to run on Python 2.4 - 2.6.
To use unittest2 instead of unittest simply replace ``import unittest`` with
``import unittest2``.
Copyright (c) 1999-2003 Steve Purcell
Copyright (c) 2003-2010 Python Software Foundation
This module is free software, and you may redistribute it and/or modify
it under the same terms as Python itself, so long as this copyright message
and disclaimer are retained in their original form.
IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
"""
__all__ = ['TestResult', 'TestCase', 'TestSuite',
'TextTestRunner', 'TestLoader', 'FunctionTestCase', 'main',
'defaultTestLoader', 'SkipTest', 'skip', 'skipIf', 'skipUnless',
'expectedFailure', 'TextTestResult', '__version__', 'collector']
__version__ = '0.5.1'
# Expose obsolete functions for backwards compatibility
__all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases'])
from unittest2.collector import collector
from unittest2.result import TestResult
from unittest2.case import (
TestCase, FunctionTestCase, SkipTest, skip, skipIf,
skipUnless, expectedFailure
)
from unittest2.suite import BaseTestSuite, TestSuite
from unittest2.loader import (
TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
findTestCases
)
from unittest2.main import TestProgram, main, main_
from unittest2.runner import TextTestRunner, TextTestResult
try:
from unittest2.signals import (
installHandler, registerResult, removeResult, removeHandler
)
except ImportError:
# Compatibility with platforms that don't have the signal module
pass
else:
__all__.extend(['installHandler', 'registerResult', 'removeResult',
'removeHandler'])
# deprecated
_TextTestResult = TextTestResult
__unittest = True | gpl-2.0 |
AlperSaltabas/OR_Tools_Google_API | examples/python/volsay.py | 34 | 1857 | # Copyright 2011 Hakan Kjellerstrand [email protected]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Volsay problem in Google or-tools.
From the OPL model volsay.mod
This model was created by Hakan Kjellerstrand ([email protected])
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
from ortools.linear_solver import pywraplp
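# For reference, the linear program being solved (the optimum can be checked
# by hand):
#
#   maximize    40 * Gas + 50 * Chloride
#   subject to       Gas +     Chloride <=  50
#               3 * Gas + 4 * Chloride  <= 180
#               Gas, Chloride >= 0
#
# The two constraints bind at Gas = 20, Chloride = 30, giving the objective
# value 40 * 20 + 50 * 30 = 2300.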
def main(unused_argv):
# Create the solver.
# using GLPK
solver = pywraplp.Solver('CoinsGridGLPK',
pywraplp.Solver.GLPK_LINEAR_PROGRAMMING)
# Using CLP
# solver = pywraplp.Solver('CoinsGridCLP',
# pywraplp.Solver.CLP_LINEAR_PROGRAMMING)
# data
# declare variables
Gas = solver.NumVar(0, 100000, 'Gas')
Chloride = solver.NumVar(0, 100000, 'Cloride')
#
# constraints
#
solver.Add(Gas + Chloride <= 50)
solver.Add(3 * Gas + 4 * Chloride <= 180)
# objective
objective = solver.Maximize(40 * Gas + 50 * Chloride)
print 'NumConstraints:', solver.NumConstraints()
#
# solution and search
#
solver.Solve()
print
print 'objective = ', solver.Objective().Value()
print 'Gas = ', Gas.SolutionValue(), 'ReducedCost =', Gas.ReducedCost()
print 'Chloride:', Chloride.SolutionValue(), 'ReducedCost =', Chloride.ReducedCost()
if __name__ == '__main__':
main('Volsay')
| apache-2.0 |
tibotic/simple-pokemongo-bot | pokemongo_bot/health_record/bot_event.py | 17 | 2639 | # -*- coding: utf-8 -*-
from time import sleep
import logging
from raven import Client
import raven
import os
import uuid
import requests
import time
class BotEvent(object):
def __init__(self, config):
self.config = config
self.logger = logging.getLogger(__name__)
# UniversalAnalytics can be reviewed here:
# https://github.com/analytics-pros/universal-analytics-python
if self.config.health_record:
self.logger.info('Health check is enabled. For more information:')
self.logger.info('https://github.com/PokemonGoF/PokemonGo-Bot/tree/dev#analytics')
self.client = Client(
dsn='https://8abac56480f34b998813d831de262514:[email protected]/90254',
name='PokemonGof-Bot',
processors = (
'raven.processors.SanitizePasswordsProcessor',
'raven.processors.RemoveStackLocalsProcessor'
),
install_logging_hook = False,
hook_libraries = (),
enable_breadcrumbs = False,
logging = False,
context = {}
)
self.client_id = uuid.uuid4()
self.heartbeat_wait = 30 # seconds
self.last_heartbeat = time.time()
def capture_error(self):
if self.config.health_record:
self.client.captureException()
def login_success(self):
if self.config.health_record:
self.last_heartbeat = time.time()
self.track_url('/loggedin')
def login_failed(self):
if self.config.health_record:
self.track_url('/login')
def login_retry(self):
if self.config.health_record:
self.track_url('/relogin')
def logout(self):
if self.config.health_record:
self.track_url('/logout')
def heartbeat(self):
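        # Rate-limit heartbeats: only report one if at least
        # self.heartbeat_wait seconds have passed since the last report.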
if self.config.health_record:
current_time = time.time()
if current_time - self.heartbeat_wait > self.last_heartbeat:
self.last_heartbeat = current_time
self.track_url('/heartbeat')
def track_url(self, path):
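        # Google Analytics Measurement Protocol payload: v = protocol
        # version, tid = tracking id, cid = anonymous client id,
        # t = hit type, dp = document path being recorded.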
data = {
'v': '1',
'tid': 'UA-81469507-1',
'aip': '1', # Anonymize IPs
'cid': self.client_id,
't': 'pageview',
'dp': path
}
try:
response = requests.post(
'http://www.google-analytics.com/collect', data=data)
response.raise_for_status()
except requests.exceptions.HTTPError:
pass
| mit |
andreasots/qrpth.eu | prism.py | 1 | 2192 | from server import app
import datetime
import psycopg2
import flask
import pytz
import re
TIMEZONE = pytz.timezone("America/Vancouver")
CONN = "postgres:///lrrbot"
def convert_timezone(row):
return (row[0], row[1], TIMEZONE.normalize(row[2].astimezone(TIMEZONE)))
@app.route("/prism/")
def prism():
with psycopg2.connect(CONN) as conn:
with conn.cursor() as cur:
cur.execute("SELECT MIN(time) FROM log")
start = next(iter(cur))[0]
start = TIMEZONE.localize(datetime.datetime(start.year, start.month, start.day))
stop = datetime.datetime.now(TIMEZONE)
stop = TIMEZONE.localize(datetime.datetime(stop.year, stop.month, stop.day, 23, 59, 59)) + datetime.timedelta(seconds=1)
days = []
while start < stop:
days += [(start, start+datetime.timedelta(days=1))]
start += datetime.timedelta(days=1)
return flask.render_template("prism.html", page="prism", days=days)
@app.route("/prism/log")
def logs():
start = datetime.datetime.fromtimestamp(float(flask.request.args["start"]), pytz.utc)
stop = datetime.datetime.fromtimestamp(float(flask.request.args["stop"]), pytz.utc)
with psycopg2.connect(CONN) as conn:
with conn.cursor() as cur:
cur.execute("SELECT id, messagehtml, time FROM log WHERE target = '#loadingreadyrun' AND time BETWEEN %s AND %s ORDER BY TIME", (start, stop))
return flask.render_template("prism-log.html", page="prism", messages=map(convert_timezone, cur))
@app.route("/prism/search")
def search():
query = flask.request.args["q"]
with psycopg2.connect(CONN) as conn:
with conn.cursor() as cur:
cur.execute("""
SELECT id, messagehtml, time, TS_RANK_CD(TO_TSVECTOR('english', message), query) AS rank
FROM log, PLAINTO_TSQUERY('english', %s) query
WHERE TO_TSVECTOR('english', message) @@ query
ORDER BY rank DESC
""", (query, ))
return flask.render_template("prism-search.html", page="prism", messages=map(convert_timezone, cur), query=query)
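# The search above uses PostgreSQL full-text search: TO_TSVECTOR normalises
# each message into lexemes, PLAINTO_TSQUERY turns the user's plain-text query
# into a tsquery, and TS_RANK_CD orders the matches by cover density.  A
# stripped-down equivalent (query text is an example only):
#
#     SELECT id FROM log
#     WHERE TO_TSVECTOR('english', message)
#           @@ PLAINTO_TSQUERY('english', 'desert bus');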
| gpl-2.0 |
Franky333/crazyflie-clients-python | src/cfclient/headless.py | 1 | 6707 | # -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Headless client for the Crazyflie.
"""
import logging
import os
import signal
import sys
import cfclient.utils
import cflib.crtp
from cfclient.utils.input import JoystickReader
from cflib.crazyflie import Crazyflie
if os.name == 'posix':
print('Disabling standard output for libraries!')
stdout = os.dup(1)
os.dup2(os.open('/dev/null', os.O_WRONLY), 1)
sys.stdout = os.fdopen(stdout, 'w')
# set SDL to use the dummy NULL video driver,
# so it doesn't need a windowing system.
os.environ["SDL_VIDEODRIVER"] = "dummy"
class HeadlessClient():
"""Crazyflie headless client"""
def __init__(self):
"""Initialize the headless client and libraries"""
cflib.crtp.init_drivers()
self._jr = JoystickReader(do_device_discovery=False)
self._cf = Crazyflie(ro_cache=None,
rw_cache=cfclient.config_path + "/cache")
signal.signal(signal.SIGINT, signal.SIG_DFL)
self._devs = []
for d in self._jr.available_devices():
self._devs.append(d.name)
def setup_controller(self, input_config, input_device=0, xmode=False):
"""Set up the device reader"""
# Set up the joystick reader
self._jr.device_error.add_callback(self._input_dev_error)
print("Client side X-mode: %s" % xmode)
if (xmode):
self._cf.commander.set_client_xmode(xmode)
devs = self._jr.available_devices() # noqa, is this a bug?
print("Will use [%s] for input" % self._devs[input_device])
self._jr.start_input(self._devs[input_device])
self._jr.set_input_map(self._devs[input_device], input_config)
def controller_connected(self):
""" Return True if a controller is connected"""
return True if (len(self._jr.available_devices()) > 0) else False
def list_controllers(self):
"""List the available controllers and input mapping"""
print("\nAvailable controllers:")
for i, dev in enumerate(self._devs):
print(" - Controller #{}: {}".format(i, dev))
print("\nAvailable input mapping:")
for map in os.listdir(cfclient.config_path + '/input'):
print(" - " + map.split(".json")[0])
def connect_crazyflie(self, link_uri):
"""Connect to a Crazyflie on the given link uri"""
self._cf.connection_failed.add_callback(self._connection_failed)
# 2014-11-25 chad: Add a callback for when we have a good connection.
self._cf.connected.add_callback(self._connected)
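        # The HMC5883L parameter value arrives as the string "True" or
        # "False"; eval() turns it into a bool before toggling alt-hold
        # availability.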
self._cf.param.add_update_callback(
group="imu_sensors", name="HMC5883L", cb=(
lambda name, found: self._jr.set_alt_hold_available(
eval(found))))
self._jr.assisted_control_updated.add_callback(
lambda enabled: self._cf.param.set_value("flightmode.althold",
enabled))
self._cf.open_link(link_uri)
self._jr.input_updated.add_callback(self._cf.commander.send_setpoint)
def _connected(self, link):
"""Callback for a successful Crazyflie connection."""
print("Connected to {}".format(link))
def _connection_failed(self, link, message):
"""Callback for a failed Crazyflie connection"""
print("Connection failed on {}: {}".format(link, message))
sys.exit(-1)
def _input_dev_error(self, message):
"""Callback for an input device error"""
print("Error when reading device: {}".format(message))
sys.exit(-1)
def main():
"""Main Crazyflie headless application"""
import argparse
parser = argparse.ArgumentParser(prog="cfheadless")
parser.add_argument("-u", "--uri", action="store", dest="uri", type=str,
default="radio://0/10/250K",
help="URI to use for connection to the Crazyradio"
" dongle, defaults to radio://0/10/250K")
parser.add_argument("-i", "--input", action="store", dest="input",
type=str, default="PS3_Mode_1",
help="Input mapping to use for the controller,"
"defaults to PS3_Mode_1")
parser.add_argument("-d", "--debug", action="store_true", dest="debug",
help="Enable debug output")
parser.add_argument("-c", "--controller", action="store", type=int,
dest="controller", default=0,
help="Use controller with specified id,"
" id defaults to 0")
parser.add_argument("--controllers", action="store_true",
dest="list_controllers",
help="Only display available controllers and exit")
parser.add_argument("-x", "--x-mode", action="store_true",
dest="xmode",
help="Enable client-side X-mode")
(args, unused) = parser.parse_known_args()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
headless = HeadlessClient()
if (args.list_controllers):
headless.list_controllers()
else:
if headless.controller_connected():
headless.setup_controller(input_config=args.input,
input_device=args.controller,
xmode=args.xmode)
headless.connect_crazyflie(link_uri=args.uri)
else:
print("No input-device connected, exiting!")
if __name__ == "__main__":
main()
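# Illustrative invocations (the URI and mapping name are just the defaults
# repeated; any valid values work):
#
#     cfheadless --controllers
#     cfheadless -u radio://0/10/250K -i PS3_Mode_1 -c 0 -x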
| gpl-2.0 |
NaturalGIS/naturalgis_qgis | python/plugins/processing/modeler/ModelerScene.py | 33 | 1934 | # -*- coding: utf-8 -*-
"""
***************************************************************************
ModelerScene.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
from qgis.gui import QgsModelGraphicsScene
from processing.modeler.ModelerGraphicItem import (
ModelerInputGraphicItem,
ModelerOutputGraphicItem,
ModelerChildAlgorithmGraphicItem
)
class ModelerScene(QgsModelGraphicsScene):
"""
IMPORTANT! This is intentionally a MINIMAL class, only containing code which HAS TO BE HERE
because it contains Python code for compatibility with deprecated methods ONLY.
Don't add anything here -- edit the c++ base class instead!
"""
def __init__(self, parent=None):
super().__init__(parent)
def createParameterGraphicItem(self, model, param):
return ModelerInputGraphicItem(param.clone(), model)
def createChildAlgGraphicItem(self, model, child):
return ModelerChildAlgorithmGraphicItem(child.clone(), model)
def createOutputGraphicItem(self, model, output):
return ModelerOutputGraphicItem(output.clone(), model)
| gpl-2.0 |
rahul67/hue | desktop/core/ext-py/boto-2.38.0/boto/sdb/queryresultset.py | 153 | 3674 | from boto.compat import six
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
def query_lister(domain, query='', max_items=None, attr_names=None):
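    # Page through SimpleDB results: each response carries a next_token that
    # resumes the query where the previous page ended; iteration stops once
    # no token is returned or max_items results have been yielded.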
more_results = True
num_results = 0
next_token = None
while more_results:
rs = domain.connection.query_with_attributes(domain, query, attr_names,
next_token=next_token)
for item in rs:
if max_items:
if num_results == max_items:
raise StopIteration
yield item
num_results += 1
next_token = rs.next_token
more_results = next_token is not None
class QueryResultSet(object):
def __init__(self, domain=None, query='', max_items=None, attr_names=None):
self.max_items = max_items
self.domain = domain
self.query = query
self.attr_names = attr_names
def __iter__(self):
return query_lister(self.domain, self.query, self.max_items, self.attr_names)
def select_lister(domain, query='', max_items=None):
more_results = True
num_results = 0
next_token = None
while more_results:
rs = domain.connection.select(domain, query, next_token=next_token)
for item in rs:
if max_items:
if num_results == max_items:
raise StopIteration
yield item
num_results += 1
next_token = rs.next_token
more_results = next_token is not None
class SelectResultSet(object):
def __init__(self, domain=None, query='', max_items=None,
next_token=None, consistent_read=False):
self.domain = domain
self.query = query
self.consistent_read = consistent_read
self.max_items = max_items
self.next_token = next_token
def __iter__(self):
more_results = True
num_results = 0
while more_results:
rs = self.domain.connection.select(self.domain, self.query,
next_token=self.next_token,
consistent_read=self.consistent_read)
for item in rs:
if self.max_items and num_results >= self.max_items:
raise StopIteration
yield item
num_results += 1
self.next_token = rs.next_token
if self.max_items and num_results >= self.max_items:
raise StopIteration
more_results = self.next_token is not None
def next(self):
return next(self.__iter__())
| apache-2.0 |
imtapps/django-imt-fork | tests/regressiontests/expressions_regress/tests.py | 46 | 15966 | """
Spanning tests for all the operations that F() expressions can perform.
"""
from __future__ import absolute_import
import datetime
from django.db import connection
from django.db.models import F
from django.test import TestCase, Approximate, skipUnlessDBFeature
from .models import Number, Experiment
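# F('field') makes the database, not Python, evaluate the expression: e.g.
# Number.objects.update(integer=F('integer') + 1) increments every matched row
# in a single UPDATE, and Number.objects.exclude(float=F('integer')) compares
# two columns of the same row.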
class ExpressionsRegressTests(TestCase):
def setUp(self):
Number(integer=-1).save()
Number(integer=42).save()
Number(integer=1337).save()
self.assertEqual(Number.objects.update(float=F('integer')), 3)
def test_fill_with_value_from_same_object(self):
"""
We can fill a value in all objects with an other value of the
same object.
"""
self.assertQuerysetEqual(
Number.objects.all(),
[
'<Number: -1, -1.000>',
'<Number: 42, 42.000>',
'<Number: 1337, 1337.000>'
]
)
def test_increment_value(self):
"""
We can increment a value of all objects in a query set.
"""
self.assertEqual(
Number.objects.filter(integer__gt=0)
.update(integer=F('integer') + 1),
2)
self.assertQuerysetEqual(
Number.objects.all(),
[
'<Number: -1, -1.000>',
'<Number: 43, 42.000>',
'<Number: 1338, 1337.000>'
]
)
def test_filter_not_equals_other_field(self):
"""
We can filter for objects, where a value is not equals the value
of an other field.
"""
self.assertEqual(
Number.objects.filter(integer__gt=0)
.update(integer=F('integer') + 1),
2)
self.assertQuerysetEqual(
Number.objects.exclude(float=F('integer')),
[
'<Number: 43, 42.000>',
'<Number: 1338, 1337.000>'
]
)
def test_complex_expressions(self):
"""
Complex expressions of different connection types are possible.
"""
n = Number.objects.create(integer=10, float=123.45)
self.assertEqual(Number.objects.filter(pk=n.pk)
.update(float=F('integer') + F('float') * 2),
1)
self.assertEqual(Number.objects.get(pk=n.pk).integer, 10)
self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3))
class ExpressionOperatorTests(TestCase):
def setUp(self):
self.n = Number.objects.create(integer=42, float=15.5)
def test_lefthand_addition(self):
# LH Addition of floats and integers
Number.objects.filter(pk=self.n.pk).update(
integer=F('integer') + 15,
float=F('float') + 42.7
)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))
def test_lefthand_subtraction(self):
# LH Subtraction of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15,
float=F('float') - 42.7)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3))
def test_lefthand_multiplication(self):
# Multiplication of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15,
float=F('float') * 42.7)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))
def test_lefthand_division(self):
# LH Division of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2,
float=F('float') / 42.7)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3))
def test_lefthand_modulo(self):
# LH Modulo arithmetic on integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
def test_lefthand_bitwise_and(self):
# LH Bitwise ands on integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitand(56))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
@skipUnlessDBFeature('supports_bitwise_or')
def test_lefthand_bitwise_or(self):
# LH Bitwise or on integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer').bitor(48))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
def test_right_hand_addition(self):
# Right hand operators
Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'),
float=42.7 + F('float'))
# RH Addition of floats and integers
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))
def test_right_hand_subtraction(self):
Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'),
float=42.7 - F('float'))
# RH Subtraction of floats and integers
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3))
def test_right_hand_multiplication(self):
# RH Multiplication of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'),
float=42.7 * F('float'))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))
def test_right_hand_division(self):
# RH Division of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'),
float=42.7 / F('float'))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3))
def test_right_hand_modulo(self):
# RH Modulo arithmetic on integers
Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer'))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
class FTimeDeltaTests(TestCase):
def setUp(self):
sday = datetime.date(2010, 6, 25)
stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000)
midnight = datetime.time(0)
delta0 = datetime.timedelta(0)
delta1 = datetime.timedelta(microseconds=253000)
delta2 = datetime.timedelta(seconds=44)
delta3 = datetime.timedelta(hours=21, minutes=8)
delta4 = datetime.timedelta(days=10)
# Test data is set so that deltas and delays will be
# strictly increasing.
self.deltas = []
self.delays = []
self.days_long = []
# e0: started same day as assigned, zero duration
end = stime+delta0
e0 = Experiment.objects.create(name='e0', assigned=sday, start=stime,
end=end, completed=end.date())
self.deltas.append(delta0)
self.delays.append(e0.start-
datetime.datetime.combine(e0.assigned, midnight))
self.days_long.append(e0.completed-e0.assigned)
# e1: started one day after assigned, tiny duration, data
# set so that end time has no fractional seconds, which
# tests an edge case on sqlite. This Experiment is only
# included in the test data when the DB supports microsecond
# precision.
if connection.features.supports_microsecond_precision:
delay = datetime.timedelta(1)
end = stime + delay + delta1
e1 = Experiment.objects.create(name='e1', assigned=sday,
start=stime+delay, end=end, completed=end.date())
self.deltas.append(delta1)
self.delays.append(e1.start-
datetime.datetime.combine(e1.assigned, midnight))
self.days_long.append(e1.completed-e1.assigned)
# e2: started three days after assigned, small duration
end = stime+delta2
e2 = Experiment.objects.create(name='e2',
assigned=sday-datetime.timedelta(3), start=stime, end=end,
completed=end.date())
self.deltas.append(delta2)
self.delays.append(e2.start-
datetime.datetime.combine(e2.assigned, midnight))
self.days_long.append(e2.completed-e2.assigned)
# e3: started four days after assigned, medium duration
delay = datetime.timedelta(4)
end = stime + delay + delta3
e3 = Experiment.objects.create(name='e3',
assigned=sday, start=stime+delay, end=end, completed=end.date())
self.deltas.append(delta3)
self.delays.append(e3.start-
datetime.datetime.combine(e3.assigned, midnight))
self.days_long.append(e3.completed-e3.assigned)
# e4: started 10 days after assignment, long duration
end = stime + delta4
e4 = Experiment.objects.create(name='e4',
assigned=sday-datetime.timedelta(10), start=stime, end=end,
completed=end.date())
self.deltas.append(delta4)
self.delays.append(e4.start-
datetime.datetime.combine(e4.assigned, midnight))
self.days_long.append(e4.completed-e4.assigned)
self.expnames = [e.name for e in Experiment.objects.all()]
def test_delta_add(self):
for i in range(len(self.deltas)):
delta = self.deltas[i]
test_set = [e.name for e in
Experiment.objects.filter(end__lt=F('start')+delta)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(end__lte=F('start')+delta)]
self.assertEqual(test_set, self.expnames[:i+1])
def test_delta_subtract(self):
for i in range(len(self.deltas)):
delta = self.deltas[i]
test_set = [e.name for e in
Experiment.objects.filter(start__gt=F('end')-delta)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(start__gte=F('end')-delta)]
self.assertEqual(test_set, self.expnames[:i+1])
def test_exclude(self):
for i in range(len(self.deltas)):
delta = self.deltas[i]
test_set = [e.name for e in
Experiment.objects.exclude(end__lt=F('start')+delta)]
self.assertEqual(test_set, self.expnames[i:])
test_set = [e.name for e in
Experiment.objects.exclude(end__lte=F('start')+delta)]
self.assertEqual(test_set, self.expnames[i+1:])
def test_date_comparison(self):
for i in range(len(self.days_long)):
days = self.days_long[i]
test_set = [e.name for e in
Experiment.objects.filter(completed__lt=F('assigned')+days)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(completed__lte=F('assigned')+days)]
self.assertEqual(test_set, self.expnames[:i+1])
@skipUnlessDBFeature("supports_mixed_date_datetime_comparisons")
def test_mixed_comparisons1(self):
for i in range(len(self.delays)):
delay = self.delays[i]
if not connection.features.supports_microsecond_precision:
delay = datetime.timedelta(delay.days, delay.seconds)
test_set = [e.name for e in
Experiment.objects.filter(assigned__gt=F('start')-delay)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(assigned__gte=F('start')-delay)]
self.assertEqual(test_set, self.expnames[:i+1])
def test_mixed_comparisons2(self):
delays = [datetime.timedelta(delay.days) for delay in self.delays]
for i in range(len(delays)):
delay = delays[i]
test_set = [e.name for e in
Experiment.objects.filter(start__lt=F('assigned')+delay)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(start__lte=F('assigned')+delay+
datetime.timedelta(1))]
self.assertEqual(test_set, self.expnames[:i+1])
def test_delta_update(self):
for i in range(len(self.deltas)):
delta = self.deltas[i]
exps = Experiment.objects.all()
expected_durations = [e.duration() for e in exps]
expected_starts = [e.start+delta for e in exps]
expected_ends = [e.end+delta for e in exps]
Experiment.objects.update(start=F('start')+delta, end=F('end')+delta)
exps = Experiment.objects.all()
new_starts = [e.start for e in exps]
new_ends = [e.end for e in exps]
new_durations = [e.duration() for e in exps]
self.assertEqual(expected_starts, new_starts)
self.assertEqual(expected_ends, new_ends)
self.assertEqual(expected_durations, new_durations)
def test_delta_invalid_op_mult(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start')*self.deltas[0]))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to multiply datetime by timedelta.")
def test_delta_invalid_op_div(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start')/self.deltas[0]))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to divide datetime by timedelta.")
def test_delta_invalid_op_mod(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start')%self.deltas[0]))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to modulo divide datetime by timedelta.")
def test_delta_invalid_op_and(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start').bitand(self.deltas[0])))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to binary and a datetime with a timedelta.")
def test_delta_invalid_op_or(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start').bitor(self.deltas[0])))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to binary or a datetime with a timedelta.")
| bsd-3-clause |
Jank1310/gtest-mirror | test/gtest_color_test.py | 3259 | 4911 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly determines whether to use colors."""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
COLOR_ENV_VAR = 'GTEST_COLOR'
COLOR_FLAG = 'gtest_color'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_color_test_')
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
os.environ[env_var] = value
elif env_var in os.environ:
del os.environ[env_var]
def UsesColor(term, color_env_var, color_flag):
"""Runs gtest_color_test_ and returns its exit code."""
SetEnvVar('TERM', term)
SetEnvVar(COLOR_ENV_VAR, color_env_var)
if color_flag is None:
args = []
else:
args = ['--%s=%s' % (COLOR_FLAG, color_flag)]
p = gtest_test_utils.Subprocess([COMMAND] + args)
return not p.exited or p.exit_code
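# Note: gtest_color_test_ encodes its color decision in the exit code, so a
# non-zero code (or an abnormal termination) is read as "colors were used".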
class GTestColorTest(gtest_test_utils.TestCase):
def testNoEnvVarNoFlag(self):
"""Tests the case when there's neither GTEST_COLOR nor --gtest_color."""
if not IS_WINDOWS:
self.assert_(not UsesColor('dumb', None, None))
self.assert_(not UsesColor('emacs', None, None))
self.assert_(not UsesColor('xterm-mono', None, None))
self.assert_(not UsesColor('unknown', None, None))
self.assert_(not UsesColor(None, None, None))
self.assert_(UsesColor('linux', None, None))
self.assert_(UsesColor('cygwin', None, None))
self.assert_(UsesColor('xterm', None, None))
self.assert_(UsesColor('xterm-color', None, None))
self.assert_(UsesColor('xterm-256color', None, None))
def testFlagOnly(self):
"""Tests the case when there's --gtest_color but not GTEST_COLOR."""
self.assert_(not UsesColor('dumb', None, 'no'))
self.assert_(not UsesColor('xterm-color', None, 'no'))
if not IS_WINDOWS:
self.assert_(not UsesColor('emacs', None, 'auto'))
self.assert_(UsesColor('xterm', None, 'auto'))
self.assert_(UsesColor('dumb', None, 'yes'))
self.assert_(UsesColor('xterm', None, 'yes'))
def testEnvVarOnly(self):
"""Tests the case when there's GTEST_COLOR but not --gtest_color."""
self.assert_(not UsesColor('dumb', 'no', None))
self.assert_(not UsesColor('xterm-color', 'no', None))
if not IS_WINDOWS:
self.assert_(not UsesColor('dumb', 'auto', None))
self.assert_(UsesColor('xterm-color', 'auto', None))
self.assert_(UsesColor('dumb', 'yes', None))
self.assert_(UsesColor('xterm-color', 'yes', None))
def testEnvVarAndFlag(self):
"""Tests the case when there are both GTEST_COLOR and --gtest_color."""
self.assert_(not UsesColor('xterm-color', 'no', 'no'))
self.assert_(UsesColor('dumb', 'no', 'yes'))
self.assert_(UsesColor('xterm-color', 'no', 'auto'))
def testAliasesOfYesAndNo(self):
"""Tests using aliases in specifying --gtest_color."""
self.assert_(UsesColor('dumb', None, 'true'))
self.assert_(UsesColor('dumb', None, 'YES'))
self.assert_(UsesColor('dumb', None, 'T'))
self.assert_(UsesColor('dumb', None, '1'))
self.assert_(not UsesColor('xterm', None, 'f'))
self.assert_(not UsesColor('xterm', None, 'false'))
self.assert_(not UsesColor('xterm', None, '0'))
self.assert_(not UsesColor('xterm', None, 'unknown'))
if __name__ == '__main__':
gtest_test_utils.Main()
| bsd-3-clause |
factorlibre/OCB | addons/note/__openerp__.py | 260 | 2182 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Notes',
'version': '1.0',
'category': 'Tools',
'description': """
This module allows users to create their own notes inside OpenERP
=================================================================
Use notes to write meeting minutes, organize ideas, and keep personal to-do
lists. Each user manages his own personal notes. Notes are visible to their
authors only, but they can be shared with other users so that several people
can work on the same note in real time, which makes them very efficient for
sharing meeting minutes.
Notes can be found in the 'Home' menu.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/notes',
'summary': 'Sticky notes, Collaborative, Memos',
'sequence': 9,
'depends': [
'mail',
],
'data': [
'security/note_security.xml',
'security/ir.rule.xml',
'security/ir.model.access.csv',
'note_data.xml',
'note_view.xml',
'views/note.xml',
],
'demo': [
'note_demo.xml',
],
'test': [
],
'installable': True,
'application': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
emilroz/openmicroscopy | components/tools/OmeroPy/src/path.py | 1 | 34018 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" path.py - An object representing a path to a file or directory.
Example:
from path import path
d = path('/home/guido/bin')
for f in d.files('*.py'):
f.chmod(0755)
This module requires Python 2.2 or later.
URL: http://www.jorendorff.com/articles/python/path
Author: Jason Orendorff <jason.orendorff\x40gmail\x2ecom> (and others - see the url!)
Date: 9 Mar 2007
+------------------------------------------------------------------------------------+
Added by Colin Blackburn 17 Jun 2009
The above URL is dead and development of path.py has ceased. This copy of
path.py is now maintained by the OMERO developers.
For reference, the original package is now available from:
http://pypi.python.org/pypi/path.py/2.2
Notes:
* The functionality of the method read_md5() has been removed from this copy.
This is due to the changes in the hash libraries between Python 2.4 and 2.5.
* Added parpath (2009/09/21)
* Improved listdir to handle unreadable directories. See #9156. (2013/03/06)
* Merged changes from https://github.com/jaraco/path.py/commit/bee13d0de4d44599d397b603e1c83dce1ce9818d (2014/04/28)
"""
# TODO
# - Tree-walking functions don't avoid symlink loops. Matt Harrison
# sent me a patch for this.
# - Bug in write_text(). It doesn't support Universal newline mode.
# - Better error message in listdir() when self isn't a
# directory. (On Windows, the error message really sucks.)
# - Make sure everything has a good docstring.
# - Add methods for regex find and replace.
# - guess_content_type() method?
# - Perhaps support arguments to touch().
from __future__ import generators
import sys, warnings, os, fnmatch, glob, shutil, codecs
__version__ = '2.2'
__all__ = ['path']
# Platform-specific support for path.owner
if os.name == 'nt':
try:
import win32security
except ImportError:
win32security = None
else:
try:
import pwd
except ImportError:
pwd = None
# Pre-2.3 support. Are unicode filenames supported?
_base = str
_getcwd = os.getcwd
try:
if os.path.supports_unicode_filenames:
_base = unicode
_getcwd = os.getcwdu
except AttributeError:
pass
# Pre-2.3 workaround for booleans
try:
True, False
except NameError:
True, False = 1, 0
# Pre-2.3 workaround for basestring.
try:
basestring
except NameError:
basestring = (str, unicode)
# Universal newline support
_textmode = 'r'
if hasattr(file, 'newlines'):
_textmode = 'U'
class TreeWalkWarning(Warning):
pass
class path(_base):
""" Represents a filesystem path.
For documentation on individual methods, consult their
counterparts in os.path.
"""
# --- Special Python methods.
def __repr__(self):
return 'path(%s)' % _base.__repr__(self)
# Adding a path and a string yields a path.
def __add__(self, more):
try:
resultStr = _base.__add__(self, more)
except TypeError: #Python bug
resultStr = NotImplemented
if resultStr is NotImplemented:
return resultStr
return self.__class__(resultStr)
def __radd__(self, other):
if isinstance(other, basestring):
return self.__class__(other.__add__(self))
else:
return NotImplemented
# The / operator joins paths.
def __div__(self, rel):
""" fp.__div__(rel) == fp / rel == fp.joinpath(rel)
Join two path components, adding a separator character if
needed.
"""
return self.__class__(os.path.join(self, rel))
# Make the / operator work even when true division is enabled.
__truediv__ = __div__
def getcwd(cls):
""" Return the current working directory as a path object. """
return cls(_getcwd())
getcwd = classmethod(getcwd)
# --- Operations on path strings.
isabs = os.path.isabs
def abspath(self): return self.__class__(os.path.abspath(self))
def normcase(self): return self.__class__(os.path.normcase(self))
def normpath(self): return self.__class__(os.path.normpath(self))
def realpath(self): return self.__class__(os.path.realpath(self))
def expanduser(self): return self.__class__(os.path.expanduser(self))
def expandvars(self): return self.__class__(os.path.expandvars(self))
def dirname(self): return self.__class__(os.path.dirname(self))
basename = os.path.basename
def expand(self):
""" Clean up a filename by calling expandvars(),
expanduser(), and normpath() on it.
This is commonly everything needed to clean up a filename
read from a configuration file, for example.
"""
return self.expandvars().expanduser().normpath()
def _get_namebase(self):
base, ext = os.path.splitext(self.name)
return base
def _get_ext(self):
f, ext = os.path.splitext(_base(self))
return ext
def _get_drive(self):
drive, r = os.path.splitdrive(self)
return self.__class__(drive)
parent = property(
dirname, None, None,
""" This path's parent directory, as a new path object.
For example, path('/usr/local/lib/libpython.so').parent == path('/usr/local/lib')
""")
name = property(
basename, None, None,
""" The name of this file or directory without the full path.
For example, path('/usr/local/lib/libpython.so').name == 'libpython.so'
""")
namebase = property(
_get_namebase, None, None,
""" The same as path.name, but with one file extension stripped off.
For example, path('/home/guido/python.tar.gz').name == 'python.tar.gz',
but path('/home/guido/python.tar.gz').namebase == 'python.tar'
""")
ext = property(
_get_ext, None, None,
""" The file extension, for example '.py'. """)
drive = property(
_get_drive, None, None,
""" The drive specifier, for example 'C:'.
This is always empty on systems that don't use drive specifiers.
""")
def splitpath(self):
""" p.splitpath() -> Return (p.parent, p.name). """
parent, child = os.path.split(self)
return self.__class__(parent), child
def splitdrive(self):
""" p.splitdrive() -> Return (p.drive, <the rest of p>).
Split the drive specifier from this path. If there is
no drive specifier, p.drive is empty, so the return value
is simply (path(''), p). This is always the case on Unix.
"""
drive, rel = os.path.splitdrive(self)
return self.__class__(drive), rel
def splitext(self):
""" p.splitext() -> Return (p.stripext(), p.ext).
Split the filename extension from this path and return
the two parts. Either part may be empty.
The extension is everything from '.' to the end of the
last path segment. This has the property that if
(a, b) == p.splitext(), then a + b == p.
"""
filename, ext = os.path.splitext(self)
return self.__class__(filename), ext
def stripext(self):
""" p.stripext() -> Remove one file extension from the path.
For example, path('/home/guido/python.tar.gz').stripext()
returns path('/home/guido/python.tar').
"""
return self.splitext()[0]
if hasattr(os.path, 'splitunc'):
def splitunc(self):
unc, rest = os.path.splitunc(self)
return self.__class__(unc), rest
def _get_uncshare(self):
unc, r = os.path.splitunc(self)
return self.__class__(unc)
uncshare = property(
_get_uncshare, None, None,
""" The UNC mount point for this path.
This is empty for paths on local drives. """)
def joinpath(self, *args):
""" Join two or more path components, adding a separator
character (os.sep) if needed. Returns a new path
object.
"""
return self.__class__(os.path.join(self, *args))
def splitall(self):
r""" Return a list of the path components in this path.
The first item in the list will be a path. Its value will be
either os.curdir, os.pardir, empty, or the root directory of
this path (for example, '/' or 'C:\\'). The other items in
the list will be strings.
path.path.joinpath(*result) will yield the original path.
"""
parts = []
loc = self
while loc != os.curdir and loc != os.pardir:
prev = loc
loc, child = prev.splitpath()
if loc == prev:
break
parts.append(child)
parts.append(loc)
parts.reverse()
return parts
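    # For example (a sketch; the root component is platform-dependent):
    #   path('/usr/local/lib').splitall() -> [path('/'), 'usr', 'local', 'lib']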
def relpath(self):
""" Return this path as a relative path,
based from the current working directory.
"""
cwd = self.__class__(os.getcwd())
return cwd.relpathto(self)
def parpath(self, dest):
"""
        Return the list of up-level ('..') path components leading from
        this path to the dest path; an empty list is returned when dest
        is not an ancestor of this path. This method can be used to test
        whether one path object is contained within another:
p1 = path.path("/tmp")
p2 = path.path("/tmp/foo")
if p2.parpath(p1):
print "p2 is contained in p1"
This method does not follow symlinks.
"""
dest = self.__class__(dest)
rel = self.relpathto(dest)
dots = rel.split(os.sep)
for dot in dots:
if os.pardir != str(dot) and os.curdir != str(dot):
return []
return dots
def relpathto(self, dest):
""" Return a relative path from self to dest.
If there is no relative path from self to dest, for example if
they reside on different drives in Windows, then this returns
dest.abspath().
"""
origin = self.abspath()
dest = self.__class__(dest).abspath()
orig_list = origin.normcase().splitall()
# Don't normcase dest! We want to preserve the case.
dest_list = dest.splitall()
if orig_list[0] != os.path.normcase(dest_list[0]):
# Can't get here from there.
return dest
# Find the location where the two paths start to differ.
i = 0
for start_seg, dest_seg in zip(orig_list, dest_list):
if start_seg != os.path.normcase(dest_seg):
break
i += 1
# Now i is the point where the two paths diverge.
# Need a certain number of "os.pardir"s to work up
# from the origin to the point of divergence.
segments = [os.pardir] * (len(orig_list) - i)
# Need to add the diverging part of dest_list.
segments += dest_list[i:]
if len(segments) == 0:
# If they happen to be identical, use os.curdir.
relpath = os.curdir
else:
relpath = os.path.join(*segments)
return self.__class__(relpath)
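    # For example (a sketch, assuming a POSIX-style layout):
    #   path('/home/guido/bin').relpathto('/home/guido/src/path.py')
    #   yields path('../src/path.py').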
# --- Listing, searching, walking, and matching
def listdir(self, pattern=None):
""" D.listdir() -> List of items in this directory.
Use D.files() or D.dirs() instead if you want a listing
of just files or just subdirectories.
The elements of the list are path objects.
With the optional 'pattern' argument, this only lists
items whose names match the given pattern.
try/except added to handle unreadable directories. See #9156.
"""
try:
names = os.listdir(self)
except OSError:
# ignore unreadable directories
names = []
if pattern is not None:
names = fnmatch.filter(names, pattern)
return [self / child for child in names]
def dirs(self, pattern=None):
""" D.dirs() -> List of this directory's subdirectories.
The elements of the list are path objects.
This does not walk recursively into subdirectories
(but see path.walkdirs).
With the optional 'pattern' argument, this only lists
directories whose names match the given pattern. For
example, d.dirs('build-*').
"""
return [p for p in self.listdir(pattern) if p.isdir()]
def files(self, pattern=None):
""" D.files() -> List of the files in this directory.
The elements of the list are path objects.
This does not walk into subdirectories (see path.walkfiles).
With the optional 'pattern' argument, this only lists files
whose names match the given pattern. For example,
d.files('*.pyc').
"""
return [p for p in self.listdir(pattern) if p.isfile()]
def walk(self, pattern=None, errors='strict'):
""" D.walk() -> iterator over files and subdirs, recursively.
The iterator yields path objects naming each child item of
this directory and its descendants. This requires that
D.isdir().
This performs a depth-first traversal of the directory tree.
Each directory is returned just before all its children.
The errors= keyword argument controls behavior when an
error occurs. The default is 'strict', which causes an
exception. The other allowed values are 'warn', which
reports the error via warnings.warn(), and 'ignore'.
"""
if errors not in ('strict', 'warn', 'ignore'):
raise ValueError("invalid errors parameter")
try:
childList = self.listdir()
except Exception:
if errors == 'ignore':
return
elif errors == 'warn':
warnings.warn(
"Unable to list directory '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
return
else:
raise
for child in childList:
if pattern is None or child.fnmatch(pattern):
yield child
try:
isdir = child.isdir()
except Exception:
if errors == 'ignore':
isdir = False
elif errors == 'warn':
warnings.warn(
"Unable to access '%s': %s"
% (child, sys.exc_info()[1]),
TreeWalkWarning)
isdir = False
else:
raise
if isdir:
for item in child.walk(pattern, errors):
yield item
def walkdirs(self, pattern=None, errors='strict'):
""" D.walkdirs() -> iterator over subdirs, recursively.
With the optional 'pattern' argument, this yields only
directories whose names match the given pattern. For
example, mydir.walkdirs('*test') yields only directories
with names ending in 'test'.
The errors= keyword argument controls behavior when an
error occurs. The default is 'strict', which causes an
exception. The other allowed values are 'warn', which
reports the error via warnings.warn(), and 'ignore'.
"""
if errors not in ('strict', 'warn', 'ignore'):
raise ValueError("invalid errors parameter")
try:
dirs = self.dirs()
except Exception:
if errors == 'ignore':
return
elif errors == 'warn':
warnings.warn(
"Unable to list directory '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
return
else:
raise
for child in dirs:
if pattern is None or child.fnmatch(pattern):
yield child
for subsubdir in child.walkdirs(pattern, errors):
yield subsubdir
def walkfiles(self, pattern=None, errors='strict'):
""" D.walkfiles() -> iterator over files in D, recursively.
The optional argument, pattern, limits the results to files
with names that match the pattern. For example,
mydir.walkfiles('*.tmp') yields only files with the .tmp
extension.
"""
if errors not in ('strict', 'warn', 'ignore'):
raise ValueError("invalid errors parameter")
try:
childList = self.listdir()
except Exception:
if errors == 'ignore':
return
elif errors == 'warn':
warnings.warn(
"Unable to list directory '%s': %s"
% (self, sys.exc_info()[1]),
TreeWalkWarning)
return
else:
raise
for child in childList:
try:
isfile = child.isfile()
isdir = not isfile and child.isdir()
            except Exception:
if errors == 'ignore':
continue
elif errors == 'warn':
warnings.warn(
"Unable to access '%s': %s"
                        % (child, sys.exc_info()[1]),
TreeWalkWarning)
continue
else:
raise
if isfile:
if pattern is None or child.fnmatch(pattern):
yield child
elif isdir:
for f in child.walkfiles(pattern, errors):
yield f
def fnmatch(self, pattern):
""" Return True if self.name matches the given pattern.
pattern - A filename pattern with wildcards,
for example '*.py'.
"""
return fnmatch.fnmatch(self.name, pattern)
def glob(self, pattern):
""" Return a list of path objects that match the pattern.
pattern - a path relative to this directory, with wildcards.
For example, path('/users').glob('*/bin/*') returns a list
of all the files users have in their bin directories.
"""
cls = self.__class__
return [cls(s) for s in glob.glob(_base(self / pattern))]
# --- Reading or writing an entire file at once.
def open(self, mode='r'):
""" Open this file. Return a file object. """
return file(self, mode)
def bytes(self):
""" Open this file, read all bytes, return them as a string. """
f = self.open('rb')
try:
return f.read()
finally:
f.close()
def write_bytes(self, bytes, append=False):
""" Open this file and write the given bytes to it.
Default behavior is to overwrite any existing file.
Call p.write_bytes(bytes, append=True) to append instead.
"""
if append:
mode = 'ab'
else:
mode = 'wb'
f = self.open(mode)
try:
f.write(bytes)
finally:
f.close()
def text(self, encoding=None, errors='strict'):
r""" Open this file, read it in, return the content as a string.
This uses 'U' mode in Python 2.3 and later, so '\r\n' and '\r'
are automatically translated to '\n'.
Optional arguments:
encoding - The Unicode encoding (or character set) of
the file. If present, the content of the file is
decoded and returned as a unicode object; otherwise
it is returned as an 8-bit str.
errors - How to handle Unicode errors; see help(str.decode)
for the options. Default is 'strict'.
"""
if encoding is None:
# 8-bit
f = self.open(_textmode)
try:
return f.read()
finally:
f.close()
else:
# Unicode
f = codecs.open(self, 'r', encoding, errors)
# (Note - Can't use 'U' mode here, since codecs.open
# doesn't support 'U' mode, even in Python 2.3.)
try:
t = f.read()
finally:
f.close()
return (t.replace(u'\r\n', u'\n')
.replace(u'\r\x85', u'\n')
.replace(u'\r', u'\n')
.replace(u'\x85', u'\n')
.replace(u'\u2028', u'\n'))
def write_text(self, text, encoding=None, errors='strict', linesep=os.linesep, append=False):
r""" Write the given text to this file.
The default behavior is to overwrite any existing file;
to append instead, use the 'append=True' keyword argument.
There are two differences between path.write_text() and
path.write_bytes(): newline handling and Unicode handling.
See below.
Parameters:
- text - str/unicode - The text to be written.
- encoding - str - The Unicode encoding that will be used.
This is ignored if 'text' isn't a Unicode string.
- errors - str - How to handle Unicode encoding errors.
Default is 'strict'. See help(unicode.encode) for the
options. This is ignored if 'text' isn't a Unicode
string.
- linesep - keyword argument - str/unicode - The sequence of
characters to be used to mark end-of-line. The default is
os.linesep. You can also specify None; this means to
leave all newlines as they are in 'text'.
- append - keyword argument - bool - Specifies what to do if
the file already exists (True: append to the end of it;
False: overwrite it.) The default is False.
--- Newline handling.
write_text() converts all standard end-of-line sequences
('\n', '\r', and '\r\n') to your platform's default end-of-line
sequence (see os.linesep; on Windows, for example, the
end-of-line marker is '\r\n').
If you don't like your platform's default, you can override it
using the 'linesep=' keyword argument. If you specifically want
write_text() to preserve the newlines as-is, use 'linesep=None'.
This applies to Unicode text the same as to 8-bit text, except
there are three additional standard Unicode end-of-line sequences:
u'\x85', u'\r\x85', and u'\u2028'.
(This is slightly different from when you open a file for
writing with fopen(filename, "w") in C or file(filename, 'w')
in Python.)
--- Unicode
If 'text' isn't Unicode, then apart from newline handling, the
bytes are written verbatim to the file. The 'encoding' and
'errors' arguments are not used and must be omitted.
If 'text' is Unicode, it is first converted to bytes using the
specified 'encoding' (or the default encoding if 'encoding'
isn't specified). The 'errors' argument applies only to this
conversion.
"""
if isinstance(text, unicode):
if linesep is not None:
# Convert all standard end-of-line sequences to
# ordinary newline characters.
text = (text.replace(u'\r\n', u'\n')
.replace(u'\r\x85', u'\n')
.replace(u'\r', u'\n')
.replace(u'\x85', u'\n')
.replace(u'\u2028', u'\n'))
text = text.replace(u'\n', linesep)
if encoding is None:
encoding = sys.getdefaultencoding()
bytes = text.encode(encoding, errors)
else:
# It is an error to specify an encoding if 'text' is
# an 8-bit string.
assert encoding is None
if linesep is not None:
text = (text.replace('\r\n', '\n')
.replace('\r', '\n'))
bytes = text.replace('\n', linesep)
self.write_bytes(bytes, append)
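    # Typical usage (a sketch): p.write_text(u'a\nb\n', encoding='utf-8')
    # writes UTF-8 bytes with os.linesep line endings; pass linesep=None to
    # keep the newlines exactly as given.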
def lines(self, encoding=None, errors='strict', retain=True):
r""" Open this file, read all lines, return them in a list.
Optional arguments:
encoding - The Unicode encoding (or character set) of
the file. The default is None, meaning the content
of the file is read as 8-bit characters and returned
as a list of (non-Unicode) str objects.
errors - How to handle Unicode errors; see help(str.decode)
for the options. Default is 'strict'
retain - If true, retain newline characters; but all newline
character combinations ('\r', '\n', '\r\n') are
translated to '\n'. If false, newline characters are
stripped off. Default is True.
This uses 'U' mode in Python 2.3 and later.
"""
if encoding is None and retain:
f = self.open(_textmode)
try:
return f.readlines()
finally:
f.close()
else:
return self.text(encoding, errors).splitlines(retain)
def write_lines(self, lines, encoding=None, errors='strict',
linesep=os.linesep, append=False):
r""" Write the given lines of text to this file.
By default this overwrites any existing file at this path.
This puts a platform-specific newline sequence on every line.
See 'linesep' below.
lines - A list of strings.
encoding - A Unicode encoding to use. This applies only if
'lines' contains any Unicode strings.
errors - How to handle errors in Unicode encoding. This
also applies only to Unicode strings.
linesep - The desired line-ending. This line-ending is
applied to every line. If a line already has any
standard line ending ('\r', '\n', '\r\n', u'\x85',
u'\r\x85', u'\u2028'), that will be stripped off and
this will be used instead. The default is os.linesep,
which is platform-dependent ('\r\n' on Windows, '\n' on
Unix, etc.) Specify None to write the lines as-is,
like file.writelines().
Use the keyword argument append=True to append lines to the
file. The default is to overwrite the file. Warning:
When you use this with Unicode data, if the encoding of the
existing data in the file is different from the encoding
you specify with the encoding= parameter, the result is
mixed-encoding data, which can really confuse someone trying
to read the file later.
"""
if append:
mode = 'ab'
else:
mode = 'wb'
f = self.open(mode)
try:
for line in lines:
isUnicode = isinstance(line, unicode)
if linesep is not None:
# Strip off any existing line-end and add the
# specified linesep string.
if isUnicode:
if line[-2:] in (u'\r\n', u'\x0d\x85'):
line = line[:-2]
elif line[-1:] in (u'\r', u'\n',
u'\x85', u'\u2028'):
line = line[:-1]
else:
if line[-2:] == '\r\n':
line = line[:-2]
elif line[-1:] in ('\r', '\n'):
line = line[:-1]
line += linesep
if isUnicode:
if encoding is None:
encoding = sys.getdefaultencoding()
line = line.encode(encoding, errors)
f.write(line)
finally:
f.close()
def read_md5(self):
""" Calculate the md5 hash for this file.
This reads through the entire file.
"""
raise Exception("read_md5 has been removed from this installation of path.py")
# --- Methods for querying the filesystem.
# N.B. On some platforms, the os.path functions may be implemented in C
# (e.g. isdir on Windows, Python 3.2.2), and compiled functions don't get
# bound. Playing it safe and wrapping them all in method calls.
def isabs(self): return os.path.isabs(self)
def exists(self): return os.path.exists(self)
def isdir(self): return os.path.isdir(self)
def isfile(self): return os.path.isfile(self)
def islink(self): return os.path.islink(self)
def ismount(self): return os.path.ismount(self)
if hasattr(os.path, 'samefile'):
        def samefile(self, other): return os.path.samefile(self, other)
def getatime(self): return os.path.getatime(self)
atime = property(
getatime, None, None,
""" Last access time of the file. """)
def getmtime(self): return os.path.getmtime(self)
mtime = property(
getmtime, None, None,
""" Last-modified time of the file. """)
if hasattr(os.path, 'getctime'):
def getctime(self): return os.path.getctime(self)
ctime = property(
getctime, None, None,
""" Creation time of the file. """)
def getsize(self): return os.path.getsize(self)
size = property(
getsize, None, None,
""" Size of the file, in bytes. """)
if hasattr(os, 'access'):
def access(self, mode):
""" Return true if current user has access to this path.
mode - One of the constants os.F_OK, os.R_OK, os.W_OK, os.X_OK
"""
return os.access(self, mode)
def stat(self):
""" Perform a stat() system call on this path. """
return os.stat(self)
def lstat(self):
""" Like path.stat(), but do not follow symbolic links. """
return os.lstat(self)
def get_owner(self):
r""" Return the name of the owner of this file or directory.
This follows symbolic links.
On Windows, this returns a name of the form ur'DOMAIN\User Name'.
On Windows, a group can own a file or directory.
"""
if os.name == 'nt':
if win32security is None:
raise Exception("path.owner requires win32all to be installed")
desc = win32security.GetFileSecurity(
self, win32security.OWNER_SECURITY_INFORMATION)
sid = desc.GetSecurityDescriptorOwner()
account, domain, typecode = win32security.LookupAccountSid(None, sid)
return domain + u'\\' + account
else:
if pwd is None:
raise NotImplementedError("path.owner is not implemented on this platform.")
st = self.stat()
return pwd.getpwuid(st.st_uid).pw_name
owner = property(
get_owner, None, None,
""" Name of the owner of this file or directory. """)
if hasattr(os, 'statvfs'):
def statvfs(self):
""" Perform a statvfs() system call on this path. """
return os.statvfs(self)
if hasattr(os, 'pathconf'):
def pathconf(self, name):
return os.pathconf(self, name)
# --- Modifying operations on files and directories
def utime(self, times):
""" Set the access and modified times of this file. """
os.utime(self, times)
def chmod(self, mode):
os.chmod(self, mode)
if hasattr(os, 'chown'):
def chown(self, uid, gid):
os.chown(self, uid, gid)
def rename(self, new):
os.rename(self, new)
def renames(self, new):
os.renames(self, new)
# --- Create/delete operations on directories
def mkdir(self, mode=0777):
os.mkdir(self, mode)
def makedirs(self, mode=0777):
os.makedirs(self, mode)
def rmdir(self):
os.rmdir(self)
def removedirs(self):
os.removedirs(self)
# --- Modifying operations on files
def touch(self):
""" Set the access/modified times of this file to the current time.
Create the file if it does not exist.
"""
fd = os.open(self, os.O_WRONLY | os.O_CREAT, 0666)
os.close(fd)
os.utime(self, None)
def remove(self):
os.remove(self)
def unlink(self):
os.unlink(self)
# --- Links
if hasattr(os, 'link'):
def link(self, newpath):
""" Create a hard link at 'newpath', pointing to this file. """
os.link(self, newpath)
if hasattr(os, 'symlink'):
def symlink(self, newlink):
""" Create a symbolic link at 'newlink', pointing here. """
os.symlink(self, newlink)
if hasattr(os, 'readlink'):
def readlink(self):
""" Return the path to which this symbolic link points.
The result may be an absolute or a relative path.
"""
return self.__class__(os.readlink(self))
def readlinkabs(self):
""" Return the path to which this symbolic link points.
The result is always an absolute path.
"""
p = self.readlink()
if p.isabs():
return p
else:
return (self.parent / p).abspath()
# --- High-level functions from shutil
copyfile = shutil.copyfile
copymode = shutil.copymode
copystat = shutil.copystat
copy = shutil.copy
copy2 = shutil.copy2
copytree = shutil.copytree
if hasattr(shutil, 'move'):
move = shutil.move
rmtree = shutil.rmtree
# --- Special stuff from os
if hasattr(os, 'chroot'):
def chroot(self):
os.chroot(self)
if hasattr(os, 'startfile'):
def startfile(self):
os.startfile(self)
| gpl-2.0 |
cloud9UG/odoo | doc/conf.py | 184 | 8222 | # -*- coding: utf-8 -*-
import sys, os
import sphinx
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
DIR = os.path.dirname(__file__)
sys.path.append(
os.path.abspath(
os.path.join(DIR, '_extensions')))
# autodoc
sys.path.append(os.path.abspath(os.path.join(DIR, '..')))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.2'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.ifconfig',
'sphinx.ext.todo',
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.linkcode',
'github_link',
'odoo',
'html_domain',
'exercise_admonition',
'patchqueue'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'odoo'
copyright = u'Odoo S.A.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '8.0'
# The full version, including alpha/beta/rc tags.
release = '8.0'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'odoo'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'odoo'
odoo_cover_default = 'banners/installing_odoo.jpg'
odoo_cover_external = {
'https://odoo.com/documentation/functional/accounting.html' : 'banners/m_accounting.jpg',
'https://odoo.com/documentation/functional/double-entry.html' : 'banners/m_1.jpg',
'https://odoo.com/documentation/functional/valuation.html' : 'banners/m_2.jpg',
}
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_extensions']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_add_permalinks = u''
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# FIXME: no sidebar on index?
html_sidebars = {
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
latex_elements = {
'papersize': r'a4paper',
'preamble': u'''\\setcounter{tocdepth}{2}
''',
}
# default must be set otherwise ifconfig blows up
todo_include_todos = False
intersphinx_mapping = {
'python': ('https://docs.python.org/2/', None),
'werkzeug': ('http://werkzeug.pocoo.org/docs/', None),
'sqlalchemy': ('http://docs.sqlalchemy.org/en/rel_0_9/', None),
'django': ('https://django.readthedocs.org/en/latest/', None),
}
github_user = 'odoo'
github_project = 'odoo'
# monkeypatch PHP lexer to not require <?php
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
lexers['php'] = PhpLexer(startinline=True)
def setup(app):
app.connect('html-page-context', canonicalize)
app.add_config_value('canonical_root', None, 'env')
app.add_config_value('canonical_branch', 'master', 'env')
app.connect('html-page-context', versionize)
app.add_config_value('versions', '', 'env')
app.connect('html-page-context', analytics)
app.add_config_value('google_analytics_key', '', 'env')
def canonicalize(app, pagename, templatename, context, doctree):
""" Adds a 'canonical' URL for the current document in the rendering
    context. Requires the ``canonical_root`` setting to be set. The canonical
branch is ``master`` but can be overridden using ``canonical_branch``.
"""
if not app.config.canonical_root:
return
context['canonical'] = _build_url(
app.config.canonical_root, app.config.canonical_branch, pagename)
def versionize(app, pagename, templatename, context, doctree):
""" Adds a version switcher below the menu, requires ``canonical_root``
    and ``versions`` (an ordered, comma-separated list of all possible
versions).
"""
if not (app.config.canonical_root and app.config.versions):
return
context['versions'] = [
(vs, _build_url(app.config.canonical_root, vs, pagename))
for vs in app.config.versions.split(',')
if vs != app.config.version
]
def analytics(app, pagename, templatename, context, doctree):
if not app.config.google_analytics_key:
return
context['google_analytics_key'] = app.config.google_analytics_key
def _build_url(root, branch, pagename):
return "{canonical_url}{canonical_branch}/{canonical_page}".format(
canonical_url=root,
canonical_branch=branch,
canonical_page=(pagename + '.html').replace('index.html', '')
.replace('index/', ''),
)
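# For example (a sketch with hypothetical values):
#   _build_url('https://www.odoo.com/documentation/', '8.0', 'index')
# yields 'https://www.odoo.com/documentation/8.0/'; the trailing 'index.html'
# is stripped so that directory indexes get clean canonical URLs.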
| agpl-3.0 |
dwfreed/mitmproxy | test/mitmproxy/console/test_pathedit.py | 4 | 2245 | import os
from os.path import normpath
from mitmproxy.tools.console import pathedit
from mitmproxy.test import tutils
from unittest.mock import patch
class TestPathCompleter:
def test_lookup_construction(self):
c = pathedit._PathCompleter()
cd = os.path.normpath(tutils.test_data.path("mitmproxy/completion"))
ca = os.path.join(cd, "a")
assert c.complete(ca).endswith(normpath("/completion/aaa"))
assert c.complete(ca).endswith(normpath("/completion/aab"))
c.reset()
ca = os.path.join(cd, "aaa")
assert c.complete(ca).endswith(normpath("/completion/aaa"))
assert c.complete(ca).endswith(normpath("/completion/aaa"))
c.reset()
assert c.complete(cd).endswith(normpath("/completion/aaa"))
def test_completion(self):
c = pathedit._PathCompleter(True)
c.reset()
c.lookup = [
("a", "x/a"),
("aa", "x/aa"),
]
assert c.complete("a") == "a"
assert c.final == "x/a"
assert c.complete("a") == "aa"
assert c.complete("a") == "a"
c = pathedit._PathCompleter(True)
r = c.complete("l")
assert c.final.endswith(r)
c.reset()
assert c.complete("/nonexistent") == "/nonexistent"
assert c.final == "/nonexistent"
c.reset()
assert c.complete("~") != "~"
c.reset()
s = "thisisatotallynonexistantpathforsure"
assert c.complete(s) == s
assert c.final == s
class TestPathEdit:
def test_keypress(self):
pe = pathedit.PathEdit("", "")
with patch('urwid.widget.Edit.get_edit_text') as get_text, \
patch('urwid.widget.Edit.set_edit_text') as set_text:
cd = os.path.normpath(tutils.test_data.path("mitmproxy/completion"))
get_text.return_value = os.path.join(cd, "a")
# Pressing tab should set completed path
pe.keypress((1,), "tab")
set_text_called_with = set_text.call_args[0][0]
assert set_text_called_with.endswith(normpath("/completion/aaa"))
# Pressing any other key should reset
pe.keypress((1,), "a")
assert pe.lookup is None
| mit |
veltzer/demos-python | src/examples/short/parsing/math_expression.py | 1 | 1056 | #!/usr/bin/env python
import pprint
import sys
import simpleparse.dispatchprocessor
import simpleparse.parser
declaration = r'''# note use of raw string when embedding in python code...
full := ws,expr,ws
number := [0-9eE+.-]+
expr := number,'+',number/number,'-',number
ws := [ \t\v]*
'''
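# Usage (a sketch): pass the expression to parse as the first command-line
# argument; parser.parse() returns a result tuple that is pretty-printed at
# the bottom of this script.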
class MyProcessorClass(simpleparse.dispatchprocessor.DispatchProcessor):
# def __init__(self):
# print('cons')
    def number(self, tup, buf):
        '''Process the given production and its children'''
        print('in number')
    def expr(self, tup, buf):
        '''Process the given production and its children'''
        print('in expr')
def __call__(self, value, data):
print('value is ' + str(value))
print('data is ' + str(data))
return value
# return super(self.__class__,self).__call__(self,value,data)
class MyParser(simpleparse.parser.Parser):
def buildProcessor(self):
return MyProcessorClass()
parser = MyParser(declaration, 'full')
pprint.pprint(parser.parse(sys.argv[1]))
| gpl-3.0 |
sfepy/sfepy | examples/linear_elasticity/dispersion_analysis.py | 2 | 35004 | #!/usr/bin/env python
"""
Dispersion analysis of a heterogeneous finite scale periodic cell.
The periodic cell mesh has to contain two subdomains Y1 (with the cell ids 1),
Y2 (with the cell ids 2), so that different material properties can be defined
in each of the subdomains (see ``--pars`` option). The command line parameters
can be given in any consistent unit set, for example the basic SI units. The
``--unit-multipliers`` option can be used to rescale the input units to ones
more suitable to the simulation, for example to prevent having different
matrix blocks with large differences of matrix entries magnitudes. The results
are then in the rescaled units.
Usage Examples
--------------
Default material parameters, a square periodic cell with a spherical inclusion,
logs also standard pressure dilatation and shear waves, no eigenvectors::
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/special/circle_in_square.mesh --log-std-waves --eigs-only
As above, with custom eigenvalue solver parameters, and different number of
eigenvalues, mesh size and units used in the calculation::
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/special/circle_in_square.mesh --solver-conf="kind='eig.scipy', method='eigsh', tol=1e-10, maxiter=1000, which='LM', sigma=0" --log-std-waves -n 5 --range=0,640,101 --mode=omega --unit-multipliers=1e-6,1e-2,1e-3 --mesh-size=1e-2 --eigs-only
Default material parameters, a square periodic cell with a square inclusion,
and a very small mesh to allow comparing the omega and kappa modes (full matrix
solver required!)::
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/square_2m.mesh --solver-conf="kind='eig.scipy', method='eigh'" --log-std-waves -n 10 --range=0,640,101 --mesh-size=1e-2 --mode=omega --eigs-only --no-legends --unit-multipliers=1e-6,1e-2,1e-3 -o output/omega
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/square_2m.mesh --solver-conf="kind='eig.qevp', method='companion', mode='inverted', solver={kind='eig.scipy', method='eig'}" --log-std-waves -n 500 --range=0,4000000,1001 --mesh-size=1e-2 --mode=kappa --eigs-only --no-legends --unit-multipliers=1e-6,1e-2,1e-3 -o output/kappa
View/compare the resulting logs::
python script/plot_logs.py output/omega/frequencies.txt --no-legends -g 1 -o mode-omega.png
python script/plot_logs.py output/kappa/wave-numbers.txt --no-legends -o mode-kappa.png
python script/plot_logs.py output/kappa/wave-numbers.txt --no-legends --swap-axes -o mode-kappa-t.png
In contrast to the heterogeneous square periodic cell, a homogeneous
square periodic cell (the region Y2 is empty)::
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/square_1m.mesh --solver-conf="kind='eig.scipy', method='eigh'" --log-std-waves -n 10 --range=0,640,101 --mesh-size=1e-2 --mode=omega --eigs-only --no-legends --unit-multipliers=1e-6,1e-2,1e-3 -o output/omega-h
python script/plot_logs.py output/omega-h/frequencies.txt --no-legends -g 1 -o mode-omega-h.png
Use the Brillouin stepper::
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/special/circle_in_square.mesh --log-std-waves -n=60 --eigs-only --no-legends --stepper=brillouin
python script/plot_logs.py output/frequencies.txt -g 0 --rc="'font.size':14, 'lines.linewidth' : 3, 'lines.markersize' : 4" -o brillouin-stepper-kappas.png
python script/plot_logs.py output/frequencies.txt -g 1 --no-legends --rc="'font.size':14, 'lines.linewidth' : 3, 'lines.markersize' : 4" -o brillouin-stepper-omegas.png
Additional arguments can be passed to the problem configuration's
:func:`define()` function using the ``--define-kwargs`` option. In this file,
only the mesh vertex separation parameter `mesh_eps` can be used::
python examples/linear_elasticity/dispersion_analysis.py meshes/2d/special/circle_in_square.mesh --log-std-waves --eigs-only --define-kwargs="mesh_eps=1e-10" --save-regions
"""
from __future__ import absolute_import
import os
import sys
sys.path.append('.')
import gc
from copy import copy
from argparse import ArgumentParser, RawDescriptionHelpFormatter
import numpy as nm
import matplotlib.pyplot as plt
from sfepy.base.base import import_file, output, Struct
from sfepy.base.conf import dict_from_string, ProblemConf
from sfepy.base.ioutils import ensure_path, remove_files_patterns, save_options
from sfepy.base.log import Log
from sfepy.discrete.fem import MeshIO
from sfepy.mechanics.matcoefs import stiffness_from_youngpoisson as stiffness
import sfepy.mechanics.matcoefs as mc
from sfepy.mechanics.units import apply_unit_multipliers, apply_units_to_pars
import sfepy.discrete.fem.periodic as per
from sfepy.discrete.fem.meshio import convert_complex_output
from sfepy.homogenization.utils import define_box_regions
from sfepy.discrete import Problem
from sfepy.mechanics.tensors import get_von_mises_stress
from sfepy.solvers import Solver
from sfepy.solvers.ts import get_print_info, TimeStepper
from sfepy.linalg.utils import output_array_stats, max_diff_csr
pars_kinds = {
'young1' : 'stress',
'poisson1' : 'one',
'density1' : 'density',
'young2' : 'stress',
'poisson2' : 'one',
'density2' : 'density',
}
def define(filename_mesh, pars, approx_order, refinement_level, solver_conf,
plane='strain', post_process=False, mesh_eps=1e-8):
io = MeshIO.any_from_filename(filename_mesh)
bbox = io.read_bounding_box()
dim = bbox.shape[1]
options = {
'absolute_mesh_path' : True,
'refinement_level' : refinement_level,
'allow_empty_regions' : True,
'post_process_hook' : 'compute_von_mises' if post_process else None,
}
fields = {
'displacement': ('complex', dim, 'Omega', approx_order),
}
materials = {
'm' : ({
'D' : {'Y1' : stiffness(dim,
young=pars.young1,
poisson=pars.poisson1,
plane=plane),
'Y2' : stiffness(dim,
young=pars.young2,
poisson=pars.poisson2,
plane=plane)},
'density' : {'Y1' : pars.density1, 'Y2' : pars.density2},
},),
'wave' : 'get_wdir',
}
variables = {
'u' : ('unknown field', 'displacement', 0),
'v' : ('test field', 'displacement', 'u'),
}
regions = {
'Omega' : 'all',
'Y1': 'cells of group 1',
'Y2': 'cells of group 2',
}
regions.update(define_box_regions(dim,
bbox[0], bbox[1], mesh_eps))
ebcs = {
}
if dim == 3:
epbcs = {
'periodic_x' : (['Left', 'Right'], {'u.all' : 'u.all'},
'match_x_plane'),
'periodic_y' : (['Near', 'Far'], {'u.all' : 'u.all'},
'match_y_plane'),
'periodic_z' : (['Top', 'Bottom'], {'u.all' : 'u.all'},
'match_z_plane'),
}
else:
epbcs = {
'periodic_x' : (['Left', 'Right'], {'u.all' : 'u.all'},
'match_y_line'),
'periodic_y' : (['Bottom', 'Top'], {'u.all' : 'u.all'},
'match_x_line'),
}
per.set_accuracy(mesh_eps)
functions = {
'match_x_plane' : (per.match_x_plane,),
'match_y_plane' : (per.match_y_plane,),
'match_z_plane' : (per.match_z_plane,),
'match_x_line' : (per.match_x_line,),
'match_y_line' : (per.match_y_line,),
'get_wdir' : (get_wdir,),
}
integrals = {
'i' : 2 * approx_order,
}
equations = {
'K' : 'dw_lin_elastic.i.Omega(m.D, v, u)',
'S' : 'dw_elastic_wave.i.Omega(m.D, wave.vec, v, u)',
'R' : """1j * dw_elastic_wave_cauchy.i.Omega(m.D, wave.vec, u, v)
- 1j * dw_elastic_wave_cauchy.i.Omega(m.D, wave.vec, v, u)""",
'M' : 'dw_dot.i.Omega(m.density, v, u)',
}
solver_0 = solver_conf.copy()
solver_0['name'] = 'eig'
return locals()
def get_wdir(ts, coors, mode=None,
equations=None, term=None, problem=None, wdir=None, **kwargs):
if mode == 'special':
return {'vec' : wdir}
def set_wave_dir(pb, wdir):
materials = pb.get_materials()
wave_mat = materials['wave']
wave_mat.set_extra_args(wdir=wdir)
def save_materials(output_dir, pb, options):
stiffness = pb.evaluate('ev_integrate_mat.2.Omega(m.D, u)',
mode='el_avg', copy_materials=False, verbose=False)
young, poisson = mc.youngpoisson_from_stiffness(stiffness,
plane=options.plane)
density = pb.evaluate('ev_integrate_mat.2.Omega(m.density, u)',
mode='el_avg', copy_materials=False, verbose=False)
out = {}
out['young'] = Struct(name='young', mode='cell',
data=young[..., None, None])
out['poisson'] = Struct(name='poisson', mode='cell',
data=poisson[..., None, None])
out['density'] = Struct(name='density', mode='cell', data=density)
materials_filename = os.path.join(output_dir, 'materials.vtk')
pb.save_state(materials_filename, out=out)
def get_std_wave_fun(pb, options):
stiffness = pb.evaluate('ev_integrate_mat.2.Omega(m.D, u)',
mode='el_avg', copy_materials=False, verbose=False)
young, poisson = mc.youngpoisson_from_stiffness(stiffness,
plane=options.plane)
density = pb.evaluate('ev_integrate_mat.2.Omega(m.density, u)',
mode='el_avg', copy_materials=False, verbose=False)
lam, mu = mc.lame_from_youngpoisson(young, poisson,
plane=options.plane)
alam = nm.average(lam)
amu = nm.average(mu)
adensity = nm.average(density)
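    # Wave speeds of the averaged homogeneous medium (standard relations,
    # matching the code below): cp = sqrt((lambda + 2*mu) / rho) for the
    # pressure wave, cs = sqrt(mu / rho) for the shear wave.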
cp = nm.sqrt((alam + 2.0 * amu) / adensity)
cs = nm.sqrt(amu / adensity)
output('average p-wave speed:', cp)
output('average shear wave speed:', cs)
log_names = [r'$\omega_p$', r'$\omega_s$']
log_plot_kwargs = [{'ls' : '--', 'color' : 'k'},
{'ls' : '--', 'color' : 'gray'}]
if options.mode == 'omega':
fun = lambda wmag, wdir: (cp * wmag, cs * wmag)
else:
fun = lambda wmag, wdir: (wmag / cp, wmag / cs)
return fun, log_names, log_plot_kwargs
def get_stepper(rng, pb, options):
if options.stepper == 'linear':
stepper = TimeStepper(rng[0], rng[1], dt=None, n_step=rng[2])
return stepper
bbox = pb.domain.mesh.get_bounding_box()
bzone = 2.0 * nm.pi / (bbox[1] - bbox[0])
num = rng[2] // 3
class BrillouinStepper(Struct):
"""
        Step over the first Brillouin zone in the xy plane.
"""
def __init__(self, t0, t1, dt=None, n_step=None, step=None, **kwargs):
Struct.__init__(self, t0=t0, t1=t1, dt=dt, n_step=n_step, step=step)
self.n_digit, self.format, self.suffix = get_print_info(self.n_step)
def __iter__(self):
ts = TimeStepper(0, bzone[0], dt=None, n_step=num)
for ii, val in ts:
yield ii, val, nm.array([1.0, 0.0])
if ii == (num-2): break
ts = TimeStepper(0, bzone[1], dt=None, n_step=num)
for ii, k1 in ts:
wdir = nm.array([bzone[0], k1])
val = nm.linalg.norm(wdir)
wdir = wdir / val
yield num + ii, val, wdir
if ii == (num-2): break
wdir = nm.array([bzone[0], bzone[1]])
val = nm.linalg.norm(wdir)
wdir = wdir / val
ts = TimeStepper(0, 1, dt=None, n_step=num)
for ii, _ in ts:
yield 2 * num + ii, val * (1.0 - float(ii)/(num-1)), wdir
stepper = BrillouinStepper(0, 1, n_step=rng[2])
return stepper
def compute_von_mises(out, pb, state, extend=False, wmag=None, wdir=None):
"""
Calculate the von Mises stress.
"""
stress = pb.evaluate('ev_cauchy_stress.i.Omega(m.D, u)', mode='el_avg')
vms = get_von_mises_stress(stress.squeeze())
vms.shape = (vms.shape[0], 1, 1, 1)
out['von_mises_stress'] = Struct(name='output_data', mode='cell',
data=vms)
return out
def save_eigenvectors(filename, svecs, wmag, wdir, pb):
if svecs is None: return
variables = pb.get_variables()
# Make full eigenvectors (add DOFs fixed by boundary conditions).
vecs = nm.empty((variables.di.ptr[-1], svecs.shape[1]),
dtype=svecs.dtype)
for ii in range(svecs.shape[1]):
vecs[:, ii] = variables.make_full_vec(svecs[:, ii])
# Save the eigenvectors.
out = {}
state = pb.create_state()
pp_name = pb.conf.options.get('post_process_hook')
pp = getattr(pb.conf.funmod, pp_name if pp_name is not None else '',
lambda out, *args, **kwargs: out)
for ii in range(svecs.shape[1]):
state.set_full(vecs[:, ii])
aux = state.create_output_dict()
aux2 = {}
pp(aux2, pb, state, wmag=wmag, wdir=wdir)
aux.update(convert_complex_output(aux2))
out.update({key + '%03d' % ii : aux[key] for key in aux})
pb.save_state(filename, out=out)
def assemble_matrices(define, mod, pars, set_wave_dir, options, wdir=None):
"""
Assemble the blocks of dispersion eigenvalue problem matrices.
"""
define_dict = define(filename_mesh=options.mesh_filename,
pars=pars,
approx_order=options.order,
refinement_level=options.refine,
solver_conf=options.solver_conf,
plane=options.plane,
post_process=options.post_process,
**options.define_kwargs)
conf = ProblemConf.from_dict(define_dict, mod)
pb = Problem.from_conf(conf)
pb.dispersion_options = options
pb.set_output_dir(options.output_dir)
dim = pb.domain.shape.dim
# Set the normalized wave vector direction to the material(s).
if wdir is None:
wdir = nm.asarray(options.wave_dir[:dim], dtype=nm.float64)
wdir = wdir / nm.linalg.norm(wdir)
set_wave_dir(pb, wdir)
bbox = pb.domain.mesh.get_bounding_box()
size = (bbox[1] - bbox[0]).max()
scaling0 = apply_unit_multipliers([1.0], ['length'],
options.unit_multipliers)[0]
scaling = scaling0
if options.mesh_size is not None:
scaling *= options.mesh_size / size
output('scaling factor of periodic cell mesh coordinates:', scaling)
output('new mesh size with applied unit multipliers:', scaling * size)
pb.domain.mesh.coors[:] *= scaling
pb.set_mesh_coors(pb.domain.mesh.coors, update_fields=True)
bzone = 2.0 * nm.pi / (scaling * size)
output('1. Brillouin zone size:', bzone * scaling0)
output('1. Brillouin zone size with applied unit multipliers:', bzone)
pb.time_update()
pb.update_materials()
# Assemble the matrices.
mtxs = {}
for key, eq in pb.equations.iteritems():
mtxs[key] = mtx = pb.mtx_a.copy()
mtx = eq.evaluate(mode='weak', dw_mode='matrix', asm_obj=mtx)
mtx.eliminate_zeros()
output_array_stats(mtx.data, 'nonzeros in %s' % key)
output('symmetry checks:')
output('%s - %s^T:' % (key, key), max_diff_csr(mtx, mtx.T))
output('%s - %s^H:' % (key, key), max_diff_csr(mtx, mtx.H))
return pb, wdir, bzone, mtxs
def setup_n_eigs(options, pb, mtxs):
"""
Setup the numbers of eigenvalues based on options and numbers of DOFs.
"""
solver_n_eigs = n_eigs = options.n_eigs
n_dof = mtxs['K'].shape[0]
if options.mode == 'omega':
if options.n_eigs > n_dof:
n_eigs = n_dof
solver_n_eigs = None
else:
if options.n_eigs > 2 * n_dof:
n_eigs = 2 * n_dof
solver_n_eigs = None
return solver_n_eigs, n_eigs
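# Editor's note -- hypothetical numbers for illustration: with n_dof == 100
# and options.n_eigs == 6 in 'omega' mode, setup_n_eigs() returns (6, 6);
# with options.n_eigs == 500 it clamps to the problem size and returns
# (None, 100), where None presumably tells the eigensolver to fall back to
# its default (all eigenvalues).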
def build_evp_matrices(mtxs, val, mode, pb):
"""
Build the matrices of the dispersion eigenvalue problem.
"""
if mode == 'omega':
mtx_a = mtxs['K'] + val**2 * mtxs['S'] + val * mtxs['R']
output('A - A^H:', max_diff_csr(mtx_a, mtx_a.H))
evp_mtxs = (mtx_a, mtxs['M'])
else:
evp_mtxs = (mtxs['S'], mtxs['R'], mtxs['K'] - val**2 * mtxs['M'])
return evp_mtxs
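# Editor's note: in 'omega' mode the generalized EVP assembled above is
#
#     (K + kappa**2 * S + kappa * R) x = omega**2 * M x,
#
# with eigenvalues omega**2 (hence the nm.sqrt() in process_evp_results()),
# while in 'kappa' mode the triple (S, R, K - omega**2 * M) defines a
# quadratic eigenvalue problem in kappa that the solver is expected to
# linearize internally.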
def process_evp_results(eigs, svecs, val, wdir, bzone, pb, mtxs, options,
std_wave_fun=None):
"""
Transform eigenvalues to either omegas or kappas, depending on `mode`.
Transform eigenvectors, if available, depending on `mode`.
Return also the values to log.
"""
if options.mode == 'omega':
omegas = nm.sqrt(eigs)
output('eigs, omegas:')
for ii, om in enumerate(omegas):
output('{:>3}. {: .10e}, {:.10e}'.format(ii, eigs[ii], om))
if options.stepper == 'linear':
out = tuple(eigs) + tuple(omegas)
else:
out = tuple(val * wdir) + tuple(omegas)
if std_wave_fun is not None:
out = out + std_wave_fun(val, wdir)
return omegas, svecs, out
else:
kappas = eigs.copy()
rks = kappas.copy()
# Mask modes far from 1. Brillouin zone.
max_kappa = 1.2 * bzone
kappas[kappas.real > max_kappa] = nm.nan
# Mask non-physical modes.
kappas[kappas.real < 0] = nm.nan
kappas[nm.abs(kappas.imag) > 1e-10] = nm.nan
out = tuple(kappas.real)
output('raw kappas, masked real part:')
for ii, kr in enumerate(kappas.real):
output('{:>3}. {: 23.5e}, {:.10e}'.format(ii, rks[ii], kr))
if svecs is not None:
n_dof = mtxs['K'].shape[0]
# Select only vectors corresponding to physical modes.
ii = nm.isfinite(kappas.real)
svecs = svecs[:n_dof, ii]
if std_wave_fun is not None:
out = out + tuple(ii if ii <= max_kappa else nm.nan
for ii in std_wave_fun(val, wdir))
return kappas, svecs, out
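# Editor's note -- toy numbers for illustration: with bzone == 10.0 the mask
# above keeps a raw eigenvalue 11.0+0j (below max_kappa == 12.0), drops
# 15.0+0j as too far from the first Brillouin zone, and drops 5.0+0.5j as
# non-physical because of its imaginary part.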
helps = {
'pars' :
'material parameters in Y1, Y2 subdomains in basic units.'
' The default parameters are:'
' young1, poisson1, density1, young2, poisson2, density2'
' [default: %(default)s]',
'conf' :
'if given, an alternative problem description file with apply_units() and'
' define() functions [default: %(default)s]',
'define_kwargs' : 'additional keyword arguments passed to define()',
'mesh_size' :
'desired mesh size (max. of bounding box dimensions) in basic units'
' - the input periodic cell mesh is rescaled to this size'
' [default: %(default)s]',
'unit_multipliers' :
'basic unit multipliers (time, length, mass) [default: %(default)s]',
'plane' :
'for 2D problems, plane strain or stress hypothesis selection'
' [default: %(default)s]',
'wave_dir' : 'the wave vector direction (will be normalized)'
' [default: %(default)s]',
'mode' : 'solution mode: omega = solve a generalized EVP for omega,'
' kappa = solve a quadratic generalized EVP for kappa'
' [default: %(default)s]',
'stepper' : 'the range stepper. For "brillouin", only the number'
' of items from --range is used'
' [default: %(default)s]',
'range' : 'the wave vector magnitude / frequency range'
' (like numpy.linspace) depending on the mode option'
' [default: %(default)s]',
'order' : 'displacement field approximation order [default: %(default)s]',
'refine' : 'number of uniform mesh refinements [default: %(default)s]',
'n_eigs' : 'the number of eigenvalues to compute [default: %(default)s]',
'eigs_only' : 'compute only eigenvalues, not eigenvectors',
'post_process' : 'post-process eigenvectors',
'solver_conf' : 'eigenvalue problem solver configuration options'
' [default: %(default)s]',
'save_regions' : 'save defined regions into'
' <output_directory>/regions.vtk',
'save_materials' : 'save material parameters into'
' <output_directory>/materials.vtk',
'log_std_waves' : 'log also standard pressure dilatation and shear waves',
'no_legends' :
'do not show legends in the log plots',
'no_show' :
'do not show the log figure',
'silent' : 'do not print messages to screen',
'clear' :
'clear old solution files from output directory',
'output_dir' :
'output directory [default: %(default)s]',
'mesh_filename' :
'input periodic cell mesh file name [default: %(default)s]',
}
def main():
# Aluminium and epoxy.
default_pars = '70e9,0.35,2.799e3,3.8e9,0.27,1.142e3'
default_solver_conf = ("kind='eig.scipy',method='eigsh',tol=1.0e-5,"
"maxiter=1000,which='LM',sigma=0.0")
parser = ArgumentParser(description=__doc__,
formatter_class=RawDescriptionHelpFormatter)
parser.add_argument('--pars', metavar='name1=value1,name2=value2,...'
' or value1,value2,...',
action='store', dest='pars',
default=default_pars, help=helps['pars'])
parser.add_argument('--conf', metavar='filename',
action='store', dest='conf',
default=None, help=helps['conf'])
parser.add_argument('--define-kwargs', metavar='dict-like',
action='store', dest='define_kwargs',
default=None, help=helps['define_kwargs'])
parser.add_argument('--mesh-size', type=float, metavar='float',
action='store', dest='mesh_size',
default=None, help=helps['mesh_size'])
parser.add_argument('--unit-multipliers',
metavar='c_time,c_length,c_mass',
action='store', dest='unit_multipliers',
default='1.0,1.0,1.0', help=helps['unit_multipliers'])
parser.add_argument('--plane', action='store', dest='plane',
choices=['strain', 'stress'],
default='strain', help=helps['plane'])
parser.add_argument('--wave-dir', metavar='float,float[,float]',
action='store', dest='wave_dir',
default='1.0,0.0,0.0', help=helps['wave_dir'])
parser.add_argument('--mode', action='store', dest='mode',
choices=['omega', 'kappa'],
default='omega', help=helps['mode'])
parser.add_argument('--stepper', action='store', dest='stepper',
choices=['linear', 'brillouin'],
default='linear', help=helps['stepper'])
parser.add_argument('--range', metavar='start,stop,count',
action='store', dest='range',
default='0,6.4,33', help=helps['range'])
parser.add_argument('--order', metavar='int', type=int,
action='store', dest='order',
default=1, help=helps['order'])
parser.add_argument('--refine', metavar='int', type=int,
action='store', dest='refine',
default=0, help=helps['refine'])
parser.add_argument('-n', '--n-eigs', metavar='int', type=int,
action='store', dest='n_eigs',
default=6, help=helps['n_eigs'])
group = parser.add_mutually_exclusive_group()
group.add_argument('--eigs-only',
action='store_true', dest='eigs_only',
default=False, help=helps['eigs_only'])
group.add_argument('--post-process',
action='store_true', dest='post_process',
default=False, help=helps['post_process'])
parser.add_argument('--solver-conf', metavar='dict-like',
action='store', dest='solver_conf',
default=default_solver_conf, help=helps['solver_conf'])
parser.add_argument('--save-regions',
action='store_true', dest='save_regions',
default=False, help=helps['save_regions'])
parser.add_argument('--save-materials',
action='store_true', dest='save_materials',
default=False, help=helps['save_materials'])
parser.add_argument('--log-std-waves',
action='store_true', dest='log_std_waves',
default=False, help=helps['log_std_waves'])
parser.add_argument('--no-legends',
action='store_false', dest='show_legends',
default=True, help=helps['no_legends'])
parser.add_argument('--no-show',
action='store_false', dest='show',
default=True, help=helps['no_show'])
parser.add_argument('--silent',
action='store_true', dest='silent',
default=False, help=helps['silent'])
parser.add_argument('-c', '--clear',
action='store_true', dest='clear',
default=False, help=helps['clear'])
parser.add_argument('-o', '--output-dir', metavar='path',
action='store', dest='output_dir',
default='output', help=helps['output_dir'])
parser.add_argument('mesh_filename', default='',
help=helps['mesh_filename'])
options = parser.parse_args()
output_dir = options.output_dir
output.set_output(filename=os.path.join(output_dir,'output_log.txt'),
combined=not options.silent)
if options.conf is not None:
mod = import_file(options.conf)
else:
mod = sys.modules[__name__]
pars_kinds = mod.pars_kinds
define = mod.define
set_wave_dir = mod.set_wave_dir
setup_n_eigs = mod.setup_n_eigs
build_evp_matrices = mod.build_evp_matrices
save_materials = mod.save_materials
get_std_wave_fun = mod.get_std_wave_fun
get_stepper = mod.get_stepper
process_evp_results = mod.process_evp_results
save_eigenvectors = mod.save_eigenvectors
try:
options.pars = dict_from_string(options.pars)
except Exception:
aux = [float(ii) for ii in options.pars.split(',')]
options.pars = {key : aux[ii]
for ii, key in enumerate(pars_kinds.keys())}
options.unit_multipliers = [float(ii)
for ii in options.unit_multipliers.split(',')]
options.wave_dir = [float(ii)
for ii in options.wave_dir.split(',')]
aux = options.range.split(',')
options.range = [float(aux[0]), float(aux[1]), int(aux[2])]
options.solver_conf = dict_from_string(options.solver_conf)
options.define_kwargs = dict_from_string(options.define_kwargs)
if options.clear:
remove_files_patterns(output_dir,
['*.h5', '*.vtk', '*.txt'],
ignores=['output_log.txt'],
verbose=True)
filename = os.path.join(output_dir, 'options.txt')
ensure_path(filename)
save_options(filename, [('options', vars(options))],
quote_command_line=True)
pars = apply_units_to_pars(options.pars, pars_kinds,
options.unit_multipliers)
output('material parameter names and kinds:')
output(pars_kinds)
output('material parameters with applied unit multipliers:')
output(pars)
pars = Struct(**pars)
if options.mode == 'omega':
rng = copy(options.range)
rng[:2] = apply_unit_multipliers(options.range[:2],
['wave_number', 'wave_number'],
options.unit_multipliers)
output('wave number range with applied unit multipliers:', rng)
else:
if options.stepper == 'brillouin':
raise ValueError('Cannot use "brillouin" stepper in kappa mode!')
rng = copy(options.range)
rng[:2] = apply_unit_multipliers(options.range[:2],
['frequency', 'frequency'],
options.unit_multipliers)
output('frequency range with applied unit multipliers:', rng)
pb, wdir, bzone, mtxs = assemble_matrices(define, mod, pars, set_wave_dir,
options)
dim = pb.domain.shape.dim
if dim != 2:
options.plane = 'strain'
if options.save_regions:
pb.save_regions_as_groups(os.path.join(output_dir, 'regions'))
if options.save_materials:
save_materials(output_dir, pb, options)
conf = pb.solver_confs['eig']
eig_solver = Solver.any_from_conf(conf)
n_eigs, options.n_eigs = setup_n_eigs(options, pb, mtxs)
get_color = lambda ii: plt.cm.viridis((float(ii)
/ (max(options.n_eigs, 2) - 1)))
plot_kwargs = [{'color' : get_color(ii), 'ls' : '', 'marker' : 'o'}
for ii in range(options.n_eigs)]
get_color_dim = lambda ii: plt.cm.viridis((float(ii) / (max(dim, 2) - 1)))
plot_kwargs_dim = [{'color' : get_color_dim(ii), 'ls' : '', 'marker' : 'o'}
for ii in range(dim)]
log_names = []
log_plot_kwargs = []
if options.log_std_waves:
std_wave_fun, log_names, log_plot_kwargs = get_std_wave_fun(
pb, options)
else:
std_wave_fun = None
stepper = get_stepper(rng, pb, options)
if options.mode == 'omega':
eigenshapes_filename = os.path.join(output_dir,
'frequency-eigenshapes-%s.vtk'
% stepper.suffix)
if options.stepper == 'linear':
log = Log([[r'$\lambda_{%d}$' % ii for ii in range(options.n_eigs)],
[r'$\omega_{%d}$'
% ii for ii in range(options.n_eigs)] + log_names],
plot_kwargs=[plot_kwargs, plot_kwargs + log_plot_kwargs],
formats=[['{:.12e}'] * options.n_eigs,
['{:.12e}'] * (options.n_eigs + len(log_names))],
yscales=['linear', 'linear'],
xlabels=[r'$\kappa$', r'$\kappa$'],
ylabels=[r'eigenvalues $\lambda_i$',
r'frequencies $\omega_i$'],
show_legends=options.show_legends,
is_plot=options.show,
log_filename=os.path.join(output_dir, 'frequencies.txt'),
aggregate=1000, sleep=0.1)
else:
log = Log([[r'$\kappa_{%d}$'% ii for ii in range(dim)],
[r'$\omega_{%d}$'
% ii for ii in range(options.n_eigs)] + log_names],
plot_kwargs=[plot_kwargs_dim,
plot_kwargs + log_plot_kwargs],
formats=[['{:.12e}'] * dim,
['{:.12e}'] * (options.n_eigs + len(log_names))],
yscales=['linear', 'linear'],
xlabels=[r'', r''],
ylabels=[r'wave vector $\kappa$',
r'frequencies $\omega_i$'],
show_legends=options.show_legends,
is_plot=options.show,
log_filename=os.path.join(output_dir, 'frequencies.txt'),
aggregate=1000, sleep=0.1)
for aux in stepper:
if options.stepper == 'linear':
iv, wmag = aux
else:
iv, wmag, wdir = aux
output('step %d: wave vector %s' % (iv, wmag * wdir))
if options.stepper == 'brillouin':
pb, _, bzone, mtxs = assemble_matrices(
define, mod, pars, set_wave_dir, options, wdir=wdir)
evp_mtxs = build_evp_matrices(mtxs, wmag, options.mode, pb)
if options.eigs_only:
eigs = eig_solver(*evp_mtxs, n_eigs=n_eigs,
eigenvectors=False)
svecs = None
else:
eigs, svecs = eig_solver(*evp_mtxs, n_eigs=n_eigs,
eigenvectors=True)
omegas, svecs, out = process_evp_results(
eigs, svecs, wmag, wdir, bzone, pb, mtxs, options,
std_wave_fun=std_wave_fun
)
if options.stepper == 'linear':
log(*out, x=[wmag, wmag])
else:
log(*out, x=[iv, iv])
save_eigenvectors(eigenshapes_filename % iv, svecs, wmag, wdir, pb)
gc.collect()
log(save_figure=os.path.join(output_dir, 'frequencies.png'))
log(finished=True)
else:
eigenshapes_filename = os.path.join(output_dir,
'wave-number-eigenshapes-%s.vtk'
% stepper.suffix)
log = Log([[r'$\kappa_{%d}$' % ii for ii in range(options.n_eigs)]
+ log_names],
plot_kwargs=[plot_kwargs + log_plot_kwargs],
formats=[['{:.12e}'] * (options.n_eigs + len(log_names))],
yscales=['linear'],
xlabels=[r'$\omega$'],
ylabels=[r'wave numbers $\kappa_i$'],
show_legends=options.show_legends,
is_plot=options.show,
log_filename=os.path.join(output_dir, 'wave-numbers.txt'),
aggregate=1000, sleep=0.1)
for io, omega in stepper:
output('step %d: frequency %s' % (io, omega))
evp_mtxs = build_evp_matrices(mtxs, omega, options.mode, pb)
if options.eigs_only:
eigs = eig_solver(*evp_mtxs, n_eigs=n_eigs,
eigenvectors=False)
svecs = None
else:
eigs, svecs = eig_solver(*evp_mtxs, n_eigs=n_eigs,
eigenvectors=True)
kappas, svecs, out = process_evp_results(
eigs, svecs, omega, wdir, bzone, pb, mtxs, options,
std_wave_fun=std_wave_fun
)
log(*out, x=[omega])
save_eigenvectors(eigenshapes_filename % io, svecs, kappas, wdir,
pb)
gc.collect()
log(save_figure=os.path.join(output_dir, 'wave-numbers.png'))
log(finished=True)
if __name__ == '__main__':
main()
| bsd-3-clause |
rajanandakumar/DIRAC | FrameworkSystem/private/monitoring/Activity.py | 19 | 3551 | # $HeadURL$
__RCSID__ = "$Id$"
import types
from string import Template
class Activity:
dbFields = [ 'activities.unit',
'activities.type',
'activities.description',
'activities.filename',
'activities.bucketLength',
'sources.site',
'sources.componentType',
'sources.componentLocation',
'sources.componentName'
]
dbMapping = {
}
def __init__( self, dataList ):
"""
Init an activity
"""
self.dataList = dataList
self.groupList = []
self.groupLabel = ""
self.__initMapping()
self.templateMap = {}
self.scaleFactor = 1
self.labelTemplate = ""
for fieldName in self.dbFields:
capsFieldName = fieldName.split(".")[1].upper()
self.templateMap[ capsFieldName ] = self.__getField( fieldName )
def __initMapping(self):
"""
Init static mapping
"""
if not self.dbMapping:
for index in range( len( self.dbFields ) ):
self.dbMapping[ self.dbFields[index] ] = index
def setBucketScaleFactor( self, scaleFactor ):
self.scaleFactor = scaleFactor
self.__calculateUnit()
def __calculateUnit( self ):
self.dataList = list( self.dataList )
unit = self.dataList[ self.dbMapping[ 'activities.unit' ] ].split("/")[0]
if self.getType() in ( "sum" ):
sF = int( self.getBucketLength() * self.scaleFactor ) / 60
if sF == 1:
unit = "%s/min" % unit
else:
unit = "%s/%s mins" % ( unit, sF )
if self.getType() in ( "rate" ):
unit = "%s/seconds" % unit
self.dataList[ self.dbMapping[ 'activities.unit' ] ] = unit
self.templateMap[ 'UNIT' ] = unit
def setGroup( self, group ):
"""
Set group to which this activity belongs
"""
self.groupList = group
self.groupLabel = "Grouped for"
for fieldName in self.groupList:
self.groupLabel += " %s," % fieldName
self.groupLabel = self.groupLabel[:-1]
def setLabel( self, labelTemplate ):
"""
Set activity label
"""
self.labelTemplate = labelTemplate
def __getField( self, name ):
"""
Get field value
"""
return self.dataList[ self.dbMapping[ name ] ]
def getUnit(self):
return self.__getField( 'activities.unit' )
def getFile(self):
return self.__getField( 'activities.filename' )
def getType(self):
return self.__getField( 'activities.type' )
def getDescription(self):
return self.__getField( 'activities.description' )
def getBucketLength(self):
return self.__getField( 'activities.bucketLength' )
def getSite(self):
return self.__getField( 'sources.site' )
def getComponentType(self):
return self.__getField( 'sources.componentType' )
def getComponentName(self):
return self.__getField( 'sources.componentName' )
def getComponentLocation(self):
return self.__getField( 'sources.componentLocation' )
def getGroupLabel(self):
return self.groupLabel
def getLabel(self):
if type( self.labelTemplate ) == types.UnicodeType:
self.labelTemplate = self.labelTemplate.encode( "utf-8" )
return Template( self.labelTemplate ).safe_substitute( self.templateMap )
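# Editor's note: string.Template.safe_substitute() (standard library) leaves
# unknown placeholders untouched instead of raising KeyError, e.g.
# Template("$SITE $MISSING").safe_substitute({"SITE": "LCG"}) returns
# 'LCG $MISSING', so partially filled label templates degrade gracefully.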
def __str__( self ):
return "[%s][%s][%s]" % ( self.getLabel(), self.getGroupLabel(), str( self.dataList ) )
def __repr__( self ):
return self.__str__()
def __lt__( self, act ):
label = self.getLabel()
try:
return label < act.getLabel()
except Exception:
return label < act | gpl-3.0 |
ging/horizon | openstack_dashboard/dashboards/admin/defaults/tables.py | 9 | 2665 | # Copyright 2013 Kylin, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
class QuotaFilterAction(tables.FilterAction):
def filter(self, table, tenants, filter_string):
q = filter_string.lower()
def comp(tenant):
if q in tenant.name.lower():
return True
return False
return filter(comp, tenants)
class UpdateDefaultQuotas(tables.LinkAction):
name = "update_defaults"
verbose_name = _("Update Defaults")
url = "horizon:admin:defaults:update_defaults"
classes = ("ajax-modal", "btn-edit")
def get_quota_name(quota):
QUOTA_NAMES = {
'injected_file_content_bytes': _('Injected File Content Bytes'),
'injected_file_path_bytes': _('Length of Injected File Path'),
'metadata_items': _('Metadata Items'),
'cores': _('VCPUs'),
'instances': _('Instances'),
'injected_files': _('Injected Files'),
'volumes': _('Volumes'),
'snapshots': _('Volume Snapshots'),
'gigabytes': _('Total Size of Volumes and Snapshots (GB)'),
'ram': _('RAM (MB)'),
'floating_ips': _('Floating IPs'),
'security_groups': _('Security Groups'),
'security_group_rules': _('Security Group Rules'),
'key_pairs': _('Key Pairs'),
'fixed_ips': _('Fixed IPs'),
'volumes_volume_luks': _('LUKS Volumes'),
'snapshots_volume_luks': _('LUKS Volume Snapshots'),
'gigabytes_volume_luks':
_('Total Size of LUKS Volumes and Snapshots (GB)'),
'dm-crypt': _('dm-crypt'),
}
return QUOTA_NAMES.get(quota.name, quota.name.replace("_", " ").title())
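# Editor's note -- illustration: a quota whose .name is 'cores' is labelled
# 'VCPUs' by the mapping above, while an unmapped name such as
# 'my_new_quota' (hypothetical) falls back to 'My New Quota' via
# replace('_', ' ').title().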
class QuotasTable(tables.DataTable):
name = tables.Column(get_quota_name, verbose_name=_('Quota Name'))
limit = tables.Column("limit", verbose_name=_('Limit'))
def get_object_id(self, obj):
return obj.name
class Meta:
name = "quotas"
verbose_name = _("Quotas")
table_actions = (QuotaFilterAction, UpdateDefaultQuotas)
multi_select = False
| apache-2.0 |
pplatek/odoo | addons/portal/mail_thread.py | 390 | 2004 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-TODAY OpenERP S.A (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.osv import osv
class mail_thread(osv.AbstractModel):
""" Update of mail_mail class, to add the signin URL to notifications. """
_inherit = 'mail.thread'
def _get_inbox_action_xml_id(self, cr, uid, context=None):
""" For a given message, return an action that either
- opens the form view of the related document if model, res_id, and
read access to the document
- opens the Inbox with a default search on the conversation if model,
res_id
- opens the Inbox with context propagated
"""
cur_user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context)
# if uid is a portal user -> action is different
if any(group.is_portal for group in cur_user.groups_id):
return ('portal', 'action_mail_inbox_feeds_portal')
else:
return super(mail_thread, self)._get_inbox_action_xml_id(cr, uid, context=context)
| agpl-3.0 |
jfmartinez64/test | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/vine.py | 25 | 3373 | from __future__ import unicode_literals
import re
import json
import itertools
from .common import InfoExtractor
from ..utils import unified_strdate
class VineIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?vine\.co/v/(?P<id>\w+)'
_TEST = {
'url': 'https://vine.co/v/b9KOOWX7HUx',
'md5': '2f36fed6235b16da96ce9b4dc890940d',
'info_dict': {
'id': 'b9KOOWX7HUx',
'ext': 'mp4',
'title': 'Chicken.',
'alt_title': 'Vine by Jack Dorsey',
'description': 'Chicken.',
'upload_date': '20130519',
'uploader': 'Jack Dorsey',
'uploader_id': '76',
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage('https://vine.co/v/' + video_id, video_id)
data = json.loads(self._html_search_regex(
r'window\.POST_DATA = { %s: ({.+?}) }' % video_id, webpage, 'vine data'))
formats = [{
'url': data['videoLowURL'],
'ext': 'mp4',
'format_id': 'low',
}, {
'url': data['videoUrl'],
'ext': 'mp4',
'format_id': 'standard',
}]
return {
'id': video_id,
'title': self._og_search_title(webpage),
'alt_title': self._og_search_description(webpage),
'description': data['description'],
'thumbnail': data['thumbnailUrl'],
'upload_date': unified_strdate(data['created']),
'uploader': data['username'],
'uploader_id': data['userIdStr'],
'like_count': data['likes']['count'],
'comment_count': data['comments']['count'],
'repost_count': data['reposts']['count'],
'formats': formats,
}
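# Editor's note -- illustration: _VALID_URL above captures the video id, e.g.
# re.match(VineIE._VALID_URL, 'https://vine.co/v/b9KOOWX7HUx').group('id')
# evaluates to 'b9KOOWX7HUx' (re is already imported at module level).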
class VineUserIE(InfoExtractor):
IE_NAME = 'vine:user'
_VALID_URL = r'(?:https?://)?vine\.co/(?P<u>u/)?(?P<user>[^/]+)/?(\?.*)?$'
_VINE_BASE_URL = "https://vine.co/"
_TESTS = [
{
'url': 'https://vine.co/Visa',
'info_dict': {
'id': 'Visa',
},
'playlist_mincount': 46,
},
{
'url': 'https://vine.co/u/941705360593584128',
'only_matching': True,
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
user = mobj.group('user')
u = mobj.group('u')
profile_url = "%sapi/users/profiles/%s%s" % (
self._VINE_BASE_URL, 'vanity/' if not u else '', user)
profile_data = self._download_json(
profile_url, user, note='Downloading user profile data')
user_id = profile_data['data']['userId']
timeline_data = []
for pagenum in itertools.count(1):
timeline_url = "%sapi/timelines/users/%s?page=%s&size=100" % (
self._VINE_BASE_URL, user_id, pagenum)
timeline_page = self._download_json(
timeline_url, user, note='Downloading page %d' % pagenum)
timeline_data.extend(timeline_page['data']['records'])
if timeline_page['data']['nextPage'] is None:
break
entries = [
self.url_result(e['permalinkUrl'], 'Vine') for e in timeline_data]
return self.playlist_result(entries, user)
| gpl-3.0 |
etetoolkit/ete | ete3/tools/ete_build_lib/task/uhire.py | 4 | 4331 | from __future__ import absolute_import
# #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: [email protected]
#
#
# #END_LICENSE#############################################################
import os
import logging
log = logging.getLogger("main")
from ..master_task import AlgTask
from ..master_job import Job
from ..utils import SeqGroup, OrderedDict
__all__ = ["Uhire"]
class Uhire(AlgTask):
def __init__(self, nodeid, multiseq_file, seqtype, conf):
# Initialize task
AlgTask.__init__(self, nodeid, "alg", "Usearch-Uhire",
OrderedDict(), conf["uhire"])
self.conf = conf
self.seqtype = seqtype
self.multiseq_file = multiseq_file
self.init()
self.alg_fasta_file = os.path.join(self.taskdir, "final_alg.fasta")
self.alg_phylip_file = os.path.join(self.taskdir, "final_alg.iphylip")
def load_jobs(self):
# split the original set of sequences in clusters.
uhire_args = {
"--clumpfasta": "./",
"--maxclump": "%s" %self.conf["uhire"]["_maxclump"],
"--usersort": "",
"--uhire": self.multiseq_file,
}
uhire_job = Job(self.conf["app"]["usearch"], uhire_args, "usearch-uhire",
parent_ids=[self.nodeid])
# Builds a muscle alignment for each of those clusters. (This
# is a special job to align all clumps independently. The
# whole shell command is used as job binary, so it is very
# important that there are no trailing lines at the end of the
# command.)
cmd = """
(mkdir clumpalgs/;
for fname in %s/clump.* %s/master;
do %s -in $fname -out clumpalgs/`basename $fname` -maxiters %s;
done;) """ %(
os.path.join("../",uhire_job.jobid),
os.path.join("../",uhire_job.jobid),
self.conf["app"]["muscle"],
self.conf["uhire"]["_muscle_maxiters"])
alg_job = Job(cmd, {}, "uhire_muscle_algs", parent_ids=[uhire_job.jobid])
alg_job.dependencies.add(uhire_job)
# Merge the cluster alignemnts into a single one
umerge_args = {
"--maxlen": self.conf["uhire"]["_max_seq_length"],
"--mergeclumps": "../%s/clumpalgs/" %alg_job.jobid,
"--output": "alg.fasta",
}
umerge_job = Job(self.conf["app"]["usearch"], umerge_args, "usearch-umerge",
parent_ids=[alg_job.jobid])
umerge_job.dependencies.add(alg_job)
# Add all jobs to the task queue queue
self.jobs.extend([uhire_job, alg_job, umerge_job])
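# Editor's note: the three jobs queued above form a pipeline -- usearch
# --uhire splits the input sequences into clumps, muscle aligns each clump
# independently, and usearch --mergeclumps merges the clump alignments into
# a single alignment consumed by finish() below.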
def finish(self):
# Once executed, alignment is converted into relaxed
# interleaved phylip format.
final_job = self.jobs[2]
alg = SeqGroup(os.path.join(final_job.jobdir, "alg.fasta"))
alg.write(outfile=self.alg_fasta_file, format="fasta")
alg.write(outfile=self.alg_phylip_file, format="iphylip_relaxed")
AlgTask.finish(self)
| gpl-3.0 |
CEG-FYP-OpenStack/scheduler | nova/api/openstack/compute/legacy_v2/contrib/rescue.py | 5 | 3852 | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The rescue mode extension."""
import webob
from webob import exc
from nova.api.openstack import common
from nova.api.openstack import extensions as exts
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
from nova import utils
authorize = exts.extension_authorizer('compute', 'rescue')
class RescueController(wsgi.Controller):
def __init__(self, ext_mgr, *args, **kwargs):
super(RescueController, self).__init__(*args, **kwargs)
self.compute_api = compute.API()
self.ext_mgr = ext_mgr
@wsgi.action('rescue')
def _rescue(self, req, id, body):
"""Rescue an instance."""
context = req.environ["nova.context"]
authorize(context)
if body['rescue'] and 'adminPass' in body['rescue']:
password = body['rescue']['adminPass']
else:
password = utils.generate_password()
instance = common.get_instance(self.compute_api, context, id)
try:
rescue_image_ref = None
if self.ext_mgr.is_loaded("os-extended-rescue-with-image"):
if body['rescue'] and 'rescue_image_ref' in body['rescue']:
rescue_image_ref = body['rescue']['rescue_image_ref']
self.compute_api.rescue(context, instance,
rescue_password=password, rescue_image_ref=rescue_image_ref)
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'rescue', id)
except exception.InvalidVolume as volume_error:
raise exc.HTTPConflict(explanation=volume_error.format_message())
except exception.InstanceNotRescuable as non_rescuable:
raise exc.HTTPBadRequest(
explanation=non_rescuable.format_message())
return {'adminPass': password}
@wsgi.action('unrescue')
def _unrescue(self, req, id, body):
"""Unrescue an instance."""
context = req.environ["nova.context"]
authorize(context)
instance = common.get_instance(self.compute_api, context, id)
try:
self.compute_api.unrescue(context, instance)
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'unrescue',
id)
return webob.Response(status_int=202)
class Rescue(exts.ExtensionDescriptor):
"""Instance rescue mode."""
name = "Rescue"
alias = "os-rescue"
namespace = "http://docs.openstack.org/compute/ext/rescue/api/v1.1"
updated = "2011-08-18T00:00:00Z"
def get_controller_extensions(self):
controller = RescueController(self.ext_mgr)
extension = exts.ControllerExtension(self, 'servers', controller)
return [extension]
| apache-2.0 |
HazenBabcock/opensdraw | opensdraw/scripts/list_parts_in_mpd.py | 1 | 3902 | #!/usr/bin/env python
"""
Lists all the unique parts and their colors in a .mpd file. This
is sometimes useful for determining the name of a part and/or a
color.
Hazen 04/15
"""
import os
import re
import sys
import opensdraw.lcad_lib.datFileParser as datFileParser
if (len(sys.argv) != 2):
print("usage: <ldraw file (input)>")
exit()
parts = {}
def arrayToString(array):
"""
Convert an array to a string & clean it up. The assumption is that
no part in the LDraw library is going to have a space in the name.
"""
return re.sub(r'[^a-zA-Z0-9\.]', '_', "".join(array))
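# Editor's note -- illustration: arrayToString(["3001", ".dat"]) returns
# "3001.dat", while any character other than letters, digits and '.' becomes
# an underscore, e.g. arrayToString(["a", "-", "b"]) -> "a_b".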
class PartsFinder(datFileParser.Parser):
"""
This class finds all the parts in ldraw format file and records the
name, id and color of the parts that also exist in the ldraw parts
library.
"""
def __init__(self):
datFileParser.Parser.__init__(self, None, None)
self.sub_parts = {}
self.parts = {}
def command(self, parsed_line):
pass
def endFile(self):
pass
def line(self, parsed_line):
pass
def newFile(self, parsed_line):
ldraw_color = parsed_line[1]
ldraw_id = parsed_line[14]
part_file = None
# Try and find part in parts folder.
try:
part_file = datFileParser.findPartFile(ldraw_id)
except IOError:
pass
# If this part exists, figure out whether it is a part or a sub-part
# based on the path & add to the appropriate dictionary.
if part_file is not None:
is_part = True
if (os.path.split(os.path.split(part_file)[0])[1] != "parts"):
is_part = False
fp = open(part_file)
description = fp.readline()[2:].strip()
fp.close()
part_id = ldraw_id + "_" + ldraw_color
if is_part:
self.parts[part_id] = description
else:
self.sub_parts[part_id] = description
def optionalLine(self, parsed_line):
pass
def quadrilateral(self, parsed_line):
pass
def startFile(self, depth):
pass
def triangle(self, parsed_line):
pass
# Find all the parts.
partsFinder = PartsFinder()
datFileParser.parsePartFile(partsFinder, sys.argv[1])
print("Parts:")
for key in sorted(partsFinder.parts, key = partsFinder.parts.get):
[part_id, part_color] = key.split("_")
print(part_id[:-4] + ", " + part_color + ", " + partsFinder.parts[key])
print("\n")
print("Sub Parts:")
for key in sorted(partsFinder.sub_parts, key = partsFinder.sub_parts.get):
[part_id, part_color] = key.split("_")
print(part_id[:-4] + ", " + part_color + ", " + partsFinder.sub_parts[key])
print("\n")
#
# The MIT License
#
# Copyright (c) 2015 Hazen Babcock
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
| mit |
tectronics/timeside | tests/test_yaafe.py | 2 | 2645 | #! /usr/bin/env python
from unit_timeside import *
from timeside.decoder import *
from timeside.analyzer import Yaafe
from yaafelib import DataFlow,FeaturePlan
class TestYaafe(unittest.TestCase):
def setUp(self):
self.sample_rate = 16000
def testOnSweepWithFeaturePlan(self):
"runs on sweep and define feature plan manually"
self.source = os.path.join (os.path.dirname(__file__), "samples", "sweep.wav")
# Setup Yaafe Analyzer
# Define Yaafe Feature Plan
fp = FeaturePlan(sample_rate=self.sample_rate)
# add feature definitions manually
fp.addFeature('mfcc: MFCC blockSize=512 stepSize=256')
fp.addFeature('mfcc_d1: MFCC blockSize=512 stepSize=256 > Derivate DOrder=1')
fp.addFeature('mfcc_d2: MFCC blockSize=512 stepSize=256 > Derivate DOrder=2')
# Setup a new Yaafe TimeSide analyzer
# from FeaturePlan
self.analyzer = Yaafe(fp)
# Expected Results
self.result_length = 3
def testOnGuitarWithFeaturePlanFromFile(self):
"runs on guitar and load Yaafe feature plan from file"
self.source = os.path.join (os.path.dirname(__file__), "samples", "guitar.wav")
# Setup Yaafe Analyzer
# Load Yaafe Feature Plan
fp = FeaturePlan(sample_rate=self.sample_rate)
fp_file = os.path.join (os.path.dirname(__file__), "yaafe_config", "yaafeFeaturePlan")
fp.loadFeaturePlan(fp_file)
# Setup a new Yaafe TimeSide analyzer
# from FeaturePlan
self.analyzer = Yaafe(fp)
# Expected Results
self.result_length = 3
def testOnGuitarWithDataFlow(self):
"runs on guitar and load Yaafe dataflow from file"
self.source = os.path.join (os.path.dirname(__file__), "samples", "guitar.wav")
# Setup Yaafe Analyzer
# Load DataFlow from file
df = DataFlow()
df_file = os.path.join (os.path.dirname(__file__), "yaafe_config", "yaafeDataFlow")
df.load(df_file)
# Setup a new Yaafe TimeSide analyzer
# from DataFlow
self.analyzer = Yaafe(df)
# Expected Results
self.result_length = 5
def tearDown(self):
decoder = FileDecoder(self.source)
decoder.output_samplerate = self.sample_rate
(decoder | self.analyzer).run()
results = self.analyzer.results
self.assertEqual(self.result_length, len(results))
#print results
#print results.to_yaml()
#print results.to_json()
#print results.to_xml()
if __name__ == '__main__':
unittest.main(testRunner=TestRunner())
| gpl-2.0 |
harmy/kbengine | kbe/res/scripts/common/Lib/cgi.py | 3 | 35512 | #! /usr/local/bin/python
# NOTE: the above "/usr/local/bin/python" is NOT a mistake. It is
# intentionally NOT "/usr/bin/env python". On many systems
# (e.g. Solaris), /usr/local/bin is not in $PATH as passed to CGI
# scripts, and /usr/local/bin is the default directory where Python is
# installed, so /usr/bin/env would be unable to find python. Granted,
# binary installations by Linux vendors often install Python in
# /usr/bin. So let those vendors patch cgi.py to match their choice
# of installation.
"""Support module for CGI (Common Gateway Interface) scripts.
This module defines a number of utilities for use by CGI scripts
written in Python.
"""
# History
# -------
#
# Michael McLay started this module. Steve Majewski changed the
# interface to SvFormContentDict and FormContentDict. The multipart
# parsing was inspired by code submitted by Andreas Paepcke. Guido van
# Rossum rewrote, reformatted and documented the module and is currently
# responsible for its maintenance.
#
__version__ = "2.6"
# Imports
# =======
from io import StringIO, BytesIO, TextIOWrapper
import sys
import os
import urllib.parse
from email.parser import FeedParser
from warnings import warn
import html
import locale
import tempfile
__all__ = ["MiniFieldStorage", "FieldStorage",
"parse", "parse_qs", "parse_qsl", "parse_multipart",
"parse_header", "print_exception", "print_environ",
"print_form", "print_directory", "print_arguments",
"print_environ_usage", "escape"]
# Logging support
# ===============
logfile = "" # Filename to log to, if not empty
logfp = None # File object to log to, if not None
def initlog(*allargs):
"""Write a log message, if there is a log file.
Even though this function is called initlog(), you should always
use log(); log is a variable that is set either to initlog
(initially), to dolog (once the log file has been opened), or to
nolog (when logging is disabled).
The first argument is a format string; the remaining arguments (if
any) are arguments to the % operator, so e.g.
log("%s: %s", "a", "b")
will write "a: b" to the log file, followed by a newline.
If the global logfp is not None, it should be a file object to
which log data is written.
If the global logfp is None, the global logfile may be a string
giving a filename to open, in append mode. This file should be
world writable!!! If the file can't be opened, logging is
silently disabled (since there is no safe place where we could
send an error message).
"""
global logfp, log
if logfile and not logfp:
try:
logfp = open(logfile, "a")
except IOError:
pass
if not logfp:
log = nolog
else:
log = dolog
log(*allargs)
def dolog(fmt, *args):
"""Write a log message to the log file. See initlog() for docs."""
logfp.write(fmt%args + "\n")
def nolog(*allargs):
"""Dummy function, assigned to log when logging is disabled."""
pass
log = initlog # The current logging function
# Parsing functions
# =================
# Maximum input we will accept when REQUEST_METHOD is POST
# 0 ==> unlimited input
maxlen = 0
def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
"""Parse a query in the environment or from a file (default stdin)
Arguments, all optional:
fp : file pointer; default: sys.stdin.buffer
environ : environment dictionary; default: os.environ
keep_blank_values: flag indicating whether blank values in
percent-encoded forms should be treated as blank strings.
A true value indicates that blanks should be retained as
blank strings. The default false value indicates that
blank values are to be ignored and treated as if they were
not included.
strict_parsing: flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored.
If true, errors raise a ValueError exception.
"""
if fp is None:
fp = sys.stdin
# field keys and values (except for files) are returned as strings
# an encoding is required to decode the bytes read from self.fp
if hasattr(fp,'encoding'):
encoding = fp.encoding
else:
encoding = 'latin-1'
# fp.read() must return bytes
if isinstance(fp, TextIOWrapper):
fp = fp.buffer
if not 'REQUEST_METHOD' in environ:
environ['REQUEST_METHOD'] = 'GET' # For testing stand-alone
if environ['REQUEST_METHOD'] == 'POST':
ctype, pdict = parse_header(environ['CONTENT_TYPE'])
if ctype == 'multipart/form-data':
return parse_multipart(fp, pdict)
elif ctype == 'application/x-www-form-urlencoded':
clength = int(environ['CONTENT_LENGTH'])
if maxlen and clength > maxlen:
raise ValueError('Maximum content length exceeded')
qs = fp.read(clength).decode(encoding)
else:
qs = '' # Unknown content-type
if 'QUERY_STRING' in environ:
if qs: qs = qs + '&'
qs = qs + environ['QUERY_STRING']
elif sys.argv[1:]:
if qs: qs = qs + '&'
qs = qs + sys.argv[1]
environ['QUERY_STRING'] = qs # XXX Shouldn't, really
elif 'QUERY_STRING' in environ:
qs = environ['QUERY_STRING']
else:
if sys.argv[1:]:
qs = sys.argv[1]
else:
qs = ""
environ['QUERY_STRING'] = qs # XXX Shouldn't, really
return urllib.parse.parse_qs(qs, keep_blank_values, strict_parsing,
encoding=encoding)
# Query string parsing functions now live in urllib.parse; these
# wrappers are kept here in order to maintain backward compatibility.
def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
"""Parse a query given as a string argument."""
warn("cgi.parse_qs is deprecated, use urllib.parse.parse_qs instead",
DeprecationWarning, 2)
return urllib.parse.parse_qs(qs, keep_blank_values, strict_parsing)
def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
"""Parse a query given as a string argument."""
warn("cgi.parse_qsl is deprecated, use urllib.parse.parse_qsl instead",
DeprecationWarning, 2)
return urllib.parse.parse_qsl(qs, keep_blank_values, strict_parsing)
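# Example (standard behaviour of urllib.parse.parse_qs, which the wrappers
# above delegate to):
#
#     >>> urllib.parse.parse_qs('a=1&a=2&b=3')
#     {'a': ['1', '2'], 'b': ['3']}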
def parse_multipart(fp, pdict):
"""Parse multipart input.
Arguments:
fp : input file
pdict: dictionary containing other parameters of content-type header
Returns a dictionary just like parse_qs(): keys are the field names, each
value is a list of values for that field. This is easy to use but not
much good if you are expecting megabytes to be uploaded -- in that case,
use the FieldStorage class instead which is much more flexible. Note
that content-type is the raw, unparsed contents of the content-type
header.
XXX This does not parse nested multipart parts -- use FieldStorage for
that.
XXX This should really be subsumed by FieldStorage altogether -- no
point in having two implementations of the same parsing algorithm.
Also, FieldStorage protects itself better against certain DoS attacks
by limiting the size of the data read in one chunk. The API here
does not support that kind of protection. This also affects parse()
since it can call parse_multipart().
"""
import http.client
boundary = ""
if 'boundary' in pdict:
boundary = pdict['boundary']
if not valid_boundary(boundary):
raise ValueError('Invalid boundary in multipart form: %r'
% (boundary,))
nextpart = "--" + boundary
lastpart = "--" + boundary + "--"
partdict = {}
terminator = ""
while terminator != lastpart:
bytes = -1
data = None
if terminator:
# At start of next part. Read headers first.
headers = http.client.parse_headers(fp)
clength = headers.get('content-length')
if clength:
try:
bytes = int(clength)
except ValueError:
pass
if bytes > 0:
if maxlen and bytes > maxlen:
raise ValueError('Maximum content length exceeded')
data = fp.read(bytes)
else:
data = ""
# Read lines until end of part.
lines = []
while 1:
line = fp.readline()
if not line:
terminator = lastpart # End outer loop
break
if line.startswith("--"):
terminator = line.rstrip()
if terminator in (nextpart, lastpart):
break
lines.append(line)
# Done with part.
if data is None:
continue
if bytes < 0:
if lines:
# Strip final line terminator
line = lines[-1]
if line[-2:] == "\r\n":
line = line[:-2]
elif line[-1:] == "\n":
line = line[:-1]
lines[-1] = line
data = "".join(lines)
line = headers['content-disposition']
if not line:
continue
key, params = parse_header(line)
if key != 'form-data':
continue
if 'name' in params:
name = params['name']
else:
continue
if name in partdict:
partdict[name].append(data)
else:
partdict[name] = [data]
return partdict
def _parseparam(s):
while s[:1] == ';':
s = s[1:]
end = s.find(';')
while end > 0 and s.count('"', 0, end) % 2:
end = s.find(';', end + 1)
if end < 0:
end = len(s)
f = s[:end]
yield f.strip()
s = s[end:]
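# Editor's note -- illustration: list(_parseparam(';a/b; x=1; y="q;z"'))
# yields ['a/b', 'x=1', 'y="q;z"']; a semicolon inside double quotes does
# not split parameters.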
def parse_header(line):
"""Parse a Content-type like header.
Return the main content-type and a dictionary of options.
"""
parts = _parseparam(';' + line)
key = parts.__next__()
pdict = {}
for p in parts:
i = p.find('=')
if i >= 0:
name = p[:i].strip().lower()
value = p[i+1:].strip()
if len(value) >= 2 and value[0] == value[-1] == '"':
value = value[1:-1]
value = value.replace('\\\\', '\\').replace('\\"', '"')
pdict[name] = value
return key, pdict
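# Example (standard behaviour of parse_header()):
#
#     >>> parse_header('text/html; charset=utf-8')
#     ('text/html', {'charset': 'utf-8'})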
# Classes for field storage
# =========================
class MiniFieldStorage:
"""Like FieldStorage, for use when no file uploads are possible."""
# Dummy attributes
filename = None
list = None
type = None
file = None
type_options = {}
disposition = None
disposition_options = {}
headers = {}
def __init__(self, name, value):
"""Constructor from field name and value."""
self.name = name
self.value = value
# self.file = StringIO(value)
def __repr__(self):
"""Return printable representation."""
return "MiniFieldStorage(%r, %r)" % (self.name, self.value)
class FieldStorage:
"""Store a sequence of fields, reading multipart/form-data.
This class provides naming, typing, files stored on disk, and
more. At the top level, it is accessible like a dictionary, whose
keys are the field names. (Note: None can occur as a field name.)
The items are either a Python list (if there's multiple values) or
another FieldStorage or MiniFieldStorage object. If it's a single
object, it has the following attributes:
name: the field name, if specified; otherwise None
filename: the filename, if specified; otherwise None; this is the
client side filename, *not* the file name on which it is
stored (that's a temporary file you don't deal with)
value: the value as a *string*; for file uploads, this
transparently reads the file every time you request the value
and returns *bytes*
file: the file(-like) object from which you can read the data *as
bytes* ; None if the data is stored a simple string
type: the content-type, or None if not specified
type_options: dictionary of options specified on the content-type
line
disposition: content-disposition, or None if not specified
disposition_options: dictionary of corresponding options
headers: a dictionary(-like) object (sometimes email.message.Message or a
subclass thereof) containing *all* headers
The class is subclassable, mostly for the purpose of overriding
the make_file() method, which is called internally to come up with
a file open for reading and writing. This makes it possible to
override the default choice of storing all files in a temporary
directory and unlinking them as soon as they have been opened.
"""
def __init__(self, fp=None, headers=None, outerboundary=b'',
environ=os.environ, keep_blank_values=0, strict_parsing=0,
limit=None, encoding='utf-8', errors='replace'):
"""Constructor. Read multipart/* until last part.
Arguments, all optional:
fp : file pointer; default: sys.stdin.buffer
(not used when the request method is GET)
Can be :
1. a TextIOWrapper object
2. an object whose read() and readline() methods return bytes
headers : header dictionary-like object; default:
taken from environ as per CGI spec
outerboundary : terminating multipart boundary
(for internal use only)
environ : environment dictionary; default: os.environ
keep_blank_values: flag indicating whether blank values in
percent-encoded forms should be treated as blank strings.
A true value indicates that blanks should be retained as
blank strings. The default false value indicates that
blank values are to be ignored and treated as if they were
not included.
strict_parsing: flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored.
If true, errors raise a ValueError exception.
limit : used internally to read parts of multipart/form-data forms,
to exit from the reading loop when reached. It is the difference
between the form content-length and the number of bytes already
read
encoding, errors : the encoding and error handler used to decode the
binary stream to strings. Must be the same as the charset defined
for the page sending the form (content-type : meta http-equiv or
header)
"""
method = 'GET'
self.keep_blank_values = keep_blank_values
self.strict_parsing = strict_parsing
if 'REQUEST_METHOD' in environ:
method = environ['REQUEST_METHOD'].upper()
self.qs_on_post = None
if method == 'GET' or method == 'HEAD':
if 'QUERY_STRING' in environ:
qs = environ['QUERY_STRING']
elif sys.argv[1:]:
qs = sys.argv[1]
else:
qs = ""
qs = qs.encode(locale.getpreferredencoding(), 'surrogateescape')
fp = BytesIO(qs)
if headers is None:
headers = {'content-type':
"application/x-www-form-urlencoded"}
if headers is None:
headers = {}
if method == 'POST':
# Set default content-type for POST to what's traditional
headers['content-type'] = "application/x-www-form-urlencoded"
if 'CONTENT_TYPE' in environ:
headers['content-type'] = environ['CONTENT_TYPE']
if 'QUERY_STRING' in environ:
self.qs_on_post = environ['QUERY_STRING']
if 'CONTENT_LENGTH' in environ:
headers['content-length'] = environ['CONTENT_LENGTH']
if fp is None:
self.fp = sys.stdin.buffer
# self.fp.read() must return bytes
elif isinstance(fp, TextIOWrapper):
self.fp = fp.buffer
else:
self.fp = fp
self.encoding = encoding
self.errors = errors
self.headers = headers
if not isinstance(outerboundary, bytes):
raise TypeError('outerboundary must be bytes, not %s'
% type(outerboundary).__name__)
self.outerboundary = outerboundary
self.bytes_read = 0
self.limit = limit
# Process content-disposition header
cdisp, pdict = "", {}
if 'content-disposition' in self.headers:
cdisp, pdict = parse_header(self.headers['content-disposition'])
self.disposition = cdisp
self.disposition_options = pdict
self.name = None
if 'name' in pdict:
self.name = pdict['name']
self.filename = None
if 'filename' in pdict:
self.filename = pdict['filename']
self._binary_file = self.filename is not None
# Process content-type header
#
# Honor any existing content-type header. But if there is no
# content-type header, use some sensible defaults. Assume
# outerboundary is "" at the outer level, but something non-false
# inside a multi-part. The default for an inner part is text/plain,
# but for an outer part it should be urlencoded. This should catch
# bogus clients which erroneously forget to include a content-type
# header.
#
# See below for what we do if there does exist a content-type header,
# but it happens to be something we don't understand.
if 'content-type' in self.headers:
ctype, pdict = parse_header(self.headers['content-type'])
elif self.outerboundary or method != 'POST':
ctype, pdict = "text/plain", {}
else:
ctype, pdict = 'application/x-www-form-urlencoded', {}
self.type = ctype
self.type_options = pdict
if 'boundary' in pdict:
self.innerboundary = pdict['boundary'].encode(self.encoding)
else:
self.innerboundary = b""
clen = -1
if 'content-length' in self.headers:
try:
clen = int(self.headers['content-length'])
except ValueError:
pass
if maxlen and clen > maxlen:
raise ValueError('Maximum content length exceeded')
self.length = clen
if self.limit is None and clen:
self.limit = clen
self.list = self.file = None
self.done = 0
if ctype == 'application/x-www-form-urlencoded':
self.read_urlencoded()
elif ctype[:10] == 'multipart/':
self.read_multi(environ, keep_blank_values, strict_parsing)
else:
self.read_single()
def __repr__(self):
"""Return a printable representation."""
return "FieldStorage(%r, %r, %r)" % (
self.name, self.filename, self.value)
def __iter__(self):
return iter(self.keys())
def __getattr__(self, name):
if name != 'value':
raise AttributeError(name)
if self.file:
self.file.seek(0)
value = self.file.read()
self.file.seek(0)
elif self.list is not None:
value = self.list
else:
value = None
return value
def __getitem__(self, key):
"""Dictionary style indexing."""
if self.list is None:
raise TypeError("not indexable")
found = []
for item in self.list:
if item.name == key: found.append(item)
if not found:
raise KeyError(key)
if len(found) == 1:
return found[0]
else:
return found
def getvalue(self, key, default=None):
"""Dictionary style get() method, including 'value' lookup."""
if key in self:
value = self[key]
if isinstance(value, list):
return [x.value for x in value]
else:
return value.value
else:
return default
def getfirst(self, key, default=None):
""" Return the first value received."""
if key in self:
value = self[key]
if isinstance(value, list):
return value[0].value
else:
return value.value
else:
return default
def getlist(self, key):
""" Return list of received values."""
if key in self:
value = self[key]
if isinstance(value, list):
return [x.value for x in value]
else:
return [value.value]
else:
return []
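# Editor's note -- illustration: for a query string 'a=1&a=2&b=3',
# getfirst('a') returns '1', getlist('a') returns ['1', '2'] and
# getlist('missing') returns [].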
def keys(self):
"""Dictionary style keys() method."""
if self.list is None:
raise TypeError("not indexable")
return list(set(item.name for item in self.list))
def __contains__(self, key):
"""Dictionary style __contains__ method."""
if self.list is None:
raise TypeError("not indexable")
return any(item.name == key for item in self.list)
def __len__(self):
"""Dictionary style len(x) support."""
return len(self.keys())
def __bool__(self):
# Truth value follows the field list (None or empty -> False).
return bool(self.list)
def read_urlencoded(self):
"""Internal: read data in query string format."""
qs = self.fp.read(self.length)
if not isinstance(qs, bytes):
raise ValueError("%s should return bytes, got %s" \
% (self.fp, type(qs).__name__))
qs = qs.decode(self.encoding, self.errors)
if self.qs_on_post:
qs += '&' + self.qs_on_post
self.list = []
query = urllib.parse.parse_qsl(
qs, self.keep_blank_values, self.strict_parsing,
encoding=self.encoding, errors=self.errors)
for key, value in query:
self.list.append(MiniFieldStorage(key, value))
self.skip_lines()
FieldStorageClass = None
def read_multi(self, environ, keep_blank_values, strict_parsing):
"""Internal: read a part that is itself multipart."""
ib = self.innerboundary
if not valid_boundary(ib):
raise ValueError('Invalid boundary in multipart form: %r' % (ib,))
self.list = []
if self.qs_on_post:
query = urllib.parse.parse_qsl(
self.qs_on_post, self.keep_blank_values, self.strict_parsing,
encoding=self.encoding, errors=self.errors)
for key, value in query:
self.list.append(MiniFieldStorage(key, value))
FieldStorageClass = None
klass = self.FieldStorageClass or self.__class__
first_line = self.fp.readline() # bytes
if not isinstance(first_line, bytes):
raise ValueError("%s should return bytes, got %s" \
% (self.fp, type(first_line).__name__))
self.bytes_read += len(first_line)
# first line holds boundary ; ignore it, or check that
# b"--" + ib == first_line.strip() ?
while True:
parser = FeedParser()
hdr_text = b""
while True:
data = self.fp.readline()
hdr_text += data
if not data.strip():
break
if not hdr_text:
break
# parser takes strings, not bytes
self.bytes_read += len(hdr_text)
parser.feed(hdr_text.decode(self.encoding, self.errors))
headers = parser.close()
part = klass(self.fp, headers, ib, environ, keep_blank_values,
                         strict_parsing, self.limit - self.bytes_read,
self.encoding, self.errors)
self.bytes_read += part.bytes_read
self.list.append(part)
if self.bytes_read >= self.length:
break
self.skip_lines()
def read_single(self):
"""Internal: read an atomic part."""
if self.length >= 0:
self.read_binary()
self.skip_lines()
else:
self.read_lines()
self.file.seek(0)
bufsize = 8*1024 # I/O buffering size for copy to file
def read_binary(self):
"""Internal: read binary data."""
self.file = self.make_file()
todo = self.length
if todo >= 0:
while todo > 0:
data = self.fp.read(min(todo, self.bufsize)) # bytes
if not isinstance(data, bytes):
raise ValueError("%s should return bytes, got %s"
% (self.fp, type(data).__name__))
self.bytes_read += len(data)
if not data:
self.done = -1
break
self.file.write(data)
todo = todo - len(data)
def read_lines(self):
"""Internal: read lines until EOF or outerboundary."""
if self._binary_file:
self.file = self.__file = BytesIO() # store data as bytes for files
else:
self.file = self.__file = StringIO() # as strings for other fields
if self.outerboundary:
self.read_lines_to_outerboundary()
else:
self.read_lines_to_eof()
def __write(self, line):
"""line is always bytes, not string"""
if self.__file is not None:
if self.__file.tell() + len(line) > 1000:
self.file = self.make_file()
data = self.__file.getvalue()
self.file.write(data)
self.__file = None
if self._binary_file:
# keep bytes
self.file.write(line)
else:
# decode to string
self.file.write(line.decode(self.encoding, self.errors))
def read_lines_to_eof(self):
"""Internal: read lines until EOF."""
while 1:
line = self.fp.readline(1<<16) # bytes
self.bytes_read += len(line)
if not line:
self.done = -1
break
self.__write(line)
def read_lines_to_outerboundary(self):
"""Internal: read lines until outerboundary.
Data is read as bytes: boundaries and line ends must be converted
to bytes for comparisons.
"""
next_boundary = b"--" + self.outerboundary
last_boundary = next_boundary + b"--"
delim = b""
last_line_lfend = True
_read = 0
while 1:
if _read >= self.limit:
break
line = self.fp.readline(1<<16) # bytes
self.bytes_read += len(line)
_read += len(line)
if not line:
self.done = -1
break
if line.startswith(b"--") and last_line_lfend:
strippedline = line.rstrip()
if strippedline == next_boundary:
break
if strippedline == last_boundary:
self.done = 1
break
odelim = delim
if line.endswith(b"\r\n"):
delim = b"\r\n"
line = line[:-2]
last_line_lfend = True
elif line.endswith(b"\n"):
delim = b"\n"
line = line[:-1]
last_line_lfend = True
else:
delim = b""
last_line_lfend = False
self.__write(odelim + line)
def skip_lines(self):
"""Internal: skip lines until outer boundary if defined."""
if not self.outerboundary or self.done:
return
next_boundary = b"--" + self.outerboundary
last_boundary = next_boundary + b"--"
last_line_lfend = True
while True:
line = self.fp.readline(1<<16)
self.bytes_read += len(line)
if not line:
self.done = -1
break
if line.endswith(b"--") and last_line_lfend:
strippedline = line.strip()
if strippedline == next_boundary:
break
if strippedline == last_boundary:
self.done = 1
break
last_line_lfend = line.endswith(b'\n')
def make_file(self):
"""Overridable: return a readable & writable file.
The file will be used as follows:
- data is written to it
- seek(0)
- data is read from it
The file is opened in binary mode for files, in text mode
for other fields
This version opens a temporary file for reading and writing,
and immediately deletes (unlinks) it. The trick (on Unix!) is
that the file can still be used, but it can't be opened by
another process, and it will automatically be deleted when it
is closed or when the current process terminates.
If you want a more permanent file, you derive a class which
overrides this method. If you want a visible temporary file
that is nevertheless automatically deleted when the script
terminates, try defining a __del__ method in a derived class
which unlinks the temporary files you have created.
"""
if self._binary_file:
return tempfile.TemporaryFile("wb+")
else:
return tempfile.TemporaryFile("w+",
encoding=self.encoding, newline = '\n')
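# Illustrative sketch (not part of the original module): deriving a class
# that keeps uploads as visible on-disk files, as the docstring above
# suggests. The target directory is hypothetical.
#
#   import tempfile
#
#   class VisibleFieldStorage(FieldStorage):
#       def make_file(self):
#           # delete=False keeps the file on disk after it is closed
#           if self._binary_file:
#               return tempfile.NamedTemporaryFile(
#                   "wb+", delete=False, dir="/tmp/uploads")
#           return tempfile.NamedTemporaryFile(
#               "w+", delete=False, dir="/tmp/uploads",
#               encoding=self.encoding, newline='\n')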
# Test/debug code
# ===============
def test(environ=os.environ):
"""Robust test CGI script, usable as main program.
Write minimal HTTP headers and dump all information provided to
the script in HTML form.
"""
print("Content-type: text/html")
print()
sys.stderr = sys.stdout
try:
form = FieldStorage() # Replace with other classes to test those
print_directory()
print_arguments()
print_form(form)
print_environ(environ)
print_environ_usage()
def f():
exec("testing print_exception() -- <I>italics?</I>")
def g(f=f):
f()
print("<H3>What follows is a test, not an actual exception:</H3>")
g()
except:
print_exception()
print("<H1>Second try with a small maxlen...</H1>")
global maxlen
maxlen = 50
try:
form = FieldStorage() # Replace with other classes to test those
print_directory()
print_arguments()
print_form(form)
print_environ(environ)
except:
print_exception()
def print_exception(type=None, value=None, tb=None, limit=None):
if type is None:
type, value, tb = sys.exc_info()
import traceback
print()
print("<H3>Traceback (most recent call last):</H3>")
list = traceback.format_tb(tb, limit) + \
traceback.format_exception_only(type, value)
print("<PRE>%s<B>%s</B></PRE>" % (
html.escape("".join(list[:-1])),
html.escape(list[-1]),
))
del tb
def print_environ(environ=os.environ):
"""Dump the shell environment as HTML."""
keys = sorted(environ.keys())
print()
print("<H3>Shell Environment:</H3>")
print("<DL>")
for key in keys:
print("<DT>", html.escape(key), "<DD>", html.escape(environ[key]))
print("</DL>")
print()
def print_form(form):
"""Dump the contents of a form as HTML."""
keys = sorted(form.keys())
print()
print("<H3>Form Contents:</H3>")
if not keys:
print("<P>No form fields.")
print("<DL>")
for key in keys:
print("<DT>" + html.escape(key) + ":", end=' ')
value = form[key]
print("<i>" + html.escape(repr(type(value))) + "</i>")
print("<DD>" + html.escape(repr(value)))
print("</DL>")
print()
def print_directory():
"""Dump the current directory as HTML."""
print()
print("<H3>Current Working Directory:</H3>")
try:
pwd = os.getcwd()
except os.error as msg:
print("os.error:", html.escape(str(msg)))
else:
print(html.escape(pwd))
print()
def print_arguments():
print()
print("<H3>Command Line Arguments:</H3>")
print()
print(sys.argv)
print()
def print_environ_usage():
"""Dump a list of environment variables used by CGI as HTML."""
print("""
<H3>These environment variables could have been set:</H3>
<UL>
<LI>AUTH_TYPE
<LI>CONTENT_LENGTH
<LI>CONTENT_TYPE
<LI>DATE_GMT
<LI>DATE_LOCAL
<LI>DOCUMENT_NAME
<LI>DOCUMENT_ROOT
<LI>DOCUMENT_URI
<LI>GATEWAY_INTERFACE
<LI>LAST_MODIFIED
<LI>PATH
<LI>PATH_INFO
<LI>PATH_TRANSLATED
<LI>QUERY_STRING
<LI>REMOTE_ADDR
<LI>REMOTE_HOST
<LI>REMOTE_IDENT
<LI>REMOTE_USER
<LI>REQUEST_METHOD
<LI>SCRIPT_NAME
<LI>SERVER_NAME
<LI>SERVER_PORT
<LI>SERVER_PROTOCOL
<LI>SERVER_ROOT
<LI>SERVER_SOFTWARE
</UL>
In addition, HTTP headers sent by the server may be passed in the
environment as well. Here are some common variable names:
<UL>
<LI>HTTP_ACCEPT
<LI>HTTP_CONNECTION
<LI>HTTP_HOST
<LI>HTTP_PRAGMA
<LI>HTTP_REFERER
<LI>HTTP_USER_AGENT
</UL>
""")
# Utilities
# =========
def escape(s, quote=None):
"""Deprecated API."""
warn("cgi.escape is deprecated, use html.escape instead",
PendingDeprecationWarning, stacklevel=2)
s = s.replace("&", "&") # Must be done first!
s = s.replace("<", "<")
s = s.replace(">", ">")
if quote:
s = s.replace('"', """)
return s
def valid_boundary(s, _vb_pattern=None):
import re
if isinstance(s, bytes):
_vb_pattern = b"^[ -~]{0,200}[!-~]$"
else:
_vb_pattern = "^[ -~]{0,200}[!-~]$"
return re.match(_vb_pattern, s)
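# Illustrative note: the pattern accepts printable ASCII whose final
# character is not a space. For example:
#
#   valid_boundary(b'----WebKitFormBoundaryABC123')   # -> match object
#   valid_boundary(b'ends with a space ')             # -> None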
# Invoke mainline
# ===============
# Call test() when this file is run as a script (not imported as a module)
if __name__ == '__main__':
test()
| lgpl-3.0 |
adrgerez/ardublockly | package/wxcef_build/pack_ardublockly_wxcef.py | 4 | 9610 | #!/usr/bin/env python2
# -*- coding: utf-8 -*- #
#
# Creates a zip file of the self executable Ardublockly application.
#
# Copyright (c) 2015 carlosperate https://github.com/carlosperate/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# IMPORTANT: This script is designed to be located two directory levels under
# the project root folder.
#
# This script file will create a copy of the project folder in its parent folder
# So if project folder is located in ~/projects/ardublockly it will create a
# copy in ~/projects/ardublockly_<timestamp>_<tag>.
# It will then delete unnecessary files for a working version of the self
# executable application and zip the contents of the folder.
#
import os
import sys
import time
import shutil
import struct
import zipfile
script_tag = "[Ardublockly pack] "
script_tab = " "
# The project_root_dir depends on this file location, assumed to be two levels
# below project root, so it cannot be moved without updating this variable
project_root_dir = \
os.path.dirname( # going up 1 level
os.path.dirname( # going up 1 level
os.path.dirname(os.path.realpath(__file__)))) # folder dir of this
# This script copies the ardublockly folder with a different name on the same
# directory level to easily filter what to included in the packed version
copy_dir_name = "ardublockly_packed"
copied_project_dir = os.path.join(os.path.dirname(project_root_dir),
copy_dir_name)
def set_tag(tag):
"""
    Sets the packaged zip file and copied folder tag to the input argument. So,
    the copied folder will be named "ardublockly_<tag>" and the zip file
    "ardublockly_<tag>.zip".
    :param tag: String to indicate the tag to use.
"""
print(script_tag + "Setting the Ardublockly package tag to '%s'" % tag)
global copy_dir_name
global copied_project_dir
copy_dir_name = "ardublockly_%s" % tag
copied_project_dir = os.path.join(os.path.dirname(project_root_dir),
copy_dir_name)
def copy_ardublockly_folder():
"""
Copies all the contents of the project root directory into a new folder on
the same level.
The copy operation ignores a list of directories.
:return: Boolean indicating the success state of the operation.
"""
ignore_pat = (".git*", ".svn", ".travis*", ".appveyor*", "circle.yml",
".ruby-version", "TestTemp_*", "package")
if not os.path.exists(copied_project_dir):
print(script_tab + "Copying contents of %s\n" % project_root_dir +
script_tab + "into %s" % copied_project_dir)
shutil.copytree(project_root_dir,
copied_project_dir,
symlinks=False,
ignore=shutil.ignore_patterns(*ignore_pat))
else:
print(script_tab + "ERROR: %s directory already exists!" %
copied_project_dir)
return False
return True
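# Illustrative sketch of the copy-with-ignore idiom used above, with made-up
# paths (expand "~" with os.path.expanduser in real code):
#
#   import shutil
#   shutil.copytree('/home/me/projects/ardublockly',
#                   '/home/me/projects/ardublockly_copy',
#                   ignore=shutil.ignore_patterns('.git*', 'TestTemp_*'))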
def remove_directory(dir_to_remove):
""" Removes the a given directory. """
if os.path.exists(dir_to_remove):
print(script_tab + "Removing directory %s" % dir_to_remove)
shutil.rmtree(dir_to_remove)
else:
print(script_tab + "Directory %s was not found." % dir_to_remove)
def zip_ardublockly_copy(name_append):
"""
Zips the contents of the copied project folder into a subdirectory of
the original project folder.
"""
zip_file_dir = os.path.join(project_root_dir, "upload")
zip_file_location = os.path.join(
zip_file_dir, "ardublockly_%s.zip" % name_append)
# First ensure the upload folder exists
if not os.path.exists(zip_file_dir):
os.makedirs(zip_file_dir)
os.chdir(os.path.dirname(project_root_dir))
print(script_tab + "Working directory changed to %s" % os.getcwd())
print(script_tab + "Zipping the contents of %s\n" % copied_project_dir +
script_tab + "into %s\n" % zip_file_location)
zip_file = zipfile.ZipFile(zip_file_location, "w", zipfile.ZIP_DEFLATED)
for dir_name, sub_dirs, files in os.walk(copy_dir_name):
zip_file.write(dir_name)
for filename in files:
zip_file.write(os.path.join(dir_name, filename))
zip_file.close()
def pack_ardublockly(tag):
# Set the copied folder name to the stamp
set_tag(tag)
print(script_tag + "Copying the project root folder:")
success = copy_ardublockly_folder()
if not success:
raise SystemExit(script_tab + "Exiting due to project root copy error.")
print(script_tag + "Removing unnecessary Blockly files:")
remove_directory(os.path.join(copied_project_dir, "blockly", "demos"))
remove_directory(os.path.join(copied_project_dir, "blockly", "appengine"))
remove_directory(os.path.join(copied_project_dir, "blockly", "tests"))
print(script_tag + "Removing an already zipped Ardublockly version:")
remove_directory(os.path.join(copied_project_dir, "upload"))
print(script_tag + "Removing CEF temporary files:")
remove_directory(os.path.join(copied_project_dir, "webcache"))
print(script_tag + "Creating zip file of the new Ardublockly folder:")
zip_ardublockly_copy(tag)
def tag_from_ci_env_vars(ci_name, pull_request_var, branch_var, commit_var):
"""
    Checks whether the CI environment variables for a pull request,
    commit ID and commit branch are present.
:return: String with the CI build information, or None if the CI
environmental variables could not be found.
"""
pull_request = os.environ.get(pull_request_var)
branch = os.environ.get(branch_var)
commit = os.environ.get(commit_var)
if pull_request and pull_request != "false":
try:
int(pull_request)
print(script_tab + "Pull request valid '%s' variable found: %s" %
(ci_name, pull_request))
return "pull_%s" % pull_request
except ValueError:
print(script_tab + "The pull request environmental variable " +
"'%s' value '%s' from %s is not a valid number." %
(pull_request_var, pull_request, ci_name))
if branch and commit:
print(script_tab + "Branch and commit valid '%s' variables found: %s %s"
% (ci_name, branch, commit))
        # We only return the first 10 characters of the commit ID (full length 40)
commit = "%s" % commit
return "%s_%s" % (branch, commit[:10])
print(script_tab + "The environmental variables for %s " % ci_name +
"were deemed invalid:\n" +
script_tab + "\t%s: %s\n" % (pull_request_var, pull_request) +
script_tab + "\t%s: %s\n" % (branch_var, branch) +
script_tab + "\t%s: %s" % (commit_var, commit))
return None
def get_tag():
"""
The tag will always contain the timestamp and architecture version.
If provided as a command line argument it will add an additional string,
if not it will check for environmental variables set in build servers to
create an identification tag.
:return: String with the final tag.
"""
    # All tags begin with the architecture type (based on the Python build)
    # and the current time stamp
arch_time_stamp = "%sbit_%s" % ((struct.calcsize('P') * 8),
time.strftime("%Y-%m-%d_%H.%M.%S"))
# Check if a command line argument has been given
if len(sys.argv) > 1:
# Take the first argument and use it as a tag appendage
print(script_tab + "Command line argument '%s' found and will be used "
"for package tag." % sys.argv[1])
return "%s_%s" % (arch_time_stamp, sys.argv[1])
else:
print(script_tab + "No command line argument found")
# Check for Travis-CI environmental variables to create tag appendage
print(script_tab + "Checking Travis-CI environment variables for tag:")
travis_tag = tag_from_ci_env_vars(ci_name="Travis-CI",
pull_request_var="TRAVIS_PULL_REQUEST",
branch_var="TRAVIS_BRANCH",
commit_var="TRAVIS_COMMIT")
if travis_tag:
return "%s_%s" % (arch_time_stamp, travis_tag)
# Check for AppVeyor environmental variables to create tag appendage
print(script_tab + "Checking AppVeyor environment variables for tag:")
appveyor_tag = tag_from_ci_env_vars(
ci_name="AppVeyor",
pull_request_var="APPVEYOR_PULL_REQUEST_NUMBER",
branch_var="APPVEYOR_REPO_BRANCH",
commit_var="APPVEYOR_REPO_COMMIT")
if appveyor_tag:
return "%s_%s" % (arch_time_stamp, appveyor_tag)
return arch_time_stamp
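# Illustrative note: struct.calcsize('P') is the size of a C pointer in
# bytes, so multiplying by 8 yields the interpreter's architecture in bits:
#
#   import struct
#   struct.calcsize('P') * 8   # -> 64 on a 64-bit Python, 32 on a 32-bit one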
def main():
print(script_tag + "Pack Ardublockly script started.")
print(script_tag + "Checking for tag to attach to zip file:")
tag = get_tag()
pack_ardublockly(tag)
if __name__ == "__main__":
main()
| apache-2.0 |
tosolveit/scikit-learn | sklearn/datasets/lfw.py | 141 | 19372 | """Loader for the Labeled Faces in the Wild (LFW) dataset
This dataset is a collection of JPEG pictures of famous people collected
over the internet, all details are available on the official website:
http://vis-www.cs.umass.edu/lfw/
Each picture is centered on a single face. The typical task is called
Face Verification: given a pair of two pictures, a binary classifier
must predict whether the two images are from the same person.
An alternative task, Face Recognition or Face Identification is:
given the picture of the face of an unknown person, identify the name
of the person by referring to a gallery of previously seen pictures of
identified persons.
Both Face Verification and Face Recognition are tasks that are typically
performed on the output of a model trained to perform Face Detection. The
most popular model for Face Detection is called Viola-Jones and is
implemented in the OpenCV library. The LFW faces were extracted by this face
detector from various online websites.
"""
# Copyright (c) 2011 Olivier Grisel <[email protected]>
# License: BSD 3 clause
from os import listdir, makedirs, remove
from os.path import join, exists, isdir
from sklearn.utils import deprecated
import logging
import numpy as np
try:
import urllib.request as urllib # for backwards compatibility
except ImportError:
import urllib
from .base import get_data_home, Bunch
from ..externals.joblib import Memory
from ..externals.six import b
logger = logging.getLogger(__name__)
BASE_URL = "http://vis-www.cs.umass.edu/lfw/"
ARCHIVE_NAME = "lfw.tgz"
FUNNELED_ARCHIVE_NAME = "lfw-funneled.tgz"
TARGET_FILENAMES = [
'pairsDevTrain.txt',
'pairsDevTest.txt',
'pairs.txt',
]
def scale_face(face):
"""Scale back to 0-1 range in case of normalization for plotting"""
scaled = face - face.min()
scaled /= scaled.max()
return scaled
#
# Common private utilities for data fetching from the original LFW website
# local disk caching, and image decoding.
#
def check_fetch_lfw(data_home=None, funneled=True, download_if_missing=True):
"""Helper function to download any missing LFW data"""
data_home = get_data_home(data_home=data_home)
lfw_home = join(data_home, "lfw_home")
if funneled:
archive_path = join(lfw_home, FUNNELED_ARCHIVE_NAME)
data_folder_path = join(lfw_home, "lfw_funneled")
archive_url = BASE_URL + FUNNELED_ARCHIVE_NAME
else:
archive_path = join(lfw_home, ARCHIVE_NAME)
data_folder_path = join(lfw_home, "lfw")
archive_url = BASE_URL + ARCHIVE_NAME
if not exists(lfw_home):
makedirs(lfw_home)
for target_filename in TARGET_FILENAMES:
target_filepath = join(lfw_home, target_filename)
if not exists(target_filepath):
if download_if_missing:
url = BASE_URL + target_filename
logger.warning("Downloading LFW metadata: %s", url)
urllib.urlretrieve(url, target_filepath)
else:
raise IOError("%s is missing" % target_filepath)
if not exists(data_folder_path):
if not exists(archive_path):
if download_if_missing:
logger.warning("Downloading LFW data (~200MB): %s", archive_url)
urllib.urlretrieve(archive_url, archive_path)
else:
raise IOError("%s is missing" % target_filepath)
import tarfile
logger.info("Decompressing the data archive to %s", data_folder_path)
tarfile.open(archive_path, "r:gz").extractall(path=lfw_home)
remove(archive_path)
return lfw_home, data_folder_path
def _load_imgs(file_paths, slice_, color, resize):
"""Internally used to load images"""
# Try to import imread and imresize from PIL. We do this here to prevent
# the whole sklearn.datasets module from depending on PIL.
try:
try:
from scipy.misc import imread
except ImportError:
from scipy.misc.pilutil import imread
from scipy.misc import imresize
except ImportError:
raise ImportError("The Python Imaging Library (PIL)"
" is required to load data from jpeg files")
# compute the portion of the images to load to respect the slice_ parameter
# given by the caller
default_slice = (slice(0, 250), slice(0, 250))
if slice_ is None:
slice_ = default_slice
else:
slice_ = tuple(s or ds for s, ds in zip(slice_, default_slice))
h_slice, w_slice = slice_
h = (h_slice.stop - h_slice.start) // (h_slice.step or 1)
w = (w_slice.stop - w_slice.start) // (w_slice.step or 1)
if resize is not None:
resize = float(resize)
h = int(resize * h)
w = int(resize * w)
# allocate some contiguous memory to host the decoded image slices
n_faces = len(file_paths)
if not color:
faces = np.zeros((n_faces, h, w), dtype=np.float32)
else:
faces = np.zeros((n_faces, h, w, 3), dtype=np.float32)
# iterate over the collected file path to load the jpeg files as numpy
# arrays
for i, file_path in enumerate(file_paths):
if i % 1000 == 0:
logger.info("Loading face #%05d / %05d", i + 1, n_faces)
# Checks if jpeg reading worked. Refer to issue #3594 for more
# details.
img = imread(file_path)
        if img.ndim == 0:
raise RuntimeError("Failed to read the image file %s, "
"Please make sure that libjpeg is installed"
% file_path)
face = np.asarray(img[slice_], dtype=np.float32)
face /= 255.0 # scale uint8 coded colors to the [0.0, 1.0] floats
if resize is not None:
face = imresize(face, resize)
if not color:
# average the color channels to compute a gray levels
            # representation
face = face.mean(axis=2)
faces[i, ...] = face
return faces
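# Illustrative note: with the default slice and resize used by the loaders
# below, the 250x250 originals crop and scale down to 62 x 47:
#
#   h_slice, w_slice = slice(70, 195), slice(78, 172)
#   h, w = 195 - 70, 172 - 78          # 125 x 94 after cropping
#   int(0.5 * h), int(0.5 * w)         # -> (62, 47) after resize=0.5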
#
# Task #1: Face Identification on picture with names
#
def _fetch_lfw_people(data_folder_path, slice_=None, color=False, resize=None,
min_faces_per_person=0):
"""Perform the actual data loading for the lfw people dataset
This operation is meant to be cached by a joblib wrapper.
"""
    # scan the data folder content to retain people with more than
# `min_faces_per_person` face pictures
person_names, file_paths = [], []
for person_name in sorted(listdir(data_folder_path)):
folder_path = join(data_folder_path, person_name)
if not isdir(folder_path):
continue
paths = [join(folder_path, f) for f in listdir(folder_path)]
n_pictures = len(paths)
if n_pictures >= min_faces_per_person:
person_name = person_name.replace('_', ' ')
person_names.extend([person_name] * n_pictures)
file_paths.extend(paths)
n_faces = len(file_paths)
if n_faces == 0:
raise ValueError("min_faces_per_person=%d is too restrictive" %
min_faces_per_person)
target_names = np.unique(person_names)
target = np.searchsorted(target_names, person_names)
faces = _load_imgs(file_paths, slice_, color, resize)
# shuffle the faces with a deterministic RNG scheme to avoid having
# all faces of the same person in a row, as it would break some
# cross validation and learning algorithms such as SGD and online
# k-means that make an IID assumption
indices = np.arange(n_faces)
np.random.RandomState(42).shuffle(indices)
faces, target = faces[indices], target[indices]
return faces, target, target_names
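# Illustrative sketch of the deterministic shuffle above, runnable
# standalone:
#
#   import numpy as np
#   indices = np.arange(5)
#   np.random.RandomState(42).shuffle(indices)   # same order on every run
#   # then: faces, target = faces[indices], target[indices]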
def fetch_lfw_people(data_home=None, funneled=True, resize=0.5,
min_faces_per_person=0, color=False,
slice_=(slice(70, 195), slice(78, 172)),
download_if_missing=True):
"""Loader for the Labeled Faces in the Wild (LFW) people dataset
This dataset is a collection of JPEG pictures of famous people
collected on the internet, all details are available on the
official website:
http://vis-www.cs.umass.edu/lfw/
Each picture is centered on a single face. Each pixel of each channel
(color in RGB) is encoded by a float in range 0.0 - 1.0.
The task is called Face Recognition (or Identification): given the
picture of a face, find the name of the person given a training set
(gallery).
The original images are 250 x 250 pixels, but the default slice and resize
    arguments reduce them to 62 x 47.
Parameters
----------
data_home : optional, default: None
Specify another download and cache folder for the datasets. By default
all scikit learn data is stored in '~/scikit_learn_data' subfolders.
funneled : boolean, optional, default: True
Download and use the funneled variant of the dataset.
resize : float, optional, default 0.5
        Ratio used to resize each face picture.
    min_faces_per_person : int, optional, default 0
The extracted dataset will only retain pictures of people that have at
least `min_faces_per_person` different pictures.
color : boolean, optional, default False
Keep the 3 RGB channels instead of averaging them to a single
gray level channel. If color is True the shape of the data has
        one more dimension than the shape with color = False.
slice_ : optional
Provide a custom 2D slice (height, width) to extract the
        'interesting' part of the jpeg files and avoid using statistical
        correlation from the background
download_if_missing : optional, True by default
        If False, raise an IOError if the data is not locally available
instead of trying to download the data from the source site.
Returns
-------
dataset : dict-like object with the following attributes:
dataset.data : numpy array of shape (13233, 2914)
Each row corresponds to a ravelled face image of original size 62 x 47
pixels. Changing the ``slice_`` or resize parameters will change the shape
of the output.
dataset.images : numpy array of shape (13233, 62, 47)
Each row is a face image corresponding to one of the 5749 people in
the dataset. Changing the ``slice_`` or resize parameters will change the shape
of the output.
dataset.target : numpy array of shape (13233,)
Labels associated to each face image. Those labels range from 0-5748
and correspond to the person IDs.
dataset.DESCR : string
Description of the Labeled Faces in the Wild (LFW) dataset.
"""
lfw_home, data_folder_path = check_fetch_lfw(
data_home=data_home, funneled=funneled,
download_if_missing=download_if_missing)
logger.info('Loading LFW people faces from %s', lfw_home)
# wrap the loader in a memoizing function that will return memmaped data
# arrays for optimal memory usage
m = Memory(cachedir=lfw_home, compress=6, verbose=0)
load_func = m.cache(_fetch_lfw_people)
# load and memoize the pairs as np arrays
faces, target, target_names = load_func(
data_folder_path, resize=resize,
min_faces_per_person=min_faces_per_person, color=color, slice_=slice_)
# pack the results as a Bunch instance
return Bunch(data=faces.reshape(len(faces), -1), images=faces,
target=target, target_names=target_names,
DESCR="LFW faces dataset")
#
# Task #2: Face Verification on pairs of face pictures
#
def _fetch_lfw_pairs(index_file_path, data_folder_path, slice_=None,
color=False, resize=None):
"""Perform the actual data loading for the LFW pairs dataset
This operation is meant to be cached by a joblib wrapper.
"""
# parse the index file to find the number of pairs to be able to allocate
# the right amount of memory before starting to decode the jpeg files
with open(index_file_path, 'rb') as index_file:
split_lines = [ln.strip().split(b('\t')) for ln in index_file]
pair_specs = [sl for sl in split_lines if len(sl) > 2]
n_pairs = len(pair_specs)
    # iterating over the metadata lines for each pair to find the filename to
# decode and load in memory
target = np.zeros(n_pairs, dtype=np.int)
file_paths = list()
for i, components in enumerate(pair_specs):
if len(components) == 3:
target[i] = 1
pair = (
(components[0], int(components[1]) - 1),
(components[0], int(components[2]) - 1),
)
elif len(components) == 4:
target[i] = 0
pair = (
(components[0], int(components[1]) - 1),
(components[2], int(components[3]) - 1),
)
else:
raise ValueError("invalid line %d: %r" % (i + 1, components))
for j, (name, idx) in enumerate(pair):
try:
person_folder = join(data_folder_path, name)
except TypeError:
person_folder = join(data_folder_path, str(name, 'UTF-8'))
filenames = list(sorted(listdir(person_folder)))
file_path = join(person_folder, filenames[idx])
file_paths.append(file_path)
pairs = _load_imgs(file_paths, slice_, color, resize)
shape = list(pairs.shape)
n_faces = shape.pop(0)
shape.insert(0, 2)
shape.insert(0, n_faces // 2)
pairs.shape = shape
return pairs, target, np.array(['Different persons', 'Same person'])
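# Illustrative note: the in-place shape manipulation above turns a stack of
# 2*n images into n pairs of 2 images without copying, e.g.:
#
#   import numpy as np
#   imgs = np.zeros((8, 62, 47))
#   imgs.shape = (4, 2, 62, 47)   # in-place reshape, no data copy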
@deprecated("Function 'load_lfw_people' has been deprecated in 0.17 and will be "
"removed in 0.19."
"Use fetch_lfw_people(download_if_missing=False) instead.")
def load_lfw_people(download_if_missing=False, **kwargs):
"""Alias for fetch_lfw_people(download_if_missing=False)
Check fetch_lfw_people.__doc__ for the documentation and parameter list.
"""
return fetch_lfw_people(download_if_missing=download_if_missing, **kwargs)
def fetch_lfw_pairs(subset='train', data_home=None, funneled=True, resize=0.5,
color=False, slice_=(slice(70, 195), slice(78, 172)),
download_if_missing=True):
"""Loader for the Labeled Faces in the Wild (LFW) pairs dataset
This dataset is a collection of JPEG pictures of famous people
collected on the internet, all details are available on the
official website:
http://vis-www.cs.umass.edu/lfw/
Each picture is centered on a single face. Each pixel of each channel
(color in RGB) is encoded by a float in range 0.0 - 1.0.
The task is called Face Verification: given a pair of two pictures,
a binary classifier must predict whether the two images are from
the same person.
In the official `README.txt`_ this task is described as the
"Restricted" task. As I am not sure as to implement the
"Unrestricted" variant correctly, I left it as unsupported for now.
.. _`README.txt`: http://vis-www.cs.umass.edu/lfw/README.txt
The original images are 250 x 250 pixels, but the default slice and resize
    arguments reduce them to 62 x 47.
Read more in the :ref:`User Guide <labeled_faces_in_the_wild>`.
Parameters
----------
subset : optional, default: 'train'
Select the dataset to load: 'train' for the development training
set, 'test' for the development test set, and '10_folds' for the
official evaluation set that is meant to be used with a 10-folds
cross validation.
data_home : optional, default: None
Specify another download and cache folder for the datasets. By
default all scikit learn data is stored in '~/scikit_learn_data'
subfolders.
funneled : boolean, optional, default: True
Download and use the funneled variant of the dataset.
resize : float, optional, default 0.5
        Ratio used to resize each face picture.
color : boolean, optional, default False
Keep the 3 RGB channels instead of averaging them to a single
gray level channel. If color is True the shape of the data has
        one more dimension than the shape with color = False.
slice_ : optional
Provide a custom 2D slice (height, width) to extract the
        'interesting' part of the jpeg files and avoid using statistical
        correlation from the background
download_if_missing : optional, True by default
        If False, raise an IOError if the data is not locally available
instead of trying to download the data from the source site.
Returns
-------
The data is returned as a Bunch object with the following attributes:
data : numpy array of shape (2200, 5828)
Each row corresponds to 2 ravel'd face images of original size 62 x 47
pixels. Changing the ``slice_`` or resize parameters will change the shape
of the output.
pairs : numpy array of shape (2200, 2, 62, 47)
Each row has 2 face images corresponding to same or different person
from the dataset containing 5749 people. Changing the ``slice_`` or resize
parameters will change the shape of the output.
    target : numpy array of shape (2200,)
Labels associated to each pair of images. The two label values being
different persons or the same person.
DESCR : string
Description of the Labeled Faces in the Wild (LFW) dataset.
"""
lfw_home, data_folder_path = check_fetch_lfw(
data_home=data_home, funneled=funneled,
download_if_missing=download_if_missing)
logger.info('Loading %s LFW pairs from %s', subset, lfw_home)
# wrap the loader in a memoizing function that will return memmaped data
# arrays for optimal memory usage
m = Memory(cachedir=lfw_home, compress=6, verbose=0)
load_func = m.cache(_fetch_lfw_pairs)
# select the right metadata file according to the requested subset
label_filenames = {
'train': 'pairsDevTrain.txt',
'test': 'pairsDevTest.txt',
'10_folds': 'pairs.txt',
}
if subset not in label_filenames:
raise ValueError("subset='%s' is invalid: should be one of %r" % (
subset, list(sorted(label_filenames.keys()))))
index_file_path = join(lfw_home, label_filenames[subset])
# load and memoize the pairs as np arrays
pairs, target, target_names = load_func(
index_file_path, data_folder_path, resize=resize, color=color,
slice_=slice_)
# pack the results as a Bunch instance
return Bunch(data=pairs.reshape(len(pairs), -1), pairs=pairs,
target=target, target_names=target_names,
DESCR="'%s' segment of the LFW pairs dataset" % subset)
@deprecated("Function 'load_lfw_pairs' has been deprecated in 0.17 and will be "
"removed in 0.19."
"Use fetch_lfw_pairs(download_if_missing=False) instead.")
def load_lfw_pairs(download_if_missing=False, **kwargs):
"""Alias for fetch_lfw_pairs(download_if_missing=False)
Check fetch_lfw_pairs.__doc__ for the documentation and parameter list.
"""
return fetch_lfw_pairs(download_if_missing=download_if_missing, **kwargs)
| bsd-3-clause |
illicitonion/givabit | lib/sdks/google_appengine_1.7.1/google_appengine/lib/django_1_2/tests/modeltests/validation/tests.py | 50 | 5106 | from django import forms
from django.test import TestCase
from django.core.exceptions import NON_FIELD_ERRORS
from modeltests.validation import ValidationTestCase
from modeltests.validation.models import Author, Article, ModelToValidate
# Import other tests for this package.
from modeltests.validation.validators import TestModelsWithValidators
from modeltests.validation.test_unique import GetUniqueCheckTests, PerformUniqueChecksTest
from modeltests.validation.test_custom_messages import CustomMessagesTest
class BaseModelValidationTests(ValidationTestCase):
def test_missing_required_field_raises_error(self):
mtv = ModelToValidate(f_with_custom_validator=42)
self.assertFailsValidation(mtv.full_clean, ['name', 'number'])
def test_with_correct_value_model_validates(self):
mtv = ModelToValidate(number=10, name='Some Name')
self.assertEqual(None, mtv.full_clean())
def test_custom_validate_method(self):
mtv = ModelToValidate(number=11)
self.assertFailsValidation(mtv.full_clean, [NON_FIELD_ERRORS, 'name'])
def test_wrong_FK_value_raises_error(self):
mtv=ModelToValidate(number=10, name='Some Name', parent_id=3)
self.assertFailsValidation(mtv.full_clean, ['parent'])
def test_correct_FK_value_validates(self):
parent = ModelToValidate.objects.create(number=10, name='Some Name')
mtv = ModelToValidate(number=10, name='Some Name', parent_id=parent.pk)
self.assertEqual(None, mtv.full_clean())
def test_limitted_FK_raises_error(self):
# The limit_choices_to on the parent field says that a parent object's
# number attribute must be 10, so this should fail validation.
parent = ModelToValidate.objects.create(number=11, name='Other Name')
mtv = ModelToValidate(number=10, name='Some Name', parent_id=parent.pk)
self.assertFailsValidation(mtv.full_clean, ['parent'])
def test_wrong_email_value_raises_error(self):
mtv = ModelToValidate(number=10, name='Some Name', email='not-an-email')
self.assertFailsValidation(mtv.full_clean, ['email'])
def test_correct_email_value_passes(self):
mtv = ModelToValidate(number=10, name='Some Name', email='[email protected]')
self.assertEqual(None, mtv.full_clean())
def test_wrong_url_value_raises_error(self):
mtv = ModelToValidate(number=10, name='Some Name', url='not a url')
self.assertFieldFailsValidationWithMessage(mtv.full_clean, 'url', [u'Enter a valid value.'])
def test_correct_url_but_nonexisting_gives_404(self):
mtv = ModelToValidate(number=10, name='Some Name', url='http://google.com/we-love-microsoft.html')
self.assertFieldFailsValidationWithMessage(mtv.full_clean, 'url', [u'This URL appears to be a broken link.'])
def test_correct_url_value_passes(self):
mtv = ModelToValidate(number=10, name='Some Name', url='http://www.djangoproject.com/')
self.assertEqual(None, mtv.full_clean()) # This will fail if there's no Internet connection
def test_text_greater_that_charfields_max_length_eaises_erros(self):
mtv = ModelToValidate(number=10, name='Some Name'*100)
self.assertFailsValidation(mtv.full_clean, ['name',])
class ArticleForm(forms.ModelForm):
class Meta:
model = Article
exclude = ['author']
class ModelFormsTests(TestCase):
def setUp(self):
self.author = Author.objects.create(name='Joseph Kocherhans')
def test_partial_validation(self):
# Make sure the "commit=False and set field values later" idiom still
# works with model validation.
data = {
'title': 'The state of model validation',
'pub_date': '2010-1-10 14:49:00'
}
form = ArticleForm(data)
self.assertEqual(form.errors.keys(), [])
article = form.save(commit=False)
article.author = self.author
article.save()
def test_validation_with_empty_blank_field(self):
# Since a value for pub_date wasn't provided and the field is
# blank=True, model-validation should pass.
# Also, Article.clean() should be run, so pub_date will be filled after
# validation, so the form should save cleanly even though pub_date is
# not allowed to be null.
data = {
'title': 'The state of model validation',
}
article = Article(author_id=self.author.id)
form = ArticleForm(data, instance=article)
self.assertEqual(form.errors.keys(), [])
self.assertNotEqual(form.instance.pub_date, None)
article = form.save()
def test_validation_with_invalid_blank_field(self):
# Even though pub_date is set to blank=True, an invalid value was
# provided, so it should fail validation.
data = {
'title': 'The state of model validation',
'pub_date': 'never'
}
article = Article(author_id=self.author.id)
form = ArticleForm(data, instance=article)
self.assertEqual(form.errors.keys(), ['pub_date'])
| apache-2.0 |
b3j0f/schema | b3j0f/schema/test/registry.py | 1 | 6783 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2016 Jonathan Labéjof <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# --------------------------------------------------------------------
from unittest import main
from b3j0f.utils.ut import UTCase
from ..base import Schema
from ..registry import (
SchemaRegistry, registercls, getbydatatype, unregistercls
)
from uuid import uuid4
from numbers import Number
class AAA(object):
pass
class UpdateContentTest(UTCase):
def setUp(self):
class AAA(object):
pass
self.AAA = AAA
class BBB(object):
pass
self.BBB = BBB
class CCC(object):
pass
self.CCC = CCC
@registercls([AAA])
class AAASchema(Schema):
def _validate(self, data, *args, **kwargs):
return isinstance(data, AAA)
self.AAASchema = AAASchema
@registercls([BBB])
class BBBSchema(Schema):
def _validate(self, data, *args, **kwargs):
return isinstance(data, BBB)
self.BBBSchema = BBBSchema
@registercls([CCC])
class CCCSchema(Schema):
def _validate(self, data, *args, **kwargs):
return isinstance(data, CCC)
self.CCCSchema = CCCSchema
def tearDown(self):
unregistercls(self.AAASchema)
unregistercls(self.BBBSchema)
unregistercls(self.CCCSchema)
def test_number(self):
schemacls = getbydatatype(self.AAA)
self.assertIs(schemacls, self.AAASchema)
def test_str(self):
schemacls = getbydatatype(self.BBB)
self.assertIs(schemacls, self.BBBSchema)
def test_object(self):
schemacls = getbydatatype(self.CCC)
self.assertIs(schemacls, self.CCCSchema)
class DefaultTest(UTCase):
def test(self):
class TestSchema(Schema):
default = 0
schema = TestSchema()
self.assertEqual(schema.default, 0)
schema = TestSchema(default=None)
self.assertIsNone(schema._default_)
class TestSchema(Schema):
def __init__(
self, name=None, uuid=None, _=None, default=None, *args, **kwargs
):
super(TestSchema, self).__init__(*args, **kwargs)
self.name = name or TestSchema.__name__
self.uuid = str(uuid or uuid4())
self._testschema = _ or TestSchema(_=self)
self.default = default
def __hash__(self):
return hash(self.uuid)
def getschemas(self):
return {'one': self, 'two': self._testschema}
class SchemaRegistryTest(UTCase):
def setUp(self):
self.registry = SchemaRegistry()
self.schemas = set([TestSchema() for i in range(5)])
def test_init(self):
schemaregistry = SchemaRegistry()
self.assertFalse(schemaregistry._schbyname)
self.assertFalse(schemaregistry._schbyuuid)
self.assertFalse(schemaregistry._schbytype)
def test_init_w_params(self):
schemaregistry = SchemaRegistry(schbyname=2, schbyuuid=3, schbytype=4)
self.assertEqual(schemaregistry._schbyname, 2)
self.assertEqual(schemaregistry._schbyuuid, 3)
self.assertEqual(schemaregistry._schbytype, 4)
def test_register(self):
for schema in self.schemas:
self.registry.register(schema)
schemas = self.registry.getbyname(TestSchema.__name__)
self.assertEqual(schemas, self.schemas)
for schema in self.schemas:
uuid = schema.uuid
_schema = self.registry.getbyuuid(uuid)
self.assertEqual(schema, _schema)
self.registry.unregister(uuid)
self.assertRaises(KeyError, self.registry.getbyuuid, uuid)
def test_registertype(self):
class Schema(object):
def __init__(self, default, *args, **kwargs):
super(Schema, self).__init__(*args, **kwargs)
self.default = default
class IntSchema(Schema):
pass
class BoolSchema(Schema):
pass
class AAASchema(Schema):
pass
self.registry.registercls(schemacls=IntSchema, data_types=[int])
self.registry.registercls(schemacls=BoolSchema, data_types=[bool])
self.registry.registercls(schemacls=AAASchema, data_types=[Number])
schemacls = self.registry.getbydatatype(int)
self.assertIs(schemacls, IntSchema)
schemacls = self.registry.getbydatatype(bool)
self.assertIs(schemacls, BoolSchema)
self.registry.unregistercls(schemacls=IntSchema)
schemacls = self.registry.getbydatatype(int)
self.assertIs(schemacls, AAASchema)
def test_registertype_decorator(self):
class Schema(object):
def __init__(self, default, *args, **kwargs):
super(Schema, self).__init__(*args, **kwargs)
self.default = default
@self.registry.registercls([int])
class IntSchema(Schema):
pass
@self.registry.registercls([bool])
class BoolSchema(Schema):
pass
@self.registry.registercls([Number])
class AAASchema(Schema):
pass
schemacls = self.registry.getbydatatype(int)
self.assertIs(schemacls, IntSchema)
schemacls = self.registry.getbydatatype(bool)
self.assertIs(schemacls, BoolSchema)
self.registry.unregistercls(schemacls=IntSchema)
schemacls = self.registry.getbydatatype(int)
self.assertIs(schemacls, AAASchema)
if __name__ == '__main__':
main()
| mit |
memakura/s2a_fm | Snap!Files/Snap!Mobile/arduino/serial/urlhandler/protocol_loop.py | 141 | 9516 | #! python
#
# Python Serial Port Extension for Win32, Linux, BSD, Jython
# see __init__.py
#
# This module implements a loop back connection that receives back whatever it sends.
#
# The purpose of this module is.. well... You can run the unit tests with it.
# and it was so easy to implement ;-)
#
# (C) 2001-2011 Chris Liechti <[email protected]>
# this is distributed under a free software license, see license.txt
#
# URL format: loop://[option[/option...]]
# options:
# - "debug" print diagnostic messages
from serial.serialutil import *
import threading
import time
import logging
# map log level names to constants. used in fromURL()
LOGGER_LEVELS = {
'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
}
class LoopbackSerial(SerialBase):
"""Serial port implementation that simulates a loop back connection in plain software."""
BAUDRATES = (50, 75, 110, 134, 150, 200, 300, 600, 1200, 1800, 2400, 4800,
9600, 19200, 38400, 57600, 115200)
def open(self):
"""Open port with current settings. This may throw a SerialException
if the port cannot be opened."""
if self._isOpen:
raise SerialException("Port is already open.")
self.logger = None
self.buffer_lock = threading.Lock()
self.loop_buffer = bytearray()
self.cts = False
self.dsr = False
if self._port is None:
raise SerialException("Port must be configured before it can be used.")
# not that there is anything to open, but the function applies the
# options found in the URL
self.fromURL(self.port)
        # not that there is anything to configure...
self._reconfigurePort()
        # all things are set up, now for a clean start
self._isOpen = True
if not self._rtscts:
self.setRTS(True)
self.setDTR(True)
self.flushInput()
self.flushOutput()
def _reconfigurePort(self):
"""Set communication parameters on opened port. for the loop://
protocol all settings are ignored!"""
        # not that it's of any real use, but it helps in the unit tests
if not isinstance(self._baudrate, (int, long)) or not 0 < self._baudrate < 2**32:
raise ValueError("invalid baudrate: %r" % (self._baudrate))
if self.logger:
self.logger.info('_reconfigurePort()')
def close(self):
"""Close port"""
if self._isOpen:
self._isOpen = False
# in case of quick reconnects, give the server some time
time.sleep(0.3)
def makeDeviceName(self, port):
raise SerialException("there is no sensible way to turn numbers into URLs")
def fromURL(self, url):
"""extract host and port from an URL string"""
if url.lower().startswith("loop://"): url = url[7:]
try:
# process options now, directly altering self
for option in url.split('/'):
if '=' in option:
option, value = option.split('=', 1)
else:
value = None
if not option:
pass
elif option == 'logging':
logging.basicConfig() # XXX is that good to call it here?
self.logger = logging.getLogger('pySerial.loop')
self.logger.setLevel(LOGGER_LEVELS[value])
self.logger.debug('enabled logging')
else:
raise ValueError('unknown option: %r' % (option,))
except ValueError, e:
raise SerialException('expected a string in the form "[loop://][option[/option...]]": %s' % e)
# - - - - - - - - - - - - - - - - - - - - - - - -
def inWaiting(self):
"""Return the number of characters currently in the input buffer."""
if not self._isOpen: raise portNotOpenError
if self.logger:
            # note: the logged value can differ from the return value in
            # threaded environments...
self.logger.debug('inWaiting() -> %d' % (len(self.loop_buffer),))
return len(self.loop_buffer)
def read(self, size=1):
"""Read size bytes from the serial port. If a timeout is set it may
return less characters as requested. With no timeout it will block
until the requested number of bytes is read."""
if not self._isOpen: raise portNotOpenError
if self._timeout is not None:
timeout = time.time() + self._timeout
else:
timeout = None
data = bytearray()
while size > 0:
self.buffer_lock.acquire()
try:
block = to_bytes(self.loop_buffer[:size])
del self.loop_buffer[:size]
finally:
self.buffer_lock.release()
data += block
size -= len(block)
# check for timeout now, after data has been read.
# useful for timeout = 0 (non blocking) read
if timeout and time.time() > timeout:
break
return bytes(data)
def write(self, data):
"""Output the given string over the serial port. Can block if the
connection is blocked. May raise SerialException if the connection is
closed."""
if not self._isOpen: raise portNotOpenError
# ensure we're working with bytes
data = to_bytes(data)
        # calculate the approximate time that would be used to send the data
time_used_to_send = 10.0*len(data) / self._baudrate
# when a write timeout is configured check if we would be successful
# (not sending anything, not even the part that would have time)
if self._writeTimeout is not None and time_used_to_send > self._writeTimeout:
time.sleep(self._writeTimeout) # must wait so that unit test succeeds
raise writeTimeoutError
self.buffer_lock.acquire()
try:
self.loop_buffer += data
finally:
self.buffer_lock.release()
return len(data)
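# Illustrative sketch: because this is a loopback, bytes written become
# readable again immediately (subject to the simulated baudrate timing in
# write()):
#
#   s = Serial('loop://')
#   s.write(b'ping')
#   s.read(4)          # -> b'ping'
#   s.close()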
def flushInput(self):
"""Clear input buffer, discarding all that is in the buffer."""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('flushInput()')
self.buffer_lock.acquire()
try:
del self.loop_buffer[:]
finally:
self.buffer_lock.release()
def flushOutput(self):
"""Clear output buffer, aborting the current output and
discarding all that is in the buffer."""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('flushOutput()')
def sendBreak(self, duration=0.25):
"""Send break condition. Timed, returns to idle state after given
duration."""
if not self._isOpen: raise portNotOpenError
def setBreak(self, level=True):
"""Set break: Controls TXD. When active, to transmitting is
possible."""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('setBreak(%r)' % (level,))
def setRTS(self, level=True):
"""Set terminal status line: Request To Send"""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('setRTS(%r) -> state of CTS' % (level,))
self.cts = level
def setDTR(self, level=True):
"""Set terminal status line: Data Terminal Ready"""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('setDTR(%r) -> state of DSR' % (level,))
self.dsr = level
def getCTS(self):
"""Read terminal status line: Clear To Send"""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('getCTS() -> state of RTS (%r)' % (self.cts,))
return self.cts
def getDSR(self):
"""Read terminal status line: Data Set Ready"""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('getDSR() -> state of DTR (%r)' % (self.dsr,))
return self.dsr
def getRI(self):
"""Read terminal status line: Ring Indicator"""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('returning dummy for getRI()')
return False
def getCD(self):
"""Read terminal status line: Carrier Detect"""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('returning dummy for getCD()')
return True
# - - - platform specific - - -
# None so far
# assemble Serial class with the platform specific implementation and the base
# for file-like behavior. for Python 2.6 and newer, that provide the new I/O
# library, derive from io.RawIOBase
try:
import io
except ImportError:
# classic version with our own file-like emulation
class Serial(LoopbackSerial, FileLike):
pass
else:
# io library present
class Serial(LoopbackSerial, io.RawIOBase):
pass
# simple client test
if __name__ == '__main__':
import sys
s = Serial('loop://')
sys.stdout.write('%s\n' % s)
sys.stdout.write("write...\n")
s.write("hello\n")
s.flush()
sys.stdout.write("read: %s\n" % s.read(5))
s.close()
| gpl-3.0 |
pleaseproject/python-for-android | python3-alpha/python3-src/Lib/wsgiref/simple_server.py | 51 | 4859 | """BaseHTTPServer that implements the Python WSGI protocol (PEP 3333)
This is both an example of how WSGI can be implemented, and a basis for running
simple web applications on a local machine, such as might be done when testing
or debugging an application. It has not been reviewed for security issues,
however, and we strongly recommend that you use a "real" web server for
production use.
For example usage, see the 'if __name__=="__main__"' block at the end of the
module. See also the BaseHTTPServer module docs for other API information.
"""
from http.server import BaseHTTPRequestHandler, HTTPServer
import sys
import urllib.parse
from wsgiref.handlers import SimpleHandler
__version__ = "0.2"
__all__ = ['WSGIServer', 'WSGIRequestHandler', 'demo_app', 'make_server']
server_version = "WSGIServer/" + __version__
sys_version = "Python/" + sys.version.split()[0]
software_version = server_version + ' ' + sys_version
class ServerHandler(SimpleHandler):
server_software = software_version
def close(self):
try:
self.request_handler.log_request(
self.status.split(' ',1)[0], self.bytes_sent
)
finally:
SimpleHandler.close(self)
class WSGIServer(HTTPServer):
"""BaseHTTPServer that implements the Python WSGI protocol"""
application = None
def server_bind(self):
"""Override server_bind to store the server name."""
HTTPServer.server_bind(self)
self.setup_environ()
def setup_environ(self):
# Set up base environment
env = self.base_environ = {}
env['SERVER_NAME'] = self.server_name
env['GATEWAY_INTERFACE'] = 'CGI/1.1'
env['SERVER_PORT'] = str(self.server_port)
env['REMOTE_HOST']=''
env['CONTENT_LENGTH']=''
env['SCRIPT_NAME'] = ''
def get_app(self):
return self.application
def set_app(self,application):
self.application = application
class WSGIRequestHandler(BaseHTTPRequestHandler):
server_version = "WSGIServer/" + __version__
def get_environ(self):
env = self.server.base_environ.copy()
env['SERVER_PROTOCOL'] = self.request_version
env['SERVER_SOFTWARE'] = self.server_version
env['REQUEST_METHOD'] = self.command
if '?' in self.path:
path,query = self.path.split('?',1)
else:
path,query = self.path,''
env['PATH_INFO'] = urllib.parse.unquote_to_bytes(path).decode('iso-8859-1')
env['QUERY_STRING'] = query
host = self.address_string()
if host != self.client_address[0]:
env['REMOTE_HOST'] = host
env['REMOTE_ADDR'] = self.client_address[0]
if self.headers.get('content-type') is None:
env['CONTENT_TYPE'] = self.headers.get_content_type()
else:
env['CONTENT_TYPE'] = self.headers['content-type']
length = self.headers.get('content-length')
if length:
env['CONTENT_LENGTH'] = length
for k, v in self.headers.items():
k=k.replace('-','_').upper(); v=v.strip()
if k in env:
                continue  # skip content length, type, etc.
if 'HTTP_'+k in env:
env['HTTP_'+k] += ','+v # comma-separate multiple headers
else:
env['HTTP_'+k] = v
return env
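# Illustrative sketch of the header-name mangling above:
#
#   k = 'User-Agent'.replace('-', '_').upper()   # -> 'USER_AGENT'
#   # so "User-Agent: curl/7.0" ends up as env['HTTP_USER_AGENT'] = 'curl/7.0'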
def get_stderr(self):
return sys.stderr
def handle(self):
"""Handle a single HTTP request"""
self.raw_requestline = self.rfile.readline()
if not self.parse_request(): # An error code has been sent, just exit
return
handler = ServerHandler(
self.rfile, self.wfile, self.get_stderr(), self.get_environ()
)
handler.request_handler = self # backpointer for logging
handler.run(self.server.get_app())
def demo_app(environ,start_response):
from io import StringIO
stdout = StringIO()
print("Hello world!", file=stdout)
print(file=stdout)
h = sorted(environ.items())
for k,v in h:
print(k,'=',repr(v), file=stdout)
start_response("200 OK", [('Content-Type','text/plain; charset=utf-8')])
return [stdout.getvalue().encode("utf-8")]
def make_server(
host, port, app, server_class=WSGIServer, handler_class=WSGIRequestHandler
):
"""Create a new WSGI server listening on `host` and `port` for `app`"""
server = server_class((host, port), handler_class)
server.set_app(app)
return server
if __name__ == '__main__':
httpd = make_server('', 8000, demo_app)
sa = httpd.socket.getsockname()
print("Serving HTTP on", sa[0], "port", sa[1], "...")
import webbrowser
webbrowser.open('http://localhost:8000/xyz?abc')
httpd.handle_request() # serve one request, then exit
| apache-2.0 |
ASCrookes/django | django/contrib/gis/db/backends/mysql/operations.py | 328 | 2746 | from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.db.backends.base.operations import \
BaseSpatialOperations
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import aggregates
from django.db.backends.mysql.operations import DatabaseOperations
from django.utils.functional import cached_property
class MySQLOperations(BaseSpatialOperations, DatabaseOperations):
mysql = True
name = 'mysql'
select = 'AsText(%s)'
from_wkb = 'GeomFromWKB'
from_text = 'GeomFromText'
Adapter = WKTAdapter
Adaptor = Adapter # Backwards-compatibility alias.
gis_operators = {
'bbcontains': SpatialOperator(func='MBRContains'), # For consistency w/PostGIS API
'bboverlaps': SpatialOperator(func='MBROverlaps'), # .. ..
'contained': SpatialOperator(func='MBRWithin'), # .. ..
'contains': SpatialOperator(func='MBRContains'),
'disjoint': SpatialOperator(func='MBRDisjoint'),
'equals': SpatialOperator(func='MBREqual'),
'exact': SpatialOperator(func='MBREqual'),
'intersects': SpatialOperator(func='MBRIntersects'),
'overlaps': SpatialOperator(func='MBROverlaps'),
'same_as': SpatialOperator(func='MBREqual'),
'touches': SpatialOperator(func='MBRTouches'),
'within': SpatialOperator(func='MBRWithin'),
}
function_names = {
'Distance': 'ST_Distance',
'Length': 'GLength',
'Union': 'ST_Union',
}
disallowed_aggregates = (
aggregates.Collect, aggregates.Extent, aggregates.Extent3D,
aggregates.MakeLine, aggregates.Union,
)
@cached_property
def unsupported_functions(self):
unsupported = {
'AsGeoJSON', 'AsGML', 'AsKML', 'AsSVG', 'BoundingCircle',
'Difference', 'ForceRHR', 'GeoHash', 'Intersection', 'MemSize',
'Perimeter', 'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid',
'SymDifference', 'Transform', 'Translate',
}
if self.connection.mysql_version < (5, 6, 1):
unsupported.update({'Distance', 'Union'})
return unsupported
def geo_db_type(self, f):
return f.geom_type
def get_geom_placeholder(self, f, value, compiler):
"""
The placeholder here has to include MySQL's WKT constructor. Because
MySQL does not support spatial transformations, there is no need to
modify the placeholder based on the contents of the given value.
"""
if hasattr(value, 'as_sql'):
placeholder, _ = compiler.compile(value)
else:
placeholder = '%s(%%s)' % self.from_text
return placeholder
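# Illustrative example (an assumption, not from the original source): for a
# literal geometry value the placeholder wraps MySQL's WKT constructor, e.g.
#   get_geom_placeholder(f, geom, compiler) -> 'GeomFromText(%s)'
# so the geometry's WKT string is bound as an ordinary query parameter.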
| bsd-3-clause |
vmp32k/litecoin | contrib/devtools/update-translations.py | 23 | 8426 | #!/usr/bin/env python3
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'bitcoin_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for translation to be considered at all
MIN_NUM_MESSAGES = 10
# Regexp to check for Bitcoin addresses
ADDRESS_REGEXP = re.compile('([13]|bc1)[a-zA-Z0-9]{30,}')
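# Illustrative matches (assumed examples, for documentation only): the pattern
# catches legacy base58 addresses ('1...'/'3...') as well as bech32 ('bc1...'):
#   ADDRESS_REGEXP.search('send to 1BvBMSEYstWetqTFn5Au4m4GFg7xJaNVN2')  # match
#   ADDRESS_REGEXP.search('just a %1 placeholder')                       # None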
def check_at_repository_root():
if not os.path.exists('.git'):
print('No .git directory found')
print('Execute this script at the root of the repository', file=sys.stderr)
sys.exit(1)
def fetch_all_translations():
if subprocess.call([TX, 'pull', '-f', '-a']):
print('Error while fetching translations', file=sys.stderr)
sys.exit(1)
def find_format_specifiers(s):
'''Find all format specifiers in a string.'''
pos = 0
specifiers = []
while True:
percent = s.find('%', pos)
if percent < 0:
break
specifiers.append(s[percent+1])
pos = percent+2
return specifiers
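# Worked example (illustrative, not in the original script):
#   find_format_specifiers('%1 of %n bytes')  -> ['1', 'n']
#   find_format_specifiers('%s copied to %s') -> ['s', 's']
# Note that a trailing lone '%' makes s[percent+1] raise IndexError, which the
# caller treats as a parse error in the translation.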
def split_format_specifiers(specifiers):
'''Split format specifiers between numeric (Qt) and others (strprintf)'''
numeric = []
other = []
for s in specifiers:
if s in {'1','2','3','4','5','6','7','8','9'}:
numeric.append(s)
else:
other.append(s)
# If both numeric format specifiers and "others" are used, assume we're dealing
# with a Qt-formatted message. In the case of Qt formatting (see https://doc.qt.io/qt-5/qstring.html#arg)
# only numeric formats are replaced at all. This means "(percentage: %1%)" is valid, without needing
# any kind of escaping that would be necessary for strprintf. Without this, this function
# would wrongly detect '%)' as a printf format specifier.
if numeric:
other = []
# numeric (Qt) can be present in any order, others (strprintf) must be in specified order
return set(numeric), other
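# Worked example (illustrative): mixing numeric and other specifiers is
# treated as Qt formatting, so the non-numeric ones are discarded:
#   split_format_specifiers(['1', ')'])      -> ({'1'}, [])
#   split_format_specifiers(['s', 's'])      -> (set(), ['s', 's'])
#   split_format_specifiers(['1', '2', '1']) -> ({'1', '2'}, [])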
def sanitize_string(s):
'''Sanitize string for printing'''
return s.replace('\n',' ')
def check_format_specifiers(source, translation, errors, numerus):
source_f = split_format_specifiers(find_format_specifiers(source))
# assert that no source messages contain both Qt and strprintf format specifiers
# if this fails, go change the source as this is hacky and confusing!
assert not (source_f[0] and source_f[1])
try:
translation_f = split_format_specifiers(find_format_specifiers(translation))
except IndexError:
errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
else:
if source_f != translation_f:
if numerus and source_f == (set(), ['n']) and translation_f == (set(), []) and translation.find('%') == -1:
# Allow numerus translations to omit %n specifier (usually when it only has one possible value)
return True
errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
return True
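# Illustrative behaviour (assumed examples): Qt specifiers may be reordered,
# strprintf specifiers must match, and numerus translations may drop %n:
#   check_format_specifiers('%1 of %2', '%2 von %1', [], False)   -> True
#   check_format_specifiers('%s items', '%d items', [], False)    -> False
#   check_format_specifiers('%n block(s)', 'one block', [], True) -> True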
def all_ts_files(suffix=''):
for filename in os.listdir(LOCALE_DIR):
# process only language files, and do not process source language
if not filename.endswith('.ts'+suffix) or filename == SOURCE_LANG+suffix:
continue
if suffix: # remove provided suffix
filename = filename[0:-len(suffix)]
filepath = os.path.join(LOCALE_DIR, filename)
yield(filename, filepath)
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
'''Remove invalid characters from translation string'''
return FIX_RE.sub(b'', s)
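# Illustrative example (not in the original script): newlines and carriage
# returns survive, while other ASCII control bytes are dropped:
#   remove_invalid_characters(b'foo\x00bar\nbaz') -> b'foobar\nbaz'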
# Override the cdata escape function to make our output match Qt's (optional,
# just for cleaner diffs for comparison; disabled by default)
_orig_escape_cdata = None
def escape_cdata(text):
text = _orig_escape_cdata(text)
text = text.replace("'", '&apos;')
text = text.replace('"', '&quot;')
return text
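# Illustrative example (assuming _orig_escape_cdata has been captured as
# below): both quote characters come out as entities, matching how Qt writes
# .ts files and keeping diffs against Qt-generated output minimal:
#   escape_cdata('don\'t say "hi"') == 'don&apos;t say &quot;hi&quot;'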
def contains_bitcoin_addr(text, errors):
if text is not None and ADDRESS_REGEXP.search(text) is not None:
errors.append('Translation "%s" contains a bitcoin address. This will be removed.' % (text))
return True
return False
def postprocess_translations(reduce_diff_hacks=False):
print('Checking and postprocessing...')
if reduce_diff_hacks:
global _orig_escape_cdata
_orig_escape_cdata = ET._escape_cdata
ET._escape_cdata = escape_cdata
for (filename,filepath) in all_ts_files():
os.rename(filepath, filepath+'.orig')
have_errors = False
for (filename,filepath) in all_ts_files('.orig'):
# pre-fixups to cope with transifex output
parser = ET.XMLParser(encoding='utf-8') # need to override the encoding because the parser understands 'utf-8' but not 'utf8'
with open(filepath + '.orig', 'rb') as f:
data = f.read()
# remove control characters; this must be done over the entire file otherwise the XML parser will fail
data = remove_invalid_characters(data)
tree = ET.parse(io.BytesIO(data), parser=parser)
# iterate over all messages in file
root = tree.getroot()
for context in root.findall('context'):
for message in context.findall('message'):
numerus = message.get('numerus') == 'yes'
source = message.find('source').text
translation_node = message.find('translation')
# pick all numerusforms
if numerus:
translations = [i.text for i in translation_node.findall('numerusform')]
else:
translations = [translation_node.text]
for translation in translations:
if translation is None:
continue
errors = []
valid = check_format_specifiers(source, translation, errors, numerus) and not contains_bitcoin_addr(translation, errors)
for error in errors:
print('%s: %s' % (filename, error))
if not valid: # set type to unfinished and clear string if invalid
translation_node.clear()
translation_node.set('type', 'unfinished')
have_errors = True
# Remove location tags
for location in message.findall('location'):
message.remove(location)
# Remove entire message if it is an unfinished translation
if translation_node.get('type') == 'unfinished':
context.remove(message)
# check if document is (virtually) empty, and remove it if so
num_messages = 0
for context in root.findall('context'):
for message in context.findall('message'):
num_messages += 1
if num_messages < MIN_NUM_MESSAGES:
print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
continue
# write fixed-up tree
# if diff reduction requested, rewrite some of the XML to match Qt's formatting
if reduce_diff_hacks:
out = io.BytesIO()
tree.write(out, encoding='utf-8')
out = out.getvalue()
out = out.replace(b' />', b'/>')
with open(filepath, 'wb') as f:
f.write(out)
else:
tree.write(filepath, encoding='utf-8')
return have_errors
if __name__ == '__main__':
check_at_repository_root()
fetch_all_translations()
postprocess_translations()
| mit |