file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
email_test.py | import bountyfunding
from bountyfunding.core.const import *
from bountyfunding.core.data import clean_database
from test import to_object
from nose.tools import *
USER = "bountyfunding"
class Email_Test:
| def setup(self):
self.app = bountyfunding.app.test_client()
clean_database()
def test_email(self):
eq_(len(self.get_emails()), 0)
r = self.app.post('/issues', data=dict(ref=1, status='READY',
title='Title', link='/issue/1'))
eq_(r.status_code, 200)
r = self.app.post('/issue/1/sponsorships',
data=dict(user=USER, amount=10))
eq_(r.status_code, 200)
r = self.app.get("/issue/1")
eq_(r.status_code, 200)
r = self.app.put('/issue/1', data=dict(
status=IssueStatus.to_string(IssueStatus.STARTED)))
eq_(r.status_code, 200)
emails = self.get_emails()
eq_(len(emails), 1)
email = emails[0]
eq_(email.recipient, USER)
ok_(email.issue_id)
ok_(email.body)
r = self.app.delete("/email/%s" % email.id)
eq_(r.status_code, 200)
def get_emails(self):
r = self.app.get("/emails")
eq_(r.status_code, 200)
return to_object(r).data | identifier_body |
|
talk.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
DEBUG = True
observer = None
ser_port = None
s = 0
ser = None
#--------------------------------------------------------------------
import signal
import sys
import os
def signal_handler(signal, frame):
global s, ser
print '\nYou pressed Ctrl+C!'
if s > 18:
print "MTK_Finalize"
serialPost(ser, "B7".decode("hex"))
time.sleep(0.1)
if ser.isOpen(): ser.close()
#sys.exit(0)
os._exit(0)
signal.signal(signal.SIGINT, signal_handler)
#--------------------------------------------------------------------
import os
import serial
from serial.tools import list_ports
def serial_ports():
"""
Returns a generator for all available serial ports
"""
if os.name == 'nt':
# windows
for i in range(256):
try:
s = serial.Serial(i)
s.close()
yield 'COM' + str(i + 1)
except serial.SerialException:
pass
else:
# unix
for port in list_ports.comports():
yield port[0]
#if __name__ == '__main__':
# print(list(serial_ports()))
#exit()
#--------------------------------------------------------------------
import serial, time, binascii
def serialPost(ser, data):
#time.sleep(0.5)
#data = chr(0x44)
print " -> " + binascii.b2a_hex(data)
ser.write(data)
#ser.flush()
def serialPostL(ser, data, slen, scnt):
sys.stdout.write("\r" + str(scnt) + " of " + str(slen) + " <- " + binascii.b2a_hex(data))
if slen == scnt: sys.stdout.write("\n")
#sys.stdout.flush()
ser.write(data)
def summ(block, length):
res = 0
for i in range(length):
res = res + ord(block[i])
#print str(res)
return chr(res & int(0xFF))
def swapSerialData(data):
l = len(data)
#if l > 16:
# print "-> " + str(l) + " bytes"
#else:
# print "-> " + binascii.b2a_hex(data)
if len(data) > 0: ser.write(data)
n = 0
while n < 1:
n = ser.inWaiting()
#time.sleep(1)
data = ser.read(n)
l = len(data)
#print "RX is L: " + str(l) + " -> " + binascii.b2a_hex(data)
return data
#----- CONNECT TO PORT----------
def conn_port (ser_port):
print ser_port
print "module PySerial version: " + serial.VERSION
# if: error open serial port: (22, 'Invalid argument')
# http://superuser.com/questions/572034/how-to-restart-ttyusb
# cat /proc/tty/drivers
# lsmod | grep usbserial
# sudo modprobe -r pl2303 qcaux
# sudo modprobe -r usbserial
#import subprocess
#subprocess.call(['statserial', ser_port])
#subprocess.call(['setserial', '-G', ser_port])
# http://www.roman10.net/serial-port-communication-in-python/
# initialization and open the port
# possible timeout values:
# 1. None: wait forever, block call
# 2. 0: non-blocking mode, return immediately
# 3. x, x is bigger than 0, float allowed, timeout block call
global ser
ser = serial.Serial()
#ser.port = "COM29"
ser.port = ser_port
ser.baudrate = 115200
ser.bytesize = serial.EIGHTBITS # number of bits per bytes
ser.parity = serial.PARITY_EVEN
ser.stopbits = serial.STOPBITS_ONE # number of stop bits
ser.timeout = None # block read
ser.rtscts = True # enable hardware (RTS/CTS) flow control (Hardware handshaking)
#ser.port = "/dev/ttyS0"
#ser.port = "/dev/ttyUSB0"
#ser.port = "2" # COM3
#ser.baudrate = 9600
#ser.parity = serial.PARITY_NONE # set parity check: no parity
#ser.timeout = 0 # non-block read
#ser.xonxoff = False # disable software flow control
#ser.rtscts = False # disable hardware (RTS/CTS) flow control
#ser.dsrdtr = False # disable hardware (DSR/DTR) flow control
#ser.writeTimeout = 2 # timeout for write
#data = chr(0x44) + chr(0x59)
#print "-> " + binascii.b2a_hex(data)
#exit()
try:
ser.open()
except Exception, e:
print "error open serial port: " + str(e)
print "for full reset serial device you must reload drivers:"
print " "
print " cat /proc/tty/drivers "
print " lsmod | grep usbserial "
print " sudo modprobe -r pl2303 qcaux "
print " sudo modprobe -r usbserial "
print " "
exit()
from hktool.bootload.samsung import sgh_e730
#loader1 = open("loader1.bin", "rb").read()
loader1 = sgh_e730.load_bootcode_first()
print "loader1.bin data size is: " + str(len(loader1))
ldr1_i = 0
ldr1_l = len(loader1)
ldr1_c = "4c00".decode("hex")
#loader2 = open("loader2.bin", "rb").read()
loader2 = sgh_e730.load_bootcode_second()
print "loader2.bin data size is: " + str(len(loader2))
ldr2_i = 0
ldr2_l = len(loader2)
#f = open("loader1.bin", "rb")
#try:
# byte = f.read(1)
# while byte != "":
# # Do stuff with byte.
# byte = f.read(1)
#except Exception, e1:
# print "error: " + str(e1)
# ser.close()
# import traceback
# traceback.print_exc()
#finally:
# f.close()
global s
if ser.isOpen():
try:
print 'Work with Samsung SGH-E730:'
print '- wait for SWIFT power on...'
ser.flushInput() # flush input buffer, discarding all its contents
ser.flushOutput() # flush output buffer, aborting current output
# and discard all that is in buffer
# write data
#ser.write("AT+CSQ=?\x0D")
#print("write data: AT+CSQ=?\x0D")
# steps
s = 0
serialPost(ser, "A0".decode("hex"))
while True:
n = 0
s += 1
while n < 1:
n = ser.inWaiting()
#time.sleep(1)
data = ser.read(n)
l = len(data)
#if s != 6 or ldr1_i == 0:
print "RX is L: " + str(l) + " <- " + binascii.b2a_hex(data)
if s == 1:
if data[l-1] == chr(0x5F):
serialPost(ser, chr(0x0A))
elif s == 2:
if data[l-1] == chr(0xF5):
serialPost(ser, chr(0x50))
elif s == 3:
#if l == 16:
# serialPost(ser, "4412345678".decode("hex") + data)
# -> AF
serialPost(ser, "05".decode("hex"))
elif s == 4:
#if data[l-1] == chr(0x4f):
# # set timeout to 1600 ms (10h)
# serialPost(ser, chr(0x54) + chr(0x10))
# # set timeout to 1600 ms (20h)
# #serialPost(ser, chr(0x54) + chr(0x20))
# -> FA
# A2 - read from memory
serialPost(ser, "A2".decode("hex"))
elif s == 5:
#if data[l-1] == chr(0x4f):
# serialPost(ser, "530000000c".decode("hex"))
# -> A2 - read command ACK
# 80 01 00 00 - Configuration Register: Hardware Version Register
serialPost(ser, "80010000".decode("hex"))
elif s == 6:
# -> 80 01 00 00
# 00 00 00 01 - read one byte
serialPost(ser, "00000001".decode("hex"))
#ldr1_i4 = 4*ldr1_i
#ldr1_i8 = 4*ldr1_i + 4
#if ldr1_i8 < ldr1_l:
# serialPostL(ser, ldr1_c + loader1[ldr1_i4:ldr1_i8], ldr1_l, ldr1_i8)
# s -= 1
#else:
# serialPostL(ser, ldr1_c + loader1[ldr1_i4:ldr1_l ], ldr1_l, ldr1_l )
#ldr1_i += 1
elif s == 7:
if l == 6: s += 1
elif s == 8:
# -> 00 00 00 01 - byte is read
# -> XX XX - byte:
serialPost(ser, "A2".decode("hex"))
#if data[l-1] == chr(0x4f):
# serialPost(ser, "530000000c".decode("hex"))
elif s == 9:
# -> A2
# 80 01 00 08 - Hardware Code Register
serialPost(ser, "80010008".decode("hex"))
#if data[l-1] == chr(0x4f):
# serialPost(ser, "4a".decode("hex"))
elif s == 10:
# -> 80 01 00 08
serialPost(ser, "00000001".decode("hex"))
#s = 20;
#if data[l-1] == chr(0xAB):
# # 0x00 -> Speed = 115200
# # 0x01 -> Speed = 230400
# # 0x02 -> Speed = 460800
# # 0x03 -> Speed = 921600
# serialPost(ser, "00".decode("hex"))
# # close comms, bootup completed
# ser.flushInput() # flush input buffer, discarding all its contents
# ser.flushOutput() # flush output buffer, aborting current output
# ser.close()
# # reopen comms at the new speed
# time.sleep(0.1)
# ser.port = "COM3"
# ser.baudrate = 115200
# ser.parity = serial.PARITY_NONE # set parity check: no parity
# ser.open()
# ser.flushInput() # flush input buffer, discarding all its contents
# ser.flushOutput() # flush output buffer, aborting current output
# serialPost(ser, "d9".decode("hex"))
elif s == 11:
if l == 6: s += 1
elif s == 12:
# -> 00 00 00 01
# -> XX XX - we hawe a MediaTek MT6253
serialPost(ser, "A2".decode("hex"))
elif s == 13:
# -> A2
# 80 01 00 04 - Software Version Register
serialPost(ser, "80010004".decode("hex"))
elif s == 14:
# -> 80 01 00 04
serialPost(ser, "00000001".decode("hex"))
elif s == 15:
if l == 6: s += 1
elif s == 16:
# -> 00 00 00 01
# -> XX XX -
# A1 - write to register
serialPost(ser, "A1".decode("hex"))
elif s == 17:
# -> A1 - write command ack
# 80 03 00 00 - Reset Generation Unit (RGU): Watchdog Timer Control Register
serialPost(ser, "80030000".decode("hex"))
elif s == 18:
# -> 80 03 00 00
serialPost(ser, "00000001".decode("hex"))
elif s == 19:
# -> 00 00 00 01
# 22 00 - set
serialPost(ser, "2200".decode("hex"))
elif s == 20:
s -= 1
elif s == 111:
data = "d4".decode("hex")
data0 = chr((ldr2_l >> 24) & int(0xFF))
data0 += chr((ldr2_l >> 16) & int(0xFF))
data0 += chr((ldr2_l >> 8) & int(0xFF))
data0 += chr((ldr2_l ) & int(0xFF))
data += data0
serialPost(ser, data)
elif s == 112:
# zapominaem CRC
crc = data
my_crc = summ(data0, 4)
print "crc is: " + binascii.b2a_hex(crc)
print "my_crc is: " + binascii.b2a_hex(my_crc)
if crc == my_crc:
send_len = 0
for i in range((ldr2_l - 1) >> 11):
send_len = ldr2_l - (i << 11)
if send_len > 2048: send_len = 2048
# calculate sum
ss = i << 11
su = summ(loader2[ss:ss+send_len], send_len)
# send command
data = swapSerialData("f7".decode("hex"))
data = swapSerialData(loader2[ss:ss+send_len])
#print "2 crc is: " + binascii.b2a_hex(data)
#print "2 my_crc is: " + binascii.b2a_hex(su)
#print "i: " + str(i)
sys.stdout.write("\ri: " + str(i))
sys.stdout.write("\n")
serialPost(ser, "FF".decode("hex"))
elif s == 113:
serialPost(ser, "D010000000".decode("hex"))
elif s == 114:
serialPost(ser, "D1".decode("hex"))
elif s == 115:
nand_id = (ord(data[8])<<8) + ord(data[9])
# nado proverit, chto 2,3,4 baity ravny sootvetstvenno 0xEC 0x22 0xFC
#
# additionally identify NAND for Swift
print "Flash... "
if nand_id == int(0x04): print " 16MB (128Mbit) NAND"
elif nand_id == int(0x14): print " 32MB (256Mbit) NAND"
elif nand_id == int(0x24): print " 64MB (512Mbit) NAND"
elif nand_id == int(0x34): print "128MB ( 1Gbit) NAND"
elif nand_id == int(0x0C): print " 16MB (128Mbit) NAND"
elif nand_id == int(0x1C): print " 32MB (256Mbit) NAND"
elif nand_id == int(0x2C): print " 64MB (512Mbit) NAND"
elif nand_id == int(0x3C): print "128MB ( 1Gbit) NAND"
else: print "Unknown NAND: " + str("%02x" % nand_id)
# here, the bootup is completed
# delay slightly (required!)
time.sleep(0.25)
else:
#data = chr(0x44)
data = chr(0x00)
print "-> " + binascii.b2a_hex(data)
#ser.write(data)
data = ser.read()
print "serial RX: " + binascii.b2a_hex(data)
data = chr(0x44)
print "-> " + binascii.b2a_hex(data)
ser.write(data)
#ser.flush()
data = ser.read()
print "serial RX: " + binascii.b2a_hex(data)
data = chr(0x51)
print "-> " + binascii.b2a_hex(data)
ser.write(data)
data = ser.read()
print "serial RX: " + binascii.b2a_hex(data)
#print ser.portstr
time.sleep(0.5) # give the serial port sometime to receive the data
numOfLines = 0
while True:
response = ser.readline()
print("read data: " + response)
numOfLines = numOfLines + 1
if (numOfLines >= 5):
break
ser.close()
except Exception, e1:
print "error communicating...: " + str(e1)
ser.close()
import traceback
traceback.print_exc()
except KeyboardInterrupt:
print "\nmanual interrupted!"
ser.close()
else:
print "cannot open serial port "
exit()
#===========================================================
#from hktool.bootload import mediatek
from hktool.bootload.mediatek import MTKBootload
from threading import Thread
from time import sleep as Sleep
def logical_xor(str1, str2):
return bool(str1) ^ bool(str2)
#----- MAIN CODE -------------------------------------------
if __name__=='__main__':
from sys import platform as _platform
import os
if _platform == "linux" or _platform == "linux2":
# linux
print "it is linux?"
from hktool.hotplug import linux_udev as port_notify
elif _platform == "darwin":
# OS X
print "it is osx?"
print "WARNING: port_notify is not realised !!!"
elif _platform == "win32":
# Windows...
print "it is windows?"
from hktool.hotplug import windevnotif as port_notify
print "sys.platform: " + _platform + ", os.name: " + os.name
print ""
print "Select: xml, boot, sgh, crc, usb, exit, quit, q"
print ""
tsk = str(raw_input("enter command > "))
if tsk.lower() in ['exit', 'quit', 'q']:
os._exit(0)
if tsk.lower() in ['boot']:
print "Working with device communication..."
print ""
Thread(target = port_notify.run_notify).start()
Sleep(1)
port = port_notify.get_notify()
print "port_name is: " + port
#conn_port(port)
#mediatek.init(port)
m = MTKBootload(port)
if 'sgh' in tsk.lower():
tsks = tsk.split()
print ""
print "Working with device communication..."
print ""
Sleep(1)
port = tsks[1]
print "port_name is: " + port
#m = SGHBootload(port)
if tsk.lower() in ['xml', 'lxml']:
print "Working with lxml..."
print ""
from lxml import etree
tree = etree.parse('../../mtk-tests/Projects/_lg-a290/data/UTLog_DownloadAgent_FlashTool.xml')
root = tree.getroot()
print root
#entries = tree.xpath("//atom:category[@term='accessibility']/..", namespaces=NSMAP)
entries = tree.xpath("//UTLOG/Request[@Dir='[OUT]']/Data")
#print entries
old_text = None
dmp_text = False
cnt_text = 0
bin_file = None
for xent in entries:
new_text = xent.text
if new_text == old_text:
continue
old_text = new_text
#print "-> " + new_text
bin_text = new_text.replace(" ", "")
bin_text = bin_text.decode("hex")
bin_len = len(bin_text)
print str(bin_len) + " -> " + new_text
if dmp_text is False and bin_len == 1024:
dmp_text = True
prt = xent.getparent()
atr = prt.attrib
num = atr["Number"]
nam = "big_" + num + ".bin"
bin_file = open(nam, 'wb')
print ""
print "start dump big data to: " + nam
if dmp_text is True:
#---
import array
a = array.array('H', bin_text) # array.array('H', bin_text)
a.byteswap()
bin_text = a.tostring()
#---
bin_file.write(bin_text)
if bin_len == 1024:
cnt_text += 1
else:
|
pass
if tsk.lower() in ['crc']:
str1 = raw_input("Enter string one:")
str2 = raw_input("Enter string two:")
if logical_xor(str1, str2):
print "ok"
else:
print "bad"
pass
print hex(0x12ef ^ 0xabcd)
print hex(int("12ef", 16) ^ int("abcd", 16))
str1 = raw_input("Enter string one: ")
str2 = raw_input("Enter string two: ")
print hex(int(str1, 16) ^ int(str2, 16))
pass
if tsk.lower() in ['usb']:
import usb.core
#import usb.backend.libusb1
import usb.backend.libusb0
import logging
#PYUSB_DEBUG_LEVEL = "debug"
#PYUSB_LOG_FILENAME = "C:\dump"
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
__backend__ = os.path.join(__location__, "libusb0.dll")
#PYUSB_LOG_FILENAME = __location__
#backend = usb.backend.libusb1.get_backend(find_library=lambda x: "/usr/lib/libusb-1.0.so")
#backend = usb.backend.libusb1.get_backend(find_library=lambda x: __backend__)
backend = usb.backend.libusb0.get_backend(find_library=lambda x: __backend__)
dev = usb.core.find(find_all=True, backend=backend)
#dev = usb.core.find(find_all=True)
busses = usb.busses()
print busses
if dev is None:
raise ValueError('Our device is not connected')
for bus in busses:
devices = bus.devices
for dev in devices:
try:
_name = usb.util.get_string(dev.dev, 19, 1)
except:
continue
dev.set_configuration()
cfg = dev.get_active_configuration()
interface_number = cfg[(0,0)].bInterfaceNumber
alternate_settting = usb.control.get_interface(interface_number)
print "Device name:",_name
print "Device:", dev.filename
print " idVendor:",hex(dev.idVendor)
print " idProduct:",hex(dev.idProduct)
for config in dev.configurations:
print " Configuration:", config.value
print " Total length:", config.totalLength
print " selfPowered:", config.selfPowered
print " remoteWakeup:", config.remoteWakeup
print " maxPower:", config.maxPower
print
| cnt_text = cnt_text * 1024 + bin_len
dmp_text = False
bin_file.close()
print "big data length is: " + str(cnt_text)
print ""
cnt_text = 0 | conditional_block |
talk.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
DEBUG = True
observer = None
ser_port = None
s = 0
ser = None
#--------------------------------------------------------------------
import signal
import sys
import os
def signal_handler(signal, frame):
global s, ser
print '\nYou pressed Ctrl+C!'
if s > 18:
print "MTK_Finalize"
serialPost(ser, "B7".decode("hex"))
time.sleep(0.1)
if ser.isOpen(): ser.close()
#sys.exit(0)
os._exit(0)
signal.signal(signal.SIGINT, signal_handler)
#--------------------------------------------------------------------
import os
import serial
from serial.tools import list_ports
def serial_ports():
"""
Returns a generator for all available serial ports
"""
if os.name == 'nt':
# windows
for i in range(256):
try:
s = serial.Serial(i)
s.close()
yield 'COM' + str(i + 1)
except serial.SerialException:
pass
else:
# unix
for port in list_ports.comports():
yield port[0]
#if __name__ == '__main__':
# print(list(serial_ports()))
#exit()
#--------------------------------------------------------------------
import serial, time, binascii
def serialPost(ser, data):
#time.sleep(0.5)
#data = chr(0x44)
print " -> " + binascii.b2a_hex(data)
ser.write(data)
#ser.flush()
def serialPostL(ser, data, slen, scnt):
sys.stdout.write("\r" + str(scnt) + " of " + str(slen) + " <- " + binascii.b2a_hex(data))
if slen == scnt: sys.stdout.write("\n")
#sys.stdout.flush()
ser.write(data)
def summ(block, length):
|
def swapSerialData(data):
l = len(data)
#if l > 16:
# print "-> " + str(l) + " bytes"
#else:
# print "-> " + binascii.b2a_hex(data)
if len(data) > 0: ser.write(data)
n = 0
while n < 1:
n = ser.inWaiting()
#time.sleep(1)
data = ser.read(n)
l = len(data)
#print "RX is L: " + str(l) + " -> " + binascii.b2a_hex(data)
return data
#----- CONNECT TO PORT----------
def conn_port (ser_port):
print ser_port
print "module PySerial version: " + serial.VERSION
# if: error open serial port: (22, 'Invalid argument')
# http://superuser.com/questions/572034/how-to-restart-ttyusb
# cat /proc/tty/drivers
# lsmod | grep usbserial
# sudo modprobe -r pl2303 qcaux
# sudo modprobe -r usbserial
#import subprocess
#subprocess.call(['statserial', ser_port])
#subprocess.call(['setserial', '-G', ser_port])
# http://www.roman10.net/serial-port-communication-in-python/
# initialization and open the port
# possible timeout values:
# 1. None: wait forever, block call
# 2. 0: non-blocking mode, return immediately
# 3. x, x is bigger than 0, float allowed, timeout block call
global ser
ser = serial.Serial()
#ser.port = "COM29"
ser.port = ser_port
ser.baudrate = 115200
ser.bytesize = serial.EIGHTBITS # number of bits per bytes
ser.parity = serial.PARITY_EVEN
ser.stopbits = serial.STOPBITS_ONE # number of stop bits
ser.timeout = None # block read
ser.rtscts = True # enable hardware (RTS/CTS) flow control (Hardware handshaking)
#ser.port = "/dev/ttyS0"
#ser.port = "/dev/ttyUSB0"
#ser.port = "2" # COM3
#ser.baudrate = 9600
#ser.parity = serial.PARITY_NONE # set parity check: no parity
#ser.timeout = 0 # non-block read
#ser.xonxoff = False # disable software flow control
#ser.rtscts = False # disable hardware (RTS/CTS) flow control
#ser.dsrdtr = False # disable hardware (DSR/DTR) flow control
#ser.writeTimeout = 2 # timeout for write
#data = chr(0x44) + chr(0x59)
#print "-> " + binascii.b2a_hex(data)
#exit()
try:
ser.open()
except Exception, e:
print "error open serial port: " + str(e)
print "for full reset serial device you must reload drivers:"
print " "
print " cat /proc/tty/drivers "
print " lsmod | grep usbserial "
print " sudo modprobe -r pl2303 qcaux "
print " sudo modprobe -r usbserial "
print " "
exit()
from hktool.bootload.samsung import sgh_e730
#loader1 = open("loader1.bin", "rb").read()
loader1 = sgh_e730.load_bootcode_first()
print "loader1.bin data size is: " + str(len(loader1))
ldr1_i = 0
ldr1_l = len(loader1)
ldr1_c = "4c00".decode("hex")
#loader2 = open("loader2.bin", "rb").read()
loader2 = sgh_e730.load_bootcode_second()
print "loader2.bin data size is: " + str(len(loader2))
ldr2_i = 0
ldr2_l = len(loader2)
#f = open("loader1.bin", "rb")
#try:
# byte = f.read(1)
# while byte != "":
# # Do stuff with byte.
# byte = f.read(1)
#except Exception, e1:
# print "error: " + str(e1)
# ser.close()
# import traceback
# traceback.print_exc()
#finally:
# f.close()
global s
if ser.isOpen():
try:
print 'Work with Samsung SGH-E730:'
print '- wait for SWIFT power on...'
ser.flushInput() # flush input buffer, discarding all its contents
ser.flushOutput() # flush output buffer, aborting current output
# and discard all that is in buffer
# write data
#ser.write("AT+CSQ=?\x0D")
#print("write data: AT+CSQ=?\x0D")
# steps
s = 0
serialPost(ser, "A0".decode("hex"))
while True:
n = 0
s += 1
while n < 1:
n = ser.inWaiting()
#time.sleep(1)
data = ser.read(n)
l = len(data)
#if s != 6 or ldr1_i == 0:
print "RX is L: " + str(l) + " <- " + binascii.b2a_hex(data)
if s == 1:
if data[l-1] == chr(0x5F):
serialPost(ser, chr(0x0A))
elif s == 2:
if data[l-1] == chr(0xF5):
serialPost(ser, chr(0x50))
elif s == 3:
#if l == 16:
# serialPost(ser, "4412345678".decode("hex") + data)
# -> AF
serialPost(ser, "05".decode("hex"))
elif s == 4:
#if data[l-1] == chr(0x4f):
# # set timeout to 1600 ms (10h)
# serialPost(ser, chr(0x54) + chr(0x10))
# # set timeout to 1600 ms (20h)
# #serialPost(ser, chr(0x54) + chr(0x20))
# -> FA
# A2 - read from memory
serialPost(ser, "A2".decode("hex"))
elif s == 5:
#if data[l-1] == chr(0x4f):
# serialPost(ser, "530000000c".decode("hex"))
# -> A2 - read command ACK
# 80 01 00 00 - Configuration Register: Hardware Version Register
serialPost(ser, "80010000".decode("hex"))
elif s == 6:
# -> 80 01 00 00
# 00 00 00 01 - read one byte
serialPost(ser, "00000001".decode("hex"))
#ldr1_i4 = 4*ldr1_i
#ldr1_i8 = 4*ldr1_i + 4
#if ldr1_i8 < ldr1_l:
# serialPostL(ser, ldr1_c + loader1[ldr1_i4:ldr1_i8], ldr1_l, ldr1_i8)
# s -= 1
#else:
# serialPostL(ser, ldr1_c + loader1[ldr1_i4:ldr1_l ], ldr1_l, ldr1_l )
#ldr1_i += 1
elif s == 7:
if l == 6: s += 1
elif s == 8:
# -> 00 00 00 01 - byte is read
# -> XX XX - byte:
serialPost(ser, "A2".decode("hex"))
#if data[l-1] == chr(0x4f):
# serialPost(ser, "530000000c".decode("hex"))
elif s == 9:
# -> A2
# 80 01 00 08 - Hardware Code Register
serialPost(ser, "80010008".decode("hex"))
#if data[l-1] == chr(0x4f):
# serialPost(ser, "4a".decode("hex"))
elif s == 10:
# -> 80 01 00 08
serialPost(ser, "00000001".decode("hex"))
#s = 20;
#if data[l-1] == chr(0xAB):
# # 0x00 -> Speed = 115200
# # 0x01 -> Speed = 230400
# # 0x02 -> Speed = 460800
# # 0x03 -> Speed = 921600
# serialPost(ser, "00".decode("hex"))
# # close comms, bootup completed
# ser.flushInput() # flush input buffer, discarding all its contents
# ser.flushOutput() # flush output buffer, aborting current output
# ser.close()
# # reopen comms at the new speed
# time.sleep(0.1)
# ser.port = "COM3"
# ser.baudrate = 115200
# ser.parity = serial.PARITY_NONE # set parity check: no parity
# ser.open()
# ser.flushInput() # flush input buffer, discarding all its contents
# ser.flushOutput() # flush output buffer, aborting current output
# serialPost(ser, "d9".decode("hex"))
elif s == 11:
if l == 6: s += 1
elif s == 12:
# -> 00 00 00 01
# -> XX XX - we hawe a MediaTek MT6253
serialPost(ser, "A2".decode("hex"))
elif s == 13:
# -> A2
# 80 01 00 04 - Software Version Register
serialPost(ser, "80010004".decode("hex"))
elif s == 14:
# -> 80 01 00 04
serialPost(ser, "00000001".decode("hex"))
elif s == 15:
if l == 6: s += 1
elif s == 16:
# -> 00 00 00 01
# -> XX XX -
# A1 - write to register
serialPost(ser, "A1".decode("hex"))
elif s == 17:
# -> A1 - write command ack
# 80 03 00 00 - Reset Generation Unit (RGU): Watchdog Timer Control Register
serialPost(ser, "80030000".decode("hex"))
elif s == 18:
# -> 80 03 00 00
serialPost(ser, "00000001".decode("hex"))
elif s == 19:
# -> 00 00 00 01
# 22 00 - set
serialPost(ser, "2200".decode("hex"))
elif s == 20:
s -= 1
elif s == 111:
data = "d4".decode("hex")
data0 = chr((ldr2_l >> 24) & int(0xFF))
data0 += chr((ldr2_l >> 16) & int(0xFF))
data0 += chr((ldr2_l >> 8) & int(0xFF))
data0 += chr((ldr2_l ) & int(0xFF))
data += data0
serialPost(ser, data)
elif s == 112:
# zapominaem CRC
crc = data
my_crc = summ(data0, 4)
print "crc is: " + binascii.b2a_hex(crc)
print "my_crc is: " + binascii.b2a_hex(my_crc)
if crc == my_crc:
send_len = 0
for i in range((ldr2_l - 1) >> 11):
send_len = ldr2_l - (i << 11)
if send_len > 2048: send_len = 2048
# calculate sum
ss = i << 11
su = summ(loader2[ss:ss+send_len], send_len)
# send command
data = swapSerialData("f7".decode("hex"))
data = swapSerialData(loader2[ss:ss+send_len])
#print "2 crc is: " + binascii.b2a_hex(data)
#print "2 my_crc is: " + binascii.b2a_hex(su)
#print "i: " + str(i)
sys.stdout.write("\ri: " + str(i))
sys.stdout.write("\n")
serialPost(ser, "FF".decode("hex"))
elif s == 113:
serialPost(ser, "D010000000".decode("hex"))
elif s == 114:
serialPost(ser, "D1".decode("hex"))
elif s == 115:
nand_id = (ord(data[8])<<8) + ord(data[9])
# nado proverit, chto 2,3,4 baity ravny sootvetstvenno 0xEC 0x22 0xFC
#
# additionally identify NAND for Swift
print "Flash... "
if nand_id == int(0x04): print " 16MB (128Mbit) NAND"
elif nand_id == int(0x14): print " 32MB (256Mbit) NAND"
elif nand_id == int(0x24): print " 64MB (512Mbit) NAND"
elif nand_id == int(0x34): print "128MB ( 1Gbit) NAND"
elif nand_id == int(0x0C): print " 16MB (128Mbit) NAND"
elif nand_id == int(0x1C): print " 32MB (256Mbit) NAND"
elif nand_id == int(0x2C): print " 64MB (512Mbit) NAND"
elif nand_id == int(0x3C): print "128MB ( 1Gbit) NAND"
else: print "Unknown NAND: " + str("%02x" % nand_id)
# here, the bootup is completed
# delay slightly (required!)
time.sleep(0.25)
else:
#data = chr(0x44)
data = chr(0x00)
print "-> " + binascii.b2a_hex(data)
#ser.write(data)
data = ser.read()
print "serial RX: " + binascii.b2a_hex(data)
data = chr(0x44)
print "-> " + binascii.b2a_hex(data)
ser.write(data)
#ser.flush()
data = ser.read()
print "serial RX: " + binascii.b2a_hex(data)
data = chr(0x51)
print "-> " + binascii.b2a_hex(data)
ser.write(data)
data = ser.read()
print "serial RX: " + binascii.b2a_hex(data)
#print ser.portstr
time.sleep(0.5) # give the serial port sometime to receive the data
numOfLines = 0
while True:
response = ser.readline()
print("read data: " + response)
numOfLines = numOfLines + 1
if (numOfLines >= 5):
break
ser.close()
except Exception, e1:
print "error communicating...: " + str(e1)
ser.close()
import traceback
traceback.print_exc()
except KeyboardInterrupt:
print "\nmanual interrupted!"
ser.close()
else:
print "cannot open serial port "
exit()
#===========================================================
#from hktool.bootload import mediatek
from hktool.bootload.mediatek import MTKBootload
from threading import Thread
from time import sleep as Sleep
def logical_xor(str1, str2):
return bool(str1) ^ bool(str2)
#----- MAIN CODE -------------------------------------------
if __name__=='__main__':
from sys import platform as _platform
import os
if _platform == "linux" or _platform == "linux2":
# linux
print "it is linux?"
from hktool.hotplug import linux_udev as port_notify
elif _platform == "darwin":
# OS X
print "it is osx?"
print "WARNING: port_notify is not realised !!!"
elif _platform == "win32":
# Windows...
print "it is windows?"
from hktool.hotplug import windevnotif as port_notify
print "sys.platform: " + _platform + ", os.name: " + os.name
print ""
print "Select: xml, boot, sgh, crc, usb, exit, quit, q"
print ""
tsk = str(raw_input("enter command > "))
if tsk.lower() in ['exit', 'quit', 'q']:
os._exit(0)
if tsk.lower() in ['boot']:
print "Working with device communication..."
print ""
Thread(target = port_notify.run_notify).start()
Sleep(1)
port = port_notify.get_notify()
print "port_name is: " + port
#conn_port(port)
#mediatek.init(port)
m = MTKBootload(port)
if 'sgh' in tsk.lower():
tsks = tsk.split()
print ""
print "Working with device communication..."
print ""
Sleep(1)
port = tsks[1]
print "port_name is: " + port
#m = SGHBootload(port)
if tsk.lower() in ['xml', 'lxml']:
print "Working with lxml..."
print ""
from lxml import etree
tree = etree.parse('../../mtk-tests/Projects/_lg-a290/data/UTLog_DownloadAgent_FlashTool.xml')
root = tree.getroot()
print root
#entries = tree.xpath("//atom:category[@term='accessibility']/..", namespaces=NSMAP)
entries = tree.xpath("//UTLOG/Request[@Dir='[OUT]']/Data")
#print entries
old_text = None
dmp_text = False
cnt_text = 0
bin_file = None
for xent in entries:
new_text = xent.text
if new_text == old_text:
continue
old_text = new_text
#print "-> " + new_text
bin_text = new_text.replace(" ", "")
bin_text = bin_text.decode("hex")
bin_len = len(bin_text)
print str(bin_len) + " -> " + new_text
if dmp_text is False and bin_len == 1024:
dmp_text = True
prt = xent.getparent()
atr = prt.attrib
num = atr["Number"]
nam = "big_" + num + ".bin"
bin_file = open(nam, 'wb')
print ""
print "start dump big data to: " + nam
if dmp_text is True:
#---
import array
a = array.array('H', bin_text) # array.array('H', bin_text)
a.byteswap()
bin_text = a.tostring()
#---
bin_file.write(bin_text)
if bin_len == 1024:
cnt_text += 1
else:
cnt_text = cnt_text * 1024 + bin_len
dmp_text = False
bin_file.close()
print "big data length is: " + str(cnt_text)
print ""
cnt_text = 0
pass
if tsk.lower() in ['crc']:
str1 = raw_input("Enter string one:")
str2 = raw_input("Enter string two:")
if logical_xor(str1, str2):
print "ok"
else:
print "bad"
pass
print hex(0x12ef ^ 0xabcd)
print hex(int("12ef", 16) ^ int("abcd", 16))
str1 = raw_input("Enter string one: ")
str2 = raw_input("Enter string two: ")
print hex(int(str1, 16) ^ int(str2, 16))
pass
if tsk.lower() in ['usb']:
import usb.core
#import usb.backend.libusb1
import usb.backend.libusb0
import logging
#PYUSB_DEBUG_LEVEL = "debug"
#PYUSB_LOG_FILENAME = "C:\dump"
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
__backend__ = os.path.join(__location__, "libusb0.dll")
#PYUSB_LOG_FILENAME = __location__
#backend = usb.backend.libusb1.get_backend(find_library=lambda x: "/usr/lib/libusb-1.0.so")
#backend = usb.backend.libusb1.get_backend(find_library=lambda x: __backend__)
backend = usb.backend.libusb0.get_backend(find_library=lambda x: __backend__)
dev = usb.core.find(find_all=True, backend=backend)
#dev = usb.core.find(find_all=True)
busses = usb.busses()
print busses
if dev is None:
raise ValueError('Our device is not connected')
for bus in busses:
devices = bus.devices
for dev in devices:
try:
_name = usb.util.get_string(dev.dev, 19, 1)
except:
continue
dev.set_configuration()
cfg = dev.get_active_configuration()
interface_number = cfg[(0,0)].bInterfaceNumber
alternate_settting = usb.control.get_interface(interface_number)
print "Device name:",_name
print "Device:", dev.filename
print " idVendor:",hex(dev.idVendor)
print " idProduct:",hex(dev.idProduct)
for config in dev.configurations:
print " Configuration:", config.value
print " Total length:", config.totalLength
print " selfPowered:", config.selfPowered
print " remoteWakeup:", config.remoteWakeup
print " maxPower:", config.maxPower
print
| res = 0
for i in range(length):
res = res + ord(block[i])
#print str(res)
return chr(res & int(0xFF)) | identifier_body |
talk.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
DEBUG = True
observer = None
ser_port = None
s = 0
ser = None
#--------------------------------------------------------------------
import signal
import sys
import os
def signal_handler(signal, frame):
global s, ser
print '\nYou pressed Ctrl+C!'
if s > 18:
print "MTK_Finalize"
serialPost(ser, "B7".decode("hex"))
time.sleep(0.1)
if ser.isOpen(): ser.close()
#sys.exit(0)
os._exit(0)
signal.signal(signal.SIGINT, signal_handler)
#--------------------------------------------------------------------
import os
import serial
from serial.tools import list_ports
def serial_ports():
"""
Returns a generator for all available serial ports
"""
if os.name == 'nt':
# windows
for i in range(256):
try:
s = serial.Serial(i)
s.close()
yield 'COM' + str(i + 1)
except serial.SerialException:
pass
else:
# unix
for port in list_ports.comports():
yield port[0]
#if __name__ == '__main__':
# print(list(serial_ports()))
#exit()
#--------------------------------------------------------------------
import serial, time, binascii
def serialPost(ser, data):
#time.sleep(0.5)
#data = chr(0x44)
print " -> " + binascii.b2a_hex(data)
ser.write(data)
#ser.flush()
def serialPostL(ser, data, slen, scnt):
sys.stdout.write("\r" + str(scnt) + " of " + str(slen) + " <- " + binascii.b2a_hex(data))
if slen == scnt: sys.stdout.write("\n")
#sys.stdout.flush()
ser.write(data)
def summ(block, length):
res = 0
for i in range(length):
res = res + ord(block[i])
#print str(res)
return chr(res & int(0xFF))
def swapSerialData(data):
l = len(data)
#if l > 16:
# print "-> " + str(l) + " bytes"
#else:
# print "-> " + binascii.b2a_hex(data)
if len(data) > 0: ser.write(data)
n = 0
while n < 1:
n = ser.inWaiting()
#time.sleep(1)
data = ser.read(n)
l = len(data)
#print "RX is L: " + str(l) + " -> " + binascii.b2a_hex(data)
return data
#----- CONNECT TO PORT----------
def conn_port (ser_port):
print ser_port
print "module PySerial version: " + serial.VERSION
# if: error open serial port: (22, 'Invalid argument')
# http://superuser.com/questions/572034/how-to-restart-ttyusb
# cat /proc/tty/drivers
# lsmod | grep usbserial
# sudo modprobe -r pl2303 qcaux
# sudo modprobe -r usbserial
#import subprocess
#subprocess.call(['statserial', ser_port])
#subprocess.call(['setserial', '-G', ser_port])
# http://www.roman10.net/serial-port-communication-in-python/
# initialization and open the port
# possible timeout values:
# 1. None: wait forever, block call
# 2. 0: non-blocking mode, return immediately
# 3. x, x is bigger than 0, float allowed, timeout block call
global ser
ser = serial.Serial()
#ser.port = "COM29"
ser.port = ser_port
ser.baudrate = 115200
ser.bytesize = serial.EIGHTBITS # number of bits per bytes
ser.parity = serial.PARITY_EVEN
ser.stopbits = serial.STOPBITS_ONE # number of stop bits
ser.timeout = None # block read
ser.rtscts = True # enable hardware (RTS/CTS) flow control (Hardware handshaking)
#ser.port = "/dev/ttyS0"
#ser.port = "/dev/ttyUSB0"
#ser.port = "2" # COM3
#ser.baudrate = 9600
#ser.parity = serial.PARITY_NONE # set parity check: no parity
#ser.timeout = 0 # non-block read
#ser.xonxoff = False # disable software flow control
#ser.rtscts = False # disable hardware (RTS/CTS) flow control
#ser.dsrdtr = False # disable hardware (DSR/DTR) flow control
#ser.writeTimeout = 2 # timeout for write
#data = chr(0x44) + chr(0x59)
#print "-> " + binascii.b2a_hex(data)
#exit()
try:
ser.open()
except Exception, e:
print "error open serial port: " + str(e)
print "for full reset serial device you must reload drivers:"
print " "
print " cat /proc/tty/drivers "
print " lsmod | grep usbserial "
print " sudo modprobe -r pl2303 qcaux "
print " sudo modprobe -r usbserial "
print " "
exit()
from hktool.bootload.samsung import sgh_e730
#loader1 = open("loader1.bin", "rb").read()
loader1 = sgh_e730.load_bootcode_first()
print "loader1.bin data size is: " + str(len(loader1))
ldr1_i = 0
ldr1_l = len(loader1)
ldr1_c = "4c00".decode("hex")
#loader2 = open("loader2.bin", "rb").read()
loader2 = sgh_e730.load_bootcode_second()
print "loader2.bin data size is: " + str(len(loader2))
ldr2_i = 0
ldr2_l = len(loader2)
#f = open("loader1.bin", "rb")
#try:
# byte = f.read(1)
# while byte != "":
# # Do stuff with byte.
# byte = f.read(1)
#except Exception, e1:
# print "error: " + str(e1)
# ser.close()
# import traceback
# traceback.print_exc()
#finally:
# f.close()
global s
if ser.isOpen():
try:
print 'Work with Samsung SGH-E730:'
print '- wait for SWIFT power on...'
ser.flushInput() # flush input buffer, discarding all its contents
ser.flushOutput() # flush output buffer, aborting current output
# and discard all that is in buffer
# write data
#ser.write("AT+CSQ=?\x0D")
#print("write data: AT+CSQ=?\x0D")
# steps
s = 0
serialPost(ser, "A0".decode("hex"))
while True:
n = 0
s += 1
while n < 1:
n = ser.inWaiting()
#time.sleep(1)
data = ser.read(n)
l = len(data)
#if s != 6 or ldr1_i == 0:
print "RX is L: " + str(l) + " <- " + binascii.b2a_hex(data)
if s == 1:
if data[l-1] == chr(0x5F):
serialPost(ser, chr(0x0A))
elif s == 2:
if data[l-1] == chr(0xF5):
serialPost(ser, chr(0x50))
elif s == 3:
#if l == 16:
# serialPost(ser, "4412345678".decode("hex") + data)
# -> AF
serialPost(ser, "05".decode("hex"))
elif s == 4:
#if data[l-1] == chr(0x4f):
# # set timeout to 1600 ms (10h)
# serialPost(ser, chr(0x54) + chr(0x10))
# # set timeout to 1600 ms (20h)
# #serialPost(ser, chr(0x54) + chr(0x20))
# -> FA | serialPost(ser, "A2".decode("hex"))
elif s == 5:
#if data[l-1] == chr(0x4f):
# serialPost(ser, "530000000c".decode("hex"))
# -> A2 - read command ACK
# 80 01 00 00 - Configuration Register: Hardware Version Register
serialPost(ser, "80010000".decode("hex"))
elif s == 6:
# -> 80 01 00 00
# 00 00 00 01 - read one byte
serialPost(ser, "00000001".decode("hex"))
#ldr1_i4 = 4*ldr1_i
#ldr1_i8 = 4*ldr1_i + 4
#if ldr1_i8 < ldr1_l:
# serialPostL(ser, ldr1_c + loader1[ldr1_i4:ldr1_i8], ldr1_l, ldr1_i8)
# s -= 1
#else:
# serialPostL(ser, ldr1_c + loader1[ldr1_i4:ldr1_l ], ldr1_l, ldr1_l )
#ldr1_i += 1
elif s == 7:
if l == 6: s += 1
elif s == 8:
# -> 00 00 00 01 - byte is read
# -> XX XX - byte:
serialPost(ser, "A2".decode("hex"))
#if data[l-1] == chr(0x4f):
# serialPost(ser, "530000000c".decode("hex"))
elif s == 9:
# -> A2
# 80 01 00 08 - Hardware Code Register
serialPost(ser, "80010008".decode("hex"))
#if data[l-1] == chr(0x4f):
# serialPost(ser, "4a".decode("hex"))
elif s == 10:
# -> 80 01 00 08
serialPost(ser, "00000001".decode("hex"))
#s = 20;
#if data[l-1] == chr(0xAB):
# # 0x00 -> Speed = 115200
# # 0x01 -> Speed = 230400
# # 0x02 -> Speed = 460800
# # 0x03 -> Speed = 921600
# serialPost(ser, "00".decode("hex"))
# # close comms, bootup completed
# ser.flushInput() # flush input buffer, discarding all its contents
# ser.flushOutput() # flush output buffer, aborting current output
# ser.close()
# # reopen comms at the new speed
# time.sleep(0.1)
# ser.port = "COM3"
# ser.baudrate = 115200
# ser.parity = serial.PARITY_NONE # set parity check: no parity
# ser.open()
# ser.flushInput() # flush input buffer, discarding all its contents
# ser.flushOutput() # flush output buffer, aborting current output
# serialPost(ser, "d9".decode("hex"))
elif s == 11:
if l == 6: s += 1
elif s == 12:
# -> 00 00 00 01
# -> XX XX - we hawe a MediaTek MT6253
serialPost(ser, "A2".decode("hex"))
elif s == 13:
# -> A2
# 80 01 00 04 - Software Version Register
serialPost(ser, "80010004".decode("hex"))
elif s == 14:
# -> 80 01 00 04
serialPost(ser, "00000001".decode("hex"))
elif s == 15:
if l == 6: s += 1
elif s == 16:
# -> 00 00 00 01
# -> XX XX -
# A1 - write to register
serialPost(ser, "A1".decode("hex"))
elif s == 17:
# -> A1 - write command ack
# 80 03 00 00 - Reset Generation Unit (RGU): Watchdog Timer Control Register
serialPost(ser, "80030000".decode("hex"))
elif s == 18:
# -> 80 03 00 00
serialPost(ser, "00000001".decode("hex"))
elif s == 19:
# -> 00 00 00 01
# 22 00 - set
serialPost(ser, "2200".decode("hex"))
elif s == 20:
s -= 1
elif s == 111:
data = "d4".decode("hex")
data0 = chr((ldr2_l >> 24) & int(0xFF))
data0 += chr((ldr2_l >> 16) & int(0xFF))
data0 += chr((ldr2_l >> 8) & int(0xFF))
data0 += chr((ldr2_l ) & int(0xFF))
data += data0
serialPost(ser, data)
elif s == 112:
# zapominaem CRC
crc = data
my_crc = summ(data0, 4)
print "crc is: " + binascii.b2a_hex(crc)
print "my_crc is: " + binascii.b2a_hex(my_crc)
if crc == my_crc:
send_len = 0
for i in range((ldr2_l - 1) >> 11):
send_len = ldr2_l - (i << 11)
if send_len > 2048: send_len = 2048
# calculate sum
ss = i << 11
su = summ(loader2[ss:ss+send_len], send_len)
# send command
data = swapSerialData("f7".decode("hex"))
data = swapSerialData(loader2[ss:ss+send_len])
#print "2 crc is: " + binascii.b2a_hex(data)
#print "2 my_crc is: " + binascii.b2a_hex(su)
#print "i: " + str(i)
sys.stdout.write("\ri: " + str(i))
sys.stdout.write("\n")
serialPost(ser, "FF".decode("hex"))
elif s == 113:
serialPost(ser, "D010000000".decode("hex"))
elif s == 114:
serialPost(ser, "D1".decode("hex"))
elif s == 115:
nand_id = (ord(data[8])<<8) + ord(data[9])
# nado proverit, chto 2,3,4 baity ravny sootvetstvenno 0xEC 0x22 0xFC
#
# additionally identify NAND for Swift
print "Flash... "
if nand_id == int(0x04): print " 16MB (128Mbit) NAND"
elif nand_id == int(0x14): print " 32MB (256Mbit) NAND"
elif nand_id == int(0x24): print " 64MB (512Mbit) NAND"
elif nand_id == int(0x34): print "128MB ( 1Gbit) NAND"
elif nand_id == int(0x0C): print " 16MB (128Mbit) NAND"
elif nand_id == int(0x1C): print " 32MB (256Mbit) NAND"
elif nand_id == int(0x2C): print " 64MB (512Mbit) NAND"
elif nand_id == int(0x3C): print "128MB ( 1Gbit) NAND"
else: print "Unknown NAND: " + str("%02x" % nand_id)
# here, the bootup is completed
# delay slightly (required!)
time.sleep(0.25)
else:
#data = chr(0x44)
data = chr(0x00)
print "-> " + binascii.b2a_hex(data)
#ser.write(data)
data = ser.read()
print "serial RX: " + binascii.b2a_hex(data)
data = chr(0x44)
print "-> " + binascii.b2a_hex(data)
ser.write(data)
#ser.flush()
data = ser.read()
print "serial RX: " + binascii.b2a_hex(data)
data = chr(0x51)
print "-> " + binascii.b2a_hex(data)
ser.write(data)
data = ser.read()
print "serial RX: " + binascii.b2a_hex(data)
#print ser.portstr
time.sleep(0.5) # give the serial port sometime to receive the data
numOfLines = 0
while True:
response = ser.readline()
print("read data: " + response)
numOfLines = numOfLines + 1
if (numOfLines >= 5):
break
ser.close()
except Exception, e1:
print "error communicating...: " + str(e1)
ser.close()
import traceback
traceback.print_exc()
except KeyboardInterrupt:
print "\nmanual interrupted!"
ser.close()
else:
print "cannot open serial port "
exit()
#===========================================================
#from hktool.bootload import mediatek
from hktool.bootload.mediatek import MTKBootload
from threading import Thread
from time import sleep as Sleep
def logical_xor(str1, str2):
return bool(str1) ^ bool(str2)
#----- MAIN CODE -------------------------------------------
if __name__=='__main__':
from sys import platform as _platform
import os
if _platform == "linux" or _platform == "linux2":
# linux
print "it is linux?"
from hktool.hotplug import linux_udev as port_notify
elif _platform == "darwin":
# OS X
print "it is osx?"
print "WARNING: port_notify is not realised !!!"
elif _platform == "win32":
# Windows...
print "it is windows?"
from hktool.hotplug import windevnotif as port_notify
print "sys.platform: " + _platform + ", os.name: " + os.name
print ""
print "Select: xml, boot, sgh, crc, usb, exit, quit, q"
print ""
tsk = str(raw_input("enter command > "))
if tsk.lower() in ['exit', 'quit', 'q']:
os._exit(0)
if tsk.lower() in ['boot']:
print "Working with device communication..."
print ""
Thread(target = port_notify.run_notify).start()
Sleep(1)
port = port_notify.get_notify()
print "port_name is: " + port
#conn_port(port)
#mediatek.init(port)
m = MTKBootload(port)
if 'sgh' in tsk.lower():
tsks = tsk.split()
print ""
print "Working with device communication..."
print ""
Sleep(1)
port = tsks[1]
print "port_name is: " + port
#m = SGHBootload(port)
if tsk.lower() in ['xml', 'lxml']:
print "Working with lxml..."
print ""
from lxml import etree
tree = etree.parse('../../mtk-tests/Projects/_lg-a290/data/UTLog_DownloadAgent_FlashTool.xml')
root = tree.getroot()
print root
#entries = tree.xpath("//atom:category[@term='accessibility']/..", namespaces=NSMAP)
entries = tree.xpath("//UTLOG/Request[@Dir='[OUT]']/Data")
#print entries
old_text = None
dmp_text = False
cnt_text = 0
bin_file = None
for xent in entries:
new_text = xent.text
if new_text == old_text:
continue
old_text = new_text
#print "-> " + new_text
bin_text = new_text.replace(" ", "")
bin_text = bin_text.decode("hex")
bin_len = len(bin_text)
print str(bin_len) + " -> " + new_text
if dmp_text is False and bin_len == 1024:
dmp_text = True
prt = xent.getparent()
atr = prt.attrib
num = atr["Number"]
nam = "big_" + num + ".bin"
bin_file = open(nam, 'wb')
print ""
print "start dump big data to: " + nam
if dmp_text is True:
#---
import array
a = array.array('H', bin_text) # array.array('H', bin_text)
a.byteswap()
bin_text = a.tostring()
#---
bin_file.write(bin_text)
if bin_len == 1024:
cnt_text += 1
else:
cnt_text = cnt_text * 1024 + bin_len
dmp_text = False
bin_file.close()
print "big data length is: " + str(cnt_text)
print ""
cnt_text = 0
pass
if tsk.lower() in ['crc']:
str1 = raw_input("Enter string one:")
str2 = raw_input("Enter string two:")
if logical_xor(str1, str2):
print "ok"
else:
print "bad"
pass
print hex(0x12ef ^ 0xabcd)
print hex(int("12ef", 16) ^ int("abcd", 16))
str1 = raw_input("Enter string one: ")
str2 = raw_input("Enter string two: ")
print hex(int(str1, 16) ^ int(str2, 16))
pass
if tsk.lower() in ['usb']:
import usb.core
#import usb.backend.libusb1
import usb.backend.libusb0
import logging
#PYUSB_DEBUG_LEVEL = "debug"
#PYUSB_LOG_FILENAME = "C:\dump"
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
__backend__ = os.path.join(__location__, "libusb0.dll")
#PYUSB_LOG_FILENAME = __location__
#backend = usb.backend.libusb1.get_backend(find_library=lambda x: "/usr/lib/libusb-1.0.so")
#backend = usb.backend.libusb1.get_backend(find_library=lambda x: __backend__)
backend = usb.backend.libusb0.get_backend(find_library=lambda x: __backend__)
dev = usb.core.find(find_all=True, backend=backend)
#dev = usb.core.find(find_all=True)
busses = usb.busses()
print busses
if dev is None:
raise ValueError('Our device is not connected')
for bus in busses:
devices = bus.devices
for dev in devices:
try:
_name = usb.util.get_string(dev.dev, 19, 1)
except:
continue
dev.set_configuration()
cfg = dev.get_active_configuration()
interface_number = cfg[(0,0)].bInterfaceNumber
alternate_settting = usb.control.get_interface(interface_number)
print "Device name:",_name
print "Device:", dev.filename
print " idVendor:",hex(dev.idVendor)
print " idProduct:",hex(dev.idProduct)
for config in dev.configurations:
print " Configuration:", config.value
print " Total length:", config.totalLength
print " selfPowered:", config.selfPowered
print " remoteWakeup:", config.remoteWakeup
print " maxPower:", config.maxPower
print | # A2 - read from memory | random_line_split |
talk.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
DEBUG = True
observer = None
ser_port = None
s = 0
ser = None
#--------------------------------------------------------------------
import signal
import sys
import os
def signal_handler(signal, frame):
global s, ser
print '\nYou pressed Ctrl+C!'
if s > 18:
print "MTK_Finalize"
serialPost(ser, "B7".decode("hex"))
time.sleep(0.1)
if ser.isOpen(): ser.close()
#sys.exit(0)
os._exit(0)
signal.signal(signal.SIGINT, signal_handler)
#--------------------------------------------------------------------
import os
import serial
from serial.tools import list_ports
def serial_ports():
"""
Returns a generator for all available serial ports
"""
if os.name == 'nt':
# windows
for i in range(256):
try:
s = serial.Serial(i)
s.close()
yield 'COM' + str(i + 1)
except serial.SerialException:
pass
else:
# unix
for port in list_ports.comports():
yield port[0]
#if __name__ == '__main__':
# print(list(serial_ports()))
#exit()
#--------------------------------------------------------------------
import serial, time, binascii
def serialPost(ser, data):
#time.sleep(0.5)
#data = chr(0x44)
print " -> " + binascii.b2a_hex(data)
ser.write(data)
#ser.flush()
def serialPostL(ser, data, slen, scnt):
sys.stdout.write("\r" + str(scnt) + " of " + str(slen) + " <- " + binascii.b2a_hex(data))
if slen == scnt: sys.stdout.write("\n")
#sys.stdout.flush()
ser.write(data)
def summ(block, length):
res = 0
for i in range(length):
res = res + ord(block[i])
#print str(res)
return chr(res & int(0xFF))
def swapSerialData(data):
l = len(data)
#if l > 16:
# print "-> " + str(l) + " bytes"
#else:
# print "-> " + binascii.b2a_hex(data)
if len(data) > 0: ser.write(data)
n = 0
while n < 1:
n = ser.inWaiting()
#time.sleep(1)
data = ser.read(n)
l = len(data)
#print "RX is L: " + str(l) + " -> " + binascii.b2a_hex(data)
return data
#----- CONNECT TO PORT----------
def conn_port (ser_port):
print ser_port
print "module PySerial version: " + serial.VERSION
# if: error open serial port: (22, 'Invalid argument')
# http://superuser.com/questions/572034/how-to-restart-ttyusb
# cat /proc/tty/drivers
# lsmod | grep usbserial
# sudo modprobe -r pl2303 qcaux
# sudo modprobe -r usbserial
#import subprocess
#subprocess.call(['statserial', ser_port])
#subprocess.call(['setserial', '-G', ser_port])
# http://www.roman10.net/serial-port-communication-in-python/
# initialization and open the port
# possible timeout values:
# 1. None: wait forever, block call
# 2. 0: non-blocking mode, return immediately
# 3. x, x is bigger than 0, float allowed, timeout block call
global ser
ser = serial.Serial()
#ser.port = "COM29"
ser.port = ser_port
ser.baudrate = 115200
ser.bytesize = serial.EIGHTBITS # number of bits per bytes
ser.parity = serial.PARITY_EVEN
ser.stopbits = serial.STOPBITS_ONE # number of stop bits
ser.timeout = None # block read
ser.rtscts = True # enable hardware (RTS/CTS) flow control (Hardware handshaking)
#ser.port = "/dev/ttyS0"
#ser.port = "/dev/ttyUSB0"
#ser.port = "2" # COM3
#ser.baudrate = 9600
#ser.parity = serial.PARITY_NONE # set parity check: no parity
#ser.timeout = 0 # non-block read
#ser.xonxoff = False # disable software flow control
#ser.rtscts = False # disable hardware (RTS/CTS) flow control
#ser.dsrdtr = False # disable hardware (DSR/DTR) flow control
#ser.writeTimeout = 2 # timeout for write
#data = chr(0x44) + chr(0x59)
#print "-> " + binascii.b2a_hex(data)
#exit()
try:
ser.open()
except Exception, e:
print "error open serial port: " + str(e)
print "for full reset serial device you must reload drivers:"
print " "
print " cat /proc/tty/drivers "
print " lsmod | grep usbserial "
print " sudo modprobe -r pl2303 qcaux "
print " sudo modprobe -r usbserial "
print " "
exit()
from hktool.bootload.samsung import sgh_e730
#loader1 = open("loader1.bin", "rb").read()
loader1 = sgh_e730.load_bootcode_first()
print "loader1.bin data size is: " + str(len(loader1))
ldr1_i = 0
ldr1_l = len(loader1)
ldr1_c = "4c00".decode("hex")
#loader2 = open("loader2.bin", "rb").read()
loader2 = sgh_e730.load_bootcode_second()
print "loader2.bin data size is: " + str(len(loader2))
ldr2_i = 0
ldr2_l = len(loader2)
#f = open("loader1.bin", "rb")
#try:
# byte = f.read(1)
# while byte != "":
# # Do stuff with byte.
# byte = f.read(1)
#except Exception, e1:
# print "error: " + str(e1)
# ser.close()
# import traceback
# traceback.print_exc()
#finally:
# f.close()
global s
if ser.isOpen():
try:
print 'Work with Samsung SGH-E730:'
print '- wait for SWIFT power on...'
ser.flushInput() # flush input buffer, discarding all its contents
ser.flushOutput() # flush output buffer, aborting current output
# and discard all that is in buffer
# write data
#ser.write("AT+CSQ=?\x0D")
#print("write data: AT+CSQ=?\x0D")
# steps
s = 0
serialPost(ser, "A0".decode("hex"))
while True:
n = 0
s += 1
while n < 1:
n = ser.inWaiting()
#time.sleep(1)
data = ser.read(n)
l = len(data)
#if s != 6 or ldr1_i == 0:
print "RX is L: " + str(l) + " <- " + binascii.b2a_hex(data)
if s == 1:
if data[l-1] == chr(0x5F):
serialPost(ser, chr(0x0A))
elif s == 2:
if data[l-1] == chr(0xF5):
serialPost(ser, chr(0x50))
elif s == 3:
#if l == 16:
# serialPost(ser, "4412345678".decode("hex") + data)
# -> AF
serialPost(ser, "05".decode("hex"))
elif s == 4:
#if data[l-1] == chr(0x4f):
# # set timeout to 1600 ms (10h)
# serialPost(ser, chr(0x54) + chr(0x10))
# # set timeout to 1600 ms (20h)
# #serialPost(ser, chr(0x54) + chr(0x20))
# -> FA
# A2 - read from memory
serialPost(ser, "A2".decode("hex"))
elif s == 5:
#if data[l-1] == chr(0x4f):
# serialPost(ser, "530000000c".decode("hex"))
# -> A2 - read command ACK
# 80 01 00 00 - Configuration Register: Hardware Version Register
serialPost(ser, "80010000".decode("hex"))
elif s == 6:
# -> 80 01 00 00
# 00 00 00 01 - read one byte
serialPost(ser, "00000001".decode("hex"))
#ldr1_i4 = 4*ldr1_i
#ldr1_i8 = 4*ldr1_i + 4
#if ldr1_i8 < ldr1_l:
# serialPostL(ser, ldr1_c + loader1[ldr1_i4:ldr1_i8], ldr1_l, ldr1_i8)
# s -= 1
#else:
# serialPostL(ser, ldr1_c + loader1[ldr1_i4:ldr1_l ], ldr1_l, ldr1_l )
#ldr1_i += 1
elif s == 7:
if l == 6: s += 1
elif s == 8:
# -> 00 00 00 01 - byte is read
# -> XX XX - byte:
serialPost(ser, "A2".decode("hex"))
#if data[l-1] == chr(0x4f):
# serialPost(ser, "530000000c".decode("hex"))
elif s == 9:
# -> A2
# 80 01 00 08 - Hardware Code Register
serialPost(ser, "80010008".decode("hex"))
#if data[l-1] == chr(0x4f):
# serialPost(ser, "4a".decode("hex"))
elif s == 10:
# -> 80 01 00 08
serialPost(ser, "00000001".decode("hex"))
#s = 20;
#if data[l-1] == chr(0xAB):
# # 0x00 -> Speed = 115200
# # 0x01 -> Speed = 230400
# # 0x02 -> Speed = 460800
# # 0x03 -> Speed = 921600
# serialPost(ser, "00".decode("hex"))
# # close comms, bootup completed
# ser.flushInput() # flush input buffer, discarding all its contents
# ser.flushOutput() # flush output buffer, aborting current output
# ser.close()
# # reopen comms at the new speed
# time.sleep(0.1)
# ser.port = "COM3"
# ser.baudrate = 115200
# ser.parity = serial.PARITY_NONE # set parity check: no parity
# ser.open()
# ser.flushInput() # flush input buffer, discarding all its contents
# ser.flushOutput() # flush output buffer, aborting current output
# serialPost(ser, "d9".decode("hex"))
elif s == 11:
if l == 6: s += 1
elif s == 12:
# -> 00 00 00 01
# -> XX XX - we hawe a MediaTek MT6253
serialPost(ser, "A2".decode("hex"))
elif s == 13:
# -> A2
# 80 01 00 04 - Software Version Register
serialPost(ser, "80010004".decode("hex"))
elif s == 14:
# -> 80 01 00 04
serialPost(ser, "00000001".decode("hex"))
elif s == 15:
if l == 6: s += 1
elif s == 16:
# -> 00 00 00 01
# -> XX XX -
# A1 - write to register
serialPost(ser, "A1".decode("hex"))
elif s == 17:
# -> A1 - write command ack
# 80 03 00 00 - Reset Generation Unit (RGU): Watchdog Timer Control Register
serialPost(ser, "80030000".decode("hex"))
elif s == 18:
# -> 80 03 00 00
serialPost(ser, "00000001".decode("hex"))
elif s == 19:
# -> 00 00 00 01
# 22 00 - set
serialPost(ser, "2200".decode("hex"))
elif s == 20:
s -= 1
elif s == 111:
data = "d4".decode("hex")
data0 = chr((ldr2_l >> 24) & int(0xFF))
data0 += chr((ldr2_l >> 16) & int(0xFF))
data0 += chr((ldr2_l >> 8) & int(0xFF))
data0 += chr((ldr2_l ) & int(0xFF))
data += data0
serialPost(ser, data)
elif s == 112:
# zapominaem CRC
crc = data
my_crc = summ(data0, 4)
print "crc is: " + binascii.b2a_hex(crc)
print "my_crc is: " + binascii.b2a_hex(my_crc)
if crc == my_crc:
send_len = 0
for i in range((ldr2_l - 1) >> 11):
send_len = ldr2_l - (i << 11)
if send_len > 2048: send_len = 2048
# calculate sum
ss = i << 11
su = summ(loader2[ss:ss+send_len], send_len)
# send command
data = swapSerialData("f7".decode("hex"))
data = swapSerialData(loader2[ss:ss+send_len])
#print "2 crc is: " + binascii.b2a_hex(data)
#print "2 my_crc is: " + binascii.b2a_hex(su)
#print "i: " + str(i)
sys.stdout.write("\ri: " + str(i))
sys.stdout.write("\n")
serialPost(ser, "FF".decode("hex"))
elif s == 113:
serialPost(ser, "D010000000".decode("hex"))
elif s == 114:
serialPost(ser, "D1".decode("hex"))
elif s == 115:
nand_id = (ord(data[8])<<8) + ord(data[9])
# nado proverit, chto 2,3,4 baity ravny sootvetstvenno 0xEC 0x22 0xFC
#
# additionally identify NAND for Swift
print "Flash... "
if nand_id == int(0x04): print " 16MB (128Mbit) NAND"
elif nand_id == int(0x14): print " 32MB (256Mbit) NAND"
elif nand_id == int(0x24): print " 64MB (512Mbit) NAND"
elif nand_id == int(0x34): print "128MB ( 1Gbit) NAND"
elif nand_id == int(0x0C): print " 16MB (128Mbit) NAND"
elif nand_id == int(0x1C): print " 32MB (256Mbit) NAND"
elif nand_id == int(0x2C): print " 64MB (512Mbit) NAND"
elif nand_id == int(0x3C): print "128MB ( 1Gbit) NAND"
else: print "Unknown NAND: " + str("%02x" % nand_id)
# here, the bootup is completed
# delay slightly (required!)
time.sleep(0.25)
else:
#data = chr(0x44)
data = chr(0x00)
print "-> " + binascii.b2a_hex(data)
#ser.write(data)
data = ser.read()
print "serial RX: " + binascii.b2a_hex(data)
data = chr(0x44)
print "-> " + binascii.b2a_hex(data)
ser.write(data)
#ser.flush()
data = ser.read()
print "serial RX: " + binascii.b2a_hex(data)
data = chr(0x51)
print "-> " + binascii.b2a_hex(data)
ser.write(data)
data = ser.read()
print "serial RX: " + binascii.b2a_hex(data)
#print ser.portstr
time.sleep(0.5) # give the serial port sometime to receive the data
numOfLines = 0
while True:
response = ser.readline()
print("read data: " + response)
numOfLines = numOfLines + 1
if (numOfLines >= 5):
break
ser.close()
except Exception, e1:
print "error communicating...: " + str(e1)
ser.close()
import traceback
traceback.print_exc()
except KeyboardInterrupt:
print "\nmanual interrupted!"
ser.close()
else:
print "cannot open serial port "
exit()
#===========================================================
#from hktool.bootload import mediatek
from hktool.bootload.mediatek import MTKBootload
from threading import Thread
from time import sleep as Sleep
def | (str1, str2):
return bool(str1) ^ bool(str2)
#----- MAIN CODE -------------------------------------------
if __name__=='__main__':
from sys import platform as _platform
import os
if _platform == "linux" or _platform == "linux2":
# linux
print "it is linux?"
from hktool.hotplug import linux_udev as port_notify
elif _platform == "darwin":
# OS X
print "it is osx?"
print "WARNING: port_notify is not realised !!!"
elif _platform == "win32":
# Windows...
print "it is windows?"
from hktool.hotplug import windevnotif as port_notify
print "sys.platform: " + _platform + ", os.name: " + os.name
print ""
print "Select: xml, boot, sgh, crc, usb, exit, quit, q"
print ""
tsk = str(raw_input("enter command > "))
if tsk.lower() in ['exit', 'quit', 'q']:
os._exit(0)
if tsk.lower() in ['boot']:
print "Working with device communication..."
print ""
Thread(target = port_notify.run_notify).start()
Sleep(1)
port = port_notify.get_notify()
print "port_name is: " + port
#conn_port(port)
#mediatek.init(port)
m = MTKBootload(port)
if 'sgh' in tsk.lower():
tsks = tsk.split()
print ""
print "Working with device communication..."
print ""
Sleep(1)
port = tsks[1]
print "port_name is: " + port
#m = SGHBootload(port)
if tsk.lower() in ['xml', 'lxml']:
print "Working with lxml..."
print ""
from lxml import etree
tree = etree.parse('../../mtk-tests/Projects/_lg-a290/data/UTLog_DownloadAgent_FlashTool.xml')
root = tree.getroot()
print root
#entries = tree.xpath("//atom:category[@term='accessibility']/..", namespaces=NSMAP)
entries = tree.xpath("//UTLOG/Request[@Dir='[OUT]']/Data")
#print entries
old_text = None
dmp_text = False
cnt_text = 0
bin_file = None
for xent in entries:
new_text = xent.text
if new_text == old_text:
continue
old_text = new_text
#print "-> " + new_text
bin_text = new_text.replace(" ", "")
bin_text = bin_text.decode("hex")
bin_len = len(bin_text)
print str(bin_len) + " -> " + new_text
if dmp_text is False and bin_len == 1024:
dmp_text = True
prt = xent.getparent()
atr = prt.attrib
num = atr["Number"]
nam = "big_" + num + ".bin"
bin_file = open(nam, 'wb')
print ""
print "start dump big data to: " + nam
if dmp_text is True:
#---
import array
a = array.array('H', bin_text) # array.array('H', bin_text)
a.byteswap()
bin_text = a.tostring()
#---
bin_file.write(bin_text)
if bin_len == 1024:
cnt_text += 1
else:
cnt_text = cnt_text * 1024 + bin_len
dmp_text = False
bin_file.close()
print "big data length is: " + str(cnt_text)
print ""
cnt_text = 0
pass
if tsk.lower() in ['crc']:
str1 = raw_input("Enter string one:")
str2 = raw_input("Enter string two:")
if logical_xor(str1, str2):
print "ok"
else:
print "bad"
pass
print hex(0x12ef ^ 0xabcd)
print hex(int("12ef", 16) ^ int("abcd", 16))
str1 = raw_input("Enter string one: ")
str2 = raw_input("Enter string two: ")
print hex(int(str1, 16) ^ int(str2, 16))
pass
if tsk.lower() in ['usb']:
import usb.core
#import usb.backend.libusb1
import usb.backend.libusb0
import logging
#PYUSB_DEBUG_LEVEL = "debug"
#PYUSB_LOG_FILENAME = "C:\dump"
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
__backend__ = os.path.join(__location__, "libusb0.dll")
#PYUSB_LOG_FILENAME = __location__
#backend = usb.backend.libusb1.get_backend(find_library=lambda x: "/usr/lib/libusb-1.0.so")
#backend = usb.backend.libusb1.get_backend(find_library=lambda x: __backend__)
backend = usb.backend.libusb0.get_backend(find_library=lambda x: __backend__)
dev = usb.core.find(find_all=True, backend=backend)
#dev = usb.core.find(find_all=True)
busses = usb.busses()
print busses
if dev is None:
raise ValueError('Our device is not connected')
for bus in busses:
devices = bus.devices
for dev in devices:
try:
_name = usb.util.get_string(dev.dev, 19, 1)
except:
continue
dev.set_configuration()
cfg = dev.get_active_configuration()
interface_number = cfg[(0,0)].bInterfaceNumber
alternate_settting = usb.control.get_interface(interface_number)
print "Device name:",_name
print "Device:", dev.filename
print " idVendor:",hex(dev.idVendor)
print " idProduct:",hex(dev.idProduct)
for config in dev.configurations:
print " Configuration:", config.value
print " Total length:", config.totalLength
print " selfPowered:", config.selfPowered
print " remoteWakeup:", config.remoteWakeup
print " maxPower:", config.maxPower
print
| logical_xor | identifier_name |
execWrapper.test.ts | #! /usr/bin/env jest
import { ExecError, execWrapper } from "./execWrapper"
describe("execWrapper()", () => {
it("should resolve when there is a zero exit code", async () => { | code: 0,
stdout: "it works\n",
stderr: "",
})
})
it("should reject when there is a non-zero exit code", async () => {
try {
await execWrapper(`echo "begin"; echo "fail" 1>&2; exit 1`, {
silent: true,
})
} catch (err) {
expect(err).toBeInstanceOf(ExecError)
if (err instanceof ExecError) {
expect(err.code).toEqual(1)
expect(err.stdout).toEqual("begin\n")
expect(err.stderr).toEqual("fail\n")
}
} finally {
expect.assertions(4)
}
})
}) | const result = await execWrapper(`echo "it works"; exit 0`, {
silent: true,
})
expect(result).toEqual({ | random_line_split |
execWrapper.test.ts | #! /usr/bin/env jest
import { ExecError, execWrapper } from "./execWrapper"
describe("execWrapper()", () => {
it("should resolve when there is a zero exit code", async () => {
const result = await execWrapper(`echo "it works"; exit 0`, {
silent: true,
})
expect(result).toEqual({
code: 0,
stdout: "it works\n",
stderr: "",
})
})
it("should reject when there is a non-zero exit code", async () => {
try {
await execWrapper(`echo "begin"; echo "fail" 1>&2; exit 1`, {
silent: true,
})
} catch (err) {
expect(err).toBeInstanceOf(ExecError)
if (err instanceof ExecError) |
} finally {
expect.assertions(4)
}
})
})
| {
expect(err.code).toEqual(1)
expect(err.stdout).toEqual("begin\n")
expect(err.stderr).toEqual("fail\n")
} | conditional_block |
package.js | // All other packages automatically depend on this one
Package.describe({
summary: "Core Meteor environment",
version: '1.2.18-beta.5'
});
Package.registerBuildPlugin({
name: "basicFileTypes",
sources: ['plugin/basic-file-types.js']
});
Npm.depends({
"meteor-deque": "2.1.0"
});
Package.onUse(function (api) {
api.use('underscore', ['client', 'server']);
api.use('isobuild:[email protected]');
api.export('Meteor');
api.addFiles('global.js', ['client', 'server']);
api.export('global');
api.addFiles('client_environment.js', 'client');
api.addFiles('server_environment.js', 'server');
// Defined by client_environment.js and server_environment.js.
api.export("meteorEnv");
api.addFiles('cordova_environment.js', 'web.cordova');
api.addFiles('helpers.js', ['client', 'server']);
api.addFiles('setimmediate.js', ['client', 'server']);
api.addFiles('timers.js', ['client', 'server']);
api.addFiles('errors.js', ['client', 'server']);
api.addFiles('fiber_helpers.js', 'server');
api.addFiles('fiber_stubs_client.js', 'client');
api.addFiles('startup_client.js', ['client']);
api.addFiles('startup_server.js', ['server']);
api.addFiles('debug.js', ['client', 'server']);
api.addFiles('string_utils.js', ['client', 'server']);
api.addFiles('test_environment.js', ['client', 'server']);
// dynamic variables, bindEnvironment
// XXX move into a separate package?
api.addFiles('dynamics_browser.js', 'client');
api.addFiles('dynamics_nodejs.js', 'server');
// note server before common. usually it is the other way around, but
// in this case server must load first.
api.addFiles('url_server.js', 'server');
api.addFiles('url_common.js', ['client', 'server']);
// People expect process.exit() to not swallow console output.
// On Windows, it sometimes does, so we fix it for all apps and packages
api.addFiles('flush-buffers-on-exit-in-windows.js', 'server');
});
Package.onTest(function (api) {
api.use(['underscore', 'tinytest', 'test-helpers']);
api.addFiles('browser_environment_test.js', 'web.browser');
api.addFiles('client_environment_test.js', 'client');
api.addFiles('cordova_environment_test.js', 'web.cordova');
api.addFiles('server_environment_test.js', 'server');
api.addFiles('helpers_test.js', ['client', 'server']);
api.addFiles('dynamics_test.js', ['client', 'server']);
api.addFiles('fiber_helpers_test.js', ['server']);
api.addFiles('wrapasync_test.js', ['server']);
api.addFiles('url_tests.js', ['client', 'server']);
api.addFiles('timers_tests.js', ['client', 'server']);
| }); | api.addFiles('debug_test.js', 'client');
api.addFiles('bare_test_setup.js', 'client', {bare: true});
api.addFiles('bare_tests.js', 'client'); | random_line_split |
Lista.js | ο»Ώ$(document).ready(function () {
LlenarTabla();
//$(".Agregar").click(function () {
// $('#ModalAgregarTipoPregunta').modal('show');
//});
});
function LlenarTabla() {
$('#TablaTipoPreguntas').dataTable({
"processing": true,
"serverSide": true,
"bFilter": false,
"dom": '<"toolbar">frtip',
"bDestroy": true,
"info": true,
"stateSave": true,
"lengthMenu": [[10, 20, 50, 100], [10, 20, 50, 100]],
"ajax": {
"url": "/TipoPreguntas/GetList/",
"type": "POST",
"data": { 'data': 1 },
//},
//"fnInitComplete": function (oSettings, json) {
//},
//"fnDrawCallback": function (oSettings) {
},
"columns": [
{ "data": "IdTipoPregunta", "orderable": false },
{ "data": "Descripcion", "orderable": false },
////{
//// sortable: false,
//// "render": function (data, type, full, meta) {
//// console.log(full);
//// }
////},
//{
// sortable: false,
// "render": function (data, type, full, meta) {
// return Opciones(); //Es el campo de opciones de la tabla.
// }
//}
],
language: {
processing: "Procesando informaciΓ³n...",
search: "Buscar :",
lengthMenu: "Mostrar _MENU_ Elementos",
info: "Mostrando _START_ de _END_ Total _TOTAL_ elementos",
infoEmpty: "No hay elemetos para mostrar",
infoFiltered: "(filtrados _MAX_ )",
infoPostFix: "",
loadingRecords: "BΓΊsqueda en curso...",
zeroRecords: "No hay registros para mostrar",
emptyTable: "No hay registros disponibles",
paginate: {
first: "Primera",
previous: "Anterior",
next: "Siguiente",
last: "Ultima"
},
//aria: {
// sortAscending: ": activer pour trier la colonne par ordre croissant",
// sortDescending: ": activer pour trier la colonne par ordre dΓ©croissant"
//}
},
"order": [[0, "asc"]]
})
//$("div.toolbar").html('<button class="btn btn-success btn-sm Agregar" style="float:right;" ><i class="fa fa-plus" aria-hidden="true"></i> Nuevo Tipo de Pregunta </button> <div class="input-group input-group-sm"><input class="form-control" type="text"><span class="input-group-btn"><button class="btn btn-info btn-flat" type="button">Buscar</button></span></div>');
}
function Opciones() {
var botones = "<button class='btn btn-info btn-xs detalleTipoPregunta' id='detalleTipoPregunta'>Detalles</button> <button class='btn btn-warning btn-xs editarTipoPregunta' id='editarTipoPregunta'>Editar</button> <button class='btn btn-danger btn-xs eliminarPregunta' id='eliminarTipoPregunta'> Eliminar</button> ";
return botones;
}
//funcion:retorna las opciones que tendra cada row en la tabla principal
function Opciones() {
| #TablaTipoPreguntas').on('click', '.Detalle', function () {
$('#ModalDetalleTipoPregunta').modal('show');
});
$('#TablaTipoPreguntas').on('click', '.Editar', function () {
$('#ModalDetalleTipoPregunta').modal('show');
});
$('#TablaTipoPreguntas').on('click', '.Eliminar', function () {
alert("click");
});
//function ActualizaListaPreguntas() {
// $('#TbodyPreguntas').empty();
// for (var b = 0; b < Lista_preguntas.length; b++) {
// $('#tablaPreguntas').append("<tr><td>" + Lista_preguntas[b].Nombre + "</td><td>" + Lista_preguntas[b].TipoControl + "</td><td><button class='btn btn-info btn-xs detallepregunta' rel='" + Lista_preguntas[b].id + "'>Detalles</button> <button class='btn btn-warning btn-xs EditarPregunta ' rel='" + Lista_preguntas[b].id + "'>Editar</button> <button class='btn btn-danger btn-xs EliminaPregunta' rel='" + Lista_preguntas[b].id + "'>Eliminar</button></td></tr>");
// }
//}
//$('#TablaClientes').on('click', '.EliminaCliente', function () {
// var id = $(this).attr('rel');
// $('#ModalEliminaCliente').modal('show');
// //$('#contrato').val(id);
//});
| var opc = "<button class='btn btn-info btn-xs Detalle' type='button'>Detalles</button> <button class='btn btn-warning btn-xs Editar' type='button'><i class='fa fa-pencil' aria-hidden='true'></i> Editar</button> <button class='btn btn-danger btn-xs eliminar' type='button'> <i class='fa fa-trash-o' aria-hidden='true'></i> Eliminar</button>"
return opc;
}
$(' | identifier_body |
Lista.js | ο»Ώ$(document).ready(function () {
LlenarTabla();
//$(".Agregar").click(function () {
// $('#ModalAgregarTipoPregunta').modal('show');
//});
});
function LlenarTabla() {
$('#TablaTipoPreguntas').dataTable({
"processing": true,
"serverSide": true,
"bFilter": false,
"dom": '<"toolbar">frtip',
"bDestroy": true,
"info": true,
"stateSave": true,
"lengthMenu": [[10, 20, 50, 100], [10, 20, 50, 100]],
"ajax": {
"url": "/TipoPreguntas/GetList/",
"type": "POST",
"data": { 'data': 1 },
//},
//"fnInitComplete": function (oSettings, json) {
//},
//"fnDrawCallback": function (oSettings) {
},
"columns": [
{ "data": "IdTipoPregunta", "orderable": false },
{ "data": "Descripcion", "orderable": false },
////{
//// sortable: false,
//// "render": function (data, type, full, meta) {
//// console.log(full);
//// }
////},
//{
// sortable: false,
// "render": function (data, type, full, meta) {
// return Opciones(); //Es el campo de opciones de la tabla.
// }
//}
],
language: {
processing: "Procesando informaciΓ³n...",
search: "Buscar :",
lengthMenu: "Mostrar _MENU_ Elementos",
info: "Mostrando _START_ de _END_ Total _TOTAL_ elementos",
infoEmpty: "No hay elemetos para mostrar",
infoFiltered: "(filtrados _MAX_ )",
infoPostFix: "",
loadingRecords: "BΓΊsqueda en curso...",
zeroRecords: "No hay registros para mostrar",
emptyTable: "No hay registros disponibles",
paginate: {
first: "Primera",
previous: "Anterior",
next: "Siguiente",
last: "Ultima"
},
//aria: {
// sortAscending: ": activer pour trier la colonne par ordre croissant",
// sortDescending: ": activer pour trier la colonne par ordre dΓ©croissant"
//}
},
"order": [[0, "asc"]]
})
//$("div.toolbar").html('<button class="btn btn-success btn-sm Agregar" style="float:right;" ><i class="fa fa-plus" aria-hidden="true"></i> Nuevo Tipo de Pregunta </button> <div class="input-group input-group-sm"><input class="form-control" type="text"><span class="input-group-btn"><button class="btn btn-info btn-flat" type="button">Buscar</button></span></div>');
}
function Opciones() {
var botones = "<button class='btn btn-info btn-xs detalleTipoPregunta' id='detalleTipoPregunta'>Detalles</button> <button class='btn btn-warning btn-xs editarTipoPregunta' id='editarTipoPregunta'>Editar</button> <button class='btn btn-danger btn-xs eliminarPregunta' id='eliminarTipoPregunta'> Eliminar</button> ";
return botones;
}
//funcion:retorna las opciones que tendra cada row en la tabla principal
function Opcio | var opc = "<button class='btn btn-info btn-xs Detalle' type='button'>Detalles</button> <button class='btn btn-warning btn-xs Editar' type='button'><i class='fa fa-pencil' aria-hidden='true'></i> Editar</button> <button class='btn btn-danger btn-xs eliminar' type='button'> <i class='fa fa-trash-o' aria-hidden='true'></i> Eliminar</button>"
return opc;
}
$('#TablaTipoPreguntas').on('click', '.Detalle', function () {
$('#ModalDetalleTipoPregunta').modal('show');
});
$('#TablaTipoPreguntas').on('click', '.Editar', function () {
$('#ModalDetalleTipoPregunta').modal('show');
});
$('#TablaTipoPreguntas').on('click', '.Eliminar', function () {
alert("click");
});
//function ActualizaListaPreguntas() {
// $('#TbodyPreguntas').empty();
// for (var b = 0; b < Lista_preguntas.length; b++) {
// $('#tablaPreguntas').append("<tr><td>" + Lista_preguntas[b].Nombre + "</td><td>" + Lista_preguntas[b].TipoControl + "</td><td><button class='btn btn-info btn-xs detallepregunta' rel='" + Lista_preguntas[b].id + "'>Detalles</button> <button class='btn btn-warning btn-xs EditarPregunta ' rel='" + Lista_preguntas[b].id + "'>Editar</button> <button class='btn btn-danger btn-xs EliminaPregunta' rel='" + Lista_preguntas[b].id + "'>Eliminar</button></td></tr>");
// }
//}
//$('#TablaClientes').on('click', '.EliminaCliente', function () {
// var id = $(this).attr('rel');
// $('#ModalEliminaCliente').modal('show');
// //$('#contrato').val(id);
//});
| nes() {
| identifier_name |
Lista.js | ο»Ώ$(document).ready(function () {
LlenarTabla();
//$(".Agregar").click(function () {
// $('#ModalAgregarTipoPregunta').modal('show');
//});
});
function LlenarTabla() {
$('#TablaTipoPreguntas').dataTable({
"processing": true,
"serverSide": true,
"bFilter": false,
"dom": '<"toolbar">frtip',
"bDestroy": true,
"info": true,
"stateSave": true,
"lengthMenu": [[10, 20, 50, 100], [10, 20, 50, 100]],
"ajax": {
"url": "/TipoPreguntas/GetList/",
"type": "POST",
"data": { 'data': 1 },
//},
//"fnInitComplete": function (oSettings, json) {
//},
//"fnDrawCallback": function (oSettings) {
},
"columns": [
{ "data": "IdTipoPregunta", "orderable": false },
{ "data": "Descripcion", "orderable": false },
////{
//// sortable: false,
//// "render": function (data, type, full, meta) {
//// console.log(full);
//// }
////},
//{
// sortable: false,
// "render": function (data, type, full, meta) {
// return Opciones(); //Es el campo de opciones de la tabla.
// }
//}
],
language: {
processing: "Procesando informaciΓ³n...",
search: "Buscar :",
lengthMenu: "Mostrar _MENU_ Elementos",
info: "Mostrando _START_ de _END_ Total _TOTAL_ elementos",
infoEmpty: "No hay elemetos para mostrar",
infoFiltered: "(filtrados _MAX_ )",
infoPostFix: "",
loadingRecords: "BΓΊsqueda en curso...",
zeroRecords: "No hay registros para mostrar",
emptyTable: "No hay registros disponibles",
paginate: {
first: "Primera",
previous: "Anterior",
next: "Siguiente",
last: "Ultima"
},
//aria: {
// sortAscending: ": activer pour trier la colonne par ordre croissant",
// sortDescending: ": activer pour trier la colonne par ordre dΓ©croissant"
//}
},
"order": [[0, "asc"]]
})
//$("div.toolbar").html('<button class="btn btn-success btn-sm Agregar" style="float:right;" ><i class="fa fa-plus" aria-hidden="true"></i> Nuevo Tipo de Pregunta </button> <div class="input-group input-group-sm"><input class="form-control" type="text"><span class="input-group-btn"><button class="btn btn-info btn-flat" type="button">Buscar</button></span></div>');
}
function Opciones() {
var botones = "<button class='btn btn-info btn-xs detalleTipoPregunta' id='detalleTipoPregunta'>Detalles</button> <button class='btn btn-warning btn-xs editarTipoPregunta' id='editarTipoPregunta'>Editar</button> <button class='btn btn-danger btn-xs eliminarPregunta' id='eliminarTipoPregunta'> Eliminar</button> ";
return botones;
}
//funcion:retorna las opciones que tendra cada row en la tabla principal
function Opciones() {
var opc = "<button class='btn btn-info btn-xs Detalle' type='button'>Detalles</button> <button class='btn btn-warning btn-xs Editar' type='button'><i class='fa fa-pencil' aria-hidden='true'></i> Editar</button> <button class='btn btn-danger btn-xs eliminar' type='button'> <i class='fa fa-trash-o' aria-hidden='true'></i> Eliminar</button>"
return opc;
}
$('#TablaTipoPreguntas').on('click', '.Detalle', function () {
$('#ModalDetalleTipoPregunta').modal('show');
});
$('#TablaTipoPreguntas').on('click', '.Editar', function () { | $('#TablaTipoPreguntas').on('click', '.Eliminar', function () {
alert("click");
});
//function ActualizaListaPreguntas() {
// $('#TbodyPreguntas').empty();
// for (var b = 0; b < Lista_preguntas.length; b++) {
// $('#tablaPreguntas').append("<tr><td>" + Lista_preguntas[b].Nombre + "</td><td>" + Lista_preguntas[b].TipoControl + "</td><td><button class='btn btn-info btn-xs detallepregunta' rel='" + Lista_preguntas[b].id + "'>Detalles</button> <button class='btn btn-warning btn-xs EditarPregunta ' rel='" + Lista_preguntas[b].id + "'>Editar</button> <button class='btn btn-danger btn-xs EliminaPregunta' rel='" + Lista_preguntas[b].id + "'>Eliminar</button></td></tr>");
// }
//}
//$('#TablaClientes').on('click', '.EliminaCliente', function () {
// var id = $(this).attr('rel');
// $('#ModalEliminaCliente').modal('show');
// //$('#contrato').val(id);
//}); | $('#ModalDetalleTipoPregunta').modal('show');
});
| random_line_split |
serializers.py | from rest_framework import serializers
from csinterop.models import SharingProposal, Folder, User
class SharingProposalSerializer(serializers.ModelSerializer):
share_id = serializers.RelatedField(source='key')
permission = serializers.CharField(source='get_permission', read_only=True)
folder_name = serializers.RelatedField(source='folder.name')
owner_name = serializers.RelatedField(source='owner.name')
owner_email = serializers.RelatedField(source='owner.email')
protocol_version = serializers.CharField(required=False)
def restore_object(self, attrs, instance=None):
"""
Given a dictionary of deserialized field values, either update
an existing model instance, or create a new model instance.
"""
if instance is not None:
return instance
proposal = SharingProposal(**attrs)
proposal.key = self.context['request'].DATA['share_id']
owner = User()
owner.name = self.context['request'].DATA['owner_name']
owner.email = self.context['request'].DATA['owner_email']
proposal.owner = owner
folder = Folder()
folder.name = self.context['request'].DATA['folder_name']
proposal.folder = folder
write_access = True if self.context['request'].DATA['permission'].lower() is 'read-write' else False
proposal.write_access = write_access
proposal.status = 'PENDING'
return proposal
class | :
model = SharingProposal
fields = (
'share_id', 'recipient', 'resource_url', 'owner_name', 'owner_email', 'folder_name', 'permission',
'callback', 'protocol_version',
'status', 'created_at') | Meta | identifier_name |
serializers.py | from rest_framework import serializers
from csinterop.models import SharingProposal, Folder, User
class SharingProposalSerializer(serializers.ModelSerializer):
share_id = serializers.RelatedField(source='key')
permission = serializers.CharField(source='get_permission', read_only=True)
folder_name = serializers.RelatedField(source='folder.name')
owner_name = serializers.RelatedField(source='owner.name')
owner_email = serializers.RelatedField(source='owner.email')
protocol_version = serializers.CharField(required=False)
def restore_object(self, attrs, instance=None):
"""
Given a dictionary of deserialized field values, either update
an existing model instance, or create a new model instance.
"""
if instance is not None:
|
proposal = SharingProposal(**attrs)
proposal.key = self.context['request'].DATA['share_id']
owner = User()
owner.name = self.context['request'].DATA['owner_name']
owner.email = self.context['request'].DATA['owner_email']
proposal.owner = owner
folder = Folder()
folder.name = self.context['request'].DATA['folder_name']
proposal.folder = folder
write_access = True if self.context['request'].DATA['permission'].lower() is 'read-write' else False
proposal.write_access = write_access
proposal.status = 'PENDING'
return proposal
class Meta:
model = SharingProposal
fields = (
'share_id', 'recipient', 'resource_url', 'owner_name', 'owner_email', 'folder_name', 'permission',
'callback', 'protocol_version',
'status', 'created_at') | return instance | conditional_block |
serializers.py | from rest_framework import serializers
from csinterop.models import SharingProposal, Folder, User
class SharingProposalSerializer(serializers.ModelSerializer):
share_id = serializers.RelatedField(source='key')
permission = serializers.CharField(source='get_permission', read_only=True)
folder_name = serializers.RelatedField(source='folder.name')
owner_name = serializers.RelatedField(source='owner.name')
owner_email = serializers.RelatedField(source='owner.email')
protocol_version = serializers.CharField(required=False)
def restore_object(self, attrs, instance=None):
|
class Meta:
model = SharingProposal
fields = (
'share_id', 'recipient', 'resource_url', 'owner_name', 'owner_email', 'folder_name', 'permission',
'callback', 'protocol_version',
'status', 'created_at') | """
Given a dictionary of deserialized field values, either update
an existing model instance, or create a new model instance.
"""
if instance is not None:
return instance
proposal = SharingProposal(**attrs)
proposal.key = self.context['request'].DATA['share_id']
owner = User()
owner.name = self.context['request'].DATA['owner_name']
owner.email = self.context['request'].DATA['owner_email']
proposal.owner = owner
folder = Folder()
folder.name = self.context['request'].DATA['folder_name']
proposal.folder = folder
write_access = True if self.context['request'].DATA['permission'].lower() is 'read-write' else False
proposal.write_access = write_access
proposal.status = 'PENDING'
return proposal | identifier_body |
serializers.py | from rest_framework import serializers
from csinterop.models import SharingProposal, Folder, User
class SharingProposalSerializer(serializers.ModelSerializer):
share_id = serializers.RelatedField(source='key')
permission = serializers.CharField(source='get_permission', read_only=True)
folder_name = serializers.RelatedField(source='folder.name')
owner_name = serializers.RelatedField(source='owner.name')
owner_email = serializers.RelatedField(source='owner.email')
protocol_version = serializers.CharField(required=False)
def restore_object(self, attrs, instance=None):
"""
Given a dictionary of deserialized field values, either update
an existing model instance, or create a new model instance.
"""
if instance is not None:
return instance
proposal = SharingProposal(**attrs)
proposal.key = self.context['request'].DATA['share_id']
owner = User()
owner.name = self.context['request'].DATA['owner_name'] | proposal.owner = owner
folder = Folder()
folder.name = self.context['request'].DATA['folder_name']
proposal.folder = folder
write_access = True if self.context['request'].DATA['permission'].lower() is 'read-write' else False
proposal.write_access = write_access
proposal.status = 'PENDING'
return proposal
class Meta:
model = SharingProposal
fields = (
'share_id', 'recipient', 'resource_url', 'owner_name', 'owner_email', 'folder_name', 'permission',
'callback', 'protocol_version',
'status', 'created_at') | owner.email = self.context['request'].DATA['owner_email'] | random_line_split |
parser.py | # -*- coding:utf-8 -*-
from urllib.parse import urlparse
from lxml import html
from lxml.html.clean import Cleaner
from .forms import FormWrapper
from .helpers import (
match_form,
filter_element
)
class HtmlParser:
""" Parses response content string to valid html using `lxml.html`
"""
def | (self, response, session=None, use_cleaner=None, cleaner_params=None):
self._html_tree = html.fromstring(response.content)
self.links = {}
self._forms = []
self._cleaner = Cleaner(**cleaner_params) if use_cleaner else None
self._session = session
self._url = response.url
def make_links_absolute(self):
"""Makes absolute links http://domain.com/index.html from the relative ones /index.html
"""
parsed_url = urlparse(self._url)
self._html_tree.make_links_absolute(
'{url.scheme}://{url.netloc}/'.format(url=parsed_url),
resolve_base_href=True
)
def find_links(self, tags=None, filters=None, match='EQUAL'):
""" Find links and iterate through them checking if they are matching given filters and
tags
usage::
>>> import requests
>>> response = requests.get('https://httpbin.org/links/10/0')
>>> tags = ['style', 'link', 'script', 'a']
>>> parser = HtmlParser(response)
>>> links = parser.find_links(tags)
>>> len(links)
9
"""
filters = filters or {}
tags = tags or ['a']
for link, _, url, _ in self._html_tree.iterlinks():
matched = filter_element(
link,
tags=tags,
filters=filters,
match=match
)
if matched:
self.links[url] = matched
return self.links
def find_forms(self, filters=None):
""" Find forms and wraps them with class::`<FormWrapper>` object
usage::
>>> import requests
>>> response = requests.get('https://httpbin.org/forms/post')
>>> parser = HtmlParser(response)
>>> forms = parser.find_forms()
>>> len(forms)
1
"""
filters = filters or {}
self._forms = []
for form in self._html_tree.forms:
wrapped_form = FormWrapper(form, session=self._session, url=self._url)
if match_form(wrapped_form, filters):
self._forms.append(wrapped_form)
return self._forms
def xpath(self, path):
"""Select elements using xpath selectors"""
return self._html_tree.xpath(path)
def css(self, selector):
"""Select elements by css selectors"""
return self._html_tree.cssselect(selector)
if __name__ == '__main__':
import doctest
doctest.testmod()
| __init__ | identifier_name |
parser.py | # -*- coding:utf-8 -*-
from urllib.parse import urlparse
from lxml import html
from lxml.html.clean import Cleaner
from .forms import FormWrapper
from .helpers import (
match_form,
filter_element
)
class HtmlParser:
""" Parses response content string to valid html using `lxml.html`
"""
def __init__(self, response, session=None, use_cleaner=None, cleaner_params=None):
self._html_tree = html.fromstring(response.content)
self.links = {}
self._forms = []
self._cleaner = Cleaner(**cleaner_params) if use_cleaner else None
self._session = session
self._url = response.url
def make_links_absolute(self):
"""Makes absolute links http://domain.com/index.html from the relative ones /index.html
"""
parsed_url = urlparse(self._url)
self._html_tree.make_links_absolute(
'{url.scheme}://{url.netloc}/'.format(url=parsed_url),
resolve_base_href=True
)
def find_links(self, tags=None, filters=None, match='EQUAL'):
""" Find links and iterate through them checking if they are matching given filters and
tags
usage::
>>> import requests
>>> response = requests.get('https://httpbin.org/links/10/0')
>>> tags = ['style', 'link', 'script', 'a']
>>> parser = HtmlParser(response)
>>> links = parser.find_links(tags)
>>> len(links)
9
"""
filters = filters or {}
tags = tags or ['a']
for link, _, url, _ in self._html_tree.iterlinks():
matched = filter_element(
link,
tags=tags,
filters=filters,
match=match
)
if matched:
self.links[url] = matched
return self.links
def find_forms(self, filters=None):
""" Find forms and wraps them with class::`<FormWrapper>` object
usage::
>>> import requests
>>> response = requests.get('https://httpbin.org/forms/post')
>>> parser = HtmlParser(response)
>>> forms = parser.find_forms()
>>> len(forms)
1
"""
filters = filters or {}
self._forms = []
for form in self._html_tree.forms:
wrapped_form = FormWrapper(form, session=self._session, url=self._url)
if match_form(wrapped_form, filters):
self._forms.append(wrapped_form)
return self._forms
def xpath(self, path):
|
def css(self, selector):
"""Select elements by css selectors"""
return self._html_tree.cssselect(selector)
if __name__ == '__main__':
import doctest
doctest.testmod()
| """Select elements using xpath selectors"""
return self._html_tree.xpath(path) | identifier_body |
parser.py | # -*- coding:utf-8 -*-
from urllib.parse import urlparse
from lxml import html
from lxml.html.clean import Cleaner
from .forms import FormWrapper
from .helpers import (
match_form,
filter_element
)
class HtmlParser:
""" Parses response content string to valid html using `lxml.html`
"""
def __init__(self, response, session=None, use_cleaner=None, cleaner_params=None):
self._html_tree = html.fromstring(response.content)
self.links = {}
self._forms = []
self._cleaner = Cleaner(**cleaner_params) if use_cleaner else None
self._session = session
self._url = response.url
def make_links_absolute(self):
"""Makes absolute links http://domain.com/index.html from the relative ones /index.html
"""
parsed_url = urlparse(self._url)
self._html_tree.make_links_absolute(
'{url.scheme}://{url.netloc}/'.format(url=parsed_url),
resolve_base_href=True
)
def find_links(self, tags=None, filters=None, match='EQUAL'):
""" Find links and iterate through them checking if they are matching given filters and
tags
usage::
>>> import requests
>>> response = requests.get('https://httpbin.org/links/10/0')
>>> tags = ['style', 'link', 'script', 'a']
>>> parser = HtmlParser(response)
>>> links = parser.find_links(tags)
>>> len(links)
9
"""
filters = filters or {}
tags = tags or ['a']
for link, _, url, _ in self._html_tree.iterlinks():
matched = filter_element(
link,
tags=tags,
filters=filters,
match=match
)
if matched:
self.links[url] = matched
return self.links
def find_forms(self, filters=None):
""" Find forms and wraps them with class::`<FormWrapper>` object
usage::
>>> import requests
>>> response = requests.get('https://httpbin.org/forms/post')
>>> parser = HtmlParser(response)
>>> forms = parser.find_forms()
>>> len(forms)
1
"""
filters = filters or {}
self._forms = []
for form in self._html_tree.forms:
|
return self._forms
def xpath(self, path):
"""Select elements using xpath selectors"""
return self._html_tree.xpath(path)
def css(self, selector):
"""Select elements by css selectors"""
return self._html_tree.cssselect(selector)
if __name__ == '__main__':
import doctest
doctest.testmod()
| wrapped_form = FormWrapper(form, session=self._session, url=self._url)
if match_form(wrapped_form, filters):
self._forms.append(wrapped_form) | conditional_block |
parser.py | # -*- coding:utf-8 -*-
from urllib.parse import urlparse
from lxml import html
from lxml.html.clean import Cleaner
from .forms import FormWrapper
from .helpers import (
match_form,
filter_element
)
class HtmlParser:
""" Parses response content string to valid html using `lxml.html`
"""
def __init__(self, response, session=None, use_cleaner=None, cleaner_params=None):
self._html_tree = html.fromstring(response.content)
self.links = {}
self._forms = []
self._cleaner = Cleaner(**cleaner_params) if use_cleaner else None
self._session = session
self._url = response.url
def make_links_absolute(self):
"""Makes absolute links http://domain.com/index.html from the relative ones /index.html
"""
parsed_url = urlparse(self._url)
self._html_tree.make_links_absolute(
'{url.scheme}://{url.netloc}/'.format(url=parsed_url),
resolve_base_href=True
)
def find_links(self, tags=None, filters=None, match='EQUAL'):
""" Find links and iterate through them checking if they are matching given filters and
tags
usage::
>>> import requests
>>> response = requests.get('https://httpbin.org/links/10/0')
>>> tags = ['style', 'link', 'script', 'a']
>>> parser = HtmlParser(response)
>>> links = parser.find_links(tags)
>>> len(links)
9
"""
filters = filters or {}
tags = tags or ['a']
for link, _, url, _ in self._html_tree.iterlinks():
matched = filter_element(
link, | filters=filters,
match=match
)
if matched:
self.links[url] = matched
return self.links
def find_forms(self, filters=None):
""" Find forms and wraps them with class::`<FormWrapper>` object
usage::
>>> import requests
>>> response = requests.get('https://httpbin.org/forms/post')
>>> parser = HtmlParser(response)
>>> forms = parser.find_forms()
>>> len(forms)
1
"""
filters = filters or {}
self._forms = []
for form in self._html_tree.forms:
wrapped_form = FormWrapper(form, session=self._session, url=self._url)
if match_form(wrapped_form, filters):
self._forms.append(wrapped_form)
return self._forms
def xpath(self, path):
"""Select elements using xpath selectors"""
return self._html_tree.xpath(path)
def css(self, selector):
"""Select elements by css selectors"""
return self._html_tree.cssselect(selector)
if __name__ == '__main__':
import doctest
doctest.testmod() | tags=tags, | random_line_split |
index.ts | import {Bounds, parseBounds, parseDocumentSize} from './css/layout/bounds';
import {COLORS, isTransparent, parseColor} from './css/types/color';
import {CloneConfigurations, CloneOptions, DocumentCloner, WindowOptions} from './dom/document-cloner';
import {isBodyElement, isHTMLElement, parseTree} from './dom/node-parser';
import {CacheStorage} from './core/cache-storage';
import {CanvasRenderer, RenderConfigurations, RenderOptions} from './render/canvas/canvas-renderer';
import {ForeignObjectRenderer} from './render/canvas/foreignobject-renderer';
import {Context, ContextOptions} from './core/context';
/**
 * Full option bag accepted by `html2canvas`.
 *
 * Composed from the option groups of the subsystems involved in a render:
 * - `CloneOptions`   — controls how the target DOM is cloned (onclone hook, ignored elements, ...)
 * - `WindowOptions`  — viewport size / scroll offsets used for the clone iframe
 * - `RenderOptions`  — output canvas, scale, crop rectangle
 * - `ContextOptions` — logging, cache and resource-loading behaviour
 */
export type Options = CloneOptions &
    WindowOptions &
    RenderOptions &
    ContextOptions & {
        /** Canvas background fill; `null` renders a transparent background. */
        backgroundColor: string | null;
        /** Use the (faster, less compatible) SVG `<foreignObject>` renderer instead of the computed canvas renderer. */
        foreignObjectRendering: boolean;
        /** Remove the hidden clone iframe after rendering (default: true). */
        removeContainer?: boolean;
    };
/**
 * Render the given DOM element to an HTMLCanvasElement.
 *
 * Thin public entry point: all work is delegated to `renderElement`.
 *
 * @param element target element to render (must be attached to a document)
 * @param options partial option bag; unspecified fields fall back to defaults
 * @returns promise resolving to the rendered canvas
 */
const html2canvas = (element: HTMLElement, options: Partial<Options> = {}): Promise<HTMLCanvasElement> =>
    renderElement(element, options);

export default html2canvas;
// Module side effect: register the global window as the cache context.
// Guarded so the module can be imported in non-browser (SSR / Node) environments
// where `window` is undefined.
if (typeof window !== 'undefined') {
    CacheStorage.setContext(window);
}
/**
 * Core rendering pipeline: validate the target, clone its document into a
 * hidden iframe, then render the clone either via the foreign-object (SVG)
 * renderer or the computed canvas renderer.
 *
 * @param element element to render; must be attached to a document with a window
 * @param opts    partial options; every missing field gets a default below
 * @returns promise resolving to the rendered canvas
 * @throws Error when the element has no owner document or the document has no window
 */
const renderElement = async (element: HTMLElement, opts: Partial<Options>): Promise<HTMLCanvasElement> => {
    // Reject obviously invalid input (null / primitives) without throwing synchronously.
    if (!element || typeof element !== 'object') {
        return Promise.reject('Invalid element provided as first argument');
    }
    const ownerDocument = element.ownerDocument;

    if (!ownerDocument) {
        throw new Error(`Element is not attached to a Document`);
    }

    const defaultView = ownerDocument.defaultView;

    if (!defaultView) {
        throw new Error(`Document is not attached to a Window`);
    }

    // Options for loading external resources (images). Shared with the context options.
    const resourceOptions = {
        allowTaint: opts.allowTaint ?? false,
        imageTimeout: opts.imageTimeout ?? 15000,
        proxy: opts.proxy,
        useCORS: opts.useCORS ?? false
    };

    const contextOptions = {
        logging: opts.logging ?? true,
        cache: opts.cache,
        ...resourceOptions
    };

    // Viewport geometry for the clone iframe; defaults mirror the live window.
    const windowOptions = {
        windowWidth: opts.windowWidth ?? defaultView.innerWidth,
        windowHeight: opts.windowHeight ?? defaultView.innerHeight,
        scrollX: opts.scrollX ?? defaultView.pageXOffset,
        scrollY: opts.scrollY ?? defaultView.pageYOffset
    };

    const windowBounds = new Bounds(
        windowOptions.scrollX,
        windowOptions.scrollY,
        windowOptions.windowWidth,
        windowOptions.windowHeight
    );

    const context = new Context(contextOptions, windowBounds);

    const foreignObjectRendering = opts.foreignObjectRendering ?? false;

    // Inlining images / copying styles is only needed for the foreign-object path,
    // where the clone must be fully self-contained inside an SVG.
    const cloneOptions: CloneConfigurations = {
        allowTaint: opts.allowTaint ?? false,
        onclone: opts.onclone,
        ignoreElements: opts.ignoreElements,
        inlineImages: foreignObjectRendering,
        copyStyles: foreignObjectRendering
    };

    context.logger.debug(
        `Starting document clone with size ${windowBounds.width}x${
            windowBounds.height
        } scrolled to ${-windowBounds.left},${-windowBounds.top}`
    );

    // Clone the document and locate the element's counterpart inside the clone.
    const documentCloner = new DocumentCloner(context, element, cloneOptions);
    const clonedElement = documentCloner.clonedReferenceElement;
    if (!clonedElement) {
        return Promise.reject(`Unable to find element in cloned iframe`);
    }

    // Side effect: attaches a hidden iframe to the DOM; destroyed below unless
    // the caller opted out via `removeContainer: false`.
    const container = await documentCloner.toIFrame(ownerDocument, windowBounds);

    // Body / html elements are measured from the document; everything else from
    // the element's own layout bounds.
    const {width, height, left, top} =
        isBodyElement(clonedElement) || isHTMLElement(clonedElement)
            ? parseDocumentSize(clonedElement.ownerDocument)
            : parseBounds(context, clonedElement);

    const backgroundColor = parseBackgroundColor(context, clonedElement, opts.backgroundColor);

    const renderOptions: RenderConfigurations = {
        canvas: opts.canvas,
        backgroundColor,
        scale: opts.scale ?? defaultView.devicePixelRatio ?? 1,
        // Crop rectangle: caller offsets are relative to the element's position.
        x: (opts.x ?? 0) + left,
        y: (opts.y ?? 0) + top,
        width: opts.width ?? Math.ceil(width),
        height: opts.height ?? Math.ceil(height)
    };

    let canvas;

    if (foreignObjectRendering) {
        // Fast path: serialize the clone into an SVG <foreignObject> and draw it.
        context.logger.debug(`Document cloned, using foreign object rendering`);
        const renderer = new ForeignObjectRenderer(context, renderOptions);
        canvas = await renderer.render(clonedElement);
    } else {
        context.logger.debug(
            `Document cloned, element located at ${left},${top} with size ${width}x${height} using computed rendering`
        );

        context.logger.debug(`Starting DOM parsing`);
        const root = parseTree(context, clonedElement);

        // Avoid painting the page background twice: if the root element's own
        // background equals the resolved page background, make the root transparent.
        if (backgroundColor === root.styles.backgroundColor) {
            root.styles.backgroundColor = COLORS.TRANSPARENT;
        }

        context.logger.debug(
            `Starting renderer for element at ${renderOptions.x},${renderOptions.y} with size ${renderOptions.width}x${renderOptions.height}`
        );

        const renderer = new CanvasRenderer(context, renderOptions);
        canvas = await renderer.render(root);
    }

    // Clean up the hidden clone iframe unless the caller asked to keep it
    // (useful for debugging what was actually rendered).
    if (opts.removeContainer ?? true) {
        if (!DocumentCloner.destroy(container)) {
            context.logger.error(`Cannot detach cloned iframe as it is not in the DOM anymore`);
        }
    }

    context.logger.debug(`Finished rendering`);

    return canvas;
};
const parseBackgroundColor = (context: Context, element: HTMLElement, backgroundColorOverride?: string | null) => {
const ownerDocument = element.ownerDocument;
// http://www.w3.org/TR/css3-background/#special-backgrounds
const documentBackgroundColor = ownerDocument.documentElement
? parseColor(context, getComputedStyle(ownerDocument.documentElement).backgroundColor as string)
: COLORS.TRANSPARENT;
const bodyBackgroundColor = ownerDocument.body
? parseColor(context, getComputedStyle(ownerDocument.body).backgroundColor as string)
: COLORS.TRANSPARENT;
const defaultBackgroundColor =
typeof backgroundColorOverride === 'string'
? parseColor(context, backgroundColorOverride)
: backgroundColorOverride === null
? COLORS.TRANSPARENT
: 0xffffffff;
return element === ownerDocument.documentElement
? isTransparent(documentBackgroundColor)
? isTransparent(bodyBackgroundColor)
? defaultBackgroundColor
: bodyBackgroundColor
: documentBackgroundColor
: defaultBackgroundColor;
}; | random_line_split |
|
index.ts | import {Bounds, parseBounds, parseDocumentSize} from './css/layout/bounds';
import {COLORS, isTransparent, parseColor} from './css/types/color';
import {CloneConfigurations, CloneOptions, DocumentCloner, WindowOptions} from './dom/document-cloner';
import {isBodyElement, isHTMLElement, parseTree} from './dom/node-parser';
import {CacheStorage} from './core/cache-storage';
import {CanvasRenderer, RenderConfigurations, RenderOptions} from './render/canvas/canvas-renderer';
import {ForeignObjectRenderer} from './render/canvas/foreignobject-renderer';
import {Context, ContextOptions} from './core/context';
export type Options = CloneOptions &
WindowOptions &
RenderOptions &
ContextOptions & {
backgroundColor: string | null;
foreignObjectRendering: boolean;
removeContainer?: boolean;
};
const html2canvas = (element: HTMLElement, options: Partial<Options> = {}): Promise<HTMLCanvasElement> => {
return renderElement(element, options);
};
export default html2canvas;
if (typeof window !== 'undefined') {
CacheStorage.setContext(window);
}
const renderElement = async (element: HTMLElement, opts: Partial<Options>): Promise<HTMLCanvasElement> => {
if (!element || typeof element !== 'object') |
const ownerDocument = element.ownerDocument;
if (!ownerDocument) {
throw new Error(`Element is not attached to a Document`);
}
const defaultView = ownerDocument.defaultView;
if (!defaultView) {
throw new Error(`Document is not attached to a Window`);
}
const resourceOptions = {
allowTaint: opts.allowTaint ?? false,
imageTimeout: opts.imageTimeout ?? 15000,
proxy: opts.proxy,
useCORS: opts.useCORS ?? false
};
const contextOptions = {
logging: opts.logging ?? true,
cache: opts.cache,
...resourceOptions
};
const windowOptions = {
windowWidth: opts.windowWidth ?? defaultView.innerWidth,
windowHeight: opts.windowHeight ?? defaultView.innerHeight,
scrollX: opts.scrollX ?? defaultView.pageXOffset,
scrollY: opts.scrollY ?? defaultView.pageYOffset
};
const windowBounds = new Bounds(
windowOptions.scrollX,
windowOptions.scrollY,
windowOptions.windowWidth,
windowOptions.windowHeight
);
const context = new Context(contextOptions, windowBounds);
const foreignObjectRendering = opts.foreignObjectRendering ?? false;
const cloneOptions: CloneConfigurations = {
allowTaint: opts.allowTaint ?? false,
onclone: opts.onclone,
ignoreElements: opts.ignoreElements,
inlineImages: foreignObjectRendering,
copyStyles: foreignObjectRendering
};
context.logger.debug(
`Starting document clone with size ${windowBounds.width}x${
windowBounds.height
} scrolled to ${-windowBounds.left},${-windowBounds.top}`
);
const documentCloner = new DocumentCloner(context, element, cloneOptions);
const clonedElement = documentCloner.clonedReferenceElement;
if (!clonedElement) {
return Promise.reject(`Unable to find element in cloned iframe`);
}
const container = await documentCloner.toIFrame(ownerDocument, windowBounds);
const {width, height, left, top} =
isBodyElement(clonedElement) || isHTMLElement(clonedElement)
? parseDocumentSize(clonedElement.ownerDocument)
: parseBounds(context, clonedElement);
const backgroundColor = parseBackgroundColor(context, clonedElement, opts.backgroundColor);
const renderOptions: RenderConfigurations = {
canvas: opts.canvas,
backgroundColor,
scale: opts.scale ?? defaultView.devicePixelRatio ?? 1,
x: (opts.x ?? 0) + left,
y: (opts.y ?? 0) + top,
width: opts.width ?? Math.ceil(width),
height: opts.height ?? Math.ceil(height)
};
let canvas;
if (foreignObjectRendering) {
context.logger.debug(`Document cloned, using foreign object rendering`);
const renderer = new ForeignObjectRenderer(context, renderOptions);
canvas = await renderer.render(clonedElement);
} else {
context.logger.debug(
`Document cloned, element located at ${left},${top} with size ${width}x${height} using computed rendering`
);
context.logger.debug(`Starting DOM parsing`);
const root = parseTree(context, clonedElement);
if (backgroundColor === root.styles.backgroundColor) {
root.styles.backgroundColor = COLORS.TRANSPARENT;
}
context.logger.debug(
`Starting renderer for element at ${renderOptions.x},${renderOptions.y} with size ${renderOptions.width}x${renderOptions.height}`
);
const renderer = new CanvasRenderer(context, renderOptions);
canvas = await renderer.render(root);
}
if (opts.removeContainer ?? true) {
if (!DocumentCloner.destroy(container)) {
context.logger.error(`Cannot detach cloned iframe as it is not in the DOM anymore`);
}
}
context.logger.debug(`Finished rendering`);
return canvas;
};
const parseBackgroundColor = (context: Context, element: HTMLElement, backgroundColorOverride?: string | null) => {
const ownerDocument = element.ownerDocument;
// http://www.w3.org/TR/css3-background/#special-backgrounds
const documentBackgroundColor = ownerDocument.documentElement
? parseColor(context, getComputedStyle(ownerDocument.documentElement).backgroundColor as string)
: COLORS.TRANSPARENT;
const bodyBackgroundColor = ownerDocument.body
? parseColor(context, getComputedStyle(ownerDocument.body).backgroundColor as string)
: COLORS.TRANSPARENT;
const defaultBackgroundColor =
typeof backgroundColorOverride === 'string'
? parseColor(context, backgroundColorOverride)
: backgroundColorOverride === null
? COLORS.TRANSPARENT
: 0xffffffff;
return element === ownerDocument.documentElement
? isTransparent(documentBackgroundColor)
? isTransparent(bodyBackgroundColor)
? defaultBackgroundColor
: bodyBackgroundColor
: documentBackgroundColor
: defaultBackgroundColor;
};
| {
return Promise.reject('Invalid element provided as first argument');
} | conditional_block |
baBackTop.component.ts | import {Component, ViewChild, HostListener, Input, ElementRef} from '@angular/core';
| <i #baBackTop class="fa fa-angle-up back-top ba-back-top" title="Back to Top"></i>
`
})
export class BaBackTop {
@Input() position:number = 400;
@Input() showSpeed:number = 500;
@Input() moveSpeed:number = 1000;
@ViewChild('baBackTop') _selector:ElementRef;
ngAfterViewInit () {
this._onWindowScroll();
}
@HostListener('click')
_onClick():boolean {
jQuery('html, body').animate({scrollTop:0}, {duration:this.moveSpeed});
return false;
}
@HostListener('window:scroll')
_onWindowScroll():void {
let el = this._selector.nativeElement;
window.scrollY > this.position ? jQuery(el).fadeIn(this.showSpeed) : jQuery(el).fadeOut(this.showSpeed);
}
} | @Component({
selector: 'ba-back-top',
styleUrls: ['./baBackTop.scss'],
template: ` | random_line_split |
baBackTop.component.ts | import {Component, ViewChild, HostListener, Input, ElementRef} from '@angular/core';
@Component({
selector: 'ba-back-top',
styleUrls: ['./baBackTop.scss'],
template: `
<i #baBackTop class="fa fa-angle-up back-top ba-back-top" title="Back to Top"></i>
`
})
export class BaBackTop {
@Input() position:number = 400;
@Input() showSpeed:number = 500;
@Input() moveSpeed:number = 1000;
@ViewChild('baBackTop') _selector:ElementRef;
ngAfterViewInit () {
this._onWindowScroll();
}
@HostListener('click')
_onClick():boolean {
jQuery('html, body').animate({scrollTop:0}, {duration:this.moveSpeed});
return false;
}
@HostListener('window:scroll')
| ():void {
let el = this._selector.nativeElement;
window.scrollY > this.position ? jQuery(el).fadeIn(this.showSpeed) : jQuery(el).fadeOut(this.showSpeed);
}
}
| _onWindowScroll | identifier_name |
replicator.py | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import itertools
import json
import time
from collections import defaultdict
from eventlet import Timeout
from swift.container.sync_store import ContainerSyncStore
from swift.container.backend import ContainerBroker, DATADIR
from swift.container.reconciler import (
MISPLACED_OBJECTS_ACCOUNT, incorrect_policy_index,
get_reconciler_container_name, get_row_to_q_entry_translator)
from swift.common import db_replicator
from swift.common.storage_policy import POLICIES
from swift.common.exceptions import DeviceUnavailable
from swift.common.http import is_success
from swift.common.db import DatabaseAlreadyExists
from swift.common.utils import (Timestamp, hash_path,
storage_directory, majority_size)
class ContainerReplicator(db_replicator.Replicator):
server_type = 'container'
brokerclass = ContainerBroker
datadir = DATADIR
default_port = 6201
def report_up_to_date(self, full_info):
reported_key_map = {
'reported_put_timestamp': 'put_timestamp',
'reported_delete_timestamp': 'delete_timestamp',
'reported_bytes_used': 'bytes_used',
'reported_object_count': 'count',
}
for reported, value_key in reported_key_map.items():
if full_info[reported] != full_info[value_key]:
return False
return True
def _gather_sync_args(self, replication_info):
parent = super(ContainerReplicator, self)
sync_args = parent._gather_sync_args(replication_info)
if len(POLICIES) > 1:
sync_args += tuple(replication_info[k] for k in
('status_changed_at', 'count',
'storage_policy_index'))
return sync_args
def _handle_sync_response(self, node, response, info, broker, http,
different_region):
parent = super(ContainerReplicator, self)
if is_success(response.status):
remote_info = json.loads(response.data)
if incorrect_policy_index(info, remote_info):
status_changed_at = Timestamp(time.time())
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at.internal)
sync_timestamps = ('created_at', 'put_timestamp',
'delete_timestamp')
if any(info[key] != remote_info[key] for key in sync_timestamps):
broker.merge_timestamps(*(remote_info[key] for key in
sync_timestamps))
rv = parent._handle_sync_response(
node, response, info, broker, http, different_region)
return rv
def find_local_handoff_for_part(self, part):
"""
Look through devices in the ring for the first handoff device that was
identified during job creation as available on this node.
:returns: a node entry from the ring
"""
nodes = self.ring.get_part_nodes(part)
more_nodes = self.ring.get_more_nodes(part)
for node in itertools.chain(nodes, more_nodes):
if node['id'] in self._local_device_ids:
return node
return None
def get_reconciler_broker(self, timestamp):
"""
Get a local instance of the reconciler container broker that is
appropriate to enqueue the given timestamp.
:param timestamp: the timestamp of the row to be enqueued
:returns: a local reconciler broker
"""
container = get_reconciler_container_name(timestamp)
if self.reconciler_containers and \
container in self.reconciler_containers:
return self.reconciler_containers[container][1]
account = MISPLACED_OBJECTS_ACCOUNT
part = self.ring.get_part(account, container)
node = self.find_local_handoff_for_part(part)
if not node:
raise DeviceUnavailable(
'No mounted devices found suitable to Handoff reconciler '
'container %s in partition %s' % (container, part))
hsh = hash_path(account, container)
db_dir = storage_directory(DATADIR, part, hsh)
db_path = os.path.join(self.root, node['device'], db_dir, hsh + '.db')
broker = ContainerBroker(db_path, account=account, container=container)
if not os.path.exists(broker.db_file):
try:
broker.initialize(timestamp, 0)
except DatabaseAlreadyExists:
pass
if self.reconciler_containers is not None:
self.reconciler_containers[container] = part, broker, node['id']
return broker
def feed_reconciler(self, container, item_list):
"""
Add queue entries for rows in item_list to the local reconciler
container database.
:param container: the name of the reconciler container
:param item_list: the list of rows to enqueue
:returns: True if successfully enqueued
"""
try:
reconciler = self.get_reconciler_broker(container)
except DeviceUnavailable as e:
self.logger.warning('DeviceUnavailable: %s', e)
return False
self.logger.debug('Adding %d objects to the reconciler at %s',
len(item_list), reconciler.db_file)
try:
reconciler.merge_items(item_list)
except (Exception, Timeout):
self.logger.exception('UNHANDLED EXCEPTION: trying to merge '
'%d items to reconciler container %s',
len(item_list), reconciler.db_file)
return False
return True
def dump_to_reconciler(self, broker, point):
"""
Look for object rows for objects updates in the wrong storage policy
in broker with a ``ROWID`` greater than the rowid given as point.
:param broker: the container broker with misplaced objects
:param point: the last verified ``reconciler_sync_point``
:returns: the last successful enqueued rowid
"""
max_sync = broker.get_max_row()
misplaced = broker.get_misplaced_since(point, self.per_diff)
if not misplaced:
return max_sync
translator = get_row_to_q_entry_translator(broker)
errors = False
low_sync = point
while misplaced:
batches = defaultdict(list)
for item in misplaced:
container = get_reconciler_container_name(item['created_at'])
batches[container].append(translator(item))
for container, item_list in batches.items():
success = self.feed_reconciler(container, item_list)
if not success:
errors = True
point = misplaced[-1]['ROWID']
if not errors:
low_sync = point
misplaced = broker.get_misplaced_since(point, self.per_diff)
return low_sync
def _post_replicate_hook(self, broker, info, responses):
if info['account'] == MISPLACED_OBJECTS_ACCOUNT:
return
try:
self.sync_store.update_sync_store(broker)
except Exception:
self.logger.exception('Failed to update sync_store %s' %
broker.db_file)
point = broker.get_reconciler_sync()
if not broker.has_multiple_policies() and info['max_row'] != point:
broker.update_reconciler_sync(info['max_row'])
return
max_sync = self.dump_to_reconciler(broker, point)
success = responses.count(True) >= majority_size(len(responses))
if max_sync > point and success:
# to be safe, only slide up the sync point with a majority on |
def delete_db(self, broker):
"""
Ensure that reconciler databases are only cleaned up at the end of the
replication run.
"""
if (self.reconciler_cleanups is not None and
broker.account == MISPLACED_OBJECTS_ACCOUNT):
# this container shouldn't be here, make sure it's cleaned up
self.reconciler_cleanups[broker.container] = broker
return
try:
# DB is going to get deleted. Be preemptive about it
self.sync_store.remove_synced_container(broker)
except Exception:
self.logger.exception('Failed to remove sync_store entry %s' %
broker.db_file)
return super(ContainerReplicator, self).delete_db(broker)
def replicate_reconcilers(self):
"""
Ensure any items merged to reconciler containers during replication
are pushed out to correct nodes and any reconciler containers that do
not belong on this node are removed.
"""
self.logger.info('Replicating %d reconciler containers',
len(self.reconciler_containers))
for part, reconciler, node_id in self.reconciler_containers.values():
self.cpool.spawn_n(
self._replicate_object, part, reconciler.db_file, node_id)
self.cpool.waitall()
# wipe out the cache do disable bypass in delete_db
cleanups = self.reconciler_cleanups
self.reconciler_cleanups = self.reconciler_containers = None
self.logger.info('Cleaning up %d reconciler containers',
len(cleanups))
for reconciler in cleanups.values():
self.cpool.spawn_n(self.delete_db, reconciler)
self.cpool.waitall()
self.logger.info('Finished reconciler replication')
def run_once(self, *args, **kwargs):
self.reconciler_containers = {}
self.reconciler_cleanups = {}
self.sync_store = ContainerSyncStore(self.root,
self.logger,
self.mount_check)
rv = super(ContainerReplicator, self).run_once(*args, **kwargs)
if any([self.reconciler_containers, self.reconciler_cleanups]):
self.replicate_reconcilers()
return rv
class ContainerReplicatorRpc(db_replicator.ReplicatorRpc):
def _parse_sync_args(self, args):
parent = super(ContainerReplicatorRpc, self)
remote_info = parent._parse_sync_args(args)
if len(args) > 9:
remote_info['status_changed_at'] = args[7]
remote_info['count'] = args[8]
remote_info['storage_policy_index'] = args[9]
return remote_info
def _get_synced_replication_info(self, broker, remote_info):
"""
Sync the remote_info storage_policy_index if needed and return the
newly synced replication info.
:param broker: the database broker
:param remote_info: the remote replication info
:returns: local broker replication info
"""
info = broker.get_replication_info()
if incorrect_policy_index(info, remote_info):
status_changed_at = Timestamp(time.time()).internal
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at)
info = broker.get_replication_info()
return info | # replication
broker.update_reconciler_sync(max_sync) | random_line_split |
replicator.py | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import itertools
import json
import time
from collections import defaultdict
from eventlet import Timeout
from swift.container.sync_store import ContainerSyncStore
from swift.container.backend import ContainerBroker, DATADIR
from swift.container.reconciler import (
MISPLACED_OBJECTS_ACCOUNT, incorrect_policy_index,
get_reconciler_container_name, get_row_to_q_entry_translator)
from swift.common import db_replicator
from swift.common.storage_policy import POLICIES
from swift.common.exceptions import DeviceUnavailable
from swift.common.http import is_success
from swift.common.db import DatabaseAlreadyExists
from swift.common.utils import (Timestamp, hash_path,
storage_directory, majority_size)
class ContainerReplicator(db_replicator.Replicator):
server_type = 'container'
brokerclass = ContainerBroker
datadir = DATADIR
default_port = 6201
def report_up_to_date(self, full_info):
reported_key_map = {
'reported_put_timestamp': 'put_timestamp',
'reported_delete_timestamp': 'delete_timestamp',
'reported_bytes_used': 'bytes_used',
'reported_object_count': 'count',
}
for reported, value_key in reported_key_map.items():
if full_info[reported] != full_info[value_key]:
return False
return True
def _gather_sync_args(self, replication_info):
parent = super(ContainerReplicator, self)
sync_args = parent._gather_sync_args(replication_info)
if len(POLICIES) > 1:
sync_args += tuple(replication_info[k] for k in
('status_changed_at', 'count',
'storage_policy_index'))
return sync_args
def _handle_sync_response(self, node, response, info, broker, http,
different_region):
parent = super(ContainerReplicator, self)
if is_success(response.status):
remote_info = json.loads(response.data)
if incorrect_policy_index(info, remote_info):
status_changed_at = Timestamp(time.time())
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at.internal)
sync_timestamps = ('created_at', 'put_timestamp',
'delete_timestamp')
if any(info[key] != remote_info[key] for key in sync_timestamps):
broker.merge_timestamps(*(remote_info[key] for key in
sync_timestamps))
rv = parent._handle_sync_response(
node, response, info, broker, http, different_region)
return rv
def find_local_handoff_for_part(self, part):
"""
Look through devices in the ring for the first handoff device that was
identified during job creation as available on this node.
:returns: a node entry from the ring
"""
nodes = self.ring.get_part_nodes(part)
more_nodes = self.ring.get_more_nodes(part)
for node in itertools.chain(nodes, more_nodes):
if node['id'] in self._local_device_ids:
|
return None
def get_reconciler_broker(self, timestamp):
"""
Get a local instance of the reconciler container broker that is
appropriate to enqueue the given timestamp.
:param timestamp: the timestamp of the row to be enqueued
:returns: a local reconciler broker
"""
container = get_reconciler_container_name(timestamp)
if self.reconciler_containers and \
container in self.reconciler_containers:
return self.reconciler_containers[container][1]
account = MISPLACED_OBJECTS_ACCOUNT
part = self.ring.get_part(account, container)
node = self.find_local_handoff_for_part(part)
if not node:
raise DeviceUnavailable(
'No mounted devices found suitable to Handoff reconciler '
'container %s in partition %s' % (container, part))
hsh = hash_path(account, container)
db_dir = storage_directory(DATADIR, part, hsh)
db_path = os.path.join(self.root, node['device'], db_dir, hsh + '.db')
broker = ContainerBroker(db_path, account=account, container=container)
if not os.path.exists(broker.db_file):
try:
broker.initialize(timestamp, 0)
except DatabaseAlreadyExists:
pass
if self.reconciler_containers is not None:
self.reconciler_containers[container] = part, broker, node['id']
return broker
def feed_reconciler(self, container, item_list):
"""
Add queue entries for rows in item_list to the local reconciler
container database.
:param container: the name of the reconciler container
:param item_list: the list of rows to enqueue
:returns: True if successfully enqueued
"""
try:
reconciler = self.get_reconciler_broker(container)
except DeviceUnavailable as e:
self.logger.warning('DeviceUnavailable: %s', e)
return False
self.logger.debug('Adding %d objects to the reconciler at %s',
len(item_list), reconciler.db_file)
try:
reconciler.merge_items(item_list)
except (Exception, Timeout):
self.logger.exception('UNHANDLED EXCEPTION: trying to merge '
'%d items to reconciler container %s',
len(item_list), reconciler.db_file)
return False
return True
def dump_to_reconciler(self, broker, point):
"""
Look for object rows for objects updates in the wrong storage policy
in broker with a ``ROWID`` greater than the rowid given as point.
:param broker: the container broker with misplaced objects
:param point: the last verified ``reconciler_sync_point``
:returns: the last successful enqueued rowid
"""
max_sync = broker.get_max_row()
misplaced = broker.get_misplaced_since(point, self.per_diff)
if not misplaced:
return max_sync
translator = get_row_to_q_entry_translator(broker)
errors = False
low_sync = point
while misplaced:
batches = defaultdict(list)
for item in misplaced:
container = get_reconciler_container_name(item['created_at'])
batches[container].append(translator(item))
for container, item_list in batches.items():
success = self.feed_reconciler(container, item_list)
if not success:
errors = True
point = misplaced[-1]['ROWID']
if not errors:
low_sync = point
misplaced = broker.get_misplaced_since(point, self.per_diff)
return low_sync
def _post_replicate_hook(self, broker, info, responses):
if info['account'] == MISPLACED_OBJECTS_ACCOUNT:
return
try:
self.sync_store.update_sync_store(broker)
except Exception:
self.logger.exception('Failed to update sync_store %s' %
broker.db_file)
point = broker.get_reconciler_sync()
if not broker.has_multiple_policies() and info['max_row'] != point:
broker.update_reconciler_sync(info['max_row'])
return
max_sync = self.dump_to_reconciler(broker, point)
success = responses.count(True) >= majority_size(len(responses))
if max_sync > point and success:
# to be safe, only slide up the sync point with a majority on
# replication
broker.update_reconciler_sync(max_sync)
def delete_db(self, broker):
"""
Ensure that reconciler databases are only cleaned up at the end of the
replication run.
"""
if (self.reconciler_cleanups is not None and
broker.account == MISPLACED_OBJECTS_ACCOUNT):
# this container shouldn't be here, make sure it's cleaned up
self.reconciler_cleanups[broker.container] = broker
return
try:
# DB is going to get deleted. Be preemptive about it
self.sync_store.remove_synced_container(broker)
except Exception:
self.logger.exception('Failed to remove sync_store entry %s' %
broker.db_file)
return super(ContainerReplicator, self).delete_db(broker)
def replicate_reconcilers(self):
"""
Ensure any items merged to reconciler containers during replication
are pushed out to correct nodes and any reconciler containers that do
not belong on this node are removed.
"""
self.logger.info('Replicating %d reconciler containers',
len(self.reconciler_containers))
for part, reconciler, node_id in self.reconciler_containers.values():
self.cpool.spawn_n(
self._replicate_object, part, reconciler.db_file, node_id)
self.cpool.waitall()
# wipe out the cache do disable bypass in delete_db
cleanups = self.reconciler_cleanups
self.reconciler_cleanups = self.reconciler_containers = None
self.logger.info('Cleaning up %d reconciler containers',
len(cleanups))
for reconciler in cleanups.values():
self.cpool.spawn_n(self.delete_db, reconciler)
self.cpool.waitall()
self.logger.info('Finished reconciler replication')
def run_once(self, *args, **kwargs):
self.reconciler_containers = {}
self.reconciler_cleanups = {}
self.sync_store = ContainerSyncStore(self.root,
self.logger,
self.mount_check)
rv = super(ContainerReplicator, self).run_once(*args, **kwargs)
if any([self.reconciler_containers, self.reconciler_cleanups]):
self.replicate_reconcilers()
return rv
class ContainerReplicatorRpc(db_replicator.ReplicatorRpc):
def _parse_sync_args(self, args):
parent = super(ContainerReplicatorRpc, self)
remote_info = parent._parse_sync_args(args)
if len(args) > 9:
remote_info['status_changed_at'] = args[7]
remote_info['count'] = args[8]
remote_info['storage_policy_index'] = args[9]
return remote_info
def _get_synced_replication_info(self, broker, remote_info):
"""
Sync the remote_info storage_policy_index if needed and return the
newly synced replication info.
:param broker: the database broker
:param remote_info: the remote replication info
:returns: local broker replication info
"""
info = broker.get_replication_info()
if incorrect_policy_index(info, remote_info):
status_changed_at = Timestamp(time.time()).internal
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at)
info = broker.get_replication_info()
return info
| return node | conditional_block |
replicator.py | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import itertools
import json
import time
from collections import defaultdict
from eventlet import Timeout
from swift.container.sync_store import ContainerSyncStore
from swift.container.backend import ContainerBroker, DATADIR
from swift.container.reconciler import (
MISPLACED_OBJECTS_ACCOUNT, incorrect_policy_index,
get_reconciler_container_name, get_row_to_q_entry_translator)
from swift.common import db_replicator
from swift.common.storage_policy import POLICIES
from swift.common.exceptions import DeviceUnavailable
from swift.common.http import is_success
from swift.common.db import DatabaseAlreadyExists
from swift.common.utils import (Timestamp, hash_path,
storage_directory, majority_size)
class ContainerReplicator(db_replicator.Replicator):
server_type = 'container'
brokerclass = ContainerBroker
datadir = DATADIR
default_port = 6201
def report_up_to_date(self, full_info):
reported_key_map = {
'reported_put_timestamp': 'put_timestamp',
'reported_delete_timestamp': 'delete_timestamp',
'reported_bytes_used': 'bytes_used',
'reported_object_count': 'count',
}
for reported, value_key in reported_key_map.items():
if full_info[reported] != full_info[value_key]:
return False
return True
def _gather_sync_args(self, replication_info):
parent = super(ContainerReplicator, self)
sync_args = parent._gather_sync_args(replication_info)
if len(POLICIES) > 1:
sync_args += tuple(replication_info[k] for k in
('status_changed_at', 'count',
'storage_policy_index'))
return sync_args
def _handle_sync_response(self, node, response, info, broker, http,
different_region):
parent = super(ContainerReplicator, self)
if is_success(response.status):
remote_info = json.loads(response.data)
if incorrect_policy_index(info, remote_info):
status_changed_at = Timestamp(time.time())
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at.internal)
sync_timestamps = ('created_at', 'put_timestamp',
'delete_timestamp')
if any(info[key] != remote_info[key] for key in sync_timestamps):
broker.merge_timestamps(*(remote_info[key] for key in
sync_timestamps))
rv = parent._handle_sync_response(
node, response, info, broker, http, different_region)
return rv
def find_local_handoff_for_part(self, part):
"""
Look through devices in the ring for the first handoff device that was
identified during job creation as available on this node.
:returns: a node entry from the ring
"""
nodes = self.ring.get_part_nodes(part)
more_nodes = self.ring.get_more_nodes(part)
for node in itertools.chain(nodes, more_nodes):
if node['id'] in self._local_device_ids:
return node
return None
def get_reconciler_broker(self, timestamp):
"""
Get a local instance of the reconciler container broker that is
appropriate to enqueue the given timestamp.
:param timestamp: the timestamp of the row to be enqueued
:returns: a local reconciler broker
"""
container = get_reconciler_container_name(timestamp)
if self.reconciler_containers and \
container in self.reconciler_containers:
return self.reconciler_containers[container][1]
account = MISPLACED_OBJECTS_ACCOUNT
part = self.ring.get_part(account, container)
node = self.find_local_handoff_for_part(part)
if not node:
raise DeviceUnavailable(
'No mounted devices found suitable to Handoff reconciler '
'container %s in partition %s' % (container, part))
hsh = hash_path(account, container)
db_dir = storage_directory(DATADIR, part, hsh)
db_path = os.path.join(self.root, node['device'], db_dir, hsh + '.db')
broker = ContainerBroker(db_path, account=account, container=container)
if not os.path.exists(broker.db_file):
try:
broker.initialize(timestamp, 0)
except DatabaseAlreadyExists:
pass
if self.reconciler_containers is not None:
self.reconciler_containers[container] = part, broker, node['id']
return broker
def feed_reconciler(self, container, item_list):
"""
Add queue entries for rows in item_list to the local reconciler
container database.
:param container: the name of the reconciler container
:param item_list: the list of rows to enqueue
:returns: True if successfully enqueued
"""
try:
reconciler = self.get_reconciler_broker(container)
except DeviceUnavailable as e:
self.logger.warning('DeviceUnavailable: %s', e)
return False
self.logger.debug('Adding %d objects to the reconciler at %s',
len(item_list), reconciler.db_file)
try:
reconciler.merge_items(item_list)
except (Exception, Timeout):
self.logger.exception('UNHANDLED EXCEPTION: trying to merge '
'%d items to reconciler container %s',
len(item_list), reconciler.db_file)
return False
return True
def dump_to_reconciler(self, broker, point):
"""
Look for object rows for objects updates in the wrong storage policy
in broker with a ``ROWID`` greater than the rowid given as point.
:param broker: the container broker with misplaced objects
:param point: the last verified ``reconciler_sync_point``
:returns: the last successful enqueued rowid
"""
max_sync = broker.get_max_row()
misplaced = broker.get_misplaced_since(point, self.per_diff)
if not misplaced:
return max_sync
translator = get_row_to_q_entry_translator(broker)
errors = False
low_sync = point
while misplaced:
batches = defaultdict(list)
for item in misplaced:
container = get_reconciler_container_name(item['created_at'])
batches[container].append(translator(item))
for container, item_list in batches.items():
success = self.feed_reconciler(container, item_list)
if not success:
errors = True
point = misplaced[-1]['ROWID']
if not errors:
low_sync = point
misplaced = broker.get_misplaced_since(point, self.per_diff)
return low_sync
def _post_replicate_hook(self, broker, info, responses):
if info['account'] == MISPLACED_OBJECTS_ACCOUNT:
return
try:
self.sync_store.update_sync_store(broker)
except Exception:
self.logger.exception('Failed to update sync_store %s' %
broker.db_file)
point = broker.get_reconciler_sync()
if not broker.has_multiple_policies() and info['max_row'] != point:
broker.update_reconciler_sync(info['max_row'])
return
max_sync = self.dump_to_reconciler(broker, point)
success = responses.count(True) >= majority_size(len(responses))
if max_sync > point and success:
# to be safe, only slide up the sync point with a majority on
# replication
broker.update_reconciler_sync(max_sync)
def delete_db(self, broker):
"""
Ensure that reconciler databases are only cleaned up at the end of the
replication run.
"""
if (self.reconciler_cleanups is not None and
broker.account == MISPLACED_OBJECTS_ACCOUNT):
# this container shouldn't be here, make sure it's cleaned up
self.reconciler_cleanups[broker.container] = broker
return
try:
# DB is going to get deleted. Be preemptive about it
self.sync_store.remove_synced_container(broker)
except Exception:
self.logger.exception('Failed to remove sync_store entry %s' %
broker.db_file)
return super(ContainerReplicator, self).delete_db(broker)
def | (self):
"""
Ensure any items merged to reconciler containers during replication
are pushed out to correct nodes and any reconciler containers that do
not belong on this node are removed.
"""
self.logger.info('Replicating %d reconciler containers',
len(self.reconciler_containers))
for part, reconciler, node_id in self.reconciler_containers.values():
self.cpool.spawn_n(
self._replicate_object, part, reconciler.db_file, node_id)
self.cpool.waitall()
# wipe out the cache do disable bypass in delete_db
cleanups = self.reconciler_cleanups
self.reconciler_cleanups = self.reconciler_containers = None
self.logger.info('Cleaning up %d reconciler containers',
len(cleanups))
for reconciler in cleanups.values():
self.cpool.spawn_n(self.delete_db, reconciler)
self.cpool.waitall()
self.logger.info('Finished reconciler replication')
def run_once(self, *args, **kwargs):
self.reconciler_containers = {}
self.reconciler_cleanups = {}
self.sync_store = ContainerSyncStore(self.root,
self.logger,
self.mount_check)
rv = super(ContainerReplicator, self).run_once(*args, **kwargs)
if any([self.reconciler_containers, self.reconciler_cleanups]):
self.replicate_reconcilers()
return rv
class ContainerReplicatorRpc(db_replicator.ReplicatorRpc):
def _parse_sync_args(self, args):
parent = super(ContainerReplicatorRpc, self)
remote_info = parent._parse_sync_args(args)
if len(args) > 9:
remote_info['status_changed_at'] = args[7]
remote_info['count'] = args[8]
remote_info['storage_policy_index'] = args[9]
return remote_info
def _get_synced_replication_info(self, broker, remote_info):
"""
Sync the remote_info storage_policy_index if needed and return the
newly synced replication info.
:param broker: the database broker
:param remote_info: the remote replication info
:returns: local broker replication info
"""
info = broker.get_replication_info()
if incorrect_policy_index(info, remote_info):
status_changed_at = Timestamp(time.time()).internal
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at)
info = broker.get_replication_info()
return info
| replicate_reconcilers | identifier_name |
replicator.py | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import itertools
import json
import time
from collections import defaultdict
from eventlet import Timeout
from swift.container.sync_store import ContainerSyncStore
from swift.container.backend import ContainerBroker, DATADIR
from swift.container.reconciler import (
MISPLACED_OBJECTS_ACCOUNT, incorrect_policy_index,
get_reconciler_container_name, get_row_to_q_entry_translator)
from swift.common import db_replicator
from swift.common.storage_policy import POLICIES
from swift.common.exceptions import DeviceUnavailable
from swift.common.http import is_success
from swift.common.db import DatabaseAlreadyExists
from swift.common.utils import (Timestamp, hash_path,
storage_directory, majority_size)
class ContainerReplicator(db_replicator.Replicator):
server_type = 'container'
brokerclass = ContainerBroker
datadir = DATADIR
default_port = 6201
def report_up_to_date(self, full_info):
reported_key_map = {
'reported_put_timestamp': 'put_timestamp',
'reported_delete_timestamp': 'delete_timestamp',
'reported_bytes_used': 'bytes_used',
'reported_object_count': 'count',
}
for reported, value_key in reported_key_map.items():
if full_info[reported] != full_info[value_key]:
return False
return True
def _gather_sync_args(self, replication_info):
parent = super(ContainerReplicator, self)
sync_args = parent._gather_sync_args(replication_info)
if len(POLICIES) > 1:
sync_args += tuple(replication_info[k] for k in
('status_changed_at', 'count',
'storage_policy_index'))
return sync_args
def _handle_sync_response(self, node, response, info, broker, http,
different_region):
|
def find_local_handoff_for_part(self, part):
"""
Look through devices in the ring for the first handoff device that was
identified during job creation as available on this node.
:returns: a node entry from the ring
"""
nodes = self.ring.get_part_nodes(part)
more_nodes = self.ring.get_more_nodes(part)
for node in itertools.chain(nodes, more_nodes):
if node['id'] in self._local_device_ids:
return node
return None
def get_reconciler_broker(self, timestamp):
"""
Get a local instance of the reconciler container broker that is
appropriate to enqueue the given timestamp.
:param timestamp: the timestamp of the row to be enqueued
:returns: a local reconciler broker
"""
container = get_reconciler_container_name(timestamp)
if self.reconciler_containers and \
container in self.reconciler_containers:
return self.reconciler_containers[container][1]
account = MISPLACED_OBJECTS_ACCOUNT
part = self.ring.get_part(account, container)
node = self.find_local_handoff_for_part(part)
if not node:
raise DeviceUnavailable(
'No mounted devices found suitable to Handoff reconciler '
'container %s in partition %s' % (container, part))
hsh = hash_path(account, container)
db_dir = storage_directory(DATADIR, part, hsh)
db_path = os.path.join(self.root, node['device'], db_dir, hsh + '.db')
broker = ContainerBroker(db_path, account=account, container=container)
if not os.path.exists(broker.db_file):
try:
broker.initialize(timestamp, 0)
except DatabaseAlreadyExists:
pass
if self.reconciler_containers is not None:
self.reconciler_containers[container] = part, broker, node['id']
return broker
def feed_reconciler(self, container, item_list):
"""
Add queue entries for rows in item_list to the local reconciler
container database.
:param container: the name of the reconciler container
:param item_list: the list of rows to enqueue
:returns: True if successfully enqueued
"""
try:
reconciler = self.get_reconciler_broker(container)
except DeviceUnavailable as e:
self.logger.warning('DeviceUnavailable: %s', e)
return False
self.logger.debug('Adding %d objects to the reconciler at %s',
len(item_list), reconciler.db_file)
try:
reconciler.merge_items(item_list)
except (Exception, Timeout):
self.logger.exception('UNHANDLED EXCEPTION: trying to merge '
'%d items to reconciler container %s',
len(item_list), reconciler.db_file)
return False
return True
def dump_to_reconciler(self, broker, point):
"""
Look for object rows for objects updates in the wrong storage policy
in broker with a ``ROWID`` greater than the rowid given as point.
:param broker: the container broker with misplaced objects
:param point: the last verified ``reconciler_sync_point``
:returns: the last successful enqueued rowid
"""
max_sync = broker.get_max_row()
misplaced = broker.get_misplaced_since(point, self.per_diff)
if not misplaced:
return max_sync
translator = get_row_to_q_entry_translator(broker)
errors = False
low_sync = point
while misplaced:
batches = defaultdict(list)
for item in misplaced:
container = get_reconciler_container_name(item['created_at'])
batches[container].append(translator(item))
for container, item_list in batches.items():
success = self.feed_reconciler(container, item_list)
if not success:
errors = True
point = misplaced[-1]['ROWID']
if not errors:
low_sync = point
misplaced = broker.get_misplaced_since(point, self.per_diff)
return low_sync
def _post_replicate_hook(self, broker, info, responses):
if info['account'] == MISPLACED_OBJECTS_ACCOUNT:
return
try:
self.sync_store.update_sync_store(broker)
except Exception:
self.logger.exception('Failed to update sync_store %s' %
broker.db_file)
point = broker.get_reconciler_sync()
if not broker.has_multiple_policies() and info['max_row'] != point:
broker.update_reconciler_sync(info['max_row'])
return
max_sync = self.dump_to_reconciler(broker, point)
success = responses.count(True) >= majority_size(len(responses))
if max_sync > point and success:
# to be safe, only slide up the sync point with a majority on
# replication
broker.update_reconciler_sync(max_sync)
def delete_db(self, broker):
"""
Ensure that reconciler databases are only cleaned up at the end of the
replication run.
"""
if (self.reconciler_cleanups is not None and
broker.account == MISPLACED_OBJECTS_ACCOUNT):
# this container shouldn't be here, make sure it's cleaned up
self.reconciler_cleanups[broker.container] = broker
return
try:
# DB is going to get deleted. Be preemptive about it
self.sync_store.remove_synced_container(broker)
except Exception:
self.logger.exception('Failed to remove sync_store entry %s' %
broker.db_file)
return super(ContainerReplicator, self).delete_db(broker)
def replicate_reconcilers(self):
"""
Ensure any items merged to reconciler containers during replication
are pushed out to correct nodes and any reconciler containers that do
not belong on this node are removed.
"""
self.logger.info('Replicating %d reconciler containers',
len(self.reconciler_containers))
for part, reconciler, node_id in self.reconciler_containers.values():
self.cpool.spawn_n(
self._replicate_object, part, reconciler.db_file, node_id)
self.cpool.waitall()
# wipe out the cache do disable bypass in delete_db
cleanups = self.reconciler_cleanups
self.reconciler_cleanups = self.reconciler_containers = None
self.logger.info('Cleaning up %d reconciler containers',
len(cleanups))
for reconciler in cleanups.values():
self.cpool.spawn_n(self.delete_db, reconciler)
self.cpool.waitall()
self.logger.info('Finished reconciler replication')
def run_once(self, *args, **kwargs):
self.reconciler_containers = {}
self.reconciler_cleanups = {}
self.sync_store = ContainerSyncStore(self.root,
self.logger,
self.mount_check)
rv = super(ContainerReplicator, self).run_once(*args, **kwargs)
if any([self.reconciler_containers, self.reconciler_cleanups]):
self.replicate_reconcilers()
return rv
class ContainerReplicatorRpc(db_replicator.ReplicatorRpc):
def _parse_sync_args(self, args):
parent = super(ContainerReplicatorRpc, self)
remote_info = parent._parse_sync_args(args)
if len(args) > 9:
remote_info['status_changed_at'] = args[7]
remote_info['count'] = args[8]
remote_info['storage_policy_index'] = args[9]
return remote_info
def _get_synced_replication_info(self, broker, remote_info):
"""
Sync the remote_info storage_policy_index if needed and return the
newly synced replication info.
:param broker: the database broker
:param remote_info: the remote replication info
:returns: local broker replication info
"""
info = broker.get_replication_info()
if incorrect_policy_index(info, remote_info):
status_changed_at = Timestamp(time.time()).internal
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at)
info = broker.get_replication_info()
return info
| parent = super(ContainerReplicator, self)
if is_success(response.status):
remote_info = json.loads(response.data)
if incorrect_policy_index(info, remote_info):
status_changed_at = Timestamp(time.time())
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at.internal)
sync_timestamps = ('created_at', 'put_timestamp',
'delete_timestamp')
if any(info[key] != remote_info[key] for key in sync_timestamps):
broker.merge_timestamps(*(remote_info[key] for key in
sync_timestamps))
rv = parent._handle_sync_response(
node, response, info, broker, http, different_region)
return rv | identifier_body |
aoj2400.py | while True:
t, p, r = map(int, input().split())
if t == 0 and p == 0 and r == 0: break
logs = [input().split() for _ in range(r)]
score = [[0, 0, -i, [0] * p] for i in range(t)]
c_n, pen, w_n = 0, 1, 3
for tid, pid, time, msg in logs:
tid, pid, time = int(tid) - 1, int(pid) - 1, int(time)
if msg == 'WRONG': | score[tid][w_n][pid] += 1
elif msg == 'CORRECT':
score[tid][c_n] += 1
score[tid][pen] -= (score[tid][w_n][pid] * 1200 + time)
score[tid][w_n][pid] = 0
score = sorted(score, reverse=True)
for c_n, pen, t, _ in score:
print(abs(t) + 1 , c_n, abs(pen)) | random_line_split |
|
aoj2400.py | while True:
t, p, r = map(int, input().split())
if t == 0 and p == 0 and r == 0: break
logs = [input().split() for _ in range(r)]
score = [[0, 0, -i, [0] * p] for i in range(t)]
c_n, pen, w_n = 0, 1, 3
for tid, pid, time, msg in logs:
tid, pid, time = int(tid) - 1, int(pid) - 1, int(time)
if msg == 'WRONG':
score[tid][w_n][pid] += 1
elif msg == 'CORRECT':
score[tid][c_n] += 1
score[tid][pen] -= (score[tid][w_n][pid] * 1200 + time)
score[tid][w_n][pid] = 0
score = sorted(score, reverse=True)
for c_n, pen, t, _ in score:
| print(abs(t) + 1 , c_n, abs(pen)) | conditional_block |
|
variables_9.js | var searchData=
[ | ['statistics_2',['statistics',['../struct_vma_detailed_statistics.html#a13efbdb35bd1291191d275f43e96d360',1,'VmaDetailedStatistics::statistics()'],['../struct_vma_budget.html#a6d15ab3a798fd62d9efa3a1e1f83bf54',1,'VmaBudget::statistics()']]]
]; | ['size_0',['size',['../struct_vma_allocation_info.html#aac76d113a6a5ccbb09fea00fb25fd18f',1,'VmaAllocationInfo::size()'],['../struct_vma_virtual_block_create_info.html#a670ab8c6a6e822f3c36781d79e8824e9',1,'VmaVirtualBlockCreateInfo::size()'],['../struct_vma_virtual_allocation_create_info.html#aae08752b86817abd0d944c6025dc603e',1,'VmaVirtualAllocationCreateInfo::size()'],['../struct_vma_virtual_allocation_info.html#afb6d6bd0a6813869ea0842048d40aa2b',1,'VmaVirtualAllocationInfo::size()']]],
['srcallocation_1',['srcAllocation',['../struct_vma_defragmentation_move.html#a25aa1bb64efc507a49c6cbc50689f862',1,'VmaDefragmentationMove']]], | random_line_split |
dao.py | '''
Created on Aug 29, 2015
@author: kevinchien
'''
import datetime
# from bson import ObjectId
from tornado.gen import Task, Return
from tornado.gen import coroutine
from src.common.logutil import get_logger
# from src.core.mongoutil import get_instance
#
# @coroutine
# def update_auth(auth_info):
# new_auth_info = auth_info.copy()
# new_auth_info['updated_at'] = datetime.datetime.utcnow()
#
# criteria = {"user_id": new_auth_info.get('user_id'),
# "access_token": new_auth_info.get('access_token'),
# "refresh_token": new_auth_info.get('refresh_token')} | # result, error = yield Task(get_instance().auth_info.update, criteria, fields)
# if error is not None:
# raise error
#
# raise Return(result) | #
# fields = {'$set': new_auth_info}
# | random_line_split |
tests.py | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance | # http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from nose.tools import assert_true, assert_equal, assert_not_equal
from hadoop.yarn import clients
LOG = logging.getLogger(__name__)
def test_get_log_client():
old_max_heap_size = clients.MAX_HEAP_SIZE
clients.MAX_HEAP_SIZE = 2
try:
log_link1 = "http://test1:8041/container/nonsense"
log_link2 = "http://test2:8041/container/nonsense"
log_link3 = "http://test3:8041/container/nonsense"
c1 = clients.get_log_client(log_link1)
c2 = clients.get_log_client(log_link2)
assert_not_equal(c1, c2)
assert_equal(c1, clients.get_log_client(log_link1))
clients.get_log_client(log_link3)
assert_equal(2, len(clients._log_client_heap))
base_urls = [tup[1].base_url for tup in clients._log_client_heap]
assert_true('http://test1:8041' in base_urls)
assert_true('http://test3:8041' in base_urls)
finally:
clients.MAX_HEAP_SIZE = old_max_heap_size | # with the License. You may obtain a copy of the License at
# | random_line_split |
tests.py | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from nose.tools import assert_true, assert_equal, assert_not_equal
from hadoop.yarn import clients
LOG = logging.getLogger(__name__)
def | ():
old_max_heap_size = clients.MAX_HEAP_SIZE
clients.MAX_HEAP_SIZE = 2
try:
log_link1 = "http://test1:8041/container/nonsense"
log_link2 = "http://test2:8041/container/nonsense"
log_link3 = "http://test3:8041/container/nonsense"
c1 = clients.get_log_client(log_link1)
c2 = clients.get_log_client(log_link2)
assert_not_equal(c1, c2)
assert_equal(c1, clients.get_log_client(log_link1))
clients.get_log_client(log_link3)
assert_equal(2, len(clients._log_client_heap))
base_urls = [tup[1].base_url for tup in clients._log_client_heap]
assert_true('http://test1:8041' in base_urls)
assert_true('http://test3:8041' in base_urls)
finally:
clients.MAX_HEAP_SIZE = old_max_heap_size
| test_get_log_client | identifier_name |
tests.py | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from nose.tools import assert_true, assert_equal, assert_not_equal
from hadoop.yarn import clients
LOG = logging.getLogger(__name__)
def test_get_log_client():
| old_max_heap_size = clients.MAX_HEAP_SIZE
clients.MAX_HEAP_SIZE = 2
try:
log_link1 = "http://test1:8041/container/nonsense"
log_link2 = "http://test2:8041/container/nonsense"
log_link3 = "http://test3:8041/container/nonsense"
c1 = clients.get_log_client(log_link1)
c2 = clients.get_log_client(log_link2)
assert_not_equal(c1, c2)
assert_equal(c1, clients.get_log_client(log_link1))
clients.get_log_client(log_link3)
assert_equal(2, len(clients._log_client_heap))
base_urls = [tup[1].base_url for tup in clients._log_client_heap]
assert_true('http://test1:8041' in base_urls)
assert_true('http://test3:8041' in base_urls)
finally:
clients.MAX_HEAP_SIZE = old_max_heap_size | identifier_body |
|
base_entity_manager.py | from __future__ import absolute_import
import momoko
from tornado import gen
from psycopg2.extras import RealDictConnection
def initialize_database():
db = momoko.Pool(
dsn='''dbname=nightson user=vswamy password=vswamy host=localhost port=5432''',
size=5,
connection_factory=RealDictConnection,
)
db.connect()
return db
class BaseEntityManager(object):
db = initialize_database()
def __init__(self):
pass
def __init__(self, request):
self.request = request
@gen.coroutine
def | (self, sql):
''' Executes an sql statement and returns the value '''
cursor = yield BaseEntityManager.db.execute(sql)
raise gen.Return(cursor)
def get_value(self, key):
''' Gets a value given dictionary like arguments'''
params = {}
if(self.request.method == 'GET'):
params = self.request.query_arguments
elif(self.request.method == 'POST'):
params = self.request.body_arguments
elif(self.request.method == 'PUT'):
params = self.request.arguments
elif(self.request.method == 'DELETE'):
params = self.request.body_arguments
if(key not in params):
return None
''' Params will always be of the form key:[values] '''
return params.get(key)[0] | execute_sql | identifier_name |
base_entity_manager.py | from __future__ import absolute_import
import momoko
from tornado import gen
from psycopg2.extras import RealDictConnection
def initialize_database():
db = momoko.Pool(
dsn='''dbname=nightson user=vswamy password=vswamy host=localhost port=5432''',
size=5,
connection_factory=RealDictConnection,
)
db.connect()
return db
class BaseEntityManager(object):
db = initialize_database()
def __init__(self):
pass
def __init__(self, request):
self.request = request
@gen.coroutine
def execute_sql(self, sql):
''' Executes an sql statement and returns the value '''
cursor = yield BaseEntityManager.db.execute(sql)
raise gen.Return(cursor)
def get_value(self, key):
''' Gets a value given dictionary like arguments'''
params = {}
if(self.request.method == 'GET'):
params = self.request.query_arguments
elif(self.request.method == 'POST'):
params = self.request.body_arguments
elif(self.request.method == 'PUT'):
|
elif(self.request.method == 'DELETE'):
params = self.request.body_arguments
if(key not in params):
return None
''' Params will always be of the form key:[values] '''
return params.get(key)[0] | params = self.request.arguments | conditional_block |
base_entity_manager.py | from __future__ import absolute_import
import momoko
from tornado import gen
from psycopg2.extras import RealDictConnection
def initialize_database():
db = momoko.Pool(
dsn='''dbname=nightson user=vswamy password=vswamy host=localhost port=5432''',
size=5,
connection_factory=RealDictConnection,
)
db.connect()
return db
class BaseEntityManager(object):
db = initialize_database()
def __init__(self):
pass |
def __init__(self, request):
self.request = request
@gen.coroutine
def execute_sql(self, sql):
''' Executes an sql statement and returns the value '''
cursor = yield BaseEntityManager.db.execute(sql)
raise gen.Return(cursor)
def get_value(self, key):
''' Gets a value given dictionary like arguments'''
params = {}
if(self.request.method == 'GET'):
params = self.request.query_arguments
elif(self.request.method == 'POST'):
params = self.request.body_arguments
elif(self.request.method == 'PUT'):
params = self.request.arguments
elif(self.request.method == 'DELETE'):
params = self.request.body_arguments
if(key not in params):
return None
''' Params will always be of the form key:[values] '''
return params.get(key)[0] | random_line_split |
|
base_entity_manager.py | from __future__ import absolute_import
import momoko
from tornado import gen
from psycopg2.extras import RealDictConnection
def initialize_database():
db = momoko.Pool(
dsn='''dbname=nightson user=vswamy password=vswamy host=localhost port=5432''',
size=5,
connection_factory=RealDictConnection,
)
db.connect()
return db
class BaseEntityManager(object):
db = initialize_database()
def __init__(self):
pass
def __init__(self, request):
self.request = request
@gen.coroutine
def execute_sql(self, sql):
''' Executes an sql statement and returns the value '''
cursor = yield BaseEntityManager.db.execute(sql)
raise gen.Return(cursor)
def get_value(self, key):
| ''' Gets a value given dictionary like arguments'''
params = {}
if(self.request.method == 'GET'):
params = self.request.query_arguments
elif(self.request.method == 'POST'):
params = self.request.body_arguments
elif(self.request.method == 'PUT'):
params = self.request.arguments
elif(self.request.method == 'DELETE'):
params = self.request.body_arguments
if(key not in params):
return None
''' Params will always be of the form key:[values] '''
return params.get(key)[0] | identifier_body |
|
CollapsedBreadcrumbs.js | /* eslint-disable jsx-a11y/anchor-is-valid */
import React from 'react';
import { makeStyles } from '@material-ui/core/styles';
import Paper from '@material-ui/core/Paper';
import Breadcrumbs from '@material-ui/core/Breadcrumbs';
import Typography from '@material-ui/core/Typography';
import Link from '@material-ui/core/Link';
const useStyles = makeStyles(theme => ({
root: {
justifyContent: 'center',
flexWrap: 'wrap',
},
paper: {
padding: theme.spacing(1, 2),
},
})); | function handleClick(event) {
event.preventDefault();
alert('You clicked a breadcrumb.');
}
export default function CollapsedBreadcrumbs() {
const classes = useStyles();
return (
<Paper elevation={0} className={classes.paper}>
<Breadcrumbs maxItems={2} aria-label="breadcrumb">
<Link color="inherit" href="#" onClick={handleClick}>
Home
</Link>
<Link color="inherit" href="#" onClick={handleClick}>
Catalog
</Link>
<Link color="inherit" href="#" onClick={handleClick}>
Accessories
</Link>
<Link color="inherit" href="#" onClick={handleClick}>
New Collection
</Link>
<Typography color="textPrimary">Belts</Typography>
</Breadcrumbs>
</Paper>
);
} | random_line_split |
|
CollapsedBreadcrumbs.js | /* eslint-disable jsx-a11y/anchor-is-valid */
import React from 'react';
import { makeStyles } from '@material-ui/core/styles';
import Paper from '@material-ui/core/Paper';
import Breadcrumbs from '@material-ui/core/Breadcrumbs';
import Typography from '@material-ui/core/Typography';
import Link from '@material-ui/core/Link';
const useStyles = makeStyles(theme => ({
root: {
justifyContent: 'center',
flexWrap: 'wrap',
},
paper: {
padding: theme.spacing(1, 2),
},
}));
function handleClick(event) |
export default function CollapsedBreadcrumbs() {
const classes = useStyles();
return (
<Paper elevation={0} className={classes.paper}>
<Breadcrumbs maxItems={2} aria-label="breadcrumb">
<Link color="inherit" href="#" onClick={handleClick}>
Home
</Link>
<Link color="inherit" href="#" onClick={handleClick}>
Catalog
</Link>
<Link color="inherit" href="#" onClick={handleClick}>
Accessories
</Link>
<Link color="inherit" href="#" onClick={handleClick}>
New Collection
</Link>
<Typography color="textPrimary">Belts</Typography>
</Breadcrumbs>
</Paper>
);
}
| {
event.preventDefault();
alert('You clicked a breadcrumb.');
} | identifier_body |
CollapsedBreadcrumbs.js | /* eslint-disable jsx-a11y/anchor-is-valid */
import React from 'react';
import { makeStyles } from '@material-ui/core/styles';
import Paper from '@material-ui/core/Paper';
import Breadcrumbs from '@material-ui/core/Breadcrumbs';
import Typography from '@material-ui/core/Typography';
import Link from '@material-ui/core/Link';
const useStyles = makeStyles(theme => ({
root: {
justifyContent: 'center',
flexWrap: 'wrap',
},
paper: {
padding: theme.spacing(1, 2),
},
}));
function | (event) {
event.preventDefault();
alert('You clicked a breadcrumb.');
}
export default function CollapsedBreadcrumbs() {
const classes = useStyles();
return (
<Paper elevation={0} className={classes.paper}>
<Breadcrumbs maxItems={2} aria-label="breadcrumb">
<Link color="inherit" href="#" onClick={handleClick}>
Home
</Link>
<Link color="inherit" href="#" onClick={handleClick}>
Catalog
</Link>
<Link color="inherit" href="#" onClick={handleClick}>
Accessories
</Link>
<Link color="inherit" href="#" onClick={handleClick}>
New Collection
</Link>
<Typography color="textPrimary">Belts</Typography>
</Breadcrumbs>
</Paper>
);
}
| handleClick | identifier_name |
planet.py | import mcpi.minecraft as minecraft
import mcpi.block as block
import mcpi.minecraftstuff as mcstuff
from time import sleep
class Planet():
def __init__(self, pos, radius, blockType, blockData = 0):
self.mc = minecraft.Minecraft.create()
self.pos = pos
self.radius = radius
self.blockType = blockType
self.blockData = blockData
self._draw()
def _draw(self):
mcDraw = mcstuff.MinecraftDrawing(self.mc)
mcDraw.drawHollowSphere(self.pos.x, self.pos.y, self.pos.z,
self.radius, self.blockType, self.blockData)
| def destroy(self, delay):
mcDraw = mcstuff.MinecraftDrawing(self.mc)
#mcDraw.drawHollowSphere(self.pos.x, self.pos.y, self.pos.z,
# self.radius, block.LAVA_STATIONARY.id)
#sleep(delayLava)
mcDraw.drawHollowSphere(self.pos.x, self.pos.y, self.pos.z,
self.radius, block.COBBLESTONE.id)
sleep(delay)
self.clear()
def clear(self):
mcDraw = mcstuff.MinecraftDrawing(self.mc)
mcDraw.drawSphere(self.pos.x, self.pos.y, self.pos.z,
self.radius, block.AIR.id) | random_line_split |
|
planet.py | import mcpi.minecraft as minecraft
import mcpi.block as block
import mcpi.minecraftstuff as mcstuff
from time import sleep
class Planet():
def __init__(self, pos, radius, blockType, blockData = 0):
self.mc = minecraft.Minecraft.create()
self.pos = pos
self.radius = radius
self.blockType = blockType
self.blockData = blockData
self._draw()
def _draw(self):
mcDraw = mcstuff.MinecraftDrawing(self.mc)
mcDraw.drawHollowSphere(self.pos.x, self.pos.y, self.pos.z,
self.radius, self.blockType, self.blockData)
def | (self, delay):
mcDraw = mcstuff.MinecraftDrawing(self.mc)
#mcDraw.drawHollowSphere(self.pos.x, self.pos.y, self.pos.z,
# self.radius, block.LAVA_STATIONARY.id)
#sleep(delayLava)
mcDraw.drawHollowSphere(self.pos.x, self.pos.y, self.pos.z,
self.radius, block.COBBLESTONE.id)
sleep(delay)
self.clear()
def clear(self):
mcDraw = mcstuff.MinecraftDrawing(self.mc)
mcDraw.drawSphere(self.pos.x, self.pos.y, self.pos.z,
self.radius, block.AIR.id)
| destroy | identifier_name |
planet.py | import mcpi.minecraft as minecraft
import mcpi.block as block
import mcpi.minecraftstuff as mcstuff
from time import sleep
class Planet():
| def __init__(self, pos, radius, blockType, blockData = 0):
self.mc = minecraft.Minecraft.create()
self.pos = pos
self.radius = radius
self.blockType = blockType
self.blockData = blockData
self._draw()
def _draw(self):
mcDraw = mcstuff.MinecraftDrawing(self.mc)
mcDraw.drawHollowSphere(self.pos.x, self.pos.y, self.pos.z,
self.radius, self.blockType, self.blockData)
def destroy(self, delay):
mcDraw = mcstuff.MinecraftDrawing(self.mc)
#mcDraw.drawHollowSphere(self.pos.x, self.pos.y, self.pos.z,
# self.radius, block.LAVA_STATIONARY.id)
#sleep(delayLava)
mcDraw.drawHollowSphere(self.pos.x, self.pos.y, self.pos.z,
self.radius, block.COBBLESTONE.id)
sleep(delay)
self.clear()
def clear(self):
mcDraw = mcstuff.MinecraftDrawing(self.mc)
mcDraw.drawSphere(self.pos.x, self.pos.y, self.pos.z,
self.radius, block.AIR.id) | identifier_body |
|
TierSelect.tsx | import BungieImage from 'app/dim-ui/BungieImage';
import { t } from 'app/i18next-t';
import { useD2Definitions } from 'app/manifest/selectors';
import { AppIcon, dragHandleIcon } from 'app/shell/icons';
import { DestinyStatDefinition } from 'bungie-api-ts/destiny2';
import clsx from 'clsx';
import _ from 'lodash';
import React from 'react';
import { DragDropContext, Draggable, Droppable, DropResult } from 'react-beautiful-dnd';
import { ArmorStatHashes, MinMaxIgnored, StatFilters, StatRanges } from '../types';
import { statTierWithHalf } from '../utils';
import styles from './TierSelect.m.scss';
const IGNORE = 'ignore';
const INCLUDE = 'include';
const MinMaxSelect = React.memo(MinMaxSelectInner);
/**
* A selector that allows for choosing minimum and maximum stat ranges, plus reordering the stat priority.
*/
export default function TierSelect({
stats,
statRangesFiltered,
order,
onStatOrderChanged,
onStatFiltersChanged,
}: {
stats: StatFilters;
/** The ranges the stats could have gotten to INCLUDING stat filters and mod compatibility */
statRangesFiltered?: Readonly<StatRanges>;
order: number[]; // stat hashes in user order
onStatOrderChanged(order: ArmorStatHashes[]): void;
onStatFiltersChanged(stats: StatFilters): void;
}) { | ) => {
const newTiers = {
...stats,
[statHash]: { ...stats[statHash], ...changed },
};
onStatFiltersChanged(newTiers);
};
const statDefs: { [statHash: number]: DestinyStatDefinition } = {};
for (const statHash of order) {
statDefs[statHash] = defs.Stat.get(statHash);
}
const onDragEnd = (result: DropResult) => {
// dropped outside the list
if (!result.destination) {
return;
}
const newOrder = reorder(order, result.source.index, result.destination.index);
onStatOrderChanged(newOrder);
};
return (
<DragDropContext onDragEnd={onDragEnd}>
<Droppable droppableId="droppable">
{(provided) => (
<div ref={provided.innerRef}>
{order.map((statHash: number, index) => (
<DraggableItem
key={statHash}
id={statHash.toString()}
index={index}
className={styles.row}
name={
<span
className={clsx(
{ [styles.ignored]: stats[statHash].ignored },
styles.statDisplayInfo
)}
>
<BungieImage
className={styles.iconStat}
src={statDefs[statHash].displayProperties.icon}
/>
<span
className={styles.statName}
title={statDefs[statHash].displayProperties.name}
>
{statDefs[statHash].displayProperties.name}
</span>
</span>
}
>
<span className={styles.range}>
{statRangesFiltered
? t('LoadoutBuilder.MaxTier', {
tier: t('LoadoutBuilder.TierNumber', {
tier: statTierWithHalf(statRangesFiltered[statHash].max),
}),
})
: '-'}
</span>
<MinMaxSelect
statHash={statHash}
stat={stats[statHash]}
type="Min"
handleTierChange={handleTierChange}
/>
<MinMaxSelect
statHash={statHash}
stat={stats[statHash]}
type="Max"
handleTierChange={handleTierChange}
/>
</DraggableItem>
))}
{provided.placeholder}
</div>
)}
</Droppable>
</DragDropContext>
);
}
function DraggableItem({
id,
index,
name,
className,
children,
}: {
id: string;
index: number;
className: string;
name: React.ReactNode;
children: React.ReactNode;
}) {
return (
<Draggable draggableId={id} index={index}>
{(provided) => (
<div
className={className}
data-index={index}
ref={provided.innerRef}
{...provided.draggableProps}
>
<label {...provided.dragHandleProps}>
<span className={styles.grip}>
<AppIcon icon={dragHandleIcon} />
</span>
{name}
</label>
{children}
</div>
)}
</Draggable>
);
}
function MinMaxSelectInner({
statHash,
type,
stat,
handleTierChange,
}: {
statHash: number;
type: 'Min' | 'Max';
/** Filter config for a single stat */
stat: MinMaxIgnored;
handleTierChange(
statHash: number,
changed: {
min: number;
max: number;
ignored: boolean;
}
): void;
}) {
const min = 0;
const max = 10;
const ignored = stat.ignored;
function handleChange(e: React.ChangeEvent<HTMLSelectElement>) {
let update: {
min: number;
max: number;
ignored: boolean;
};
if (e.target.value === IGNORE || e.target.value === INCLUDE) {
update = {
min: stat.min,
max: stat.max,
ignored: e.target.value === IGNORE,
};
} else {
const value = parseInt(e.target.value, 10);
const lower = type.toLowerCase();
const opposite = lower === 'min' ? 'max' : 'min';
update = {
[lower]: value,
[opposite]: opposite === 'min' ? Math.min(stat.min, value) : Math.max(stat.max, value),
ignored: false,
} as typeof update;
}
handleTierChange(statHash, update);
}
const value = type === 'Min' ? Math.max(min, stat.min) : Math.min(max, stat.max);
return (
<select
className={type === 'Min' ? styles.minimum : styles.maximum}
value={ignored ? '-' : value}
onChange={handleChange}
>
<option disabled>{t(`LoadoutBuilder.Select${type}`, { contextList: 'minMax' })}</option>
{!ignored &&
_.range(min, max + 1).map((tier) => (
<option key={tier} value={tier}>
{t('LoadoutBuilder.TierNumber', {
tier,
})}
</option>
))}
<option key="-" value="-" disabled>
-
</option>
{ignored ? (
<option key={INCLUDE} value={INCLUDE}>
{t('LoadoutBuilder.StatTierIncludeOption')}
</option>
) : (
<option key={IGNORE} value={IGNORE}>
{t('LoadoutBuilder.StatTierIgnoreOption')}
</option>
)}
</select>
);
}
// a little function to help us with reordering the result
function reorder<T>(list: T[], startIndex: number, endIndex: number): T[] {
const result = Array.from(list);
const [removed] = result.splice(startIndex, 1);
result.splice(endIndex, 0, removed);
return result;
} | const defs = useD2Definitions()!;
const handleTierChange = (
statHash: number,
changed: { min?: number; max?: number; ignored: boolean } | random_line_split |
TierSelect.tsx | import BungieImage from 'app/dim-ui/BungieImage';
import { t } from 'app/i18next-t';
import { useD2Definitions } from 'app/manifest/selectors';
import { AppIcon, dragHandleIcon } from 'app/shell/icons';
import { DestinyStatDefinition } from 'bungie-api-ts/destiny2';
import clsx from 'clsx';
import _ from 'lodash';
import React from 'react';
import { DragDropContext, Draggable, Droppable, DropResult } from 'react-beautiful-dnd';
import { ArmorStatHashes, MinMaxIgnored, StatFilters, StatRanges } from '../types';
import { statTierWithHalf } from '../utils';
import styles from './TierSelect.m.scss';
const IGNORE = 'ignore';
const INCLUDE = 'include';
const MinMaxSelect = React.memo(MinMaxSelectInner);
/**
* A selector that allows for choosing minimum and maximum stat ranges, plus reordering the stat priority.
*/
export default function TierSelect({
stats,
statRangesFiltered,
order,
onStatOrderChanged,
onStatFiltersChanged,
}: {
stats: StatFilters;
/** The ranges the stats could have gotten to INCLUDING stat filters and mod compatibility */
statRangesFiltered?: Readonly<StatRanges>;
order: number[]; // stat hashes in user order
onStatOrderChanged(order: ArmorStatHashes[]): void;
onStatFiltersChanged(stats: StatFilters): void;
}) {
const defs = useD2Definitions()!;
const handleTierChange = (
statHash: number,
changed: { min?: number; max?: number; ignored: boolean }
) => {
const newTiers = {
...stats,
[statHash]: { ...stats[statHash], ...changed },
};
onStatFiltersChanged(newTiers);
};
const statDefs: { [statHash: number]: DestinyStatDefinition } = {};
for (const statHash of order) {
statDefs[statHash] = defs.Stat.get(statHash);
}
const onDragEnd = (result: DropResult) => {
// dropped outside the list
if (!result.destination) |
const newOrder = reorder(order, result.source.index, result.destination.index);
onStatOrderChanged(newOrder);
};
return (
<DragDropContext onDragEnd={onDragEnd}>
<Droppable droppableId="droppable">
{(provided) => (
<div ref={provided.innerRef}>
{order.map((statHash: number, index) => (
<DraggableItem
key={statHash}
id={statHash.toString()}
index={index}
className={styles.row}
name={
<span
className={clsx(
{ [styles.ignored]: stats[statHash].ignored },
styles.statDisplayInfo
)}
>
<BungieImage
className={styles.iconStat}
src={statDefs[statHash].displayProperties.icon}
/>
<span
className={styles.statName}
title={statDefs[statHash].displayProperties.name}
>
{statDefs[statHash].displayProperties.name}
</span>
</span>
}
>
<span className={styles.range}>
{statRangesFiltered
? t('LoadoutBuilder.MaxTier', {
tier: t('LoadoutBuilder.TierNumber', {
tier: statTierWithHalf(statRangesFiltered[statHash].max),
}),
})
: '-'}
</span>
<MinMaxSelect
statHash={statHash}
stat={stats[statHash]}
type="Min"
handleTierChange={handleTierChange}
/>
<MinMaxSelect
statHash={statHash}
stat={stats[statHash]}
type="Max"
handleTierChange={handleTierChange}
/>
</DraggableItem>
))}
{provided.placeholder}
</div>
)}
</Droppable>
</DragDropContext>
);
}
function DraggableItem({
id,
index,
name,
className,
children,
}: {
id: string;
index: number;
className: string;
name: React.ReactNode;
children: React.ReactNode;
}) {
return (
<Draggable draggableId={id} index={index}>
{(provided) => (
<div
className={className}
data-index={index}
ref={provided.innerRef}
{...provided.draggableProps}
>
<label {...provided.dragHandleProps}>
<span className={styles.grip}>
<AppIcon icon={dragHandleIcon} />
</span>
{name}
</label>
{children}
</div>
)}
</Draggable>
);
}
function MinMaxSelectInner({
statHash,
type,
stat,
handleTierChange,
}: {
statHash: number;
type: 'Min' | 'Max';
/** Filter config for a single stat */
stat: MinMaxIgnored;
handleTierChange(
statHash: number,
changed: {
min: number;
max: number;
ignored: boolean;
}
): void;
}) {
const min = 0;
const max = 10;
const ignored = stat.ignored;
function handleChange(e: React.ChangeEvent<HTMLSelectElement>) {
let update: {
min: number;
max: number;
ignored: boolean;
};
if (e.target.value === IGNORE || e.target.value === INCLUDE) {
update = {
min: stat.min,
max: stat.max,
ignored: e.target.value === IGNORE,
};
} else {
const value = parseInt(e.target.value, 10);
const lower = type.toLowerCase();
const opposite = lower === 'min' ? 'max' : 'min';
update = {
[lower]: value,
[opposite]: opposite === 'min' ? Math.min(stat.min, value) : Math.max(stat.max, value),
ignored: false,
} as typeof update;
}
handleTierChange(statHash, update);
}
const value = type === 'Min' ? Math.max(min, stat.min) : Math.min(max, stat.max);
return (
<select
className={type === 'Min' ? styles.minimum : styles.maximum}
value={ignored ? '-' : value}
onChange={handleChange}
>
<option disabled>{t(`LoadoutBuilder.Select${type}`, { contextList: 'minMax' })}</option>
{!ignored &&
_.range(min, max + 1).map((tier) => (
<option key={tier} value={tier}>
{t('LoadoutBuilder.TierNumber', {
tier,
})}
</option>
))}
<option key="-" value="-" disabled>
-
</option>
{ignored ? (
<option key={INCLUDE} value={INCLUDE}>
{t('LoadoutBuilder.StatTierIncludeOption')}
</option>
) : (
<option key={IGNORE} value={IGNORE}>
{t('LoadoutBuilder.StatTierIgnoreOption')}
</option>
)}
</select>
);
}
// a little function to help us with reordering the result
function reorder<T>(list: T[], startIndex: number, endIndex: number): T[] {
const result = Array.from(list);
const [removed] = result.splice(startIndex, 1);
result.splice(endIndex, 0, removed);
return result;
}
| {
return;
} | conditional_block |
no071.py | #!/usr/bin/env python
# neighbors
# a/b < c/d | # for positive integers a,b,c and d with a < b and c < d then a/b and c/d
# will be neighbours in the Farey sequence of order max(b,d).
# By listing the set of reduced proper fractions for D <= 1,000,000 in
# ascending order of size, find the numerator of the fraction immediately
# to the left of 3/7.
#########################################################
# c = 3, d = 7, 3b - 7a = 1
# 0 + 2a == 1 mod 3, a == 2 mod 3
# a = 3k + 2, b = 7k + 5
# a < b <==> 3k + 2 < 7k + 5, -3 < 4k, -0.75 < k, k >= 0
# a/b < 3/7 <==> 7a < 3b <==> 0 < 3b - 7a <==> ALWAYS
# gcd(a,b) = (3k+2,7k+5) = (3k+2,k+1) = (k,k+1) = 1
# b <= D
# 7k + 5 <= D
# k <= floor((D-5)/7)
from python.decorators import euler_timer
def main(verbose=False):
D = 10 ** 6
return 3 * int((D - 5) / 7.0) + 2
if __name__ == '__main__':
print euler_timer(71)(main)(verbose=True) | # need bc - ad = 1
# The converse is also true. If
# bc - ad = 1 | random_line_split |
no071.py | #!/usr/bin/env python
# neighbors
# a/b < c/d
# need bc - ad = 1
# The converse is also true. If
# bc - ad = 1
# for positive integers a,b,c and d with a < b and c < d then a/b and c/d
# will be neighbours in the Farey sequence of order max(b,d).
# By listing the set of reduced proper fractions for D <= 1,000,000 in
# ascending order of size, find the numerator of the fraction immediately
# to the left of 3/7.
#########################################################
# c = 3, d = 7, 3b - 7a = 1
# 0 + 2a == 1 mod 3, a == 2 mod 3
# a = 3k + 2, b = 7k + 5
# a < b <==> 3k + 2 < 7k + 5, -3 < 4k, -0.75 < k, k >= 0
# a/b < 3/7 <==> 7a < 3b <==> 0 < 3b - 7a <==> ALWAYS
# gcd(a,b) = (3k+2,7k+5) = (3k+2,k+1) = (k,k+1) = 1
# b <= D
# 7k + 5 <= D
# k <= floor((D-5)/7)
from python.decorators import euler_timer
def | (verbose=False):
D = 10 ** 6
return 3 * int((D - 5) / 7.0) + 2
if __name__ == '__main__':
print euler_timer(71)(main)(verbose=True)
| main | identifier_name |
no071.py | #!/usr/bin/env python
# neighbors
# a/b < c/d
# need bc - ad = 1
# The converse is also true. If
# bc - ad = 1
# for positive integers a,b,c and d with a < b and c < d then a/b and c/d
# will be neighbours in the Farey sequence of order max(b,d).
# By listing the set of reduced proper fractions for D <= 1,000,000 in
# ascending order of size, find the numerator of the fraction immediately
# to the left of 3/7.
#########################################################
# c = 3, d = 7, 3b - 7a = 1
# 0 + 2a == 1 mod 3, a == 2 mod 3
# a = 3k + 2, b = 7k + 5
# a < b <==> 3k + 2 < 7k + 5, -3 < 4k, -0.75 < k, k >= 0
# a/b < 3/7 <==> 7a < 3b <==> 0 < 3b - 7a <==> ALWAYS
# gcd(a,b) = (3k+2,7k+5) = (3k+2,k+1) = (k,k+1) = 1
# b <= D
# 7k + 5 <= D
# k <= floor((D-5)/7)
from python.decorators import euler_timer
def main(verbose=False):
D = 10 ** 6
return 3 * int((D - 5) / 7.0) + 2
if __name__ == '__main__':
| print euler_timer(71)(main)(verbose=True) | conditional_block |
|
no071.py | #!/usr/bin/env python
# neighbors
# a/b < c/d
# need bc - ad = 1
# The converse is also true. If
# bc - ad = 1
# for positive integers a,b,c and d with a < b and c < d then a/b and c/d
# will be neighbours in the Farey sequence of order max(b,d).
# By listing the set of reduced proper fractions for D <= 1,000,000 in
# ascending order of size, find the numerator of the fraction immediately
# to the left of 3/7.
#########################################################
# c = 3, d = 7, 3b - 7a = 1
# 0 + 2a == 1 mod 3, a == 2 mod 3
# a = 3k + 2, b = 7k + 5
# a < b <==> 3k + 2 < 7k + 5, -3 < 4k, -0.75 < k, k >= 0
# a/b < 3/7 <==> 7a < 3b <==> 0 < 3b - 7a <==> ALWAYS
# gcd(a,b) = (3k+2,7k+5) = (3k+2,k+1) = (k,k+1) = 1
# b <= D
# 7k + 5 <= D
# k <= floor((D-5)/7)
from python.decorators import euler_timer
def main(verbose=False):
|
if __name__ == '__main__':
print euler_timer(71)(main)(verbose=True)
| D = 10 ** 6
return 3 * int((D - 5) / 7.0) + 2 | identifier_body |
code_generator.py | #
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
""" A code generator (needed by ModToolAdd) """
from templates import Templates
import Cheetah.Template
from util_functions import str_to_fancyc_comment
from util_functions import str_to_python_comment
from util_functions import strip_default_values
from util_functions import strip_arg_types
from util_functions import strip_arg_types_grc
class GRMTemplate(Cheetah.Template.Template):
""" An extended template class """
def __init__(self, src, searchList):
self.grtypelist = {
'sync': 'sync_block',
'sink': 'sync_block',
'source': 'sync_block',
'decimator': 'sync_decimator',
'interpolator': 'sync_interpolator',
'general': 'block',
'tagged_stream': 'tagged_stream_block',
'hier': 'hier_block2',
'noblock': ''}
searchList['str_to_fancyc_comment'] = str_to_fancyc_comment
searchList['str_to_python_comment'] = str_to_python_comment
searchList['strip_default_values'] = strip_default_values
searchList['strip_arg_types'] = strip_arg_types
searchList['strip_arg_types_grc'] = strip_arg_types_grc
Cheetah.Template.Template.__init__(self, src, searchList=searchList)
self.grblocktype = self.grtypelist[searchList['blocktype']]
if searchList['is_component']:
self.include_dir_prefix = "gnuradio/" + searchList['modname']
else:
self.include_dir_prefix = searchList['modname']
def | (tpl_id, **kwargs):
""" Return the template given by tpl_id, parsed through Cheetah """
return str(GRMTemplate(Templates[tpl_id], searchList=kwargs))
| get_template | identifier_name |
code_generator.py | #
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, | import Cheetah.Template
from util_functions import str_to_fancyc_comment
from util_functions import str_to_python_comment
from util_functions import strip_default_values
from util_functions import strip_arg_types
from util_functions import strip_arg_types_grc
class GRMTemplate(Cheetah.Template.Template):
""" An extended template class """
def __init__(self, src, searchList):
self.grtypelist = {
'sync': 'sync_block',
'sink': 'sync_block',
'source': 'sync_block',
'decimator': 'sync_decimator',
'interpolator': 'sync_interpolator',
'general': 'block',
'tagged_stream': 'tagged_stream_block',
'hier': 'hier_block2',
'noblock': ''}
searchList['str_to_fancyc_comment'] = str_to_fancyc_comment
searchList['str_to_python_comment'] = str_to_python_comment
searchList['strip_default_values'] = strip_default_values
searchList['strip_arg_types'] = strip_arg_types
searchList['strip_arg_types_grc'] = strip_arg_types_grc
Cheetah.Template.Template.__init__(self, src, searchList=searchList)
self.grblocktype = self.grtypelist[searchList['blocktype']]
if searchList['is_component']:
self.include_dir_prefix = "gnuradio/" + searchList['modname']
else:
self.include_dir_prefix = searchList['modname']
def get_template(tpl_id, **kwargs):
""" Return the template given by tpl_id, parsed through Cheetah """
return str(GRMTemplate(Templates[tpl_id], searchList=kwargs)) | # Boston, MA 02110-1301, USA.
#
""" A code generator (needed by ModToolAdd) """
from templates import Templates | random_line_split |
code_generator.py | #
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
""" A code generator (needed by ModToolAdd) """
from templates import Templates
import Cheetah.Template
from util_functions import str_to_fancyc_comment
from util_functions import str_to_python_comment
from util_functions import strip_default_values
from util_functions import strip_arg_types
from util_functions import strip_arg_types_grc
class GRMTemplate(Cheetah.Template.Template):
""" An extended template class """
def __init__(self, src, searchList):
self.grtypelist = {
'sync': 'sync_block',
'sink': 'sync_block',
'source': 'sync_block',
'decimator': 'sync_decimator',
'interpolator': 'sync_interpolator',
'general': 'block',
'tagged_stream': 'tagged_stream_block',
'hier': 'hier_block2',
'noblock': ''}
searchList['str_to_fancyc_comment'] = str_to_fancyc_comment
searchList['str_to_python_comment'] = str_to_python_comment
searchList['strip_default_values'] = strip_default_values
searchList['strip_arg_types'] = strip_arg_types
searchList['strip_arg_types_grc'] = strip_arg_types_grc
Cheetah.Template.Template.__init__(self, src, searchList=searchList)
self.grblocktype = self.grtypelist[searchList['blocktype']]
if searchList['is_component']:
|
else:
self.include_dir_prefix = searchList['modname']
def get_template(tpl_id, **kwargs):
""" Return the template given by tpl_id, parsed through Cheetah """
return str(GRMTemplate(Templates[tpl_id], searchList=kwargs))
| self.include_dir_prefix = "gnuradio/" + searchList['modname'] | conditional_block |
code_generator.py | #
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
""" A code generator (needed by ModToolAdd) """
from templates import Templates
import Cheetah.Template
from util_functions import str_to_fancyc_comment
from util_functions import str_to_python_comment
from util_functions import strip_default_values
from util_functions import strip_arg_types
from util_functions import strip_arg_types_grc
class GRMTemplate(Cheetah.Template.Template):
|
def get_template(tpl_id, **kwargs):
""" Return the template given by tpl_id, parsed through Cheetah """
return str(GRMTemplate(Templates[tpl_id], searchList=kwargs))
| """ An extended template class """
def __init__(self, src, searchList):
self.grtypelist = {
'sync': 'sync_block',
'sink': 'sync_block',
'source': 'sync_block',
'decimator': 'sync_decimator',
'interpolator': 'sync_interpolator',
'general': 'block',
'tagged_stream': 'tagged_stream_block',
'hier': 'hier_block2',
'noblock': ''}
searchList['str_to_fancyc_comment'] = str_to_fancyc_comment
searchList['str_to_python_comment'] = str_to_python_comment
searchList['strip_default_values'] = strip_default_values
searchList['strip_arg_types'] = strip_arg_types
searchList['strip_arg_types_grc'] = strip_arg_types_grc
Cheetah.Template.Template.__init__(self, src, searchList=searchList)
self.grblocktype = self.grtypelist[searchList['blocktype']]
if searchList['is_component']:
self.include_dir_prefix = "gnuradio/" + searchList['modname']
else:
self.include_dir_prefix = searchList['modname'] | identifier_body |
VideoResultsList.tsx | import * as React from "react";
import SearchRadioButtons from "./SearchRadioButtons";
import VideoLite, { VideoLiteProps } from "./VideoLite";
import VideoPlaceholder from "./VideoPlaceholder";
import VideoResultPreview from "./VideoResultPreview";
import { VideoResultPreviewEventHandlers } from "./VideoResultPreview";
import isEqual = require("lodash/isEqual");
import {
fetchFactReturningPromise,
VideoFactHashMap
} from "../utils/databaseAPI";
import { alertErr } from "../utils/functions";
import { Search } from "../java2ts/Search";
import { Foundation } from "../java2ts/Foundation";
import { Promise } from "es6-promise";
export type SelectionOptions = "Containing" | "BeforeAndAfter";
export interface FactTurns {
videoFact: Foundation.VideoFactContent;
turns: number[];
}
interface SortedResults {
hash: string;
turns: number[];
}
interface VideoResultsListProps {
results: Search.FactResultList;
searchTerm: string;
}
interface VideoResultsListState {
fixVideo: boolean;
selectedOption: SelectionOptions;
factTurns: FactTurns[];
videoProps?: {
videoId: string;
clipRange?: [number, number];
};
}
class VideoResultsList extends React.Component<
VideoResultsListProps,
VideoResultsListState
> {
private sortedResults: SortedResults[];
constructor(props: VideoResultsListProps) {
super(props);
this.sortedResults = this.sortResults(props.results);
this.state = {
fixVideo: false,
selectedOption: "Containing",
factTurns: []
};
}
handleChange = (ev: React.ChangeEvent<HTMLInputElement>) => {
const value = ev.target.value;
if (value === "Containing" || value === "BeforeAndAfter") {
if (value !== this.state.selectedOption) {
this.setState({
selectedOption: value
});
}
} else {
const msg = "VideoResults: Unknown radio button";
alertErr(msg);
throw msg;
}
};
handlePlayClick = (
videoFact: Foundation.VideoFactContent,
clipRange: [number, number]
) => {
this.setState({
videoProps: {
videoId: videoFact.youtubeId,
clipRange: clipRange
}
});
};
handleReady = (youtubeId: string) => {
this.setState({
videoProps: {
videoId: youtubeId
}
});
};
handleScroll = (fixVideo: boolean) => {
if (this.state.fixVideo != fixVideo) {
this.setState({
fixVideo: fixVideo
});
}
};
fetchFacts = (): any => {
const promises = [];
for (const result of this.sortedResults) {
promises.push(fetchFactReturningPromise(result.hash));
}
Promise.all(promises).then(this.processFacts.bind(this));
};
processFacts(json: VideoFactHashMap[]) {
let factTurnsArr: FactTurns[] = [];
for (const videoFact of json) {
const currentHash = videoFact.hash;
const reducer = this.sortedResults.reduce(
(accumulator: SortedResults, currentValue: SortedResults) => {
if (accumulator.hash !== currentHash) {
// Skip accumulating until we match our hash
return currentValue;
}
if (currentValue.hash === currentHash) {
return {
hash: currentHash,
turns: accumulator.turns.concat(currentValue.turns)
};
} else {
return accumulator;
}
}
);
factTurnsArr.push({
turns: reducer.turns,
videoFact: videoFact.videoFact
});
}
this.setState({
factTurns: factTurnsArr
});
}
sortResults = (results: Search.FactResultList): SortedResults[] => {
const facts: Search.VideoResult[] = results.facts;
if (facts.length > 0) {
let sortedResults: SortedResults[] = [];
let prevHash = facts[0].hash;
let turns: number[] = [];
for (const videoResult of facts) {
if (videoResult.hash !== prevHash) {
sortedResults.push({
hash: prevHash,
turns: turns
});
prevHash = videoResult.hash;
turns = [videoResult.turn];
} else {
turns.push(videoResult.turn);
}
}
// Push last hash after loop is over
sortedResults.push({
hash: prevHash,
turns: turns
});
return sortedResults;
} else {
return [];
}
};
componentDidMount() {
this.fetchFacts();
}
componentWillReceiveProps(nextProps: VideoResultsListProps) {
if (!isEqual(this.props.results, nextProps.results)) |
}
render() {
const fixedClass = this.state.fixVideo ? "results__push" : "";
return (
<div className="results">
<div className="results__inner-container">
<h1 className="results__heading">Search Results</h1>
{this.state.factTurns.length === 0 ? (
<p className="turn__results">
Search returned no results for{" "}
<strong>{this.props.searchTerm}</strong>
</p>
) : (
<div>
{this.state.videoProps ? (
<VideoLite
{...this.state.videoProps}
onScroll={this.handleScroll}
isFixed={this.state.fixVideo}
/>
) : (
<VideoPlaceholder />
)}
<div className={fixedClass}>
<SearchRadioButtons
onChange={this.handleChange}
selectedOption={this.state.selectedOption}
/>
</div>
</div>
)}
{this.state.factTurns.map((videoResult, idx) => {
const eventHandlers: VideoResultPreviewEventHandlers = {
onPlayClick: this.handlePlayClick
};
if (idx === 0) {
eventHandlers.onReady = this.handleReady;
}
return (
<VideoResultPreview
key={idx.toString()}
eventHandlers={eventHandlers}
searchTerm={this.props.searchTerm}
sortBy={this.state.selectedOption}
turns={videoResult.turns}
videoFact={videoResult.videoFact}
/>
);
})}
</div>
</div>
);
}
}
export default VideoResultsList;
| {
this.sortedResults = this.sortResults(nextProps.results);
this.fetchFacts();
} | conditional_block |
VideoResultsList.tsx | import * as React from "react";
import SearchRadioButtons from "./SearchRadioButtons";
import VideoLite, { VideoLiteProps } from "./VideoLite";
import VideoPlaceholder from "./VideoPlaceholder";
import VideoResultPreview from "./VideoResultPreview";
import { VideoResultPreviewEventHandlers } from "./VideoResultPreview";
import isEqual = require("lodash/isEqual");
import {
fetchFactReturningPromise,
VideoFactHashMap
} from "../utils/databaseAPI";
import { alertErr } from "../utils/functions";
import { Search } from "../java2ts/Search";
import { Foundation } from "../java2ts/Foundation";
import { Promise } from "es6-promise";
export type SelectionOptions = "Containing" | "BeforeAndAfter";
export interface FactTurns {
videoFact: Foundation.VideoFactContent;
turns: number[];
}
interface SortedResults {
hash: string;
turns: number[];
}
interface VideoResultsListProps {
results: Search.FactResultList;
searchTerm: string;
}
interface VideoResultsListState {
fixVideo: boolean;
selectedOption: SelectionOptions;
factTurns: FactTurns[];
videoProps?: {
videoId: string;
clipRange?: [number, number];
};
}
class VideoResultsList extends React.Component<
VideoResultsListProps,
VideoResultsListState
> {
private sortedResults: SortedResults[];
constructor(props: VideoResultsListProps) {
super(props);
this.sortedResults = this.sortResults(props.results);
this.state = {
fixVideo: false,
selectedOption: "Containing",
factTurns: []
};
}
handleChange = (ev: React.ChangeEvent<HTMLInputElement>) => {
const value = ev.target.value;
if (value === "Containing" || value === "BeforeAndAfter") {
if (value !== this.state.selectedOption) {
this.setState({
selectedOption: value
});
}
} else {
const msg = "VideoResults: Unknown radio button";
alertErr(msg);
throw msg;
}
};
handlePlayClick = (
videoFact: Foundation.VideoFactContent,
clipRange: [number, number]
) => {
this.setState({
videoProps: {
videoId: videoFact.youtubeId,
clipRange: clipRange
}
});
};
handleReady = (youtubeId: string) => {
this.setState({
videoProps: {
videoId: youtubeId
}
});
};
handleScroll = (fixVideo: boolean) => {
if (this.state.fixVideo != fixVideo) {
this.setState({
fixVideo: fixVideo
});
}
};
fetchFacts = (): any => {
const promises = [];
for (const result of this.sortedResults) {
promises.push(fetchFactReturningPromise(result.hash));
}
Promise.all(promises).then(this.processFacts.bind(this));
};
processFacts(json: VideoFactHashMap[]) {
let factTurnsArr: FactTurns[] = [];
for (const videoFact of json) {
const currentHash = videoFact.hash;
const reducer = this.sortedResults.reduce(
(accumulator: SortedResults, currentValue: SortedResults) => {
if (accumulator.hash !== currentHash) {
// Skip accumulating until we match our hash
return currentValue;
}
if (currentValue.hash === currentHash) {
return {
hash: currentHash,
turns: accumulator.turns.concat(currentValue.turns)
};
} else {
return accumulator;
}
}
);
factTurnsArr.push({
turns: reducer.turns,
videoFact: videoFact.videoFact
});
}
this.setState({
factTurns: factTurnsArr
});
}
sortResults = (results: Search.FactResultList): SortedResults[] => {
const facts: Search.VideoResult[] = results.facts;
if (facts.length > 0) {
let sortedResults: SortedResults[] = [];
let prevHash = facts[0].hash;
let turns: number[] = [];
for (const videoResult of facts) {
if (videoResult.hash !== prevHash) {
sortedResults.push({
hash: prevHash,
turns: turns
});
prevHash = videoResult.hash;
turns = [videoResult.turn];
} else {
turns.push(videoResult.turn);
}
}
// Push last hash after loop is over
sortedResults.push({
hash: prevHash,
turns: turns
});
return sortedResults;
} else {
return [];
}
};
componentDidMount() |
componentWillReceiveProps(nextProps: VideoResultsListProps) {
if (!isEqual(this.props.results, nextProps.results)) {
this.sortedResults = this.sortResults(nextProps.results);
this.fetchFacts();
}
}
render() {
const fixedClass = this.state.fixVideo ? "results__push" : "";
return (
<div className="results">
<div className="results__inner-container">
<h1 className="results__heading">Search Results</h1>
{this.state.factTurns.length === 0 ? (
<p className="turn__results">
Search returned no results for{" "}
<strong>{this.props.searchTerm}</strong>
</p>
) : (
<div>
{this.state.videoProps ? (
<VideoLite
{...this.state.videoProps}
onScroll={this.handleScroll}
isFixed={this.state.fixVideo}
/>
) : (
<VideoPlaceholder />
)}
<div className={fixedClass}>
<SearchRadioButtons
onChange={this.handleChange}
selectedOption={this.state.selectedOption}
/>
</div>
</div>
)}
{this.state.factTurns.map((videoResult, idx) => {
const eventHandlers: VideoResultPreviewEventHandlers = {
onPlayClick: this.handlePlayClick
};
if (idx === 0) {
eventHandlers.onReady = this.handleReady;
}
return (
<VideoResultPreview
key={idx.toString()}
eventHandlers={eventHandlers}
searchTerm={this.props.searchTerm}
sortBy={this.state.selectedOption}
turns={videoResult.turns}
videoFact={videoResult.videoFact}
/>
);
})}
</div>
</div>
);
}
}
export default VideoResultsList;
| {
this.fetchFacts();
} | identifier_body |
VideoResultsList.tsx | import * as React from "react";
import SearchRadioButtons from "./SearchRadioButtons";
import VideoLite, { VideoLiteProps } from "./VideoLite";
import VideoPlaceholder from "./VideoPlaceholder";
import VideoResultPreview from "./VideoResultPreview";
import { VideoResultPreviewEventHandlers } from "./VideoResultPreview";
import isEqual = require("lodash/isEqual");
import {
fetchFactReturningPromise,
VideoFactHashMap
} from "../utils/databaseAPI";
import { alertErr } from "../utils/functions";
import { Search } from "../java2ts/Search";
import { Foundation } from "../java2ts/Foundation";
import { Promise } from "es6-promise";
export type SelectionOptions = "Containing" | "BeforeAndAfter";
export interface FactTurns {
videoFact: Foundation.VideoFactContent;
turns: number[];
}
interface SortedResults {
hash: string;
turns: number[];
}
interface VideoResultsListProps {
results: Search.FactResultList;
searchTerm: string;
}
interface VideoResultsListState {
fixVideo: boolean;
selectedOption: SelectionOptions;
factTurns: FactTurns[];
videoProps?: {
videoId: string;
clipRange?: [number, number];
};
}
class VideoResultsList extends React.Component<
VideoResultsListProps,
VideoResultsListState
> {
private sortedResults: SortedResults[];
constructor(props: VideoResultsListProps) {
super(props);
this.sortedResults = this.sortResults(props.results);
this.state = {
fixVideo: false,
selectedOption: "Containing",
factTurns: []
};
}
handleChange = (ev: React.ChangeEvent<HTMLInputElement>) => {
const value = ev.target.value;
if (value === "Containing" || value === "BeforeAndAfter") {
if (value !== this.state.selectedOption) {
this.setState({
selectedOption: value
});
}
} else {
const msg = "VideoResults: Unknown radio button";
alertErr(msg);
throw msg;
}
};
handlePlayClick = (
videoFact: Foundation.VideoFactContent,
clipRange: [number, number]
) => {
this.setState({
videoProps: {
videoId: videoFact.youtubeId,
clipRange: clipRange
}
});
};
handleReady = (youtubeId: string) => {
this.setState({
videoProps: {
videoId: youtubeId
}
});
};
handleScroll = (fixVideo: boolean) => {
if (this.state.fixVideo != fixVideo) {
this.setState({
fixVideo: fixVideo
});
}
};
fetchFacts = (): any => {
const promises = [];
for (const result of this.sortedResults) {
promises.push(fetchFactReturningPromise(result.hash));
}
Promise.all(promises).then(this.processFacts.bind(this));
};
processFacts(json: VideoFactHashMap[]) {
let factTurnsArr: FactTurns[] = [];
for (const videoFact of json) {
const currentHash = videoFact.hash;
const reducer = this.sortedResults.reduce(
(accumulator: SortedResults, currentValue: SortedResults) => {
if (accumulator.hash !== currentHash) {
// Skip accumulating until we match our hash
return currentValue;
}
if (currentValue.hash === currentHash) {
return {
hash: currentHash,
turns: accumulator.turns.concat(currentValue.turns)
};
} else {
return accumulator;
}
}
);
factTurnsArr.push({
turns: reducer.turns,
videoFact: videoFact.videoFact
});
}
this.setState({
factTurns: factTurnsArr
});
}
sortResults = (results: Search.FactResultList): SortedResults[] => {
const facts: Search.VideoResult[] = results.facts;
if (facts.length > 0) {
let sortedResults: SortedResults[] = [];
let prevHash = facts[0].hash;
let turns: number[] = [];
for (const videoResult of facts) {
if (videoResult.hash !== prevHash) {
sortedResults.push({
hash: prevHash,
turns: turns
});
prevHash = videoResult.hash;
turns = [videoResult.turn];
} else {
turns.push(videoResult.turn);
}
}
// Push last hash after loop is over
sortedResults.push({
hash: prevHash,
turns: turns
});
return sortedResults;
} else {
return [];
}
};
| () {
this.fetchFacts();
}
componentWillReceiveProps(nextProps: VideoResultsListProps) {
if (!isEqual(this.props.results, nextProps.results)) {
this.sortedResults = this.sortResults(nextProps.results);
this.fetchFacts();
}
}
render() {
const fixedClass = this.state.fixVideo ? "results__push" : "";
return (
<div className="results">
<div className="results__inner-container">
<h1 className="results__heading">Search Results</h1>
{this.state.factTurns.length === 0 ? (
<p className="turn__results">
Search returned no results for{" "}
<strong>{this.props.searchTerm}</strong>
</p>
) : (
<div>
{this.state.videoProps ? (
<VideoLite
{...this.state.videoProps}
onScroll={this.handleScroll}
isFixed={this.state.fixVideo}
/>
) : (
<VideoPlaceholder />
)}
<div className={fixedClass}>
<SearchRadioButtons
onChange={this.handleChange}
selectedOption={this.state.selectedOption}
/>
</div>
</div>
)}
{this.state.factTurns.map((videoResult, idx) => {
const eventHandlers: VideoResultPreviewEventHandlers = {
onPlayClick: this.handlePlayClick
};
if (idx === 0) {
eventHandlers.onReady = this.handleReady;
}
return (
<VideoResultPreview
key={idx.toString()}
eventHandlers={eventHandlers}
searchTerm={this.props.searchTerm}
sortBy={this.state.selectedOption}
turns={videoResult.turns}
videoFact={videoResult.videoFact}
/>
);
})}
</div>
</div>
);
}
}
export default VideoResultsList;
| componentDidMount | identifier_name |
VideoResultsList.tsx | import * as React from "react";
import SearchRadioButtons from "./SearchRadioButtons";
import VideoLite, { VideoLiteProps } from "./VideoLite";
import VideoPlaceholder from "./VideoPlaceholder";
import VideoResultPreview from "./VideoResultPreview";
import { VideoResultPreviewEventHandlers } from "./VideoResultPreview";
import isEqual = require("lodash/isEqual");
import {
fetchFactReturningPromise,
VideoFactHashMap
} from "../utils/databaseAPI";
import { alertErr } from "../utils/functions";
import { Search } from "../java2ts/Search";
import { Foundation } from "../java2ts/Foundation";
import { Promise } from "es6-promise";
export type SelectionOptions = "Containing" | "BeforeAndAfter";
export interface FactTurns {
videoFact: Foundation.VideoFactContent;
turns: number[];
}
interface SortedResults {
hash: string;
turns: number[];
}
interface VideoResultsListProps {
results: Search.FactResultList;
searchTerm: string;
}
interface VideoResultsListState {
fixVideo: boolean;
selectedOption: SelectionOptions;
factTurns: FactTurns[];
videoProps?: {
videoId: string;
clipRange?: [number, number];
};
}
class VideoResultsList extends React.Component<
VideoResultsListProps,
VideoResultsListState
> {
private sortedResults: SortedResults[];
constructor(props: VideoResultsListProps) {
super(props);
this.sortedResults = this.sortResults(props.results);
this.state = {
fixVideo: false,
selectedOption: "Containing",
factTurns: []
};
}
handleChange = (ev: React.ChangeEvent<HTMLInputElement>) => {
const value = ev.target.value;
if (value === "Containing" || value === "BeforeAndAfter") {
if (value !== this.state.selectedOption) {
this.setState({
selectedOption: value
});
}
} else {
const msg = "VideoResults: Unknown radio button";
alertErr(msg);
throw msg;
}
};
handlePlayClick = (
videoFact: Foundation.VideoFactContent,
clipRange: [number, number]
) => {
this.setState({
videoProps: {
videoId: videoFact.youtubeId,
clipRange: clipRange
}
});
};
handleReady = (youtubeId: string) => {
this.setState({
videoProps: {
videoId: youtubeId
}
});
};
handleScroll = (fixVideo: boolean) => {
if (this.state.fixVideo != fixVideo) {
this.setState({
fixVideo: fixVideo
});
}
};
fetchFacts = (): any => {
const promises = [];
for (const result of this.sortedResults) {
promises.push(fetchFactReturningPromise(result.hash));
}
Promise.all(promises).then(this.processFacts.bind(this));
};
processFacts(json: VideoFactHashMap[]) {
let factTurnsArr: FactTurns[] = [];
for (const videoFact of json) {
const currentHash = videoFact.hash;
const reducer = this.sortedResults.reduce(
(accumulator: SortedResults, currentValue: SortedResults) => {
if (accumulator.hash !== currentHash) {
// Skip accumulating until we match our hash
return currentValue;
}
if (currentValue.hash === currentHash) {
return {
hash: currentHash,
turns: accumulator.turns.concat(currentValue.turns)
};
} else {
return accumulator;
}
}
);
factTurnsArr.push({
turns: reducer.turns,
videoFact: videoFact.videoFact
});
}
this.setState({
factTurns: factTurnsArr
});
}
sortResults = (results: Search.FactResultList): SortedResults[] => {
const facts: Search.VideoResult[] = results.facts;
if (facts.length > 0) {
let sortedResults: SortedResults[] = [];
let prevHash = facts[0].hash;
let turns: number[] = [];
for (const videoResult of facts) {
if (videoResult.hash !== prevHash) {
sortedResults.push({
hash: prevHash,
turns: turns
});
prevHash = videoResult.hash;
turns = [videoResult.turn];
} else {
turns.push(videoResult.turn);
}
}
// Push last hash after loop is over
sortedResults.push({
hash: prevHash,
turns: turns
});
return sortedResults;
} else {
return [];
}
};
componentDidMount() {
this.fetchFacts();
}
componentWillReceiveProps(nextProps: VideoResultsListProps) {
if (!isEqual(this.props.results, nextProps.results)) {
this.sortedResults = this.sortResults(nextProps.results);
this.fetchFacts();
}
}
render() {
const fixedClass = this.state.fixVideo ? "results__push" : "";
return (
<div className="results">
<div className="results__inner-container">
<h1 className="results__heading">Search Results</h1>
{this.state.factTurns.length === 0 ? (
<p className="turn__results">
Search returned no results for{" "}
<strong>{this.props.searchTerm}</strong>
</p>
) : (
<div>
{this.state.videoProps ? (
<VideoLite
{...this.state.videoProps}
onScroll={this.handleScroll}
isFixed={this.state.fixVideo}
/>
) : (
<VideoPlaceholder />
)}
<div className={fixedClass}>
<SearchRadioButtons
onChange={this.handleChange}
selectedOption={this.state.selectedOption}
/>
</div>
</div>
)}
{this.state.factTurns.map((videoResult, idx) => {
const eventHandlers: VideoResultPreviewEventHandlers = {
onPlayClick: this.handlePlayClick
};
if (idx === 0) {
eventHandlers.onReady = this.handleReady; | return (
<VideoResultPreview
key={idx.toString()}
eventHandlers={eventHandlers}
searchTerm={this.props.searchTerm}
sortBy={this.state.selectedOption}
turns={videoResult.turns}
videoFact={videoResult.videoFact}
/>
);
})}
</div>
</div>
);
}
}
export default VideoResultsList; | } | random_line_split |
customdicts.py | from imbroglio import InnerDict
NULL = object()
class CustomDict(InnerDict):
def __repr__(self):
return type(self).__name__ + super().__repr__()
def copy(self):
return type(self)(*self.items())
class CollisionError(ValueError):
def __init__(self, key1, val1, key2, val2):
txt = "{0} cannot be mapped to {1}; {2} is mapped to it".format(
key1, val2, key2)
super().__init__(txt)
self.blocked_key = key1
self.blocking_key = key2
self.blocking_value = val2
if val1 is not NULL:
self.blocked_value = val1
class InvertibleDict(CustomDict):
CollisionError = CollisionError
def __init__(self):
self.inverse = dict()
super().__init__()
def clear(self):
super().clear()
self.inverse.clear()
def __setitem__(self, item, value):
if value in self.inverse and self.inverse[value] != item:
item_val = self.get(item, NULL)
raise CollisionError(item, item_val, self.inverse[value], value)
if item in self:
del self.inverse[self[item]]
super().__setitem__(item, value)
self.inverse[self[item]] = item
def __delitem__(self, item):
value = self[item]
del self.inverse[value]
super().__delitem__(item)
class ReversibleDict(CustomDict):
def __init__(self):
self.inverse = dict()
super().__init__()
def clear(self):
super().clear()
self.inverse.clear()
def __setitem__(self, item, value):
if item in self:
old_value = self[item]
self.inverse[old_value].remove(item)
if not self.inverse[old_value]:
|
if value not in self.inverse:
self.inverse[value] = set()
super().__setitem__(item, value)
self.inverse[value].add(item)
def __delitem__(self, item):
value = self[item]
self.inverse[value].remove(item)
if not self.inverse[value]:
del self.inverse[value]
super().__delitem__(item)
| del self.inverse[old_value] | conditional_block |
customdicts.py | from imbroglio import InnerDict
NULL = object()
class CustomDict(InnerDict):
def __repr__(self):
return type(self).__name__ + super().__repr__()
def copy(self):
return type(self)(*self.items())
class CollisionError(ValueError):
def __init__(self, key1, val1, key2, val2):
txt = "{0} cannot be mapped to {1}; {2} is mapped to it".format(
key1, val2, key2)
super().__init__(txt)
self.blocked_key = key1
self.blocking_key = key2
self.blocking_value = val2
if val1 is not NULL:
self.blocked_value = val1
class InvertibleDict(CustomDict):
CollisionError = CollisionError
def __init__(self):
self.inverse = dict()
super().__init__()
def clear(self):
super().clear()
self.inverse.clear()
def | (self, item, value):
if value in self.inverse and self.inverse[value] != item:
item_val = self.get(item, NULL)
raise CollisionError(item, item_val, self.inverse[value], value)
if item in self:
del self.inverse[self[item]]
super().__setitem__(item, value)
self.inverse[self[item]] = item
def __delitem__(self, item):
value = self[item]
del self.inverse[value]
super().__delitem__(item)
class ReversibleDict(CustomDict):
def __init__(self):
self.inverse = dict()
super().__init__()
def clear(self):
super().clear()
self.inverse.clear()
def __setitem__(self, item, value):
if item in self:
old_value = self[item]
self.inverse[old_value].remove(item)
if not self.inverse[old_value]:
del self.inverse[old_value]
if value not in self.inverse:
self.inverse[value] = set()
super().__setitem__(item, value)
self.inverse[value].add(item)
def __delitem__(self, item):
value = self[item]
self.inverse[value].remove(item)
if not self.inverse[value]:
del self.inverse[value]
super().__delitem__(item)
| __setitem__ | identifier_name |
customdicts.py | from imbroglio import InnerDict
NULL = object()
class CustomDict(InnerDict):
def __repr__(self):
return type(self).__name__ + super().__repr__()
def copy(self):
return type(self)(*self.items())
class CollisionError(ValueError):
def __init__(self, key1, val1, key2, val2):
txt = "{0} cannot be mapped to {1}; {2} is mapped to it".format(
key1, val2, key2)
super().__init__(txt)
self.blocked_key = key1
self.blocking_key = key2
self.blocking_value = val2
if val1 is not NULL:
self.blocked_value = val1
class InvertibleDict(CustomDict):
CollisionError = CollisionError
def __init__(self):
self.inverse = dict()
super().__init__()
def clear(self):
super().clear()
self.inverse.clear()
def __setitem__(self, item, value):
if value in self.inverse and self.inverse[value] != item:
item_val = self.get(item, NULL)
raise CollisionError(item, item_val, self.inverse[value], value)
if item in self:
del self.inverse[self[item]]
super().__setitem__(item, value)
self.inverse[self[item]] = item
def __delitem__(self, item):
value = self[item]
del self.inverse[value]
super().__delitem__(item)
class ReversibleDict(CustomDict):
def __init__(self):
self.inverse = dict()
super().__init__()
def clear(self):
super().clear()
self.inverse.clear()
def __setitem__(self, item, value):
|
def __delitem__(self, item):
value = self[item]
self.inverse[value].remove(item)
if not self.inverse[value]:
del self.inverse[value]
super().__delitem__(item)
| if item in self:
old_value = self[item]
self.inverse[old_value].remove(item)
if not self.inverse[old_value]:
del self.inverse[old_value]
if value not in self.inverse:
self.inverse[value] = set()
super().__setitem__(item, value)
self.inverse[value].add(item) | identifier_body |
customdicts.py | from imbroglio import InnerDict
NULL = object()
class CustomDict(InnerDict):
def __repr__(self):
return type(self).__name__ + super().__repr__()
def copy(self):
return type(self)(*self.items())
class CollisionError(ValueError):
def __init__(self, key1, val1, key2, val2):
txt = "{0} cannot be mapped to {1}; {2} is mapped to it".format(
key1, val2, key2)
super().__init__(txt)
self.blocked_key = key1
self.blocking_key = key2
self.blocking_value = val2
if val1 is not NULL:
self.blocked_value = val1
class InvertibleDict(CustomDict):
CollisionError = CollisionError
def __init__(self):
self.inverse = dict()
super().__init__()
def clear(self):
super().clear()
self.inverse.clear()
def __setitem__(self, item, value):
if value in self.inverse and self.inverse[value] != item:
item_val = self.get(item, NULL)
raise CollisionError(item, item_val, self.inverse[value], value)
if item in self:
del self.inverse[self[item]]
super().__setitem__(item, value)
self.inverse[self[item]] = item
def __delitem__(self, item):
value = self[item]
del self.inverse[value]
super().__delitem__(item)
class ReversibleDict(CustomDict):
def __init__(self): | def clear(self):
super().clear()
self.inverse.clear()
def __setitem__(self, item, value):
if item in self:
old_value = self[item]
self.inverse[old_value].remove(item)
if not self.inverse[old_value]:
del self.inverse[old_value]
if value not in self.inverse:
self.inverse[value] = set()
super().__setitem__(item, value)
self.inverse[value].add(item)
def __delitem__(self, item):
value = self[item]
self.inverse[value].remove(item)
if not self.inverse[value]:
del self.inverse[value]
super().__delitem__(item) | self.inverse = dict()
super().__init__()
| random_line_split |
plot.ts | /// <reference path='typings/tsd.d.ts' />
'use strict';
module oribir.plot {
var PADDING = {
top: 20,
right: 30,
bottom: 60,
left: 80
};
export class Plot {
private _parent;
private _svg;
private _panel;
private _panel_background;
private _axis_x;
private _axis_title_x;
private _axis_title_y;
private _path;
private _cache: number[] = [];
private _scale_x = d3.scale.linear();
private _scale_y = d3.scale.linear();
private _line = d3.svg.line()
.x(function(d, i) {return this._scale_x(i);})
.y(function(d, i) {return this._scale_y(d);})
.interpolate('linear');
private _axis_func_x = d3.svg.axis()
.scale(this._scale_x)
.orient('bottom');
private _axis_func_y = d3.svg.axis()
.scale(this._scale_y)
.orient('left');
constructor (parent, class_: string, max_x: number, max_y: number,
title_x: string, title_y: string) {
this._parent = parent;
this._svg = this._parent.append('svg')
.attr('class', 'plot ' + class_);
var svg_height = parseInt(this._svg.style('height'));
var panel_height = svg_height - PADDING.top - PADDING.bottom;
this._panel = this._svg.append('g')
.attr('class', 'panel')
.attr('transform',
'translate('+PADDING.left+','+PADDING.top+')')
.attr('height', panel_height);
this._panel_background = this._panel.append('rect')
.attr('class', 'panel_background')
.attr('height', panel_height);
this._scale_x.domain([0, max_x]);
this._scale_y.domain([0, max_y])
.range([panel_height, 0]);
this._axis_x = this._panel.append('g')
.attr('class', 'xaxis')
.attr('transform', 'translate(0,' + panel_height + ')');
this._panel.append('g')
.attr('class', 'yaxis')
.call(this._axis_func_y);
this._axis_title_x = this._panel.append('text')
.attr('text-anchor', 'middle')
.text(title_x);
this._axis_title_y = this._panel.append('text')
.attr('text-anchor', 'middle')
.text(title_y)
.attr('transform',
'translate(-50,'+ panel_height/2 +') rotate(-90)');
this._path = this._panel.append('path')
.attr('class', 'trajectory');
this.update_width();
}
update_width() {
var width = parseInt(this._parent.style('width')); | if (isNaN(width)) return;
var panel_width = width - PADDING.left - PADDING.right;
var panel_height = parseInt(this._panel.attr('height'));
this._panel_background.attr('width', panel_width);
this._scale_x.range([0, panel_width]);
this._axis_x.call(this._axis_func_x);
this._axis_title_x.attr('transform', 'translate('+
(panel_width / 2)+','+
(panel_height + 50) +')');
this.path_d(this._cache);
}
domain(range: number[]) {
this._scale_x.domain(range);
this._axis_x.call(this._axis_func_x);
}
path_d(values: any, delay: number = 0) {
this._cache = values;
this._path.transition().delay(delay).ease('linear')
.attr('d', this._line(values));
}
}
} | random_line_split |
|
plot.ts | /// <reference path='typings/tsd.d.ts' />
'use strict';
module oribir.plot {
var PADDING = {
top: 20,
right: 30,
bottom: 60,
left: 80
};
export class Plot {
private _parent;
private _svg;
private _panel;
private _panel_background;
private _axis_x;
private _axis_title_x;
private _axis_title_y;
private _path;
private _cache: number[] = [];
private _scale_x = d3.scale.linear();
private _scale_y = d3.scale.linear();
private _line = d3.svg.line()
.x(function(d, i) {return this._scale_x(i);})
.y(function(d, i) {return this._scale_y(d);})
.interpolate('linear');
private _axis_func_x = d3.svg.axis()
.scale(this._scale_x)
.orient('bottom');
private _axis_func_y = d3.svg.axis()
.scale(this._scale_y)
.orient('left');
constructor (parent, class_: string, max_x: number, max_y: number,
title_x: string, title_y: string) {
this._parent = parent;
this._svg = this._parent.append('svg')
.attr('class', 'plot ' + class_);
var svg_height = parseInt(this._svg.style('height'));
var panel_height = svg_height - PADDING.top - PADDING.bottom;
this._panel = this._svg.append('g')
.attr('class', 'panel')
.attr('transform',
'translate('+PADDING.left+','+PADDING.top+')')
.attr('height', panel_height);
this._panel_background = this._panel.append('rect')
.attr('class', 'panel_background')
.attr('height', panel_height);
this._scale_x.domain([0, max_x]);
this._scale_y.domain([0, max_y])
.range([panel_height, 0]);
this._axis_x = this._panel.append('g')
.attr('class', 'xaxis')
.attr('transform', 'translate(0,' + panel_height + ')');
this._panel.append('g')
.attr('class', 'yaxis')
.call(this._axis_func_y);
this._axis_title_x = this._panel.append('text')
.attr('text-anchor', 'middle')
.text(title_x);
this._axis_title_y = this._panel.append('text')
.attr('text-anchor', 'middle')
.text(title_y)
.attr('transform',
'translate(-50,'+ panel_height/2 +') rotate(-90)');
this._path = this._panel.append('path')
.attr('class', 'trajectory');
this.update_width();
}
| () {
var width = parseInt(this._parent.style('width'));
if (isNaN(width)) return;
var panel_width = width - PADDING.left - PADDING.right;
var panel_height = parseInt(this._panel.attr('height'));
this._panel_background.attr('width', panel_width);
this._scale_x.range([0, panel_width]);
this._axis_x.call(this._axis_func_x);
this._axis_title_x.attr('transform', 'translate('+
(panel_width / 2)+','+
(panel_height + 50) +')');
this.path_d(this._cache);
}
domain(range: number[]) {
this._scale_x.domain(range);
this._axis_x.call(this._axis_func_x);
}
path_d(values: any, delay: number = 0) {
this._cache = values;
this._path.transition().delay(delay).ease('linear')
.attr('d', this._line(values));
}
}
}
| update_width | identifier_name |
lib.rs | extern crate anduin;
use anduin::logic::{Actable, lcm, Application};
use anduin::backends::vulkan;
use anduin::core;
use anduin::input::{InputProcessor, Key, InputType, InputEvent};
use anduin::graphics::Drawable;
use anduin::audio::{music, sound, PlaybackController};
use anduin::logic::ApplicationListener;
use anduin::files;
use std::thread::sleep;
use std::time::Duration;
use std::path::PathBuf;
use std::str::FromStr;
use std::fs;
fn create_test_vulkan_app() {
let mut vulkan_app = vulkan::VulkanApplication::init("Anduin", "desktop", Some(5), Box::new(Game{}));
println!("application created");
let game_loop = lcm::GameLoop::new();
println!("game_loop created");
vulkan_app.application.input.add_input_processor(Box::new(InputProcessorStuct{}));
println!("add_input_processor finished");
game_loop.run(&mut vulkan_app);
println!("game_loop runned");
vulkan_app.application.listener.as_mut().exit();
}
#[test]
fn open_file() {
/*for entry in fs::read_dir(".").expect("") {
let dir = entry.expect("");
println!("{:?}", dir.file_name());
println!("{:?}", dir.path());
}
let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
d.push("tests/resources/test.txt");
let path = d.to_str().expect("");*/
let path = "./tests/resources/test.txt";
println!("path {}", path);
let handle: files::FileHandle = files::Files::getFileHandle(path, files::PathType::Local);
let name: String = handle.name();
println!("name {}", name.as_str());
}
#[test]
fn play_sound() {
let music = music::Music::new("resources/music.ogg");
let sound = sound::Sound::new("resources/shot.wav");
music.play();
sound.play();
sleep(Duration::from_millis(5000));
}
#[test]
fn create_game_loop() {
let game_loop = lcm::GameLoop::new();
println!("Loop is created {:?}", game_loop);
}
#[test]
fn create_simple_scene() {
let scene = core::scene::Stage {root: core::scene::Node::new("Root Node")};
println!("Simple scene is created {:?}", scene);
}
fn create_simple_game()
{
let scene = core::scene::Stage {
root: core::scene::Node::build("Root Node", Actor{}, Control{}, Image{})
};
scene.update();
} |
/*fn test_input()
{
match event {
winit::Event::Moved(x, y) => {
window.set_title(&format!("Window pos: ({:?}, {:?})", x, y))
}
winit::Event::Resized(w, h) => {
window.set_title(&format!("Window size: ({:?}, {:?})", w, h))
}
winit::Event::Closed => {
println!("Window close requested.");
process::exit(0);
}
winit::Event::DroppedFile(path_buf) => println!("PathBuf {:?}", path_buf),
winit::Event::ReceivedCharacter(received_char) => {
println!("Received Char {:?}", received_char)
}
winit::Event::Focused(focused) => println!("Window focused: {:?}.", focused),
winit::Event::KeyboardInput(element_state, scancode, virtual_key_code) => {
println!("Element State: {:?}, ScanCode: {:?}, Virtual Key Code: {:?}",
element_state,
scancode,
virtual_key_code);
match (virtual_key_code, element_state) {
(Some(winit::VirtualKeyCode::Escape), _) => process::exit(0),
(Some(winit::VirtualKeyCode::R), _) => {
// Resize should cause the window to "refresh"
match window.get_inner_size() {
Some(size) => window.set_inner_size(size.0, size.1),
None => (),
}
}
(Some(key), winit::ElementState::Pressed) => {
&self.keys_states.insert(key, true);
for processor in &self.input_processors {
processor.key_down(key.translate());
}
}
(Some(key), winit::ElementState::Released) => {
&self.keys_states.insert(key, false);
for processor in &self.input_processors {
processor.key_up(key.translate());
}
}
_ => {}
}
}
a @ winit::Event::MouseMoved(_) => {
println!("{:?}", a);
}
winit::Event::MouseWheel(mouse_scroll_delta, touch_phase) => {
println!("Mouse Scroll Delta {:?}, Touch Phase {:?}",
mouse_scroll_delta,
touch_phase)
}
winit::Event::MouseInput(element_state, mouse_button) => {
println!("Element State {:?}, Mouse Button {:?}",
element_state,
mouse_button)
}
winit::Event::TouchpadPressure(f, i) => println!("F {:?}, I {:?}", f, i),
winit::Event::Awakened => println!("Awakened"),
winit::Event::Refresh => println!("Window refresh callback triggered."),
winit::Event::Suspended(is_suspended) => println!("Is suspended {:?}", is_suspended),
winit::Event::Touch(touch) => println!("Touch {:?}", touch),
}
}*/
/**
* Test Game Example
*/
struct Game {
}
impl ApplicationListener for Game {
fn init(&self) {
println!("init");
}
fn update(&mut self) {
println!("update");
// Input
// Logic
// Physics
}
fn resize(&self, width: i32, height: i32) {
println!("Resize to {}x{}", width, height);
}
fn render(&self) {
println!("render");
// Animation
// Render
}
fn pause(&self) {
println!("pause");
}
fn resume(&self) {
println!("resume");
}
fn dispose(&self) {
println!("dispose");
}
fn exit(&mut self) {
println!("exit");
}
}
pub struct InputProcessorStuct;
impl InputProcessor for InputProcessorStuct {
fn process(&self, keyboard_event: InputEvent) {
match keyboard_event.event_type {
InputType::KeyDown => self.key_down(keyboard_event.key),
InputType::KeyUp => self.key_up(keyboard_event.key),
_ => (),
}
}
fn key_down(&self, key: Key) {
println!("Key down {:?}", key)
}
fn key_up(&self, key: Key) {
println!("Key up {:?}", key)
}
}
struct Actor {
}
struct Image {
}
struct Control {
}
impl Actable for Actor {
fn update(&self) {
println!("Updating self");
}
}
impl Drawable for Image {
fn draw(&self) {
println!("Drawing self");
}
}
impl InputProcessor for Control {
fn key_down(&self, key: Key)
{
println!("Keypushed down: {:?}", key)
}
fn key_up(&self, key: Key)
{
println!("Keypushed up: {:?}", key)
}
}
/*
Simple game TC
Game game = new Game(width, height, title);
Screen menu_screen = new Screen(title);
Button new_game = new Button();
ButtonActionHandler start_game = new ButtonActionHandler(new_game);
Stage main_stage = new Stage(new Viewport(new Camera()));
main_stage.add(Ball{radius, Mesh{material, color}});
main_stage.add(Line{vec![{x1,y1}, {x2,y2}]});
*/ | random_line_split |
|
lib.rs | extern crate anduin;
use anduin::logic::{Actable, lcm, Application};
use anduin::backends::vulkan;
use anduin::core;
use anduin::input::{InputProcessor, Key, InputType, InputEvent};
use anduin::graphics::Drawable;
use anduin::audio::{music, sound, PlaybackController};
use anduin::logic::ApplicationListener;
use anduin::files;
use std::thread::sleep;
use std::time::Duration;
use std::path::PathBuf;
use std::str::FromStr;
use std::fs;
fn create_test_vulkan_app() {
let mut vulkan_app = vulkan::VulkanApplication::init("Anduin", "desktop", Some(5), Box::new(Game{}));
println!("application created");
let game_loop = lcm::GameLoop::new();
println!("game_loop created");
vulkan_app.application.input.add_input_processor(Box::new(InputProcessorStuct{}));
println!("add_input_processor finished");
game_loop.run(&mut vulkan_app);
println!("game_loop runned");
vulkan_app.application.listener.as_mut().exit();
}
#[test]
fn open_file() {
/*for entry in fs::read_dir(".").expect("") {
let dir = entry.expect("");
println!("{:?}", dir.file_name());
println!("{:?}", dir.path());
}
let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
d.push("tests/resources/test.txt");
let path = d.to_str().expect("");*/
let path = "./tests/resources/test.txt";
println!("path {}", path);
let handle: files::FileHandle = files::Files::getFileHandle(path, files::PathType::Local);
let name: String = handle.name();
println!("name {}", name.as_str());
}
#[test]
fn play_sound() {
let music = music::Music::new("resources/music.ogg");
let sound = sound::Sound::new("resources/shot.wav");
music.play();
sound.play();
sleep(Duration::from_millis(5000));
}
#[test]
fn create_game_loop() {
let game_loop = lcm::GameLoop::new();
println!("Loop is created {:?}", game_loop);
}
#[test]
fn create_simple_scene() {
let scene = core::scene::Stage {root: core::scene::Node::new("Root Node")};
println!("Simple scene is created {:?}", scene);
}
fn create_simple_game()
{
let scene = core::scene::Stage {
root: core::scene::Node::build("Root Node", Actor{}, Control{}, Image{})
};
scene.update();
}
/*fn test_input()
{
match event {
winit::Event::Moved(x, y) => {
window.set_title(&format!("Window pos: ({:?}, {:?})", x, y))
}
winit::Event::Resized(w, h) => {
window.set_title(&format!("Window size: ({:?}, {:?})", w, h))
}
winit::Event::Closed => {
println!("Window close requested.");
process::exit(0);
}
winit::Event::DroppedFile(path_buf) => println!("PathBuf {:?}", path_buf),
winit::Event::ReceivedCharacter(received_char) => {
println!("Received Char {:?}", received_char)
}
winit::Event::Focused(focused) => println!("Window focused: {:?}.", focused),
winit::Event::KeyboardInput(element_state, scancode, virtual_key_code) => {
println!("Element State: {:?}, ScanCode: {:?}, Virtual Key Code: {:?}",
element_state,
scancode,
virtual_key_code);
match (virtual_key_code, element_state) {
(Some(winit::VirtualKeyCode::Escape), _) => process::exit(0),
(Some(winit::VirtualKeyCode::R), _) => {
// Resize should cause the window to "refresh"
match window.get_inner_size() {
Some(size) => window.set_inner_size(size.0, size.1),
None => (),
}
}
(Some(key), winit::ElementState::Pressed) => {
&self.keys_states.insert(key, true);
for processor in &self.input_processors {
processor.key_down(key.translate());
}
}
(Some(key), winit::ElementState::Released) => {
&self.keys_states.insert(key, false);
for processor in &self.input_processors {
processor.key_up(key.translate());
}
}
_ => {}
}
}
a @ winit::Event::MouseMoved(_) => {
println!("{:?}", a);
}
winit::Event::MouseWheel(mouse_scroll_delta, touch_phase) => {
println!("Mouse Scroll Delta {:?}, Touch Phase {:?}",
mouse_scroll_delta,
touch_phase)
}
winit::Event::MouseInput(element_state, mouse_button) => {
println!("Element State {:?}, Mouse Button {:?}",
element_state,
mouse_button)
}
winit::Event::TouchpadPressure(f, i) => println!("F {:?}, I {:?}", f, i),
winit::Event::Awakened => println!("Awakened"),
winit::Event::Refresh => println!("Window refresh callback triggered."),
winit::Event::Suspended(is_suspended) => println!("Is suspended {:?}", is_suspended),
winit::Event::Touch(touch) => println!("Touch {:?}", touch),
}
}*/
/**
* Test Game Example
*/
struct Game {
}
impl ApplicationListener for Game {
fn init(&self) {
println!("init");
}
fn update(&mut self) {
println!("update");
// Input
// Logic
// Physics
}
fn resize(&self, width: i32, height: i32) {
println!("Resize to {}x{}", width, height);
}
fn render(&self) {
println!("render");
// Animation
// Render
}
fn pause(&self) {
println!("pause");
}
fn resume(&self) {
println!("resume");
}
fn dispose(&self) {
println!("dispose");
}
fn exit(&mut self) {
println!("exit");
}
}
pub struct | ;
impl InputProcessor for InputProcessorStuct {
fn process(&self, keyboard_event: InputEvent) {
match keyboard_event.event_type {
InputType::KeyDown => self.key_down(keyboard_event.key),
InputType::KeyUp => self.key_up(keyboard_event.key),
_ => (),
}
}
fn key_down(&self, key: Key) {
println!("Key down {:?}", key)
}
fn key_up(&self, key: Key) {
println!("Key up {:?}", key)
}
}
struct Actor {
}
struct Image {
}
struct Control {
}
impl Actable for Actor {
fn update(&self) {
println!("Updating self");
}
}
impl Drawable for Image {
fn draw(&self) {
println!("Drawing self");
}
}
impl InputProcessor for Control {
fn key_down(&self, key: Key)
{
println!("Keypushed down: {:?}", key)
}
fn key_up(&self, key: Key)
{
println!("Keypushed up: {:?}", key)
}
}
/*
Simple game TC
Game game = new Game(width, height, title);
Screen menu_screen = new Screen(title);
Button new_game = new Button();
ButtonActionHandler start_game = new ButtonActionHandler(new_game);
Stage main_stage = new Stage(new Viewport(new Camera()));
main_stage.add(Ball{radius, Mesh{material, color}});
main_stage.add(Line{vec![{x1,y1}, {x2,y2}]});
*/
| InputProcessorStuct | identifier_name |
lib.rs | extern crate anduin;
use anduin::logic::{Actable, lcm, Application};
use anduin::backends::vulkan;
use anduin::core;
use anduin::input::{InputProcessor, Key, InputType, InputEvent};
use anduin::graphics::Drawable;
use anduin::audio::{music, sound, PlaybackController};
use anduin::logic::ApplicationListener;
use anduin::files;
use std::thread::sleep;
use std::time::Duration;
use std::path::PathBuf;
use std::str::FromStr;
use std::fs;
fn create_test_vulkan_app() {
let mut vulkan_app = vulkan::VulkanApplication::init("Anduin", "desktop", Some(5), Box::new(Game{}));
println!("application created");
let game_loop = lcm::GameLoop::new();
println!("game_loop created");
vulkan_app.application.input.add_input_processor(Box::new(InputProcessorStuct{}));
println!("add_input_processor finished");
game_loop.run(&mut vulkan_app);
println!("game_loop runned");
vulkan_app.application.listener.as_mut().exit();
}
#[test]
fn open_file() {
/*for entry in fs::read_dir(".").expect("") {
let dir = entry.expect("");
println!("{:?}", dir.file_name());
println!("{:?}", dir.path());
}
let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
d.push("tests/resources/test.txt");
let path = d.to_str().expect("");*/
let path = "./tests/resources/test.txt";
println!("path {}", path);
let handle: files::FileHandle = files::Files::getFileHandle(path, files::PathType::Local);
let name: String = handle.name();
println!("name {}", name.as_str());
}
#[test]
fn play_sound() {
let music = music::Music::new("resources/music.ogg");
let sound = sound::Sound::new("resources/shot.wav");
music.play();
sound.play();
sleep(Duration::from_millis(5000));
}
#[test]
fn create_game_loop() {
let game_loop = lcm::GameLoop::new();
println!("Loop is created {:?}", game_loop);
}
#[test]
fn create_simple_scene() {
let scene = core::scene::Stage {root: core::scene::Node::new("Root Node")};
println!("Simple scene is created {:?}", scene);
}
fn create_simple_game()
{
let scene = core::scene::Stage {
root: core::scene::Node::build("Root Node", Actor{}, Control{}, Image{})
};
scene.update();
}
/*fn test_input()
{
match event {
winit::Event::Moved(x, y) => {
window.set_title(&format!("Window pos: ({:?}, {:?})", x, y))
}
winit::Event::Resized(w, h) => {
window.set_title(&format!("Window size: ({:?}, {:?})", w, h))
}
winit::Event::Closed => {
println!("Window close requested.");
process::exit(0);
}
winit::Event::DroppedFile(path_buf) => println!("PathBuf {:?}", path_buf),
winit::Event::ReceivedCharacter(received_char) => {
println!("Received Char {:?}", received_char)
}
winit::Event::Focused(focused) => println!("Window focused: {:?}.", focused),
winit::Event::KeyboardInput(element_state, scancode, virtual_key_code) => {
println!("Element State: {:?}, ScanCode: {:?}, Virtual Key Code: {:?}",
element_state,
scancode,
virtual_key_code);
match (virtual_key_code, element_state) {
(Some(winit::VirtualKeyCode::Escape), _) => process::exit(0),
(Some(winit::VirtualKeyCode::R), _) => {
// Resize should cause the window to "refresh"
match window.get_inner_size() {
Some(size) => window.set_inner_size(size.0, size.1),
None => (),
}
}
(Some(key), winit::ElementState::Pressed) => {
&self.keys_states.insert(key, true);
for processor in &self.input_processors {
processor.key_down(key.translate());
}
}
(Some(key), winit::ElementState::Released) => {
&self.keys_states.insert(key, false);
for processor in &self.input_processors {
processor.key_up(key.translate());
}
}
_ => {}
}
}
a @ winit::Event::MouseMoved(_) => {
println!("{:?}", a);
}
winit::Event::MouseWheel(mouse_scroll_delta, touch_phase) => {
println!("Mouse Scroll Delta {:?}, Touch Phase {:?}",
mouse_scroll_delta,
touch_phase)
}
winit::Event::MouseInput(element_state, mouse_button) => {
println!("Element State {:?}, Mouse Button {:?}",
element_state,
mouse_button)
}
winit::Event::TouchpadPressure(f, i) => println!("F {:?}, I {:?}", f, i),
winit::Event::Awakened => println!("Awakened"),
winit::Event::Refresh => println!("Window refresh callback triggered."),
winit::Event::Suspended(is_suspended) => println!("Is suspended {:?}", is_suspended),
winit::Event::Touch(touch) => println!("Touch {:?}", touch),
}
}*/
/**
* Test Game Example
*/
struct Game {
}
impl ApplicationListener for Game {
fn init(&self) {
println!("init");
}
fn update(&mut self) {
println!("update");
// Input
// Logic
// Physics
}
fn resize(&self, width: i32, height: i32) {
println!("Resize to {}x{}", width, height);
}
fn render(&self) {
println!("render");
// Animation
// Render
}
fn pause(&self) {
println!("pause");
}
fn resume(&self) {
println!("resume");
}
fn dispose(&self) {
println!("dispose");
}
fn exit(&mut self) {
println!("exit");
}
}
pub struct InputProcessorStuct;
impl InputProcessor for InputProcessorStuct {
fn process(&self, keyboard_event: InputEvent) |
fn key_down(&self, key: Key) {
println!("Key down {:?}", key)
}
fn key_up(&self, key: Key) {
println!("Key up {:?}", key)
}
}
struct Actor {
}
struct Image {
}
struct Control {
}
impl Actable for Actor {
fn update(&self) {
println!("Updating self");
}
}
impl Drawable for Image {
fn draw(&self) {
println!("Drawing self");
}
}
impl InputProcessor for Control {
fn key_down(&self, key: Key)
{
println!("Keypushed down: {:?}", key)
}
fn key_up(&self, key: Key)
{
println!("Keypushed up: {:?}", key)
}
}
/*
Simple game TC
Game game = new Game(width, height, title);
Screen menu_screen = new Screen(title);
Button new_game = new Button();
ButtonActionHandler start_game = new ButtonActionHandler(new_game);
Stage main_stage = new Stage(new Viewport(new Camera()));
main_stage.add(Ball{radius, Mesh{material, color}});
main_stage.add(Line{vec![{x1,y1}, {x2,y2}]});
*/
| {
match keyboard_event.event_type {
InputType::KeyDown => self.key_down(keyboard_event.key),
InputType::KeyUp => self.key_up(keyboard_event.key),
_ => (),
}
} | identifier_body |
email_limit.py | """ Email limiter """
import logging
from sqlalchemy import Column, BigInteger, DateTime, func
from sqlalchemy.schema import ForeignKey
import akiri.framework.sqlalchemy as meta
from event_control import EventControl
from manager import Manager
from system import SystemKeys
logger = logging.getLogger()
class EmailLimitEntry(meta.Base):
# pylint: disable=no-init
__tablename__ = "email_sent"
emailid = Column(BigInteger, unique=True, nullable=False,
autoincrement=True, primary_key=True)
envid = Column(BigInteger, ForeignKey("environment.envid"))
eventid = Column(BigInteger) # Just kept to help. Not required.
creation_time = Column(DateTime, server_default=func.now())
@classmethod
def remove_all(cls, envid):
session = meta.Session()
session.query(EmailLimitEntry).\
filter(EmailLimitEntry.envid == envid).\
delete()
session.commit()
class EmailLimitManager(Manager):
""" Ensures that email is not sent too frequently. """
def _log_email(self, eventid):
session = meta.Session()
entry = EmailLimitEntry(envid=self.envid, eventid=eventid)
session.add(entry)
session.commit()
def _prune(self):
"""Keep only the the ones in the last email-lookback-minutes
period."""
email_lookback_minutes = self.system[SystemKeys.EMAIL_LOOKBACK_MINUTES]
stmt = ("DELETE from email_sent "
"where creation_time < NOW() - INTERVAL '%d MINUTES'") % \
(email_lookback_minutes,)
connection = meta.get_connection()
result = connection.execute(stmt)
connection.close()
logger.debug("email limit manager: pruned %d", result.rowcount)
def _recent_count(self):
return meta.Session.query(EmailLimitEntry).\
filter(EmailLimitEntry.envid == self.envid).\
count()
def email_limit_reached(self, event_entry, eventid):
"""Keep track of how many emails have been sent during the last
email-lookback-minutes period and check to see if
email-max-count have already been sent. Return:
count-of-emails-sent-recently: if email_limit reached
reached (don't send more
emails).
False if email-limit hasn't been reached (keep sending emails).
"""
logger.debug("email_limit_reached checking: event %s, eventid %s\n",
event_entry.key, eventid)
# We limit only ERROR events.
if event_entry.level != 'E' or \
event_entry.key in [EventControl.EMAIL_TEST,
EventControl.EMAIL_SPIKE]:
# These events can always be emailed and don't count against
# the maximum.
return False
self._log_email(eventid)
self._prune() # Keep only the last email-looback-minutes rows
emails_sent_recently = self._recent_count()
email_lookback_minutes = self.system[SystemKeys.EMAIL_LOOKBACK_MINUTES]
logger.debug("email_limit: sent %d error emails in the last "
"%d minutes.",
emails_sent_recently, email_lookback_minutes)
email_max_count = self.system[SystemKeys.EMAIL_MAX_COUNT]
if emails_sent_recently > email_max_count: | self._eventit()
# Disable email alerts
self.system[SystemKeys.ALERTS_ADMIN_ENABLED] = False
self.system[SystemKeys.ALERTS_PUBLISHER_ENABLED] = False
self.system[SystemKeys.EMAIL_SPIKE_DISABLED_ALERTS] = True
meta.commit()
return emails_sent_recently
# Send this email alert
return False
def _eventit(self):
"""Send the EMAIL-SPIKE event."""
email_lookback_minutes = self.system[SystemKeys.EMAIL_LOOKBACK_MINUTES]
email_max_count = self.system[SystemKeys.EMAIL_MAX_COUNT]
data = {'email_lookback_minutes': email_lookback_minutes,
'email_max_count': email_max_count}
self.server.event_control.gen(EventControl.EMAIL_SPIKE, data) | # Don't sent this email alert
# send an alert that we're disabling email alerts | random_line_split |
email_limit.py | """ Email limiter """
import logging
from sqlalchemy import Column, BigInteger, DateTime, func
from sqlalchemy.schema import ForeignKey
import akiri.framework.sqlalchemy as meta
from event_control import EventControl
from manager import Manager
from system import SystemKeys
logger = logging.getLogger()
class EmailLimitEntry(meta.Base):
# pylint: disable=no-init
__tablename__ = "email_sent"
emailid = Column(BigInteger, unique=True, nullable=False,
autoincrement=True, primary_key=True)
envid = Column(BigInteger, ForeignKey("environment.envid"))
eventid = Column(BigInteger) # Just kept to help. Not required.
creation_time = Column(DateTime, server_default=func.now())
@classmethod
def remove_all(cls, envid):
session = meta.Session()
session.query(EmailLimitEntry).\
filter(EmailLimitEntry.envid == envid).\
delete()
session.commit()
class EmailLimitManager(Manager):
""" Ensures that email is not sent too frequently. """
def | (self, eventid):
session = meta.Session()
entry = EmailLimitEntry(envid=self.envid, eventid=eventid)
session.add(entry)
session.commit()
def _prune(self):
"""Keep only the the ones in the last email-lookback-minutes
period."""
email_lookback_minutes = self.system[SystemKeys.EMAIL_LOOKBACK_MINUTES]
stmt = ("DELETE from email_sent "
"where creation_time < NOW() - INTERVAL '%d MINUTES'") % \
(email_lookback_minutes,)
connection = meta.get_connection()
result = connection.execute(stmt)
connection.close()
logger.debug("email limit manager: pruned %d", result.rowcount)
def _recent_count(self):
return meta.Session.query(EmailLimitEntry).\
filter(EmailLimitEntry.envid == self.envid).\
count()
def email_limit_reached(self, event_entry, eventid):
"""Keep track of how many emails have been sent during the last
email-lookback-minutes period and check to see if
email-max-count have already been sent. Return:
count-of-emails-sent-recently: if email_limit reached
reached (don't send more
emails).
False if email-limit hasn't been reached (keep sending emails).
"""
logger.debug("email_limit_reached checking: event %s, eventid %s\n",
event_entry.key, eventid)
# We limit only ERROR events.
if event_entry.level != 'E' or \
event_entry.key in [EventControl.EMAIL_TEST,
EventControl.EMAIL_SPIKE]:
# These events can always be emailed and don't count against
# the maximum.
return False
self._log_email(eventid)
self._prune() # Keep only the last email-looback-minutes rows
emails_sent_recently = self._recent_count()
email_lookback_minutes = self.system[SystemKeys.EMAIL_LOOKBACK_MINUTES]
logger.debug("email_limit: sent %d error emails in the last "
"%d minutes.",
emails_sent_recently, email_lookback_minutes)
email_max_count = self.system[SystemKeys.EMAIL_MAX_COUNT]
if emails_sent_recently > email_max_count:
# Don't sent this email alert
# send an alert that we're disabling email alerts
self._eventit()
# Disable email alerts
self.system[SystemKeys.ALERTS_ADMIN_ENABLED] = False
self.system[SystemKeys.ALERTS_PUBLISHER_ENABLED] = False
self.system[SystemKeys.EMAIL_SPIKE_DISABLED_ALERTS] = True
meta.commit()
return emails_sent_recently
# Send this email alert
return False
def _eventit(self):
"""Send the EMAIL-SPIKE event."""
email_lookback_minutes = self.system[SystemKeys.EMAIL_LOOKBACK_MINUTES]
email_max_count = self.system[SystemKeys.EMAIL_MAX_COUNT]
data = {'email_lookback_minutes': email_lookback_minutes,
'email_max_count': email_max_count}
self.server.event_control.gen(EventControl.EMAIL_SPIKE, data)
| _log_email | identifier_name |
email_limit.py | """ Email limiter """
import logging
from sqlalchemy import Column, BigInteger, DateTime, func
from sqlalchemy.schema import ForeignKey
import akiri.framework.sqlalchemy as meta
from event_control import EventControl
from manager import Manager
from system import SystemKeys
logger = logging.getLogger()
class EmailLimitEntry(meta.Base):
# pylint: disable=no-init
__tablename__ = "email_sent"
emailid = Column(BigInteger, unique=True, nullable=False,
autoincrement=True, primary_key=True)
envid = Column(BigInteger, ForeignKey("environment.envid"))
eventid = Column(BigInteger) # Just kept to help. Not required.
creation_time = Column(DateTime, server_default=func.now())
@classmethod
def remove_all(cls, envid):
session = meta.Session()
session.query(EmailLimitEntry).\
filter(EmailLimitEntry.envid == envid).\
delete()
session.commit()
class EmailLimitManager(Manager):
""" Ensures that email is not sent too frequently. """
def _log_email(self, eventid):
session = meta.Session()
entry = EmailLimitEntry(envid=self.envid, eventid=eventid)
session.add(entry)
session.commit()
def _prune(self):
"""Keep only the the ones in the last email-lookback-minutes
period."""
email_lookback_minutes = self.system[SystemKeys.EMAIL_LOOKBACK_MINUTES]
stmt = ("DELETE from email_sent "
"where creation_time < NOW() - INTERVAL '%d MINUTES'") % \
(email_lookback_minutes,)
connection = meta.get_connection()
result = connection.execute(stmt)
connection.close()
logger.debug("email limit manager: pruned %d", result.rowcount)
def _recent_count(self):
return meta.Session.query(EmailLimitEntry).\
filter(EmailLimitEntry.envid == self.envid).\
count()
def email_limit_reached(self, event_entry, eventid):
"""Keep track of how many emails have been sent during the last
email-lookback-minutes period and check to see if
email-max-count have already been sent. Return:
count-of-emails-sent-recently: if email_limit reached
reached (don't send more
emails).
False if email-limit hasn't been reached (keep sending emails).
"""
logger.debug("email_limit_reached checking: event %s, eventid %s\n",
event_entry.key, eventid)
# We limit only ERROR events.
if event_entry.level != 'E' or \
event_entry.key in [EventControl.EMAIL_TEST,
EventControl.EMAIL_SPIKE]:
# These events can always be emailed and don't count against
# the maximum.
return False
self._log_email(eventid)
self._prune() # Keep only the last email-looback-minutes rows
emails_sent_recently = self._recent_count()
email_lookback_minutes = self.system[SystemKeys.EMAIL_LOOKBACK_MINUTES]
logger.debug("email_limit: sent %d error emails in the last "
"%d minutes.",
emails_sent_recently, email_lookback_minutes)
email_max_count = self.system[SystemKeys.EMAIL_MAX_COUNT]
if emails_sent_recently > email_max_count:
# Don't sent this email alert
# send an alert that we're disabling email alerts
|
# Send this email alert
return False
def _eventit(self):
"""Send the EMAIL-SPIKE event."""
email_lookback_minutes = self.system[SystemKeys.EMAIL_LOOKBACK_MINUTES]
email_max_count = self.system[SystemKeys.EMAIL_MAX_COUNT]
data = {'email_lookback_minutes': email_lookback_minutes,
'email_max_count': email_max_count}
self.server.event_control.gen(EventControl.EMAIL_SPIKE, data)
| self._eventit()
# Disable email alerts
self.system[SystemKeys.ALERTS_ADMIN_ENABLED] = False
self.system[SystemKeys.ALERTS_PUBLISHER_ENABLED] = False
self.system[SystemKeys.EMAIL_SPIKE_DISABLED_ALERTS] = True
meta.commit()
return emails_sent_recently | conditional_block |
email_limit.py | """ Email limiter """
import logging
from sqlalchemy import Column, BigInteger, DateTime, func
from sqlalchemy.schema import ForeignKey
import akiri.framework.sqlalchemy as meta
from event_control import EventControl
from manager import Manager
from system import SystemKeys
logger = logging.getLogger()
class EmailLimitEntry(meta.Base):
# pylint: disable=no-init
__tablename__ = "email_sent"
emailid = Column(BigInteger, unique=True, nullable=False,
autoincrement=True, primary_key=True)
envid = Column(BigInteger, ForeignKey("environment.envid"))
eventid = Column(BigInteger) # Just kept to help. Not required.
creation_time = Column(DateTime, server_default=func.now())
@classmethod
def remove_all(cls, envid):
session = meta.Session()
session.query(EmailLimitEntry).\
filter(EmailLimitEntry.envid == envid).\
delete()
session.commit()
class EmailLimitManager(Manager):
""" Ensures that email is not sent too frequently. """
def _log_email(self, eventid):
|
def _prune(self):
"""Keep only the the ones in the last email-lookback-minutes
period."""
email_lookback_minutes = self.system[SystemKeys.EMAIL_LOOKBACK_MINUTES]
stmt = ("DELETE from email_sent "
"where creation_time < NOW() - INTERVAL '%d MINUTES'") % \
(email_lookback_minutes,)
connection = meta.get_connection()
result = connection.execute(stmt)
connection.close()
logger.debug("email limit manager: pruned %d", result.rowcount)
def _recent_count(self):
return meta.Session.query(EmailLimitEntry).\
filter(EmailLimitEntry.envid == self.envid).\
count()
def email_limit_reached(self, event_entry, eventid):
"""Keep track of how many emails have been sent during the last
email-lookback-minutes period and check to see if
email-max-count have already been sent. Return:
count-of-emails-sent-recently: if email_limit reached
reached (don't send more
emails).
False if email-limit hasn't been reached (keep sending emails).
"""
logger.debug("email_limit_reached checking: event %s, eventid %s\n",
event_entry.key, eventid)
# We limit only ERROR events.
if event_entry.level != 'E' or \
event_entry.key in [EventControl.EMAIL_TEST,
EventControl.EMAIL_SPIKE]:
# These events can always be emailed and don't count against
# the maximum.
return False
self._log_email(eventid)
self._prune() # Keep only the last email-looback-minutes rows
emails_sent_recently = self._recent_count()
email_lookback_minutes = self.system[SystemKeys.EMAIL_LOOKBACK_MINUTES]
logger.debug("email_limit: sent %d error emails in the last "
"%d minutes.",
emails_sent_recently, email_lookback_minutes)
email_max_count = self.system[SystemKeys.EMAIL_MAX_COUNT]
if emails_sent_recently > email_max_count:
# Don't sent this email alert
# send an alert that we're disabling email alerts
self._eventit()
# Disable email alerts
self.system[SystemKeys.ALERTS_ADMIN_ENABLED] = False
self.system[SystemKeys.ALERTS_PUBLISHER_ENABLED] = False
self.system[SystemKeys.EMAIL_SPIKE_DISABLED_ALERTS] = True
meta.commit()
return emails_sent_recently
# Send this email alert
return False
def _eventit(self):
"""Send the EMAIL-SPIKE event."""
email_lookback_minutes = self.system[SystemKeys.EMAIL_LOOKBACK_MINUTES]
email_max_count = self.system[SystemKeys.EMAIL_MAX_COUNT]
data = {'email_lookback_minutes': email_lookback_minutes,
'email_max_count': email_max_count}
self.server.event_control.gen(EventControl.EMAIL_SPIKE, data)
| session = meta.Session()
entry = EmailLimitEntry(envid=self.envid, eventid=eventid)
session.add(entry)
session.commit() | identifier_body |
RequestUtils.js | /*
* RequestUtils
* Visit http://createjs.com/ for documentation, updates and examples.
*
*
* Copyright (c) 2012 gskinner.com, inc.
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* @module PreloadJS
*/
(function () {
/**
* Utilities that assist with parsing load items, and determining file types, etc.
* @class RequestUtils
*/
var s = {};
/**
* The Regular Expression used to test file URLS for an absolute path.
* @property ABSOLUTE_PATH
* @type {RegExp}
* @static
*/
s.ABSOLUTE_PATT = /^(?:\w+:)?\/{2}/i;
/**
* The Regular Expression used to test file URLS for a relative path.
* @property RELATIVE_PATH
* @type {RegExp}
* @static
*/
s.RELATIVE_PATT = (/^[./]*?\//i);
/**
* The Regular Expression used to test file URLS for an extension. Note that URIs must already have the query string
* removed.
* @property EXTENSION_PATT
* @type {RegExp}
* @static
*/
s.EXTENSION_PATT = /\/?[^/]+\.(\w{1,5})$/i;
/**
* Parse a file path to determine the information we need to work with it. Currently, PreloadJS needs to know:
* <ul>
* <li>If the path is absolute. Absolute paths start with a protocol (such as `http://`, `file://`, or
* `//networkPath`)</li>
* <li>If the path is relative. Relative paths start with `../` or `/path` (or similar)</li>
* <li>The file extension. This is determined by the filename with an extension. Query strings are dropped, and
* the file path is expected to follow the format `name.ext`.</li>
* </ul>
* @method parseURI
* @param {String} path
* @returns {Object} An Object with an `absolute` and `relative` Boolean values, as well as an optional 'extension`
* property, which is the lowercase extension.
* @static
*/
s.parseURI = function (path) {
var info = {absolute: false, relative: false};
if (path == null) { return info; }
// Drop the query string
var queryIndex = path.indexOf("?");
if (queryIndex > -1) {
path = path.substr(0, queryIndex);
}
// Absolute
var match;
if (s.ABSOLUTE_PATT.test(path)) {
info.absolute = true;
// Relative
} else if (s.RELATIVE_PATT.test(path)) {
info.relative = true;
}
// Extension
if (match = path.match(s.EXTENSION_PATT)) {
info.extension = match[1].toLowerCase();
}
return info;
};
/**
* Formats an object into a query string for either a POST or GET request.
* @method formatQueryString
* @param {Object} data The data to convert to a query string.
* @param {Array} [query] Existing name/value pairs to append on to this query.
* @static
*/
s.formatQueryString = function (data, query) {
if (data == null) {
throw new Error('You must specify data.');
}
var params = [];
for (var n in data) {
params.push(n + '=' + escape(data[n]));
}
if (query) {
params = params.concat(query);
}
return params.join('&');
};
/**
* A utility method that builds a file path using a source and a data object, and formats it into a new path.
* @method buildPath
* @param {String} src The source path to add values to.
* @param {Object} [data] Object used to append values to this request as a query string. Existing parameters on the
* path will be preserved.
* @returns {string} A formatted string that contains the path and the supplied parameters.
* @static
*/
s.buildPath = function (src, data) {
if (data == null) |
var query = [];
var idx = src.indexOf('?');
if (idx != -1) {
var q = src.slice(idx + 1);
query = query.concat(q.split('&'));
}
if (idx != -1) {
return src.slice(0, idx) + '?' + this.formatQueryString(data, query);
} else {
return src + '?' + this.formatQueryString(data, query);
}
};
/**
* @method isCrossDomain
* @param {LoadItem|Object} item A load item with a `src` property.
* @return {Boolean} If the load item is loading from a different domain than the current location.
* @static
*/
s.isCrossDomain = function (item) {
var target = document.createElement("a");
target.href = item.src;
var host = document.createElement("a");
host.href = location.href;
var crossdomain = (target.hostname != "") &&
(target.port != host.port ||
target.protocol != host.protocol ||
target.hostname != host.hostname);
return crossdomain;
};
/**
* @method isLocal
* @param {LoadItem|Object} item A load item with a `src` property
* @return {Boolean} If the load item is loading from the "file:" protocol. Assume that the host must be local as
* well.
* @static
*/
s.isLocal = function (item) {
var target = document.createElement("a");
target.href = item.src;
return target.hostname == "" && target.protocol == "file:";
};
/**
* Determine if a specific type should be loaded as a binary file. Currently, only images and items marked
* specifically as "binary" are loaded as binary. Note that audio is <b>not</b> a binary type, as we can not play
* back using an audio tag if it is loaded as binary. Plugins can change the item type to binary to ensure they get
* a binary result to work with. Binary files are loaded using XHR2. Types are defined as static constants on
* {{#crossLink "AbstractLoader"}}{{/crossLink}}.
* @method isBinary
* @param {String} type The item type.
* @return {Boolean} If the specified type is binary.
* @static
*/
s.isBinary = function (type) {
switch (type) {
case createjs.AbstractLoader.IMAGE:
case createjs.AbstractLoader.BINARY:
return true;
default:
return false;
}
};
/**
* Check if item is a valid HTMLImageElement
* @method isImageTag
* @param {Object} item
* @returns {Boolean}
* @static
*/
s.isImageTag = function(item) {
return item instanceof HTMLImageElement;
};
/**
* Check if item is a valid HTMLAudioElement
* @method isAudioTag
* @param {Object} item
* @returns {Boolean}
* @static
*/
s.isAudioTag = function(item) {
if (window.HTMLAudioElement) {
return item instanceof HTMLAudioElement;
} else {
return false;
}
};
/**
* Check if item is a valid HTMLVideoElement
* @method isVideoTag
* @param {Object} item
* @returns {Boolean}
* @static
*/
s.isVideoTag = function(item) {
if (window.HTMLVideoElement) {
return item instanceof HTMLVideoElement;
} else {
return false;
}
};
/**
* Determine if a specific type is a text-based asset, and should be loaded as UTF-8.
* @method isText
* @param {String} type The item type.
* @return {Boolean} If the specified type is text.
* @static
*/
s.isText = function (type) {
switch (type) {
case createjs.AbstractLoader.TEXT:
case createjs.AbstractLoader.JSON:
case createjs.AbstractLoader.MANIFEST:
case createjs.AbstractLoader.XML:
case createjs.AbstractLoader.CSS:
case createjs.AbstractLoader.SVG:
case createjs.AbstractLoader.JAVASCRIPT:
case createjs.AbstractLoader.SPRITESHEET:
return true;
default:
return false;
}
};
/**
* Determine the type of the object using common extensions. Note that the type can be passed in with the load item
* if it is an unusual extension.
* @method getTypeByExtension
* @param {String} extension The file extension to use to determine the load type.
* @return {String} The determined load type (for example, <code>AbstractLoader.IMAGE</code>). Will return `null` if
* the type can not be determined by the extension.
* @static
*/
s.getTypeByExtension = function (extension) {
if (extension == null) {
return createjs.AbstractLoader.TEXT;
}
switch (extension.toLowerCase()) {
case "jpeg":
case "jpg":
case "gif":
case "png":
case "webp":
case "bmp":
return createjs.AbstractLoader.IMAGE;
case "ogg":
case "mp3":
case "webm":
return createjs.AbstractLoader.SOUND;
case "mp4":
case "webm":
case "ts":
return createjs.AbstractLoader.VIDEO;
case "json":
return createjs.AbstractLoader.JSON;
case "xml":
return createjs.AbstractLoader.XML;
case "css":
return createjs.AbstractLoader.CSS;
case "js":
return createjs.AbstractLoader.JAVASCRIPT;
case 'svg':
return createjs.AbstractLoader.SVG;
default:
return createjs.AbstractLoader.TEXT;
}
};
createjs.RequestUtils = s;
}());
| {
return src;
} | conditional_block |
RequestUtils.js | /*
* RequestUtils
* Visit http://createjs.com/ for documentation, updates and examples.
*
*
* Copyright (c) 2012 gskinner.com, inc.
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* @module PreloadJS
*/
(function () {
/**
* Utilities that assist with parsing load items, and determining file types, etc.
* @class RequestUtils
*/
var s = {};
/**
* The Regular Expression used to test file URLS for an absolute path.
* @property ABSOLUTE_PATH
* @type {RegExp}
* @static
*/
s.ABSOLUTE_PATT = /^(?:\w+:)?\/{2}/i;
/**
* The Regular Expression used to test file URLS for a relative path.
* @property RELATIVE_PATH
* @type {RegExp}
* @static
*/
s.RELATIVE_PATT = (/^[./]*?\//i);
/**
* The Regular Expression used to test file URLS for an extension. Note that URIs must already have the query string
* removed.
* @property EXTENSION_PATT
* @type {RegExp}
* @static
*/
s.EXTENSION_PATT = /\/?[^/]+\.(\w{1,5})$/i;
/**
* Parse a file path to determine the information we need to work with it. Currently, PreloadJS needs to know:
* <ul>
* <li>If the path is absolute. Absolute paths start with a protocol (such as `http://`, `file://`, or
* `//networkPath`)</li>
* <li>If the path is relative. Relative paths start with `../` or `/path` (or similar)</li>
* <li>The file extension. This is determined by the filename with an extension. Query strings are dropped, and
* the file path is expected to follow the format `name.ext`.</li>
* </ul>
* @method parseURI
* @param {String} path
* @returns {Object} An Object with an `absolute` and `relative` Boolean values, as well as an optional 'extension`
* property, which is the lowercase extension.
* @static
*/
s.parseURI = function (path) {
var info = {absolute: false, relative: false};
if (path == null) { return info; }
// Drop the query string
var queryIndex = path.indexOf("?");
if (queryIndex > -1) {
path = path.substr(0, queryIndex);
}
// Absolute
var match;
if (s.ABSOLUTE_PATT.test(path)) {
info.absolute = true;
// Relative
} else if (s.RELATIVE_PATT.test(path)) {
info.relative = true; | }
return info;
};
/**
* Formats an object into a query string for either a POST or GET request.
* @method formatQueryString
* @param {Object} data The data to convert to a query string.
* @param {Array} [query] Existing name/value pairs to append on to this query.
* @static
*/
s.formatQueryString = function (data, query) {
if (data == null) {
throw new Error('You must specify data.');
}
var params = [];
for (var n in data) {
params.push(n + '=' + escape(data[n]));
}
if (query) {
params = params.concat(query);
}
return params.join('&');
};
/**
* A utility method that builds a file path using a source and a data object, and formats it into a new path.
* @method buildPath
* @param {String} src The source path to add values to.
* @param {Object} [data] Object used to append values to this request as a query string. Existing parameters on the
* path will be preserved.
* @returns {string} A formatted string that contains the path and the supplied parameters.
* @static
*/
s.buildPath = function (src, data) {
if (data == null) {
return src;
}
var query = [];
var idx = src.indexOf('?');
if (idx != -1) {
var q = src.slice(idx + 1);
query = query.concat(q.split('&'));
}
if (idx != -1) {
return src.slice(0, idx) + '?' + this.formatQueryString(data, query);
} else {
return src + '?' + this.formatQueryString(data, query);
}
};
/**
* @method isCrossDomain
* @param {LoadItem|Object} item A load item with a `src` property.
* @return {Boolean} If the load item is loading from a different domain than the current location.
* @static
*/
s.isCrossDomain = function (item) {
var target = document.createElement("a");
target.href = item.src;
var host = document.createElement("a");
host.href = location.href;
var crossdomain = (target.hostname != "") &&
(target.port != host.port ||
target.protocol != host.protocol ||
target.hostname != host.hostname);
return crossdomain;
};
/**
* @method isLocal
* @param {LoadItem|Object} item A load item with a `src` property
* @return {Boolean} If the load item is loading from the "file:" protocol. Assume that the host must be local as
* well.
* @static
*/
s.isLocal = function (item) {
var target = document.createElement("a");
target.href = item.src;
return target.hostname == "" && target.protocol == "file:";
};
/**
* Determine if a specific type should be loaded as a binary file. Currently, only images and items marked
* specifically as "binary" are loaded as binary. Note that audio is <b>not</b> a binary type, as we can not play
* back using an audio tag if it is loaded as binary. Plugins can change the item type to binary to ensure they get
* a binary result to work with. Binary files are loaded using XHR2. Types are defined as static constants on
* {{#crossLink "AbstractLoader"}}{{/crossLink}}.
* @method isBinary
* @param {String} type The item type.
* @return {Boolean} If the specified type is binary.
* @static
*/
s.isBinary = function (type) {
switch (type) {
case createjs.AbstractLoader.IMAGE:
case createjs.AbstractLoader.BINARY:
return true;
default:
return false;
}
};
/**
* Check if item is a valid HTMLImageElement
* @method isImageTag
* @param {Object} item
* @returns {Boolean}
* @static
*/
s.isImageTag = function(item) {
return item instanceof HTMLImageElement;
};
/**
* Check if item is a valid HTMLAudioElement
* @method isAudioTag
* @param {Object} item
* @returns {Boolean}
* @static
*/
s.isAudioTag = function(item) {
if (window.HTMLAudioElement) {
return item instanceof HTMLAudioElement;
} else {
return false;
}
};
/**
* Check if item is a valid HTMLVideoElement
* @method isVideoTag
* @param {Object} item
* @returns {Boolean}
* @static
*/
s.isVideoTag = function(item) {
if (window.HTMLVideoElement) {
return item instanceof HTMLVideoElement;
} else {
return false;
}
};
/**
* Determine if a specific type is a text-based asset, and should be loaded as UTF-8.
* @method isText
* @param {String} type The item type.
* @return {Boolean} If the specified type is text.
* @static
*/
s.isText = function (type) {
switch (type) {
case createjs.AbstractLoader.TEXT:
case createjs.AbstractLoader.JSON:
case createjs.AbstractLoader.MANIFEST:
case createjs.AbstractLoader.XML:
case createjs.AbstractLoader.CSS:
case createjs.AbstractLoader.SVG:
case createjs.AbstractLoader.JAVASCRIPT:
case createjs.AbstractLoader.SPRITESHEET:
return true;
default:
return false;
}
};
/**
* Determine the type of the object using common extensions. Note that the type can be passed in with the load item
* if it is an unusual extension.
* @method getTypeByExtension
* @param {String} extension The file extension to use to determine the load type.
* @return {String} The determined load type (for example, <code>AbstractLoader.IMAGE</code>). Will return `null` if
* the type can not be determined by the extension.
* @static
*/
s.getTypeByExtension = function (extension) {
if (extension == null) {
return createjs.AbstractLoader.TEXT;
}
switch (extension.toLowerCase()) {
case "jpeg":
case "jpg":
case "gif":
case "png":
case "webp":
case "bmp":
return createjs.AbstractLoader.IMAGE;
case "ogg":
case "mp3":
case "webm":
return createjs.AbstractLoader.SOUND;
case "mp4":
case "webm":
case "ts":
return createjs.AbstractLoader.VIDEO;
case "json":
return createjs.AbstractLoader.JSON;
case "xml":
return createjs.AbstractLoader.XML;
case "css":
return createjs.AbstractLoader.CSS;
case "js":
return createjs.AbstractLoader.JAVASCRIPT;
case 'svg':
return createjs.AbstractLoader.SVG;
default:
return createjs.AbstractLoader.TEXT;
}
};
createjs.RequestUtils = s;
}()); | }
// Extension
if (match = path.match(s.EXTENSION_PATT)) {
info.extension = match[1].toLowerCase(); | random_line_split |
aarch64.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use abi::call::{FnType, ArgType, Reg, RegKind, Uniform};
use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
-> Option<Uniform>
where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
arg.layout.homogeneous_aggregate(cx).and_then(|unit| {
let size = arg.layout.size;
// Ensure we have at most four uniquely addressable members.
if size > unit.size.checked_mul(4, cx).unwrap() {
return None;
}
let valid_unit = match unit.kind {
RegKind::Integer => false,
RegKind::Float => true,
RegKind::Vector => size.bits() == 64 || size.bits() == 128
};
if valid_unit {
Some(Uniform {
unit,
total: size
})
} else {
None
}
})
}
fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
|
fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
if !arg.layout.is_aggregate() {
arg.extend_integer_width_to(32);
return;
}
if let Some(uniform) = is_homogeneous_aggregate(cx, arg) {
arg.cast_to(uniform);
return;
}
let size = arg.layout.size;
let bits = size.bits();
if bits <= 128 {
let unit = if bits <= 8 {
Reg::i8()
} else if bits <= 16 {
Reg::i16()
} else if bits <= 32 {
Reg::i32()
} else {
Reg::i64()
};
arg.cast_to(Uniform {
unit,
total: size
});
return;
}
arg.make_indirect();
}
pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
if !fty.ret.is_ignore() {
classify_ret_ty(cx, &mut fty.ret);
}
for arg in &mut fty.args {
if arg.is_ignore() { continue; }
classify_arg_ty(cx, arg);
}
}
| {
if !ret.layout.is_aggregate() {
ret.extend_integer_width_to(32);
return;
}
if let Some(uniform) = is_homogeneous_aggregate(cx, ret) {
ret.cast_to(uniform);
return;
}
let size = ret.layout.size;
let bits = size.bits();
if bits <= 128 {
let unit = if bits <= 8 {
Reg::i8()
} else if bits <= 16 {
Reg::i16()
} else if bits <= 32 {
Reg::i32()
} else {
Reg::i64()
};
ret.cast_to(Uniform {
unit,
total: size
});
return;
}
ret.make_indirect();
} | identifier_body |
aarch64.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use abi::call::{FnType, ArgType, Reg, RegKind, Uniform};
use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods}; | C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
arg.layout.homogeneous_aggregate(cx).and_then(|unit| {
let size = arg.layout.size;
// Ensure we have at most four uniquely addressable members.
if size > unit.size.checked_mul(4, cx).unwrap() {
return None;
}
let valid_unit = match unit.kind {
RegKind::Integer => false,
RegKind::Float => true,
RegKind::Vector => size.bits() == 64 || size.bits() == 128
};
if valid_unit {
Some(Uniform {
unit,
total: size
})
} else {
None
}
})
}
fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
if !ret.layout.is_aggregate() {
ret.extend_integer_width_to(32);
return;
}
if let Some(uniform) = is_homogeneous_aggregate(cx, ret) {
ret.cast_to(uniform);
return;
}
let size = ret.layout.size;
let bits = size.bits();
if bits <= 128 {
let unit = if bits <= 8 {
Reg::i8()
} else if bits <= 16 {
Reg::i16()
} else if bits <= 32 {
Reg::i32()
} else {
Reg::i64()
};
ret.cast_to(Uniform {
unit,
total: size
});
return;
}
ret.make_indirect();
}
fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
if !arg.layout.is_aggregate() {
arg.extend_integer_width_to(32);
return;
}
if let Some(uniform) = is_homogeneous_aggregate(cx, arg) {
arg.cast_to(uniform);
return;
}
let size = arg.layout.size;
let bits = size.bits();
if bits <= 128 {
let unit = if bits <= 8 {
Reg::i8()
} else if bits <= 16 {
Reg::i16()
} else if bits <= 32 {
Reg::i32()
} else {
Reg::i64()
};
arg.cast_to(Uniform {
unit,
total: size
});
return;
}
arg.make_indirect();
}
pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
if !fty.ret.is_ignore() {
classify_ret_ty(cx, &mut fty.ret);
}
for arg in &mut fty.args {
if arg.is_ignore() { continue; }
classify_arg_ty(cx, arg);
}
} |
fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
-> Option<Uniform>
where Ty: TyLayoutMethods<'a, C> + Copy, | random_line_split |
aarch64.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use abi::call::{FnType, ArgType, Reg, RegKind, Uniform};
use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
-> Option<Uniform>
where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
arg.layout.homogeneous_aggregate(cx).and_then(|unit| {
let size = arg.layout.size;
// Ensure we have at most four uniquely addressable members.
if size > unit.size.checked_mul(4, cx).unwrap() {
return None;
}
let valid_unit = match unit.kind {
RegKind::Integer => false,
RegKind::Float => true,
RegKind::Vector => size.bits() == 64 || size.bits() == 128
};
if valid_unit {
Some(Uniform {
unit,
total: size
})
} else {
None
}
})
}
fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
if !ret.layout.is_aggregate() {
ret.extend_integer_width_to(32);
return;
}
if let Some(uniform) = is_homogeneous_aggregate(cx, ret) {
ret.cast_to(uniform);
return;
}
let size = ret.layout.size;
let bits = size.bits();
if bits <= 128 {
let unit = if bits <= 8 {
Reg::i8()
} else if bits <= 16 {
Reg::i16()
} else if bits <= 32 {
Reg::i32()
} else {
Reg::i64()
};
ret.cast_to(Uniform {
unit,
total: size
});
return;
}
ret.make_indirect();
}
fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
if !arg.layout.is_aggregate() {
arg.extend_integer_width_to(32);
return;
}
if let Some(uniform) = is_homogeneous_aggregate(cx, arg) {
arg.cast_to(uniform);
return;
}
let size = arg.layout.size;
let bits = size.bits();
if bits <= 128 {
let unit = if bits <= 8 {
Reg::i8()
} else if bits <= 16 {
Reg::i16()
} else if bits <= 32 {
Reg::i32()
} else {
Reg::i64()
};
arg.cast_to(Uniform {
unit,
total: size
});
return;
}
arg.make_indirect();
}
pub fn | <'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
if !fty.ret.is_ignore() {
classify_ret_ty(cx, &mut fty.ret);
}
for arg in &mut fty.args {
if arg.is_ignore() { continue; }
classify_arg_ty(cx, arg);
}
}
| compute_abi_info | identifier_name |
categories.component.ts | import {Component, OnInit} from '@angular/core';
import {Category} from './models/category';
import {CategoryService} from './services/category.service';
import {Observable} from 'rxjs/Observable';
@Component({
selector: 'app-categories',
template: `
<h3>Categories</h3>
<input #searchBox class="search" type="text" autofocus placeholder="Search Categories" (keyup)="search(searchBox.value)" (keyup.esc)="searchBox.value = null; search(null)"/>
<div class="action-bar">
<button class="button right" (click)="searchBox.value = null; ngOnInit()">Refresh</button>
<a class="button" routerLink="/categories/add">Add Category</a>
</div>
<div class="alert alert-err" *ngIf="error">Something went wrong</div>
<table>
<thead>
<tr>
<th>Name</th>
<th width="20%">Rules</th>
<th style="width: 30px"></th>
</tr>
</thead>
<tbody>
<tr class="category-row" *ngFor="let c of categories">
<td><a [routerLink]="['/categories/edit', c.categoryId]">{{c.categoryName}}</a></td>
<td>{{c.numberOfRules}}</td>
<td><span class="fa fa-times" aria-hidden="true" (click)="deleteCategory(c.categoryId)" style="cursor: pointer"></span></td>
</tr>
</tbody>
</table>
`
})
export class CategoriesComponent implements OnInit {
error = false;
categories: Category[];
_categories: Category[];
search(searchTerm: string): void {
this.categories = !searchTerm ? this._categories
: this._categories.filter(c => c.categoryName.includes(searchTerm));
}
constructor(private categoryService: CategoryService) {}
| (categoryId: number): void {
this.categoryService.deleteCategory(categoryId)
.subscribe(
() => this.loadCategories(),
() => this.error = true);
}
loadCategories(): void {
this.categoryService.get()
.subscribe(cats => {
this._categories = cats;
this.search(null);
});
}
ngOnInit(): void {
this.loadCategories();
}
}
| deleteCategory | identifier_name |
categories.component.ts | import {Component, OnInit} from '@angular/core';
import {Category} from './models/category';
import {CategoryService} from './services/category.service';
import {Observable} from 'rxjs/Observable';
@Component({
selector: 'app-categories',
template: `
<h3>Categories</h3>
<input #searchBox class="search" type="text" autofocus placeholder="Search Categories" (keyup)="search(searchBox.value)" (keyup.esc)="searchBox.value = null; search(null)"/>
<div class="action-bar">
<button class="button right" (click)="searchBox.value = null; ngOnInit()">Refresh</button>
<a class="button" routerLink="/categories/add">Add Category</a>
</div>
<div class="alert alert-err" *ngIf="error">Something went wrong</div>
<table>
<thead>
<tr>
<th>Name</th>
<th width="20%">Rules</th>
<th style="width: 30px"></th>
</tr>
</thead>
<tbody>
<tr class="category-row" *ngFor="let c of categories">
<td><a [routerLink]="['/categories/edit', c.categoryId]">{{c.categoryName}}</a></td>
<td>{{c.numberOfRules}}</td>
<td><span class="fa fa-times" aria-hidden="true" (click)="deleteCategory(c.categoryId)" style="cursor: pointer"></span></td>
</tr>
</tbody> | </table>
`
})
export class CategoriesComponent implements OnInit {
error = false;
categories: Category[];
_categories: Category[];
search(searchTerm: string): void {
this.categories = !searchTerm ? this._categories
: this._categories.filter(c => c.categoryName.includes(searchTerm));
}
constructor(private categoryService: CategoryService) {}
deleteCategory(categoryId: number): void {
this.categoryService.deleteCategory(categoryId)
.subscribe(
() => this.loadCategories(),
() => this.error = true);
}
loadCategories(): void {
this.categoryService.get()
.subscribe(cats => {
this._categories = cats;
this.search(null);
});
}
ngOnInit(): void {
this.loadCategories();
}
} | random_line_split |
|
categories.component.ts | import {Component, OnInit} from '@angular/core';
import {Category} from './models/category';
import {CategoryService} from './services/category.service';
import {Observable} from 'rxjs/Observable';
@Component({
selector: 'app-categories',
template: `
<h3>Categories</h3>
<input #searchBox class="search" type="text" autofocus placeholder="Search Categories" (keyup)="search(searchBox.value)" (keyup.esc)="searchBox.value = null; search(null)"/>
<div class="action-bar">
<button class="button right" (click)="searchBox.value = null; ngOnInit()">Refresh</button>
<a class="button" routerLink="/categories/add">Add Category</a>
</div>
<div class="alert alert-err" *ngIf="error">Something went wrong</div>
<table>
<thead>
<tr>
<th>Name</th>
<th width="20%">Rules</th>
<th style="width: 30px"></th>
</tr>
</thead>
<tbody>
<tr class="category-row" *ngFor="let c of categories">
<td><a [routerLink]="['/categories/edit', c.categoryId]">{{c.categoryName}}</a></td>
<td>{{c.numberOfRules}}</td>
<td><span class="fa fa-times" aria-hidden="true" (click)="deleteCategory(c.categoryId)" style="cursor: pointer"></span></td>
</tr>
</tbody>
</table>
`
})
export class CategoriesComponent implements OnInit {
error = false;
categories: Category[];
_categories: Category[];
search(searchTerm: string): void |
constructor(private categoryService: CategoryService) {}
deleteCategory(categoryId: number): void {
this.categoryService.deleteCategory(categoryId)
.subscribe(
() => this.loadCategories(),
() => this.error = true);
}
loadCategories(): void {
this.categoryService.get()
.subscribe(cats => {
this._categories = cats;
this.search(null);
});
}
ngOnInit(): void {
this.loadCategories();
}
}
| {
this.categories = !searchTerm ? this._categories
: this._categories.filter(c => c.categoryName.includes(searchTerm));
} | identifier_body |
test_ptp_clock_cdc_64.py | #!/usr/bin/env python
"""
Copyright (c) 2019 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
import ptp
module = 'ptp_clock_cdc'
testbench = 'test_%s_64' % module
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("%s.v" % testbench)
src = ' '.join(srcs)
build_cmd = "iverilog -o %s.vvp %s" % (testbench, src)
def bench():
# Parameters
TS_WIDTH = 64
NS_WIDTH = 4
FNS_WIDTH = 16
INPUT_PERIOD_NS = 0x6
INPUT_PERIOD_FNS = 0x6666
OUTPUT_PERIOD_NS = 0x6
OUTPUT_PERIOD_FNS = 0x6666
USE_SAMPLE_CLOCK = 1
LOG_FIFO_DEPTH = 3
LOG_RATE = 3
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[8:])
input_clk = Signal(bool(0))
input_rst = Signal(bool(0))
output_clk = Signal(bool(0))
output_rst = Signal(bool(0))
sample_clk = Signal(bool(0))
input_ts = Signal(intbv(0)[96:])
# Outputs
output_ts = Signal(intbv(0)[96:])
output_ts_step = Signal(bool(0))
output_pps = Signal(bool(0))
# PTP clock
ptp_clock = ptp.PtpClock(period_ns=INPUT_PERIOD_NS, period_fns=INPUT_PERIOD_FNS)
ptp_logic = ptp_clock.create_logic(
input_clk,
input_rst,
ts_64=input_ts
)
# DUT
if os.system(build_cmd):
raise Exception("Error running build command")
dut = Cosimulation(
"vvp -m myhdl %s.vvp -lxt2" % testbench,
clk=clk,
rst=rst,
current_test=current_test,
input_clk=input_clk,
input_rst=input_rst,
output_clk=output_clk,
output_rst=output_rst,
sample_clk=sample_clk,
input_ts=input_ts,
output_ts=output_ts,
output_ts_step=output_ts_step,
output_pps=output_pps
)
@always(delay(3200))
def clkgen():
clk.next = not clk
input_clk.next = not input_clk
output_clk_hp = Signal(int(3200))
@instance
def clkgen_output():
while True:
yield delay(int(output_clk_hp))
output_clk.next = not output_clk
@always(delay(5000))
def clkgen_sample():
sample_clk.next = not sample_clk
@instance
def check():
yield delay(100000)
yield clk.posedge
rst.next = 1
input_rst.next = 1
output_rst.next = 1
yield clk.posedge
yield clk.posedge
yield clk.posedge
input_rst.next = 0
output_rst.next = 0
yield clk.posedge
yield delay(100000)
yield clk.posedge
# testbench stimulus
yield clk.posedge
print("test 1: Same clock speed")
current_test.next = 1
yield clk.posedge
for i in range(20000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
yield clk.posedge
print("test 2: Slightly faster")
current_test.next = 2
output_clk_hp.next = 3100
yield clk.posedge
for i in range(20000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
yield clk.posedge
print("test 3: Slightly slower")
current_test.next = 3
output_clk_hp.next = 3300
yield clk.posedge
for i in range(20000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
yield clk.posedge
print("test 4: Significantly faster")
current_test.next = 4
output_clk_hp.next = 2000
yield clk.posedge
for i in range(20000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
yield clk.posedge
print("test 5: Significantly slower")
current_test.next = 5
output_clk_hp.next = 5000
yield clk.posedge
for i in range(30000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000) | raise StopSimulation
return instances()
def test_bench():
sim = Simulation(bench())
sim.run()
if __name__ == '__main__':
print("Running test...")
test_bench() | random_line_split |
|
test_ptp_clock_cdc_64.py | #!/usr/bin/env python
"""
Copyright (c) 2019 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
import ptp
# Module under test and the derived testbench name (64-bit timestamp variant).
module = 'ptp_clock_cdc'
testbench = 'test_%s_64' % module

# Verilog sources compiled into the cosimulation image.
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("%s.v" % testbench)
src = ' '.join(srcs)

# Icarus Verilog compile command; executed by bench() before starting vvp.
build_cmd = "iverilog -o %s.vvp %s" % (testbench, src)
def bench():
    """MyHDL/Icarus cosimulation bench for ptp_clock_cdc (64-bit timestamps).

    A PtpClock model drives a timestamp in the input clock domain; the DUT
    transfers it into the output clock domain. The output clock period is
    then swept (same / slightly faster / slightly slower / much faster /
    much slower) and after each settling loop the input and output
    timestamps must agree to within 10 ns.
    """
    # Parameters (mirror the Verilog module parameters; the .v testbench
    # wrapper is assumed to use the same values -- TODO confirm)
    TS_WIDTH = 64
    NS_WIDTH = 4
    FNS_WIDTH = 16
    INPUT_PERIOD_NS = 0x6
    INPUT_PERIOD_FNS = 0x6666
    OUTPUT_PERIOD_NS = 0x6
    OUTPUT_PERIOD_FNS = 0x6666
    USE_SAMPLE_CLOCK = 1
    LOG_FIFO_DEPTH = 3
    LOG_RATE = 3
    # Inputs
    clk = Signal(bool(0))
    rst = Signal(bool(0))
    current_test = Signal(intbv(0)[8:])
    input_clk = Signal(bool(0))
    input_rst = Signal(bool(0))
    output_clk = Signal(bool(0))
    output_rst = Signal(bool(0))
    sample_clk = Signal(bool(0))
    # NOTE(review): 96-bit signal although TS_WIDTH is 64 -- presumably sized
    # to the widest port of the shared .v wrapper; confirm against the RTL.
    input_ts = Signal(intbv(0)[96:])
    # Outputs
    output_ts = Signal(intbv(0)[96:])
    output_ts_step = Signal(bool(0))
    output_pps = Signal(bool(0))
    # PTP clock model: free-runs in the input clock domain and drives input_ts.
    ptp_clock = ptp.PtpClock(period_ns=INPUT_PERIOD_NS, period_fns=INPUT_PERIOD_FNS)
    ptp_logic = ptp_clock.create_logic(
        input_clk,
        input_rst,
        ts_64=input_ts
    )
    # DUT: compile the Verilog sources, then attach the cosimulation.
    if os.system(build_cmd):
        raise Exception("Error running build command")
    dut = Cosimulation(
        "vvp -m myhdl %s.vvp -lxt2" % testbench,
        clk=clk,
        rst=rst,
        current_test=current_test,
        input_clk=input_clk,
        input_rst=input_rst,
        output_clk=output_clk,
        output_rst=output_rst,
        sample_clk=sample_clk,
        input_ts=input_ts,
        output_ts=output_ts,
        output_ts_step=output_ts_step,
        output_pps=output_pps
    )

    # System and input clocks share one generator (half-period 3200 ps).
    @always(delay(3200))
    def clkgen():
        clk.next = not clk
        input_clk.next = not input_clk

    # Output clock half-period is a Signal so the stimulus can retune it
    # mid-simulation to emulate frequency offset between the domains.
    output_clk_hp = Signal(int(3200))

    @instance
    def clkgen_output():
        while True:
            yield delay(int(output_clk_hp))
            output_clk.next = not output_clk

    # Sample clock used by the CDC's frequency-measurement logic.
    @always(delay(5000))
    def clkgen_sample():
        sample_clk.next = not sample_clk

    @instance
    def check():
        # Reset both domains, then release and let things settle.
        yield delay(100000)
        yield clk.posedge
        rst.next = 1
        input_rst.next = 1
        output_rst.next = 1
        yield clk.posedge
        yield clk.posedge
        yield clk.posedge
        input_rst.next = 0
        output_rst.next = 0
        yield clk.posedge
        yield delay(100000)
        yield clk.posedge
        # testbench stimulus
        # Each test: (re)tune the output clock, run thousands of cycles so
        # the CDC can lock, then compare timestamps. Values are fixed-point
        # nanoseconds with 16 fractional bits, so /2**16*1e-9 yields seconds;
        # the tolerance 1e-8 s corresponds to 10 ns.
        yield clk.posedge
        print("test 1: Same clock speed")
        current_test.next = 1
        yield clk.posedge
        for i in range(20000):
            yield clk.posedge
        input_stop_ts = input_ts/2**16*1e-9
        output_stop_ts = output_ts/2**16*1e-9
        print(input_stop_ts-output_stop_ts)
        assert abs(input_stop_ts-output_stop_ts) < 1e-8
        yield delay(100000)
        yield clk.posedge
        print("test 2: Slightly faster")
        current_test.next = 2
        output_clk_hp.next = 3100
        yield clk.posedge
        for i in range(20000):
            yield clk.posedge
        input_stop_ts = input_ts/2**16*1e-9
        output_stop_ts = output_ts/2**16*1e-9
        print(input_stop_ts-output_stop_ts)
        assert abs(input_stop_ts-output_stop_ts) < 1e-8
        yield delay(100000)
        yield clk.posedge
        print("test 3: Slightly slower")
        current_test.next = 3
        output_clk_hp.next = 3300
        yield clk.posedge
        for i in range(20000):
            yield clk.posedge
        input_stop_ts = input_ts/2**16*1e-9
        output_stop_ts = output_ts/2**16*1e-9
        print(input_stop_ts-output_stop_ts)
        assert abs(input_stop_ts-output_stop_ts) < 1e-8
        yield delay(100000)
        yield clk.posedge
        print("test 4: Significantly faster")
        current_test.next = 4
        output_clk_hp.next = 2000
        yield clk.posedge
        for i in range(20000):
            yield clk.posedge
        input_stop_ts = input_ts/2**16*1e-9
        output_stop_ts = output_ts/2**16*1e-9
        print(input_stop_ts-output_stop_ts)
        assert abs(input_stop_ts-output_stop_ts) < 1e-8
        yield delay(100000)
        yield clk.posedge
        print("test 5: Significantly slower")
        current_test.next = 5
        output_clk_hp.next = 5000
        yield clk.posedge
        # Slower output clock needs more iterations to converge.
        for i in range(30000):
            yield clk.posedge
        input_stop_ts = input_ts/2**16*1e-9
        output_stop_ts = output_ts/2**16*1e-9
        print(input_stop_ts-output_stop_ts)
        assert abs(input_stop_ts-output_stop_ts) < 1e-8
        yield delay(100000)
        raise StopSimulation

    # Hand every local generator/instance (model, DUT, clocks, checker)
    # to the Simulation object.
    return instances()
def test_bench():
    """Build the benchmark and run the MyHDL simulation to completion."""
    Simulation(bench()).run()
# Allow running this testbench directly as a script (outside pytest/nose).
if __name__ == '__main__':
    print("Running test...")
    test_bench()
| check | identifier_name |
test_ptp_clock_cdc_64.py | #!/usr/bin/env python
"""
Copyright (c) 2019 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
import ptp
module = 'ptp_clock_cdc'
testbench = 'test_%s_64' % module
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("%s.v" % testbench)
src = ' '.join(srcs)
build_cmd = "iverilog -o %s.vvp %s" % (testbench, src)
def bench():
# Parameters
TS_WIDTH = 64
NS_WIDTH = 4
FNS_WIDTH = 16
INPUT_PERIOD_NS = 0x6
INPUT_PERIOD_FNS = 0x6666
OUTPUT_PERIOD_NS = 0x6
OUTPUT_PERIOD_FNS = 0x6666
USE_SAMPLE_CLOCK = 1
LOG_FIFO_DEPTH = 3
LOG_RATE = 3
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[8:])
input_clk = Signal(bool(0))
input_rst = Signal(bool(0))
output_clk = Signal(bool(0))
output_rst = Signal(bool(0))
sample_clk = Signal(bool(0))
input_ts = Signal(intbv(0)[96:])
# Outputs
output_ts = Signal(intbv(0)[96:])
output_ts_step = Signal(bool(0))
output_pps = Signal(bool(0))
# PTP clock
ptp_clock = ptp.PtpClock(period_ns=INPUT_PERIOD_NS, period_fns=INPUT_PERIOD_FNS)
ptp_logic = ptp_clock.create_logic(
input_clk,
input_rst,
ts_64=input_ts
)
# DUT
if os.system(build_cmd):
raise Exception("Error running build command")
dut = Cosimulation(
"vvp -m myhdl %s.vvp -lxt2" % testbench,
clk=clk,
rst=rst,
current_test=current_test,
input_clk=input_clk,
input_rst=input_rst,
output_clk=output_clk,
output_rst=output_rst,
sample_clk=sample_clk,
input_ts=input_ts,
output_ts=output_ts,
output_ts_step=output_ts_step,
output_pps=output_pps
)
@always(delay(3200))
def clkgen():
clk.next = not clk
input_clk.next = not input_clk
output_clk_hp = Signal(int(3200))
@instance
def clkgen_output():
while True:
yield delay(int(output_clk_hp))
output_clk.next = not output_clk
@always(delay(5000))
def clkgen_sample():
sample_clk.next = not sample_clk
@instance
def check():
|
return instances()
def test_bench():
sim = Simulation(bench())
sim.run()
if __name__ == '__main__':
print("Running test...")
test_bench()
| yield delay(100000)
yield clk.posedge
rst.next = 1
input_rst.next = 1
output_rst.next = 1
yield clk.posedge
yield clk.posedge
yield clk.posedge
input_rst.next = 0
output_rst.next = 0
yield clk.posedge
yield delay(100000)
yield clk.posedge
# testbench stimulus
yield clk.posedge
print("test 1: Same clock speed")
current_test.next = 1
yield clk.posedge
for i in range(20000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
yield clk.posedge
print("test 2: Slightly faster")
current_test.next = 2
output_clk_hp.next = 3100
yield clk.posedge
for i in range(20000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
yield clk.posedge
print("test 3: Slightly slower")
current_test.next = 3
output_clk_hp.next = 3300
yield clk.posedge
for i in range(20000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
yield clk.posedge
print("test 4: Significantly faster")
current_test.next = 4
output_clk_hp.next = 2000
yield clk.posedge
for i in range(20000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
yield clk.posedge
print("test 5: Significantly slower")
current_test.next = 5
output_clk_hp.next = 5000
yield clk.posedge
for i in range(30000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
raise StopSimulation | identifier_body |
test_ptp_clock_cdc_64.py | #!/usr/bin/env python
"""
Copyright (c) 2019 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
import ptp
module = 'ptp_clock_cdc'
testbench = 'test_%s_64' % module
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("%s.v" % testbench)
src = ' '.join(srcs)
build_cmd = "iverilog -o %s.vvp %s" % (testbench, src)
def bench():
# Parameters
TS_WIDTH = 64
NS_WIDTH = 4
FNS_WIDTH = 16
INPUT_PERIOD_NS = 0x6
INPUT_PERIOD_FNS = 0x6666
OUTPUT_PERIOD_NS = 0x6
OUTPUT_PERIOD_FNS = 0x6666
USE_SAMPLE_CLOCK = 1
LOG_FIFO_DEPTH = 3
LOG_RATE = 3
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[8:])
input_clk = Signal(bool(0))
input_rst = Signal(bool(0))
output_clk = Signal(bool(0))
output_rst = Signal(bool(0))
sample_clk = Signal(bool(0))
input_ts = Signal(intbv(0)[96:])
# Outputs
output_ts = Signal(intbv(0)[96:])
output_ts_step = Signal(bool(0))
output_pps = Signal(bool(0))
# PTP clock
ptp_clock = ptp.PtpClock(period_ns=INPUT_PERIOD_NS, period_fns=INPUT_PERIOD_FNS)
ptp_logic = ptp_clock.create_logic(
input_clk,
input_rst,
ts_64=input_ts
)
# DUT
if os.system(build_cmd):
raise Exception("Error running build command")
dut = Cosimulation(
"vvp -m myhdl %s.vvp -lxt2" % testbench,
clk=clk,
rst=rst,
current_test=current_test,
input_clk=input_clk,
input_rst=input_rst,
output_clk=output_clk,
output_rst=output_rst,
sample_clk=sample_clk,
input_ts=input_ts,
output_ts=output_ts,
output_ts_step=output_ts_step,
output_pps=output_pps
)
@always(delay(3200))
def clkgen():
clk.next = not clk
input_clk.next = not input_clk
output_clk_hp = Signal(int(3200))
@instance
def clkgen_output():
while True:
yield delay(int(output_clk_hp))
output_clk.next = not output_clk
@always(delay(5000))
def clkgen_sample():
sample_clk.next = not sample_clk
@instance
def check():
yield delay(100000)
yield clk.posedge
rst.next = 1
input_rst.next = 1
output_rst.next = 1
yield clk.posedge
yield clk.posedge
yield clk.posedge
input_rst.next = 0
output_rst.next = 0
yield clk.posedge
yield delay(100000)
yield clk.posedge
# testbench stimulus
yield clk.posedge
print("test 1: Same clock speed")
current_test.next = 1
yield clk.posedge
for i in range(20000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
yield clk.posedge
print("test 2: Slightly faster")
current_test.next = 2
output_clk_hp.next = 3100
yield clk.posedge
for i in range(20000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
yield clk.posedge
print("test 3: Slightly slower")
current_test.next = 3
output_clk_hp.next = 3300
yield clk.posedge
for i in range(20000):
|
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
yield clk.posedge
print("test 4: Significantly faster")
current_test.next = 4
output_clk_hp.next = 2000
yield clk.posedge
for i in range(20000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
yield clk.posedge
print("test 5: Significantly slower")
current_test.next = 5
output_clk_hp.next = 5000
yield clk.posedge
for i in range(30000):
yield clk.posedge
input_stop_ts = input_ts/2**16*1e-9
output_stop_ts = output_ts/2**16*1e-9
print(input_stop_ts-output_stop_ts)
assert abs(input_stop_ts-output_stop_ts) < 1e-8
yield delay(100000)
raise StopSimulation
return instances()
def test_bench():
sim = Simulation(bench())
sim.run()
if __name__ == '__main__':
print("Running test...")
test_bench()
| yield clk.posedge | conditional_block |
http.py | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for creating HTTP health checks."""
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import health_checks_utils
from googlecloudsdk.calliope import base
@base.ReleaseTracks(base.ReleaseTrack.GA, base.ReleaseTrack.BETA)
class Create(base_classes.BaseAsyncCreator):
"""Create a HTTP health check to monitor load balanced instances."""
@staticmethod
def Args(parser):
health_checks_utils.AddHttpRelatedCreationArgs(parser)
health_checks_utils.AddProtocolAgnosticCreationArgs(parser, 'HTTP')
@property
def service(self):
|
@property
def method(self):
return 'Insert'
@property
def resource_type(self):
return 'healthChecks'
def CreateRequests(self, args):
"""Returns the request necessary for adding the health check."""
health_check_ref = self.CreateGlobalReference(
args.name, resource_type='healthChecks')
proxy_header = self.messages.HTTPHealthCheck.ProxyHeaderValueValuesEnum(
args.proxy_header)
request = self.messages.ComputeHealthChecksInsertRequest(
healthCheck=self.messages.HealthCheck(
name=health_check_ref.Name(),
description=args.description,
type=self.messages.HealthCheck.TypeValueValuesEnum.HTTP,
httpHealthCheck=self.messages.HTTPHealthCheck(
host=args.host,
port=args.port,
portName=args.port_name,
requestPath=args.request_path,
proxyHeader=proxy_header),
checkIntervalSec=args.check_interval,
timeoutSec=args.timeout,
healthyThreshold=args.healthy_threshold,
unhealthyThreshold=args.unhealthy_threshold,
),
project=self.project)
return [request]
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class CreateAlpha(Create):
"""Create a HTTP health check to monitor load balanced instances."""
@staticmethod
def Args(parser):
Create.Args(parser)
health_checks_utils.AddHttpRelatedResponseArg(parser)
def CreateRequests(self, args):
"""Returns the request necessary for adding the health check."""
requests = super(CreateAlpha, self).CreateRequests(args)
requests[0].healthCheck.httpHealthCheck.response = args.response
return requests
Create.detailed_help = {
'brief': ('Create a HTTP health check to monitor load balanced instances'),
'DESCRIPTION': """\
*{command}* is used to create a HTTP health check. HTTP health checks
monitor instances in a load balancer controlled by a target pool. All
arguments to the command are optional except for the name of the health
check. For more information on load balancing, see
[](https://cloud.google.com/compute/docs/load-balancing-and-autoscaling/)
""",
}
| return self.compute.healthChecks | identifier_body |
http.py | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for creating HTTP health checks."""
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import health_checks_utils
from googlecloudsdk.calliope import base
@base.ReleaseTracks(base.ReleaseTrack.GA, base.ReleaseTrack.BETA)
class Create(base_classes.BaseAsyncCreator): | """Create a HTTP health check to monitor load balanced instances."""
  @staticmethod
  def Args(parser):
    """Registers HTTP-specific and protocol-agnostic creation flags."""
    health_checks_utils.AddHttpRelatedCreationArgs(parser)
    health_checks_utils.AddProtocolAgnosticCreationArgs(parser, 'HTTP')
  @property
  def service(self):
    # Compute API service that handles health-check resources.
    return self.compute.healthChecks

  @property
  def method(self):
    # API method name invoked by the base async-creator machinery.
    return 'Insert'

  @property
  def resource_type(self):
    # Collection name used for resource reference construction.
    return 'healthChecks'
def CreateRequests(self, args):
"""Returns the request necessary for adding the health check."""
health_check_ref = self.CreateGlobalReference(
args.name, resource_type='healthChecks')
proxy_header = self.messages.HTTPHealthCheck.ProxyHeaderValueValuesEnum(
args.proxy_header)
request = self.messages.ComputeHealthChecksInsertRequest(
healthCheck=self.messages.HealthCheck(
name=health_check_ref.Name(),
description=args.description,
type=self.messages.HealthCheck.TypeValueValuesEnum.HTTP,
httpHealthCheck=self.messages.HTTPHealthCheck(
host=args.host,
port=args.port,
portName=args.port_name,
requestPath=args.request_path,
proxyHeader=proxy_header),
checkIntervalSec=args.check_interval,
timeoutSec=args.timeout,
healthyThreshold=args.healthy_threshold,
unhealthyThreshold=args.unhealthy_threshold,
),
project=self.project)
return [request]
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class CreateAlpha(Create):
  """Create a HTTP health check to monitor load balanced instances."""

  @staticmethod
  def Args(parser):
    # Alpha adds the expected-response flag on top of the GA/beta flags.
    Create.Args(parser)
    health_checks_utils.AddHttpRelatedResponseArg(parser)

  def CreateRequests(self, args):
    """Returns the request necessary for adding the health check."""
    # Build the GA request, then set the alpha-only expected-response field.
    requests = super(CreateAlpha, self).CreateRequests(args)
    requests[0].healthCheck.httpHealthCheck.response = args.response
    return requests
Create.detailed_help = {
'brief': ('Create a HTTP health check to monitor load balanced instances'),
'DESCRIPTION': """\
*{command}* is used to create a HTTP health check. HTTP health checks
monitor instances in a load balancer controlled by a target pool. All
arguments to the command are optional except for the name of the health
check. For more information on load balancing, see
[](https://cloud.google.com/compute/docs/load-balancing-and-autoscaling/)
""",
} | random_line_split |
|
http.py | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for creating HTTP health checks."""
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import health_checks_utils
from googlecloudsdk.calliope import base
@base.ReleaseTracks(base.ReleaseTrack.GA, base.ReleaseTrack.BETA)
class Create(base_classes.BaseAsyncCreator):
"""Create a HTTP health check to monitor load balanced instances."""
@staticmethod
def Args(parser):
health_checks_utils.AddHttpRelatedCreationArgs(parser)
health_checks_utils.AddProtocolAgnosticCreationArgs(parser, 'HTTP')
@property
def service(self):
return self.compute.healthChecks
@property
def method(self):
return 'Insert'
@property
def resource_type(self):
return 'healthChecks'
def CreateRequests(self, args):
"""Returns the request necessary for adding the health check."""
health_check_ref = self.CreateGlobalReference(
args.name, resource_type='healthChecks')
proxy_header = self.messages.HTTPHealthCheck.ProxyHeaderValueValuesEnum(
args.proxy_header)
request = self.messages.ComputeHealthChecksInsertRequest(
healthCheck=self.messages.HealthCheck(
name=health_check_ref.Name(),
description=args.description,
type=self.messages.HealthCheck.TypeValueValuesEnum.HTTP,
httpHealthCheck=self.messages.HTTPHealthCheck(
host=args.host,
port=args.port,
portName=args.port_name,
requestPath=args.request_path,
proxyHeader=proxy_header),
checkIntervalSec=args.check_interval,
timeoutSec=args.timeout,
healthyThreshold=args.healthy_threshold,
unhealthyThreshold=args.unhealthy_threshold,
),
project=self.project)
return [request]
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class | (Create):
"""Create a HTTP health check to monitor load balanced instances."""
@staticmethod
def Args(parser):
Create.Args(parser)
health_checks_utils.AddHttpRelatedResponseArg(parser)
def CreateRequests(self, args):
"""Returns the request necessary for adding the health check."""
requests = super(CreateAlpha, self).CreateRequests(args)
requests[0].healthCheck.httpHealthCheck.response = args.response
return requests
Create.detailed_help = {
'brief': ('Create a HTTP health check to monitor load balanced instances'),
'DESCRIPTION': """\
*{command}* is used to create a HTTP health check. HTTP health checks
monitor instances in a load balancer controlled by a target pool. All
arguments to the command are optional except for the name of the health
check. For more information on load balancing, see
[](https://cloud.google.com/compute/docs/load-balancing-and-autoscaling/)
""",
}
| CreateAlpha | identifier_name |
debuggerTable.tsx | import * as React from "react";
export interface DebuggerTableProps {
header: string;
frozen?: boolean;
}
export class DebuggerTable extends React.Component<DebuggerTableProps> {
| () {
return <div className="ui varExplorer">
<div className="ui variableTableHeader">
{this.props.header}
</div>
<div className={`ui segment debugvariables ${this.props.frozen ? "frozen" : ""} ui collapsing basic striped table`}>
{this.props.children}
</div>
</div>
}
}
export interface DebuggerTableRowProps {
leftText: string;
rightText: string;
refID?: string | number;
icon?: string;
rightTitle?: string
rightClass?: string;
leftTitle?: string;
leftClass?: string;
depth?: number;
rowClass?: string;
onClick?: (e: React.SyntheticEvent<HTMLDivElement>, component: DebuggerTableRow) => void;
}
export class DebuggerTableRow extends React.Component<DebuggerTableRowProps> {
    // Renders one name/value row. `depth` indents the name to show nesting;
    // when no icon is given, an "invisible" icon keeps column alignment.
    render() {
        return <div role="listitem" className={`item ${this.props.rowClass || ""}`} onClick={this.props.onClick ? this.clickHandler : undefined}>
            <div className="variableAndValue">
                <div className={`variable varname ${this.props.leftClass || ""}`} title={this.props.leftTitle} style={this.props.depth ? { marginLeft: (this.props.depth * 0.75) + "em" } : undefined}>
                    { <i className={`ui icon small ${this.props.icon || "invisible"}`} /> }
                    <span>{this.props.leftText}</span>
                </div>
                <div className="variable detail" style={{ padding: 0.2 }} title={this.props.rightTitle}>
                    <span className={`varval ${this.props.rightClass || ""}`}>{this.props.rightText}</span>
                </div>
            </div>
        </div>
    }
    // Arrow-function property keeps `this` bound when used as a DOM handler;
    // forwards the event plus this row instance to the optional callback.
    protected clickHandler = (e: React.SyntheticEvent<HTMLDivElement>) => {
        if (this.props.onClick) this.props.onClick(e, this);
    }
} | render | identifier_name |
debuggerTable.tsx | import * as React from "react";
/** Props for the debugger variable-table container. */
export interface DebuggerTableProps {
    /** Title text shown above the table. */
    header: string;
    /** When true, the table is rendered with the "frozen" style (values not live). */
    frozen?: boolean;
}
/** Titled container for debugger variable rows (children are the rows). */
export class DebuggerTable extends React.Component<DebuggerTableProps> {
    render() {
        return <div className="ui varExplorer">
            <div className="ui variableTableHeader">
                {this.props.header}
            </div>
            <div className={`ui segment debugvariables ${this.props.frozen ? "frozen" : ""} ui collapsing basic striped table`}>
                {this.props.children}
            </div>
        </div>
    }
}
/** Props for a single name/value row in the debugger table. */
export interface DebuggerTableRowProps {
    /** Text for the left (name) column. */
    leftText: string;
    /** Text for the right (value) column. */
    rightText: string;
    /** Optional reference id for the row's variable -- TODO confirm producer. */
    refID?: string | number;
    /** Optional icon class rendered before the name. */
    icon?: string;
    /** Tooltip for the value column. */
    rightTitle?: string
    /** Extra CSS class for the value text. */
    rightClass?: string;
    /** Tooltip for the name column. */
    leftTitle?: string;
    /** Extra CSS class for the name text. */
    leftClass?: string;
    /** Nesting depth; each level indents the name by 0.75em. */
    depth?: number;
    /** Extra CSS class for the whole row. */
    rowClass?: string;
    /** Invoked with the click event and the row component instance. */
    onClick?: (e: React.SyntheticEvent<HTMLDivElement>, component: DebuggerTableRow) => void;
}
export class DebuggerTableRow extends React.Component<DebuggerTableRowProps> {
render() | onClick={this.props.onClick ? this.clickHandler : undefined}>
<div className="variableAndValue">
<div className={`variable varname ${this.props.leftClass || ""}`} title={this.props.leftTitle} style={this.props.depth ? { marginLeft: (this.props.depth * 0.75) + "em" } : undefined}>
{ <i className={`ui icon small ${this.props.icon || "invisible"}`} /> }
<span>{this.props.leftText}</span>
</div>
<div className="variable detail" style={{ padding: 0.2 }} title={this.props.rightTitle}>
<span className={`varval ${this.props.rightClass || ""}`}>{this.props.rightText}</span>
</div>
</div>
</div>
}
protected clickHandler = (e: React.SyntheticEvent<HTMLDivElement>) => {
if (this.props.onClick) this.props.onClick(e, this);
}
} | {
return <div role="listitem" className={`item ${this.props.rowClass || ""}`} | identifier_body |
debuggerTable.tsx | import * as React from "react";
export interface DebuggerTableProps {
header: string;
frozen?: boolean;
}
export class DebuggerTable extends React.Component<DebuggerTableProps> {
render() {
return <div className="ui varExplorer">
<div className="ui variableTableHeader">
{this.props.header}
</div>
<div className={`ui segment debugvariables ${this.props.frozen ? "frozen" : ""} ui collapsing basic striped table`}>
{this.props.children}
</div>
</div>
}
}
export interface DebuggerTableRowProps {
leftText: string;
rightText: string;
refID?: string | number;
icon?: string;
rightTitle?: string
rightClass?: string;
leftTitle?: string;
leftClass?: string;
depth?: number;
rowClass?: string;
onClick?: (e: React.SyntheticEvent<HTMLDivElement>, component: DebuggerTableRow) => void;
}
export class DebuggerTableRow extends React.Component<DebuggerTableRowProps> {
render() {
return <div role="listitem" className={`item ${this.props.rowClass || ""}`} onClick={this.props.onClick ? this.clickHandler : undefined}>
<div className="variableAndValue">
<div className={`variable varname ${this.props.leftClass || ""}`} title={this.props.leftTitle} style={this.props.depth ? { marginLeft: (this.props.depth * 0.75) + "em" } : undefined}>
{ <i className={`ui icon small ${this.props.icon || "invisible"}`} /> }
<span>{this.props.leftText}</span>
</div>
<div className="variable detail" style={{ padding: 0.2 }} title={this.props.rightTitle}>
<span className={`varval ${this.props.rightClass || ""}`}>{this.props.rightText}</span>
</div>
</div>
</div>
}
protected clickHandler = (e: React.SyntheticEvent<HTMLDivElement>) => {
if (this.props.onClick) this.props.onClick(e, this);
} | } | random_line_split |
|
query.service.ts | import { Injectable } from '@angular/core';
import {Query} from './model/query';
import {QueryCategory} from './model/query-category';
import {QueryPart} from './model/query-part';
@Injectable()
export class QueryService {
// String to separate category-names an the values
private categoryValueSeparator = ': ';
  /**
   * Creates a query-object from a query-string. The string can have the following syntax:
   * <CategoryName1>: <Value1> <CategoryName2>: <Value2>
   *
   * If the query-string starts with a string that is not in the list of categories, the query-object will have a part
   * with a null-category and the string as value.
   *
   * @param categories known categories to recognize in the string
   * @param queryString raw user-entered query text
   * @returns the parsed Query, with parts in their original order
   */
  public getQueryFromString(categories: Array<QueryCategory>, queryString: string): Query {
    const queryParts: Array<QueryPart> = [];
    let remainingQueryString: string = queryString;
    // Repeatedly strip the last recognizable part off the end of the string;
    // unshift keeps the collected parts in left-to-right order.
    while (true) {
      let lastPart: QueryPart;
      [lastPart, remainingQueryString] = this.popLastQueryPartFromString(categories, remainingQueryString);
      if (lastPart === null) {
        // Nothing more matched; any leftover text becomes a category-less part.
        if (remainingQueryString.length > 0) {
          queryParts.unshift(new QueryPart(null, remainingQueryString));
        }
        break;
      }
      queryParts.unshift(lastPart);
    }
    return new Query(queryParts);
  }
  /**
   * Extracts the last query-part and returns it and the shortened query-string.
   *
   * @param categories known categories; also tried with a null (default) category
   * @param queryString text to pop the trailing part from
   * @returns a [part, remainder] tuple; part is null when nothing matched
   */
  private popLastQueryPartFromString(categories: Array<QueryCategory>, queryString: string): [QueryPart, string] {
    // Trailing value: either an unquoted token (group 1), a double-quoted
    // string (content in group 3) or a single-quoted string (content in group 5).
    const lastPartRegexString = '([^\\s"\']*|("([^"]*)")|(\'([^\']*)\'))$';
    // Try to match categories or the default category
    for (const category of categories.concat([null])) {
      // Prefix the value pattern with "<Name>:" plus optional whitespace
      // when probing a concrete category.
      const categoryPart = category ? category.name + this.categoryValueSeparator.trim() + '\\s*' : '';
      const regexStr = categoryPart + lastPartRegexString;
      const regex = new RegExp(regexStr);
      const match = queryString.trim().match(regex);
      if (match && match[0].length > 0) {
        // Pick the correct match to not have quotes in result string
        const value = match[5] || match[3] || match[1] || '';
        const queryPart = new QueryPart(category, value);
        // Remove the matched tail and return the trimmed remainder.
        const remainingQueryString = queryString.trim().replace(regex, '').trim();
        return [queryPart, remainingQueryString];
      }
    }
    return [null, queryString.trim()];
  }
/**
* Appends the provided query-part to the query-string and returns the combined query-string.
*
* @param categories
* @param queryString
* @param appendPart
*/
public appendQueryPartToQueryString(categories: Array<QueryCategory>, queryString: string, appendPart: QueryPart) {
let lastPart: QueryPart, remainingQueryString: string;
[lastPart, remainingQueryString] = this.popLastQueryPartFromString(categories, queryString);
let newQuery;
// If the current query has no last part it can be fully replaced
if (!lastPart) {
newQuery = '';
// If the category of the last part matches to one to be appended, it means that only the value should be updated
} else if (lastPart.category === appendPart.category) {
newQuery = remainingQueryString;
// The category is different, so a new one will be added
} else {
newQuery = queryString;
if (appendPart.category) |
}
// Trim the query an add a whitespace only if the query is not empty
newQuery = newQuery.trim();
newQuery += newQuery.length > 0 ? ' ' : '';
const value = appendPart.value.indexOf(' ') === -1 ? appendPart.value : '"' + appendPart.value + '"';
// Now that the current query is cleaned up, the actual append can start
newQuery += (appendPart.category ? (appendPart.category.name + this.categoryValueSeparator) : '') + value;
return newQuery;
}
}
| {
// Remove the beginning of the category-name if it was typed
const categoryName = appendPart.category.name;
for (let i = categoryName.length; i > 0 ; i--) {
if (newQuery.toLowerCase().endsWith(categoryName.toLowerCase().substr(0, i))) {
newQuery = newQuery.slice(0, -i);
}
}
} | conditional_block |
query.service.ts | import { Injectable } from '@angular/core';
import {Query} from './model/query';
import {QueryCategory} from './model/query-category';
import {QueryPart} from './model/query-part';
@Injectable()
export class QueryService {
// String to separate category-names an the values
private categoryValueSeparator = ': ';
/**
* Creates a query-object from a query-string. The string can have the following syntax:
* <CategoryName1>: <Value1> <CategoryName2>: <Value2>
*
* If the query-string starts with a string that is not in the list of categories, the query-object will have a part
* with a null-category and the string as value.
*
* @param categories
* @param queryString
* @returns
*/
public getQueryFromString(categories: Array<QueryCategory>, queryString: string): Query {
const queryParts: Array<QueryPart> = [];
let remainingQueryString: string = queryString;
while (true) {
let lastPart: QueryPart;
[lastPart, remainingQueryString] = this.popLastQueryPartFromString(categories, remainingQueryString);
if (lastPart === null) {
if (remainingQueryString.length > 0) {
queryParts.unshift(new QueryPart(null, remainingQueryString));
}
break;
}
queryParts.unshift(lastPart);
}
return new Query(queryParts);
}
/**
* Extracts the last query-part and returns it and the shortened query-string
*
* @param categories
* @param queryString
* @returns
*/
private popLastQueryPartFromString(categories: Array<QueryCategory>, queryString: string): [QueryPart, string] |
/**
* Appends the provided query-part to the query-string and returns the combined query-string.
*
* @param categories
* @param queryString
* @param appendPart
*/
public appendQueryPartToQueryString(categories: Array<QueryCategory>, queryString: string, appendPart: QueryPart) {
let lastPart: QueryPart, remainingQueryString: string;
[lastPart, remainingQueryString] = this.popLastQueryPartFromString(categories, queryString);
let newQuery;
// If the current query has no last part it can be fully replaced
if (!lastPart) {
newQuery = '';
// If the category of the last part matches to one to be appended, it means that only the value should be updated
} else if (lastPart.category === appendPart.category) {
newQuery = remainingQueryString;
// The category is different, so a new one will be added
} else {
newQuery = queryString;
if (appendPart.category) {
// Remove the beginning of the category-name if it was typed
const categoryName = appendPart.category.name;
for (let i = categoryName.length; i > 0 ; i--) {
if (newQuery.toLowerCase().endsWith(categoryName.toLowerCase().substr(0, i))) {
newQuery = newQuery.slice(0, -i);
}
}
}
}
// Trim the query an add a whitespace only if the query is not empty
newQuery = newQuery.trim();
newQuery += newQuery.length > 0 ? ' ' : '';
const value = appendPart.value.indexOf(' ') === -1 ? appendPart.value : '"' + appendPart.value + '"';
// Now that the current query is cleaned up, the actual append can start
newQuery += (appendPart.category ? (appendPart.category.name + this.categoryValueSeparator) : '') + value;
return newQuery;
}
}
| {
const lastPartRegexString = '([^\\s"\']*|("([^"]*)")|(\'([^\']*)\'))$';
// Try to match categories or the default category
for (const category of categories.concat([null])) {
const categoryPart = category ? category.name + this.categoryValueSeparator.trim() + '\\s*' : '';
const regexStr = categoryPart + lastPartRegexString;
const regex = new RegExp(regexStr);
const match = queryString.trim().match(regex);
if (match && match[0].length > 0) {
// Pick the correct match to not have quotes in result string
const value = match[5] || match[3] || match[1] || '';
const queryPart = new QueryPart(category, value);
const remainingQueryString = queryString.trim().replace(regex, '').trim();
return [queryPart, remainingQueryString];
}
}
return [null, queryString.trim()];
} | identifier_body |
query.service.ts | import { Injectable } from '@angular/core';
import {Query} from './model/query';
import {QueryCategory} from './model/query-category';
import {QueryPart} from './model/query-part';
@Injectable()
export class | {
// String to separate category-names an the values
private categoryValueSeparator = ': ';
/**
* Creates a query-object from a query-string. The string can have the following syntax:
* <CategoryName1>: <Value1> <CategoryName2>: <Value2>
*
* If the query-string starts with a string that is not in the list of categories, the query-object will have a part
* with a null-category and the string as value.
*
* @param categories
* @param queryString
* @returns
*/
public getQueryFromString(categories: Array<QueryCategory>, queryString: string): Query {
const queryParts: Array<QueryPart> = [];
let remainingQueryString: string = queryString;
while (true) {
let lastPart: QueryPart;
[lastPart, remainingQueryString] = this.popLastQueryPartFromString(categories, remainingQueryString);
if (lastPart === null) {
if (remainingQueryString.length > 0) {
queryParts.unshift(new QueryPart(null, remainingQueryString));
}
break;
}
queryParts.unshift(lastPart);
}
return new Query(queryParts);
}
/**
* Extracts the last query-part and returns it and the shortened query-string
*
* @param categories
* @param queryString
* @returns
*/
private popLastQueryPartFromString(categories: Array<QueryCategory>, queryString: string): [QueryPart, string] {
const lastPartRegexString = '([^\\s"\']*|("([^"]*)")|(\'([^\']*)\'))$';
// Try to match categories or the default category
for (const category of categories.concat([null])) {
const categoryPart = category ? category.name + this.categoryValueSeparator.trim() + '\\s*' : '';
const regexStr = categoryPart + lastPartRegexString;
const regex = new RegExp(regexStr);
const match = queryString.trim().match(regex);
if (match && match[0].length > 0) {
// Pick the correct match to not have quotes in result string
const value = match[5] || match[3] || match[1] || '';
const queryPart = new QueryPart(category, value);
const remainingQueryString = queryString.trim().replace(regex, '').trim();
return [queryPart, remainingQueryString];
}
}
return [null, queryString.trim()];
}
/**
* Appends the provided query-part to the query-string and returns the combined query-string.
*
* @param categories
* @param queryString
* @param appendPart
*/
public appendQueryPartToQueryString(categories: Array<QueryCategory>, queryString: string, appendPart: QueryPart) {
let lastPart: QueryPart, remainingQueryString: string;
[lastPart, remainingQueryString] = this.popLastQueryPartFromString(categories, queryString);
let newQuery;
// If the current query has no last part it can be fully replaced
if (!lastPart) {
newQuery = '';
// If the category of the last part matches to one to be appended, it means that only the value should be updated
} else if (lastPart.category === appendPart.category) {
newQuery = remainingQueryString;
// The category is different, so a new one will be added
} else {
newQuery = queryString;
if (appendPart.category) {
// Remove the beginning of the category-name if it was typed
const categoryName = appendPart.category.name;
for (let i = categoryName.length; i > 0 ; i--) {
if (newQuery.toLowerCase().endsWith(categoryName.toLowerCase().substr(0, i))) {
newQuery = newQuery.slice(0, -i);
}
}
}
}
// Trim the query an add a whitespace only if the query is not empty
newQuery = newQuery.trim();
newQuery += newQuery.length > 0 ? ' ' : '';
const value = appendPart.value.indexOf(' ') === -1 ? appendPart.value : '"' + appendPart.value + '"';
// Now that the current query is cleaned up, the actual append can start
newQuery += (appendPart.category ? (appendPart.category.name + this.categoryValueSeparator) : '') + value;
return newQuery;
}
}
| QueryService | identifier_name |
query.service.ts | import { Injectable } from '@angular/core';
import {Query} from './model/query';
import {QueryCategory} from './model/query-category';
import {QueryPart} from './model/query-part';
@Injectable()
export class QueryService {
// String to separate category-names an the values
private categoryValueSeparator = ': ';
/**
* Creates a query-object from a query-string. The string can have the following syntax:
* <CategoryName1>: <Value1> <CategoryName2>: <Value2>
*
* If the query-string starts with a string that is not in the list of categories, the query-object will have a part
* with a null-category and the string as value.
*
* @param categories
* @param queryString
* @returns
*/
public getQueryFromString(categories: Array<QueryCategory>, queryString: string): Query {
const queryParts: Array<QueryPart> = [];
let remainingQueryString: string = queryString;
|
if (lastPart === null) {
if (remainingQueryString.length > 0) {
queryParts.unshift(new QueryPart(null, remainingQueryString));
}
break;
}
queryParts.unshift(lastPart);
}
return new Query(queryParts);
}
/**
* Extracts the last query-part and returns it and the shortened query-string
*
* @param categories
* @param queryString
* @returns
*/
private popLastQueryPartFromString(categories: Array<QueryCategory>, queryString: string): [QueryPart, string] {
const lastPartRegexString = '([^\\s"\']*|("([^"]*)")|(\'([^\']*)\'))$';
// Try to match categories or the default category
for (const category of categories.concat([null])) {
const categoryPart = category ? category.name + this.categoryValueSeparator.trim() + '\\s*' : '';
const regexStr = categoryPart + lastPartRegexString;
const regex = new RegExp(regexStr);
const match = queryString.trim().match(regex);
if (match && match[0].length > 0) {
// Pick the correct match to not have quotes in result string
const value = match[5] || match[3] || match[1] || '';
const queryPart = new QueryPart(category, value);
const remainingQueryString = queryString.trim().replace(regex, '').trim();
return [queryPart, remainingQueryString];
}
}
return [null, queryString.trim()];
}
/**
* Appends the provided query-part to the query-string and returns the combined query-string.
*
* @param categories
* @param queryString
* @param appendPart
*/
public appendQueryPartToQueryString(categories: Array<QueryCategory>, queryString: string, appendPart: QueryPart) {
let lastPart: QueryPart, remainingQueryString: string;
[lastPart, remainingQueryString] = this.popLastQueryPartFromString(categories, queryString);
let newQuery;
// If the current query has no last part it can be fully replaced
if (!lastPart) {
newQuery = '';
// If the category of the last part matches to one to be appended, it means that only the value should be updated
} else if (lastPart.category === appendPart.category) {
newQuery = remainingQueryString;
// The category is different, so a new one will be added
} else {
newQuery = queryString;
if (appendPart.category) {
// Remove the beginning of the category-name if it was typed
const categoryName = appendPart.category.name;
for (let i = categoryName.length; i > 0 ; i--) {
if (newQuery.toLowerCase().endsWith(categoryName.toLowerCase().substr(0, i))) {
newQuery = newQuery.slice(0, -i);
}
}
}
}
// Trim the query an add a whitespace only if the query is not empty
newQuery = newQuery.trim();
newQuery += newQuery.length > 0 ? ' ' : '';
const value = appendPart.value.indexOf(' ') === -1 ? appendPart.value : '"' + appendPart.value + '"';
// Now that the current query is cleaned up, the actual append can start
newQuery += (appendPart.category ? (appendPart.category.name + this.categoryValueSeparator) : '') + value;
return newQuery;
}
} | while (true) {
let lastPart: QueryPart;
[lastPart, remainingQueryString] = this.popLastQueryPartFromString(categories, remainingQueryString); | random_line_split |
lib.rs | #![crate_type = "dylib"]
#![feature(plugin_registrar, rustc_private)]
//! # Rustplacements
//!
//! This is a compiler plugin for the [Rust language](https://www.rust-lang.org/en-US/) that replaces all of your string literals
//! in the source code with random text. Well, it's not really random. You can choose to replace text with items from any of the
//! lists on [this page](https://github.com/Peternator7/rustplacements/blob/master/CATEGORIES.md) by simply adding a few
//! attributes to your existing Rust code.
//!
//! ## A Brief Example
//!
//! Let's start with a simple example like the one below. It prints out the words in the sentence below one word at a time.
//!
//! ```rust,ignore
//! const SENTENCE: [&'static str; 9] = ["The", "Quick", "Brown", "Fox", "Jumped", "Over", "the",
//! "Lazy", "Dog"];
//!
//! fn main() {
//! for word in &SENTENCE {
//! println!("{}", word);
//! }
//! }
//! ```
//!
//! The output should look like:
//!
//! ```txt
//! The
//! Quick
//! Brown
//! Fox
//! Jumped
//! Over
//! the
//! Lazy
//! Dog
//! ```
//!
//! Rustplacements let's us replace all the strings at compile with other values. Let's say we want to replace all the text with
//! emojis. Rustplacements can do that.
//!
//! ```rust,ignore
//! #![feature(plugin)]
//! #![plugin(rustplacements)]
//!
//! // Placing it in the module root will replace everything in the module
//! #![Rustplacements = "emojis"]
//!
//! const SENTENCE: [&'static str; 9] = ["The", "Quick", "Brown", "Fox", "Jumped", "Over", "the",
//! "Lazy", "Dog"];
//!
//! fn main() {
//! for word in &SENTENCE {
//! println!("{}", word);
//! }
//! }
//! ```
//!
//! The new output will look something like this. The output is randomized so it will be re-generated everytime you compile
//! your crate.
//!
//! ```text
//! π’ π« π€
//! π π π π π§
//! π¬ π¬ π π‘ π
//! π π π¬
//! π π π€§ π¬ π§ π‘
//! π π π π«
//! π π± π°
//! π π€‘ π
π―
//! π€ π π
//! ```
//!
//! ## Using Rustplacements
//!
//! Compiler plugins like Rustplacements are only available on nightly rust because they require a feature flag to use. To get started,
//! Rustplacements is available on [crates.io](https://crates.io/crates/rustplacements). To download the latest version, add the
//! following line to the `Cargo.toml`.
//!
//! ```toml
//! [dependencies]
//! rustplacements = "*"
//! ```
//!
//! To enable the compiler plugin, add the following lines on the top of your `main.rs` or `lib.rs`.
//!
//! ```rust,ignore
//! #![feature(plugin)]
//! #![plugin(rustplacements)]
//! ```
//!
//! You can now use the plugin anywhere in the crate by applying the `#[Rustplacements = "one-direction"]` to any language element.
//! You can place the element in the root with `#![Rustplacements = "got-quotes"]` and it will transform all the string literals
//! in your module. It can also be applied to specific strings / impls / functions for more fine grained control.
//!
//! That's pretty much all there is to it. Check out the [categories page](https://github.com/Peternator7/rustplacements/blob/master/CATEGORIES.md) for more categories that you can use.
extern crate syntax;
extern crate rustc_plugin;
extern crate rand;
#[macro_use]
extern crate lazy_static;
use rustc_plugin::Registry;
use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension};
use syntax::ast::*;
use syntax::codemap::Span;
use syntax::symbol::Symbol;
use syntax::codemap::Spanned;
use syntax::ptr::P;
mod exprs;
struct Context<'a> {
text: &'a Vec<&'static str>,
}
/// Compiler hook for Rust to register plugins.
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_syntax_extension(Symbol::intern("Rustplacements"),
SyntaxExtension::MultiModifier(Box::new(rustplace)))
}
fn rustplace(_: &mut ExtCtxt, _: Span, m: &MetaItem, an: Annotatable) -> Vec<Annotatable> {
let category = match m.node {
MetaItemKind::List(..) => panic!("This plugin does not support list style attributes."),
MetaItemKind::Word => Symbol::intern("fizzbuzz"),
MetaItemKind::NameValue(ref l) => {
use LitKind::*;
match l.node {
Str(symbol, _) => symbol,
_ => panic!("Only string literals are supported"),
}
}
};
let ctxt = Context { text: exprs::HASHMAP.get(&*category.as_str()).unwrap() };
vec![an.trans(&ctxt)]
}
trait Rustplace {
fn trans(self, ctxt: &Context) -> Self;
}
impl<T: Rustplace + 'static> Rustplace for P<T> {
fn trans(self, ctxt: &Context) -> Self {
self.map(|inner| inner.trans(ctxt))
}
}
impl<T: Rustplace> Rustplace for Vec<T> {
fn trans(self, ctxt: &Context) -> Self {
self.into_iter().map(|i| i.trans(ctxt)).collect()
}
}
// We can invoke this rule on most of the struct types.
macro_rules! Rustplace {
// For many of the structs, the field is called "node" so we simplify that case.
($ty:ident) => (Rustplace!($ty,node););
($ty:ident,$field:tt) => (
impl Rustplace for $ty {
fn trans(self, ctxt: &Context) -> Self {
$ty {
$field: self.$field.trans(ctxt),
..self
}
}
}
)
}
// We can autoimplement some of the structs because the all change the same field. :)
Rustplace!(Item);
Rustplace!(TraitItem);
Rustplace!(ImplItem);
Rustplace!(Stmt);
Rustplace!(Expr);
// These follow the same basic pattern, but the field has a different name.
Rustplace!(Block, stmts);
Rustplace!(Field, expr);
Rustplace!(Mod, items);
// These need 1 extra map so we just wrote them out.
impl Rustplace for Local {
fn trans(self, ctxt: &Context) -> Self {
Local {
init: self.init.map(|i| i.trans(ctxt)),
..self
}
}
}
impl Rustplace for Arm {
fn trans(self, ctxt: &Context) -> Self {
Arm {
guard: self.guard.map(|i| i.trans(ctxt)),
..self
}
}
}
// All the enums need to be manually implemented and we figure out what variants it makes sense
// for us to transform.
impl Rustplace for Annotatable {
fn trans(self, ctxt: &Context) -> Self {
use Annotatable::*;
match self {
Item(item) => Item(item.trans(ctxt)),
TraitItem(item) => TraitItem(item.trans(ctxt)),
ImplItem(item) => ImplItem(item.trans(ctxt)),
}
}
}
impl Rustplace for ItemKind {
fn trans(self, ctxt: &Context) -> Self {
use ItemKind::*;
match self {
Fn(a, b, c, d, e, block) => Fn(a, b, c, d, e, block.trans(ctxt)),
Static(ty, m, expr) => Static(ty, m, expr.trans(ctxt)),
Const(ty, expr) => Const(ty, expr.trans(ctxt)),
Trait(u, g, ty, v) => Trait(u, g, ty, v.trans(ctxt)),
Impl(a, b, c, d, e, f, v) => Impl(a, b, c, d, e, f, v.trans(ctxt)),
Mod(m) => Mod(m.trans(ctxt)),
_ => self,
}
}
}
impl Rustplace for TraitItemKind {
fn trans(self, ctxt: &Context) -> Self {
use TraitItemKind::*;
match self {
Const(ty, Some(expr)) => Const(ty, Some(expr.trans(ctxt))),
Method(sig, Some(block)) => Method(sig, Some(block.trans(ctxt))),
_ => self,
}
}
}
impl Rustplace for ImplItemKind {
fn trans(self, ctxt: &Context) -> Self {
use ImplItemKind::*;
match self {
Const(ty, expr) => Const(ty, expr.trans(ctxt)),
Method(sig, block) => Method(sig, block.trans(ctxt)),
_ => self,
}
}
}
impl Rustplace for StmtKind {
fn trans(self, ctxt: &Context) -> Self {
use StmtKind::*;
match self {
Local(l) => Local(l.trans(ctxt)),
Item(i) => Item(i.trans(ctxt)),
Expr(e) => Expr(e.trans(ctxt)),
Semi(s) => Semi(s.trans(ctxt)),
_ => self,
}
}
}
impl Rustplace for ExprKind {
fn trans(self, ctxt: &Context) -> Self {
use ExprKind::*;
match self {
Lit(l) => Li | rans(ctxt)),
Box(b) => Box(b.trans(ctxt)),
InPlace(a, b) => InPlace(a.trans(ctxt), b.trans(ctxt)),
Array(v) => Array(v.trans(ctxt)),
Call(a, v) => Call(a.trans(ctxt), v.trans(ctxt)),
MethodCall(p, v) => MethodCall(p, v.trans(ctxt)),
Tup(v) => Tup(v.trans(ctxt)),
Binary(op, l, r) => Binary(op, l.trans(ctxt), r.trans(ctxt)),
Unary(op, expr) => Unary(op, expr.trans(ctxt)),
Cast(expr, ty) => Cast(expr.trans(ctxt), ty),
Type(expr, ty) => Type(expr.trans(ctxt), ty),
If(cond, iff, els) => {
If(cond.trans(ctxt),
iff.trans(ctxt),
els.map(|i| i.trans(ctxt)))
}
IfLet(pat, expr, iff, els) => {
IfLet(pat,
expr.trans(ctxt),
iff.trans(ctxt),
els.map(|i| i.trans(ctxt)))
}
While(cond, blk, si) => While(cond.trans(ctxt), blk.trans(ctxt), si),
WhileLet(p, expr, blk, si) => WhileLet(p, expr.trans(ctxt), blk.trans(ctxt), si),
ForLoop(p, expr, blk, si) => ForLoop(p, expr.trans(ctxt), blk.trans(ctxt), si),
Loop(expr, si) => Loop(expr.trans(ctxt), si),
Match(expr, v) => Match(expr.trans(ctxt), v.trans(ctxt)),
Closure(c, p, blk, s) => Closure(c, p, blk.trans(ctxt), s),
Block(blk) => Block(blk.trans(ctxt)),
Catch(blk) => Catch(blk.trans(ctxt)),
Assign(a, b) => Assign(a.trans(ctxt), b.trans(ctxt)),
AssignOp(op, lhs, rhs) => AssignOp(op, lhs.trans(ctxt), rhs.trans(ctxt)),
Field(expr, si) => Field(expr.trans(ctxt), si),
TupField(expr, span) => TupField(expr.trans(ctxt), span),
Index(a, b) => Index(a.trans(ctxt), b.trans(ctxt)),
Range(lower, upper, lim) => {
Range(lower.map(|i| i.trans(ctxt)),
upper.map(|i| i.trans(ctxt)),
lim)
}
AddrOf(m, expr) => AddrOf(m, expr.trans(ctxt)),
Break(br, expr) => Break(br, expr.map(|i| i.trans(ctxt))),
Ret(opt) => Ret(opt.map(|i| i.trans(ctxt))),
Struct(p, v, opt) => Struct(p, v.trans(ctxt), opt.map(|i| i.trans(ctxt))),
Repeat(a, b) => Repeat(a.trans(ctxt), b.trans(ctxt)),
Paren(expr) => Paren(expr.trans(ctxt)),
Try(expr) => Try(expr.trans(ctxt)),
_ => self,
}
}
}
impl Rustplace for Spanned<LitKind> {
fn trans(self, ctxt: &Context) -> Self {
use LitKind::*;
match self.node {
// All that code above just so we can do this one transformation :)
Str(s, _) => {
let new_string = s.as_str()
.lines()
.map(|line| {
let mut output = String::new();
let mut idx = 0;
// Copy the lead whitespace over.
for c in line.chars() {
if c.is_whitespace() {
idx += 1;
output.push(c);
} else {
break;
}
}
let l = line.chars().count();
// Now just append random stuff.
while idx < l {
let r = rand::random::<usize>() % ctxt.text.len();
output.push_str(ctxt.text[r]);
output.push(' ');
idx += ctxt.text[r].chars().count();
}
// TODO: Remove the trailing ' '.
output
})
.collect::<Vec<_>>()
.join("\n");
Spanned {
node: LitKind::Str(Symbol::intern(&*new_string), StrStyle::Cooked),
..self
}
}
_ => self,
}
}
}
| t(l.t | identifier_name |
lib.rs | #![crate_type = "dylib"]
#![feature(plugin_registrar, rustc_private)]
//! # Rustplacements
//!
//! This is a compiler plugin for the [Rust language](https://www.rust-lang.org/en-US/) that replaces all of your string literals
//! in the source code with random text. Well, it's not really random. You can choose to replace text with items from any of the
//! lists on [this page](https://github.com/Peternator7/rustplacements/blob/master/CATEGORIES.md) by simply adding a few
//! attributes to your existing Rust code.
//!
//! ## A Brief Example
//!
//! Let's start with a simple example like the one below. It prints out the words in the sentence below one word at a time.
//!
//! ```rust,ignore
//! const SENTENCE: [&'static str; 9] = ["The", "Quick", "Brown", "Fox", "Jumped", "Over", "the",
//! "Lazy", "Dog"];
//!
//! fn main() {
//! for word in &SENTENCE {
//! println!("{}", word);
//! }
//! }
//! ```
//!
//! The output should look like:
//!
//! ```txt
//! The
//! Quick
//! Brown
//! Fox
//! Jumped
//! Over
//! the
//! Lazy
//! Dog
//! ```
//!
//! Rustplacements let's us replace all the strings at compile with other values. Let's say we want to replace all the text with
//! emojis. Rustplacements can do that.
//!
//! ```rust,ignore
//! #![feature(plugin)]
//! #![plugin(rustplacements)]
//!
//! // Placing it in the module root will replace everything in the module
//! #![Rustplacements = "emojis"]
//!
//! const SENTENCE: [&'static str; 9] = ["The", "Quick", "Brown", "Fox", "Jumped", "Over", "the",
//! "Lazy", "Dog"];
//!
//! fn main() {
//! for word in &SENTENCE {
//! println!("{}", word);
//! }
//! }
//! ```
//!
//! The new output will look something like this. The output is randomized so it will be re-generated everytime you compile
//! your crate.
//!
//! ```text
//! π’ π« π€
//! π π π π π§
//! π¬ π¬ π π‘ π
//! π π π¬
//! π π π€§ π¬ π§ π‘
//! π π π π«
//! π π± π°
//! π π€‘ π
π―
//! π€ π π
//! ```
//!
//! ## Using Rustplacements
//!
//! Compiler plugins like Rustplacements are only available on nightly rust because they require a feature flag to use. To get started,
//! Rustplacements is available on [crates.io](https://crates.io/crates/rustplacements). To download the latest version, add the
//! following line to the `Cargo.toml`.
//!
//! ```toml
//! [dependencies]
//! rustplacements = "*"
//! ```
//!
//! To enable the compiler plugin, add the following lines on the top of your `main.rs` or `lib.rs`.
//!
//! ```rust,ignore
//! #![feature(plugin)]
//! #![plugin(rustplacements)]
//! ```
//!
//! You can now use the plugin anywhere in the crate by applying the `#[Rustplacements = "one-direction"]` to any language element.
//! You can place the element in the root with `#![Rustplacements = "got-quotes"]` and it will transform all the string literals
//! in your module. It can also be applied to specific strings / impls / functions for more fine grained control.
//!
//! That's pretty much all there is to it. Check out the [categories page](https://github.com/Peternator7/rustplacements/blob/master/CATEGORIES.md) for more categories that you can use.
extern crate syntax;
extern crate rustc_plugin;
extern crate rand;
#[macro_use]
extern crate lazy_static;
use rustc_plugin::Registry;
use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension};
use syntax::ast::*;
use syntax::codemap::Span;
use syntax::symbol::Symbol;
use syntax::codemap::Spanned;
use syntax::ptr::P;
mod exprs;
struct Context<'a> {
text: &'a Vec<&'static str>,
}
/// Compiler hook for Rust to register plugins.
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_syntax_extension(Symbol::intern("Rustplacements"),
SyntaxExtension::MultiModifier(Box::new(rustplace)))
}
fn rustplace(_: &mut ExtCtxt, _: Span, m: &MetaItem, an: Annotatable) -> Vec<Annotatable> {
let category = match m.node {
MetaItemKind::List(..) => panic!("This plugin does not support list style attributes."),
MetaItemKind::Word => Symbol::intern("fizzbuzz"),
MetaItemKind::NameValue(ref l) => {
use LitKind::*;
match l.node {
Str(symbol, _) => symbol,
_ => panic!("Only string literals are supported"),
}
}
};
let ctxt = Context { text: exprs::HASHMAP.get(&*category.as_str()).unwrap() };
vec![an.trans(&ctxt)]
}
trait Rustplace {
fn trans(self, ctxt: &Context) -> Self;
}
impl<T: Rustplace + 'static> Rustplace for P<T> {
fn trans(self, ctxt: &Context) -> Self {
self.map(|inner| inner.trans(ctxt))
}
}
impl<T: Rustplace> Rustplace for Vec<T> {
fn tran | _iter().map(|i| i.trans(ctxt)).collect()
}
}
// We can invoke this rule on most of the struct types.
macro_rules! Rustplace {
// For many of the structs, the field is called "node" so we simplify that case.
($ty:ident) => (Rustplace!($ty,node););
($ty:ident,$field:tt) => (
impl Rustplace for $ty {
fn trans(self, ctxt: &Context) -> Self {
$ty {
$field: self.$field.trans(ctxt),
..self
}
}
}
)
}
// We can autoimplement some of the structs because the all change the same field. :)
Rustplace!(Item);
Rustplace!(TraitItem);
Rustplace!(ImplItem);
Rustplace!(Stmt);
Rustplace!(Expr);
// These follow the same basic pattern, but the field has a different name.
Rustplace!(Block, stmts);
Rustplace!(Field, expr);
Rustplace!(Mod, items);
// These need 1 extra map so we just wrote them out.
impl Rustplace for Local {
fn trans(self, ctxt: &Context) -> Self {
Local {
init: self.init.map(|i| i.trans(ctxt)),
..self
}
}
}
impl Rustplace for Arm {
fn trans(self, ctxt: &Context) -> Self {
Arm {
guard: self.guard.map(|i| i.trans(ctxt)),
..self
}
}
}
// All the enums need to be manually implemented and we figure out what variants it makes sense
// for us to transform.
impl Rustplace for Annotatable {
fn trans(self, ctxt: &Context) -> Self {
use Annotatable::*;
match self {
Item(item) => Item(item.trans(ctxt)),
TraitItem(item) => TraitItem(item.trans(ctxt)),
ImplItem(item) => ImplItem(item.trans(ctxt)),
}
}
}
impl Rustplace for ItemKind {
fn trans(self, ctxt: &Context) -> Self {
use ItemKind::*;
match self {
Fn(a, b, c, d, e, block) => Fn(a, b, c, d, e, block.trans(ctxt)),
Static(ty, m, expr) => Static(ty, m, expr.trans(ctxt)),
Const(ty, expr) => Const(ty, expr.trans(ctxt)),
Trait(u, g, ty, v) => Trait(u, g, ty, v.trans(ctxt)),
Impl(a, b, c, d, e, f, v) => Impl(a, b, c, d, e, f, v.trans(ctxt)),
Mod(m) => Mod(m.trans(ctxt)),
_ => self,
}
}
}
impl Rustplace for TraitItemKind {
fn trans(self, ctxt: &Context) -> Self {
use TraitItemKind::*;
match self {
Const(ty, Some(expr)) => Const(ty, Some(expr.trans(ctxt))),
Method(sig, Some(block)) => Method(sig, Some(block.trans(ctxt))),
_ => self,
}
}
}
impl Rustplace for ImplItemKind {
fn trans(self, ctxt: &Context) -> Self {
use ImplItemKind::*;
match self {
Const(ty, expr) => Const(ty, expr.trans(ctxt)),
Method(sig, block) => Method(sig, block.trans(ctxt)),
_ => self,
}
}
}
impl Rustplace for StmtKind {
fn trans(self, ctxt: &Context) -> Self {
use StmtKind::*;
match self {
Local(l) => Local(l.trans(ctxt)),
Item(i) => Item(i.trans(ctxt)),
Expr(e) => Expr(e.trans(ctxt)),
Semi(s) => Semi(s.trans(ctxt)),
_ => self,
}
}
}
impl Rustplace for ExprKind {
fn trans(self, ctxt: &Context) -> Self {
use ExprKind::*;
match self {
Lit(l) => Lit(l.trans(ctxt)),
Box(b) => Box(b.trans(ctxt)),
InPlace(a, b) => InPlace(a.trans(ctxt), b.trans(ctxt)),
Array(v) => Array(v.trans(ctxt)),
Call(a, v) => Call(a.trans(ctxt), v.trans(ctxt)),
MethodCall(p, v) => MethodCall(p, v.trans(ctxt)),
Tup(v) => Tup(v.trans(ctxt)),
Binary(op, l, r) => Binary(op, l.trans(ctxt), r.trans(ctxt)),
Unary(op, expr) => Unary(op, expr.trans(ctxt)),
Cast(expr, ty) => Cast(expr.trans(ctxt), ty),
Type(expr, ty) => Type(expr.trans(ctxt), ty),
If(cond, iff, els) => {
If(cond.trans(ctxt),
iff.trans(ctxt),
els.map(|i| i.trans(ctxt)))
}
IfLet(pat, expr, iff, els) => {
IfLet(pat,
expr.trans(ctxt),
iff.trans(ctxt),
els.map(|i| i.trans(ctxt)))
}
While(cond, blk, si) => While(cond.trans(ctxt), blk.trans(ctxt), si),
WhileLet(p, expr, blk, si) => WhileLet(p, expr.trans(ctxt), blk.trans(ctxt), si),
ForLoop(p, expr, blk, si) => ForLoop(p, expr.trans(ctxt), blk.trans(ctxt), si),
Loop(expr, si) => Loop(expr.trans(ctxt), si),
Match(expr, v) => Match(expr.trans(ctxt), v.trans(ctxt)),
Closure(c, p, blk, s) => Closure(c, p, blk.trans(ctxt), s),
Block(blk) => Block(blk.trans(ctxt)),
Catch(blk) => Catch(blk.trans(ctxt)),
Assign(a, b) => Assign(a.trans(ctxt), b.trans(ctxt)),
AssignOp(op, lhs, rhs) => AssignOp(op, lhs.trans(ctxt), rhs.trans(ctxt)),
Field(expr, si) => Field(expr.trans(ctxt), si),
TupField(expr, span) => TupField(expr.trans(ctxt), span),
Index(a, b) => Index(a.trans(ctxt), b.trans(ctxt)),
Range(lower, upper, lim) => {
Range(lower.map(|i| i.trans(ctxt)),
upper.map(|i| i.trans(ctxt)),
lim)
}
AddrOf(m, expr) => AddrOf(m, expr.trans(ctxt)),
Break(br, expr) => Break(br, expr.map(|i| i.trans(ctxt))),
Ret(opt) => Ret(opt.map(|i| i.trans(ctxt))),
Struct(p, v, opt) => Struct(p, v.trans(ctxt), opt.map(|i| i.trans(ctxt))),
Repeat(a, b) => Repeat(a.trans(ctxt), b.trans(ctxt)),
Paren(expr) => Paren(expr.trans(ctxt)),
Try(expr) => Try(expr.trans(ctxt)),
_ => self,
}
}
}
impl Rustplace for Spanned<LitKind> {
fn trans(self, ctxt: &Context) -> Self {
use LitKind::*;
match self.node {
// All that code above just so we can do this one transformation :)
Str(s, _) => {
let new_string = s.as_str()
.lines()
.map(|line| {
let mut output = String::new();
let mut idx = 0;
// Copy the lead whitespace over.
for c in line.chars() {
if c.is_whitespace() {
idx += 1;
output.push(c);
} else {
break;
}
}
let l = line.chars().count();
// Now just append random stuff.
while idx < l {
let r = rand::random::<usize>() % ctxt.text.len();
output.push_str(ctxt.text[r]);
output.push(' ');
idx += ctxt.text[r].chars().count();
}
// TODO: Remove the trailing ' '.
output
})
.collect::<Vec<_>>()
.join("\n");
Spanned {
node: LitKind::Str(Symbol::intern(&*new_string), StrStyle::Cooked),
..self
}
}
_ => self,
}
}
}
| s(self, ctxt: &Context) -> Self {
self.into | identifier_body |
lib.rs | #![crate_type = "dylib"]
#![feature(plugin_registrar, rustc_private)]
//! # Rustplacements
//!
//! This is a compiler plugin for the [Rust language](https://www.rust-lang.org/en-US/) that replaces all of your string literals
//! in the source code with random text. Well, it's not really random. You can choose to replace text with items from any of the
//! lists on [this page](https://github.com/Peternator7/rustplacements/blob/master/CATEGORIES.md) by simply adding a few
//! attributes to your existing Rust code.
//!
//! ## A Brief Example
//!
//! Let's start with a simple example like the one below. It prints out the words in the sentence below one word at a time.
//!
//! ```rust,ignore
//! const SENTENCE: [&'static str; 9] = ["The", "Quick", "Brown", "Fox", "Jumped", "Over", "the",
//! "Lazy", "Dog"];
//!
//! fn main() {
//! for word in &SENTENCE {
//! println!("{}", word);
//! }
//! }
//! ```
//!
//! The output should look like:
//!
//! ```txt
//! The
//! Quick
//! Brown
//! Fox
//! Jumped
//! Over
//! the
//! Lazy
//! Dog
//! ```
//!
//! Rustplacements let's us replace all the strings at compile with other values. Let's say we want to replace all the text with
//! emojis. Rustplacements can do that.
//!
//! ```rust,ignore
//! #![feature(plugin)]
//! #![plugin(rustplacements)]
//!
//! // Placing it in the module root will replace everything in the module
//! #![Rustplacements = "emojis"]
//!
//! const SENTENCE: [&'static str; 9] = ["The", "Quick", "Brown", "Fox", "Jumped", "Over", "the",
//! "Lazy", "Dog"];
//!
//! fn main() {
//! for word in &SENTENCE {
//! println!("{}", word);
//! }
//! }
//! ```
//!
//! The new output will look something like this. The output is randomized so it will be re-generated everytime you compile
//! your crate.
//!
//! ```text
//! π’ π« π€
//! π π π π π§
//! π¬ π¬ π π‘ π
//! π π π¬
//! π π π€§ π¬ π§ π‘
//! π π π π«
//! π π± π°
//! π π€‘ π
π―
//! π€ π π
//! ```
//!
//! ## Using Rustplacements
//!
//! Compiler plugins like Rustplacements are only available on nightly rust because they require a feature flag to use. To get started,
//! Rustplacements is available on [crates.io](https://crates.io/crates/rustplacements). To download the latest version, add the
//! following line to the `Cargo.toml`.
//!
//! ```toml
//! [dependencies]
//! rustplacements = "*"
//! ```
//!
//! To enable the compiler plugin, add the following lines on the top of your `main.rs` or `lib.rs`.
//!
//! ```rust,ignore
//! #![feature(plugin)]
//! #![plugin(rustplacements)]
//! ```
//!
//! You can now use the plugin anywhere in the crate by applying the `#[Rustplacements = "one-direction"]` to any language element.
//! You can place the element in the root with `#![Rustplacements = "got-quotes"]` and it will transform all the string literals
//! in your module. It can also be applied to specific strings / impls / functions for more fine grained control.
//!
//! That's pretty much all there is to it. Check out the [categories page](https://github.com/Peternator7/rustplacements/blob/master/CATEGORIES.md) for more categories that you can use.
extern crate syntax;
extern crate rustc_plugin;
extern crate rand;
#[macro_use]
extern crate lazy_static;
use rustc_plugin::Registry;
use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension};
use syntax::ast::*;
use syntax::codemap::Span;
use syntax::symbol::Symbol;
use syntax::codemap::Spanned;
use syntax::ptr::P;
mod exprs;
struct Context<'a> {
text: &'a Vec<&'static str>,
}
/// Compiler hook for Rust to register plugins.
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_syntax_extension(Symbol::intern("Rustplacements"),
SyntaxExtension::MultiModifier(Box::new(rustplace)))
}
fn rustplace(_: &mut ExtCtxt, _: Span, m: &MetaItem, an: Annotatable) -> Vec<Annotatable> {
let category = match m.node {
MetaItemKind::List(..) => panic!("This plugin does not support list style attributes."),
MetaItemKind::Word => Symbol::intern("fizzbuzz"),
MetaItemKind::NameValue(ref l) => {
use LitKind::*;
match l.node {
Str(symbol, _) => symbol,
_ => panic!("Only string literals are supported"),
}
}
};
let ctxt = Context { text: exprs::HASHMAP.get(&*category.as_str()).unwrap() };
vec![an.trans(&ctxt)]
}
trait Rustplace {
fn trans(self, ctxt: &Context) -> Self;
}
impl<T: Rustplace + 'static> Rustplace for P<T> {
fn trans(self, ctxt: &Context) -> Self {
self.map(|inner| inner.trans(ctxt))
}
}
impl<T: Rustplace> Rustplace for Vec<T> {
fn trans(self, ctxt: &Context) -> Self {
self.into_iter().map(|i| i.trans(ctxt)).collect()
}
}
// We can invoke this rule on most of the struct types.
macro_rules! Rustplace {
// For many of the structs, the field is called "node" so we simplify that case.
($ty:ident) => (Rustplace!($ty,node););
($ty:ident,$field:tt) => (
impl Rustplace for $ty {
fn trans(self, ctxt: &Context) -> Self {
$ty {
$field: self.$field.trans(ctxt),
..self
}
}
}
)
}
// We can autoimplement some of the structs because the all change the same field. :)
Rustplace!(Item);
Rustplace!(TraitItem);
Rustplace!(ImplItem);
Rustplace!(Stmt);
Rustplace!(Expr);
// These follow the same basic pattern, but the field has a different name.
Rustplace!(Block, stmts);
Rustplace!(Field, expr);
Rustplace!(Mod, items);
// These need 1 extra map so we just wrote them out.
impl Rustplace for Local {
fn trans(self, ctxt: &Context) -> Self {
Local {
init: self.init.map(|i| i.trans(ctxt)),
..self
}
}
}
impl Rustplace for Arm {
fn trans(self, ctxt: &Context) -> Self {
Arm {
guard: self.guard.map(|i| i.trans(ctxt)),
..self
}
}
}
// All the enums need to be manually implemented and we figure out what variants it makes sense
// for us to transform.
impl Rustplace for Annotatable {
fn trans(self, ctxt: &Context) -> Self {
use Annotatable::*;
match self {
Item(item) => Item(item.trans(ctxt)),
TraitItem(item) => TraitItem(item.trans(ctxt)),
ImplItem(item) => ImplItem(item.trans(ctxt)),
}
}
}
impl Rustplace for ItemKind {
fn trans(self, ctxt: &Context) -> Self {
use ItemKind::*;
match self {
Fn(a, b, c, d, e, block) => Fn(a, b, c, d, e, block.trans(ctxt)),
Static(ty, m, expr) => Static(ty, m, expr.trans(ctxt)),
Const(ty, expr) => Const(ty, expr.trans(ctxt)),
Trait(u, g, ty, v) => Trait(u, g, ty, v.trans(ctxt)),
Impl(a, b, c, d, e, f, v) => Impl(a, b, c, d, e, f, v.trans(ctxt)),
Mod(m) => Mod(m.trans(ctxt)),
_ => self,
}
} | use TraitItemKind::*;
match self {
Const(ty, Some(expr)) => Const(ty, Some(expr.trans(ctxt))),
Method(sig, Some(block)) => Method(sig, Some(block.trans(ctxt))),
_ => self,
}
}
}
impl Rustplace for ImplItemKind {
fn trans(self, ctxt: &Context) -> Self {
use ImplItemKind::*;
match self {
Const(ty, expr) => Const(ty, expr.trans(ctxt)),
Method(sig, block) => Method(sig, block.trans(ctxt)),
_ => self,
}
}
}
impl Rustplace for StmtKind {
fn trans(self, ctxt: &Context) -> Self {
use StmtKind::*;
match self {
Local(l) => Local(l.trans(ctxt)),
Item(i) => Item(i.trans(ctxt)),
Expr(e) => Expr(e.trans(ctxt)),
Semi(s) => Semi(s.trans(ctxt)),
_ => self,
}
}
}
impl Rustplace for ExprKind {
fn trans(self, ctxt: &Context) -> Self {
use ExprKind::*;
match self {
Lit(l) => Lit(l.trans(ctxt)),
Box(b) => Box(b.trans(ctxt)),
InPlace(a, b) => InPlace(a.trans(ctxt), b.trans(ctxt)),
Array(v) => Array(v.trans(ctxt)),
Call(a, v) => Call(a.trans(ctxt), v.trans(ctxt)),
MethodCall(p, v) => MethodCall(p, v.trans(ctxt)),
Tup(v) => Tup(v.trans(ctxt)),
Binary(op, l, r) => Binary(op, l.trans(ctxt), r.trans(ctxt)),
Unary(op, expr) => Unary(op, expr.trans(ctxt)),
Cast(expr, ty) => Cast(expr.trans(ctxt), ty),
Type(expr, ty) => Type(expr.trans(ctxt), ty),
If(cond, iff, els) => {
If(cond.trans(ctxt),
iff.trans(ctxt),
els.map(|i| i.trans(ctxt)))
}
IfLet(pat, expr, iff, els) => {
IfLet(pat,
expr.trans(ctxt),
iff.trans(ctxt),
els.map(|i| i.trans(ctxt)))
}
While(cond, blk, si) => While(cond.trans(ctxt), blk.trans(ctxt), si),
WhileLet(p, expr, blk, si) => WhileLet(p, expr.trans(ctxt), blk.trans(ctxt), si),
ForLoop(p, expr, blk, si) => ForLoop(p, expr.trans(ctxt), blk.trans(ctxt), si),
Loop(expr, si) => Loop(expr.trans(ctxt), si),
Match(expr, v) => Match(expr.trans(ctxt), v.trans(ctxt)),
Closure(c, p, blk, s) => Closure(c, p, blk.trans(ctxt), s),
Block(blk) => Block(blk.trans(ctxt)),
Catch(blk) => Catch(blk.trans(ctxt)),
Assign(a, b) => Assign(a.trans(ctxt), b.trans(ctxt)),
AssignOp(op, lhs, rhs) => AssignOp(op, lhs.trans(ctxt), rhs.trans(ctxt)),
Field(expr, si) => Field(expr.trans(ctxt), si),
TupField(expr, span) => TupField(expr.trans(ctxt), span),
Index(a, b) => Index(a.trans(ctxt), b.trans(ctxt)),
Range(lower, upper, lim) => {
Range(lower.map(|i| i.trans(ctxt)),
upper.map(|i| i.trans(ctxt)),
lim)
}
AddrOf(m, expr) => AddrOf(m, expr.trans(ctxt)),
Break(br, expr) => Break(br, expr.map(|i| i.trans(ctxt))),
Ret(opt) => Ret(opt.map(|i| i.trans(ctxt))),
Struct(p, v, opt) => Struct(p, v.trans(ctxt), opt.map(|i| i.trans(ctxt))),
Repeat(a, b) => Repeat(a.trans(ctxt), b.trans(ctxt)),
Paren(expr) => Paren(expr.trans(ctxt)),
Try(expr) => Try(expr.trans(ctxt)),
_ => self,
}
}
}
impl Rustplace for Spanned<LitKind> {
fn trans(self, ctxt: &Context) -> Self {
use LitKind::*;
match self.node {
// All that code above just so we can do this one transformation :)
Str(s, _) => {
let new_string = s.as_str()
.lines()
.map(|line| {
let mut output = String::new();
let mut idx = 0;
// Copy the lead whitespace over.
for c in line.chars() {
if c.is_whitespace() {
idx += 1;
output.push(c);
} else {
break;
}
}
let l = line.chars().count();
// Now just append random stuff.
while idx < l {
let r = rand::random::<usize>() % ctxt.text.len();
output.push_str(ctxt.text[r]);
output.push(' ');
idx += ctxt.text[r].chars().count();
}
// TODO: Remove the trailing ' '.
output
})
.collect::<Vec<_>>()
.join("\n");
Spanned {
node: LitKind::Str(Symbol::intern(&*new_string), StrStyle::Cooked),
..self
}
}
_ => self,
}
}
} | }
impl Rustplace for TraitItemKind {
fn trans(self, ctxt: &Context) -> Self { | random_line_split |
shared.js | "use strict";;
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
var types_1 = __importDefault(require("./types"));
function default_1(fork) {
var types = fork.use(types_1.default);
var Type = types.Type;
var builtin = types.builtInTypes;
var isNumber = builtin.number;
// An example of constructing a new type with arbitrary constraints from
// an existing type.
function geq(than) |
;
// Default value-returning functions that may optionally be passed as a
// third argument to Def.prototype.field.
var defaults = {
// Functions were used because (among other reasons) that's the most
// elegant way to allow for the emptyArray one always to give a new
// array instance.
"null": function () { return null; },
"emptyArray": function () { return []; },
"false": function () { return false; },
"true": function () { return true; },
"undefined": function () { },
"use strict": function () { return "use strict"; }
};
var naiveIsPrimitive = Type.or(builtin.string, builtin.number, builtin.boolean, builtin.null, builtin.undefined);
var isPrimitive = Type.from(function (value) {
if (value === null)
return true;
var type = typeof value;
if (type === "object" ||
type === "function") {
return false;
}
return true;
}, naiveIsPrimitive.toString());
return {
geq: geq,
defaults: defaults,
isPrimitive: isPrimitive,
};
}
exports.default = default_1;
module.exports = exports["default"];
| {
return Type.from(function (value) { return isNumber.check(value) && value >= than; }, isNumber + " >= " + than);
} | identifier_body |
shared.js | "use strict";;
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
var types_1 = __importDefault(require("./types"));
function default_1(fork) {
var types = fork.use(types_1.default);
var Type = types.Type;
var builtin = types.builtInTypes;
var isNumber = builtin.number;
// An example of constructing a new type with arbitrary constraints from
// an existing type.
function geq(than) {
return Type.from(function (value) { return isNumber.check(value) && value >= than; }, isNumber + " >= " + than);
}
;
// Default value-returning functions that may optionally be passed as a
// third argument to Def.prototype.field.
var defaults = {
// Functions were used because (among other reasons) that's the most
// elegant way to allow for the emptyArray one always to give a new
// array instance.
"null": function () { return null; },
"emptyArray": function () { return []; },
"false": function () { return false; },
"true": function () { return true; },
"undefined": function () { },
"use strict": function () { return "use strict"; }
};
var naiveIsPrimitive = Type.or(builtin.string, builtin.number, builtin.boolean, builtin.null, builtin.undefined);
var isPrimitive = Type.from(function (value) {
if (value === null)
return true;
var type = typeof value;
if (type === "object" ||
type === "function") |
return true;
}, naiveIsPrimitive.toString());
return {
geq: geq,
defaults: defaults,
isPrimitive: isPrimitive,
};
}
exports.default = default_1;
module.exports = exports["default"];
| {
return false;
} | conditional_block |
shared.js | "use strict";;
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
var types_1 = __importDefault(require("./types"));
function default_1(fork) {
var types = fork.use(types_1.default);
var Type = types.Type;
var builtin = types.builtInTypes;
var isNumber = builtin.number;
// An example of constructing a new type with arbitrary constraints from
// an existing type.
function geq(than) {
return Type.from(function (value) { return isNumber.check(value) && value >= than; }, isNumber + " >= " + than);
}
;
// Default value-returning functions that may optionally be passed as a
// third argument to Def.prototype.field.
var defaults = {
// Functions were used because (among other reasons) that's the most
// elegant way to allow for the emptyArray one always to give a new
// array instance.
"null": function () { return null; },
"emptyArray": function () { return []; },
"false": function () { return false; },
"true": function () { return true; },
"undefined": function () { },
"use strict": function () { return "use strict"; }
};
var naiveIsPrimitive = Type.or(builtin.string, builtin.number, builtin.boolean, builtin.null, builtin.undefined);
var isPrimitive = Type.from(function (value) {
if (value === null)
return true;
var type = typeof value; | }, naiveIsPrimitive.toString());
return {
geq: geq,
defaults: defaults,
isPrimitive: isPrimitive,
};
}
exports.default = default_1;
module.exports = exports["default"]; | if (type === "object" ||
type === "function") {
return false;
}
return true; | random_line_split |
shared.js | "use strict";;
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
var types_1 = __importDefault(require("./types"));
function default_1(fork) {
var types = fork.use(types_1.default);
var Type = types.Type;
var builtin = types.builtInTypes;
var isNumber = builtin.number;
// An example of constructing a new type with arbitrary constraints from
// an existing type.
function | (than) {
return Type.from(function (value) { return isNumber.check(value) && value >= than; }, isNumber + " >= " + than);
}
;
// Default value-returning functions that may optionally be passed as a
// third argument to Def.prototype.field.
var defaults = {
// Functions were used because (among other reasons) that's the most
// elegant way to allow for the emptyArray one always to give a new
// array instance.
"null": function () { return null; },
"emptyArray": function () { return []; },
"false": function () { return false; },
"true": function () { return true; },
"undefined": function () { },
"use strict": function () { return "use strict"; }
};
var naiveIsPrimitive = Type.or(builtin.string, builtin.number, builtin.boolean, builtin.null, builtin.undefined);
var isPrimitive = Type.from(function (value) {
if (value === null)
return true;
var type = typeof value;
if (type === "object" ||
type === "function") {
return false;
}
return true;
}, naiveIsPrimitive.toString());
return {
geq: geq,
defaults: defaults,
isPrimitive: isPrimitive,
};
}
exports.default = default_1;
module.exports = exports["default"];
| geq | identifier_name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.