| file_name | prefix | suffix | middle | fim_type |
|---|---|---|---|---|
| string, length 4-140 | string, length 0-12.1k | string, length 0-12k | string, length 0-7.51k | 4 classes (random_line_split, identifier_body, conditional_block, identifier_name) |
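Each row stores one fill-in-the-middle (FIM) example: a file name, the text before the masked span (prefix), the text after it (suffix), the masked span itself (middle), and a label describing how the span was chosen (fim_type). Below is a minimal sketch of reassembling a row back into its original source text, assuming the rows are loadable with the Hugging Face datasets library; the dataset path is a placeholder, not the real identifier.

from datasets import load_dataset

# Hypothetical dataset path; substitute the real identifier.
ds = load_dataset("user/fim-code-corpus", split="train")

row = ds[0]
# A FIM model is shown prefix + suffix and asked to produce middle;
# concatenating the three columns in file order restores the snippet.
original_text = row["prefix"] + row["middle"] + row["suffix"]
print(row["file_name"], row["fim_type"], len(original_text))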
message.go

import (
"bytes"
"crypto/sha256"
"encoding/binary"
"encoding/hex"
"errors"
"fmt"
"time"
"unsafe"
"dad-go/common"
"dad-go/node"
)
const (
MSGCMDLEN = 12
CMDOFFSET = 4
CHECKSUMLEN = 4
HASHLEN = 32 // hash length in byte
MSGHDRLEN = 24
)
// The Inventory type
const (
TXN = 0x01 // Transaction
BLOCK = 0x02
CONSENSUS = 0xe0
)
type messager interface {
verify([]byte) error
serialization() ([]byte, error)
deserialization([]byte) error
handle(*node) error
}
// The network communication message header
type msgHdr struct {
Magic uint32
CMD [MSGCMDLEN]byte // The message type
Length uint32
Checksum [CHECKSUMLEN]byte
}
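The header is a fixed-size record: a 4-byte magic, a 12-byte command (MSGCMDLEN), a 4-byte payload length and a 4-byte checksum, 24 bytes in total (MSGHDRLEN). A rough Python sketch of packing such a header with little-endian byte order, as the Go code below does with binary.LittleEndian; the magic value here is only a placeholder, since NETMAGIC is defined elsewhere in the package.

import struct

def pack_header(magic: int, cmd: str, length: int, checksum: bytes) -> bytes:
    # 4-byte magic + 12-byte zero-padded command + 4-byte length + 4-byte checksum = 24 bytes
    return struct.pack("<I12sI4s", magic, cmd.encode(), length, checksum[:4])

hdr = pack_header(0x12345678, "version", 85, b"\x5d\xf6\xe0\xe2")  # placeholder magic
assert len(hdr) == 24  # MSGHDRLEN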
// The message body and header
type msgCont struct {
hdr msgHdr
p interface{}
}
type varStr struct {
len uint
buf []byte
}
type verACK struct {
msgHdr
// No payload
}
type version struct {
Hdr msgHdr
P struct {
Version uint32
Services uint64
TimeStamp uint32
Port uint16
Nonce uint32
// TODO: remove; temporary, to get the serialization function to pass
UserAgent uint8
StartHeight uint32
// FIXME: check against the specified relay type length
Relay uint8
}
}
type headersReq struct {
hdr msgHdr
p struct {
len uint8
hashStart [HASHLEN]byte
hashEnd [HASHLEN]byte
}
}
type addrReq struct {
Hdr msgHdr
// No payload
}
type blkHeader struct {
hdr msgHdr
blkHdr []byte
}
type addr struct {
msgHdr
// TBD
}
type invPayload struct {
invType uint8
blk []byte
}
type inv struct {
hdr msgHdr
p invPayload
}
type dataReq struct {
msgHdr
// TBD
}
type block struct {
msgHdr
// TBD
}
// Transaction message
type trn struct {
msgHdr
// TBD
}
// Alloc different message structures
// @t the message name or type
// @len the message length, only valid for variable-length structures
//
// Return:
// @messager the messager structure
// @error error code
// FIXME: fix the ugly multiple return.
func allocMsg(t string, length int) (messager, error) {
switch t {
case "msgheader":
var msg msgHdr
return &msg, nil
case "version":
var msg version
return &msg, nil
case "verack":
var msg verACK
return &msg, nil
case "getheaders":
var msg headersReq
return &msg, nil
case "headers":
var msg blkHeader
return &msg, nil
case "getaddr":
var msg addrReq
return &msg, nil
case "addr":
var msg addr
return &msg, nil
case "inv":
var msg inv
// the 1 is the inv type length
msg.p.blk = make([]byte, length - MSGHDRLEN - 1)
return &msg, nil
case "getdata":
var msg dataReq
return &msg, nil
case "block":
var msg block
return &msg, nil
case "tx":
var msg trn
return &msg, nil
default:
return nil, errors.New("Unknown message type")
}
}
// TODO: combine all of the message allocation into one function via the interface
func newMsg(t string) ([]byte, error) {
switch t {
case "version":
return newVersion()
case "verack":
return newVerack()
case "getheaders":
return newHeadersReq()
case "getaddr":
return newGetAddr()
default:
return nil, errors.New("Unknown message type")
}
}
func (hdr *msgHdr) init(cmd string, checksum []byte, length uint32) {
hdr.Magic = NETMAGIC
copy(hdr.CMD[0: uint32(len(cmd))], cmd)
copy(hdr.Checksum[:], checksum[:CHECKSUMLEN])
hdr.Length = length
fmt.Printf("The message payload length is %d\n", hdr.Length)
fmt.Printf("The message header length is %d\n", uint32(unsafe.Sizeof(*hdr)))
}
func (msg *version) init(n node) {
// Do the init
}
func newVersion() ([]byte, error) {
common.Trace()
var msg version
// TODO Need Node read lock or channel
msg.P.Version = nodes.node.version
msg.P.Services = nodes.node.services
// FIXME Time overflow
msg.P.TimeStamp = uint32(time.Now().UTC().UnixNano())
msg.P.Port = nodes.node.port
msg.P.Nonce = nodes.node.nonce
fmt.Printf("The nonce is 0x%x", msg.P.Nonce)
msg.P.UserAgent = 0x00
// Fixme Get the block height from ledger
msg.P.StartHeight = 1
if nodes.node.relay {
msg.P.Relay = 1
} else {
msg.P.Relay = 0
}
msg.Hdr.Magic = NETMAGIC
ver := "version"
copy(msg.Hdr.CMD[0:7], ver)
p := new(bytes.Buffer)
err := binary.Write(p, binary.LittleEndian, &(msg.P))
if err != nil {
fmt.Println("Binary Write failed at new Msg")
return nil, err
}
s := sha256.Sum256(p.Bytes())
s2 := s[:]
s = sha256.Sum256(s2)
buf := bytes.NewBuffer(s[:4])
binary.Read(buf, binary.LittleEndian, &(msg.Hdr.Checksum))
msg.Hdr.Length = uint32(len(p.Bytes()))
fmt.Printf("The message payload length is %d\n", msg.Hdr.Length)
m, err := msg.serialization()
if (err != nil) {
fmt.Println("Error Convert net message ", err.Error())
return nil, err
}
str := hex.EncodeToString(m)
fmt.Printf("The message length is %d, %s\n", len(m), str)
return m, nil
}
func newVerack() ([]byte, error) {
var msg verACK
// FIXME: the checksum should be computed over []byte{0} instead of 0
var sum []byte
sum = []byte{0x5d, 0xf6, 0xe0, 0xe2}
msg.msgHdr.init("verack", sum, 0)
buf, err := msg.serialization()
if (err != nil) {
return nil, err
}
str := hex.EncodeToString(buf)
fmt.Printf("The message tx verack length is %d, %s", len(buf), str)
return buf, err
}
func newGetAddr() ([]byte, error) {
var msg addrReq
// FIXME: the checksum should be computed over []byte{0} instead of 0
var sum []byte
sum = []byte{0x5d, 0xf6, 0xe0, 0xe2}
msg.Hdr.init("getaddr", sum, 0)
buf, err := msg.serialization()
if (err != nil) {
return nil, err
}
str := hex.EncodeToString(buf)
fmt.Printf("The message get addr length is %d, %s", len(buf), str)
return buf, err
}
func magicVerify(magic uint32) bool {
if (magic != NETMAGIC) {
return false
}
return true
}
func payloadLen(buf []byte) int {
var h msgHdr
h.deserialization(buf)
return int(h.Length)
}
func msgType(buf []byte) (string, error) {
cmd := buf[CMDOFFSET : CMDOFFSET + MSGCMDLEN]
n := bytes.IndexByte(cmd, 0)
if (n < 0 || n >= MSGCMDLEN) {
return "", errors.New("Unexpected length of CMD command")
}
s := string(cmd[:n])
return s, nil
}
func checkSum(p []byte) []byte {
t := sha256.Sum256(p)
s := sha256.Sum256(t[:])
// Currently we only need the first 4 bytes as the checksum
return s[: CHECKSUMLEN]
}
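checkSum is the first four bytes of a double SHA-256, the same scheme Bitcoin uses for message checksums. A quick Python equivalent for cross-checking; for an empty payload it yields the 0x5d 0xf6 0xe0 0xe2 constant hard-coded in newVerack and newGetAddr above.

import hashlib

def checksum(payload: bytes) -> bytes:
    # First 4 bytes of SHA-256(SHA-256(payload)), mirroring the Go checkSum().
    return hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]

assert checksum(b"").hex() == "5df6e0e2"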
func reverse(input []byte) []byte {
if len(input) == 0 {
return input
}
return append(reverse(input[1:]), input[0])
}
func newHeadersReq() ([]byte, error) {
var h headersReq
// FIXME: set the exact request length
h.p.len = 1
buf, err := LedgerGetHeader()
if (err != nil) {
return nil, err
}
copy(h.p.hashStart[:], reverse(buf))
p := new(bytes.Buffer)
err = binary.Write(p, binary.LittleEndian, &(h.p))
if err != nil {
fmt.Println("Binary Write failed at new headersReq")
return nil, err
}
s := checkSum(p.Bytes())
h.hdr.init("getheaders", s, uint32(len(p.Bytes())))
m, err := h.serialization()
str := hex.EncodeToString(m)
fmt.Printf("The message length is %d, %s\n", len(m), str)
return m, err
}
// Verify the message header information
// @p payload of the message
func (hdr msgHdr) verify(buf []byte) error {
if (hdr.Magic != NETMAGIC) {
fmt.Printf("Unmatched magic number 0x%d\n", hdr.Magic)
return errors.New("Unmatched magic number")
}
checkSum := checkSum(buf)
if (bytes.Equal(hdr.Checksum[:], checkSum[:]) == false) {
str1 := hex.EncodeToString(hdr.Checksum[:])
str2 := hex.EncodeToString(checkSum[:])
fmt.Printf("Message Checksum error, Received checksum %s Wanted checksum: %s\n",
str1, str2)
return errors.New("Message Checksum error")
}
return nil
}
func (msg version) verify(buf []byte) error {
err := msg.Hdr.verify(buf)
// TODO verify the message Content
return err
}
func (msg headersReq) verify(buf []byte) error {
// TODO verify the message Content
err := msg.hdr.verify(buf)
return err
}
func (msg blkHeader) verify(buf []byte) error {
// TODO verify the message Content
err := msg.hdr.verify(buf)
return err
}
func (msg addrReq) verify(buf []byte) error {
// TODO verify the message Content
err := msg.Hdr.verify(buf)
return err
}
func (msg inv) verify(buf []byte) error {
// TODO verify the message Content
err := msg.hdr.verify(buf)
return err
}
// FIXME: figure out how to avoid duplicate serialization/deserialization functions,
// as most of them are the same
func (hdr msgHdr) serialization() ([]byte, error) {
var buf bytes.Buffer
err := binary.Write(&buf, binary.LittleEndian, hdr)
if err != nil {
return nil, err
}
return buf.Bytes(), err
}
func (msg *msgHdr) deserialization(p []byte) error {
buf := bytes.NewBuffer(p[0 : MSGHDRLEN])
err := binary.Read(buf, binary.LittleEndian, msg)
return err
}
func (msg version) serialization() ([]byte, error) {
var buf bytes.Buffer
fmt.Printf("The size of messge is %d in serialization\n",
uint32(unsafe.Sizeof(msg)))
err := binary.Write(&buf, binary.LittleEndian, msg)
if err != nil {
return nil, err
}
return buf.Bytes(), err
}
func (msg *version) deserialization(p []byte) error {
fmt.Printf("The size of messge is %d in deserialization\n",
uint32(unsafe.Sizeof(*msg)))
buf := bytes.NewBuffer(p)
err := binary.Read(buf, binary.LittleEndian, msg)
return err
}
func (msg headersReq) serialization() ([]byte, error) {
var buf bytes.Buffer
fmt.Printf("The size of messge is %d in serialization\n",
uint32(unsafe.Sizeof(msg)))
err := binary.Write(&buf, binary.LittleEndian, msg)
if err != nil {
return nil, err
}
return buf.Bytes(), err
}
func (msg *headersReq) deserialization(p []byte) error {
fmt.Printf("The size of messge is %d in deserialization\n",
uint32(unsafe.Sizeof(*msg)))
buf := bytes.NewBuffer(p)
err := binary.Read(buf, binary.LittleEndian, msg)
return err
}
func (msg blkHeader) serialization() ([]byte, error) {
var buf bytes.Buffer
fmt.Printf("The size of messge is %d in serialization\n",
uint32(unsafe.Sizeof(msg)))
err := binary.Write(&buf, binary.LittleEndian, msg)
if err != nil {
return nil, err
}
// TODO: serialize the header, then the payload
return buf.Bytes(), err
}
func (msg *blkHeader) deserialization(p []byte) error {
fmt.Printf("The size of messge is %d in deserialization\n",
uint32(unsafe.Sizeof(*msg)))
err := msg.hdr.deserialization(p)
msg.blkHdr = p[MSGHDRLEN : ]
return err
}
func (msg addrReq) serialization() ([]byte, error) {
var buf bytes.Buffer
fmt.Printf("The size of messge is %d in serialization\n",
uint32(unsafe.Sizeof(msg)))
err := binary.Write(&buf, binary.LittleEndian, msg)
if err != nil {
return nil, err
}
return buf.Bytes(), err
}
func (msg *addrReq) deserialization(p []byte) error {
fmt.Printf("The size of messge is %d in deserialization\n",
uint32(unsafe.Sizeof(*msg)))
buf := bytes.NewBuffer(p)
err := binary.Read(buf, binary.LittleEndian, msg)
return err
}
func (msg inv) serialization() ([]byte, error) {
var buf bytes.Buffer
fmt.Printf("The size of messge is %d in serialization\n",
uint32(unsafe.Sizeof(msg)))
err := binary.Write(&buf, binary.LittleEndian, msg)
if err != nil {
return nil, err
}
return buf.Bytes(), err
}
func (msg *inv) deserialization(p []byte) error {
fmt.Printf("The size of messge is %d in deserialization\n",
uint32(unsafe.Sizeof(*msg)))
err := msg.hdr.deserialization(p)
msg.p.invType = p[MSGHDRLEN]
msg.p.blk = p[MSGHDRLEN + 1 :]
return err
}
func (msg inv) invType() byte {
return msg.p.invType
}
func (msg inv) invLen() (uint64, uint8) {
var val uint64
var size uint8
len := binary.LittleEndian.Uint64(msg.p.blk[0:1])
if (len < 0xfd) {
val = len
size = 1
} else if (len == 0xfd) {
val = binary.LittleEndian.Uint64(msg.p.blk[1 : 3])
size = 3
} else if (len == 0xfe) {
val = binary.LittleEndian.Uint64(msg.p.blk[1 : 5])
size = 5
} else if (len == 0xff) {
val = binary.LittleEndian.Uint64(msg.p.blk[1 : 9])
size = 9
}
return val, size
}
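invLen reads a Bitcoin-style variable-length integer (CompactSize): values below 0xfd fit in one byte, while 0xfd, 0xfe and 0xff announce 2-, 4- and 8-byte little-endian integers, giving the 1/3/5/9 total sizes returned above. A small Python sketch of the same decoding, assuming the buffer is long enough:

def decode_varint(buf: bytes):
    # Bitcoin-style CompactSize: the first byte decides how many bytes follow.
    first = buf[0]
    if first < 0xfd:
        return first, 1
    if first == 0xfd:
        return int.from_bytes(buf[1:3], "little"), 3
    if first == 0xfe:
        return int.from_bytes(buf[1:5], "little"), 5
    return int.from_bytes(buf[1:9], "little"), 9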
Reinforcement_Learning_second_train.py

# Each memory record contains state, action, reward, next_state and done
self.memory = {'state': [], 'action': [], 'reward': [], 'next_state': [], 'done': []}
# Maximum number of records to keep
self.memory_capacity = memory_capacity
# Minimum number of records collected before training starts
self.min_memory = min_memory
# Number of samples drawn for each training step
self.batch_size = batch_size
# Mainstream Deep Q-learning uses two identical models for training:
# only train_model is trained,
# target_model only passively receives the weights
# Training model
self.train_model = creat_model()
# Target model
self.target_model = creat_model()
# Optimizer
self.optimizer = tf.keras.optimizers.Adam(learning_rate = learning_rate)
# Loss function
self.loss_function = tf.keras.losses.MeanSquaredError()
self.loss_value = None
def get_action(self, state, random_action_rate):
if np.random.random() < random_action_rate:
action = np.random.randint(0, self.num_actions)
else:
#action_rate = self.train_model(np.reshape(state, [1, self.num_states]))
action_rate = self.train_model.predict(np.reshape(state, [1, self.num_states]))
action = np.argmax(action_rate)
return action
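get_action is a standard epsilon-greedy policy: with probability random_action_rate it picks a random action, otherwise it takes the argmax of the Q-network's prediction. The training loop below keeps this rate fixed; a common variant, shown here purely as an assumption, decays it over episodes.

# Hypothetical decay schedule; training_loop below uses a constant random_action_rate.
epsilon, epsilon_min, epsilon_decay = 1.0, 0.05, 0.995
for episode in range(500):
    # ... run one episode, calling DQL_model.get_action(state, epsilon) each step ...
    epsilon = max(epsilon_min, epsilon * epsilon_decay)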
def save_memory(self, new_memory):
# If the buffer is full, drop the oldest record
if len(self.memory['state']) >= self.memory_capacity:
for key in self.memory.keys():
self.memory[key].pop(0)
# Append the new record
for key, value in new_memory.items():
self.memory[key].append(value)
def get_memory_size(self):
return len(self.memory['state'])
def get_loss_value(self):
return self.loss_value
# Wrapping a TensorFlow computation in @tf.function (instead of eager mode) improves performance,
# but the function can then no longer be inspected with a debugger
@tf.function
def calculate_gradient(self, train_state, train_action, nextq_value):
# Compute loss_value inside a GradientTape so the gradients can be taken
with tf.GradientTape() as tape:
train_model_output = self.train_model(train_state)
q_value = tf.math.reduce_sum(train_model_output
* tf.one_hot(train_action, self.num_actions, dtype = 'float64'), axis=1)
loss_value = self.loss_function(nextq_value, q_value)
# Compute the gradients
weight = self.train_model.trainable_variables
gradients = tape.gradient(loss_value, weight)
# Update the weights according to the gradients
self.optimizer.apply_gradients(zip(gradients, weight))
#self.loss_value = loss_value.numpy()
def training_model(self):
if len(self.memory['state']) > self.min_memory:
# Draw one batch of training data
sample_index = np.random.choice(len(self.memory['state']), self.batch_size)
train_state = np.asarray([self.memory['state'][index] for index in sample_index])
train_action = np.asarray([self.memory['action'][index] for index in sample_index])
train_reward = np.asarray([self.memory['reward'][index] for index in sample_index])
train_next_state = np.asarray([self.memory['next_state'][index] for index in sample_index])
train_done = np.asarray([self.memory['done'][index] for index in sample_index])
# Get the target model's prediction for next_state
#taeget_predict = np.max(self.target_model(train_next_state), axis = 1)
taeget_predict = np.max(self.target_model.predict(train_next_state), axis = 1)
# Compute the next Q value
# If the chosen action leads to done, output the reward directly and ignore the feedback from next_state
#nextq_value = train_reward + (self.gamma * taeget_predict)
nextq_value = np.where(train_done, train_reward, train_reward + (self.gamma * taeget_predict))
self.calculate_gradient(train_state, train_action, nextq_value)
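The target here is the usual one-step Q-learning target: reward + gamma * max_a' Q_target(next_state, a') for non-terminal transitions, and just the reward when done is true. A tiny numeric check of the np.where expression, with a placeholder gamma:

import numpy as np

gamma = 0.9  # placeholder; the real value is passed into Deep_Q_Learning
train_reward = np.array([1.0, 0.0])
taeget_predict = np.array([2.0, 5.0])  # max_a' Q_target(s', a')
train_done = np.array([False, True])
nextq_value = np.where(train_done, train_reward, train_reward + gamma * taeget_predict)
# -> [2.8, 0.0]: the terminal transition ignores the bootstrapped estimate.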
def copy_weight(self):
# Copy the train model's weights to the target model
self.target_model.set_weights(self.train_model.get_weights())
def save_model(self):
self.train_model.save('E:/python program/增強式學習結果/Model/DQL_Model_second_train',
include_optimizer = False)
def creat_model():
# Load the model
return load_model('E:/python program/增強式學習結果/Model/DQL_Model')
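creat_model reloads an already-trained Keras model from disk, since this script is a second training pass; the architecture itself is not defined in this file. Purely as an assumption, a typical CartPole Q-network of the kind that might sit behind DQL_Model looks like this:

from tensorflow import keras

def build_model(num_states: int = 4, num_actions: int = 2) -> keras.Model:
    # Hypothetical architecture; the real DQL_Model saved on disk may differ.
    return keras.Sequential([
        keras.layers.Dense(24, activation="relu", input_shape=(num_states,)),
        keras.layers.Dense(24, activation="relu"),
        keras.layers.Dense(num_actions, activation="linear"),
    ])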
def training_loop(epochs, num_states, num_actions, gamma, random_action_rate, target_replace_count, memory_capacity,
min_memory, batch_size, learning_rate):
DQL_model = Deep_Q_Learning(num_states, num_actions, gamma,
memory_capacity, min_memory, batch_size, learning_rate)
step_list = []
reward_list = []
step_mean_list = []
loss_list = []
target_step = 0
# Run a warm-up loop first to fill the minimum memory requirement
# Reset the environment to its initial state
state = env.reset()
# Count how many actions were taken in one epoch before it ended
step_times = 0
while DQL_model.get_memory_size() < (min_memory - 1):
# Get the action chosen by the model
action = DQL_model.get_action(state, random_action_rate)
# Stepping the environment with the given action returns four values:
# next_state: the new environment state after the interaction
# reward: the reward given by the new state
# done: whether the environment's termination condition has been reached
#action = train_model.get_action(state, random_action_rate)
next_state, reward, done, info = env.step(action)
# theta is in radians,
# the same unit as theta_threshold_radians
x, v, theta, omega = next_state
## Reshape the reward to improve training
## the closer the cart is to the centre, the better
#r1 = ((env.x_threshold - abs(x)) / env.x_threshold) * 0.2
## the more upright the pole is, the better
#r2 = ((env.theta_threshold_radians - abs(theta)) / env.theta_threshold_radians) * 0.8
#reward = r1 + r2
step_times += 1
if done:
reward = 0
if step_times == 200:
reward = 1
DQL_model.save_model()
# Build the experience record
new_memory = {'state': state, 'action': action, 'reward': reward, 'next_state': next_state, 'done': False}
else:
# Build the experience record
new_memory = {'state': state, 'action': action, 'reward': reward, 'next_state': next_state, 'done': done}
DQL_model.save_memory(new_memory)
# Update the environment state
if done:
step_times = 0
state = env.reset()
else:
state = next_state
#print(len(DQL_model.get_memory()['state']))
for epoch in range(epochs):
# Reset the environment to its initial state
state = env.reset()
# Accumulate the reward within each epoch
rewards = 0
# Count how many actions were taken in one epoch before it ended
step_times = 0
loss = []
while True:
# Render the environment
#env.render()
# Get the action chosen by the model
action = DQL_model.get_action(state, random_action_rate)
# Stepping the environment with the given action returns four values:
# next_state: the new environment state after the interaction
# reward: the reward given by the new state
# done: whether the environment's termination condition has been reached
#action = train_model.get_action(state, random_action_rate)
next_state, reward, done, info = env.step(action)
# theta is in radians,
# the same unit as theta_threshold_radians
x, v, theta, omega = next_state
## Reshape the reward to improve training
## the closer the cart is to the centre, the better
#r1 = ((env.x_threshold - abs(x)) / env.x_threshold) * 0.2
## the more upright the pole is, the better
#r2 = ((env.theta_threshold_radians - abs(theta)) / env.theta_threshold_radians) * 0.8
#reward = r1 + r2
step_times += 1
target_step += 1
if done:
reward = 0
if step_times == 200:
reward = 1
DQL_model.save_model()
# Build the experience record
new_memory = {'state': state, 'action': action, 'reward': reward, 'next_state': next_state, 'done': False}
else:
# Build the experience record
new_memory = {'state': state, 'action': action, 'reward': reward, 'next_state': next_state, 'done': done}
# Add this step's reward to the epoch total
rewards += reward
# Package the current information into memory for later training
DQL_model.save_memory(new_memory)
# Start training once there is enough data
DQL_model.training_model()
#loss_value = DQL_model.get_loss_value()
#if loss_value != None:
# loss.append(loss_value)
useful.rs

pub struct Player {
pub id: i64,
pub sprite: String,
pub class: String,
pub aspect: String,
pub materials: Materials,
pub inventory: Vec<String>,
pub storage: Vec<String>,
pub sylladex_type: String,
}
#[derive(Debug, Clone)]
pub struct Materials {
pub build: i64,
pub amber: i64,
pub amethyst: i64,
pub caulk: i64,
pub chalk: i64,
pub cobalt: i64,
pub diamond: i64,
pub garnet: i64,
pub gold: i64,
pub iodine: i64,
pub marble: i64,
pub mercury: i64,
pub quartz: i64,
pub ruby: i64,
pub rust: i64,
pub shale: i64,
pub sulfur: i64,
pub tar: i64,
pub uranium: i64,
pub zillium: i64,
}
// Useful functions for Player
impl Player {
pub fn empty() -> Self {
return Player {
id: 0,
sprite: "Empty".to_string(),
class: "Bard".to_string(),
aspect: "Light".to_string(),
materials: Materials::empty(),
inventory: vec!["disc".to_string()],
storage: vec![],
sylladex_type: "".to_owned(),
}
}
}
// Useful functions for Materials
impl Materials {
pub fn empty() -> Self {
return Materials {
build: 0,
amber: 0,
amethyst: 0,
caulk: 0,
chalk: 0,
cobalt: 0,
diamond: 0,
garnet: 0,
gold: 0,
iodine: 0,
marble: 0,
mercury: 0,
quartz: 0,
ruby: 0,
rust: 0,
shale: 0,
sulfur: 0,
tar: 0,
uranium: 0,
zillium: 0,
}
}
}
// Makes it so you can iterate through materials
impl IntoIterator for Materials {
type Item = i64;
type IntoIter = std::array::IntoIter<i64, 20>;
fn into_iter(self) -> Self::IntoIter {
std::array::IntoIter::new([
self.build,
self.amber,
self.amethyst,
self.caulk,
self.chalk,
self.cobalt,
self.diamond,
self.garnet,
self.gold,
self.iodine,
self.marble,
self.mercury,
self.quartz,
self.ruby,
self.rust,
self.shale,
self.sulfur,
self.tar,
self.uranium,
self.zillium
])
}
}
// Easily send a message
pub async fn sendmessage(message: &str, ctx: &Context, msg: &Message) {
// Send a message or direct message the user saying there was an error
if let Err(why) = msg.channel_id.say(&ctx.http, message).await {
if let Err(why2) = msg.author.direct_message(&ctx, |m| {
m.content(
format!("Hello {}, The error I got is `{}`", msg.author, why)
)
}).await {
println!("{} | {}", why, why2)
}
}
}
// Send embed
pub async fn send_embed<F>(ctx: &Context, msg: &Message, closure: F) where F: FnOnce(&mut CreateEmbed) -> &mut CreateEmbed, {
if let Err(why) = msg.channel_id.send_message(&ctx, |m| {
m.embed(closure);
m
}).await {
sendmessage(format!("Error {}", why).as_str(), ctx, msg).await;
}
}
// Executes an SQL statement
pub async fn sqlstatement(statement: &str) -> Result<(), Error> {
let (client, connection) = tokio_postgres::connect(POSTGRE, NoTls).await.unwrap();
tokio::spawn(async move {
if let Err(e) = connection.await {
eprintln!("connection error: {}", e);
}
});
let _ = client.execute(statement, &[]).await?;
Ok(())
}
// Executes an UPDATE SQL statement
pub async fn update_sqlstatement(statement: &str, author_id: u64, params: &[&(dyn ToSql + Sync)],) -> Result<(), Error> {
let (client, connection) = tokio_postgres::connect(POSTGRE, NoTls).await.unwrap();
tokio::spawn(async move {
if let Err(e) = connection.await {
eprintln!("connection error: {}", e);
}
});
let _ = client.execute(format!("UPDATE player SET {} WHERE \"id\"={}", statement, author_id).as_str(), params).await?;
Ok(())
}
// Checks if the user has an entry in the DB
pub async fn check_if_registered(id: u64) -> Result<(), Error> {
// Get player
let result = get_player(id).await;
let player = result.unwrap_or(Player::empty());
// if player.id is 0 then they don't have an entry
// so then create an entry
if player.id == 0 {
let (client, connection) = tokio_postgres::connect(POSTGRE, NoTls).await.unwrap();
tokio::spawn(async move {
if let Err(e) = connection.await {
eprintln!("connection error: {}", e);
}
});
let _ = client.execute("INSERT INTO player
(\"id\") VALUES ($1);", &[&(id as i64)]).await.unwrap();
}
Ok(())
}
// Postgres search statement
pub async fn get_player(author_id: u64) -> Result<Player, Error> {
let (client, connection) = tokio_postgres::connect(POSTGRE, NoTls).await?;
tokio::spawn(async move {
if let Err(e) = connection.await {
eprintln!("connection error: {}", e);
}
});
let mut player = Player::empty();
// Create Player struct
for row in client.query("SELECT * FROM player WHERE \"id\"=$1",&[&(author_id as i64)]).await? {
let inventory = row.get::<_, String>(24).split("ˌ").map(str::to_string).collect::<Vec<String>>();
let storage = row.get::<_, String>(25).split("ˌ").map(str::to_string).collect::<Vec<String>>();
player = Player {
id: row.get(0),
sprite: row.get(21),
class: row.get(22),
aspect: row.get(23),
materials: Materials {
build: row.get(1),
amber: row.get(2),
amethyst: row.get(3),
caulk: row.get(4),
chalk: row.get(5),
cobalt: row.get(6),
diamond: row.get(7),
garnet: row.get(8),
gold: row.get(9),
iodine: row.get(10),
marble: row.get(11),
mercury: row.get(12),
quartz: row.get(13),
ruby: row.get(14),
rust: row.get(15),
shale: row.get(16),
sulfur: row.get(17),
tar: row.get(18),
uranium: row.get(19),
zillium: row.get(20),
},
inventory,
storage,
sylladex_type: row.get(26),
}
}
return Ok(player)
}
// Gets exile quote
pub async fn get_exile_quote(ctx: &Context, msg: &Message) {
// Exile quotes
let exile_1: Vec<&str> = vec!["What are you doing", "Good job hero"];
let exile_2: Vec<&str> = vec!["DO YOU HAVE ANY IDEA WHAT YOU ARE DOING?", "YOU ARE DOING GOOD MAGGOT!"];
let exile_3: Vec<&str> = vec!["Good.", "Yes more."];
let exile_4: Vec<&str> = vec!["i could do better than that", "what are you doing loser"];
// Send embed function
async fn send_embed(ctx: &Context, msg: &Message, embed_text: &str) {
let randcolor: u32 = thread_rng().gen_range(0x000000..0xFFFFFF);
if let Err(why) = msg.channel_id.send_message(&ctx.http, |m| {
m.embed(|e| {
e.title(format!("{}'s Exile", msg.author.name).as_str());
e.description(format_emojis!("{}", embed_text));
e.color(randcolor);
e.author(|a| {
a.icon_url(msg.author.avatar_url().unwrap());
a.name(msg.author.name.as_str());
a
});e
});m
}).await {
sendmessage(format!("Error {}", why).as_str(), ctx, msg).await;
}
}
// Random index for exile quote
let rand_index: u32 = thread_rng().gen_range(0..exile_1.len() as u32);
// Send exile quote
let author_exile = (msg.author.id.as_u64() % 4) + 1;
if author_exile == 1 {
send_embed(ctx, msg, exile_1[rand_index as usize]).await;
} else if author_exile == 2 {
send_embed(ctx, msg, exile_2[rand_index as usize]).await;
} else if author_exile == 3 {
|
send_embed(ctx, msg, exile_3[rand_index as usize]).await;
} else if author_exile == 4 {
send_embed(ctx, msg, exile_4[rand_index as usize]).await;
}
}
pub trait InVec: std::cmp::PartialEq + Sized {
fn in_vec(self, vector: Vec<Self>) -> bool {
vector.contains(&self)
}
}
impl<T> InVec for T where T: std::cmp::PartialEq {}
pub trait ConvertCaseToSnake {
fn to_snakecase(&self) -> String;
}
impl ConvertCaseToSnake for String {
fn to_snakecase(&self) -> String {
let part1 = &self.to_uppercase()[0..1];
let part2 = &self.to_lowercase()[1..self.len()];
return format!("{}{}", part1, part2);
}
}
pub trait VecStrToString {
fn vec_to_string(self) -> Vec<String>;
}
impl<T: std::fmt::Display> VecStrToString for Vec<T> {
fn vec_to_string(self) -> Vec<String> {
let mut return_vector = vec![];
for x in 0..self.len() {
return_vector.push(self[x].to_string());
}
return return_vector;
}
}
pub trait FormatVec {
fn format_vec(&self) -> String;
}
|
impl<T: std::fmt::Display> FormatVec for Vec<T> {
fn format_vec(&self) -> String {
useful.rs | ,
pub materials: Materials,
pub inventory: Vec<String>,
pub storage: Vec<String>,
pub sylladex_type: String,
}
#[derive(Debug, Clone)]
pub struct Materials {
pub build: i64,
pub amber: i64,
pub amethyst: i64,
pub caulk: i64,
pub chalk: i64,
pub cobalt: i64,
pub diamond: i64,
pub garnet: i64,
pub gold: i64,
pub iodine: i64,
pub marble: i64,
pub mercury: i64,
pub quartz: i64,
pub ruby: i64,
pub rust: i64,
pub shale: i64,
pub sulfur: i64,
pub tar: i64,
pub uranium: i64,
pub zillium: i64,
}
// Useful functions for Player
impl Player {
pub fn empty() -> Self {
return Player {
id: 0,
sprite: "Empty".to_string(),
class: "Bard".to_string(),
aspect: "Light".to_string(),
materials: Materials::empty(),
inventory: vec!["disc".to_string()],
storage: vec![],
sylladex_type: "".to_owned(),
}
}
}
// Useful functions for Materials
impl Materials {
pub fn empty() -> Self {
return Materials {
build: 0,
amber: 0,
amethyst: 0,
caulk: 0,
chalk: 0,
cobalt: 0,
diamond: 0,
garnet: 0,
gold: 0,
iodine: 0,
marble: 0,
mercury: 0,
quartz: 0,
ruby: 0,
rust: 0,
shale: 0,
sulfur: 0,
tar: 0,
uranium: 0,
zillium: 0,
}
}
}
// Makes it so you can iterate through materials
impl IntoIterator for Materials {
type Item = i64;
type IntoIter = std::array::IntoIter<i64, 20>;
fn into_iter(self) -> Self::IntoIter {
std::array::IntoIter::new([
self.build,
self.amber,
self.amethyst,
self.caulk,
self.chalk,
self.cobalt,
self.diamond,
self.garnet,
self.gold,
self.iodine,
self.marble,
self.mercury,
self.quartz,
self.ruby,
self.rust,
self.shale,
self.sulfur,
self.tar,
self.uranium,
self.zillium
])
}
}
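// Usage sketch (clone first, because into_iter consumes the struct): let total: i64 = player.materials.clone().into_iter().sum();
// Note: std::array::IntoIter::new is deprecated on newer toolchains in favour of calling .into_iter() on the array itself.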
// Easily send a message
pub async fn sendmessage(message: &str, ctx: &Context, msg: &Message) {
// Send a message or direct message the user saying there was an error
if let Err(why) = msg.channel_id.say(&ctx.http, message).await {
if let Err(why2) = msg.author.direct_message(&ctx, |m| {
m.content(
format!("Hello {}, The error I got is `{}`", msg.author, why)
)
}).await {
println!("{} | {}", why, why2)
}
}
}
// Send embed
pub async fn send_embed<F>(ctx: &Context, msg: &Message, closure: F) where F: FnOnce(&mut CreateEmbed) -> &mut CreateEmbed, {
if let Err(why) = msg.channel_id.send_message(&ctx, |m| {
m.embed(closure);
m
}).await {
sendmessage(format!("Error {}", why).as_str(), ctx, msg).await;
}
}
// Executes a sql statement
pub async fn | (statement: &str) -> Result<(), Error> {
let (client, connection) = tokio_postgres::connect(POSTGRE, NoTls).await.unwrap();
tokio::spawn(async move {
if let Err(e) = connection.await {
eprintln!("connection error: {}", e);
}
});
let _ = client.execute(statement, &[]).await?;
Ok(())
}
// Executes a update sql statement
pub async fn update_sqlstatement(statement: &str, author_id: u64, params: &[&(dyn ToSql + Sync)],) -> Result<(), Error> {
let (client, connection) = tokio_postgres::connect(POSTGRE, NoTls).await.unwrap();
tokio::spawn(async move {
if let Err(e) = connection.await {
eprintln!("connection error: {}", e);
}
});
let _ = client.execute(format!("UPDATE player SET {} WHERE \"id\"={}", statement, author_id).as_str(), params).await?;
Ok(())
}
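// Example (a sketch; the "grist" column name is only an assumption about the schema):
//   update_sqlstatement("\"grist\"=$1", author_id, &[&10i64]).await?;
// The statement fragment is spliced into the SQL string, so it must never be built from user input.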
// Checks if the user has an entry in the DB
pub async fn check_if_registered(id: u64) -> Result<(), Error> {
// Get player
let result = get_player(id).await;
let player = result.unwrap_or(Player::empty());
// if player.id is 0 then they don't have an entry
// so then create an entry
if player.id == 0 {
let (client, connection) = tokio_postgres::connect(POSTGRE, NoTls).await.unwrap();
tokio::spawn(async move {
if let Err(e) = connection.await {
eprintln!("connection error: {}", e);
}
});
let _ = client.execute("INSERT INTO player
(\"id\") VALUES ($1);", &[&(id as i64)]).await.unwrap();
}
Ok(())
}
// Loads a Player row from Postgres for the given Discord user id
pub async fn get_player(author_id: u64) -> Result<Player, Error> {
let (client, connection) = tokio_postgres::connect(POSTGRE, NoTls).await?;
tokio::spawn(async move {
if let Err(e) = connection.await {
eprintln!("connection error: {}", e);
}
});
let mut player = Player::empty();
// Build the Player from the matching row; fields are read by column index, so the order must match the table definition
for row in client.query("SELECT * FROM player WHERE \"id\"=$1",&[&(author_id as i64)]).await? {
let inventory = row.get::<_, String>(24).split("ˌ").map(str::to_string).collect::<Vec<String>>();
let storage = row.get::<_, String>(25).split("ˌ").map(str::to_string).collect::<Vec<String>>();
player = Player {
id: row.get(0),
sprite: row.get(21),
class: row.get(22),
aspect: row.get(23),
materials: Materials {
build: row.get(1),
amber: row.get(2),
amethyst: row.get(3),
caulk: row.get(4),
chalk: row.get(5),
cobalt: row.get(6),
diamond: row.get(7),
garnet: row.get(8),
gold: row.get(9),
iodine: row.get(10),
marble: row.get(11),
mercury: row.get(12),
quartz: row.get(13),
ruby: row.get(14),
rust: row.get(15),
shale: row.get(16),
sulfur: row.get(17),
tar: row.get(18),
uranium: row.get(19),
zillium: row.get(20),
},
inventory,
storage,
sylladex_type: row.get(26),
}
}
return Ok(player)
}
// Gets exile quote
pub async fn get_exile_quote(ctx: &Context, msg: &Message) {
// Exile quotes
let exile_1: Vec<&str> = vec!["What are you doing", "Good job hero"];
let exile_2: Vec<&str> = vec!["DO YOU HAVE ANY IDEA WHAT YOU ARE DOING?", "YOU ARE DOING GOOD MAGGOT!"];
let exile_3: Vec<&str> = vec!["Good.", "Yes more."];
let exile_4: Vec<&str> = vec!["i could do better than that", "what are you doing loser"];
// Send embed function
async fn send_embed(ctx: &Context, msg: &Message, embed_text: &str) {
let randcolor: u32 = thread_rng().gen_range(0x000000..0xFFFFFF);
if let Err(why) = msg.channel_id.send_message(&ctx.http, |m| {
m.embed(|e| {
e.title(format!("{}'s Exile", msg.author.name).as_str());
e.description(format_emojis!("{}", embed_text));
e.color(randcolor);
e.author(|a| {
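// NOTE: avatar_url() is None for users without a custom avatar, so this unwrap() can panic for them.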
a.icon_url(msg.author.avatar_url().unwrap());
a.name(msg.author.name.as_str());
a
});
e
});
m
}).await {
sendmessage(format!("Error {}", why).as_str(), ctx, msg).await;
}
}
// Random index for exile quote
| sqlstatement | identifier_name |
main.go | returns a TransferRecord filled out with a UUID,
// StartTime, Status of "requested", and a Kind of "download".
func NewDownloadRecord() *TransferRecord {
return &TransferRecord{
UUID: uuid.New(),
StartTime: time.Now(),
Status: RequestedStatus,
Kind: DownloadKind,
}
}
// NewUploadRecord returns a TransferRecord filled out with a UUID,
// StartTime, Status of "requested", and a Kind of "upload".
func NewUploadRecord() *TransferRecord {
return &TransferRecord{
UUID: uuid.New(),
StartTime: time.Now(),
Status: RequestedStatus,
Kind: UploadKind,
}
}
// MarshalAndWrite serializes the TransferRecord to json and writes it out using writer.
func (r *TransferRecord) MarshalAndWrite(writer io.Writer) error {
var (
recordbytes []byte
err error
)
r.mutex.Lock()
if recordbytes, err = json.Marshal(r); err != nil {
r.mutex.Unlock()
return errors.Wrap(err, "error serializing download record")
}
r.mutex.Unlock()
_, err = writer.Write(recordbytes)
return err
}
// SetCompletionTime sets the CompletionTime field for the TransferRecord to the current time.
func (r *TransferRecord) SetCompletionTime() {
r.mutex.Lock()
r.CompletionTime = time.Now()
r.mutex.Unlock()
}
// SetStatus sets the Status field for the TransferRecord to the provided value.
func (r *TransferRecord) SetStatus(status string) {
r.mutex.Lock()
r.Status = status
r.mutex.Unlock()
}
// HistoricalRecords maintains a list of []*TransferRecords and provides thread-safe access
// to them.
type HistoricalRecords struct {
records []*TransferRecord
mutex sync.Mutex
}
// Append adds another *TransferRecord to the list.
func (h *HistoricalRecords) Append(tr *TransferRecord) {
h.mutex.Lock()
h.records = append(h.records, tr)
h.mutex.Unlock()
}
// FindRecord looks up a record by UUID and returns the pointer to it. The lookup is locked
// to prevent dirty reads. Return value will be nil if no records are found with the provided
// id.
func (h *HistoricalRecords) FindRecord(id string) *TransferRecord {
h.mutex.Lock()
defer h.mutex.Unlock()
for _, dr := range h.records {
if dr.UUID.String() == id {
return dr
}
}
return nil
}
// App contains application state.
type App struct {
LogDirectory string
User string
UploadDestination string
DownloadDestination string
InvocationID string
InputPathList string
ExcludesPath string
ConfigPath string
FileMetadata []string
downloadWait sync.WaitGroup
uploadWait sync.WaitGroup
uploadRecords *HistoricalRecords
downloadRecords *HistoricalRecords
}
func (a *App) downloadCommand() []string {
retval := []string{
"porklock",
"-jar",
"/usr/src/app/porklock-standalone.jar",
"get",
"--user", a.User,
"--source-list", a.InputPathList,
"--destination", a.DownloadDestination,
"-c", a.ConfigPath,
}
for _, fm := range a.FileMetadata {
retval = append(retval, "-m", fm)
}
return retval
}
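// For reference, the argv assembled above looks roughly like:
//   porklock -jar /usr/src/app/porklock-standalone.jar get --user <user> --source-list <path list> --destination <dest> -c <irods config> [-m <metadata> ...]

// fileUseable reports whether aPath exists and can be stat'd.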
func (a *App) fileUseable(aPath string) bool {
if _, err := os.Stat(aPath); err != nil {
return false
}
return true
}
// DownloadFiles triggers a download and returns a *TransferRecord.
func (a *App) DownloadFiles() *TransferRecord {
downloadRecord := NewDownloadRecord()
a.downloadRecords.Append(downloadRecord)
downloadRunningMutex.Lock()
shouldRun := !downloadRunning && a.fileUseable(a.InputPathList)
if shouldRun {
// claim the flag while the lock is still held so overlapping requests cannot both start a download
downloadRunning = true
}
downloadRunningMutex.Unlock()
if shouldRun {
log.Info("starting download goroutine")
a.downloadWait.Add(1)
go func() {
log.Info("running download goroutine")
var (
downloadLogStderrFile *os.File
downloadLogStdoutFile *os.File
downloadLogStderrPath string
downloadLogStdoutPath string
err error
)
downloadRunningMutex.Lock()
downloadRunning = true
downloadRunningMutex.Unlock()
downloadRecord.SetStatus(DownloadingStatus)
defer func() {
downloadRecord.SetCompletionTime()
downloadRunningMutex.Lock()
downloadRunning = false
downloadRunningMutex.Unlock()
a.downloadWait.Done()
}()
downloadLogStdoutPath = path.Join(a.LogDirectory, "downloads.stdout.log")
downloadLogStdoutFile, err = os.Create(downloadLogStdoutPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", downloadLogStdoutPath))
downloadRecord.SetStatus(FailedStatus)
return
}
downloadLogStderrPath = path.Join(a.LogDirectory, "downloads.stderr.log")
downloadLogStderrFile, err = os.Create(downloadLogStderrPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", downloadLogStderrPath))
downloadRecord.SetStatus(FailedStatus)
return
}
parts := a.downloadCommand()
cmd := exec.Command(parts[0], parts[1:]...)
cmd.Stdout = downloadLogStdoutFile
cmd.Stderr = downloadLogStderrFile
if err = cmd.Run(); err != nil {
log.Error(errors.Wrap(err, "error running porklock for downloads"))
downloadRecord.SetStatus(FailedStatus)
return
}
downloadRecord.SetStatus(CompletedStatus)
log.Info("exiting download goroutine without errors")
}()
}
return downloadRecord
}
// DownloadFilesHandler handles requests to download files.
func (a *App) DownloadFilesHandler(writer http.ResponseWriter, req *http.Request) {
log.Info("received download request")
downloadRecord := a.DownloadFiles()
if err := downloadRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
// GetDownloadStatus returns the status of the possibly running download.
func (a *App) GetDownloadStatus(writer http.ResponseWriter, request *http.Request) |
// GetUploadStatus returns the status of the possibly running upload.
func (a *App) GetUploadStatus(writer http.ResponseWriter, request *http.Request) {
id := mux.Vars(request)["id"]
foundRecord := a.uploadRecords.FindRecord(id)
if foundRecord == nil {
writer.WriteHeader(http.StatusNotFound)
return
}
if err := foundRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
func (a *App) uploadCommand() []string {
retval := []string{
"porklock",
"-jar",
"/usr/src/app/porklock-standalone.jar",
"put",
"--user", a.User,
"--source", a.DownloadDestination,
"--destination", a.UploadDestination,
"--exclude", a.ExcludesPath,
"-c", a.ConfigPath,
}
for _, fm := range a.FileMetadata {
retval = append(retval, "-m", fm)
}
return retval
}
// UploadFiles handles requests to upload files.
func (a *App) UploadFiles(writer http.ResponseWriter, req *http.Request) {
log.Info("received upload request")
uploadRecord := NewUploadRecord()
a.uploadRecords.Append(uploadRecord)
uploadRunningMutex.Lock()
shouldRun := !uploadRunning
uploadRunning = true
uploadRunningMutex.Unlock()
if shouldRun {
log.Info("starting upload goroutine")
a.uploadWait.Add(1)
go func() {
log.Info("running upload goroutine")
uploadRecord.SetStatus(UploadingStatus)
defer func() {
uploadRecord.SetCompletionTime()
uploadRunningMutex.Lock()
uploadRunning = false
uploadRunningMutex.Unlock()
a.uploadWait.Done()
}()
uploadLogStdoutPath := path.Join(a.LogDirectory, "uploads.stdout.log")
uploadLogStdoutFile, err := os.Create(uploadLogStdoutPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", uploadLogStdoutPath))
uploadRecord.SetStatus(FailedStatus)
return
}
uploadLogStderrPath := path.Join(a.LogDirectory, "uploads.stderr.log")
uploadLogStderrFile, err := os.Create(uploadLogStderrPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", uploadLogStderrPath))
uploadRecord.SetStatus(FailedStatus)
return
}
parts := a.uploadCommand()
cmd := exec.Command(parts[0], parts[1:]...)
cmd.Stdout = uploadLogStdoutFile
cmd.Stderr = uploadLogStd | {
id := mux.Vars(request)["id"]
foundRecord := a.downloadRecords.FindRecord(id)
if foundRecord == nil {
writer.WriteHeader(http.StatusNotFound)
return
}
if err := foundRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
} | identifier_body |
main.go | string) {
r.mutex.Lock()
r.Status = status
r.mutex.Unlock()
}
// HistoricalRecords maintains a list of []*TransferRecords and provides thread-safe access
// to them.
type HistoricalRecords struct {
records []*TransferRecord
mutex sync.Mutex
}
// Append adds another *TransferRecord to the list.
func (h *HistoricalRecords) Append(tr *TransferRecord) {
h.mutex.Lock()
h.records = append(h.records, tr)
h.mutex.Unlock()
}
// FindRecord looks up a record by UUID and returns the pointer to it. The lookup is locked
// to prevent dirty reads. Return value will be nil if no records are found with the provided
// id.
func (h *HistoricalRecords) FindRecord(id string) *TransferRecord {
h.mutex.Lock()
defer h.mutex.Unlock()
for _, dr := range h.records {
if dr.UUID.String() == id {
return dr
}
}
return nil
}
// App contains application state.
type App struct {
LogDirectory string
User string
UploadDestination string
DownloadDestination string
InvocationID string
InputPathList string
ExcludesPath string
ConfigPath string
FileMetadata []string
downloadWait sync.WaitGroup
uploadWait sync.WaitGroup
uploadRecords *HistoricalRecords
downloadRecords *HistoricalRecords
}
func (a *App) downloadCommand() []string {
retval := []string{
"porklock",
"-jar",
"/usr/src/app/porklock-standalone.jar",
"get",
"--user", a.User,
"--source-list", a.InputPathList,
"--destination", a.DownloadDestination,
"-c", a.ConfigPath,
}
for _, fm := range a.FileMetadata {
retval = append(retval, "-m", fm)
}
return retval
}
func (a *App) fileUseable(aPath string) bool {
if _, err := os.Stat(aPath); err != nil {
return false
}
return true
}
// DownloadFiles triggers a download and returns a *TransferRecord.
func (a *App) DownloadFiles() *TransferRecord {
downloadRecord := NewDownloadRecord()
a.downloadRecords.Append(downloadRecord)
downloadRunningMutex.Lock()
shouldRun := !downloadRunning && a.fileUseable(a.InputPathList)
if shouldRun {
// claim the flag while the lock is still held so overlapping requests cannot both start a download
downloadRunning = true
}
downloadRunningMutex.Unlock()
if shouldRun {
log.Info("starting download goroutine")
a.downloadWait.Add(1)
go func() {
log.Info("running download goroutine")
var (
downloadLogStderrFile *os.File
downloadLogStdoutFile *os.File
downloadLogStderrPath string
downloadLogStdoutPath string
err error
)
downloadRunningMutex.Lock()
downloadRunning = true
downloadRunningMutex.Unlock()
downloadRecord.SetStatus(DownloadingStatus)
defer func() {
downloadRecord.SetCompletionTime()
downloadRunningMutex.Lock()
downloadRunning = false
downloadRunningMutex.Unlock()
a.downloadWait.Done()
}()
downloadLogStdoutPath = path.Join(a.LogDirectory, "downloads.stdout.log")
downloadLogStdoutFile, err = os.Create(downloadLogStdoutPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", downloadLogStdoutPath))
downloadRecord.SetStatus(FailedStatus)
return
}
downloadLogStderrPath = path.Join(a.LogDirectory, "downloads.stderr.log")
downloadLogStderrFile, err = os.Create(downloadLogStderrPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", downloadLogStderrPath))
downloadRecord.SetStatus(FailedStatus)
return
}
parts := a.downloadCommand()
cmd := exec.Command(parts[0], parts[1:]...)
cmd.Stdout = downloadLogStdoutFile
cmd.Stderr = downloadLogStderrFile
if err = cmd.Run(); err != nil {
log.Error(errors.Wrap(err, "error running porklock for downloads"))
downloadRecord.SetStatus(FailedStatus)
return
}
downloadRecord.SetStatus(CompletedStatus)
log.Info("exiting download goroutine without errors")
}()
}
return downloadRecord
}
// DownloadFilesHandler handles requests to download files.
func (a *App) DownloadFilesHandler(writer http.ResponseWriter, req *http.Request) {
log.Info("received download request")
downloadRecord := a.DownloadFiles()
if err := downloadRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
// GetDownloadStatus returns the status of the possibly running download.
func (a *App) GetDownloadStatus(writer http.ResponseWriter, request *http.Request) {
id := mux.Vars(request)["id"]
foundRecord := a.downloadRecords.FindRecord(id)
if foundRecord == nil {
writer.WriteHeader(http.StatusNotFound)
return
}
if err := foundRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
// GetUploadStatus returns the status of the possibly running upload.
func (a *App) GetUploadStatus(writer http.ResponseWriter, request *http.Request) {
id := mux.Vars(request)["id"]
foundRecord := a.uploadRecords.FindRecord(id)
if foundRecord == nil {
writer.WriteHeader(http.StatusNotFound)
return
}
if err := foundRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
func (a *App) uploadCommand() []string {
retval := []string{
"porklock",
"-jar",
"/usr/src/app/porklock-standalone.jar",
"put",
"--user", a.User,
"--source", a.DownloadDestination,
"--destination", a.UploadDestination,
"--exclude", a.ExcludesPath,
"-c", a.ConfigPath,
}
for _, fm := range a.FileMetadata {
retval = append(retval, "-m", fm)
}
return retval
}
// UploadFiles handles requests to upload files.
func (a *App) UploadFiles(writer http.ResponseWriter, req *http.Request) {
log.Info("received upload request")
uploadRecord := NewUploadRecord()
a.uploadRecords.Append(uploadRecord)
uploadRunningMutex.Lock()
shouldRun := !uploadRunning
uploadRunning = true
uploadRunningMutex.Unlock()
if shouldRun {
log.Info("starting upload goroutine")
a.uploadWait.Add(1)
go func() {
log.Info("running upload goroutine")
uploadRecord.SetStatus(UploadingStatus)
defer func() {
uploadRecord.SetCompletionTime()
uploadRunningMutex.Lock()
uploadRunning = false
uploadRunningMutex.Unlock()
a.uploadWait.Done()
}()
uploadLogStdoutPath := path.Join(a.LogDirectory, "uploads.stdout.log")
uploadLogStdoutFile, err := os.Create(uploadLogStdoutPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", uploadLogStdoutPath))
uploadRecord.SetStatus(FailedStatus)
return
}
uploadLogStderrPath := path.Join(a.LogDirectory, "uploads.stderr.log")
uploadLogStderrFile, err := os.Create(uploadLogStderrPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", uploadLogStderrPath))
uploadRecord.SetStatus(FailedStatus)
return
}
parts := a.uploadCommand()
cmd := exec.Command(parts[0], parts[1:]...)
cmd.Stdout = uploadLogStdoutFile
cmd.Stderr = uploadLogStderrFile
if err = cmd.Run(); err != nil {
log.Error(errors.Wrap(err, "error running porklock for uploads"))
uploadRecord.SetStatus(FailedStatus)
return
}
uploadRecord.SetStatus(CompletedStatus)
log.Info("exiting upload goroutine without errors")
}()
}
if err := uploadRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
// Hello is an HTTP handler that simply says hello.
func (a *App) Hello(writer http.ResponseWriter, request *http.Request) {
fmt.Fprintln(writer, "Hello from vice-file-transfers")
}
func main() {
var options struct {
ListenPort int `short:"l" long:"listen-port" default:"60001" description:"The port to listen on for requests"`
LogDirectory string `long:"log-dir" default:"/input-files" description:"The directory in which to write log files"` | User string `long:"user" required:"true" description:"The user to run the transfers for"`
UploadDestination string `long:"upload-destination" required:"true" description:"The destination directory for uploads"`
DownloadDestination string `long:"download-destination" default:"/input-files" description:"The destination directory for downloads"`
ExcludesFile string `long:"excludes-file" default:"/excludes/excludes-file" description:"The path to the excludes file"` | random_line_split |
|
main.go | LogDirectory string
User string
UploadDestination string
DownloadDestination string
InvocationID string
InputPathList string
ExcludesPath string
ConfigPath string
FileMetadata []string
downloadWait sync.WaitGroup
uploadWait sync.WaitGroup
uploadRecords *HistoricalRecords
downloadRecords *HistoricalRecords
}
func (a *App) downloadCommand() []string {
retval := []string{
"porklock",
"-jar",
"/usr/src/app/porklock-standalone.jar",
"get",
"--user", a.User,
"--source-list", a.InputPathList,
"--destination", a.DownloadDestination,
"-c", a.ConfigPath,
}
for _, fm := range a.FileMetadata {
retval = append(retval, "-m", fm)
}
return retval
}
func (a *App) fileUseable(aPath string) bool {
if _, err := os.Stat(aPath); err != nil {
return false
}
return true
}
// DownloadFiles triggers a download and returns a *TransferRecord.
func (a *App) DownloadFiles() *TransferRecord {
downloadRecord := NewDownloadRecord()
a.downloadRecords.Append(downloadRecord)
downloadRunningMutex.Lock()
shouldRun := !downloadRunning && a.fileUseable(a.InputPathList)
if shouldRun {
// claim the flag while the lock is still held so overlapping requests cannot both start a download
downloadRunning = true
}
downloadRunningMutex.Unlock()
if shouldRun {
log.Info("starting download goroutine")
a.downloadWait.Add(1)
go func() {
log.Info("running download goroutine")
var (
downloadLogStderrFile *os.File
downloadLogStdoutFile *os.File
downloadLogStderrPath string
downloadLogStdoutPath string
err error
)
downloadRunningMutex.Lock()
downloadRunning = true
downloadRunningMutex.Unlock()
downloadRecord.SetStatus(DownloadingStatus)
defer func() {
downloadRecord.SetCompletionTime()
downloadRunningMutex.Lock()
downloadRunning = false
downloadRunningMutex.Unlock()
a.downloadWait.Done()
}()
downloadLogStdoutPath = path.Join(a.LogDirectory, "downloads.stdout.log")
downloadLogStdoutFile, err = os.Create(downloadLogStdoutPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", downloadLogStdoutPath))
downloadRecord.SetStatus(FailedStatus)
return
}
downloadLogStderrPath = path.Join(a.LogDirectory, "downloads.stderr.log")
downloadLogStderrFile, err = os.Create(downloadLogStderrPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", downloadLogStderrPath))
downloadRecord.SetStatus(FailedStatus)
return
}
parts := a.downloadCommand()
cmd := exec.Command(parts[0], parts[1:]...)
cmd.Stdout = downloadLogStdoutFile
cmd.Stderr = downloadLogStderrFile
if err = cmd.Run(); err != nil {
log.Error(errors.Wrap(err, "error running porklock for downloads"))
downloadRecord.SetStatus(FailedStatus)
return
}
downloadRecord.SetStatus(CompletedStatus)
log.Info("exiting download goroutine without errors")
}()
}
return downloadRecord
}
// DownloadFilesHandler handles requests to download files.
func (a *App) DownloadFilesHandler(writer http.ResponseWriter, req *http.Request) {
log.Info("received download request")
downloadRecord := a.DownloadFiles()
if err := downloadRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
// GetDownloadStatus returns the status of the possibly running download.
func (a *App) GetDownloadStatus(writer http.ResponseWriter, request *http.Request) {
id := mux.Vars(request)["id"]
foundRecord := a.downloadRecords.FindRecord(id)
if foundRecord == nil {
writer.WriteHeader(http.StatusNotFound)
return
}
if err := foundRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
// GetUploadStatus returns the status of the possibly running upload.
func (a *App) GetUploadStatus(writer http.ResponseWriter, request *http.Request) {
id := mux.Vars(request)["id"]
foundRecord := a.uploadRecords.FindRecord(id)
if foundRecord == nil {
writer.WriteHeader(http.StatusNotFound)
return
}
if err := foundRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
func (a *App) uploadCommand() []string {
retval := []string{
"porklock",
"-jar",
"/usr/src/app/porklock-standalone.jar",
"put",
"--user", a.User,
"--source", a.DownloadDestination,
"--destination", a.UploadDestination,
"--exclude", a.ExcludesPath,
"-c", a.ConfigPath,
}
for _, fm := range a.FileMetadata {
retval = append(retval, "-m", fm)
}
return retval
}
// UploadFiles handles requests to upload files.
func (a *App) UploadFiles(writer http.ResponseWriter, req *http.Request) {
log.Info("received upload request")
uploadRecord := NewUploadRecord()
a.uploadRecords.Append(uploadRecord)
uploadRunningMutex.Lock()
shouldRun := !uploadRunning
uploadRunning = true
uploadRunningMutex.Unlock()
if shouldRun {
log.Info("starting upload goroutine")
a.uploadWait.Add(1)
go func() {
log.Info("running upload goroutine")
uploadRecord.SetStatus(UploadingStatus)
defer func() {
uploadRecord.SetCompletionTime()
uploadRunningMutex.Lock()
uploadRunning = false
uploadRunningMutex.Unlock()
a.uploadWait.Done()
}()
uploadLogStdoutPath := path.Join(a.LogDirectory, "uploads.stdout.log")
uploadLogStdoutFile, err := os.Create(uploadLogStdoutPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", uploadLogStdoutPath))
uploadRecord.SetStatus(FailedStatus)
return
}
uploadLogStderrPath := path.Join(a.LogDirectory, "uploads.stderr.log")
uploadLogStderrFile, err := os.Create(uploadLogStderrPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", uploadLogStderrPath))
uploadRecord.SetStatus(FailedStatus)
return
}
parts := a.uploadCommand()
cmd := exec.Command(parts[0], parts[1:]...)
cmd.Stdout = uploadLogStdoutFile
cmd.Stderr = uploadLogStderrFile
if err = cmd.Run(); err != nil {
log.Error(errors.Wrap(err, "error running porklock for uploads"))
uploadRecord.SetStatus(FailedStatus)
return
}
uploadRecord.SetStatus(CompletedStatus)
log.Info("exiting upload goroutine without errors")
}()
}
if err := uploadRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
// Hello is an HTTP handler that simply says hello.
func (a *App) Hello(writer http.ResponseWriter, request *http.Request) {
fmt.Fprintln(writer, "Hello from vice-file-transfers")
}
func main() {
var options struct {
ListenPort int `short:"l" long:"listen-port" default:"60001" description:"The port to listen on for requests"`
LogDirectory string `long:"log-dir" default:"/input-files" description:"The directory in which to write log files"`
User string `long:"user" required:"true" description:"The user to run the transfers for"`
UploadDestination string `long:"upload-destination" required:"true" description:"The destination directory for uploads"`
DownloadDestination string `long:"download-destination" default:"/input-files" description:"The destination directory for downloads"`
ExcludesFile string `long:"excludes-file" default:"/excludes/excludes-file" description:"The path to the excludes file"`
PathListFile string `long:"path-list-file" default:"/input-paths/input-path-list" description:"The path to the input paths list file"`
IRODSConfig string `long:"irods-config" default:"/etc/porklock/irods-config.properties" description:"The path to the porklock iRODS config file"`
InvocationID string `long:"invocation-id" required:"true" description:"The invocation UUID"`
FileMetadata []string `short:"m" description:"Metadata to apply to files"`
NoService bool `short:"n" long:"no-service" description:"Disables running as a continuous process. Effectively becomes a download tool"`
}
if _, err := flags.Parse(&options); err != nil | {
if flagsErr, ok := err.(*flags.Error); ok && flagsErr.Type == flags.ErrHelp {
os.Exit(0)
}
log.Fatal(err)
} | conditional_block |
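// Example invocation (a sketch; the binary name and all values are placeholders):
//   vice-file-transfers --user alice --upload-destination /iplant/home/alice/analyses --invocation-id 1234-abcd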
|
main.go | returns a TransferRecord filled out with a UUID,
// StartTime, Status of "requested", and a Kind of "download".
func NewDownloadRecord() *TransferRecord {
return &TransferRecord{
UUID: uuid.New(),
StartTime: time.Now(),
Status: RequestedStatus,
Kind: DownloadKind,
}
}
// NewUploadRecord returns a TransferRecord filled out with a UUID,
// StartTime, Status of "requested", and a Kind of "upload".
func | () *TransferRecord {
return &TransferRecord{
UUID: uuid.New(),
StartTime: time.Now(),
Status: RequestedStatus,
Kind: UploadKind,
}
}
// MarshalAndWrite serializes the TransferRecord to json and writes it out using writer.
func (r *TransferRecord) MarshalAndWrite(writer io.Writer) error {
var (
recordbytes []byte
err error
)
r.mutex.Lock()
if recordbytes, err = json.Marshal(r); err != nil {
r.mutex.Unlock()
return errors.Wrap(err, "error serializing download record")
}
r.mutex.Unlock()
_, err = writer.Write(recordbytes)
return err
}
// SetCompletionTime sets the CompletionTime field for the TransferRecord to the current time.
func (r *TransferRecord) SetCompletionTime() {
r.mutex.Lock()
r.CompletionTime = time.Now()
r.mutex.Unlock()
}
// SetStatus sets the Status field for the TransferRecord to the provided value.
func (r *TransferRecord) SetStatus(status string) {
r.mutex.Lock()
r.Status = status
r.mutex.Unlock()
}
// HistoricalRecords maintains a list of []*TransferRecords and provides thread-safe access
// to them.
type HistoricalRecords struct {
records []*TransferRecord
mutex sync.Mutex
}
// Append adds another *TransferRecord to the list.
func (h *HistoricalRecords) Append(tr *TransferRecord) {
h.mutex.Lock()
h.records = append(h.records, tr)
h.mutex.Unlock()
}
// FindRecord looks up a record by UUID and returns the pointer to it. The lookup is locked
// to prevent dirty reads. Return value will be nil if no records are found with the provided
// id.
func (h *HistoricalRecords) FindRecord(id string) *TransferRecord {
h.mutex.Lock()
defer h.mutex.Unlock()
for _, dr := range h.records {
if dr.UUID.String() == id {
return dr
}
}
return nil
}
// App contains application state.
type App struct {
LogDirectory string
User string
UploadDestination string
DownloadDestination string
InvocationID string
InputPathList string
ExcludesPath string
ConfigPath string
FileMetadata []string
downloadWait sync.WaitGroup
uploadWait sync.WaitGroup
uploadRecords *HistoricalRecords
downloadRecords *HistoricalRecords
}
func (a *App) downloadCommand() []string {
retval := []string{
"porklock",
"-jar",
"/usr/src/app/porklock-standalone.jar",
"get",
"--user", a.User,
"--source-list", a.InputPathList,
"--destination", a.DownloadDestination,
"-c", a.ConfigPath,
}
for _, fm := range a.FileMetadata {
retval = append(retval, "-m", fm)
}
return retval
}
func (a *App) fileUseable(aPath string) bool {
if _, err := os.Stat(aPath); err != nil {
return false
}
return true
}
// DownloadFiles triggers a download and returns a *TransferRecord.
func (a *App) DownloadFiles() *TransferRecord {
downloadRecord := NewDownloadRecord()
a.downloadRecords.Append(downloadRecord)
downloadRunningMutex.Lock()
shouldRun := !downloadRunning && a.fileUseable(a.InputPathList)
if shouldRun {
// claim the flag while the lock is still held so overlapping requests cannot both start a download
downloadRunning = true
}
downloadRunningMutex.Unlock()
if shouldRun {
log.Info("starting download goroutine")
a.downloadWait.Add(1)
go func() {
log.Info("running download goroutine")
var (
downloadLogStderrFile *os.File
downloadLogStdoutFile *os.File
downloadLogStderrPath string
downloadLogStdoutPath string
err error
)
downloadRunningMutex.Lock()
downloadRunning = true
downloadRunningMutex.Unlock()
downloadRecord.SetStatus(DownloadingStatus)
defer func() {
downloadRecord.SetCompletionTime()
downloadRunningMutex.Lock()
downloadRunning = false
downloadRunningMutex.Unlock()
a.downloadWait.Done()
}()
downloadLogStdoutPath = path.Join(a.LogDirectory, "downloads.stdout.log")
downloadLogStdoutFile, err = os.Create(downloadLogStdoutPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", downloadLogStdoutPath))
downloadRecord.SetStatus(FailedStatus)
return
}
downloadLogStderrPath = path.Join(a.LogDirectory, "downloads.stderr.log")
downloadLogStderrFile, err = os.Create(downloadLogStderrPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", downloadLogStderrPath))
downloadRecord.SetStatus(FailedStatus)
return
}
parts := a.downloadCommand()
cmd := exec.Command(parts[0], parts[1:]...)
cmd.Stdout = downloadLogStdoutFile
cmd.Stderr = downloadLogStderrFile
if err = cmd.Run(); err != nil {
log.Error(errors.Wrap(err, "error running porklock for downloads"))
downloadRecord.SetStatus(FailedStatus)
return
}
downloadRecord.SetStatus(CompletedStatus)
log.Info("exiting download goroutine without errors")
}()
}
return downloadRecord
}
// DownloadFilesHandler handles requests to download files.
func (a *App) DownloadFilesHandler(writer http.ResponseWriter, req *http.Request) {
log.Info("received download request")
downloadRecord := a.DownloadFiles()
if err := downloadRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
// GetDownloadStatus returns the status of the possibly running download.
func (a *App) GetDownloadStatus(writer http.ResponseWriter, request *http.Request) {
id := mux.Vars(request)["id"]
foundRecord := a.downloadRecords.FindRecord(id)
if foundRecord == nil {
writer.WriteHeader(http.StatusNotFound)
return
}
if err := foundRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
// GetUploadStatus returns the status of the possibly running upload.
func (a *App) GetUploadStatus(writer http.ResponseWriter, request *http.Request) {
id := mux.Vars(request)["id"]
foundRecord := a.uploadRecords.FindRecord(id)
if foundRecord == nil {
writer.WriteHeader(http.StatusNotFound)
return
}
if err := foundRecord.MarshalAndWrite(writer); err != nil {
log.Error(err)
http.Error(writer, err.Error(), http.StatusInternalServerError)
}
}
func (a *App) uploadCommand() []string {
retval := []string{
"porklock",
"-jar",
"/usr/src/app/porklock-standalone.jar",
"put",
"--user", a.User,
"--source", a.DownloadDestination,
"--destination", a.UploadDestination,
"--exclude", a.ExcludesPath,
"-c", a.ConfigPath,
}
for _, fm := range a.FileMetadata {
retval = append(retval, "-m", fm)
}
return retval
}
// UploadFiles handles requests to upload files.
func (a *App) UploadFiles(writer http.ResponseWriter, req *http.Request) {
log.Info("received upload request")
uploadRecord := NewUploadRecord()
a.uploadRecords.Append(uploadRecord)
uploadRunningMutex.Lock()
shouldRun := !uploadRunning
uploadRunning = true
uploadRunningMutex.Unlock()
if shouldRun {
log.Info("starting upload goroutine")
a.uploadWait.Add(1)
go func() {
log.Info("running upload goroutine")
uploadRecord.SetStatus(UploadingStatus)
defer func() {
uploadRecord.SetCompletionTime()
uploadRunningMutex.Lock()
uploadRunning = false
uploadRunningMutex.Unlock()
a.uploadWait.Done()
}()
uploadLogStdoutPath := path.Join(a.LogDirectory, "uploads.stdout.log")
uploadLogStdoutFile, err := os.Create(uploadLogStdoutPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", uploadLogStdoutPath))
uploadRecord.SetStatus(FailedStatus)
return
}
uploadLogStderrPath := path.Join(a.LogDirectory, "uploads.stderr.log")
uploadLogStderrFile, err := os.Create(uploadLogStderrPath)
if err != nil {
log.Error(errors.Wrapf(err, "failed to open file %s", uploadLogStderrPath))
uploadRecord.SetStatus(FailedStatus)
return
}
parts := a.uploadCommand()
cmd := exec.Command(parts[0], parts[1:]...)
cmd.Stdout = uploadLogStdoutFile
cmd.Stderr = uploadLogStderr | NewUploadRecord | identifier_name |
server.go | json:"temperature"`
Precipitation float64 `json:"precipitation"`
Humidity float64 `json:"humidity"`
}
type AirData struct {
TagDate string `json:"timestamp"`
ObsName string `json:"observatory_name"`
Location [2]float64 `json:"coordinates"`
Wind [2]float64 `json:"wind"`
Temperature float64 `json:"temperature"`
Precipitation float64 `json:"precipitation"`
Humidity float64 `json:"humidity"`
ItemPM10 float64 `json:"pm10"`
ItemPM25 float64 `json:"pm25"`
ItemO3 float64 `json:"o3"`
ItemNO2 float64 `json:"no2"`
ItemCO float64 `json:"co"`
ItemSO2 float64 `json:"so2"`
}
func (t *TemplateRenderer) Render(w io.Writer, dataType string, data interface{}, c echo.Context) error {
return t.templates.ExecuteTemplate(w, dataType, data)
}
func indexPage(c echo.Context) error {
return c.Render(http.StatusOK, "index.html", map[string]interface{}{
"data_type": "pm25",
})
}
func byPass(c echo.Context) error {
buf, err := ioutil.ReadFile("data/seoul_topo.json")
if err != nil {
fmt.Println(err)
}
stringSeoulTopo := string(buf)
return c.String(http.StatusOK, stringSeoulTopo)
}
func redirectPage(c echo.Context) error {
return c.Render(http.StatusOK, "index.html", map[string]interface{}{
"data_type": c.Param("data_type"),
})
}
func getJSON(c echo.Context) error | for i := 0; i < len(observatory); i++ {
air := AirData{}
idxAir := getIndexAirPollution(airPollution, observatory[i].AWSName)
idxWeather := getIndexWeatherData(weatherData, observatory[i].AWSName)
if idxAir != -1 && idxWeather != -1 {
air.TagDate = airPollution[idxAir].TagDate
air.ObsName = airPollution[idxAir].ObsName
air.Location[0] = observatory[i].AWSLongitude
air.Location[1] = observatory[i].AWSLatitude
air.Wind[0] = weatherData[idxWeather].WindDirection
air.Wind[1] = weatherData[idxWeather].WindSpeed
air.Temperature = weatherData[idxWeather].Temperature
air.Humidity = weatherData[idxWeather].Humidity
air.Precipitation = weatherData[idxWeather].Precipitation
air.ItemPM25 = airPollution[idxAir].ItemPM25
air.ItemPM10 = airPollution[idxAir].ItemPM10
air.ItemO3 = airPollution[idxAir].ItemO3
air.ItemNO2 = airPollution[idxAir].ItemNO2
air.ItemCO = airPollution[idxAir].ItemCO
air.ItemSO2 = airPollution[idxAir].ItemSO2
airData = append(airData, air)
}
}
return c.JSON(http.StatusOK, airData)
}
func initObservatory(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
if !db.HasTable(&Observatory{}) {
db.CreateTable(&Observatory{})
}
obsFile, err := os.Open("data/observatory.csv")
if err != nil {
log.Fatal(err)
}
csvReader := csv.NewReader(bufio.NewReader(obsFile))
rows, _ := csvReader.ReadAll()
for i, row := range rows {
if i != 0 {
obs := Observatory{}
obs.AWSName = rows[i][0] + "구"
valLat, _ := strconv.ParseFloat(rows[i][1], 64)
obs.AWSLatitude = valLat
valLon, _ := strconv.ParseFloat(rows[i][2], 64)
obs.AWSLongitude = valLon
db.NewRecord(obs)
db.Create(&obs)
for j := range row {
log.Printf("%s ", rows[i][j])
}
log.Println()
}
}
return c.String(http.StatusOK, "Hello, World")
}
func stringToFloat(strValue string) float64 {
val, err := strconv.ParseFloat(strValue, 64)
if err != nil {
return -999
}
return val
}
func weatherDataScrape(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
resp, err := http.Get("http://aws.seoul.go.kr/RealTime/RealTimeWeatherUser.asp?TITLE=%C0%FC%20%C1%F6%C1%A1%20%C7%F6%C8%B2")
if err != nil {
log.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
log.Fatalf("status code error: %d %s", resp.StatusCode, resp.Status)
}
bytes, err := ioutil.ReadAll(resp.Body)
euckrDecoder := korean.EUCKR.NewDecoder()
decodedContents, err := euckrDecoder.String(string(bytes))
doc, err := goquery.NewDocumentFromReader(strings.NewReader(decodedContents))
if err != nil {
log.Fatal(err)
}
timeItems := strings.Fields(doc.Find(".top tbody tr tbody tr td").Eq(1).Text())
re := regexp.MustCompile("[0-9]+")
year := re.FindAllString(timeItems[0], -1)
month := re.FindAllString(timeItems[1], -1)
day := re.FindAllString(timeItems[2], -1)
hour := re.FindAllString(timeItems[3], -1)
tagDate := fmt.Sprintf("%s-%s-%s %s:00", year[0], month[0], day[0], hour[0])
queryResult := WeatherData{}
db.Where("tag_date = ?", tagDate).First(&queryResult)
doScrape := queryResult.ID == 0
if doScrape {
items := doc.Find(".top .main tr td table tbody tr")
for i := 1; i < 27; i++ {
replacedItem := strings.Replace(items.Eq(i).Text(), "\n", "", -1)
listSubItem := strings.Fields(replacedItem)
log.Println(listSubItem[1])
var obsName string
if listSubItem[1] == "중구" {
obsName = listSubItem[1]
} else {
obsName = listSubItem[1] + "구"
}
windDirection := stringToFloat(listSubItem[2])
windDirectionString := listSubItem[3]
windSpeed := stringToFloat(listSubItem[4])
temperature := stringToFloat(listSubItem[5])
precipitation := stringToFloat(listSubItem[6])
humidity := stringToFloat(listSubItem[8])
obs := WeatherData{}
obs.TagDate = tagDate
obs.ObsName = obsName
obs.WindDirection = windDirection
obs.WindDirectionString = windDirectionString
obs.WindSpeed = windSpeed
obs.Temperature = temperature
obs.Precipitation = precipitation
obs.Humidity = humidity
db.NewRecord(obs)
db.Create(&obs)
}
}
return c.String(http.StatusOK, "Hello, World!")
}
func airPollutionScrape(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
resp, err := http.Get("http://cleanair.seoul.go.kr/air_city.htm?method=measure&grp1=pm10")
if err != nil {
log.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
log | {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
airPollution := []AirPollution{}
weatherData := []WeatherData{}
observatory := []Observatory{}
airPollutionItem := AirPollution{}
db.Last(&airPollutionItem)
timeString := airPollutionItem.TagDate
db.Where("tag_date = ?", timeString).Find(&airPollution)
db.Where("tag_date = ?", timeString).Find(&weatherData)
db.Find(&observatory)
airData := []AirData{} | identifier_body |
server.go | json:"temperature"`
Precipitation float64 `json:"precipitation"`
Humidity float64 `json:"humidity"`
}
type AirData struct {
TagDate string `json:"timestamp"`
ObsName string `json:"observatory_name"`
Location [2]float64 `json:"coordinates"`
Wind [2]float64 `json:"wind"`
Temperature float64 `json:"temperature"`
Precipitation float64 `json:"precipitation"`
Humidity float64 `json:"humidity"`
ItemPM10 float64 `json:"pm10"`
ItemPM25 float64 `json:"pm25"`
ItemO3 float64 `json:"o3"`
ItemNO2 float64 `json:"no2"`
ItemCO float64 `json:"co"`
ItemSO2 float64 `json:"so2"`
}
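// Render implements echo.Renderer by executing the named HTML template with the supplied data.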
func (t *TemplateRenderer) Render(w io.Writer, dataType string, data interface{}, c echo.Context) error {
return t.templates.ExecuteTemplate(w, dataType, data)
}
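// indexPage renders the map view with PM2.5 selected as the default layer.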
func indexPage(c echo.Context) error {
return c.Render(http.StatusOK, "index.html", map[string]interface{}{
"data_type": "pm25",
})
}
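// byPass serves the bundled Seoul TopoJSON file verbatim.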
func byPass(c echo.Context) error {
buf, err := ioutil.ReadFile("data/seoul_topo.json")
if err != nil {
fmt.Println(err)
}
stringSeoulTopo := string(buf)
return c.String(http.StatusOK, stringSeoulTopo)
}
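// redirectPage renders the map view for the data type given in the URL parameter.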
func redirectPage(c echo.Context) error {
return c.Render(http.StatusOK, "index.html", map[string]interface{}{
"data_type": c.Param("data_type"),
})
}
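// getJSON joins the most recent air-pollution rows, the matching weather rows, and the observatory
// coordinates into a single JSON array for the front end. The MySQL DSN is hard-coded in every handler
// in this file; the credentials look like placeholders and would normally come from configuration.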
func getJSON(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
airPollution := []AirPollution{}
weatherData := []WeatherData{}
observatory := []Observatory{}
airPollutionItem := AirPollution{}
db.Last(&airPollutionItem)
timeString := airPollutionItem.TagDate
db.Where("tag_date = ?", timeString).Find(&airPollution)
db.Where("tag_date = ?", timeString).Find(&weatherData)
db.Find(&observatory)
airData := []AirData{}
for i := 0; i < len(observatory); i++ {
air := AirData{}
idxAir := getIndexAirPollution(airPollution, observatory[i].AWSName)
idxWeather := getIndexWeatherData(weatherData, observatory[i].AWSName)
if idxAir != -1 && idxWeather != -1 {
air.TagDate = airPollution[idxAir].TagDate
air.ObsName = airPollution[idxAir].ObsName
air.Location[0] = observatory[i].AWSLongitude
air.Location[1] = observatory[i].AWSLatitude
air.Wind[0] = weatherData[idxWeather].WindDirection
air.Wind[1] = weatherData[idxWeather].WindSpeed
air.Temperature = weatherData[idxWeather].Temperature
air.Humidity = weatherData[idxWeather].Humidity
air.Precipitation = weatherData[idxWeather].Precipitation
air.ItemPM25 = airPollution[idxAir].ItemPM25
air.ItemPM10 = airPollution[idxAir].ItemPM10
air.ItemO3 = airPollution[idxAir].ItemO3
air.ItemNO2 = airPollution[idxAir].ItemNO2
air.ItemCO = airPollution[idxAir].ItemCO
air.ItemSO2 = airPollution[idxAir].ItemSO2
airData = append(airData, air)
}
}
return c.JSON(http.StatusOK, airData)
}
func initObservatory(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
if !db.HasTable(&Observatory{}) {
db.CreateTable(&Observatory{})
}
obsFile, err := os.Open("data/observatory.csv")
if err != nil {
log.Fatal(err)
}
csvReader := csv.NewReader(bufio.NewReader(obsFile))
rows, _ := csvReader.ReadAll()
for i, row := range rows {
if i != 0 {
obs := Observatory{}
obs.AWSName = rows[i][0] + "구"
valLat, _ := strconv.ParseFloat(rows[i][1], 64)
obs.AWSLatitude = valLat
valLon, _ := strconv.ParseFloat(rows[i][2], 64)
obs.AWSLongitude = valLon
db.NewRecord(obs)
db.Create(&obs)
for j := range row {
log.Printf("%s ", rows[i][j])
}
log.Println()
}
}
return c.String(http.StatusOK, "Hello, World")
}
func st | trValue string) float64 {
val, err := strconv.ParseFloat(strValue, 64)
if err != nil {
return -999
}
return val
}
func weatherDataScrape(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
resp, err := http.Get("http://aws.seoul.go.kr/RealTime/RealTimeWeatherUser.asp?TITLE=%C0%FC%20%C1%F6%C1%A1%20%C7%F6%C8%B2")
if err != nil {
log.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
log.Fatalf("status code error: %d %s", resp.StatusCode, resp.Status)
}
bytes, err := ioutil.ReadAll(resp.Body)
euckrDecoder := korean.EUCKR.NewDecoder()
decodedContents, err := euckrDecoder.String(string(bytes))
doc, err := goquery.NewDocumentFromReader(strings.NewReader(decodedContents))
if err != nil {
log.Fatal(err)
}
timeItems := strings.Fields(doc.Find(".top tbody tr tbody tr td").Eq(1).Text())
re := regexp.MustCompile("[0-9]+")
year := re.FindAllString(timeItems[0], -1)
month := re.FindAllString(timeItems[1], -1)
day := re.FindAllString(timeItems[2], -1)
hour := re.FindAllString(timeItems[3], -1)
tagDate := fmt.Sprintf("%s-%s-%s %s:00", year[0], month[0], day[0], hour[0])
queryResult := WeatherData{}
db.Where("tag_date = ?", tagDate).First(&queryResult)
doScrape := queryResult.ID == 0
if doScrape {
items := doc.Find(".top .main tr td table tbody tr")
for i := 1; i < 27; i++ {
replacedItem := strings.Replace(items.Eq(i).Text(), "\n", "", -1)
listSubItem := strings.Fields(replacedItem)
log.Println(listSubItem[1])
var obsName string
if listSubItem[1] == "중구" {
obsName = listSubItem[1]
} else {
obsName = listSubItem[1] + "구"
}
windDirection := stringToFloat(listSubItem[2])
windDirectionString := listSubItem[3]
windSpeed := stringToFloat(listSubItem[4])
temperature := stringToFloat(listSubItem[5])
precipitation := stringToFloat(listSubItem[6])
humidity := stringToFloat(listSubItem[8])
obs := WeatherData{}
obs.TagDate = tagDate
obs.ObsName = obsName
obs.WindDirection = windDirection
obs.WindDirectionString = windDirectionString
obs.WindSpeed = windSpeed
obs.Temperature = temperature
obs.Precipitation = precipitation
obs.Humidity = humidity
db.NewRecord(obs)
db.Create(&obs)
}
}
return c.String(http.StatusOK, "Hello, World!")
}
func airPollutionScrape(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
resp, err := http.Get("http://cleanair.seoul.go.kr/air_city.htm?method=measure&grp1=pm10")
if err != nil {
log.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
| ringToFloat(s | identifier_name |
server.go | air.ObsName = airPollution[idxAir].ObsName
air.Location[0] = observatory[i].AWSLongitude
air.Location[1] = observatory[i].AWSLatitude
air.Wind[0] = weatherData[idxWeather].WindDirection
air.Wind[1] = weatherData[idxWeather].WindSpeed
air.Temperature = weatherData[idxWeather].Temperature
air.Humidity = weatherData[idxWeather].Humidity
air.Precipitation = weatherData[idxWeather].Precipitation
air.ItemPM25 = airPollution[idxAir].ItemPM25
air.ItemPM10 = airPollution[idxAir].ItemPM10
air.ItemO3 = airPollution[idxAir].ItemO3
air.ItemNO2 = airPollution[idxAir].ItemNO2
air.ItemCO = airPollution[idxAir].ItemCO
air.ItemSO2 = airPollution[idxAir].ItemSO2
airData = append(airData, air)
}
}
return c.JSON(http.StatusOK, airData)
}
func initObservatory(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
if !db.HasTable(&Observatory{}) {
db.CreateTable(&Observatory{})
}
obsFile, err := os.Open("data/observatory.csv")
if err != nil {
log.Fatal(err)
}
csvReader := csv.NewReader(bufio.NewReader(obsFile))
rows, _ := csvReader.ReadAll()
for i, row := range rows {
if i != 0 {
obs := Observatory{}
obs.AWSName = rows[i][0] + "구"
valLat, _ := strconv.ParseFloat(rows[i][1], 64)
obs.AWSLatitude = valLat
valLon, _ := strconv.ParseFloat(rows[i][2], 64)
obs.AWSLongitude = valLon
db.NewRecord(obs)
db.Create(&obs)
for j := range row {
log.Printf("%s ", rows[i][j])
}
log.Println()
}
}
return c.String(http.StatusOK, "Hello, World")
}
func stringToFloat(strValue string) float64 {
val, err := strconv.ParseFloat(strValue, 64)
if err != nil {
return -999
}
return val
}
func weatherDataScrape(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
resp, err := http.Get("http://aws.seoul.go.kr/RealTime/RealTimeWeatherUser.asp?TITLE=%C0%FC%20%C1%F6%C1%A1%20%C7%F6%C8%B2")
if err != nil {
log.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
log.Fatalf("status code error: %d %s", resp.StatusCode, resp.Status)
}
bytes, err := ioutil.ReadAll(resp.Body)
euckrDecoder := korean.EUCKR.NewDecoder()
decodedContents, err := euckrDecoder.String(string(bytes))
doc, err := goquery.NewDocumentFromReader(strings.NewReader(decodedContents))
if err != nil {
log.Fatal(err)
}
timeItems := strings.Fields(doc.Find(".top tbody tr tbody tr td").Eq(1).Text())
re := regexp.MustCompile("[0-9]+")
year := re.FindAllString(timeItems[0], -1)
month := re.FindAllString(timeItems[1], -1)
day := re.FindAllString(timeItems[2], -1)
hour := re.FindAllString(timeItems[3], -1)
tagDate := fmt.Sprintf("%s-%s-%s %s:00", year[0], month[0], day[0], hour[0])
queryResult := WeatherData{}
db.Where("tag_date = ?", tagDate).First(&queryResult)
doScrape := queryResult.ID == 0
if doScrape {
items := doc.Find(".top .main tr td table tbody tr")
for i := 1; i < 27; i++ {
replacedItem := strings.Replace(items.Eq(i).Text(), "\n", "", -1)
listSubItem := strings.Fields(replacedItem)
log.Println(listSubItem[1])
var obsName string
if listSubItem[1] == "중구" {
obsName = listSubItem[1]
} else {
obsName = listSubItem[1] + "구"
}
windDirection := stringToFloat(listSubItem[2])
windDirectionString := listSubItem[3]
windSpeed := stringToFloat(listSubItem[4])
temperature := stringToFloat(listSubItem[5])
precipitation := stringToFloat(listSubItem[6])
humidity := stringToFloat(listSubItem[8])
obs := WeatherData{}
obs.TagDate = tagDate
obs.ObsName = obsName
obs.WindDirection = windDirection
obs.WindDirectionString = windDirectionString
obs.WindSpeed = windSpeed
obs.Temperature = temperature
obs.Precipitation = precipitation
obs.Humidity = humidity
db.NewRecord(obs)
db.Create(&obs)
}
}
return c.String(http.StatusOK, "Hello, World!")
}
func airPollutionScrape(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
resp, err := http.Get("http://cleanair.seoul.go.kr/air_city.htm?method=measure&grp1=pm10")
if err != nil {
log.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
log.Fatalf("status code error: %d %s", resp.StatusCode, resp.Status)
}
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
log.Fatal(err)
}
items := doc.Find(".tbl1 tbody tr")
ii := strings.Fields(strings.Replace(items.Eq(0).Text(), "\n", "", -1))
tagDate := ii[0] + " " + ii[1]
queryResult := AirPollution{}
db.Where("tag_date = ?", tagDate).First(&queryResult)
doScrape := queryResult.ID == 0
if doScrape {
for i := 1; i < len(items.Nodes); i++ {
tagTime := items.Eq(i).Find("th").Text()
subItem := items.Eq(i).Find("td")
replacedSubItem := strings.Replace(subItem.Text(), "\n", "", -1)
listSubItem := strings.Fields(replacedSubItem)
obsName := listSubItem[0]
pm10 := stringToFloat(listSubItem[1])
pm25 := stringToFloat(listSubItem[2])
o3 := stringToFloat(listSubItem[3])
no2 := stringToFloat(listSubItem[4])
co := stringToFloat(listSubItem[5])
so2 := stringToFloat(listSubItem[6])
obs := AirPollution{}
obs.ObsName = obsName
obs.TagDate = tagTime
obs.ItemPM10 = pm10
obs.ItemPM25 = pm25
obs.ItemO3 = o3
obs.ItemNO2 = no2
obs.ItemCO = co
obs.ItemSO2 = so2
db.NewRecord(obs)
db.Create(&obs)
}
}
return c.String(http.StatusOK, "Hello, World!")
}
func getIndexAirPollution(data []AirPollution, obsName string) int {
for i := 0; i < len(data); i++ {
log.Println(data[i].ObsName, obsName)
if data[i].ObsName == obsName {
if | data[i].ItemPM10 == -999 {
return -1
}
if data[i].ItemPM25 == -999 {
return -1
}
if data[i].ItemCO == -999 {
return -1
}
if data[i].ItemNO2 == -999 {
return -1
}
if data[i].ItemSO2 == -999 {
return -1
}
if data[i].ItemO3 == -999 {
return -1
}
return i | conditional_block |
|
server.go | (localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
airPollution := []AirPollution{}
weatherData := []WeatherData{}
observatory := []Observatory{}
airPollutionItem := AirPollution{}
db.Last(&airPollutionItem)
timeString := airPollutionItem.TagDate
db.Where("tag_date = ?", timeString).Find(&airPollution)
db.Where("tag_date = ?", timeString).Find(&weatherData)
db.Find(&observatory)
airData := []AirData{}
for i := 0; i < len(observatory); i++ {
air := AirData{}
idxAir := getIndexAirPollution(airPollution, observatory[i].AWSName)
idxWeather := getIndexWeatherData(weatherData, observatory[i].AWSName)
if idxAir != -1 && idxWeather != -1 {
air.TagDate = airPollution[idxAir].TagDate
air.ObsName = airPollution[idxAir].ObsName
air.Location[0] = observatory[i].AWSLongitude
air.Location[1] = observatory[i].AWSLatitude
air.Wind[0] = weatherData[idxWeather].WindDirection
air.Wind[1] = weatherData[idxWeather].WindSpeed
air.Temperature = weatherData[idxWeather].Temperature
air.Humidity = weatherData[idxWeather].Humidity
air.Precipitation = weatherData[idxWeather].Precipitation
air.ItemPM25 = airPollution[idxAir].ItemPM25
air.ItemPM10 = airPollution[idxAir].ItemPM10
air.ItemO3 = airPollution[idxAir].ItemO3
air.ItemNO2 = airPollution[idxAir].ItemNO2
air.ItemCO = airPollution[idxAir].ItemCO
air.ItemSO2 = airPollution[idxAir].ItemSO2
airData = append(airData, air)
}
}
return c.JSON(http.StatusOK, airData)
}
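// initObservatory creates the observatory table if needed and seeds it from data/observatory.csv (district name, latitude, longitude).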
func initObservatory(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
if !db.HasTable(&Observatory{}) {
db.CreateTable(&Observatory{})
}
obsFile, err := os.Open("data/observatory.csv")
if err != nil {
log.Fatal(err)
}
csvReader := csv.NewReader(bufio.NewReader(obsFile))
rows, _ := csvReader.ReadAll()
for i, row := range rows {
if i != 0 {
obs := Observatory{}
obs.AWSName = rows[i][0] + "구"
valLat, _ := strconv.ParseFloat(rows[i][1], 64)
obs.AWSLatitude = valLat
valLon, _ := strconv.ParseFloat(rows[i][2], 64)
obs.AWSLongitude = valLon
db.NewRecord(obs)
db.Create(&obs)
for j := range row {
log.Printf("%s ", rows[i][j])
}
log.Println()
}
}
return c.String(http.StatusOK, "Hello, World")
}
func stringToFloat(strValue string) float64 {
val, err := strconv.ParseFloat(strValue, 64)
if err == nil {
return val
} else {
return -999
}
}
func weatherDataScrape(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
resp, err := http.Get("http://aws.seoul.go.kr/RealTime/RealTimeWeatherUser.asp?TITLE=%C0%FC%20%C1%F6%C1%A1%20%C7%F6%C8%B2")
if err != nil {
log.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
log.Fatalf("status code error: %d %s", resp.StatusCode, resp.Status)
}
bytes, err := ioutil.ReadAll(resp.Body)
euckrDecoder := korean.EUCKR.NewDecoder()
decodedContents, err := euckrDecoder.String(string(bytes))
doc, err := goquery.NewDocumentFromReader(strings.NewReader(decodedContents))
if err != nil {
log.Fatal(err)
}
timeItems := strings.Fields(doc.Find(".top tbody tr tbody tr td").Eq(1).Text())
re := regexp.MustCompile("[0-9]+")
year := re.FindAllString(timeItems[0], -1)
month := re.FindAllString(timeItems[1], -1)
day := re.FindAllString(timeItems[2], -1)
hour := re.FindAllString(timeItems[3], -1)
tagDate := fmt.Sprintf("%s-%s-%s %s:00", year[0], month[0], day[0], hour[0])
queryResult := WeatherData{}
db.Where("tag_date = ?", tagDate).First(&queryResult)
var doScrape bool
if queryResult.ID == 0 {
doScrape = true
} else {
doScrape = false
}
if doScrape {
items := doc.Find(".top .main tr td table tbody tr")
for i := 1; i < 27; i++ {
replacedItem := strings.Replace(items.Eq(i).Text(), "\n", "", -1)
listSubItem := strings.Fields(replacedItem)
log.Println(listSubItem[1])
var obsName string
if listSubItem[1] == "중구" {
obsName = listSubItem[1]
} else {
obsName = listSubItem[1] + "구"
}
windDirection := stringToFloat(listSubItem[2])
windDirectionString := listSubItem[3]
windSpeed := stringToFloat(listSubItem[4])
temperature := stringToFloat(listSubItem[5])
precipitation := stringToFloat(listSubItem[6])
humidity := stringToFloat(listSubItem[8])
obs := WeatherData{}
obs.TagDate = tagDate
obs.ObsName = obsName
obs.WindDirection = windDirection
obs.WindDirectionString = windDirectionString
obs.WindSpeed = windSpeed
obs.Temperature = temperature
obs.Precipitation = precipitation
obs.Humidity = humidity
db.NewRecord(obs)
db.Create(&obs)
}
}
return c.String(http.StatusOK, "Hello, World!")
}
func airPollutionScrape(c echo.Context) error {
db, err := gorm.Open("mysql", "user:passwd@tcp(localhost:3306)/dev_gowind?charset=utf8")
if err != nil {
log.Fatal(err)
}
defer db.Close()
resp, err := http.Get("http://cleanair.seoul.go.kr/air_city.htm?method=measure&grp1=pm10")
if err != nil {
log.Fatal(err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
log.Fatalf("status code error: %d %s", resp.StatusCode, resp.Status)
}
doc, err := goquery.NewDocumentFromReader(resp.Body)
if err != nil {
log.Fatal(err)
}
items := doc.Find(".tbl1 tbody tr")
ii := strings.Fields(strings.Replace(items.Eq(0).Text(), "\n", "", -1))
tagDate := ii[0] + " " + ii[1]
queryResult := AirPollution{}
db.Where("tag_date = ?", tagDate).First(&queryResult)
var doScrape bool
if queryResult.ID == 0 {
doScrape = true
} else {
doScrape = false
}
if doScrape {
for i := 1; i < len(items.Nodes); i++ {
tagTime := items.Eq(i).Find("th").Text()
subItem := items.Eq(i).Find("td")
replacedSubItem := strings.Replace(subItem.Text(), "\n", "", -1)
listSubItem := strings.Fields(replacedSubItem)
obsName := listSubItem[0]
pm10 := stringToFloat(listSubItem[1])
pm25 := stringToFloat(listSubItem[2])
o3 := stringToFloat(listSubItem[3])
no2 := stringToFloat(listSubItem[4])
co := stringToFloat(listSubItem[5])
so2 := stringToFloat(listSubItem[6])
obs := AirPollution{}
obs.ObsName = obsName
obs.TagDate = tagTime
obs.ItemPM10 = pm10
obs.ItemPM25 = pm25
obs.ItemO3 = o3
obs.ItemNO2 = no2 | obs.ItemCO = co
obs.ItemSO2 = so2 | random_line_split |
|
main.rs | 3.0;
const BALL_ACC: f32 = 0.005;
// AI constants
const AI_ENABLED: bool = true;
const AI_MAX_ITERS: u32 = 400; // Experimentation results: around 800 is more than sufficient,
// 400 is quite good though is insufficient for a short time after ball leaves enemy paddle
const AI_WAIT_FOR_PLAYER_HIT: bool = true; // wait for the player to hit the ball first before calculating solution
// (= will not have to guess the player's angle of attack)
// NB: if waiting for player hit, max iters may be set to a lower value
const EPSILON: f32 = 1.0;
#[derive(Clone)]
struct Paddle {
paddle_texture: Texture,
position: Vec2<f32>,
}
#[derive(Clone)]
struct Ball {
ball_texture: Texture,
position: Vec2<f32>,
velocity: Vec2<f32>,
}
impl Ball {
fn reset(&mut self){
self.position = Vec2::new(
(SCREEN_WIDTH as f32)/2.0 - (self.ball_texture.width() as f32)/2.0,
(SCREEN_HEIGHT as f32)/2.0 - (self.ball_texture.height() as f32)/2.0
);
}
}
struct GameState {
ball: Ball,
player_paddle: Paddle,
player_score: i32,
enemy_paddle: Paddle,
enemy_score: i32,
simulated: bool,
enemy_hit: bool, // used when simulating
}
impl GameState {
fn new(ctx: &mut Context) -> tetra::Result<GameState> {
// init textures
let paddle_texture = Texture::new(ctx, "res/paddle.png")?;
let ball_texture = Texture::new(ctx, "res/ball.png")?;
// init ball
let mut ball = Ball {
ball_texture,
position: Vec2::new(0.0, 0.0),
velocity: Vec2::new(1.0, 1.0),
};
ball.reset(); // initialise ball's position
ball.velocity = ball.velocity.normalized() * BALL_SPEED; // init ball speed
// calculate paddle initial y
let paddle_initial_y = (SCREEN_HEIGHT as f32)/2.0 - (paddle_texture.height() as f32)/2.0;
Ok(GameState {
ball,
player_paddle: Paddle {
paddle_texture: paddle_texture.clone(),
position: Vec2::new(
(SCREEN_WIDTH as f32) - PADDING - (paddle_texture.width() as f32),
paddle_initial_y,
),
},
player_score: 0,
enemy_paddle: Paddle {
paddle_texture: paddle_texture.clone(),
position: Vec2::new(
PADDING,
paddle_initial_y,
),
},
enemy_score: 0,
simulated: false,
enemy_hit: false,
})
}
fn draw_paddle(ctx: &mut Context, paddle: &Paddle){
graphics::draw(ctx, &paddle.paddle_texture, paddle.position)
}
fn handle_inputs(&mut self, ctx: &mut Context){
if input::is_key_down(ctx, Key::W) {
self.player_paddle.position.y -= PADDLE_SPEED;
}
if input::is_key_down(ctx, Key::S) {
self.player_paddle.position.y += PADDLE_SPEED;
}
// if !AI_ENABLED {
if !false {
if input::is_key_down(ctx, Key::O) {
self.enemy_paddle.position.y -= PADDLE_SPEED;
}
if input::is_key_down(ctx, Key::L) {
self.enemy_paddle.position.y += PADDLE_SPEED;
}
}
}
/// Check for ball-paddle collision with the given paddle
fn check_intersects(ball: &Ball, paddle: &Paddle) -> bool{
// check if ball's centre point is inside paddle rectangle:
// method adapted from: https://stackoverflow.com/a/2763387/5013267
let ab = Vec2::new(paddle.paddle_texture.width() as f32, 0.0); // vector a->b
let bc = Vec2::new(0.0, paddle.paddle_texture.height() as f32); // vector b->c
let m = ball.position + Vec2::new(ball.ball_texture.width() as f32, ball.ball_texture.height() as f32)/2.0;
let ab_dot_am = ab.dot(m - paddle.position);
let bc_dot_bm = bc.dot(m - (paddle.position + (paddle.paddle_texture.width() as f32, 0.0)));
// return value:
0.0 <= ab_dot_am && ab_dot_am <= ab.dot(ab)
&& 0.0 <= bc_dot_bm && bc_dot_bm <= bc.dot(bc)
}
fn apply_collision_response(ball: &mut Ball, paddle: &Paddle){
ball.velocity.x = -(ball.velocity.x + (BALL_ACC * ball.velocity.x.signum()));
let offset = (paddle.position.y - ball.position.y) / paddle.paddle_texture.height() as f32;
ball.velocity.y += PADDLE_SPIN * -offset;
}
fn update_collision(ball: &mut Ball, paddle: &Paddle){
if GameState::check_intersects(ball, &paddle) |
}
fn update_ball(&mut self, _ctx: &mut Context){
self.update_ai(_ctx);
self.ball.position += self.ball.velocity;
if !self.simulated {
GameState::update_collision(&mut self.ball, &self.player_paddle);
GameState::update_collision(&mut self.ball, &self.enemy_paddle);
}else {
// if simulated, use simplified calculations
// (always assume ball hits player paddle, otherwise AI would win anyway)
// only need to check player paddle
if self.ball.position.x + ((self.ball.ball_texture.width() as f32)/2.0) >= self.player_paddle.position.x {
GameState::apply_collision_response(&mut self.ball, &mut self.player_paddle);
}
// check reaches enemy's side (so that iteration can be terminated)
if self.ball.position.x <= self.enemy_paddle.position.x + self.enemy_paddle.paddle_texture.width() as f32 {
self.enemy_hit = true;
return; // no need to do rest of update calculations
}
}
// walls
// if bouncing off top or bottom walls...
if (self.ball.position[1] + (self.ball.ball_texture.height() as f32) >= (SCREEN_HEIGHT as f32)) || self.ball.position[1] <= 0.0 {
self.ball.velocity[1] = -self.ball.velocity[1];
}
// if bouncing off either of the side walls...
if self.ball.position[0] + (self.ball.ball_texture.width() as f32) >= (SCREEN_WIDTH as f32) || self.ball.position[0] <= 0.0 {
if self.ball.position[0] <= 0.0 {
// bounced off left wall
self.player_score += 1;
} else {
// bounced off right wall
self.enemy_score += 1;
self.ball.velocity = Vec2::new(1.0, 1.0); // setting direction
}
// reset ball to centre
self.ball.reset();
// reset ball speed (but not direction)
self.ball.velocity = self.ball.velocity.normalized() * BALL_SPEED;
}
}
fn update_ai(&mut self, ctx: &mut Context){
if self.simulated || !AI_ENABLED {
return;
}
if AI_WAIT_FOR_PLAYER_HIT && self.ball.velocity.x >= 0.0 {
// ball vel.x >= 0.0 implies ball moving towards player still, and has not been returned yet
return;
}
// create a simulation GameState, cloned from real GameState
let mut sim = GameState {
ball: self.ball.clone(),
player_paddle: self.player_paddle.clone(),
player_score: self.player_score,
enemy_paddle: self.enemy_paddle.clone(),
enemy_score: self.enemy_score,
simulated: true,
enemy_hit: false,
};
for i in 0..AI_MAX_ITERS {
if !sim.enemy_hit {
sim.update(ctx).expect("bruh moment when updating sim");
// sim.draw(ctx).expect("bruh moment when drawing sim"); // NB: only for debug -- rendering here slows down program signficantly
} else {
// if enemy_hit, stop iterating: found solution
// TODO: maybe implement solution caching
// (but low prio because solution prediction is variable anyway [depends on other player's angle of attack])
let target_y = sim.ball.position.y + (sim.ball.ball_texture.height() as f32)/2.0
- (self.enemy_paddle.paddle_texture.height() as f32)/2.0;
let delta = target_y - self.enemy_paddle.position.y;
if delta.abs() > EPSILON {
self.enemy_paddle.position | {
GameState::apply_collision_response(ball, paddle);
} | conditional_block |
main.rs | = 3.0;
const BALL_ACC: f32 = 0.005;
// AI constants
const AI_ENABLED: bool = true;
const AI_MAX_ITERS: u32 = 400; // Experimentation results: around 800 is more than sufficient,
// 400 is quite good though is insufficient for a short time after ball leaves enemy paddle
const AI_WAIT_FOR_PLAYER_HIT: bool = true; // wait for the player to hit the ball first before calculating solution
// (= will not have to guess the player's angle of attack)
// NB: if waiting for player hit, max iters may be set to a lower value
const EPSILON: f32 = 1.0;
#[derive(Clone)]
struct Paddle {
paddle_texture: Texture,
position: Vec2<f32>,
}
#[derive(Clone)]
struct | {
ball_texture: Texture,
position: Vec2<f32>,
velocity: Vec2<f32>,
}
impl Ball {
fn reset(&mut self){
self.position = Vec2::new(
(SCREEN_WIDTH as f32)/2.0 - (self.ball_texture.width() as f32)/2.0,
(SCREEN_HEIGHT as f32)/2.0 - (self.ball_texture.height() as f32)/2.0
);
}
}
struct GameState {
ball: Ball,
player_paddle: Paddle,
player_score: i32,
enemy_paddle: Paddle,
enemy_score: i32,
simulated: bool,
enemy_hit: bool, // used when simulating
}
impl GameState {
fn new(ctx: &mut Context) -> tetra::Result<GameState> {
// init textures
let paddle_texture = Texture::new(ctx, "res/paddle.png")?;
let ball_texture = Texture::new(ctx, "res/ball.png")?;
// init ball
let mut ball = Ball {
ball_texture,
position: Vec2::new(0.0, 0.0),
velocity: Vec2::new(1.0, 1.0),
};
ball.reset(); // initialise ball's position
ball.velocity = ball.velocity.normalized() * BALL_SPEED; // init ball speed
// calculate paddle initial y
let paddle_initial_y = (SCREEN_HEIGHT as f32)/2.0 - (paddle_texture.height() as f32)/2.0;
Ok(GameState {
ball,
player_paddle: Paddle {
paddle_texture: paddle_texture.clone(),
position: Vec2::new(
(SCREEN_WIDTH as f32) - PADDING - (paddle_texture.width() as f32),
paddle_initial_y,
),
},
player_score: 0,
enemy_paddle: Paddle {
paddle_texture: paddle_texture.clone(),
position: Vec2::new(
PADDING,
paddle_initial_y,
),
},
enemy_score: 0,
simulated: false,
enemy_hit: false,
})
}
fn draw_paddle(ctx: &mut Context, paddle: &Paddle){
graphics::draw(ctx, &paddle.paddle_texture, paddle.position)
}
fn handle_inputs(&mut self, ctx: &mut Context){
if input::is_key_down(ctx, Key::W) {
self.player_paddle.position.y -= PADDLE_SPEED;
}
if input::is_key_down(ctx, Key::S) {
self.player_paddle.position.y += PADDLE_SPEED;
}
// if !AI_ENABLED {
if !false {
if input::is_key_down(ctx, Key::O) {
self.enemy_paddle.position.y -= PADDLE_SPEED;
}
if input::is_key_down(ctx, Key::L) {
self.enemy_paddle.position.y += PADDLE_SPEED;
}
}
}
/// Check for ball-paddle collision with the given paddle
fn check_intersects(ball: &Ball, paddle: &Paddle) -> bool{
// check if ball's centre point is inside paddle rectangle:
// method adapted from: https://stackoverflow.com/a/2763387/5013267
let ab = Vec2::new(paddle.paddle_texture.width() as f32, 0.0); // vector a->b
let bc = Vec2::new(0.0, paddle.paddle_texture.height() as f32); // vector b->c
let m = ball.position + Vec2::new(ball.ball_texture.width() as f32, ball.ball_texture.height() as f32)/2.0;
let ab_dot_am = ab.dot(m - paddle.position);
let bc_dot_bm = bc.dot(m - (paddle.position + (paddle.paddle_texture.width() as f32, 0.0)));
// return value:
0.0 <= ab_dot_am && ab_dot_am <= ab.dot(ab)
&& 0.0 <= bc_dot_bm && bc_dot_bm <= bc.dot(bc)
}
fn apply_collision_response(ball: &mut Ball, paddle: &Paddle){
ball.velocity.x = -(ball.velocity.x + (BALL_ACC * ball.velocity.x.signum()));
let offset = (paddle.position.y - ball.position.y) / paddle.paddle_texture.height() as f32;
ball.velocity.y += PADDLE_SPIN * -offset;
}
fn update_collision(ball: &mut Ball, paddle: &Paddle){
if GameState::check_intersects(ball, &paddle){
GameState::apply_collision_response(ball, paddle);
}
}
fn update_ball(&mut self, _ctx: &mut Context){
self.update_ai(_ctx);
self.ball.position += self.ball.velocity;
if !self.simulated {
GameState::update_collision(&mut self.ball, &self.player_paddle);
GameState::update_collision(&mut self.ball, &self.enemy_paddle);
}else {
// if simulated, use simplified calculations
// (always assume ball hits player paddle, otherwise AI would win anyway)
// only need to check player paddle
if self.ball.position.x + ((self.ball.ball_texture.width() as f32)/2.0) >= self.player_paddle.position.x {
GameState::apply_collision_response(&mut self.ball, &mut self.player_paddle);
}
// check reaches enemy's side (so that iteration can be terminated)
if self.ball.position.x <= self.enemy_paddle.position.x + self.enemy_paddle.paddle_texture.width() as f32 {
self.enemy_hit = true;
return; // no need to do rest of update calculations
}
}
// walls
// if bouncing off top or bottom walls...
if (self.ball.position[1] + (self.ball.ball_texture.height() as f32) >= (SCREEN_HEIGHT as f32)) || self.ball.position[1] <= 0.0 {
self.ball.velocity[1] = -self.ball.velocity[1];
}
// if bouncing off either of the side walls...
if self.ball.position[0] + (self.ball.ball_texture.width() as f32) >= (SCREEN_WIDTH as f32) || self.ball.position[0] <= 0.0 {
if self.ball.position[0] <= 0.0 {
// bounced off left wall
self.player_score += 1;
} else {
// bounced off right wall
self.enemy_score += 1;
self.ball.velocity = Vec2::new(1.0, 1.0); // setting direction
}
// reset ball to centre
self.ball.reset();
// reset ball speed (but not direction)
self.ball.velocity = self.ball.velocity.normalized() * BALL_SPEED;
}
}
fn update_ai(&mut self, ctx: &mut Context){
if self.simulated || !AI_ENABLED {
return;
}
if AI_WAIT_FOR_PLAYER_HIT && self.ball.velocity.x >= 0.0 {
// ball vel.x >= 0.0 implies ball moving towards player still, and has not been returned yet
return;
}
// create a simulation GameState, cloned from real GameState
let mut sim = GameState {
ball: self.ball.clone(),
player_paddle: self.player_paddle.clone(),
player_score: self.player_score,
enemy_paddle: self.enemy_paddle.clone(),
enemy_score: self.enemy_score,
simulated: true,
enemy_hit: false,
};
for i in 0..AI_MAX_ITERS {
if !sim.enemy_hit {
sim.update(ctx).expect("bruh moment when updating sim");
// sim.draw(ctx).expect("bruh moment when drawing sim"); // NB: only for debug -- rendering here slows down program signficantly
} else {
// if enemy_hit, stop iterating: found solution
// TODO: maybe implement solution caching
// (but low prio because solution prediction is variable anyway [depends on other player's angle of attack])
let target_y = sim.ball.position.y + (sim.ball.ball_texture.height() as f32)/2.0
- (self.enemy_paddle.paddle_texture.height() as f32)/2.0;
let delta = target_y - self.enemy_paddle.position.y;
if delta.abs() > EPSILON {
self.enemy_paddle.position | Ball | identifier_name |
main.rs | = 3.0;
const BALL_ACC: f32 = 0.005;
// AI constants
const AI_ENABLED: bool = true;
const AI_MAX_ITERS: u32 = 400; // Experimentation results: around 800 is more than sufficient,
// 400 is quite good though is insufficient for a short time after ball leaves enemy paddle
const AI_WAIT_FOR_PLAYER_HIT: bool = true; // wait for the player to hit the ball first before calculating solution
// (= will not have to guess the player's angle of attack)
// NB: if waiting for player hit, max iters may be set to a lower value
const EPSILON: f32 = 1.0;
#[derive(Clone)]
struct Paddle {
paddle_texture: Texture,
position: Vec2<f32>,
}
#[derive(Clone)]
struct Ball {
ball_texture: Texture,
position: Vec2<f32>,
velocity: Vec2<f32>,
}
impl Ball {
fn reset(&mut self){
self.position = Vec2::new(
(SCREEN_WIDTH as f32)/2.0 - (self.ball_texture.width() as f32)/2.0,
(SCREEN_HEIGHT as f32)/2.0 - (self.ball_texture.height() as f32)/2.0
);
}
}
struct GameState {
ball: Ball,
player_paddle: Paddle,
player_score: i32,
enemy_paddle: Paddle,
enemy_score: i32,
simulated: bool,
enemy_hit: bool, // used when simulating
}
impl GameState {
fn new(ctx: &mut Context) -> tetra::Result<GameState> {
// init textures
let paddle_texture = Texture::new(ctx, "res/paddle.png")?;
let ball_texture = Texture::new(ctx, "res/ball.png")?;
// init ball
let mut ball = Ball {
ball_texture,
position: Vec2::new(0.0, 0.0),
velocity: Vec2::new(1.0, 1.0),
};
ball.reset(); // initialise ball's position
ball.velocity = ball.velocity.normalized() * BALL_SPEED; // init ball speed
// calculate paddle initial y
let paddle_initial_y = (SCREEN_HEIGHT as f32)/2.0 - (paddle_texture.height() as f32)/2.0;
Ok(GameState {
ball,
player_paddle: Paddle {
paddle_texture: paddle_texture.clone(),
position: Vec2::new(
(SCREEN_WIDTH as f32) - PADDING - (paddle_texture.width() as f32),
paddle_initial_y,
),
},
player_score: 0,
enemy_paddle: Paddle {
paddle_texture: paddle_texture.clone(),
position: Vec2::new(
PADDING,
paddle_initial_y,
),
},
enemy_score: 0,
simulated: false,
enemy_hit: false,
}) | fn draw_paddle(ctx: &mut Context, paddle: &Paddle){
graphics::draw(ctx, &paddle.paddle_texture, paddle.position)
}
fn handle_inputs(&mut self, ctx: &mut Context){
if input::is_key_down(ctx, Key::W) {
self.player_paddle.position.y -= PADDLE_SPEED;
}
if input::is_key_down(ctx, Key::S) {
self.player_paddle.position.y += PADDLE_SPEED;
}
// if !AI_ENABLED {
if !false {
if input::is_key_down(ctx, Key::O) {
self.enemy_paddle.position.y -= PADDLE_SPEED;
}
if input::is_key_down(ctx, Key::L) {
self.enemy_paddle.position.y += PADDLE_SPEED;
}
}
}
/// Check for ball-paddle collision with the given paddle
fn check_intersects(ball: &Ball, paddle: &Paddle) -> bool{
// check if ball's centre point is inside paddle rectangle:
// method adapted from: https://stackoverflow.com/a/2763387/5013267
let ab = Vec2::new(paddle.paddle_texture.width() as f32, 0.0); // vector a->b
let bc = Vec2::new(0.0, paddle.paddle_texture.height() as f32); // vector b->c
let m = ball.position + Vec2::new(ball.ball_texture.width() as f32, ball.ball_texture.height() as f32)/2.0;
let ab_dot_am = ab.dot(m - paddle.position);
let bc_dot_bm = bc.dot(m - (paddle.position + (paddle.paddle_texture.width() as f32, 0.0)));
// return value:
0.0 <= ab_dot_am && ab_dot_am <= ab.dot(ab)
&& 0.0 <= bc_dot_bm && bc_dot_bm <= bc.dot(bc)
}
fn apply_collision_response(ball: &mut Ball, paddle: &Paddle){
ball.velocity.x = -(ball.velocity.x + (BALL_ACC * ball.velocity.x.signum()));
let offset = (paddle.position.y - ball.position.y) / paddle.paddle_texture.height() as f32;
ball.velocity.y += PADDLE_SPIN * -offset;
}
fn update_collision(ball: &mut Ball, paddle: &Paddle){
if GameState::check_intersects(ball, &paddle){
GameState::apply_collision_response(ball, paddle);
}
}
fn update_ball(&mut self, _ctx: &mut Context){
self.update_ai(_ctx);
self.ball.position += self.ball.velocity;
if !self.simulated {
GameState::update_collision(&mut self.ball, &self.player_paddle);
GameState::update_collision(&mut self.ball, &self.enemy_paddle);
}else {
// if simulated, use simplified calculations
// (always assume ball hits player paddle, otherwise AI would win anyway)
// only need to check player paddle
if self.ball.position.x + ((self.ball.ball_texture.width() as f32)/2.0) >= self.player_paddle.position.x {
GameState::apply_collision_response(&mut self.ball, &mut self.player_paddle);
}
// check reaches enemy's side (so that iteration can be terminated)
if self.ball.position.x <= self.enemy_paddle.position.x + self.enemy_paddle.paddle_texture.width() as f32 {
self.enemy_hit = true;
return; // no need to do rest of update calculations
}
}
// walls
// if bouncing off top or bottom walls...
if (self.ball.position[1] + (self.ball.ball_texture.height() as f32) >= (SCREEN_HEIGHT as f32)) || self.ball.position[1] <= 0.0 {
self.ball.velocity[1] = -self.ball.velocity[1];
}
// if bouncing off either of the side walls...
if self.ball.position[0] + (self.ball.ball_texture.width() as f32) >= (SCREEN_WIDTH as f32) || self.ball.position[0] <= 0.0 {
if self.ball.position[0] <= 0.0 {
// bounced off left wall
self.player_score += 1;
} else {
// bounced off right wall
self.enemy_score += 1;
self.ball.velocity = Vec2::new(1.0, 1.0); // setting direction
}
// reset ball to centre
self.ball.reset();
// reset ball speed (but not direction)
self.ball.velocity = self.ball.velocity.normalized() * BALL_SPEED;
}
}
fn update_ai(&mut self, ctx: &mut Context){
if self.simulated || !AI_ENABLED {
return;
}
if AI_WAIT_FOR_PLAYER_HIT && self.ball.velocity.x >= 0.0 {
// ball vel.x >= 0.0 implies ball moving towards player still, and has not been returned yet
return;
}
// create a simulation GameState, cloned from real GameState
let mut sim = GameState {
ball: self.ball.clone(),
player_paddle: self.player_paddle.clone(),
player_score: self.player_score,
enemy_paddle: self.enemy_paddle.clone(),
enemy_score: self.enemy_score,
simulated: true,
enemy_hit: false,
};
for i in 0..AI_MAX_ITERS {
if !sim.enemy_hit {
sim.update(ctx).expect("bruh moment when updating sim");
// sim.draw(ctx).expect("bruh moment when drawing sim"); // NB: only for debug -- rendering here slows down program signficantly
} else {
// if enemy_hit, stop iterating: found solution
// TODO: maybe implement solution caching
// (but low prio because solution prediction is variable anyway [depends on other player's angle of attack])
let target_y = sim.ball.position.y + (sim.ball.ball_texture.height() as f32)/2.0
- (self.enemy_paddle.paddle_texture.height() as f32)/2.0;
let delta = target_y - self.enemy_paddle.position.y;
if delta.abs() > EPSILON {
self.enemy_paddle.position.y | }
| random_line_split |
ffi.rs |
#[allow(non_camel_case_types)]
pub type io_object_t = mach_port_t;
#[allow(non_camel_case_types)]
pub type io_iterator_t = io_object_t;
#[allow(non_camel_case_types)]
pub type io_registry_entry_t = io_object_t;
// This is a hack, `io_name_t` should normally be `[c_char; 128]` but Rust makes it very annoying
// to deal with that so we go around it a bit.
#[allow(non_camel_case_types, dead_code)]
pub type io_name = [c_char; 128];
#[allow(non_camel_case_types)]
pub type io_name_t = *const c_char;
pub type IOOptionBits = u32;
#[allow(non_upper_case_globals)]
pub const kIOServicePlane: &[u8] = b"IOService\0";
#[allow(non_upper_case_globals)]
pub const kIOPropertyDeviceCharacteristicsKey: &str = "Device Characteristics";
#[allow(non_upper_case_globals)]
pub const kIOPropertyMediumTypeKey: &str = "Medium Type";
#[allow(non_upper_case_globals)]
pub const kIOPropertyMediumTypeSolidStateKey: &str = "Solid State";
#[allow(non_upper_case_globals)]
pub const kIOPropertyMediumTypeRotationalKey: &str = "Rotational";
// Based on https://github.com/libusb/libusb/blob/bed8d3034eac74a6e1ba123b5c270ea63cb6cf1a/libusb/os/darwin_usb.c#L54-L55,
// we can simply set it to 0 (and is the same value as its replacement `kIOMainPortDefault`).
#[allow(non_upper_case_globals)]
pub const kIOMasterPortDefault: mach_port_t = 0;
// Note: Obtaining information about disks using IOKIt is allowed inside the default macOS App Sandbox.
#[link(name = "IOKit", kind = "framework")]
extern "C" {
pub fn IOServiceGetMatchingServices(
mainPort: mach_port_t,
matching: CFMutableDictionaryRef,
existing: *mut io_iterator_t,
) -> kern_return_t;
#[allow(dead_code)]
pub fn IOServiceMatching(a: *const c_char) -> CFMutableDictionaryRef;
pub fn IOIteratorNext(iterator: io_iterator_t) -> io_object_t;
pub fn IOObjectRelease(obj: io_object_t) -> kern_return_t;
pub fn IORegistryEntryCreateCFProperty(
entry: io_registry_entry_t,
key: CFStringRef,
allocator: CFAllocatorRef,
options: IOOptionBits,
) -> CFDictionaryRef;
pub fn IORegistryEntryGetParentEntry(
entry: io_registry_entry_t,
plane: io_name_t,
parent: *mut io_registry_entry_t,
) -> kern_return_t;
#[allow(dead_code)]
pub fn IORegistryEntryGetName(entry: io_registry_entry_t, name: io_name_t) -> kern_return_t;
pub fn IOBSDNameMatching(
mainPort: mach_port_t,
options: u32,
bsdName: *const c_char,
) -> CFMutableDictionaryRef;
}
#[allow(dead_code)]
pub const KIO_RETURN_SUCCESS: i32 = 0;
extern "C" {
// FIXME: to be removed once higher version than core_foundation_sys 0.8.4 is released.
#[allow(dead_code)]
pub fn CFStringCreateWithCStringNoCopy(
alloc: CFAllocatorRef,
cStr: *const c_char,
encoding: core_foundation_sys::string::CFStringEncoding,
contentsDeallocator: CFAllocatorRef,
) -> CFStringRef;
}
#[cfg(all(
not(feature = "apple-sandbox"),
any(target_arch = "x86", target_arch = "x86_64")
))]
mod io_service {
use super::{io_object_t, mach_port_t};
use libc::{kern_return_t, size_t, task_t};
#[allow(non_camel_case_types)]
pub type io_connect_t = io_object_t;
#[allow(non_camel_case_types)]
pub type io_service_t = io_object_t;
#[allow(non_camel_case_types)]
pub type task_port_t = task_t;
extern "C" {
pub fn IOServiceOpen(
device: io_service_t,
owning_task: task_port_t,
type_: u32,
connect: *mut io_connect_t,
) -> kern_return_t;
pub fn IOServiceClose(a: io_connect_t) -> kern_return_t;
#[allow(dead_code)]
pub fn IOConnectCallStructMethod(
connection: mach_port_t,
selector: u32,
inputStruct: *const KeyData_t,
inputStructCnt: size_t,
outputStruct: *mut KeyData_t,
outputStructCnt: *mut size_t,
) -> kern_return_t;
}
#[cfg_attr(feature = "debug", derive(Debug, Eq, Hash, PartialEq))]
#[repr(C)]
pub struct KeyData_vers_t {
pub major: u8,
pub minor: u8,
pub build: u8,
pub reserved: [u8; 1],
pub release: u16,
}
#[cfg_attr(feature = "debug", derive(Debug, Eq, Hash, PartialEq))]
#[repr(C)]
pub struct KeyData_pLimitData_t {
pub version: u16,
pub length: u16,
pub cpu_plimit: u32,
pub gpu_plimit: u32,
pub mem_plimit: u32,
}
#[cfg_attr(feature = "debug", derive(Debug, Eq, Hash, PartialEq))]
#[repr(C)]
pub struct KeyData_keyInfo_t {
pub data_size: u32,
pub data_type: u32,
pub data_attributes: u8,
}
#[cfg_attr(feature = "debug", derive(Debug, Eq, Hash, PartialEq))]
#[repr(C)]
pub struct KeyData_t {
pub key: u32,
pub vers: KeyData_vers_t,
pub p_limit_data: KeyData_pLimitData_t,
pub key_info: KeyData_keyInfo_t,
pub result: u8,
pub status: u8,
pub data8: u8,
pub data32: u32,
pub bytes: [i8; 32], // SMCBytes_t
}
#[allow(dead_code)]
pub const KERNEL_INDEX_SMC: i32 = 2;
#[allow(dead_code)]
pub const SMC_CMD_READ_KEYINFO: u8 = 9;
#[allow(dead_code)]
pub const SMC_CMD_READ_BYTES: u8 = 5;
}
#[cfg(feature = "apple-sandbox")]
mod io_service {}
#[cfg(all(
not(feature = "apple-sandbox"),
any(target_arch = "x86", target_arch = "x86_64")
))]
pub use io_service::*;
#[cfg(all(not(feature = "apple-sandbox"), target_arch = "aarch64"))]
mod io_service {
use std::ptr::null;
use super::CFStringCreateWithCStringNoCopy;
use core_foundation_sys::array::CFArrayRef;
use core_foundation_sys::base::{CFAllocatorRef, CFRelease};
use core_foundation_sys::dictionary::{
kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks, CFDictionaryCreate,
CFDictionaryRef,
};
use core_foundation_sys::number::{kCFNumberSInt32Type, CFNumberCreate};
use core_foundation_sys::string::CFStringRef;
#[repr(C)]
pub struct __IOHIDServiceClient(libc::c_void);
pub type IOHIDServiceClientRef = *const __IOHIDServiceClient;
#[repr(C)]
pub struct __IOHIDEventSystemClient(libc::c_void);
pub type IOHIDEventSystemClientRef = *const __IOHIDEventSystemClient;
#[repr(C)]
pub struct __IOHIDEvent(libc::c_void);
pub type IOHIDEventRef = *const __IOHIDEvent;
#[allow(non_upper_case_globals)]
pub const kIOHIDEventTypeTemperature: i64 = 15;
#[inline]
#[allow(non_snake_case)]
pub fn IOHIDEventFieldBase(event_type: i64) -> i64 {
event_type << 16
}
#[cfg(not(feature = "apple-sandbox"))]
extern "C" {
pub fn IOHIDEventSystemClientCreate(allocator: CFAllocatorRef)
-> IOHIDEventSystemClientRef;
pub fn IOHIDEventSystemClientSetMatching(
client: IOHIDEventSystemClientRef,
matches: CFDictionaryRef,
) -> i32;
pub fn IOHIDEventSystemClientCopyServices(client: IOHIDEventSystemClientRef) -> CFArrayRef;
pub fn IOHIDServiceClientCopyProperty(
service: IOHIDServiceClientRef,
key: CFStringRef,
) -> CFStringRef;
pub fn IOHIDServiceClientCopyEvent(
service: IOHIDServiceClientRef,
v0: |
// Note: IOKit is only available on MacOS up until very recent iOS versions: https://developer.apple.com/documentation/iokit | random_line_split |
|
ffi.rs |
// to deal with that so we go around it a bit.
#[allow(non_camel_case_types, dead_code)]
pub type io_name = [c_char; 128];
#[allow(non_camel_case_types)]
pub type io_name_t = *const c_char;
pub type IOOptionBits = u32;
#[allow(non_upper_case_globals)]
pub const kIOServicePlane: &[u8] = b"IOService\0";
#[allow(non_upper_case_globals)]
pub const kIOPropertyDeviceCharacteristicsKey: &str = "Device Characteristics";
#[allow(non_upper_case_globals)]
pub const kIOPropertyMediumTypeKey: &str = "Medium Type";
#[allow(non_upper_case_globals)]
pub const kIOPropertyMediumTypeSolidStateKey: &str = "Solid State";
#[allow(non_upper_case_globals)]
pub const kIOPropertyMediumTypeRotationalKey: &str = "Rotational";
// Based on https://github.com/libusb/libusb/blob/bed8d3034eac74a6e1ba123b5c270ea63cb6cf1a/libusb/os/darwin_usb.c#L54-L55,
// we can simply set it to 0 (and is the same value as its replacement `kIOMainPortDefault`).
#[allow(non_upper_case_globals)]
pub const kIOMasterPortDefault: mach_port_t = 0;
// Note: Obtaining information about disks using IOKIt is allowed inside the default macOS App Sandbox.
#[link(name = "IOKit", kind = "framework")]
extern "C" {
pub fn IOServiceGetMatchingServices(
mainPort: mach_port_t,
matching: CFMutableDictionaryRef,
existing: *mut io_iterator_t,
) -> kern_return_t;
#[allow(dead_code)]
pub fn IOServiceMatching(a: *const c_char) -> CFMutableDictionaryRef;
pub fn IOIteratorNext(iterator: io_iterator_t) -> io_object_t;
pub fn IOObjectRelease(obj: io_object_t) -> kern_return_t;
pub fn IORegistryEntryCreateCFProperty(
entry: io_registry_entry_t,
key: CFStringRef,
allocator: CFAllocatorRef,
options: IOOptionBits,
) -> CFDictionaryRef;
pub fn IORegistryEntryGetParentEntry(
entry: io_registry_entry_t,
plane: io_name_t,
parent: *mut io_registry_entry_t,
) -> kern_return_t;
#[allow(dead_code)]
pub fn IORegistryEntryGetName(entry: io_registry_entry_t, name: io_name_t) -> kern_return_t;
pub fn IOBSDNameMatching(
mainPort: mach_port_t,
options: u32,
bsdName: *const c_char,
) -> CFMutableDictionaryRef;
}
#[allow(dead_code)]
pub const KIO_RETURN_SUCCESS: i32 = 0;
extern "C" {
// FIXME: to be removed once higher version than core_foundation_sys 0.8.4 is released.
#[allow(dead_code)]
pub fn CFStringCreateWithCStringNoCopy(
alloc: CFAllocatorRef,
cStr: *const c_char,
encoding: core_foundation_sys::string::CFStringEncoding,
contentsDeallocator: CFAllocatorRef,
) -> CFStringRef;
}
#[cfg(all(
not(feature = "apple-sandbox"),
any(target_arch = "x86", target_arch = "x86_64")
))]
mod io_service {
use super::{io_object_t, mach_port_t};
use libc::{kern_return_t, size_t, task_t};
#[allow(non_camel_case_types)]
pub type io_connect_t = io_object_t;
#[allow(non_camel_case_types)]
pub type io_service_t = io_object_t;
#[allow(non_camel_case_types)]
pub type task_port_t = task_t;
extern "C" {
pub fn IOServiceOpen(
device: io_service_t,
owning_task: task_port_t,
type_: u32,
connect: *mut io_connect_t,
) -> kern_return_t;
pub fn IOServiceClose(a: io_connect_t) -> kern_return_t;
#[allow(dead_code)]
pub fn IOConnectCallStructMethod(
connection: mach_port_t,
selector: u32,
inputStruct: *const KeyData_t,
inputStructCnt: size_t,
outputStruct: *mut KeyData_t,
outputStructCnt: *mut size_t,
) -> kern_return_t;
}
#[cfg_attr(feature = "debug", derive(Debug, Eq, Hash, PartialEq))]
#[repr(C)]
pub struct KeyData_vers_t {
pub major: u8,
pub minor: u8,
pub build: u8,
pub reserved: [u8; 1],
pub release: u16,
}
#[cfg_attr(feature = "debug", derive(Debug, Eq, Hash, PartialEq))]
#[repr(C)]
pub struct | {
pub version: u16,
pub length: u16,
pub cpu_plimit: u32,
pub gpu_plimit: u32,
pub mem_plimit: u32,
}
#[cfg_attr(feature = "debug", derive(Debug, Eq, Hash, PartialEq))]
#[repr(C)]
pub struct KeyData_keyInfo_t {
pub data_size: u32,
pub data_type: u32,
pub data_attributes: u8,
}
#[cfg_attr(feature = "debug", derive(Debug, Eq, Hash, PartialEq))]
#[repr(C)]
pub struct KeyData_t {
pub key: u32,
pub vers: KeyData_vers_t,
pub p_limit_data: KeyData_pLimitData_t,
pub key_info: KeyData_keyInfo_t,
pub result: u8,
pub status: u8,
pub data8: u8,
pub data32: u32,
pub bytes: [i8; 32], // SMCBytes_t
}
#[allow(dead_code)]
pub const KERNEL_INDEX_SMC: i32 = 2;
#[allow(dead_code)]
pub const SMC_CMD_READ_KEYINFO: u8 = 9;
#[allow(dead_code)]
pub const SMC_CMD_READ_BYTES: u8 = 5;
}
#[cfg(feature = "apple-sandbox")]
mod io_service {}
#[cfg(all(
not(feature = "apple-sandbox"),
any(target_arch = "x86", target_arch = "x86_64")
))]
pub use io_service::*;
#[cfg(all(not(feature = "apple-sandbox"), target_arch = "aarch64"))]
mod io_service {
use std::ptr::null;
use super::CFStringCreateWithCStringNoCopy;
use core_foundation_sys::array::CFArrayRef;
use core_foundation_sys::base::{CFAllocatorRef, CFRelease};
use core_foundation_sys::dictionary::{
kCFTypeDictionaryKeyCallBacks, kCFTypeDictionaryValueCallBacks, CFDictionaryCreate,
CFDictionaryRef,
};
use core_foundation_sys::number::{kCFNumberSInt32Type, CFNumberCreate};
use core_foundation_sys::string::CFStringRef;
#[repr(C)]
pub struct __IOHIDServiceClient(libc::c_void);
pub type IOHIDServiceClientRef = *const __IOHIDServiceClient;
#[repr(C)]
pub struct __IOHIDEventSystemClient(libc::c_void);
pub type IOHIDEventSystemClientRef = *const __IOHIDEventSystemClient;
#[repr(C)]
pub struct __IOHIDEvent(libc::c_void);
pub type IOHIDEventRef = *const __IOHIDEvent;
#[allow(non_upper_case_globals)]
pub const kIOHIDEventTypeTemperature: i64 = 15;
#[inline]
#[allow(non_snake_case)]
pub fn IOHIDEventFieldBase(event_type: i64) -> i64 {
event_type << 16
}
#[cfg(not(feature = "apple-sandbox"))]
extern "C" {
pub fn IOHIDEventSystemClientCreate(allocator: CFAllocatorRef)
-> IOHIDEventSystemClientRef;
pub fn IOHIDEventSystemClientSetMatching(
client: IOHIDEventSystemClientRef,
matches: CFDictionaryRef,
) -> i32;
pub fn IOHIDEventSystemClientCopyServices(client: IOHIDEventSystemClientRef) -> CFArrayRef;
pub fn IOHIDServiceClientCopyProperty(
service: IOHIDServiceClientRef,
key: CFStringRef,
) -> CFStringRef;
pub fn IOHIDServiceClientCopyEvent(
service: IOHIDServiceClientRef,
v0: i64,
v1: i32,
v2: i64,
) -> IOHIDEventRef;
pub fn IOHIDEventGetFloatValue(event: IOHIDEventRef, field: i64) -> f64;
}
pub(crate) const HID_DEVICE_PROPERTY_PRODUCT: &[u8] = b"Product\0";
pub(crate) const HID_DEVICE_PROPERTY_PRIMARY_USAGE: &[u8] = b"PrimaryUsage\0";
pub(crate) const HID_DEVICE_PROPERTY | KeyData_pLimitData_t | identifier_name |
dma.py | ),strides=(2,2), data_format = 'channels_last', input_shape = input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(BatchNormalization(axis=-1,momentum=0.99,epsilon=0.001))
model.add(Conv2D(64,(5,5),padding='valid'))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(BatchNormalization(axis=-1,momentum=0.99,epsilon=0.001))
model.add(Conv2D(64,(3,3),strides=(1,1)))
model.add(Activation('relu'))
model.add(Conv2D(64,(3,3),strides=(1,1)))
model.add(Activation('relu'))
model.add(Flatten())
model.add(Dense(1000))
model.add(Activation('relu'))
model.add(Dense(256))
model.add(Activation('relu'))
return model
def generate_sequences(n_batches, images1,images2,images3,images4,images5,labels,mean1,mean2,mean3,mean4,mean5,idxs):
# generate batches of samples
while True:
for bid in xrange(0, n_batches):
if bid == n_batches - 1:
batch_idxs = idxs[bid * batch_size:]
else:
batch_idxs = idxs[bid * batch_size: (bid + 1) * batch_size]
batch_length=len(batch_idxs)
X1= np.zeros((batch_length,224,224,3),np.float32)
X2= np.zeros((batch_length,224,224,3),np.float32)
X3= np.zeros((batch_length,224,224,3),np.float32)
X4= np.zeros((batch_length,224,224,3),np.float32)
X5= np.zeros((batch_length,224,224,3),np.float32)
y = labels[batch_idxs]
Y = np_utils.to_categorical(y, 2)
count=0
# for every image of a batch
for i in batch_idxs:
xx1 = images1[i, ...].astype(np.float32)
xx1 -= mean1
xx2 = images2[i, ...].astype(np.float32)
xx2 -= mean2
xx3 = images3[i, ...].astype(np.float32)
xx3 -= mean3
xx4 = images4[i, ...].astype(np.float32)
xx4 -= mean4
xx5 = images5[i, ...].astype(np.float32)
xx5 -= mean5
offset_x=random.randint(0,31)
offset_y=random.randint(0,31)
xx1=xx1[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx2=xx2[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx3=xx3[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx4=xx4[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx5=xx5[offset_x:offset_x+224,offset_y:offset_y+224,:]
X1[count,...]=xx1
X2[count,...]=xx2
X3[count,...]=xx3
X4[count,...]=xx4
X5[count,...]=xx5
count+=1
yield [X1,X2,X3,X4,X5],Y
def statistics_layer(xx):
print xx.shape
x_min=tf.reduce_min(xx,axis=1)
print x_min.shape
x_max=tf.reduce_max(xx,axis=1)
x_sum=tf.reduce_sum(xx,axis=1)
x_mean=tf.reduce_mean(xx,axis=1)
x_sta=tf.concat([x_min,x_max,x_sum,x_mean],1)
print x_sta.shape
return x_sta
if __name__ == '__main__':
input_shape = (224, 224, 3)
num_classes = 2
model1 = build_net(input_shape)
model1.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model2 = build_net(input_shape)
model2.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model3 = build_net(input_shape)
model3.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model4 = build_net(input_shape)
model4.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model5 = build_net(input_shape)
model5.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
merged=Merge(layers=[model1,model2,model3,model4,model5],mode='concat',concat_axis=1)
model=Sequential()
model.add(merged)
model.add(Reshape((5,256),input_shape=(1280,)))
print 'merged'
model.add(Lambda(statistics_layer,output_shape=None))
# model.add(Flatten())
model.add(Activation('relu'))
model.add(Dense(2))
model.add(Activation('softmax'))
sgd = SGD(lr=0.001, momentum=0.9, decay=1e-5, nesterov=True)
model.compile(optimizer=sgd,loss='binary_crossentropy', metrics=['accuracy'])
data_format = 'channels_last'
batch_size = 64
nb_epoch = 150
validation_ratio = 0.1
# training
path_train1='/home/liuhuihui/DMA_Net/randomCrop/train_data1.hdf5'
path_train2='/home/liuhuihui/DMA_Net/randomCrop/train_data2.hdf5'
path_train3='/home/liuhuihui/DMA_Net/randomCrop/train_data3.hdf5'
path_train4='/home/liuhuihui/DMA_Net/randomCrop/train_data4.hdf5'
path_train5='/home/liuhuihui/DMA_Net/randomCrop/train_data5.hdf5'
with h5py.File(path_train1, 'r') as train_file1,h5py.File(path_train2, 'r') as train_file2,h5py.File(path_train3, 'r') as train_file3,h5py.File(path_train4, 'r') as train_file4,h5py.File(path_train5, 'r') as train_file5:
print 'enter'
images1 = train_file1['images']
labels = train_file1['labels']
mean1= train_file1['mean'][...]
idxs = range(len(images1))
train_idxs = idxs[: int(len(images1) * (1 - validation_ratio))]
validation_idxs = idxs[int(len(images1) * (1 - validation_ratio)) :]
images2 = train_file2['images']
mean2 = train_file2['mean'][...]
images3 = train_file3['images']
mean3 = train_file3['mean'][...]
images4 = train_file4['images']
mean4 = train_file4['mean'][...]
images5 = train_file5['images']
mean5 = train_file5['mean'][...]
n_train_batches = len(train_idxs) // batch_size
n_remainder = len(train_idxs) % batch_size
if n_remainder:
n_train_batches = n_train_batches + 1
train_generator = generate_sequences(n_train_batches,images1,images2,images3,images4,images5,labels,mean1,mean2,mean3,mean4,mean5,train_idxs)
n_validation_batches = len(validation_idxs) // batch_size
n_remainder = len(validation_idxs) % batch_size
if n_remainder:
|
validation_generator = generate_sequences(n_train_batches,images1,images2,images3,images4,images5,labels,mean1,mean2,mean3,mean4,mean5,train_idxs)
ReduceLR = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=10,
verbose=1, mode='min', epsilon=1e-4, cooldown=0, min_lr=0)
Checkpoint = ModelCheckpoint(filepath='/home/liuhuihui/DMA_Net/data_avatest/weight_dma2.h5', monitor='val_loss', verbose=1,
save_best_only=True, save_weights_only=False,mode='min', period=1)
earlyStop = EarlyStopping(monitor='val_loss',pat | n_validation_batches = n_validation_batches + 1 | conditional_block |
dma.py | ),strides=(2,2), data_format = 'channels_last', input_shape = input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(BatchNormalization(axis=-1,momentum=0.99,epsilon=0.001))
model.add(Conv2D(64,(5,5),padding='valid'))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(BatchNormalization(axis=-1,momentum=0.99,epsilon=0.001))
model.add(Conv2D(64,(3,3),strides=(1,1)))
model.add(Activation('relu'))
model.add(Conv2D(64,(3,3),strides=(1,1)))
model.add(Activation('relu'))
model.add(Flatten())
model.add(Dense(1000))
model.add(Activation('relu'))
model.add(Dense(256))
model.add(Activation('relu'))
return model
def generate_sequences(n_batches, images1,images2,images3,images4,images5,labels,mean1,mean2,mean3,mean4,mean5,idxs):
# generate batches of samples
while True:
for bid in xrange(0, n_batches):
if bid == n_batches - 1:
batch_idxs = idxs[bid * batch_size:]
else:
batch_idxs = idxs[bid * batch_size: (bid + 1) * batch_size]
batch_length=len(batch_idxs)
X1= np.zeros((batch_length,224,224,3),np.float32)
X2= np.zeros((batch_length,224,224,3),np.float32)
X3= np.zeros((batch_length,224,224,3),np.float32)
X4= np.zeros((batch_length,224,224,3),np.float32)
X5= np.zeros((batch_length,224,224,3),np.float32)
y = labels[batch_idxs]
Y = np_utils.to_categorical(y, 2)
count=0
# for every image of a batch
for i in batch_idxs:
xx1 = images1[i, ...].astype(np.float32)
xx1 -= mean1
xx2 = images2[i, ...].astype(np.float32)
xx2 -= mean2
xx3 = images3[i, ...].astype(np.float32)
xx3 -= mean3
xx4 = images4[i, ...].astype(np.float32)
xx4 -= mean4
xx5 = images5[i, ...].astype(np.float32)
xx5 -= mean5
offset_x=random.randint(0,31)
offset_y=random.randint(0,31)
xx1=xx1[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx2=xx2[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx3=xx3[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx4=xx4[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx5=xx5[offset_x:offset_x+224,offset_y:offset_y+224,:]
X1[count,...]=xx1
X2[count,...]=xx2
X3[count,...]=xx3
X4[count,...]=xx4
X5[count,...]=xx5
count+=1
yield [X1,X2,X3,X4,X5],Y
def | (xx):
print xx.shape
x_min=tf.reduce_min(xx,axis=1)
print x_min.shape
x_max=tf.reduce_max(xx,axis=1)
x_sum=tf.reduce_sum(xx,axis=1)
x_mean=tf.reduce_mean(xx,axis=1)
x_sta=tf.concat([x_min,x_max,x_sum,x_mean],1)
print x_sta.shape
return x_sta
if __name__ == '__main__':
input_shape = (224, 224, 3)
num_classes = 2
model1 = build_net(input_shape)
model1.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model2 = build_net(input_shape)
model2.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model3 = build_net(input_shape)
model3.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model4 = build_net(input_shape)
model4.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model5 = build_net(input_shape)
model5.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
merged=Merge(layers=[model1,model2,model3,model4,model5],mode='concat',concat_axis=1)
model=Sequential()
model.add(merged)
model.add(Reshape((5,256),input_shape=(1280,)))
print 'merged'
model.add(Lambda(statistics_layer,output_shape=None))
# model.add(Flatten())
model.add(Activation('relu'))
model.add(Dense(2))
model.add(Activation('softmax'))
sgd = SGD(lr=0.001, momentum=0.9, decay=1e-5, nesterov=True)
model.compile(optimizer=sgd,loss='binary_crossentropy', metrics=['accuracy'])
data_format = 'channels_last'
batch_size = 64
nb_epoch = 150
validation_ratio = 0.1
# training
path_train1='/home/liuhuihui/DMA_Net/randomCrop/train_data1.hdf5'
path_train2='/home/liuhuihui/DMA_Net/randomCrop/train_data2.hdf5'
path_train3='/home/liuhuihui/DMA_Net/randomCrop/train_data3.hdf5'
path_train4='/home/liuhuihui/DMA_Net/randomCrop/train_data4.hdf5'
path_train5='/home/liuhuihui/DMA_Net/randomCrop/train_data5.hdf5'
with h5py.File(path_train1, 'r') as train_file1,h5py.File(path_train2, 'r') as train_file2,h5py.File(path_train3, 'r') as train_file3,h5py.File(path_train4, 'r') as train_file4,h5py.File(path_train5, 'r') as train_file5:
print 'enter'
images1 = train_file1['images']
labels = train_file1['labels']
mean1= train_file1['mean'][...]
idxs = range(len(images1))
train_idxs = idxs[: int(len(images1) * (1 - validation_ratio))]
validation_idxs = idxs[int(len(images1) * (1 - validation_ratio)) :]
images2 = train_file2['images']
mean2 = train_file2['mean'][...]
images3 = train_file3['images']
mean3 = train_file3['mean'][...]
images4 = train_file4['images']
mean4 = train_file4['mean'][...]
images5 = train_file5['images']
mean5 = train_file5['mean'][...]
n_train_batches = len(train_idxs) // batch_size
n_remainder = len(train_idxs) % batch_size
if n_remainder:
n_train_batches = n_train_batches + 1
train_generator = generate_sequences(n_train_batches,images1,images2,images3,images4,images5,labels,mean1,mean2,mean3,mean4,mean5,train_idxs)
n_validation_batches = len(validation_idxs) // batch_size
n_remainder = len(validation_idxs) % batch_size
if n_remainder:
n_validation_batches = n_validation_batches + 1
validation_generator = generate_sequences(n_train_batches,images1,images2,images3,images4,images5,labels,mean1,mean2,mean3,mean4,mean5,train_idxs)
ReduceLR = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=10,
verbose=1, mode='min', epsilon=1e-4, cooldown=0, min_lr=0)
Checkpoint = ModelCheckpoint(filepath='/home/liuhuihui/DMA_Net/data_avatest/weight_dma2.h5', monitor='val_loss', verbose=1,
save_best_only=True, save_weights_only=False,mode='min', period=1)
earlyStop = EarlyStopping(monitor='val_loss', | statistics_layer | identifier_name |
dma.py | ),strides=(2,2), data_format = 'channels_last', input_shape = input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(BatchNormalization(axis=-1,momentum=0.99,epsilon=0.001))
model.add(Conv2D(64,(5,5),padding='valid'))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(BatchNormalization(axis=-1,momentum=0.99,epsilon=0.001))
model.add(Conv2D(64,(3,3),strides=(1,1)))
model.add(Activation('relu'))
model.add(Conv2D(64,(3,3),strides=(1,1)))
model.add(Activation('relu'))
model.add(Flatten())
model.add(Dense(1000))
model.add(Activation('relu'))
model.add(Dense(256))
model.add(Activation('relu'))
return model
def generate_sequences(n_batches, images1,images2,images3,images4,images5,labels,mean1,mean2,mean3,mean4,mean5,idxs):
# generate batches of samples
while True:
for bid in xrange(0, n_batches):
if bid == n_batches - 1:
batch_idxs = idxs[bid * batch_size:]
else:
batch_idxs = idxs[bid * batch_size: (bid + 1) * batch_size]
batch_length=len(batch_idxs)
X1= np.zeros((batch_length,224,224,3),np.float32)
X2= np.zeros((batch_length,224,224,3),np.float32)
X3= np.zeros((batch_length,224,224,3),np.float32)
X4= np.zeros((batch_length,224,224,3),np.float32)
X5= np.zeros((batch_length,224,224,3),np.float32)
y = labels[batch_idxs]
Y = np_utils.to_categorical(y, 2)
count=0
# for every image of a batch
for i in batch_idxs:
xx1 = images1[i, ...].astype(np.float32)
xx1 -= mean1
xx2 = images2[i, ...].astype(np.float32)
xx2 -= mean2
xx3 = images3[i, ...].astype(np.float32)
xx3 -= mean3
xx4 = images4[i, ...].astype(np.float32)
xx4 -= mean4
xx5 = images5[i, ...].astype(np.float32)
xx5 -= mean5
offset_x=random.randint(0,31)
offset_y=random.randint(0,31)
xx1=xx1[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx2=xx2[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx3=xx3[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx4=xx4[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx5=xx5[offset_x:offset_x+224,offset_y:offset_y+224,:]
X1[count,...]=xx1
X2[count,...]=xx2
X3[count,...]=xx3
X4[count,...]=xx4
X5[count,...]=xx5
count+=1
yield [X1,X2,X3,X4,X5],Y
def statistics_layer(xx):
print xx.shape
x_min=tf.reduce_min(xx,axis=1)
print x_min.shape
x_max=tf.reduce_max(xx,axis=1)
x_sum=tf.reduce_sum(xx,axis=1)
x_mean=tf.reduce_mean(xx,axis=1)
x_sta=tf.concat([x_min,x_max,x_sum,x_mean],1)
print x_sta.shape
return x_sta
if __name__ == '__main__':
input_shape = (224, 224, 3)
num_classes = 2
model1 = build_net(input_shape)
model1.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model2 = build_net(input_shape)
model2.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model3 = build_net(input_shape)
model3.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model4 = build_net(input_shape)
model4.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model5 = build_net(input_shape)
model5.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
merged=Merge(layers=[model1,model2,model3,model4,model5],mode='concat',concat_axis=1)
model=Sequential()
model.add(merged)
model.add(Reshape((5,256),input_shape=(1280,)))
print 'merged'
model.add(Lambda(statistics_layer,output_shape=None))
# model.add(Flatten())
model.add(Activation('relu'))
model.add(Dense(2))
model.add(Activation('softmax'))
sgd = SGD(lr=0.001, momentum=0.9, decay=1e-5, nesterov=True)
model.compile(optimizer=sgd,loss='binary_crossentropy', metrics=['accuracy'])
data_format = 'channels_last'
batch_size = 64
nb_epoch = 150
validation_ratio = 0.1
# training
path_train1='/home/liuhuihui/DMA_Net/randomCrop/train_data1.hdf5'
path_train2='/home/liuhuihui/DMA_Net/randomCrop/train_data2.hdf5'
path_train3='/home/liuhuihui/DMA_Net/randomCrop/train_data3.hdf5'
path_train4='/home/liuhuihui/DMA_Net/randomCrop/train_data4.hdf5'
path_train5='/home/liuhuihui/DMA_Net/randomCrop/train_data5.hdf5'
with h5py.File(path_train1, 'r') as train_file1,h5py.File(path_train2, 'r') as train_file2,h5py.File(path_train3, 'r') as train_file3,h5py.File(path_train4, 'r') as train_file4,h5py.File(path_train5, 'r') as train_file5:
print 'enter'
images1 = train_file1['images']
labels = train_file1['labels']
mean1= train_file1['mean'][...]
idxs = range(len(images1))
train_idxs = idxs[: int(len(images1) * (1 - validation_ratio))] | validation_idxs = idxs[int(len(images1) * (1 - validation_ratio)) :]
images2 = train_file2['images']
mean2 = train_file2['mean'][...]
images3 = train_file3['images']
mean3 = train_file3['mean'][...]
images4 = train_file4['images']
mean4 = train_file4['mean'][...]
images5 = train_file5['images']
mean5 = train_file5['mean'][...]
n_train_batches = len(train_idxs) // batch_size
n_remainder = len(train_idxs) % batch_size
if n_remainder:
n_train_batches = n_train_batches + 1
train_generator = generate_sequences(n_train_batches,images1,images2,images3,images4,images5,labels,mean1,mean2,mean3,mean4,mean5,train_idxs)
n_validation_batches = len(validation_idxs) // batch_size
n_remainder = len(validation_idxs) % batch_size
if n_remainder:
n_validation_batches = n_validation_batches + 1
validation_generator = generate_sequences(n_train_batches,images1,images2,images3,images4,images5,labels,mean1,mean2,mean3,mean4,mean5,train_idxs)
ReduceLR = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=10,
verbose=1, mode='min', epsilon=1e-4, cooldown=0, min_lr=0)
Checkpoint = ModelCheckpoint(filepath='/home/liuhuihui/DMA_Net/data_avatest/weight_dma2.h5', monitor='val_loss', verbose=1,
save_best_only=True, save_weights_only=False,mode='min', period=1)
earlyStop = EarlyStopping(monitor='val_loss',pat | random_line_split |
|
dma.py | ),strides=(2,2), data_format = 'channels_last', input_shape = input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(BatchNormalization(axis=-1,momentum=0.99,epsilon=0.001))
model.add(Conv2D(64,(5,5),padding='valid'))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(BatchNormalization(axis=-1,momentum=0.99,epsilon=0.001))
model.add(Conv2D(64,(3,3),strides=(1,1)))
model.add(Activation('relu'))
model.add(Conv2D(64,(3,3),strides=(1,1)))
model.add(Activation('relu'))
model.add(Flatten())
model.add(Dense(1000))
model.add(Activation('relu'))
model.add(Dense(256))
model.add(Activation('relu'))
return model
def generate_sequences(n_batches, images1,images2,images3,images4,images5,labels,mean1,mean2,mean3,mean4,mean5,idxs):
# generate batches of samples
| xx1 = images1[i, ...].astype(np.float32)
xx1 -= mean1
xx2 = images2[i, ...].astype(np.float32)
xx2 -= mean2
xx3 = images3[i, ...].astype(np.float32)
xx3 -= mean3
xx4 = images4[i, ...].astype(np.float32)
xx4 -= mean4
xx5 = images5[i, ...].astype(np.float32)
xx5 -= mean5
offset_x=random.randint(0,31)
offset_y=random.randint(0,31)
xx1=xx1[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx2=xx2[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx3=xx3[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx4=xx4[offset_x:offset_x+224,offset_y:offset_y+224,:]
xx5=xx5[offset_x:offset_x+224,offset_y:offset_y+224,:]
X1[count,...]=xx1
X2[count,...]=xx2
X3[count,...]=xx3
X4[count,...]=xx4
X5[count,...]=xx5
count+=1
yield [X1,X2,X3,X4,X5],Y
def statistics_layer(xx):
print xx.shape
x_min=tf.reduce_min(xx,axis=1)
print x_min.shape
x_max=tf.reduce_max(xx,axis=1)
x_sum=tf.reduce_sum(xx,axis=1)
x_mean=tf.reduce_mean(xx,axis=1)
x_sta=tf.concat([x_min,x_max,x_sum,x_mean],1)
print x_sta.shape
return x_sta
if __name__ == '__main__':
input_shape = (224, 224, 3)
num_classes = 2
model1 = build_net(input_shape)
model1.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model2 = build_net(input_shape)
model2.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model3 = build_net(input_shape)
model3.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model4 = build_net(input_shape)
model4.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
model5 = build_net(input_shape)
model5.load_weights('/home/liuhuihui/CUHKPQ_DMA/ava/model_ini.h5',by_name=True)
merged=Merge(layers=[model1,model2,model3,model4,model5],mode='concat',concat_axis=1)
model=Sequential()
model.add(merged)
model.add(Reshape((5,256),input_shape=(1280,)))
print 'merged'
model.add(Lambda(statistics_layer,output_shape=None))
# model.add(Flatten())
model.add(Activation('relu'))
model.add(Dense(2))
model.add(Activation('softmax'))
sgd = SGD(lr=0.001, momentum=0.9, decay=1e-5, nesterov=True)
model.compile(optimizer=sgd,loss='binary_crossentropy', metrics=['accuracy'])
data_format = 'channels_last'
batch_size = 64
nb_epoch = 150
validation_ratio = 0.1
# training
path_train1='/home/liuhuihui/DMA_Net/randomCrop/train_data1.hdf5'
path_train2='/home/liuhuihui/DMA_Net/randomCrop/train_data2.hdf5'
path_train3='/home/liuhuihui/DMA_Net/randomCrop/train_data3.hdf5'
path_train4='/home/liuhuihui/DMA_Net/randomCrop/train_data4.hdf5'
path_train5='/home/liuhuihui/DMA_Net/randomCrop/train_data5.hdf5'
with h5py.File(path_train1, 'r') as train_file1,h5py.File(path_train2, 'r') as train_file2,h5py.File(path_train3, 'r') as train_file3,h5py.File(path_train4, 'r') as train_file4,h5py.File(path_train5, 'r') as train_file5:
print 'enter'
images1 = train_file1['images']
labels = train_file1['labels']
mean1= train_file1['mean'][...]
idxs = range(len(images1))
train_idxs = idxs[: int(len(images1) * (1 - validation_ratio))]
validation_idxs = idxs[int(len(images1) * (1 - validation_ratio)) :]
images2 = train_file2['images']
mean2 = train_file2['mean'][...]
images3 = train_file3['images']
mean3 = train_file3['mean'][...]
images4 = train_file4['images']
mean4 = train_file4['mean'][...]
images5 = train_file5['images']
mean5 = train_file5['mean'][...]
n_train_batches = len(train_idxs) // batch_size
n_remainder = len(train_idxs) % batch_size
if n_remainder:
n_train_batches = n_train_batches + 1
train_generator = generate_sequences(n_train_batches,images1,images2,images3,images4,images5,labels,mean1,mean2,mean3,mean4,mean5,train_idxs)
n_validation_batches = len(validation_idxs) // batch_size
n_remainder = len(validation_idxs) % batch_size
if n_remainder:
n_validation_batches = n_validation_batches + 1
validation_generator = generate_sequences(n_validation_batches,images1,images2,images3,images4,images5,labels,mean1,mean2,mean3,mean4,mean5,validation_idxs)
ReduceLR = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=10,
verbose=1, mode='min', epsilon=1e-4, cooldown=0, min_lr=0)
Checkpoint = ModelCheckpoint(filepath='/home/liuhuihui/DMA_Net/data_avatest/weight_dma2.h5', monitor='val_loss', verbose=1,
save_best_only=True, save_weights_only=False,mode='min', period=1)
earlyStop = EarlyStopping(monitor='val_loss', | while True:
for bid in xrange(0, n_batches):
if bid == n_batches - 1:
batch_idxs = idxs[bid * batch_size:]
else:
batch_idxs = idxs[bid * batch_size: (bid + 1) * batch_size]
batch_length=len(batch_idxs)
X1= np.zeros((batch_length,224,224,3),np.float32)
X2= np.zeros((batch_length,224,224,3),np.float32)
X3= np.zeros((batch_length,224,224,3),np.float32)
X4= np.zeros((batch_length,224,224,3),np.float32)
X5= np.zeros((batch_length,224,224,3),np.float32)
y = labels[batch_idxs]
Y = np_utils.to_categorical(y, 2)
count=0
# for every image of a batch
for i in batch_idxs: | identifier_body |
test_helper.go | that has a path by the value of TEST_ENV_FILE_PATH environment variable.
func LoadEnvFile(t *testing.T) error {
envFileName := os.Getenv(TestEnvFilePath)
err := godotenv.Load(envFileName)
if err != nil {
return fmt.Errorf("Can not read .env file: %s", envFileName)
}
return nil
}
// InitializeTestValues fills values from environment variables.
func InitializeTestValues(s interface{}) interface{} {
fields := reflect.ValueOf(s).Elem()
// iterate across all configuration properties
for i := 0; i < fields.NumField(); i++ {
typeField := fields.Type().Field(i)
environmentVariablesKey := typeField.Tag.Get("env")
if fields.Field(i).Kind() == reflect.String {
// check if we want a property inside a complex object
propertyKey, exists := typeField.Tag.Lookup("property")
if exists {
// get object string
objectString := os.Getenv(environmentVariablesKey)
// grab property value inside string
propertyValue := getPropertyValueFromString(objectString, propertyKey)
// set the value in the correct field
fields.Field(i).SetString(propertyValue)
} else {
fields.Field(i).SetString(os.Getenv(environmentVariablesKey))
}
}
}
return s
}
func getPropertyValueFromString(object string, propertyKey string) string {
// compile regex to look for key="value"
regexString := fmt.Sprintf(`%s=\"(.*?)\"`, propertyKey)
re := regexp.MustCompile(regexString)
match := string(re.Find([]byte(object)))
if len(match) == 0 {
log.Printf("Warning: Could not find property with key %s\n", propertyKey)
return ""
}
match = strings.Replace(match, "\"", "", -1)
propertyValue := strings.Split(match, "=")[1]
return propertyValue
}
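// Illustrative sketch (hypothetical type, field and variable names; not part of the original
// helpers): a struct wired for InitializeTestValues using the `env` and `property` tags.
//
//	type sampleConfig struct {
//		Region   string `env:"TEST_REGION"`
//		TenantID string `env:"TEST_SP_OBJECT" property:"tenant_id"`
//	}
//	cfg := &sampleConfig{}
//	_ = InitializeTestValues(cfg) // Region <- $TEST_REGION; TenantID <- tenant_id="..." inside $TEST_SP_OBJECT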
// ValidateTestValues validates that all parameters have values. skipGenerated allows ignoring fields that have the `generated:"true"` tag.
func ValidateTestValues(s interface{}, skipGenerated bool) bool {
fields := reflect.ValueOf(s).Elem()
flag := true
for i := 0; i < fields.NumField(); i++ {
value := fields.Field(i)
typeField := fields.Type().Field(i)
if !validateTags(typeField.Tag) {
log.Printf("Warning: Struct Field %s has invalid tags.\n", typeField.Name)
flag = false
continue
}
if value.Kind() == reflect.String {
if len(value.String()) == 0 {
if !anyTagExists(typeField.Tag) {
continue
} else if skipGenerated && tagExists(typeField.Tag, "env") && tagExists(typeField.Tag, "generated") {
log.Printf("Warning: Struct Field %s (env:%s) doesn't have any value. (Generated = true. skipped.)\n", typeField.Name, typeField.Tag.Get("env"))
continue
} else if skipGenerated && tagExists(typeField.Tag, "kv") && tagExists(typeField.Tag, "generated") {
log.Printf("Warning: Struct Field %s (kv:%s) doesn't have any value. (Generated = true. skipped.)\n", typeField.Name, typeField.Tag.Get("kv"))
continue
} else if tagExists(typeField.Tag, "kv") {
log.Printf("Warning: Struct Field %s (kv:%s) doesn't have any value.\n", typeField.Name, typeField.Tag.Get("kv"))
flag = false
} else if tagExists(typeField.Tag, "val") {
log.Printf("Warning: Struct Field %s doesn't have any value.\n", typeField.Name)
flag = false
} else {
log.Printf("Warning: Struct Field %s (env:%s) doesn't have any value.\n", typeField.Name, typeField.Tag.Get("env"))
flag = false
}
}
} else if value.Kind() == reflect.Map {
if value.IsNil() {
log.Printf("Warning: Struct Field %s doesn't have any value.\n", typeField.Name)
flag = !tagExists(typeField.Tag, "val")
}
} else if value.Kind() == reflect.Slice {
if value.IsNil() {
log.Printf("Warning: Array Field %s doesn't have any value.\n", typeField.Name)
flag = !tagExists(typeField.Tag, "val")
}
} else if value.Kind() == reflect.Bool || value.Kind() == reflect.Int32 || value.Kind() == reflect.Int64 {
// all of these have default "zero" values so they are always valid
} else {
log.Printf("Warning: Found Field %s of type %s which is not allowed for Config Structures.\n", value.Kind(), typeField.Name)
return false
}
}
return flag
}
// FetchKeyVaultSecretE fills values from Key Vault
func FetchKeyVaultSecretE(s interface{}) (interface{}, error) {
keyVaultName, err := getKeyVaultName(s)
if err != nil {
return nil, err
}
fields := reflect.ValueOf(s).Elem()
for i := 0; i < fields.NumField(); i++ {
typeField := fields.Type().Field(i)
if typeField.Tag.Get("kv") != "" {
secretName := typeField.Tag.Get("kv")
if fields.Field(i).Kind() == reflect.String {
secret, err := GetKeyVaultSecret(keyVaultName, secretName)
if err != nil {
return nil, err
}
fields.Field(i).SetString(secret)
}
}
}
return s, nil
}
func getKeyVaultName(s interface{}) (string, error) {
structName := reflect.TypeOf(s)
fields := reflect.ValueOf(s).Elem()
for i := 0; i < fields.NumField(); i++ {
typeField := fields.Type().Field(i)
if len(typeField.Tag.Get("kvname")) != 0 {
if fields.Field(i).Kind() == reflect.String {
kvname := fields.Field(i).String()
kvNameField := fields.Type().Field(i).Name
if len(kvname) == 0 {
return "", fmt.Errorf("Empty KeyVault name is not allowed. Please add `kvname` on your struct %s.%s", structName, kvNameField)
}
return fields.Field(i).String(), nil
}
}
}
return "", fmt.Errorf("Can not find kvname filed on your struct %s", structName)
}
// tagExists tests whether the given tag is present.
func tagExists(tag reflect.StructTag, tagName string) bool {
_, ok := tag.Lookup(tagName)
return ok
}
// validateTags tests whether any tags are invalid
func validateTags(tag reflect.StructTag) bool {
val, isVal := tag.Lookup("val")
generated, isGenerated := tag.Lookup("generated")
if isVal {
v, err := strconv.ParseBool(val)
if err != nil || !v {
log.Printf("Warning: Value of \"val\" tag should be true")
return false
}
}
if isGenerated {
v, err := strconv.ParseBool(generated)
if err != nil || !v {
log.Printf("Warning: Value of \"generated\" tag should be true")
return false
}
}
return true
}
// anyTagExists tests whether any of the known tags are present.
func anyTagExists(tag reflect.StructTag) bool {
_, isEnv := tag.Lookup("env")
_, isKv := tag.Lookup("kv")
_, isVal := tag.Lookup("val")
return isEnv || isKv || isVal
}
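// Illustrative summary of how these tags interact (hypothetical field names), based on the
// validation logic in ValidateTestValues and the Key Vault helpers above:
//
//	type exampleConfig struct {
//		KeyVaultName string            `kvname:"my-test-kv"`                 // Key Vault used by FetchKeyVaultSecretE
//		Location     string            `env:"TEST_LOCATION"`                 // filled from the environment
//		ClientSecret string            `kv:"client-secret" generated:"true"` // filled from Key Vault; may stay empty when skipGenerated is true
//		ExtraTags    map[string]string `val:"true"`                          // must be non-nil to pass validation
//	}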
// GetYamlVariables reads the yaml file at filePath and returns values specified by interface s
func GetYamlVariables(filePath string, s interface{}) (interface{}, error) {
// read yaml file
yamlFile, err := ioutil.ReadFile(filePath)
if err != nil {
return nil, fmt.Errorf("Path to Yaml file not set or invalid: %s", filePath)
}
// parse yaml file
m := make(map[interface{}]interface{})
err = yaml.UnmarshalStrict(yamlFile, &m)
if err != nil {
return nil, fmt.Errorf("Error parsing Yaml File %s: %s", filePath, err.Error())
}
err = mapstructure.Decode(m, &s)
if err != nil {
return nil, fmt.Errorf("Error decoding Yaml File %s: %s", filePath, err.Error())
}
return s, nil
}
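// Illustrative usage (hypothetical file contents and struct; not part of the original helpers):
// for a YAML file containing
//
//	subscription_id: "00000000-0000-0000-0000-000000000000"
//	location: "westeurope"
//
// a matching struct can be decoded with
//
//	type yamlVars struct {
//		SubscriptionID string `mapstructure:"subscription_id"`
//		Location       string `mapstructure:"location"`
//	}
//	vars := &yamlVars{}
//	_, err := GetYamlVariables("testdata/vars.yaml", vars)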
// CheckIfEndpointIsResponding tests an endpoint for availability. Returns true if the endpoint is available, false otherwise
func CheckIfEndpointIsResponding(t *testing.T, endpoint string) bool {
// we ignore certificates at this point
tlsConfig := tls.Config{}
tlsConfig.InsecureSkipVerify = true
err := http_helper.HttpGetWithRetryWithCustomValidationE(
t,
fmt.Sprintf("https://%s", endpoint),
&tlsConfig,
1,
10*time.Second,
func(statusCode int, body string) bool {
if statusCode == 200 {
return true
}
if statusCode == 404 |
return false
},
)
return err == nil
}
//CheckSQLConnectivity checks if we can successfully connect to a SQL Managed Instance, MySql server or Azure SQL | {
t.Log("Warning: 404 response from endpoint. Test will still PASS.")
return true
} | conditional_block |
test_helper.go | that has a path by the value of TEST_ENV_FILE_PATH environment variable.
func LoadEnvFile(t *testing.T) error {
envFileName := os.Getenv(TestEnvFilePath)
err := godotenv.Load(envFileName)
if err != nil {
return fmt.Errorf("Can not read .env file: %s", envFileName)
}
return nil
}
// InitializeTestValues fills values from environment variables.
func InitializeTestValues(s interface{}) interface{} | }
return s
}
func getPropertyValueFromString(object string, propertyKey string) string {
// compile regex to look for key="value"
regexString := fmt.Sprintf(`%s=\"(.*?)\"`, propertyKey)
re := regexp.MustCompile(regexString)
match := string(re.Find([]byte(object)))
if len(match) == 0 {
log.Printf("Warning: Could not find property with key %s\n", propertyKey)
return ""
}
match = strings.Replace(match, "\"", "", -1)
propertyValue := strings.Split(match, "=")[1]
return propertyValue
}
// ValidateTestValues validates that all parameters have values. skipGenerated allows ignoring fields that have the `generated:"true"` tag.
func ValidateTestValues(s interface{}, skipGenerated bool) bool {
fields := reflect.ValueOf(s).Elem()
flag := true
for i := 0; i < fields.NumField(); i++ {
value := fields.Field(i)
typeField := fields.Type().Field(i)
if !validateTags(typeField.Tag) {
log.Printf("Warning: Struct Field %s has invalid tags.\n", typeField.Name)
flag = false
continue
}
if value.Kind() == reflect.String {
if len(value.String()) == 0 {
if !anyTagExists(typeField.Tag) {
continue
} else if skipGenerated && tagExists(typeField.Tag, "env") && tagExists(typeField.Tag, "generated") {
log.Printf("Warning: Struct Field %s (env:%s) doesn't have any value. (Generated = true. skipped.)\n", typeField.Name, typeField.Tag.Get("env"))
continue
} else if skipGenerated && tagExists(typeField.Tag, "kv") && tagExists(typeField.Tag, "generated") {
log.Printf("Warning: Struct Field %s (kv:%s) doesn't have any value. (Generated = true. skipped.)\n", typeField.Name, typeField.Tag.Get("kv"))
continue
} else if tagExists(typeField.Tag, "kv") {
log.Printf("Warning: Struct Field %s (kv:%s) doesn't have any value.\n", typeField.Name, typeField.Tag.Get("kv"))
flag = false
} else if tagExists(typeField.Tag, "val") {
log.Printf("Warning: Struct Field %s doesn't have any value.\n", typeField.Name)
flag = false
} else {
log.Printf("Warning: Struct Field %s (env:%s) doesn't have any value.\n", typeField.Name, typeField.Tag.Get("env"))
flag = false
}
}
} else if value.Kind() == reflect.Map {
if value.IsNil() {
log.Printf("Warning: Struct Field %s doesn't have any value.\n", typeField.Name)
flag = !tagExists(typeField.Tag, "val")
}
} else if value.Kind() == reflect.Slice {
if value.IsNil() {
log.Printf("Warning: Array Field %s doesn't have any value.\n", typeField.Name)
flag = !tagExists(typeField.Tag, "val")
}
} else if value.Kind() == reflect.Bool || value.Kind() == reflect.Int32 || value.Kind() == reflect.Int64 {
// all of these have default "zero" values so they are always valid
} else {
log.Printf("Warning: Found Field %s of type %s which is not allowed for Config Structures.\n", value.Kind(), typeField.Name)
return false
}
}
return flag
}
// FetchKeyVaultSecretE fills values from Key Vault
func FetchKeyVaultSecretE(s interface{}) (interface{}, error) {
keyVaultName, err := getKeyVaultName(s)
if err != nil {
return nil, err
}
fields := reflect.ValueOf(s).Elem()
for i := 0; i < fields.NumField(); i++ {
typeField := fields.Type().Field(i)
if typeField.Tag.Get("kv") != "" {
secretName := typeField.Tag.Get("kv")
if fields.Field(i).Kind() == reflect.String {
secret, err := GetKeyVaultSecret(keyVaultName, secretName)
if err != nil {
return nil, err
}
fields.Field(i).SetString(secret)
}
}
}
return s, nil
}
func getKeyVaultName(s interface{}) (string, error) {
structName := reflect.TypeOf(s)
fields := reflect.ValueOf(s).Elem()
for i := 0; i < fields.NumField(); i++ {
typeField := fields.Type().Field(i)
if len(typeField.Tag.Get("kvname")) != 0 {
if fields.Field(i).Kind() == reflect.String {
kvname := fields.Field(i).String()
kvNameField := fields.Type().Field(i).Name
if len(kvname) == 0 {
return "", fmt.Errorf("Empty KeyVault name is not allowed. Please add `kvname` on your struct %s.%s", structName, kvNameField)
}
return fields.Field(i).String(), nil
}
}
}
return "", fmt.Errorf("Can not find kvname filed on your struct %s", structName)
}
// tagExists tests whether the given tag is present.
func tagExists(tag reflect.StructTag, tagName string) bool {
_, ok := tag.Lookup(tagName)
return ok
}
// validateTags tests whether any tags are invalid
func validateTags(tag reflect.StructTag) bool {
val, isVal := tag.Lookup("val")
generated, isGenerated := tag.Lookup("generated")
if isVal {
v, err := strconv.ParseBool(val)
if err != nil || !v {
log.Printf("Warning: Value of \"val\" tag should be true")
return false
}
}
if isGenerated {
v, err := strconv.ParseBool(generated)
if err != nil || !v {
log.Printf("Warning: Value of \"generated\" tag should be true")
return false
}
}
return true
}
// anyTagExists tests whether any of the known tags are present.
func anyTagExists(tag reflect.StructTag) bool {
_, isEnv := tag.Lookup("env")
_, isKv := tag.Lookup("kv")
_, isVal := tag.Lookup("val")
return isEnv || isKv || isVal
}
// GetYamlVariables reads the yaml file at filePath and returns values specified by interface s
func GetYamlVariables(filePath string, s interface{}) (interface{}, error) {
// read yaml file
yamlFile, err := ioutil.ReadFile(filePath)
if err != nil {
return nil, fmt.Errorf("Path to Yaml file not set or invalid: %s", filePath)
}
// parse yaml file
m := make(map[interface{}]interface{})
err = yaml.UnmarshalStrict(yamlFile, &m)
if err != nil {
return nil, fmt.Errorf("Error parsing Yaml File %s: %s", filePath, err.Error())
}
err = mapstructure.Decode(m, &s)
if err != nil {
return nil, fmt.Errorf("Error decoding Yaml File %s: %s", filePath, err.Error())
}
return s, nil
}
// CheckIfEndpointIsResponding tests an endpoint for availability. Returns true if the endpoint is available, false otherwise
func CheckIfEndpointIsResponding(t *testing.T, endpoint string) bool {
// we ignore certificates at this point
tlsConfig := tls.Config{}
tlsConfig.InsecureSkipVerify = true
err := http_helper.HttpGetWithRetryWithCustomValidationE(
t,
fmt.Sprintf("https://%s", endpoint),
&tlsConfig,
1,
10*time.Second,
func(statusCode int, body string) bool {
if statusCode == 200 {
return true
}
if statusCode == 404 {
t.Log("Warning: 404 response from endpoint. Test will still PASS.")
return true
}
return false
},
)
return err == nil
}
//CheckSQLConnectivity checks if we can successfully connect to a SQL Managed Instance, MySql server or Azure | {
fields := reflect.ValueOf(s).Elem()
// iterate across all configuration properties
for i := 0; i < fields.NumField(); i++ {
typeField := fields.Type().Field(i)
environmentVariablesKey := typeField.Tag.Get("env")
if fields.Field(i).Kind() == reflect.String {
// check if we want a property inside a complex object
propertyKey, exists := typeField.Tag.Lookup("property")
if exists {
// get object string
objectString := os.Getenv(environmentVariablesKey)
// grab property value inside string
propertyValue := getPropertyValueFromString(objectString, propertyKey)
// set the value in the correct field
fields.Field(i).SetString(propertyValue)
} else {
fields.Field(i).SetString(os.Getenv(environmentVariablesKey))
}
}
| identifier_body |
test_helper.go | }
func getPropertyValueFromString(object string, propertyKey string) string {
// compile regex to look for key="value"
regexString := fmt.Sprintf(`%s=\"(.*?)\"`, propertyKey)
re := regexp.MustCompile(regexString)
match := string(re.Find([]byte(object)))
if len(match) == 0 {
log.Printf("Warning: Could not find property with key %s\n", propertyKey)
return ""
}
match = strings.Replace(match, "\"", "", -1)
propertyValue := strings.Split(match, "=")[1]
return propertyValue
}
// ValidateTestValues validates that all parameters have values. skipGenerated allows ignoring fields that have the `generated:"true"` tag.
func ValidateTestValues(s interface{}, skipGenerated bool) bool {
fields := reflect.ValueOf(s).Elem()
flag := true
for i := 0; i < fields.NumField(); i++ {
value := fields.Field(i)
typeField := fields.Type().Field(i)
if !validateTags(typeField.Tag) {
log.Printf("Warning: Struct Field %s has invalid tags.\n", typeField.Name)
flag = false
continue
}
if value.Kind() == reflect.String {
if len(value.String()) == 0 {
if !anyTagExists(typeField.Tag) {
continue
} else if skipGenerated && tagExists(typeField.Tag, "env") && tagExists(typeField.Tag, "generated") {
log.Printf("Warning: Struct Field %s (env:%s) doesn't have any value. (Generated = true. skipped.)\n", typeField.Name, typeField.Tag.Get("env"))
continue
} else if skipGenerated && tagExists(typeField.Tag, "kv") && tagExists(typeField.Tag, "generated") {
log.Printf("Warning: Struct Field %s (kv:%s) doesn't have any value. (Generated = true. skipped.)\n", typeField.Name, typeField.Tag.Get("kv"))
continue
} else if tagExists(typeField.Tag, "kv") {
log.Printf("Warning: Struct Field %s (kv:%s) doesn't have any value.\n", typeField.Name, typeField.Tag.Get("kv"))
flag = false
} else if tagExists(typeField.Tag, "val") {
log.Printf("Warning: Struct Field %s doesn't have any value.\n", typeField.Name)
flag = false
} else {
log.Printf("Warning: Struct Field %s (env:%s) doesn't have any value.\n", typeField.Name, typeField.Tag.Get("env"))
flag = false
}
}
} else if value.Kind() == reflect.Map {
if value.IsNil() {
log.Printf("Warning: Struct Field %s doesn't have any value.\n", typeField.Name)
flag = !tagExists(typeField.Tag, "val")
}
} else if value.Kind() == reflect.Slice {
if value.IsNil() {
log.Printf("Warning: Array Field %s doesn't have any value.\n", typeField.Name)
flag = !tagExists(typeField.Tag, "val")
}
} else if value.Kind() == reflect.Bool || value.Kind() == reflect.Int32 || value.Kind() == reflect.Int64 {
// all of these have default "zero" values so they are always valid
} else {
log.Printf("Warning: Found Field %s of type %s which is not allowed for Config Structures.\n", value.Kind(), typeField.Name)
return false
}
}
return flag
}
// FetchKeyVaultSecretE fills values from Key Vault
func FetchKeyVaultSecretE(s interface{}) (interface{}, error) {
keyVaultName, err := getKeyVaultName(s)
if err != nil {
return nil, err
}
fields := reflect.ValueOf(s).Elem()
for i := 0; i < fields.NumField(); i++ {
typeField := fields.Type().Field(i)
if typeField.Tag.Get("kv") != "" {
secretName := typeField.Tag.Get("kv")
if fields.Field(i).Kind() == reflect.String {
secret, err := GetKeyVaultSecret(keyVaultName, secretName)
if err != nil {
return nil, err
}
fields.Field(i).SetString(secret)
}
}
}
return s, nil
}
func getKeyVaultName(s interface{}) (string, error) {
structName := reflect.TypeOf(s)
fields := reflect.ValueOf(s).Elem()
for i := 0; i < fields.NumField(); i++ {
typeField := fields.Type().Field(i)
if len(typeField.Tag.Get("kvname")) != 0 {
if fields.Field(i).Kind() == reflect.String {
kvname := fields.Field(i).String()
kvNameField := fields.Type().Field(i).Name
if len(kvname) == 0 {
return "", fmt.Errorf("Empty KeyVault name is not allowed. Please add `kvname` on your struct %s.%s", structName, kvNameField)
}
return fields.Field(i).String(), nil
}
}
}
return "", fmt.Errorf("Can not find kvname filed on your struct %s", structName)
}
// tagExists tests whether the given tag is present.
func tagExists(tag reflect.StructTag, tagName string) bool {
_, ok := tag.Lookup(tagName)
return ok
}
// validateTags tests whether any tags are invalid
func validateTags(tag reflect.StructTag) bool {
val, isVal := tag.Lookup("val")
generated, isGenerated := tag.Lookup("generated")
if isVal {
v, err := strconv.ParseBool(val)
if err != nil || !v {
log.Printf("Warning: Value of \"val\" tag should be true")
return false
}
}
if isGenerated {
v, err := strconv.ParseBool(generated)
if err != nil || !v {
log.Printf("Warning: Value of \"generated\" tag should be true")
return false
}
}
return true
}
// anyTagExists tests whether any of the known tags are present.
func anyTagExists(tag reflect.StructTag) bool {
_, isEnv := tag.Lookup("env")
_, isKv := tag.Lookup("kv")
_, isVal := tag.Lookup("val")
return isEnv || isKv || isVal
}
// GetYamlVariables reads the yaml file at filePath and returns values specified by interface s
func GetYamlVariables(filePath string, s interface{}) (interface{}, error) {
// read yaml file
yamlFile, err := ioutil.ReadFile(filePath)
if err != nil {
return nil, fmt.Errorf("Path to Yaml file not set or invalid: %s", filePath)
}
// parse yaml file
m := make(map[interface{}]interface{})
err = yaml.UnmarshalStrict(yamlFile, &m)
if err != nil {
return nil, fmt.Errorf("Error parsing Yaml File %s: %s", filePath, err.Error())
}
err = mapstructure.Decode(m, &s)
if err != nil {
return nil, fmt.Errorf("Error decoding Yaml File %s: %s", filePath, err.Error())
}
return s, nil
}
// CheckIfEndpointIsResponding tests an endpoint for availability. Returns true if the endpoint is available, false otherwise
func CheckIfEndpointIsResponding(t *testing.T, endpoint string) bool {
// we ignore certificates at this point
tlsConfig := tls.Config{}
tlsConfig.InsecureSkipVerify = true
err := http_helper.HttpGetWithRetryWithCustomValidationE(
t,
fmt.Sprintf("https://%s", endpoint),
&tlsConfig,
1,
10*time.Second,
func(statusCode int, body string) bool {
if statusCode == 200 {
return true
}
if statusCode == 404 {
t.Log("Warning: 404 response from endpoint. Test will still PASS.")
return true
}
return false
},
)
return err == nil
}
//CheckSQLConnectivity checks if we can successfully connect to a SQL Managed Instance, MySql server or Azure SQL Server
func CheckSQLConnectivity(t *testing.T, driver string, connString string) {
// Create connection pool
db, err := sql.Open(driver, connString)
require.NoErrorf(t, err, "Error creating connection pool: %s ", err)
// Close the database connection pool after program executes
defer db.Close()
// Use background context
ctx := context.Background()
// As Open doesn't actually establish the connection, we need to run some command to check that connectivity works
//err = db.Ping()
//require.NoErrorf(t, err, "Error pinging database: %s", err)
var result string
// Run query and scan for result
err = db.QueryRowContext(ctx, "SELECT @@version").Scan(&result)
require.NoErrorf(t, err, "Error: %s", err)
t.Logf("%s\n", result)
}
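// Illustrative usage (placeholder connection string, not real credentials):
//
//	connString := "server=example.database.windows.net;user id=sqladmin;password=<secret>;port=1433;database=testdb"
//	CheckSQLConnectivity(t, "sqlserver", connString)
//
// The driver name must match a driver registered via a blank import (for example
// github.com/denisenkom/go-mssqldb registers "sqlserver"); which drivers this file imports is
// not visible in this excerpt.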
//CheckRedisCacheConnectivity checks if we can successfully connect to a Redis cache instance
func CheckRedisCacheConnectivity(t *testing.T, redisCacheURL string, redisCachePort int, redisCachePassword string) {
conn, err := redis.Dial(
"tcp",
fmt.Sprintf("%s:%d", redisCacheURL, redisCachePort),
redis.DialPassword(redisCachePassword),
| redis.DialUseTLS(true))
if err != nil {
| random_line_split |
|
test_helper.go | that has a path by the value of TEST_ENV_FILE_PATH environment variable.
func | (t *testing.T) error {
envFileName := os.Getenv(TestEnvFilePath)
err := godotenv.Load(envFileName)
if err != nil {
return fmt.Errorf("Can not read .env file: %s", envFileName)
}
return nil
}
// InitializeTestValues fills values from environment variables.
func InitializeTestValues(s interface{}) interface{} {
fields := reflect.ValueOf(s).Elem()
// iterate across all configuration properties
for i := 0; i < fields.NumField(); i++ {
typeField := fields.Type().Field(i)
environmentVariablesKey := typeField.Tag.Get("env")
if fields.Field(i).Kind() == reflect.String {
// check if we want a property inside a complex object
propertyKey, exists := typeField.Tag.Lookup("property")
if exists {
// get object string
objectString := os.Getenv(environmentVariablesKey)
// grab property value inside string
propertyValue := getPropertyValueFromString(objectString, propertyKey)
// set the value in the correct field
fields.Field(i).SetString(propertyValue)
} else {
fields.Field(i).SetString(os.Getenv(environmentVariablesKey))
}
}
}
return s
}
func getPropertyValueFromString(object string, propertyKey string) string {
// compile regex to look for key="value"
regexString := fmt.Sprintf(`%s=\"(.*?)\"`, propertyKey)
re := regexp.MustCompile(regexString)
match := string(re.Find([]byte(object)))
if len(match) == 0 {
log.Printf("Warning: Could not find property with key %s\n", propertyKey)
return ""
}
match = strings.Replace(match, "\"", "", -1)
propertyValue := strings.Split(match, "=")[1]
return propertyValue
}
// ValidateTestValues validates that all parameters have values. skipGenerated allows ignoring fields that have the `generated:"true"` tag.
func ValidateTestValues(s interface{}, skipGenerated bool) bool {
fields := reflect.ValueOf(s).Elem()
flag := true
for i := 0; i < fields.NumField(); i++ {
value := fields.Field(i)
typeField := fields.Type().Field(i)
if !validateTags(typeField.Tag) {
log.Printf("Warning: Struct Field %s has invalid tags.\n", typeField.Name)
flag = false
continue
}
if value.Kind() == reflect.String {
if len(value.String()) == 0 {
if !anyTagExists(typeField.Tag) {
continue
} else if skipGenerated && tagExists(typeField.Tag, "env") && tagExists(typeField.Tag, "generated") {
log.Printf("Warning: Struct Field %s (env:%s) doesn't have any value. (Generated = true. skipped.)\n", typeField.Name, typeField.Tag.Get("env"))
continue
} else if skipGenerated && tagExists(typeField.Tag, "kv") && tagExists(typeField.Tag, "generated") {
log.Printf("Warning: Struct Field %s (kv:%s) doesn't have any value. (Generated = true. skipped.)\n", typeField.Name, typeField.Tag.Get("kv"))
continue
} else if tagExists(typeField.Tag, "kv") {
log.Printf("Warning: Struct Field %s (kv:%s) doesn't have any value.\n", typeField.Name, typeField.Tag.Get("kv"))
flag = false
} else if tagExists(typeField.Tag, "val") {
log.Printf("Warning: Struct Field %s doesn't have any value.\n", typeField.Name)
flag = false
} else {
log.Printf("Warning: Struct Field %s (env:%s) doesn't have any value.\n", typeField.Name, typeField.Tag.Get("env"))
flag = false
}
}
} else if value.Kind() == reflect.Map {
if value.IsNil() {
log.Printf("Warning: Struct Field %s doesn't have any value.\n", typeField.Name)
flag = !tagExists(typeField.Tag, "val")
}
} else if value.Kind() == reflect.Slice {
if value.IsNil() {
log.Printf("Warning: Array Field %s doesn't have any value.\n", typeField.Name)
flag = !tagExists(typeField.Tag, "val")
}
} else if value.Kind() == reflect.Bool || value.Kind() == reflect.Int32 || value.Kind() == reflect.Int64 {
// all of these have default "zero" values so they are always valid
} else {
log.Printf("Warning: Found Field %s of type %s which is not allowed for Config Structures.\n", value.Kind(), typeField.Name)
return false
}
}
return flag
}
// FetchKeyVaultSecretE fills values from Key Vault
func FetchKeyVaultSecretE(s interface{}) (interface{}, error) {
keyVaultName, err := getKeyVaultName(s)
if err != nil {
return nil, err
}
fields := reflect.ValueOf(s).Elem()
for i := 0; i < fields.NumField(); i++ {
typeField := fields.Type().Field(i)
if typeField.Tag.Get("kv") != "" {
secretName := typeField.Tag.Get("kv")
if fields.Field(i).Kind() == reflect.String {
secret, err := GetKeyVaultSecret(keyVaultName, secretName)
if err != nil {
return nil, err
}
fields.Field(i).SetString(secret)
}
}
}
return s, nil
}
func getKeyVaultName(s interface{}) (string, error) {
structName := reflect.TypeOf(s)
fields := reflect.ValueOf(s).Elem()
for i := 0; i < fields.NumField(); i++ {
typeField := fields.Type().Field(i)
if len(typeField.Tag.Get("kvname")) != 0 {
if fields.Field(i).Kind() == reflect.String {
kvname := fields.Field(i).String()
kvNameField := fields.Type().Field(i).Name
if len(kvname) == 0 {
return "", fmt.Errorf("Empty KeyVault name is not allowed. Please add `kvname` on your struct %s.%s", structName, kvNameField)
}
return fields.Field(i).String(), nil
}
}
}
return "", fmt.Errorf("Can not find kvname filed on your struct %s", structName)
}
// tagExists tests whether the given tag is present.
func tagExists(tag reflect.StructTag, tagName string) bool {
_, ok := tag.Lookup(tagName)
return ok
}
// validateTags tests whether any tags are invalid
func validateTags(tag reflect.StructTag) bool {
val, isVal := tag.Lookup("val")
generated, isGenerated := tag.Lookup("generated")
if isVal {
v, err := strconv.ParseBool(val)
if err != nil || !v {
log.Printf("Warning: Value of \"val\" tag should be true")
return false
}
}
if isGenerated {
v, err := strconv.ParseBool(generated)
if err != nil || !v {
log.Printf("Warning: Value of \"generated\" tag should be true")
return false
}
}
return true
}
// anyTagExists tests whether any of the known tags are present.
func anyTagExists(tag reflect.StructTag) bool {
_, isEnv := tag.Lookup("env")
_, isKv := tag.Lookup("kv")
_, isVal := tag.Lookup("val")
return isEnv || isKv || isVal
}
// GetYamlVariables reads the yaml file at filePath and returns values specified by interface s
func GetYamlVariables(filePath string, s interface{}) (interface{}, error) {
// read yaml file
yamlFile, err := ioutil.ReadFile(filePath)
if err != nil {
return nil, fmt.Errorf("Path to Yaml file not set or invalid: %s", filePath)
}
// parse yaml file
m := make(map[interface{}]interface{})
err = yaml.UnmarshalStrict(yamlFile, &m)
if err != nil {
return nil, fmt.Errorf("Error parsing Yaml File %s: %s", filePath, err.Error())
}
err = mapstructure.Decode(m, &s)
if err != nil {
return nil, fmt.Errorf("Error decoding Yaml File %s: %s", filePath, err.Error())
}
return s, nil
}
// CheckIfEndpointIsResponding tests an endpoint for availability. Returns true if the endpoint is available, false otherwise
func CheckIfEndpointIsResponding(t *testing.T, endpoint string) bool {
// we ignore certificates at this point
tlsConfig := tls.Config{}
tlsConfig.InsecureSkipVerify = true
err := http_helper.HttpGetWithRetryWithCustomValidationE(
t,
fmt.Sprintf("https://%s", endpoint),
&tlsConfig,
1,
10*time.Second,
func(statusCode int, body string) bool {
if statusCode == 200 {
return true
}
if statusCode == 404 {
t.Log("Warning: 404 response from endpoint. Test will still PASS.")
return true
}
return false
},
)
return err == nil
}
//CheckSQLConnectivity checks if we can successfully connect to a SQL Managed Instance, MySql server or Azure SQL Server | LoadEnvFile | identifier_name |
yolo-and-dlib.py | nets.Darknet19)
ct = CentroidTracker(maxDisappeared=5, maxDistance=50) # Look into 'CentroidTracker' for further info about parameters
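# Hedged note (assuming the widely used pyimagesearch-style CentroidTracker): maxDisappeared is
# the number of consecutive frames an object may go undetected before it is deregistered, and
# maxDistance is the maximum centroid distance allowed when associating an existing object with
# a new detection.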
trackers = [] # List of all dlib trackers
trackableObjects = {} # Dictionary of trackable objects containing object's ID and its corresponding centroid/s
skip_frames = 10 # Number of frames to skip between detection runs
confidence_level = 0.40 # The confidence level of a detection
total = 0 # Total number of detected objects from classes of interest
use_original_video_size_as_output_size = True # Shows original video as output and not the 416x416 image that is used as yolov3 input (NOTE: Detection still happens with 416x416 img size but the output is displayed in original video size if this parameter is True)
video_path = os.getcwd() + "/videos/M6 Motorway Traffic - Short version.mp4"
video_name = os.path.basename(video_path)
print("Loading video {video_path}...".format(video_path=video_path))
if not os.path.exists(video_path):
print("File does not exist. Exited.")
exit()
# From https://github.com/experiencor/keras-yolo3/blob/master/yolo3_one_file_to_detect_them_all.py#L389
# YoloV3 detects 80 classes represented below
all_classes = ["person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck", \
"boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", \
"bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", \
"backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard", \
"sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", \
"tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", \
"apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", \
"chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor", "laptop", "mouse", \
"remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator", \
"book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"]
# Classes of interest (with their corresponding indexes for easier looping)
classes = { 1 : 'bicycle', 2 : 'car', 3 : 'motorbike', 5 : 'bus', 7 : 'truck' }
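# The keys above are indexes into all_classes (person=0, bicycle=1, car=2, motorbike=3, bus=5,
# truck=7), which is why the detection loop below indexes np_detections with them directly.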
with tf.Session() as sess:
sess.run(model.pretrained())
cap = cv2.VideoCapture(video_path)
# Get video size (just for log purposes)
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
# Scale used for output window size and net size
width_scale = 1
height_scale = 1
if use_original_video_size_as_output_size:
width_scale = width / img_size
height_scale = height / img_size
def drawRectangleCV2(img, pt1, pt2, color, thickness, width_scale=width_scale, height_scale=height_scale):
point1 = (int(pt1[0] * width_scale), int(pt1[1] * height_scale))
point2 = (int(pt2[0] * width_scale), int(pt2[1] * height_scale))
return cv2.rectangle(img, point1, point2, color, thickness)
def drawTextCV2(img, text, pt, font, font_scale, color, lineType, width_scale=width_scale, height_scale=height_scale):
pt = (int(pt[0] * width_scale), int(pt[1] * height_scale))
cv2.putText(img, text, pt, font, font_scale, color, lineType)
def | (img, center, radius, color, thickness, width_scale=width_scale, height_scale=height_scale):
center = (int(center[0] * width_scale), int(center[1] * height_scale))
cv2.circle(img, center, radius, color, thickness)
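# Worked example of the scaling above (assuming a hypothetical 1280x720 source video):
# width_scale = 1280 / 416 ~ 3.08 and height_scale = 720 / 416 ~ 1.73, so a point at (100, 200)
# in the 416x416 network space is drawn at roughly (308, 346) on the full-size output frame.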
# Python 3.5.6 does not support f-strings (next line will generate syntax error)
#print(f"Loaded {video_path}. Width: {width}, Height: {height}")
print("Loaded {video_path}. Width: {width}, Height: {height}".format(video_path=video_path, width=width, height=height))
skipped_frames_counter = 0
while(cap.isOpened()):
ret, frame = cap.read()
if ret == False:
print("Error reading frame. cap.read() returned {ret}".format(ret))
# Frame must be resized to 'img_size' (because that's what YoloV3 accepts as input)
img = cv2.resize(frame, (img_size, img_size))
# Output image is used for drawing annotations (tracking rectangles and detected classes) on the image
output_img = frame if use_original_video_size_as_output_size else img
tracker_rects = []
if skipped_frames_counter == skip_frames:
# Detecting happens after number of frames have passes specified by 'skip_frames' variable value
print("[DETECTING]")
trackers = []
skipped_frames_counter = 0 # reset counter
np_img = np.array(img).reshape(-1, img_size, img_size, 3)
start_time=time.time()
predictions = sess.run(model.preds, {inputs: model.preprocess(np_img)})
print("Detection took %s seconds" % (time.time() - start_time))
# model.get_boxes returns a 80 element array containing information about detected classes
# each element contains a list of detected boxes, confidence level ...
detections = model.get_boxes(predictions, np_img.shape[1:3])
np_detections = np.array(detections)
# Loop only through classes we are interested in
for class_index in classes.keys():
local_count = 0
class_name = classes[class_index]
# Loop through detected infos of a class we are interested in
for i in range(len(np_detections[class_index])):
box = np_detections[class_index][i]
if np_detections[class_index][i][4] >= confidence_level:
print("Detected ", class_name, " with confidence of ", np_detections[class_index][i][4])
local_count += 1
startX, startY, endX, endY = box[0], box[1], box[2], box[3]
drawRectangleCV2(output_img, (startX, startY), (endX, endY), (0, 255, 0), 1)
drawTextCV2(output_img, class_name, (startX, startY), cv2.FONT_HERSHEY_SIMPLEX, .5, (0, 0, 255), 1)
# Construct a dlib rectangle object from the bounding box coordinates and then start the dlib correlation
tracker = dlib.correlation_tracker()
rect = dlib.rectangle(int(startX), int(startY), int(endX), int(endY))
tracker.start_track(img, rect)
# Add the tracker to our list of trackers so we can utilize it during skip frames
trackers.append(tracker)
# Write the total number of detected objects for a given class on this frame
print(class_name," : ", local_count)
else:
# If detection is not happening then track previously detected objects (if any)
print("[TRACKING]")
skipped_frames_counter += 1 # Increase the number frames for which we did not use detection
# Loop through tracker, update each of them and display their rectangle
for tracker in trackers:
tracker.update(img)
pos = tracker.get_position()
# Unpack the position object
startX = int(pos.left())
startY = int(pos.top())
endX = int(pos.right())
endY = int(pos.bottom())
# Add the bounding box coordinates to the tracking rectangles list
tracker_rects.append((startX, startY, endX, endY))
# Draw tracking rectangles
drawRectangleCV2(output_img, (startX, startY), (endX, endY), (255, 0, 0), 1)
# Use the centroid tracker to associate the (1) old object centroids with (2) the newly computed object centroids
objects = ct.update(tracker_rects)
# Loop over the tracked objects
for (objectID, centroid) in objects.items():
# Check to see if a trackable object exists for the current object ID
to = trackableObjects.get(objectID, None)
if to is None:
# If there is no existing trackable object, create one
to = TrackableObject(objectID, | drawCircleCV2 | identifier_name |
yolo-and-dlib.py | , nets.Darknet19)
ct = CentroidTracker(maxDisappeared=5, maxDistance=50) # Look into 'CentroidTracker' for further info about parameters
trackers = [] # List of all dlib trackers
trackableObjects = {} # Dictionary of trackable objects containing object's ID and its corresponding centroid/s
skip_frames = 10 # Number of frames to skip between detection runs
confidence_level = 0.40 # The confidence level of a detection
total = 0 # Total number of detected objects from classes of interest
use_original_video_size_as_output_size = True # Shows original video as output and not the 416x416 image that is used as yolov3 input (NOTE: Detection still happens with 416x416 img size but the output is displayed in original video size if this parameter is True)
video_path = os.getcwd() + "/videos/M6 Motorway Traffic - Short version.mp4"
video_name = os.path.basename(video_path)
print("Loading video {video_path}...".format(video_path=video_path))
if not os.path.exists(video_path):
print("File does not exist. Exited.")
exit()
# From https://github.com/experiencor/keras-yolo3/blob/master/yolo3_one_file_to_detect_them_all.py#L389
# YoloV3 detects 80 classes represented below
all_classes = ["person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck", \
"boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", \
"bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", \
"backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard", \
"sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", \
"tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", \
"apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", \
"chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor", "laptop", "mouse", \
"remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator", \
"book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"]
# Classes of interest (with their corresponding indexes for easier looping)
classes = { 1 : 'bicycle', 2 : 'car', 3 : 'motorbike', 5 : 'bus', 7 : 'truck' }
with tf.Session() as sess:
sess.run(model.pretrained())
cap = cv2.VideoCapture(video_path)
# Get video size (just for log purposes)
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
# Scale used for output window size and net size
width_scale = 1
height_scale = 1
if use_original_video_size_as_output_size:
width_scale = width / img_size
height_scale = height / img_size
def drawRectangleCV2(img, pt1, pt2, color, thickness, width_scale=width_scale, height_scale=height_scale):
point1 = (int(pt1[0] * width_scale), int(pt1[1] * height_scale))
point2 = (int(pt2[0] * width_scale), int(pt2[1] * height_scale))
return cv2.rectangle(img, point1, point2, color, thickness)
def drawTextCV2(img, text, pt, font, font_scale, color, lineType, width_scale=width_scale, height_scale=height_scale):
|
def drawCircleCV2(img, center, radius, color, thickness, width_scale=width_scale, height_scale=height_scale):
center = (int(center[0] * width_scale), int(center[1] * height_scale))
cv2.circle(img, center, radius, color, thickness)
# Python 3.5.6 does not support f-strings (next line will generate syntax error)
#print(f"Loaded {video_path}. Width: {width}, Height: {height}")
print("Loaded {video_path}. Width: {width}, Height: {height}".format(video_path=video_path, width=width, height=height))
skipped_frames_counter = 0
while(cap.isOpened()):
ret, frame = cap.read()
if ret == False:
print("Error reading frame. cap.read() returned {ret}".format(ret))
# Frame must be resized to 'img_size' (because that's what YoloV3 accepts as input)
img = cv2.resize(frame, (img_size, img_size))
# Output image is used for drawing annotations (tracking rectangles and detected classes) on the image
output_img = frame if use_original_video_size_as_output_size else img
tracker_rects = []
if skipped_frames_counter == skip_frames:
# Detecting happens after number of frames have passes specified by 'skip_frames' variable value
print("[DETECTING]")
trackers = []
skipped_frames_counter = 0 # reset counter
np_img = np.array(img).reshape(-1, img_size, img_size, 3)
start_time=time.time()
predictions = sess.run(model.preds, {inputs: model.preprocess(np_img)})
print("Detection took %s seconds" % (time.time() - start_time))
# model.get_boxes returns a 80 element array containing information about detected classes
# each element contains a list of detected boxes, confidence level ...
detections = model.get_boxes(predictions, np_img.shape[1:3])
np_detections = np.array(detections)
# Loop only through classes we are interested in
for class_index in classes.keys():
local_count = 0
class_name = classes[class_index]
# Loop through detected infos of a class we are interested in
for i in range(len(np_detections[class_index])):
box = np_detections[class_index][i]
if np_detections[class_index][i][4] >= confidence_level:
print("Detected ", class_name, " with confidence of ", np_detections[class_index][i][4])
local_count += 1
startX, startY, endX, endY = box[0], box[1], box[2], box[3]
drawRectangleCV2(output_img, (startX, startY), (endX, endY), (0, 255, 0), 1)
drawTextCV2(output_img, class_name, (startX, startY), cv2.FONT_HERSHEY_SIMPLEX, .5, (0, 0, 255), 1)
# Construct a dlib rectangle object from the bounding box coordinates and then start the dlib correlation
tracker = dlib.correlation_tracker()
rect = dlib.rectangle(int(startX), int(startY), int(endX), int(endY))
tracker.start_track(img, rect)
# Add the tracker to our list of trackers so we can utilize it during skip frames
trackers.append(tracker)
# Write the total number of detected objects for a given class on this frame
print(class_name," : ", local_count)
else:
# If detection is not happening then track previously detected objects (if any)
print("[TRACKING]")
skipped_frames_counter += 1 # Increase the number frames for which we did not use detection
# Loop through tracker, update each of them and display their rectangle
for tracker in trackers:
tracker.update(img)
pos = tracker.get_position()
# Unpack the position object
startX = int(pos.left())
startY = int(pos.top())
endX = int(pos.right())
endY = int(pos.bottom())
# Add the bounding box coordinates to the tracking rectangles list
tracker_rects.append((startX, startY, endX, endY))
# Draw tracking rectangles
drawRectangleCV2(output_img, (startX, startY), (endX, endY), (255, 0, 0), 1)
# Use the centroid tracker to associate the (1) old object centroids with (2) the newly computed object centroids
objects = ct.update(tracker_rects)
# Loop over the tracked objects
for (objectID, centroid) in objects.items():
# Check to see if a trackable object exists for the current object ID
to = trackableObjects.get(objectID, None)
if to is None:
# If there is no existing trackable object, create one
to = TrackableObject(objectID, | pt = (int(pt[0] * width_scale), int(pt[1] * height_scale))
cv2.putText(img, text, pt, font, font_scale, color, lineType) | identifier_body |
yolo-and-dlib.py | trackableObjects = {} # Dictionary of trackable objects containing object's ID and its' corresponding centroid/s
skip_frames = 10 # Number of frames to skip between detection runs
confidence_level = 0.40 # The confidence level of a detection
total = 0 # Total number of detected objects from classes of interest
use_original_video_size_as_output_size = True # Shows original video as output and not the 416x416 image that is used as yolov3 input (NOTE: Detection still happens with 416x416 img size but the output is displayed in original video size if this parameter is True)
video_path = os.getcwd() + "/videos/M6 Motorway Traffic - Short version.mp4"
video_name = os.path.basename(video_path)
print("Loading video {video_path}...".format(video_path=video_path))
if not os.path.exists(video_path):
print("File does not exist. Exited.")
exit()
# From https://github.com/experiencor/keras-yolo3/blob/master/yolo3_one_file_to_detect_them_all.py#L389
# YoloV3 detects 80 classes represented below
all_classes = ["person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck", \
"boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", \
"bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", \
"backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard", \
"sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", \
"tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", \
"apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", \
"chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor", "laptop", "mouse", \
"remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator", \
"book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"]
# Classes of interest (with their corresponding indexes for easier looping)
classes = { 1 : 'bicycle', 2 : 'car', 3 : 'motorbike', 5 : 'bus', 7 : 'truck' }
with tf.Session() as sess:
sess.run(model.pretrained())
cap = cv2.VideoCapture(video_path)
# Get video size (just for log purposes)
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
# Scale used for output window size and net size
width_scale = 1
height_scale = 1
if use_original_video_size_as_output_size:
width_scale = width / img_size
height_scale = height / img_size
def drawRectangleCV2(img, pt1, pt2, color, thickness, width_scale=width_scale, height_scale=height_scale):
point1 = (int(pt1[0] * width_scale), int(pt1[1] * height_scale))
point2 = (int(pt2[0] * width_scale), int(pt2[1] * height_scale))
return cv2.rectangle(img, point1, point2, color, thickness)
def drawTextCV2(img, text, pt, font, font_scale, color, lineType, width_scale=width_scale, height_scale=height_scale):
pt = (int(pt[0] * width_scale), int(pt[1] * height_scale))
cv2.putText(img, text, pt, font, font_scale, color, lineType)
def drawCircleCV2(img, center, radius, color, thickness, width_scale=width_scale, height_scale=height_scale):
center = (int(center[0] * width_scale), int(center[1] * height_scale))
cv2.circle(img, center, radius, color, thickness)
# Python 3.5.6 does not support f-strings (next line will generate syntax error)
#print(f"Loaded {video_path}. Width: {width}, Height: {height}")
print("Loaded {video_path}. Width: {width}, Height: {height}".format(video_path=video_path, width=width, height=height))
skipped_frames_counter = 0
while(cap.isOpened()):
ret, frame = cap.read()
if ret == False:
print("Error reading frame. cap.read() returned {ret}".format(ret))
# Frame must be resized to 'img_size' (because that's what YoloV3 accepts as input)
img = cv2.resize(frame, (img_size, img_size))
# Output image is used for drawing annotations (tracking rectangles and detected classes) on the image
output_img = frame if use_original_video_size_as_output_size else img
tracker_rects = []
if skipped_frames_counter == skip_frames:
# Detecting happens after number of frames have passes specified by 'skip_frames' variable value
print("[DETECTING]")
trackers = []
skipped_frames_counter = 0 # reset counter
np_img = np.array(img).reshape(-1, img_size, img_size, 3)
start_time=time.time()
predictions = sess.run(model.preds, {inputs: model.preprocess(np_img)})
print("Detection took %s seconds" % (time.time() - start_time))
# model.get_boxes returns a 80 element array containing information about detected classes
# each element contains a list of detected boxes, confidence level ...
detections = model.get_boxes(predictions, np_img.shape[1:3])
np_detections = np.array(detections)
# Loop only through classes we are interested in
for class_index in classes.keys():
local_count = 0
class_name = classes[class_index]
# Loop through detected infos of a class we are interested in
for i in range(len(np_detections[class_index])):
box = np_detections[class_index][i]
if np_detections[class_index][i][4] >= confidence_level:
print("Detected ", class_name, " with confidence of ", np_detections[class_index][i][4])
local_count += 1
startX, startY, endX, endY = box[0], box[1], box[2], box[3]
drawRectangleCV2(output_img, (startX, startY), (endX, endY), (0, 255, 0), 1)
drawTextCV2(output_img, class_name, (startX, startY), cv2.FONT_HERSHEY_SIMPLEX, .5, (0, 0, 255), 1)
# Construct a dlib rectangle object from the bounding box coordinates and then start the dlib correlation
tracker = dlib.correlation_tracker()
rect = dlib.rectangle(int(startX), int(startY), int(endX), int(endY))
tracker.start_track(img, rect)
# Add the tracker to our list of trackers so we can utilize it during skip frames
trackers.append(tracker)
# Write the total number of detected objects for a given class on this frame
print(class_name," : ", local_count)
else:
# If detection is not happening then track previously detected objects (if any)
print("[TRACKING]")
skipped_frames_counter += 1 # Increase the number frames for which we did not use detection
# Loop through tracker, update each of them and display their rectangle
for tracker in trackers:
tracker.update(img)
pos = tracker.get_position()
# Unpack the position object
startX = int(pos.left())
startY = int(pos.top())
endX = int(pos.right())
endY = int(pos.bottom())
# Add the bounding box coordinates to the tracking rectangles list
tracker_rects.append((startX, startY, endX, endY))
# Draw tracking rectangles
drawRectangleCV2(output_img, (startX, startY), (endX, endY), (255, 0, 0), 1)
# Use the centroid tracker to associate the (1) old object centroids with (2) the newly computed object centroids
objects = ct.update(tracker_rects)
# Loop over the tracked objects
for (objectID, centroid) in objects.items():
# Check to see if a trackable object exists for the current object ID
to = trackableObjects.get(objectID, None)
if to is None:
# If there is no existing trackable object, create one
to = TrackableObject(objectID, centroid)
else:
to.centroids.append(centroid)
# If the object has not been counted, count it and mark it as counted
if not to.counted:
| total += 1
to.counted = True | conditional_block |
|
yolo-and-dlib.py | import os
# For 'disable_v2_behavior' see https://github.com/theislab/scgen/issues/14
tf.disable_v2_behavior()
# Image size must be '416x416' as YoloV3 network expects that specific image size as input
img_size = 416
inputs = tf.placeholder(tf.float32, [None, img_size, img_size, 3])
model = nets.YOLOv3COCO(inputs, nets.Darknet19)
ct = CentroidTracker(maxDisappeared=5, maxDistance=50) # Look into 'CentroidTracker' for further info about parameters
trackers = [] # List of all dlib trackers
trackableObjects = {} # Dictionary of trackable objects containing object's ID and its corresponding centroid/s
skip_frames = 10 # Number of frames to skip between detection runs
confidence_level = 0.40 # The confidence level of a detection
total = 0 # Total number of detected objects from classes of interest
use_original_video_size_as_output_size = True # Shows original video as output and not the 416x416 image that is used as yolov3 input (NOTE: Detection still happens with 416x416 img size but the output is displayed in original video size if this parameter is True)
video_path = os.getcwd() + "/videos/M6 Motorway Traffic - Short version.mp4"
video_name = os.path.basename(video_path)
print("Loading video {video_path}...".format(video_path=video_path))
if not os.path.exists(video_path):
print("File does not exist. Exited.")
exit()
# From https://github.com/experiencor/keras-yolo3/blob/master/yolo3_one_file_to_detect_them_all.py#L389
# YoloV3 detects 80 classes represented below
all_classes = ["person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck", \
"boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", \
"bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", \
"backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard", \
"sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", \
"tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", \
"apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", \
"chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor", "laptop", "mouse", \
"remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator", \
"book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"]
# Classes of interest (with their corresponding indexes for easier looping)
classes = { 1 : 'bicycle', 2 : 'car', 3 : 'motorbike', 5 : 'bus', 7 : 'truck' }
with tf.Session() as sess:
sess.run(model.pretrained())
cap = cv2.VideoCapture(video_path)
# Get video size (just for log purposes)
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
# Scale used for output window size and net size
width_scale = 1
height_scale = 1
if use_original_video_size_as_output_size:
width_scale = width / img_size
height_scale = height / img_size
def drawRectangleCV2(img, pt1, pt2, color, thickness, width_scale=width_scale, height_scale=height_scale):
point1 = (int(pt1[0] * width_scale), int(pt1[1] * height_scale))
point2 = (int(pt2[0] * width_scale), int(pt2[1] * height_scale))
return cv2.rectangle(img, point1, point2, color, thickness)
def drawTextCV2(img, text, pt, font, font_scale, color, lineType, width_scale=width_scale, height_scale=height_scale):
pt = (int(pt[0] * width_scale), int(pt[1] * height_scale))
cv2.putText(img, text, pt, font, font_scale, color, lineType)
def drawCircleCV2(img, center, radius, color, thickness, width_scale=width_scale, height_scale=height_scale):
center = (int(center[0] * width_scale), int(center[1] * height_scale))
cv2.circle(img, center, radius, color, thickness)
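# Worked example of the scaling above (illustrative sketch, not part of the original script):
# with a 1280x720 source video and img_size = 416, width_scale = 1280/416 ≈ 3.08 and
# height_scale = 720/416 ≈ 1.73, so a detection corner at (100, 200) in the 416x416
# input is drawn at approximately (307, 346) on the full-size output frame.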
# Python 3.5.6 does not support f-strings (next line will generate syntax error)
#print(f"Loaded {video_path}. Width: {width}, Height: {height}")
print("Loaded {video_path}. Width: {width}, Height: {height}".format(video_path=video_path, width=width, height=height))
skipped_frames_counter = 0
while(cap.isOpened()):
ret, frame = cap.read()
if ret == False:
print("Error reading frame. cap.read() returned {ret}".format(ret=ret))
break
# Frame must be resized to 'img_size' (because that's what YoloV3 accepts as input)
img = cv2.resize(frame, (img_size, img_size))
# Output image is used for drawing annotations (tracking rectangles and detected classes) on the image
output_img = frame if use_original_video_size_as_output_size else img
tracker_rects = []
if skipped_frames_counter == skip_frames:
# Detection happens after the number of frames specified by the 'skip_frames' variable has passed
print("[DETECTING]")
trackers = []
skipped_frames_counter = 0 # reset counter
np_img = np.array(img).reshape(-1, img_size, img_size, 3)
start_time=time.time()
predictions = sess.run(model.preds, {inputs: model.preprocess(np_img)})
print("Detection took %s seconds" % (time.time() - start_time))
# model.get_boxes returns an 80-element array containing information about detected classes
# each element contains a list of detected boxes, confidence level ...
detections = model.get_boxes(predictions, np_img.shape[1:3])
np_detections = np.array(detections)
# Loop only through classes we are interested in
for class_index in classes.keys():
local_count = 0
class_name = classes[class_index]
# Loop through detected infos of a class we are interested in
for i in range(len(np_detections[class_index])):
box = np_detections[class_index][i]
if np_detections[class_index][i][4] >= confidence_level:
print("Detected ", class_name, " with confidence of ", np_detections[class_index][i][4])
local_count += 1
startX, startY, endX, endY = box[0], box[1], box[2], box[3]
drawRectangleCV2(output_img, (startX, startY), (endX, endY), (0, 255, 0), 1)
drawTextCV2(output_img, class_name, (startX, startY), cv2.FONT_HERSHEY_SIMPLEX, .5, (0, 0, 255), 1)
# Construct a dlib rectangle object from the bounding box coordinates and then start the dlib correlation
tracker = dlib.correlation_tracker()
rect = dlib.rectangle(int(startX), int(startY), int(endX), int(endY))
tracker.start_track(img, rect)
# Add the tracker to our list of trackers so we can utilize it during skip frames
trackers.append(tracker)
# Write the total number of detected objects for a given class on this frame
print(class_name," : ", local_count)
else:
# If detection is not happening then track previously detected objects (if any)
print("[TRACKING]")
skipped_frames_counter += 1 # Increase the number of frames for which we did not use detection
# Loop through tracker, update each of them and display their rectangle
for tracker in trackers:
tracker.update(img)
pos = tracker.get_position()
# Unpack the position object
startX = int(pos.left())
startY = int(pos.top())
endX = int(pos.right())
endY = int(pos.bottom())
# Add the bounding box coordinates to the tracking rectangles list
tracker_rects.append((startX, startY, endX, endY))
# Draw tracking rectangles
drawRectangleCV2(output_img, (startX, startY), (endX, endY), (255, 0, 0), 1)
# Use | import tensorflow.compat.v1 as tf | random_line_split |
|
main.ts | [n] * this.times[n];
}
return total / this.totalTime;
}
}
export class GameScene extends Phaser.Scene {
private square: Phaser.GameObjects.Rectangle & { body: Phaser.Physics.Arcade.Body };
private terrain: Terrain = new Terrain();
private truck: Truck = new Truck();
private pickupTruck: Vehicles.PickupTruck
public cursors: Phaser.Types.Input.Keyboard.CursorKeys
isScrolling:boolean = false;
totalTime:number;
startTruckTime:number;
startTruckX:number;
truckProgress:ProgressCounter;
isLosing:boolean;
isLosingStartTime:number;
isWinning:boolean;
isWinningStartTime:number;
sceneData : BetweenLevelState;
skyBackground: Phaser.GameObjects.Sprite;
roadFillContainer: Phaser.GameObjects.Container;
backgroundContainer: Phaser.GameObjects.Container;
foregroundContainer: Phaser.GameObjects.Container;
instructionText: Phaser.GameObjects.Text;
scoreText: Phaser.GameObjects.Text;
score: number = 0;
keySpace: Phaser.Input.Keyboard.Key;
keyA: Phaser.Input.Keyboard.Key;
keyD: Phaser.Input.Keyboard.Key;
music: Phaser.Sound.BaseSound;
muteButton: Phaser.GameObjects.Sprite;
constructor() {
super(sceneConfig);
}
public init() |
public preload() {
Vehicles.PickupTruck.preload(this);
this.sceneData = (<BetweenLevelState>this.scene.settings.data) || new BetweenLevelState();
if (this.sceneData.startImmediately) {
this.isScrolling = true;
} else {
this.isScrolling = false;
}
this.truck.preload(this);
this.load.image('ground-tiles', '../assets/placeholder/ground_tiles.png');
this.load.image('sky', '../assets/placeholder/sky.png');
this.load.image('flag', '../assets/placeholder/finish_flag.png');
this.load.image('tree1', '../assets/placeholder/kenney_foliagePack_005.png');
this.load.image('tree2', '../assets/placeholder/kenney_foliagePack_006.png');
this.load.image('potholetruck', '../assets/placeholder/potholetruck.png');
this.load.image('music', '../assets/icons/music.png');
this.load.image('nomusic', '../assets/icons/nomusic.png');
this.load.image('sound', '../assets/icons/sound.png');
this.load.image('nosound', '../assets/icons/nosound.png');
this.load.audio('backgroundMusic', ['../assets/music/Great_Hope_Mono.mp3', '../assets/music/Great_Hope_Mono.ogg']);
this.load.tilemapTiledJSON('map', '../assets/tiled/level0.json');
}
public create() {
if(DEBUG) {
this.matter.add.mouseSpring();
}
this.skyBackground = this.add.sprite(0, 0, 'sky').setOrigin(0, 0).setScrollFactor(0);
if (!this.sceneData.startImmediately) {
const scrollButton = this.add.text(100, 50, 'Go!', { fontSize: '30px' })
.setInteractive();
scrollButton.on('pointerdown', () => {
this.isScrolling = true;
scrollButton.setVisible(false);
});
}
this.instructionText = this.add.text(440, 150, 'Use ←/→ cursor keys to move\nUse A and D to fill potholes', { fontSize: '30px', align: 'center', color: 'black', fontFamily: 'sans-serif'})
.setScrollFactor(0);
this.add.text(6302, 300, 'Do your duty', { fontSize: '30px', align: 'center', color: 'black', fontFamily: 'sans-serif'});
// const roadFillButton = this.add.text(1100, 50, 'Fill', { fontSize: '30px' })
// .setInteractive()
// .setScrollFactor(0);
// roadFillButton.on('pointerdown', () => this.fillRoad());
this.scoreText = this.add.text(140, 150, 'Damage: 0 / 5', { fontSize: '30px', align: 'center', color: 'red', fontFamily: 'sans-serif'})
.setScrollFactor(0);
this.input.keyboard.addKey('SPACE')
this.keySpace
.on('down', () => this.fillRoad());
this.keyA
.on('down', () => this.fillRoad(-190));
this.keyD
.on('down', () => this.fillRoad(210));
this.roadFillContainer = this.add.container(0, 0);
this.backgroundContainer = this.add.container(0, 0);
this.truck.createTruck(this, {x:900, y: 300});
this.pickupTruck = new Vehicles.PickupTruck(this);
this.events.on('barrelDrop', function() {
this.score++
this.scoreText.setText('Damage: ' + this.score + ' / 5')
if(this.score >= 5) {
this.startLose()
}
}, this)
this.cursors = this.input.keyboard.createCursorKeys();
this.foregroundContainer = this.add.container(0, 0);
this.terrain.create(this, this.sceneData.level, this.backgroundContainer, this.foregroundContainer);
let mute = this.sound.mute;
this.sound.volume = 0.5;
if (DEBUG) {
mute = true;
this.sound.mute = mute;
}
this.muteButton = this.add.sprite(640 - 8, 30, mute ? 'nomusic' : 'music')
.setInteractive()
.setScrollFactor(0);
this.muteButton.setTexture(mute ? 'nomusic' : 'music');
this.muteButton.on('pointerdown', () => {
let nextMute = !this.sound.mute;
this.sound.mute = nextMute;
this.muteButton.setTexture(nextMute ? 'nomusic' : 'music');
});
this.music = this.sound.add('backgroundMusic', {loop: true});
this.music.play();
}
public stop() {
this.music.stop()
this.keySpace.off('down');
this.keyA.off('down');
this.keyD.off('down');
this.input.keyboard.removeKey(this.keySpace);
this.input.keyboard.removeKey(this.keyA);
this.input.keyboard.removeKey(this.keyD);
this.events.off('shutdown');
this.events.off('barrelDrop')
}
startLose() {
if (this.isLosing) return;
this.isLosing = true;
this.isLosingStartTime = this.totalTime;
this.add.text(440, 150, 'You lose', { fontSize: '90px', align: 'center', color: 'black', fontFamily: 'sans-serif'})
.setScrollFactor(0);
}
startWin() {
if (this.isWinning) return;
this.isWinning = true;
this.isWinningStartTime = this.totalTime;
this.add.text(440, 150, 'You win!', { fontSize: '90px', align: 'center', color: 'black', fontFamily: 'sans-serif'})
.setScrollFactor(0);
}
fillRoad(offset?: number) {
offset = offset || -145
const fillHeights = [1, 1, 2, 3, 3, 3, 4, 4, 3, 3, 3, 2, 1, 1];
const fillX = this.truck.chasis.x + offset - (fillHeights.length / 2 * HEIGHTMAP_RESOLUTION);
const fillHeightMapX = Math.floor(fillX / HEIGHTMAP_RESOLUTION);
// Draw in the road fill
const yOffset = 720 - 192;
const FILL_HEIGHT = 64;
for (let fillHeightX = 0; fillHeightX < fillHeights.length; fillHeightX++) {
this.roadFillContainer.add(this.add.rectangle(
(fillHeightMapX + fillHeightX) * HEIGHTMAP_RESOLUTION + HEIGHTMAP_RESOLUTION / 2,
yOffset + (this.terrain.heightMap[fillHeightMapX + fillHeightX] - fillHeights[fillHeightX]) * HEIGHTMAP_YRESOLUTION + FILL_HEIGHT / 2,
HEIGHTMAP_RESOLUTION,
| {
this.terrain = new Terrain();
this.truck = new Truck();
this.totalTime = 0;
this.startTruckTime = 0;
this.startTruckX = 0;
this.truckProgress = new ProgressCounter();
this.isLosing = false;
this.isLosingStartTime = 0;
this.isWinning = false;
this.isWinningStartTime = 0;
this.score = 0;
this.keySpace = this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.SPACE);
this.keyA = this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.A);
this.keyD = this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.D);
this.events.on('shutdown', () => this.stop());
} | identifier_body |
main.ts | this.terrain = new Terrain();
this.truck = new Truck();
this.totalTime = 0;
this.startTruckTime = 0;
this.startTruckX = 0;
this.truckProgress = new ProgressCounter();
this.isLosing = false;
this.isLosingStartTime = 0;
this.isWinning = false;
this.isWinningStartTime = 0;
this.score = 0;
this.keySpace = this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.SPACE);
this.keyA = this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.A);
this.keyD = this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.D);
this.events.on('shutdown', () => this.stop());
}
public preload() {
Vehicles.PickupTruck.preload(this);
this.sceneData = (<BetweenLevelState>this.scene.settings.data) || new BetweenLevelState();
if (this.sceneData.startImmediately) {
this.isScrolling = true;
} else {
this.isScrolling = false;
}
this.truck.preload(this);
this.load.image('ground-tiles', '../assets/placeholder/ground_tiles.png');
this.load.image('sky', '../assets/placeholder/sky.png');
this.load.image('flag', '../assets/placeholder/finish_flag.png');
this.load.image('tree1', '../assets/placeholder/kenney_foliagePack_005.png');
this.load.image('tree2', '../assets/placeholder/kenney_foliagePack_006.png');
this.load.image('potholetruck', '../assets/placeholder/potholetruck.png');
this.load.image('music', '../assets/icons/music.png');
this.load.image('nomusic', '../assets/icons/nomusic.png');
this.load.image('sound', '../assets/icons/sound.png');
this.load.image('nosound', '../assets/icons/nosound.png');
this.load.audio('backgroundMusic', ['../assets/music/Great_Hope_Mono.mp3', '../assets/music/Great_Hope_Mono.ogg']);
this.load.tilemapTiledJSON('map', '../assets/tiled/level0.json');
}
public create() {
if(DEBUG) {
this.matter.add.mouseSpring();
}
this.skyBackground = this.add.sprite(0, 0, 'sky').setOrigin(0, 0).setScrollFactor(0);
if (!this.sceneData.startImmediately) {
const scrollButton = this.add.text(100, 50, 'Go!', { fontSize: '30px' })
.setInteractive();
scrollButton.on('pointerdown', () => {
this.isScrolling = true;
scrollButton.setVisible(false);
});
}
this.instructionText = this.add.text(440, 150, 'Use ←/→ cursor keys to move\nUse A and D to fill potholes', { fontSize: '30px', align: 'center', color: 'black', fontFamily: 'sans-serif'})
.setScrollFactor(0);
this.add.text(6302, 300, 'Do your duty', { fontSize: '30px', align: 'center', color: 'black', fontFamily: 'sans-serif'});
// const roadFillButton = this.add.text(1100, 50, 'Fill', { fontSize: '30px' })
// .setInteractive()
// .setScrollFactor(0);
// roadFillButton.on('pointerdown', () => this.fillRoad());
this.scoreText = this.add.text(140, 150, 'Damage: 0 / 5', { fontSize: '30px', align: 'center', color: 'red', fontFamily: 'sans-serif'})
.setScrollFactor(0);
this.input.keyboard.addKey('SPACE')
this.keySpace
.on('down', () => this.fillRoad());
this.keyA
.on('down', () => this.fillRoad(-190));
this.keyD
.on('down', () => this.fillRoad(210));
this.roadFillContainer = this.add.container(0, 0);
this.backgroundContainer = this.add.container(0, 0);
this.truck.createTruck(this, {x:900, y: 300});
this.pickupTruck = new Vehicles.PickupTruck(this);
this.events.on('barrelDrop', function() {
this.score++
this.scoreText.setText('Damage: ' + this.score + ' / 5')
if(this.score >= 5) {
this.startLose()
}
}, this)
this.cursors = this.input.keyboard.createCursorKeys();
this.foregroundContainer = this.add.container(0, 0);
this.terrain.create(this, this.sceneData.level, this.backgroundContainer, this.foregroundContainer);
let mute = this.sound.mute;
this.sound.volume = 0.5;
if (DEBUG) {
mute = true;
this.sound.mute = mute;
}
this.muteButton = this.add.sprite(640 - 8, 30, mute ? 'nomusic' : 'music')
.setInteractive()
.setScrollFactor(0);
this.muteButton.setTexture(mute ? 'nomusic' : 'music');
this.muteButton.on('pointerdown', () => {
let nextMute = !this.sound.mute;
this.sound.mute = nextMute;
this.muteButton.setTexture(nextMute ? 'nomusic' : 'music');
});
this.music = this.sound.add('backgroundMusic', {loop: true});
this.music.play();
}
public stop() {
this.music.stop()
this.keySpace.off('down');
this.keyA.off('down');
this.keyD.off('down');
this.input.keyboard.removeKey(this.keySpace);
this.input.keyboard.removeKey(this.keyA);
this.input.keyboard.removeKey(this.keyD);
this.events.off('shutdown');
this.events.off('barrelDrop')
}
startLose() {
if (this.isLosing) return;
this.isLosing = true;
this.isLosingStartTime = this.totalTime;
this.add.text(440, 150, 'You lose', { fontSize: '90px', align: 'center', color: 'black', fontFamily: 'sans-serif'})
.setScrollFactor(0);
}
startWin() {
if (this.isWinning) return;
this.isWinning = true;
this.isWinningStartTime = this.totalTime;
this.add.text(440, 150, 'You win!', { fontSize: '90px', align: 'center', color: 'black', fontFamily: 'sans-serif'})
.setScrollFactor(0);
}
fillRoad(offset?: number) {
offset = offset || -145
const fillHeights = [1, 1, 2, 3, 3, 3, 4, 4, 3, 3, 3, 2, 1, 1];
const fillX = this.truck.chasis.x + offset - (fillHeights.length / 2 * HEIGHTMAP_RESOLUTION);
const fillHeightMapX = Math.floor(fillX / HEIGHTMAP_RESOLUTION);
// Draw in the road fill
const yOffset = 720 - 192;
const FILL_HEIGHT = 64;
for (let fillHeightX = 0; fillHeightX < fillHeights.length; fillHeightX++) {
this.roadFillContainer.add(this.add.rectangle(
(fillHeightMapX + fillHeightX) * HEIGHTMAP_RESOLUTION + HEIGHTMAP_RESOLUTION / 2,
yOffset + (this.terrain.heightMap[fillHeightMapX + fillHeightX] - fillHeights[fillHeightX]) * HEIGHTMAP_YRESOLUTION + FILL_HEIGHT / 2,
HEIGHTMAP_RESOLUTION,
FILL_HEIGHT,
0x555555));
}
// Adjust the height map and the physics
for (let fillHeightX = 0; fillHeightX < fillHeights.length; fillHeightX++) {
this.terrain.heightMap[fillHeightMapX + fillHeightX] -= fillHeights[fillHeightX];
// Will this cause problems to have overlapping physics objects this way?
this.terrain.createPhysicsRectangleForHeightMap(fillHeightMapX + fillHeightX, fillHeightMapX + fillHeightX + 1, yOffset, this);
}
}
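// Illustrative sketch (not from the original source): fillRoad() stamps the
// fillHeights profile [1,1,2,3,3,3,4,4,3,3,3,2,1,1] onto the terrain around a
// point `offset` pixels from the truck chassis. Assuming HEIGHTMAP_RESOLUTION and
// HEIGHTMAP_YRESOLUTION are the horizontal/vertical grid sizes defined elsewhere
// in the project, each of the 14 segments gets a grey rectangle drawn over it and
// its heightMap entry reduced by 1-4 units, e.g.:
// this.fillRoad(); // fill centred ~145 px behind the chassis (default)
// this.fillRoad(210); // fill ahead of the chassis (the 'D' key binding)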
public update(time, delta) {
this.totalTime += delta;
if(this.truck.chasis) {
this.cameras.main.scrollX = this.truck.chasis.x + CAMERA_TRUCK_X_OFFSET;
this.truck.applyRumble();
}
if (this.cursors.left.isDown) {
this.truck.applyDrivingForce(0.018, -1);
} else if (this.cursors.right.isDown) {
this.truck.applyDrivingForce(0.018, 1);
}
if (this.cameras.main.scrollX < 0) this.cameras.main.scrollX = 0;
if (this.isScrolling && !this.isLosing) { | random_line_split |
||
main.ts |
this.progress.splice(0, cutoff);
this.times.splice(0, cutoff);
this.totalTime = 0;
this.times.forEach( time => this.totalTime += time );
}
}
getAverage() {
let total = 0;
for (let n = 0; n < this.progress.length; n++) {
total += this.progress[n] * this.times[n];
}
return total / this.totalTime;
}
}
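// Worked example (sketch): if the recorded samples are progress = [0.2, 0.6] with
// times = [100, 300], then totalTime = 400 and
// getAverage() = (0.2*100 + 0.6*300) / 400 = 0.5, i.e. a time-weighted average of
// the recent progress samples.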
export class GameScene extends Phaser.Scene {
private square: Phaser.GameObjects.Rectangle & { body: Phaser.Physics.Arcade.Body };
private terrain: Terrain = new Terrain();
private truck: Truck = new Truck();
private pickupTruck: Vehicles.PickupTruck
public cursors: Phaser.Types.Input.Keyboard.CursorKeys
isScrolling:boolean = false;
totalTime:number;
startTruckTime:number;
startTruckX:number;
truckProgress:ProgressCounter;
isLosing:boolean;
isLosingStartTime:number;
isWinning:boolean;
isWinningStartTime:number;
sceneData : BetweenLevelState;
skyBackground: Phaser.GameObjects.Sprite;
roadFillContainer: Phaser.GameObjects.Container;
backgroundContainer: Phaser.GameObjects.Container;
foregroundContainer: Phaser.GameObjects.Container;
instructionText: Phaser.GameObjects.Text;
scoreText: Phaser.GameObjects.Text;
score: number = 0;
keySpace: Phaser.Input.Keyboard.Key;
keyA: Phaser.Input.Keyboard.Key;
keyD: Phaser.Input.Keyboard.Key;
music: Phaser.Sound.BaseSound;
muteButton: Phaser.GameObjects.Sprite;
constructor() {
super(sceneConfig);
}
public init() {
this.terrain = new Terrain();
this.truck = new Truck();
this.totalTime = 0;
this.startTruckTime = 0;
this.startTruckX = 0;
this.truckProgress = new ProgressCounter();
this.isLosing = false;
this.isLosingStartTime = 0;
this.isWinning = false;
this.isWinningStartTime = 0;
this.score = 0;
this.keySpace = this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.SPACE);
this.keyA = this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.A);
this.keyD = this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.D);
this.events.on('shutdown', () => this.stop());
}
public preload() {
Vehicles.PickupTruck.preload(this);
this.sceneData = (<BetweenLevelState>this.scene.settings.data) || new BetweenLevelState();
if (this.sceneData.startImmediately) {
this.isScrolling = true;
} else {
this.isScrolling = false;
}
this.truck.preload(this);
this.load.image('ground-tiles', '../assets/placeholder/ground_tiles.png');
this.load.image('sky', '../assets/placeholder/sky.png');
this.load.image('flag', '../assets/placeholder/finish_flag.png');
this.load.image('tree1', '../assets/placeholder/kenney_foliagePack_005.png');
this.load.image('tree2', '../assets/placeholder/kenney_foliagePack_006.png');
this.load.image('potholetruck', '../assets/placeholder/potholetruck.png');
this.load.image('music', '../assets/icons/music.png');
this.load.image('nomusic', '../assets/icons/nomusic.png');
this.load.image('sound', '../assets/icons/sound.png');
this.load.image('nosound', '../assets/icons/nosound.png');
this.load.audio('backgroundMusic', ['../assets/music/Great_Hope_Mono.mp3', '../assets/music/Great_Hope_Mono.ogg']);
this.load.tilemapTiledJSON('map', '../assets/tiled/level0.json');
}
public create() {
if(DEBUG) {
this.matter.add.mouseSpring();
}
this.skyBackground = this.add.sprite(0, 0, 'sky').setOrigin(0, 0).setScrollFactor(0);
if (!this.sceneData.startImmediately) {
const scrollButton = this.add.text(100, 50, 'Go!', { fontSize: '30px' })
.setInteractive();
scrollButton.on('pointerdown', () => {
this.isScrolling = true;
scrollButton.setVisible(false);
});
}
this.instructionText = this.add.text(440, 150, 'Use ←/→ cursor keys to move\nUse A and D to fill potholes', { fontSize: '30px', align: 'center', color: 'black', fontFamily: 'sans-serif'})
.setScrollFactor(0);
this.add.text(6302, 300, 'Do your duty', { fontSize: '30px', align: 'center', color: 'black', fontFamily: 'sans-serif'});
// const roadFillButton = this.add.text(1100, 50, 'Fill', { fontSize: '30px' })
// .setInteractive()
// .setScrollFactor(0);
// roadFillButton.on('pointerdown', () => this.fillRoad());
this.scoreText = this.add.text(140, 150, 'Damage: 0 / 5', { fontSize: '30px', align: 'center', color: 'red', fontFamily: 'sans-serif'})
.setScrollFactor(0);
this.input.keyboard.addKey('SPACE')
this.keySpace
.on('down', () => this.fillRoad());
this.keyA
.on('down', () => this.fillRoad(-190));
this.keyD
.on('down', () => this.fillRoad(210));
this.roadFillContainer = this.add.container(0, 0);
this.backgroundContainer = this.add.container(0, 0);
this.truck.createTruck(this, {x:900, y: 300});
this.pickupTruck = new Vehicles.PickupTruck(this);
this.events.on('barrelDrop', function() {
this.score++
this.scoreText.setText('Damage: ' + this.score + ' / 5')
if(this.score >= 5) {
this.startLose()
}
}, this)
this.cursors = this.input.keyboard.createCursorKeys();
this.foregroundContainer = this.add.container(0, 0);
this.terrain.create(this, this.sceneData.level, this.backgroundContainer, this.foregroundContainer);
let mute = this.sound.mute;
this.sound.volume = 0.5;
if (DEBUG) {
mute = true;
this.sound.mute = mute;
}
this.muteButton = this.add.sprite(640 - 8, 30, mute ? 'nomusic' : 'music')
.setInteractive()
.setScrollFactor(0);
this.muteButton.setTexture(mute ? 'nomusic' : 'music');
this.muteButton.on('pointerdown', () => {
let nextMute = !this.sound.mute;
this.sound.mute = nextMute;
this.muteButton.setTexture(nextMute ? 'nomusic' : 'music');
});
this.music = this.sound.add('backgroundMusic', {loop: true});
this.music.play();
}
public stop() {
this.music.stop()
this.keySpace.off('down');
this.keyA.off('down');
this.keyD.off('down');
this.input.keyboard.removeKey(this.keySpace);
this.input.keyboard.removeKey(this.keyA);
this.input.keyboard.removeKey(this.keyD);
this.events.off('shutdown');
this.events.off('barrelDrop')
}
startLose() {
if (this.isLosing) return;
this.isLosing = true;
this.isLosingStartTime = this.totalTime;
this.add.text(440, 150, 'You lose', { fontSize: '90px', align: 'center', color: 'black', fontFamily: 'sans-serif'})
.setScrollFactor(0);
}
startWin() {
if (this.isWinning) return;
this.isWinning = true;
this.isWinningStartTime = this.totalTime;
this.add.text(440, 150, 'You win!', { fontSize: '90px', align: 'center', color: 'black', fontFamily: 'sans-serif'})
.setScrollFactor(0);
}
fillRoad(offset?: number) {
offset = offset || -145
const fillHeights = [1, 1, 2, 3, 3, 3, 4, 4, 3, 3, 3, 2, 1, 1];
const fillX = this.truck.chasis.x + offset - (fillHeights.length / 2 * HEIGHTMAP_RESOLUTION);
const fillHeightMapX = Math.floor(fillX / HEIGHTMAP_RESOLUTION);
// Draw in the road fill
const yOffset = 720 - 192;
const FILL_HEIGHT = 6 | {
time -= this.times[n];
if (time < 1000) {
cutoff = n;
break;
}
} | conditional_block |
|
main.ts | [n] * this.times[n];
}
return total / this.totalTime;
}
}
export class GameScene extends Phaser.Scene {
private square: Phaser.GameObjects.Rectangle & { body: Phaser.Physics.Arcade.Body };
private terrain: Terrain = new Terrain();
private truck: Truck = new Truck();
private pickupTruck: Vehicles.PickupTruck
public cursors: Phaser.Types.Input.Keyboard.CursorKeys
isScrolling:boolean = false;
totalTime:number;
startTruckTime:number;
startTruckX:number;
truckProgress:ProgressCounter;
isLosing:boolean;
isLosingStartTime:number;
isWinning:boolean;
isWinningStartTime:number;
sceneData : BetweenLevelState;
skyBackground: Phaser.GameObjects.Sprite;
roadFillContainer: Phaser.GameObjects.Container;
backgroundContainer: Phaser.GameObjects.Container;
foregroundContainer: Phaser.GameObjects.Container;
instructionText: Phaser.GameObjects.Text;
scoreText: Phaser.GameObjects.Text;
score: number = 0;
keySpace: Phaser.Input.Keyboard.Key;
keyA: Phaser.Input.Keyboard.Key;
keyD: Phaser.Input.Keyboard.Key;
music: Phaser.Sound.BaseSound;
muteButton: Phaser.GameObjects.Sprite;
constructor() {
super(sceneConfig);
}
public init() {
this.terrain = new Terrain();
this.truck = new Truck();
this.totalTime = 0;
this.startTruckTime = 0;
this.startTruckX = 0;
this.truckProgress = new ProgressCounter();
this.isLosing = false;
this.isLosingStartTime = 0;
this.isWinning = false;
this.isWinningStartTime = 0;
this.score = 0;
this.keySpace = this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.SPACE);
this.keyA = this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.A);
this.keyD = this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.D);
this.events.on('shutdown', () => this.stop());
}
public | () {
Vehicles.PickupTruck.preload(this);
this.sceneData = (<BetweenLevelState>this.scene.settings.data) || new BetweenLevelState();
if (this.sceneData.startImmediately) {
this.isScrolling = true;
} else {
this.isScrolling = false;
}
this.truck.preload(this);
this.load.image('ground-tiles', '../assets/placeholder/ground_tiles.png');
this.load.image('sky', '../assets/placeholder/sky.png');
this.load.image('flag', '../assets/placeholder/finish_flag.png');
this.load.image('tree1', '../assets/placeholder/kenney_foliagePack_005.png');
this.load.image('tree2', '../assets/placeholder/kenney_foliagePack_006.png');
this.load.image('potholetruck', '../assets/placeholder/potholetruck.png');
this.load.image('music', '../assets/icons/music.png');
this.load.image('nomusic', '../assets/icons/nomusic.png');
this.load.image('sound', '../assets/icons/sound.png');
this.load.image('nosound', '../assets/icons/nosound.png');
this.load.audio('backgroundMusic', ['../assets/music/Great_Hope_Mono.mp3', '../assets/music/Great_Hope_Mono.ogg']);
this.load.tilemapTiledJSON('map', '../assets/tiled/level0.json');
}
public create() {
if(DEBUG) {
this.matter.add.mouseSpring();
}
this.skyBackground = this.add.sprite(0, 0, 'sky').setOrigin(0, 0).setScrollFactor(0);
if (!this.sceneData.startImmediately) {
const scrollButton = this.add.text(100, 50, 'Go!', { fontSize: '30px' })
.setInteractive();
scrollButton.on('pointerdown', () => {
this.isScrolling = true;
scrollButton.setVisible(false);
});
}
this.instructionText = this.add.text(440, 150, 'Use ←/→ cursor keys to move\nUse A and D to fill potholes', { fontSize: '30px', align: 'center', color: 'black', fontFamily: 'sans-serif'})
.setScrollFactor(0);
this.add.text(6302, 300, 'Do your duty', { fontSize: '30px', align: 'center', color: 'black', fontFamily: 'sans-serif'});
// const roadFillButton = this.add.text(1100, 50, 'Fill', { fontSize: '30px' })
// .setInteractive()
// .setScrollFactor(0);
// roadFillButton.on('pointerdown', () => this.fillRoad());
this.scoreText = this.add.text(140, 150, 'Damage: 0 / 5', { fontSize: '30px', align: 'center', color: 'red', fontFamily: 'sans-serif'})
.setScrollFactor(0);
this.input.keyboard.addKey('SPACE')
this.keySpace
.on('down', () => this.fillRoad());
this.keyA
.on('down', () => this.fillRoad(-190));
this.keyD
.on('down', () => this.fillRoad(210));
this.roadFillContainer = this.add.container(0, 0);
this.backgroundContainer = this.add.container(0, 0);
this.truck.createTruck(this, {x:900, y: 300});
this.pickupTruck = new Vehicles.PickupTruck(this);
this.events.on('barrelDrop', function() {
this.score++
this.scoreText.setText('Damage: ' + this.score + ' / 5')
if(this.score >= 5) {
this.startLose()
}
}, this)
this.cursors = this.input.keyboard.createCursorKeys();
this.foregroundContainer = this.add.container(0, 0);
this.terrain.create(this, this.sceneData.level, this.backgroundContainer, this.foregroundContainer);
let mute = this.sound.mute;
this.sound.volume = 0.5;
if (DEBUG) {
mute = true;
this.sound.mute = mute;
}
this.muteButton = this.add.sprite(640 - 8, 30, mute ? 'nomusic' : 'music')
.setInteractive()
.setScrollFactor(0);
this.muteButton.setTexture(mute ? 'nomusic' : 'music');
this.muteButton.on('pointerdown', () => {
let nextMute = !this.sound.mute;
this.sound.mute = nextMute;
this.muteButton.setTexture(nextMute ? 'nomusic' : 'music');
});
this.music = this.sound.add('backgroundMusic', {loop: true});
this.music.play();
}
public stop() {
this.music.stop()
this.keySpace.off('down');
this.keyA.off('down');
this.keyD.off('down');
this.input.keyboard.removeKey(this.keySpace);
this.input.keyboard.removeKey(this.keyA);
this.input.keyboard.removeKey(this.keyD);
this.events.off('shutdown');
this.events.off('barrelDrop')
}
startLose() {
if (this.isLosing) return;
this.isLosing = true;
this.isLosingStartTime = this.totalTime;
this.add.text(440, 150, 'You lose', { fontSize: '90px', align: 'center', color: 'black', fontFamily: 'sans-serif'})
.setScrollFactor(0);
}
startWin() {
if (this.isWinning) return;
this.isWinning = true;
this.isWinningStartTime = this.totalTime;
this.add.text(440, 150, 'You win!', { fontSize: '90px', align: 'center', color: 'black', fontFamily: 'sans-serif'})
.setScrollFactor(0);
}
fillRoad(offset?: number) {
offset = offset || -145
const fillHeights = [1, 1, 2, 3, 3, 3, 4, 4, 3, 3, 3, 2, 1, 1];
const fillX = this.truck.chasis.x + offset - (fillHeights.length / 2 * HEIGHTMAP_RESOLUTION);
const fillHeightMapX = Math.floor(fillX / HEIGHTMAP_RESOLUTION);
// Draw in the road fill
const yOffset = 720 - 192;
const FILL_HEIGHT = 64;
for (let fillHeightX = 0; fillHeightX < fillHeights.length; fillHeightX++) {
this.roadFillContainer.add(this.add.rectangle(
(fillHeightMapX + fillHeightX) * HEIGHTMAP_RESOLUTION + HEIGHTMAP_RESOLUTION / 2,
yOffset + (this.terrain.heightMap[fillHeightMapX + fillHeightX] - fillHeights[fillHeightX]) * HEIGHTMAP_YRESOLUTION + FILL_HEIGHT / 2,
HEIGHTMAP_RESOLUTION,
F | preload | identifier_name |
lib.rs | of [`Child::wait_with_output`] to read output while setting a timeout.
//! This crate aims to fill in those gaps and simplify the implementation,
//! now that [`Receiver::recv_timeout`] exists.
//!
//! # Examples
//!
//! ```
//! use std::io;
//! use std::process::Command;
//! use std::process::Stdio;
//! use std::time::Duration;
//!
//! use process_control::ChildExt;
//! use process_control::Timeout;
//!
//! let process = Command::new("echo")
//! .arg("hello")
//! .stdout(Stdio::piped())
//! .spawn()?;
//!
//! let output = process
//! .with_output_timeout(Duration::from_secs(1))
//! .terminating()
//! .wait()?
//! .ok_or_else(|| {
//! io::Error::new(io::ErrorKind::TimedOut, "Process timed out")
//! })?;
//! assert_eq!(b"hello", &output.stdout[..5]);
//! #
//! # Ok::<_, io::Error>(())
//! ```
//!
//! [crossbeam-channel]: https://crates.io/crates/crossbeam-channel
//! [`Receiver::recv_timeout`]: ::std::sync::mpsc::Receiver::recv_timeout
//! [sealed]: https://rust-lang.github.io/api-guidelines/future-proofing.html#c-sealed
//! [wait-timeout]: https://crates.io/crates/wait-timeout
// Only require a nightly compiler when building documentation for docs.rs.
// This is a private option that should not be used.
// https://github.com/rust-lang/docs.rs/issues/147#issuecomment-389544407
#![cfg_attr(process_control_docs_rs, feature(doc_cfg))]
#![warn(unused_results)]
use std::fmt;
use std::fmt::Display;
use std::fmt::Formatter;
use std::io;
use std::process;
use std::process::Child;
use std::time::Duration;
#[cfg_attr(unix, path = "unix.rs")]
#[cfg_attr(windows, path = "windows.rs")]
mod imp;
mod timeout;
/// A wrapper that stores enough information to terminate a process.
///
/// Instances can only be constructed using [`ChildExt::terminator`].
#[derive(Debug)]
pub struct Terminator(imp::Handle);
impl Terminator {
/// Terminates a process as immediately as the operating system allows.
///
/// Behavior should be equivalent to calling [`Child::kill`] for the same
/// process. However, this method does not require a reference of any kind
/// to the [`Child`] instance of the process, meaning that it can be called
/// even in some unsafe circumstances.
///
/// # Safety
///
/// If the process is no longer running, a different process may be
/// terminated on some operating systems. Reuse of process identifiers
/// makes it impossible for this method to determine if the intended
/// process still exists.
///
/// Thus, this method should not be used in production code, as
/// [`Child::kill`] more safely provides the same functionality. It is only
/// used for testing in this crate and may be used similarly in others.
///
/// # Examples
///
/// ```
/// # use std::io;
/// use std::path::Path;
/// use std::process::Command;
/// use std::thread;
///
/// use process_control::ChildExt;
///
/// let dir = Path::new("hello");
/// let mut process = Command::new("mkdir").arg(dir).spawn()?;
/// let terminator = process.terminator()?;
///
/// let thread = thread::spawn(move || process.wait());
/// if !dir.exists() {
/// // [process.kill] requires a mutable reference.
/// unsafe { terminator.terminate()? }
/// }
///
/// let exit_status = thread.join().expect("thread panicked")?;
/// println!("exited {}", exit_status);
/// #
/// # Ok::<_, io::Error>(())
/// ```
#[inline]
pub unsafe fn terminate(&self) -> io::Result<()> {
self.0.terminate()
}
}
/// Equivalent to [`process::ExitStatus`] but allows for greater accuracy.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct ExitStatus(imp::ExitStatus);
impl ExitStatus {
/// Equivalent to [`process::ExitStatus::success`].
#[inline]
#[must_use]
pub fn success(self) -> bool {
self.0.success()
}
/// Equivalent to [`process::ExitStatus::code`], but a more accurate value
/// will be returned if possible.
#[inline]
#[must_use]
pub fn code(self) -> Option<i64> {
self.0.code().map(Into::into)
}
/// Equivalent to [`ExitStatusExt::signal`].
///
/// [`ExitStatusExt::signal`]: ::std::os::unix::process::ExitStatusExt::signal
#[cfg(any(unix, doc))]
#[cfg_attr(process_control_docs_rs, doc(cfg(unix)))]
#[inline]
#[must_use]
pub fn signal(self) -> Option<::std::os::raw::c_int> {
self.0.signal()
}
}
impl Display for ExitStatus {
#[inline]
fn | (&self, formatter: &mut Formatter<'_>) -> fmt::Result {
self.0.fmt(formatter)
}
}
impl From<process::ExitStatus> for ExitStatus {
#[inline]
fn from(value: process::ExitStatus) -> Self {
#[cfg_attr(windows, allow(clippy::useless_conversion))]
Self(value.into())
}
}
/// Equivalent to [`process::Output`] but holds an instance of [`ExitStatus`]
/// from this crate.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Output {
/// Equivalent to [`process::Output::status`].
pub status: ExitStatus,
/// Equivalent to [`process::Output::stdout`].
pub stdout: Vec<u8>,
/// Equivalent to [`process::Output::stderr`].
pub stderr: Vec<u8>,
}
impl From<process::Output> for Output {
#[inline]
fn from(value: process::Output) -> Self {
Self {
status: value.status.into(),
stdout: value.stdout,
stderr: value.stderr,
}
}
}
/// A temporary wrapper for a process timeout.
pub trait Timeout: private::Sealed {
/// The type returned by [`wait`].
///
/// [`wait`]: Self::wait
type Result;
/// Causes [`wait`] to never suppress an error.
///
/// Typically, errors terminating the process will be ignored, as they are
/// often less important than the result. However, when this method is
/// called, those errors will be returned as well.
///
/// [`wait`]: Self::wait
#[must_use]
fn strict_errors(self) -> Self;
/// Causes the process to be terminated if it exceeds the time limit.
///
/// Process identifier reuse by the system will be mitigated. There should
/// never be a scenario that causes an unintended process to be terminated.
#[must_use]
fn terminating(self) -> Self;
/// Runs the process to completion, aborting if it exceeds the time limit.
///
/// At least one thread will be created to wait on the process without
/// blocking the current thread.
///
/// If the time limit is exceeded before the process finishes, `Ok(None)`
/// will be returned. However, the process will not be terminated in that
/// case unless [`terminating`] is called beforehand. It is recommended to
/// always call that method to allow system resources to be freed.
///
/// The stdin handle to the process, if it exists, will be closed before
/// waiting. Otherwise, the process would assuredly time out when reading
/// from that pipe.
///
/// This method cannot guarantee that the same [`io::ErrorKind`] variants
/// will be returned in the future for the same types of failures. Allowing
/// these breakages is required to enable calling [`Child::kill`]
/// internally.
///
/// [`terminating`]: Self::terminating
fn wait(self) -> io::Result<Option<Self::Result>>;
}
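// A rough usage sketch combining the options above (illustrative only, mirroring
// the documented examples elsewhere in this file):
//
// let result = Command::new("sleep").arg("5").spawn()?
//     .with_timeout(Duration::from_secs(1))
//     .strict_errors()   // also surface errors from terminating the process
//     .terminating()     // kill the child if the time limit is exceeded
//     .wait()?;          // Ok(None) means the time limit was hit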
/// Extensions to [`Child`] for easily terminating processes.
///
/// For more information, see [the module-level documentation][crate].
pub trait ChildExt<'a>: private::Sealed {
/// The type returned by [`with_timeout`].
///
/// [`with_timeout`]: Self::with_timeout
type ExitStatusTimeout: 'a + Timeout<Result = ExitStatus>;
/// The type returned by [`with_output_timeout`].
///
/// [`with_output_timeout`]: Self::with_output_timeout
type OutputTimeout: Timeout<Result = Output>;
/// Creates an instance of [`Terminator`] for this process.
///
/// # Examples
///
/// ```
/// # use std::io;
/// use std::process::Command;
///
/// use process_control::ChildExt;
///
/// let process = Command::new("echo").spawn()?;
/// let terminator = process.terminator()?;
/// #
/// # Ok::<_, io::Error>(())
/// ```
fn terminator(&self) -> io::Result<Terminator>;
/// Creates an instance of [`Timeout`] that yields | fmt | identifier_name |
lib.rs | {
/// Terminates a process as immediately as the operating system allows.
///
/// Behavior should be equivalent to calling [`Child::kill`] for the same
/// process. However, this method does not require a reference of any kind
/// to the [`Child`] instance of the process, meaning that it can be called
/// even in some unsafe circumstances.
///
/// # Safety
///
/// If the process is no longer running, a different process may be
/// terminated on some operating systems. Reuse of process identifiers
/// makes it impossible for this method to determine if the intended
/// process still exists.
///
/// Thus, this method should not be used in production code, as
/// [`Child::kill`] more safely provides the same functionality. It is only
/// used for testing in this crate and may be used similarly in others.
///
/// # Examples
///
/// ```
/// # use std::io;
/// use std::path::Path;
/// use std::process::Command;
/// use std::thread;
///
/// use process_control::ChildExt;
///
/// let dir = Path::new("hello");
/// let mut process = Command::new("mkdir").arg(dir).spawn()?;
/// let terminator = process.terminator()?;
///
/// let thread = thread::spawn(move || process.wait());
/// if !dir.exists() {
/// // [process.kill] requires a mutable reference.
/// unsafe { terminator.terminate()? }
/// }
///
/// let exit_status = thread.join().expect("thread panicked")?;
/// println!("exited {}", exit_status);
/// #
/// # Ok::<_, io::Error>(())
/// ```
#[inline]
pub unsafe fn terminate(&self) -> io::Result<()> {
self.0.terminate()
}
}
/// Equivalent to [`process::ExitStatus`] but allows for greater accuracy.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct ExitStatus(imp::ExitStatus);
impl ExitStatus {
/// Equivalent to [`process::ExitStatus::success`].
#[inline]
#[must_use]
pub fn success(self) -> bool {
self.0.success()
}
/// Equivalent to [`process::ExitStatus::code`], but a more accurate value
/// will be returned if possible.
#[inline]
#[must_use]
pub fn code(self) -> Option<i64> {
self.0.code().map(Into::into)
}
/// Equivalent to [`ExitStatusExt::signal`].
///
/// [`ExitStatusExt::signal`]: ::std::os::unix::process::ExitStatusExt::signal
#[cfg(any(unix, doc))]
#[cfg_attr(process_control_docs_rs, doc(cfg(unix)))]
#[inline]
#[must_use]
pub fn signal(self) -> Option<::std::os::raw::c_int> {
self.0.signal()
}
}
impl Display for ExitStatus {
#[inline]
fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {
self.0.fmt(formatter)
}
}
impl From<process::ExitStatus> for ExitStatus {
#[inline]
fn from(value: process::ExitStatus) -> Self {
#[cfg_attr(windows, allow(clippy::useless_conversion))]
Self(value.into())
}
}
/// Equivalent to [`process::Output`] but holds an instance of [`ExitStatus`]
/// from this crate.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Output {
/// Equivalent to [`process::Output::status`].
pub status: ExitStatus,
/// Equivalent to [`process::Output::stdout`].
pub stdout: Vec<u8>,
/// Equivalent to [`process::Output::stderr`].
pub stderr: Vec<u8>,
}
impl From<process::Output> for Output {
#[inline]
fn from(value: process::Output) -> Self {
Self {
status: value.status.into(),
stdout: value.stdout,
stderr: value.stderr,
}
}
}
/// A temporary wrapper for a process timeout.
pub trait Timeout: private::Sealed {
/// The type returned by [`wait`].
///
/// [`wait`]: Self::wait
type Result;
/// Causes [`wait`] to never suppress an error.
///
/// Typically, errors terminating the process will be ignored, as they are
/// often less important than the result. However, when this method is
/// called, those errors will be returned as well.
///
/// [`wait`]: Self::wait
#[must_use]
fn strict_errors(self) -> Self;
/// Causes the process to be terminated if it exceeds the time limit.
///
/// Process identifier reuse by the system will be mitigated. There should
/// never be a scenario that causes an unintended process to be terminated.
#[must_use]
fn terminating(self) -> Self;
/// Runs the process to completion, aborting if it exceeds the time limit.
///
/// At least one thread will be created to wait on the process without
/// blocking the current thread.
///
/// If the time limit is exceeded before the process finishes, `Ok(None)`
/// will be returned. However, the process will not be terminated in that
/// case unless [`terminating`] is called beforehand. It is recommended to
/// always call that method to allow system resources to be freed.
///
/// The stdin handle to the process, if it exists, will be closed before
/// waiting. Otherwise, the process would assuredly time out when reading
/// from that pipe.
///
/// This method cannot guarantee that the same [`io::ErrorKind`] variants
/// will be returned in the future for the same types of failures. Allowing
/// these breakages is required to enable calling [`Child::kill`]
/// internally.
///
/// [`terminating`]: Self::terminating
fn wait(self) -> io::Result<Option<Self::Result>>;
}
/// Extensions to [`Child`] for easily terminating processes.
///
/// For more information, see [the module-level documentation][crate].
pub trait ChildExt<'a>: private::Sealed {
/// The type returned by [`with_timeout`].
///
/// [`with_timeout`]: Self::with_timeout
type ExitStatusTimeout: 'a + Timeout<Result = ExitStatus>;
/// The type returned by [`with_output_timeout`].
///
/// [`with_output_timeout`]: Self::with_output_timeout
type OutputTimeout: Timeout<Result = Output>;
/// Creates an instance of [`Terminator`] for this process.
///
/// # Examples
///
/// ```
/// # use std::io;
/// use std::process::Command;
///
/// use process_control::ChildExt;
///
/// let process = Command::new("echo").spawn()?;
/// let terminator = process.terminator()?;
/// #
/// # Ok::<_, io::Error>(())
/// ```
fn terminator(&self) -> io::Result<Terminator>;
/// Creates an instance of [`Timeout`] that yields [`ExitStatus`] for this
/// process.
///
/// This method parallels [`Child::wait`] when the process must finish
/// within a time limit.
///
/// # Examples
///
/// ```
/// # use std::io;
/// use std::process::Command;
/// use std::time::Duration;
///
/// use process_control::ChildExt;
/// use process_control::Timeout;
///
/// let exit_status = Command::new("echo")
/// .spawn()?
/// .with_timeout(Duration::from_secs(1))
/// .terminating()
/// .wait()?
/// .expect("process timed out");
/// assert!(exit_status.success());
/// #
/// # Ok::<_, io::Error>(())
/// ```
#[must_use]
fn with_timeout(
&'a mut self,
time_limit: Duration,
) -> Self::ExitStatusTimeout;
/// Creates an instance of [`Timeout`] that yields [`Output`] for this
/// process.
///
/// This method parallels [`Child::wait_with_output`] when the process must
/// finish within a time limit.
///
/// # Examples
///
/// ```
/// # use std::io;
/// use std::process::Command;
/// use std::time::Duration;
///
/// use process_control::ChildExt;
/// use process_control::Timeout;
///
/// let output = Command::new("echo")
/// .spawn()?
/// .with_output_timeout(Duration::from_secs(1))
/// .terminating()
/// .wait()?
/// .expect("process timed out");
/// assert!(output.status.success());
/// #
/// # Ok::<_, io::Error>(())
/// ```
#[must_use]
fn with_output_timeout(self, time_limit: Duration) -> Self::OutputTimeout;
}
impl<'a> ChildExt<'a> for Child {
type ExitStatusTimeout = timeout::ExitStatusTimeout<'a>;
type OutputTimeout = timeout::OutputTimeout;
#[inline]
fn terminator(&self) -> io::Result<Terminator> | {
imp::Handle::new(self).map(Terminator)
} | identifier_body |
|
lib.rs | of [`Child::wait_with_output`] to read output while setting a timeout.
//! This crate aims to fill in those gaps and simplify the implementation,
//! now that [`Receiver::recv_timeout`] exists.
//!
//! # Examples
//!
//! ```
//! use std::io;
//! use std::process::Command;
//! use std::process::Stdio;
//! use std::time::Duration;
//!
//! use process_control::ChildExt;
//! use process_control::Timeout;
//!
//! let process = Command::new("echo")
//! .arg("hello")
//! .stdout(Stdio::piped())
//! .spawn()?;
//!
//! let output = process
//! .with_output_timeout(Duration::from_secs(1))
//! .terminating()
//! .wait()?
//! .ok_or_else(|| {
//! io::Error::new(io::ErrorKind::TimedOut, "Process timed out")
//! })?;
//! assert_eq!(b"hello", &output.stdout[..5]);
//! #
//! # Ok::<_, io::Error>(())
//! ```
//!
//! [crossbeam-channel]: https://crates.io/crates/crossbeam-channel
//! [`Receiver::recv_timeout`]: ::std::sync::mpsc::Receiver::recv_timeout
//! [sealed]: https://rust-lang.github.io/api-guidelines/future-proofing.html#c-sealed
//! [wait-timeout]: https://crates.io/crates/wait-timeout
// Only require a nightly compiler when building documentation for docs.rs.
// This is a private option that should not be used.
// https://github.com/rust-lang/docs.rs/issues/147#issuecomment-389544407
#![cfg_attr(process_control_docs_rs, feature(doc_cfg))]
#![warn(unused_results)]
use std::fmt;
use std::fmt::Display;
use std::fmt::Formatter;
use std::io;
use std::process;
use std::process::Child;
use std::time::Duration;
#[cfg_attr(unix, path = "unix.rs")]
#[cfg_attr(windows, path = "windows.rs")]
mod imp;
| ///
/// Instances can only be constructed using [`ChildExt::terminator`].
#[derive(Debug)]
pub struct Terminator(imp::Handle);
impl Terminator {
/// Terminates a process as immediately as the operating system allows.
///
/// Behavior should be equivalent to calling [`Child::kill`] for the same
/// process. However, this method does not require a reference of any kind
/// to the [`Child`] instance of the process, meaning that it can be called
/// even in some unsafe circumstances.
///
/// # Safety
///
/// If the process is no longer running, a different process may be
/// terminated on some operating systems. Reuse of process identifiers
/// makes it impossible for this method to determine if the intended
/// process still exists.
///
/// Thus, this method should not be used in production code, as
/// [`Child::kill`] more safely provides the same functionality. It is only
/// used for testing in this crate and may be used similarly in others.
///
/// # Examples
///
/// ```
/// # use std::io;
/// use std::path::Path;
/// use std::process::Command;
/// use std::thread;
///
/// use process_control::ChildExt;
///
/// let dir = Path::new("hello");
/// let mut process = Command::new("mkdir").arg(dir).spawn()?;
/// let terminator = process.terminator()?;
///
/// let thread = thread::spawn(move || process.wait());
/// if !dir.exists() {
/// // [process.kill] requires a mutable reference.
/// unsafe { terminator.terminate()? }
/// }
///
/// let exit_status = thread.join().expect("thread panicked")?;
/// println!("exited {}", exit_status);
/// #
/// # Ok::<_, io::Error>(())
/// ```
#[inline]
pub unsafe fn terminate(&self) -> io::Result<()> {
self.0.terminate()
}
}
/// Equivalent to [`process::ExitStatus`] but allows for greater accuracy.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct ExitStatus(imp::ExitStatus);
impl ExitStatus {
/// Equivalent to [`process::ExitStatus::success`].
#[inline]
#[must_use]
pub fn success(self) -> bool {
self.0.success()
}
/// Equivalent to [`process::ExitStatus::code`], but a more accurate value
/// will be returned if possible.
#[inline]
#[must_use]
pub fn code(self) -> Option<i64> {
self.0.code().map(Into::into)
}
/// Equivalent to [`ExitStatusExt::signal`].
///
/// [`ExitStatusExt::signal`]: ::std::os::unix::process::ExitStatusExt::signal
#[cfg(any(unix, doc))]
#[cfg_attr(process_control_docs_rs, doc(cfg(unix)))]
#[inline]
#[must_use]
pub fn signal(self) -> Option<::std::os::raw::c_int> {
self.0.signal()
}
}
impl Display for ExitStatus {
#[inline]
fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {
self.0.fmt(formatter)
}
}
impl From<process::ExitStatus> for ExitStatus {
#[inline]
fn from(value: process::ExitStatus) -> Self {
#[cfg_attr(windows, allow(clippy::useless_conversion))]
Self(value.into())
}
}
/// Equivalent to [`process::Output`] but holds an instance of [`ExitStatus`]
/// from this crate.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Output {
/// Equivalent to [`process::Output::status`].
pub status: ExitStatus,
/// Equivalent to [`process::Output::stdout`].
pub stdout: Vec<u8>,
/// Equivalent to [`process::Output::stderr`].
pub stderr: Vec<u8>,
}
impl From<process::Output> for Output {
#[inline]
fn from(value: process::Output) -> Self {
Self {
status: value.status.into(),
stdout: value.stdout,
stderr: value.stderr,
}
}
}
/// A temporary wrapper for a process timeout.
pub trait Timeout: private::Sealed {
/// The type returned by [`wait`].
///
/// [`wait`]: Self::wait
type Result;
/// Causes [`wait`] to never suppress an error.
///
/// Typically, errors terminating the process will be ignored, as they are
/// often less important than the result. However, when this method is
/// called, those errors will be returned as well.
///
/// [`wait`]: Self::wait
#[must_use]
fn strict_errors(self) -> Self;
/// Causes the process to be terminated if it exceeds the time limit.
///
/// Process identifier reuse by the system will be mitigated. There should
/// never be a scenario that causes an unintended process to be terminated.
#[must_use]
fn terminating(self) -> Self;
/// Runs the process to completion, aborting if it exceeds the time limit.
///
/// At least one thread will be created to wait on the process without
/// blocking the current thread.
///
/// If the time limit is exceeded before the process finishes, `Ok(None)`
/// will be returned. However, the process will not be terminated in that
/// case unless [`terminating`] is called beforehand. It is recommended to
/// always call that method to allow system resources to be freed.
///
/// The stdin handle to the process, if it exists, will be closed before
/// waiting. Otherwise, the process would assuredly time out when reading
/// from that pipe.
///
/// This method cannot guarantee that the same [`io::ErrorKind`] variants
/// will be returned in the future for the same types of failures. Allowing
/// these breakages is required to enable calling [`Child::kill`]
/// internally.
///
/// [`terminating`]: Self::terminating
fn wait(self) -> io::Result<Option<Self::Result>>;
}
/// Extensions to [`Child`] for easily terminating processes.
///
/// For more information, see [the module-level documentation][crate].
pub trait ChildExt<'a>: private::Sealed {
/// The type returned by [`with_timeout`].
///
/// [`with_timeout`]: Self::with_timeout
type ExitStatusTimeout: 'a + Timeout<Result = ExitStatus>;
/// The type returned by [`with_output_timeout`].
///
/// [`with_output_timeout`]: Self::with_output_timeout
type OutputTimeout: Timeout<Result = Output>;
/// Creates an instance of [`Terminator`] for this process.
///
/// # Examples
///
/// ```
/// # use std::io;
/// use std::process::Command;
///
/// use process_control::ChildExt;
///
/// let process = Command::new("echo").spawn()?;
/// let terminator = process.terminator()?;
/// #
/// # Ok::<_, io::Error>(())
/// ```
fn terminator(&self) -> io::Result<Terminator>;
/// Creates an instance of [`Timeout`] that yields [` | mod timeout;
/// A wrapper that stores enough information to terminate a process. | random_line_split |
boundarypslg.py | :param points numpy.ndarray: array of PLC vertex coordinates.
:param tris numpy.ndarray: array of PLC triangles (vertex topology).
:param holes numpy.ndarray: array of coordinates of holes in the PLC.
"""
self.points = points
self.tris = tris
self.holes = holes
def build_boundary_PSLGs(domain, sphere_pieces, ds):
"""Constructs PSLGs for domain boundaries. Each boundary is represented by a
Planar Straight Line Graph consisting of a set of vertices and edges corresponding
to the union of all intersection loops which lie on the boundary and all boundary
perimeter vertices and edges.
:param domain Domain: spatial domain for mesh.
:param sphere_pieces list: list of SpherePiece objects.
:param ds float: characteristic segment length.
:return: list of PSLG objects for the lower bounds along each coordinate axis.
:rtype: list.
"""
# TODO : Break up this function a bit.
def compile_points_edges(sphere_pieces):
"""Produces consolidated arrays containing all SpherePiece vertices and
edges.
:param sphere_pieces list: list of SpherePiece objects.
:return: tuple of arrays of vertex coordinates and topology.
:rtype: tuple.
"""
def build_edge_list(tris, points):
v_adj = np.zeros(2*[points.shape[0]], dtype=np.int32)
v_adj[tris[:,0], tris[:,1]] = v_adj[tris[:,1], tris[:,0]] = 1
v_adj[tris[:,1], tris[:,2]] = v_adj[tris[:,2], tris[:,1]] = 1
v_adj[tris[:,2], tris[:,0]] = v_adj[tris[:,0], tris[:,2]] = 1
return np.array(np.where(np.triu(v_adj) == 1), dtype=np.int32).T
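# Worked example (sketch): for a single triangle tris = [[0, 1, 2]] the adjacency
# matrix marks the pairs (0,1), (1,2) and (0,2), so build_edge_list returns the
# unique undirected edges [[0, 1], [0, 2], [1, 2]].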
vcount = 0
all_points = []
all_edges = []
for points, tris in [(p.points, p.tris) for p in sphere_pieces]:
edges = build_edge_list(tris, points)
edges += vcount
vcount += len(points)
all_points.append(points)
all_edges.append(edges)
return np.vstack(all_points), np.vstack(all_edges)
def refined_perimeter(perim, axis, ds):
"""Adds additional vertices to subdivide perimeter edge segments.
:param perim numpy.ndarray: array of vertices intersecting perimeter.
:param axis int: ordinal value of axis 0:x, 1:y, 2:z.
:param ds float: characteristic segment length.
:return: array of vertices intersecting refined perimeter.
:rtype: numpy.ndarray.
"""
def filter_colocated_points(perim, axis):
delta = np.diff(perim[:,axis])
keep_idx = np.hstack(([0], np.where(~np.isclose(delta,0.))[0] + 1))
return perim[keep_idx]
perim = filter_colocated_points(perim, axis)
refined_points = [perim[0]]
for e in [[i, i+1] for i in range(perim.shape[0]-1)]:
e_len = perim[e[1], axis] - perim[e[0], axis]
ne = int(np.ceil(e_len / ds))
if ne > 1:
dse = e_len / ne
add_points = np.zeros((ne,3))
add_points[:,axis] = dse * np.arange(1,ne+1)
refined_points.append(perim[e[0]] + add_points)
return np.vstack(refined_points)
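# Worked example (sketch): a perimeter edge from (0,0,0) to (1,0,0) refined along
# axis=0 with ds=0.3 gives ne = ceil(1/0.3) = 4 sub-segments, so the returned
# points lie at x = 0.0, 0.25, 0.5, 0.75 and 1.0.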
def add_holes(sphere_pieces):
"""Add hole points to boundary PSLGs.
:param sphere_pieces list: list of SpherePiece objects.
:return: array of hole point vertices.
:rtype: numpy.ndarray.
"""
# TODO : this is a placeholder function. Ultimately holes need to
# : be created at the point when a sphere is split into pieces.
holes = [[] for _ in range(3)]
for i in range(3):
j, k = (i+1)%3, (i+2)%3
for points, tris in [(p.points, p.tris) for p in sphere_pieces]:
points_ax = points[np.isclose(points[:,i], 0.)]
if points_ax.shape[0]:
holes[i].append([
0.5 * (points_ax[:,j].max() + points_ax[:,j].min()),
0.5 * (points_ax[:,k].max() + points_ax[:,k].min())
])
holes[i] = np.vstack(holes[i]) if len(holes[i])\
else np.empty((0,2), dtype=np.float64)
return holes
def reindex_edges(points, points_ax, edges_ax):
"""Reindexes edges along a given axis.
:param points numpy.ndarray: all point coordinates.
:param points_ax numpy.ndarray: boolean mask of points lying on the boundary.
:param edges_ax numpy.ndarray: edges intersecting the boundary.
:return: tuple of arrays of point coordinates and reindexed edges.
:rtype: tuple.
"""
points_segment = points[points_ax]
reindex = {old: new for new, old in enumerate(np.where(points_ax)[0])}
for i, (v0, v1) in enumerate(edges_ax):
edges_ax[i] = np.array([reindex[v0], reindex[v1]])
return points_segment, edges_ax
def build_perim_edge_list(points_pieces, perim_refined):
"""Construct list of perimeter edges for boundary.
:param points_pieces numpy.ndarray: sphere points intersecting boundary.
:param perim_refined numpy.ndarray: refined perimeter points.
:return: array of perimeter edge topology for boundary.
:rtype: numpy.ndarray.
"""
# Need to adjust edge indices for perimeter segments
v_count = len(points_pieces)
perim_edges = 4 * [None]
for j in range(4):
v_count_perim = len(perim_refined[j])
perim_vidx = np.empty(v_count_perim, dtype=np.int32)
mask = np.full(v_count_perim, True)
v_count_new = 0
for i, p in enumerate(perim_refined[j]):
vidx = np.where(np.isclose(npl.norm(points_pieces - p, axis=1), 0.))[0]
if len(vidx):
mask[i] = False
perim_vidx[i] = vidx[0]
else:
perim_vidx[i] = v_count + v_count_new
v_count_new += 1
perim_edges[j] = np.array([
[perim_vidx[k], perim_vidx[k+1]] for k in range(v_count_perim-1)
])
perim_refined[j] = perim_refined[j][mask]
v_count += v_count_new
return perim_edges
def add_point_plane_intersections(hole_pieces, axis, domain):
"""Adds points for sphere which just "touch" the boundary at a single point.
:param hole_pieces list: list of SpherePiece objects.
:param axis int: ordinal value of axis 0:x, 1:y, 2:z.
:param domain Domain: spatial domain for mesh.
:return: array of points touching boundary (may be empty).
:rtype: numpy.ndarray.
"""
added_points = []
for hole_piece in hole_pieces:
if np.isclose(hole_piece.sphere.min[axis], 0.):
close = np.where(np.isclose(hole_piece.points[:,axis], 0.))[0]
for idx in close:
added_points.append(hole_piece.points[idx])
elif np.isclose(hole_piece.sphere.max[axis], domain.L[axis]):
close = np.where(np.isclose(hole_piece.points[:,axis], domain.L[axis]))[0]
trans = np.zeros(3)
trans[axis] = -domain.L[axis]
for idx in close:
added_points.append(hole_piece.points[idx] + trans)
if added_points:
return np.vstack(added_points)
else:
return np.empty((0,3), dtype=np.float64)
L = domain.L
PBC = domain.PBC
sphere_pieces_holes = [p for p in sphere_pieces if p.is_hole]
sphere_pieces = [p for p in sphere_pieces if not p.is_hole]
# TODO : Optimise this by compiling only edges from sphere piece
# : intersection loops rather than considering all edges.
if len(sphere_pieces):
points, edges = compile_points_edges(sphere_pieces)
else:
points = np.empty((0,3), dtype=np.float64)
edges = np.empty((0,2), dtype=np.int32)
# Get edges and points on each boundary
edges_ax = [
edges[np.all(np.isclose(points[edges,i], 0.), axis=1)]
for i in range(3)
]
points_ax = [np.isclose(points[:,i], 0.) for i in range(3)]
# Fix boundary | """
def __init__(self, points, tris, holes):
"""Constructs BoundaryPLC object. | random_line_split |
|
boundarypslg.py | dtype=np.int32)
v_adj[tris[:,0], tris[:,1]] = v_adj[tris[:,1], tris[:,0]] = 1
v_adj[tris[:,1], tris[:,2]] = v_adj[tris[:,2], tris[:,1]] = 1
v_adj[tris[:,2], tris[:,0]] = v_adj[tris[:,0], tris[:,2]] = 1
return np.array(np.where(np.triu(v_adj) == 1), dtype=np.int32).T
vcount = 0
all_points = []
all_edges = []
for points, tris in [(p.points, p.tris) for p in sphere_pieces]:
edges = build_edge_list(tris, points)
edges += vcount
vcount += len(points)
all_points.append(points)
all_edges.append(edges)
return np.vstack(all_points), np.vstack(all_edges)
def refined_perimeter(perim, axis, ds):
"""Adds additional vertices to subdivide perimeter edge segments.
:param perim numpy.ndarray: array of vertices intersecting perimeter.
:param axis int: ordinal value of axis 0:x, 1:y, 2:z.
:param ds float: characteristic segment length.
:return: array of vertices intersecting refined perimeter.
:rtype: numpy.ndarray.
"""
def filter_colocated_points(perim, axis):
delta = np.diff(perim[:,axis])
keep_idx = np.hstack(([0], np.where(~np.isclose(delta,0.))[0] + 1))
return perim[keep_idx]
perim = filter_colocated_points(perim, axis)
refined_points = [perim[0]]
for e in [[i, i+1] for i in range(perim.shape[0]-1)]:
e_len = perim[e[1], axis] - perim[e[0], axis]
ne = int(np.ceil(e_len / ds))
if ne > 1:
dse = e_len / ne
add_points = np.zeros((ne,3))
add_points[:,axis] = dse * np.arange(1,ne+1)
refined_points.append(perim[e[0]] + add_points)
return np.vstack(refined_points)
def add_holes(sphere_pieces):
"""Add hole points to boundary PSLGs.
:param sphere_pieces list: list of SpherePiece objects.
:return: array of hole point vertices.
:rtype: numpy.ndarray.
"""
# TODO : this is a placeholder function. Ultimately holes need to
# : be created at the point when a sphere is split into pieces.
holes = [[] for _ in range(3)]
for i in range(3):
j, k = (i+1)%3, (i+2)%3
for points, tris in [(p.points, p.tris) for p in sphere_pieces]:
|
holes[i] = np.vstack(holes[i]) if len(holes[i])\
else np.empty((0,2), dtype=np.float64)
return holes
def reindex_edges(points, points_ax, edges_ax):
"""Reindexes edges along a given axis.
:param points numpy.ndarray: all point coordinates.
:param points_ax numpy.ndarray: indices of points intersecting boundary.
:param edges_ax numpy.ndarray: edges intersecting boundary.
:return: tuple of arrays of point coordinates and reindexed edges.
:rtype: tuple.
"""
points_segment = points[points_ax]
reindex = {old: new for new, old in enumerate(np.where(points_ax)[0])}
for i, (v0, v1) in enumerate(edges_ax):
edges_ax[i] = np.array([reindex[v0], reindex[v1]])
return points_segment, edges_ax
def build_perim_edge_list(points_pieces, perim_refined):
"""Construct list of perimeter edges for boundary.
:param points_pieces numpy.ndarray: sphere points intersecting boundary.
:param perim_refined numpy.ndarray: refined perimeter points.
:return: array of perimeter edge topology for boundary.
:rtype: numpy.ndarray.
"""
# Need to adjust edge indices for perimeter segments
v_count = len(points_pieces)
perim_edges = 4 * [None]
for j in range(4):
v_count_perim = len(perim_refined[j])
perim_vidx = np.empty(v_count_perim, dtype=np.int32)
mask = np.full(v_count_perim, True)
v_count_new = 0
for i, p in enumerate(perim_refined[j]):
vidx = np.where(np.isclose(npl.norm(points_pieces - p, axis=1), 0.))[0]
if len(vidx):
mask[i] = False
perim_vidx[i] = vidx[0]
else:
perim_vidx[i] = v_count + v_count_new
v_count_new += 1
perim_edges[j] = np.array([
[perim_vidx[k], perim_vidx[k+1]] for k in range(v_count_perim-1)
])
perim_refined[j] = perim_refined[j][mask]
v_count += v_count_new
return perim_edges
def add_point_plane_intersections(hole_pieces, axis, domain):
"""Adds points for sphere which just "touch" the boundary at a single point.
:param hole_pieces list: list of SpherePiece objects.
:param axis int: ordinal value of axis 0:x, 1:y, 2:z.
:param domain Domain: spatial domain for mesh.
:return: array of points touching boundary (may be empty).
:rtype: numpy.ndarray.
"""
added_points = []
for hole_piece in hole_pieces:
if np.isclose(hole_piece.sphere.min[axis], 0.):
close = np.where(np.isclose(hole_piece.points[:,axis], 0.))[0]
for idx in close:
added_points.append(hole_piece.points[idx])
elif np.isclose(hole_piece.sphere.max[axis], domain.L[axis]):
close = np.where(np.isclose(hole_piece.points[:,axis], domain.L[axis]))[0]
trans = np.zeros(3)
trans[axis] = -domain.L[axis]
for idx in close:
added_points.append(hole_piece.points[idx] + trans)
if added_points:
return np.vstack(added_points)
else:
return np.empty((0,3), dtype=np.float64)
L = domain.L
PBC = domain.PBC
sphere_pieces_holes = [p for p in sphere_pieces if p.is_hole]
sphere_pieces = [p for p in sphere_pieces if not p.is_hole]
# TODO : Optimise this by compiling only edges from sphere piece
# : intersection loops rather than considering all edges.
if len(sphere_pieces):
points, edges = compile_points_edges(sphere_pieces)
else:
points = np.empty((0,3), dtype=np.float64)
edges = np.empty((0,2), dtype=np.int32)
# Get edges and points on each boundary
edges_ax = [
edges[np.all(np.isclose(points[edges,i], 0.), axis=1)]
for i in range(3)
]
points_ax = [np.isclose(points[:,i], 0.) for i in range(3)]
# Fix boundary points to exactly zero
for i in range(3):
points[(points_ax[i], i)] = 0.
# reindex edge vertices
points_pieces, edges_ax = [list(x) for x in zip(*[
reindex_edges(points, points_ax[i], edges_ax[i]) for i in range(3)
])]
perim = []
perim_refined = []
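# The four edges of the rectangular boundary perimeter, as index pairs into the corner array below.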
perim_segs = np.array([[0, 1], [1, 2], [2, 3], [3, 0]])
perim_edges = []
for i in range(3):
perim.append(4 * [None])
perim_refined.append(4 * [None])
# Rotate coordinate system by cyclic permutation of axes
points_pieces[i][:,(0,1,2)] = points_pieces[i][:,(i,(i+1)%3,(i+2)%3)]
corners = np.array([
[0., 0., 0.], [0., L[1], 0.], [0., L[1], L[2]], [0., 0., L[2]]
])
points_on_perim = 4 * [None]
points_on_perim[0] = np.isclose(points_pieces[i][:, 2], 0.)
points_on_perim[1] = np.isclose(points_pieces[i][:, 1], L[1])
points_on_perim[2] = | points_ax = points[np.isclose(points[:,i], 0.)]
if points_ax.shape[0]:
holes[i].append([
0.5 * (points_ax[:,j].max() + points_ax[:,j].min()),
0.5 * (points_ax[:,k].max() + points_ax[:,k].min())
]) | conditional_block |
boundarypslg.py | dtype=np.int32)
v_adj[tris[:,0], tris[:,1]] = v_adj[tris[:,1], tris[:,0]] = 1
v_adj[tris[:,1], tris[:,2]] = v_adj[tris[:,2], tris[:,1]] = 1
v_adj[tris[:,2], tris[:,0]] = v_adj[tris[:,0], tris[:,2]] = 1
return np.array(np.where(np.triu(v_adj) == 1), dtype=np.int32).T
vcount = 0
all_points = []
all_edges = []
for points, tris in [(p.points, p.tris) for p in sphere_pieces]:
edges = build_edge_list(tris, points)
edges += vcount
vcount += len(points)
all_points.append(points)
all_edges.append(edges)
return np.vstack(all_points), np.vstack(all_edges)
def refined_perimeter(perim, axis, ds):
"""Adds additional vertices to subdivide perimeter edge segments.
:param perim numpy.ndarray: array of vertices intersecting perimeter.
:param axis int: ordinal value of axis 0:x, 1:y, 2:z.
:param ds float: characteristic segment length.
:return: array of vertices intersecting refined perimeter.
:rtype: numpy.ndarray.
"""
def filter_colocated_points(perim, axis):
|
perim = filter_colocated_points(perim, axis)
refined_points = [perim[0]]
for e in [[i, i+1] for i in range(perim.shape[0]-1)]:
e_len = perim[e[1], axis] - perim[e[0], axis]
ne = int(np.ceil(e_len / ds))
if ne > 1:
dse = e_len / ne
add_points = np.zeros((ne,3))
add_points[:,axis] = dse * np.arange(1,ne+1)
refined_points.append(perim[e[0]] + add_points)
return np.vstack(refined_points)
def add_holes(sphere_pieces):
"""Add hole points to boundary PSLGs.
:param sphere_pieces list: list of SpherePiece objects.
:return: array of hole point vertices.
:rtype: numpy.ndarray.
"""
# TODO : this is a placeholder function. Ultimately holes need to
# : be created at the point when a sphere is split into pieces.
holes = [[] for _ in range(3)]
for i in range(3):
j, k = (i+1)%3, (i+2)%3
for points, tris in [(p.points, p.tris) for p in sphere_pieces]:
points_ax = points[np.isclose(points[:,i], 0.)]
if points_ax.shape[0]:
holes[i].append([
0.5 * (points_ax[:,j].max() + points_ax[:,j].min()),
0.5 * (points_ax[:,k].max() + points_ax[:,k].min())
])
holes[i] = np.vstack(holes[i]) if len(holes[i])\
else np.empty((0,2), dtype=np.float64)
return holes
def reindex_edges(points, points_ax, edges_ax):
"""Reindexes edges along a given axis.
:param points numpy.ndarray: all point coordinates.
:param points_ax numpy.ndarray: indices of points intersecting boundary.
:param edges_ax numpy.ndarray: edges intersecting boundary.
:return: tuple of arrays of point coordinates and reindexed edges.
:rtype: tuple.
"""
points_segment = points[points_ax]
reindex = {old: new for new, old in enumerate(np.where(points_ax)[0])}
for i, (v0, v1) in enumerate(edges_ax):
edges_ax[i] = np.array([reindex[v0], reindex[v1]])
return points_segment, edges_ax
def build_perim_edge_list(points_pieces, perim_refined):
"""Construct list of perimeter edges for boundary.
:param points_pieces numpy.ndarray: sphere points intersecting boundary.
:param perim_refined numpy.ndarray: refined perimeter points.
:return: array of perimeter edge topology for boundary.
:rtype: numpy.ndarray.
"""
# Need to adjust edge indices for perimeter segments
v_count = len(points_pieces)
perim_edges = 4 * [None]
for j in range(4):
v_count_perim = len(perim_refined[j])
perim_vidx = np.empty(v_count_perim, dtype=np.int32)
mask = np.full(v_count_perim, True)
v_count_new = 0
for i, p in enumerate(perim_refined[j]):
vidx = np.where(np.isclose(npl.norm(points_pieces - p, axis=1), 0.))[0]
if len(vidx):
mask[i] = False
perim_vidx[i] = vidx[0]
else:
perim_vidx[i] = v_count + v_count_new
v_count_new += 1
perim_edges[j] = np.array([
[perim_vidx[k], perim_vidx[k+1]] for k in range(v_count_perim-1)
])
perim_refined[j] = perim_refined[j][mask]
v_count += v_count_new
return perim_edges
def add_point_plane_intersections(hole_pieces, axis, domain):
"""Adds points for sphere which just "touch" the boundary at a single point.
:param hole_pieces list: list of SpherePiece objects.
:param axis int: ordinal value of axis 0:x, 1:y, 2:z.
:param domain Domain: spatial domain for mesh.
:return: array of points touching boundary (may be empty).
:rtype: numpy.ndarray.
"""
added_points = []
for hole_piece in hole_pieces:
if np.isclose(hole_piece.sphere.min[axis], 0.):
close = np.where(np.isclose(hole_piece.points[:,axis], 0.))[0]
for idx in close:
added_points.append(hole_piece.points[idx])
elif np.isclose(hole_piece.sphere.max[axis], domain.L[axis]):
close = np.where(np.isclose(hole_piece.points[:,axis], domain.L[axis]))[0]
trans = np.zeros(3)
trans[axis] = -domain.L[axis]
for idx in close:
added_points.append(hole_piece.points[idx] + trans)
if added_points:
return np.vstack(added_points)
else:
return np.empty((0,3), dtype=np.float64)
L = domain.L
PBC = domain.PBC
sphere_pieces_holes = [p for p in sphere_pieces if p.is_hole]
sphere_pieces = [p for p in sphere_pieces if not p.is_hole]
# TODO : Optimise this by compiling only edges from sphere piece
# : intersection loops rather than considering all edges.
if len(sphere_pieces):
points, edges = compile_points_edges(sphere_pieces)
else:
points = np.empty((0,3), dtype=np.float64)
edges = np.empty((0,2), dtype=np.int32)
# Get edges and points on each boundary
edges_ax = [
edges[np.all(np.isclose(points[edges,i], 0.), axis=1)]
for i in range(3)
]
points_ax = [np.isclose(points[:,i], 0.) for i in range(3)]
# Fix boundary points to exactly zero
for i in range(3):
points[(points_ax[i], i)] = 0.
# reindex edge vertices
points_pieces, edges_ax = [list(x) for x in zip(*[
reindex_edges(points, points_ax[i], edges_ax[i]) for i in range(3)
])]
perim = []
perim_refined = []
perim_segs = np.array([[0, 1], [1, 2], [2, 3], [3, 0]])
perim_edges = []
for i in range(3):
perim.append(4 * [None])
perim_refined.append(4 * [None])
# Rotate coordinate system by cyclic permutation of axes
points_pieces[i][:,(0,1,2)] = points_pieces[i][:,(i,(i+1)%3,(i+2)%3)]
corners = np.array([
[0., 0., 0.], [0., L[1], 0.], [0., L[1], L[2]], [0., 0., L[2]]
])
points_on_perim = 4 * [None]
points_on_perim[0] = np.isclose(points_pieces[i][:, 2], 0.)
points_on_perim[1] = np.isclose(points_pieces[i][:, 1], L[1])
points_on_perim[2] = | delta = np.diff(perim[:,axis])
keep_idx = np.hstack(([0], np.where(~np.isclose(delta,0.))[0] + 1))
return perim[keep_idx] | identifier_body |
boundarypslg.py | x[i] = vidx[0]
else:
perim_vidx[i] = v_count + v_count_new
v_count_new += 1
perim_edges[j] = np.array([
[perim_vidx[k], perim_vidx[k+1]] for k in range(v_count_perim-1)
])
perim_refined[j] = perim_refined[j][mask]
v_count += v_count_new
return perim_edges
def add_point_plane_intersections(hole_pieces, axis, domain):
"""Adds points for sphere which just "touch" the boundary at a single point.
:param hole_pieces list: list of SpherePiece objects.
:param axis int: ordinal value of axis 0:x, 1:y, 2:z.
:param domain Domain: spatial domain for mesh.
:return: array of points touching boundary (may be empty).
:rtype: numpy.ndarray.
"""
added_points = []
for hole_piece in hole_pieces:
if np.isclose(hole_piece.sphere.min[axis], 0.):
close = np.where(np.isclose(hole_piece.points[:,axis], 0.))[0]
for idx in close:
added_points.append(hole_piece.points[idx])
elif np.isclose(hole_piece.sphere.max[axis], domain.L[axis]):
close = np.where(np.isclose(hole_piece.points[:,axis], domain.L[axis]))[0]
trans = np.zeros(3)
trans[axis] = -domain.L[axis]
for idx in close:
added_points.append(hole_piece.points[idx] + trans)
if added_points:
return np.vstack(added_points)
else:
return np.empty((0,3), dtype=np.float64)
L = domain.L
PBC = domain.PBC
sphere_pieces_holes = [p for p in sphere_pieces if p.is_hole]
sphere_pieces = [p for p in sphere_pieces if not p.is_hole]
# TODO : Optimise this by compiling only edges from sphere piece
# : intersection loops rather than considering all edges.
if len(sphere_pieces):
points, edges = compile_points_edges(sphere_pieces)
else:
points = np.empty((0,3), dtype=np.float64)
edges = np.empty((0,2), dtype=np.int32)
# Get edges and points on each boundary
edges_ax = [
edges[np.all(np.isclose(points[edges,i], 0.), axis=1)]
for i in range(3)
]
points_ax = [np.isclose(points[:,i], 0.) for i in range(3)]
# Fix boundary points to exactly zero
for i in range(3):
points[(points_ax[i], i)] = 0.
# reindex edge vertices
points_pieces, edges_ax = [list(x) for x in zip(*[
reindex_edges(points, points_ax[i], edges_ax[i]) for i in range(3)
])]
perim = []
perim_refined = []
perim_segs = np.array([[0, 1], [1, 2], [2, 3], [3, 0]])
perim_edges = []
for i in range(3):
perim.append(4 * [None])
perim_refined.append(4 * [None])
# Rotate coordinate system by cyclic permutation of axes
points_pieces[i][:,(0,1,2)] = points_pieces[i][:,(i,(i+1)%3,(i+2)%3)]
corners = np.array([
[0., 0., 0.], [0., L[1], 0.], [0., L[1], L[2]], [0., 0., L[2]]
])
points_on_perim = 4 * [None]
points_on_perim[0] = np.isclose(points_pieces[i][:, 2], 0.)
points_on_perim[1] = np.isclose(points_pieces[i][:, 1], L[1])
points_on_perim[2] = np.isclose(points_pieces[i][:, 2], L[2])
points_on_perim[3] = np.isclose(points_pieces[i][:, 1], 0.)
for j in range(4):
axis = 1 + j % 2
if PBC[axis] and j >= 2:
continue
perim[i][j] = np.vstack(
(corners[perim_segs[j]], points_pieces[i][points_on_perim[j]])
)
if PBC[axis]:
translate = np.array([0., 0., -L[2]]) if axis == 1\
else np.array([0., L[1], 0.])
translated_points = points_pieces[i][points_on_perim[j + 2]]\
+ translate
perim[i][j] = np.vstack((perim[i][j], translated_points))
perim[i][j] = perim[i][j][perim[i][j][:, axis].argsort()]
perim_refined[i][j] = refined_perimeter(perim[i][j], axis, ds)
if PBC[axis]:
perim_refined[i][j+2] = perim_refined[i][j] - translate
# Add the corner points so that duplicate corners can be filtered out
# in build_perim_edge_list
points_pieces[i] = np.append(points_pieces[i], corners, axis=0)
perim_edges.append(
build_perim_edge_list(points_pieces[i], perim_refined[i])
)
# Put coordinates back in proper order for this axis
points_pieces[i][:,(i,(i+1)%3,(i+2)%3)] = points_pieces[i][:,(0,1,2)]
L = L[np.newaxis, (1, 2, 0)][0]
# TODO : refactor so the boundary PSLG is built during the above loop, avoiding subsequent loops
# add holes
pslg_holes = add_holes(sphere_pieces)
# Add points which lie on the boundaries from hole particles
added_points = [
add_point_plane_intersections(sphere_pieces_holes, i, domain)
for i in range(3)
]
# Group together segment and perimeter points and edges for each axis
boundary_pslgs = []
for i in range(3):
pslg_points = np.vstack((
points_pieces[i][:,((i+1)%3,(i+2)%3)],
np.vstack(perim_refined[i])[:,(1,2)],
added_points[i][:,((i+1)%3,(i+2)%3)]
))
pslg_edges = np.vstack((edges_ax[i], np.vstack(perim_edges[i])))
boundary_pslgs.append(PSLG(pslg_points, pslg_edges, pslg_holes[i]))
return boundary_pslgs
def triangulate_PSLGs(pslgs, area_constraints):
"""Triangulates lower boundaries along each coordinate axis using Shewchuk's
Triangle library.
:param pslgs list: list of PSLG objects for the boundaries.
:param area_constraints AreaConstraints: object storing area constraint grids for
quality triangulation.
:return: list of BoundaryPLC objects for the triangulated boundaries.
:rtype: list.
"""
triangulated_boundaries = []
for i, pslg in enumerate(pslgs):
target_area_grid = area_constraints.grid[i]
inv_dx = area_constraints.inv_dx[i]
inv_dy = area_constraints.inv_dy[i]
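# Refinement callback for Triangle: a triangle is refined (returns 1) when its area exceeds
# the target area looked up from the constraint grid at the triangle's centroid.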
def rfunc(vertices, area):
(ox, oy), (dx, dy), (ax, ay) = vertices
cx = ONE_THIRD * (ox + dx + ax) # Triangle center x coord.
cy = ONE_THIRD * (oy + dy + ay) # Triangle center y coord.
ix = int(cx * inv_dx)
iy = int(cy * inv_dy)
target_area = target_area_grid[iy][ix]
return int(area > target_area) # True -> 1 means refine
# Set mesh info for triangulation
mesh_data = triangle.MeshInfo()
mesh_data.set_points(pslg.points)
mesh_data.set_facets(pslg.edges.tolist())
if len(pslg.holes):
mesh_data.set_holes(pslg.holes)
# Call triangle library to perform Delaunay triangulation
max_volume = area_constraints.dA_max
min_angle = 20.
mesh = triangle.build(
mesh_data,
max_volume=max_volume,
min_angle=min_angle,
allow_boundary_steiner=False,
refinement_func=rfunc
)
# Extract triangle vertices from triangulation adding back x coord
points = np.column_stack((np.zeros(len(mesh.points)), np.array(mesh.points)))
points = points[:,(-i%3,(1-i)%3,(2-i)%3)]
tris = np.array(mesh.elements)
holes = np.column_stack((np.zeros(len(mesh.holes)), np.array(mesh.holes)))
holes = holes[:,(-i%3,(1-i)%3,(2-i)%3)]
triangulated_boundaries.append(BoundaryPLC(points, tris, holes))
return triangulated_boundaries
def | boundarypslg | identifier_name |
|
config.go | environments = []string{development, integration, preproduction, production}
)
type (
KafkaReportingConfig struct {
SmsReportingTopic *string
SubscribeUnsubscribeReportingTopic *string
FcmReportingTopic *string
ApnsReportingTopic *string
}
// PostgresConfig is used for configuring the Postgresql connection.
PostgresConfig struct {
Host *string
Port *int
User *string
Password *string
DbName *string
}
// ClusterConfig is used for configuring the cluster component.
ClusterConfig struct {
NodeID *uint8
NodePort *int
Remotes *tcpAddrList
}
// GubleConfig is used for configuring Guble server (including its modules / connectors).
GubleConfig struct {
Log *string
EnvName *string
HttpListen *string
KVS *string
MS *string
StoragePath *string
HealthEndpoint *string
MetricsEndpoint *string
PrometheusEndpoint *string
TogglesEndpoint *string
Profile *string
Postgres PostgresConfig
FCM fcm.Config
APNS apns.Config
SMS sms.Config
WS websocket.Config
KafkaProducer kafka.Config
Cluster ClusterConfig
KafkaReportingConfig KafkaReportingConfig
}
)
var (
parsed = false
// Config is the active configuration of guble (used when starting-up the server)
Config = &GubleConfig{
Log: kingpin.Flag("log", "Log level").
Default(log.ErrorLevel.String()).
Envar(g("LOG")).
Enum(logLevels()...),
EnvName: kingpin.Flag("env", `Name of the environment on which the application is running`).
Default(development).
Envar(g("ENV")).
Enum(environments...),
HttpListen: kingpin.Flag("http", `The address for the HTTP server to listen on (format: "[Host]:Port")`).
Default(defaultHttpListen).
Envar(g("HTTP_LISTEN")).
String(),
KVS: kingpin.Flag("kvs", "The storage backend for the key-value store to use : file | memory | postgres ").
Default(defaultKVSBackend).
Envar(g("KVS")).
String(),
MS: kingpin.Flag("ms", "The message storage backend : file | memory").
Default(defaultMSBackend).
HintOptions("file", "memory").
Envar(g("MS")).
String(),
StoragePath: kingpin.Flag("storage-path", "The path for storing messages and key-value data if 'file' is selected").
Default(defaultStoragePath).
Envar(g("STORAGE_PATH")).
ExistingDir(),
HealthEndpoint: kingpin.Flag("health-endpoint", `The health endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultHealthEndpoint).
Envar(g("HEALTH_ENDPOINT")).
String(),
MetricsEndpoint: kingpin.Flag("metrics-endpoint", `The metrics endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultMetricsEndpoint).
Envar(g("METRICS_ENDPOINT")).
String(),
PrometheusEndpoint: kingpin.Flag("prometheus-endpoint", `The metrics Prometheus endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultPrometheusEndpoint).
Envar(g("PROMETHEUS_ENDPOINT")).
String(),
TogglesEndpoint: kingpin.Flag("toggles-endpoint", `The Feature-Toggles endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultTogglesEndpoint).
Envar(g("TOGGLES_ENDPOINT")).
String(),
Profile: kingpin.Flag("profile", `The profiler to be used (default: none): mem | cpu | block`).
Default("").
Envar(g("PROFILE")).
Enum("mem", "cpu", "block", ""),
Postgres: PostgresConfig{
Host: kingpin.Flag("pg-host", "The PostgreSQL hostname").
Default("localhost").
Envar(g("PG_HOST")).
String(),
Port: kingpin.Flag("pg-port", "The PostgreSQL port").
Default("5432").
Envar(g("PG_PORT")).
Int(),
User: kingpin.Flag("pg-user", "The PostgreSQL user").
Default("guble").
Envar(g("PG_USER")).
String(),
Password: kingpin.Flag("pg-password", "The PostgreSQL password").
Default("guble").
Envar(g("PG_PASSWORD")).
String(),
DbName: kingpin.Flag("pg-dbname", "The PostgreSQL database name").
Default("guble").
Envar(g("PG_DBNAME")).
String(),
},
FCM: fcm.Config{
Enabled: kingpin.Flag("fcm", "Enable the Google Firebase Cloud Messaging connector").
Envar(g("FCM")).
Bool(),
APIKey: kingpin.Flag("fcm-api-key", "The Google API Key for Google Firebase Cloud Messaging").
Envar(g("FCM_API_KEY")).
String(),
Workers: kingpin.Flag("fcm-workers", "The number of workers handling traffic with Firebase Cloud Messaging (default: number of CPUs)").
Default(strconv.Itoa(runtime.NumCPU())).
Envar(g("FCM_WORKERS")).
Int(),
Endpoint: kingpin.Flag("fcm-endpoint", "The Google Firebase Cloud Messaging endpoint").
Default(defaultFCMEndpoint).
Envar(g("FCM_ENDPOINT")).
String(),
Prefix: kingpin.Flag("fcm-prefix", "The FCM prefix / endpoint").
Envar(g("FCM_PREFIX")).
Default("/fcm/"). | APNS: apns.Config{
Enabled: kingpin.Flag("apns", "Enable the APNS connector (by default, in Development mode)").
Envar(g("APNS")).
Bool(),
Production: kingpin.Flag("apns-production", "Enable the APNS connector in Production mode").
Envar(g("APNS_PRODUCTION")).
Bool(),
CertificateFileName: kingpin.Flag("apns-cert-file", "The APNS certificate file name").
Envar(g("APNS_CERT_FILE")).
String(),
CertificateBytes: kingpin.Flag("apns-cert-bytes", "The APNS certificate bytes, as a string of hex-values").
Envar(g("APNS_CERT_BYTES")).
HexBytes(),
CertificatePassword: kingpin.Flag("apns-cert-password", "The APNS certificate password").
Envar(g("APNS_CERT_PASSWORD")).
String(),
AppTopic: kingpin.Flag("apns-app-topic", "The APNS topic (as used by the mobile application)").
Envar(g("APNS_APP_TOPIC")).
String(),
Prefix: kingpin.Flag("apns-prefix", "The APNS prefix / endpoint").
Envar(g("APNS_PREFIX")).
Default("/apns/").
String(),
Workers: kingpin.Flag("apns-workers", "The number of workers handling traffic with APNS (default: number of CPUs)").
Default(strconv.Itoa(runtime.NumCPU())).
Envar(g("APNS_WORKERS")).
Int(),
IntervalMetrics: &defaultAPNSMetrics,
},
Cluster: ClusterConfig{
NodeID: kingpin.Flag("node-id", "(cluster mode) This guble node's own ID: a strictly positive integer number which must be unique in cluster").
Envar(g("NODE_ID")).
Uint8(),
NodePort: kingpin.Flag("node-port", "(cluster mode) This guble node's own local port: a strictly positive integer number").
Default(defaultNodePort).
Envar(g("NODE_PORT")).
Int(),
Remotes: tcpAddrListParser(kingpin.Flag("remotes", `(cluster mode) The list of TCP addresses of some other guble nodes (format: "IP:port")`).
Envar(g("NODE_REMOTES"))),
},
SMS: sms.Config{
Enabled: kingpin.Flag("sms", "Enable the SMS gateway").
Envar(g("SMS")).
Bool(),
APIKey: kingpin.Flag("sms-api-key", "The Nexmo API Key for Sending sms").
Envar(g("SMS_API_KEY")).
String(),
APISecret: kingpin.Flag("sms-api-secret", "The Nexmo API Secret for Sending sms").
Envar(g("SMS_API_SECRET")).
String(),
SMSTopic: kingpin.Flag("sms-topic", "The topic for sms route").
Envar(g("SMS_TOPIC")).
Default(sms.SMSDefaultTopic).
String(),
Toggleable: kingpin.Flag("sms-toggleable", "If sms gateway should | String(),
IntervalMetrics: &defaultFCMMetrics,
}, | random_line_split |
config.go | (),
PrometheusEndpoint: kingpin.Flag("prometheus-endpoint", `The metrics Prometheus endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultPrometheusEndpoint).
Envar(g("PROMETHEUS_ENDPOINT")).
String(),
TogglesEndpoint: kingpin.Flag("toggles-endpoint", `The Feature-Toggles endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultTogglesEndpoint).
Envar(g("TOGGLES_ENDPOINT")).
String(),
Profile: kingpin.Flag("profile", `The profiler to be used (default: none): mem | cpu | block`).
Default("").
Envar(g("PROFILE")).
Enum("mem", "cpu", "block", ""),
Postgres: PostgresConfig{
Host: kingpin.Flag("pg-host", "The PostgreSQL hostname").
Default("localhost").
Envar(g("PG_HOST")).
String(),
Port: kingpin.Flag("pg-port", "The PostgreSQL port").
Default("5432").
Envar(g("PG_PORT")).
Int(),
User: kingpin.Flag("pg-user", "The PostgreSQL user").
Default("guble").
Envar(g("PG_USER")).
String(),
Password: kingpin.Flag("pg-password", "The PostgreSQL password").
Default("guble").
Envar(g("PG_PASSWORD")).
String(),
DbName: kingpin.Flag("pg-dbname", "The PostgreSQL database name").
Default("guble").
Envar(g("PG_DBNAME")).
String(),
},
FCM: fcm.Config{
Enabled: kingpin.Flag("fcm", "Enable the Google Firebase Cloud Messaging connector").
Envar(g("FCM")).
Bool(),
APIKey: kingpin.Flag("fcm-api-key", "The Google API Key for Google Firebase Cloud Messaging").
Envar(g("FCM_API_KEY")).
String(),
Workers: kingpin.Flag("fcm-workers", "The number of workers handling traffic with Firebase Cloud Messaging (default: number of CPUs)").
Default(strconv.Itoa(runtime.NumCPU())).
Envar(g("FCM_WORKERS")).
Int(),
Endpoint: kingpin.Flag("fcm-endpoint", "The Google Firebase Cloud Messaging endpoint").
Default(defaultFCMEndpoint).
Envar(g("FCM_ENDPOINT")).
String(),
Prefix: kingpin.Flag("fcm-prefix", "The FCM prefix / endpoint").
Envar(g("FCM_PREFIX")).
Default("/fcm/").
String(),
IntervalMetrics: &defaultFCMMetrics,
},
APNS: apns.Config{
Enabled: kingpin.Flag("apns", "Enable the APNS connector (by default, in Development mode)").
Envar(g("APNS")).
Bool(),
Production: kingpin.Flag("apns-production", "Enable the APNS connector in Production mode").
Envar(g("APNS_PRODUCTION")).
Bool(),
CertificateFileName: kingpin.Flag("apns-cert-file", "The APNS certificate file name").
Envar(g("APNS_CERT_FILE")).
String(),
CertificateBytes: kingpin.Flag("apns-cert-bytes", "The APNS certificate bytes, as a string of hex-values").
Envar(g("APNS_CERT_BYTES")).
HexBytes(),
CertificatePassword: kingpin.Flag("apns-cert-password", "The APNS certificate password").
Envar(g("APNS_CERT_PASSWORD")).
String(),
AppTopic: kingpin.Flag("apns-app-topic", "The APNS topic (as used by the mobile application)").
Envar(g("APNS_APP_TOPIC")).
String(),
Prefix: kingpin.Flag("apns-prefix", "The APNS prefix / endpoint").
Envar(g("APNS_PREFIX")).
Default("/apns/").
String(),
Workers: kingpin.Flag("apns-workers", "The number of workers handling traffic with APNS (default: number of CPUs)").
Default(strconv.Itoa(runtime.NumCPU())).
Envar(g("APNS_WORKERS")).
Int(),
IntervalMetrics: &defaultAPNSMetrics,
},
Cluster: ClusterConfig{
NodeID: kingpin.Flag("node-id", "(cluster mode) This guble node's own ID: a strictly positive integer number which must be unique in cluster").
Envar(g("NODE_ID")).
Uint8(),
NodePort: kingpin.Flag("node-port", "(cluster mode) This guble node's own local port: a strictly positive integer number").
Default(defaultNodePort).
Envar(g("NODE_PORT")).
Int(),
Remotes: tcpAddrListParser(kingpin.Flag("remotes", `(cluster mode) The list of TCP addresses of some other guble nodes (format: "IP:port")`).
Envar(g("NODE_REMOTES"))),
},
SMS: sms.Config{
Enabled: kingpin.Flag("sms", "Enable the SMS gateway").
Envar(g("SMS")).
Bool(),
APIKey: kingpin.Flag("sms-api-key", "The Nexmo API Key for Sending sms").
Envar(g("SMS_API_KEY")).
String(),
APISecret: kingpin.Flag("sms-api-secret", "The Nexmo API Secret for Sending sms").
Envar(g("SMS_API_SECRET")).
String(),
SMSTopic: kingpin.Flag("sms-topic", "The topic for sms route").
Envar(g("SMS_TOPIC")).
Default(sms.SMSDefaultTopic).
String(),
Toggleable: kingpin.Flag("sms-toggleable", "Whether the SMS gateway can be stopped and restarted at runtime").
Envar(g("SMS_TOGGLEABLE")).
Bool(),
Workers: kingpin.Flag("sms-workers", "The number of workers handling traffic with Nexmo sms endpoint").
Default(strconv.Itoa(runtime.NumCPU())).
Envar(g("SMS_WORKERS")).
Int(),
IntervalMetrics: &defaultSMSMetrics,
},
WS: websocket.Config{
Enabled: kingpin.Flag("ws", "Enable the websocket module").
Envar(g("WS")).
Bool(),
Prefix: kingpin.Flag("ws-prefix", "The Websocket prefix").
Envar(g("WS_PREFIX")).
Default("/stream/").
String(),
},
KafkaProducer: kafka.Config{
Brokers: configstring.NewFromKingpin(
kingpin.Flag("kafka-brokers", `The list Kafka brokers to which Guble should connect (formatted as host:port, separated by spaces or commas)`).
Envar(g("KAFKA_BROKERS"))),
},
KafkaReportingConfig: KafkaReportingConfig{
SmsReportingTopic: kingpin.Flag("sms-kafka-topic", "The name of the SMS-Reporting Kafka topic").
Envar("GUBLE_SMS_KAFKA_TOPIC").
String(),
SubscribeUnsubscribeReportingTopic: kingpin.Flag("subscribe-kafka-topic", "The name of the Subscribe/Unsubscribe Reporting Kafka topic").
Envar("GUBLE_SUBSCRIBE_KAFKA_TOPIC").
String(),
ApnsReportingTopic: kingpin.Flag("apns-kafka-topic", "The name of the Apns-Reporting Kafka topic").
Envar("GUBLE_APNS_KAFKA_TOPIC").
String(),
FcmReportingTopic: kingpin.Flag("fcm-kafka-topic", "The name of the fcm-Reporting Kafka topic").
Envar("GUBLE_FCM_KAFKA_TOPIC").
String(),
},
}
)
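// g prefixes a flag's environment-variable name with "GUBLE_".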
func g(s string) string {
return "GUBLE_" + s
}
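// logLevels lists the names of all supported log levels, used as the allowed values of the --log flag.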
func logLevels() (levels []string) {
for _, level := range log.AllLevels {
levels = append(levels, level.String())
}
return
}
// parseConfig parses the flags from the command line. Must be called before accessing the config.
// If there are missing or invalid arguments it will exit the application
// and display a message.
func parseConfig() {
if parsed {
return
}
kingpin.Parse()
parsed = true
return
}
type tcpAddrList []*net.TCPAddr
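// Set parses a space-separated list of "IP:port" strings into TCP addresses,
// allowing tcpAddrList to be used as a kingpin flag value (see tcpAddrListParser).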
func (h *tcpAddrList) Set(value string) error {
addresses := strings.Split(value, " ")
// Also reset the list: when running tests we would otherwise keep appending to the same list, which is incorrect
*h = make(tcpAddrList, 0)
for _, addr := range addresses {
logger.WithField("addr", addr).Info("value")
parts := strings.SplitN(addr, ":", 2)
if len(parts) != 2 {
return fmt.Errorf("expected IP:PORT got '%s'", addr)
}
addr, err := net.ResolveTCPAddr("tcp", addr)
if err != nil {
return err
}
*h = append(*h, addr)
}
return nil
}
func tcpAddrListParser(s kingpin.Settings) (target *tcpAddrList) | {
slist := make(tcpAddrList, 0)
s.SetValue(&slist)
return &slist
} | identifier_body |
|
config.go | : file | memory | postgres ").
Default(defaultKVSBackend).
Envar(g("KVS")).
String(),
MS: kingpin.Flag("ms", "The message storage backend : file | memory").
Default(defaultMSBackend).
HintOptions("file", "memory").
Envar(g("MS")).
String(),
StoragePath: kingpin.Flag("storage-path", "The path for storing messages and key-value data if 'file' is selected").
Default(defaultStoragePath).
Envar(g("STORAGE_PATH")).
ExistingDir(),
HealthEndpoint: kingpin.Flag("health-endpoint", `The health endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultHealthEndpoint).
Envar(g("HEALTH_ENDPOINT")).
String(),
MetricsEndpoint: kingpin.Flag("metrics-endpoint", `The metrics endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultMetricsEndpoint).
Envar(g("METRICS_ENDPOINT")).
String(),
PrometheusEndpoint: kingpin.Flag("prometheus-endpoint", `The metrics Prometheus endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultPrometheusEndpoint).
Envar(g("PROMETHEUS_ENDPOINT")).
String(),
TogglesEndpoint: kingpin.Flag("toggles-endpoint", `The Feature-Toggles endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultTogglesEndpoint).
Envar(g("TOGGLES_ENDPOINT")).
String(),
Profile: kingpin.Flag("profile", `The profiler to be used (default: none): mem | cpu | block`).
Default("").
Envar(g("PROFILE")).
Enum("mem", "cpu", "block", ""),
Postgres: PostgresConfig{
Host: kingpin.Flag("pg-host", "The PostgreSQL hostname").
Default("localhost").
Envar(g("PG_HOST")).
String(),
Port: kingpin.Flag("pg-port", "The PostgreSQL port").
Default("5432").
Envar(g("PG_PORT")).
Int(),
User: kingpin.Flag("pg-user", "The PostgreSQL user").
Default("guble").
Envar(g("PG_USER")).
String(),
Password: kingpin.Flag("pg-password", "The PostgreSQL password").
Default("guble").
Envar(g("PG_PASSWORD")).
String(),
DbName: kingpin.Flag("pg-dbname", "The PostgreSQL database name").
Default("guble").
Envar(g("PG_DBNAME")).
String(),
},
FCM: fcm.Config{
Enabled: kingpin.Flag("fcm", "Enable the Google Firebase Cloud Messaging connector").
Envar(g("FCM")).
Bool(),
APIKey: kingpin.Flag("fcm-api-key", "The Google API Key for Google Firebase Cloud Messaging").
Envar(g("FCM_API_KEY")).
String(),
Workers: kingpin.Flag("fcm-workers", "The number of workers handling traffic with Firebase Cloud Messaging (default: number of CPUs)").
Default(strconv.Itoa(runtime.NumCPU())).
Envar(g("FCM_WORKERS")).
Int(),
Endpoint: kingpin.Flag("fcm-endpoint", "The Google Firebase Cloud Messaging endpoint").
Default(defaultFCMEndpoint).
Envar(g("FCM_ENDPOINT")).
String(),
Prefix: kingpin.Flag("fcm-prefix", "The FCM prefix / endpoint").
Envar(g("FCM_PREFIX")).
Default("/fcm/").
String(),
IntervalMetrics: &defaultFCMMetrics,
},
APNS: apns.Config{
Enabled: kingpin.Flag("apns", "Enable the APNS connector (by default, in Development mode)").
Envar(g("APNS")).
Bool(),
Production: kingpin.Flag("apns-production", "Enable the APNS connector in Production mode").
Envar(g("APNS_PRODUCTION")).
Bool(),
CertificateFileName: kingpin.Flag("apns-cert-file", "The APNS certificate file name").
Envar(g("APNS_CERT_FILE")).
String(),
CertificateBytes: kingpin.Flag("apns-cert-bytes", "The APNS certificate bytes, as a string of hex-values").
Envar(g("APNS_CERT_BYTES")).
HexBytes(),
CertificatePassword: kingpin.Flag("apns-cert-password", "The APNS certificate password").
Envar(g("APNS_CERT_PASSWORD")).
String(),
AppTopic: kingpin.Flag("apns-app-topic", "The APNS topic (as used by the mobile application)").
Envar(g("APNS_APP_TOPIC")).
String(),
Prefix: kingpin.Flag("apns-prefix", "The APNS prefix / endpoint").
Envar(g("APNS_PREFIX")).
Default("/apns/").
String(),
Workers: kingpin.Flag("apns-workers", "The number of workers handling traffic with APNS (default: number of CPUs)").
Default(strconv.Itoa(runtime.NumCPU())).
Envar(g("APNS_WORKERS")).
Int(),
IntervalMetrics: &defaultAPNSMetrics,
},
Cluster: ClusterConfig{
NodeID: kingpin.Flag("node-id", "(cluster mode) This guble node's own ID: a strictly positive integer number which must be unique in cluster").
Envar(g("NODE_ID")).
Uint8(),
NodePort: kingpin.Flag("node-port", "(cluster mode) This guble node's own local port: a strictly positive integer number").
Default(defaultNodePort).
Envar(g("NODE_PORT")).
Int(),
Remotes: tcpAddrListParser(kingpin.Flag("remotes", `(cluster mode) The list of TCP addresses of some other guble nodes (format: "IP:port")`).
Envar(g("NODE_REMOTES"))),
},
SMS: sms.Config{
Enabled: kingpin.Flag("sms", "Enable the SMS gateway").
Envar(g("SMS")).
Bool(),
APIKey: kingpin.Flag("sms-api-key", "The Nexmo API Key for Sending sms").
Envar(g("SMS_API_KEY")).
String(),
APISecret: kingpin.Flag("sms-api-secret", "The Nexmo API Secret for Sending sms").
Envar(g("SMS_API_SECRET")).
String(),
SMSTopic: kingpin.Flag("sms-topic", "The topic for sms route").
Envar(g("SMS_TOPIC")).
Default(sms.SMSDefaultTopic).
String(),
Toggleable: kingpin.Flag("sms-toggleable", "Whether the SMS gateway can be stopped and restarted at runtime").
Envar(g("SMS_TOGGLEABLE")).
Bool(),
Workers: kingpin.Flag("sms-workers", "The number of workers handling traffic with Nexmo sms endpoint").
Default(strconv.Itoa(runtime.NumCPU())).
Envar(g("SMS_WORKERS")).
Int(),
IntervalMetrics: &defaultSMSMetrics,
},
WS: websocket.Config{
Enabled: kingpin.Flag("ws", "Enable the websocket module").
Envar(g("WS")).
Bool(),
Prefix: kingpin.Flag("ws-prefix", "The Websocket prefix").
Envar(g("WS_PREFIX")).
Default("/stream/").
String(),
},
KafkaProducer: kafka.Config{
Brokers: configstring.NewFromKingpin(
kingpin.Flag("kafka-brokers", `The list Kafka brokers to which Guble should connect (formatted as host:port, separated by spaces or commas)`).
Envar(g("KAFKA_BROKERS"))),
},
KafkaReportingConfig: KafkaReportingConfig{
SmsReportingTopic: kingpin.Flag("sms-kafka-topic", "The name of the SMS-Reporting Kafka topic").
Envar("GUBLE_SMS_KAFKA_TOPIC").
String(),
SubscribeUnsubscribeReportingTopic: kingpin.Flag("subscribe-kafka-topic", "The name of the Subscribe/Unsubscribe Reporting Kafka topic").
Envar("GUBLE_SUBSCRIBE_KAFKA_TOPIC").
String(),
ApnsReportingTopic: kingpin.Flag("apns-kafka-topic", "The name of the Apns-Reporting Kafka topic").
Envar("GUBLE_APNS_KAFKA_TOPIC").
String(),
FcmReportingTopic: kingpin.Flag("fcm-kafka-topic", "The name of the fcm-Reporting Kafka topic").
Envar("GUBLE_FCM_KAFKA_TOPIC").
String(),
},
}
)
func g(s string) string {
return "GUBLE_" + s
}
func logLevels() (levels []string) {
for _, level := range log.AllLevels {
levels = append(levels, level.String())
}
return
}
// parseConfig parses the flags from the command line. Must be called before accessing the config.
// If there are missing or invalid arguments it will exit the application
// and display a message.
func | parseConfig | identifier_name |
|
config.go | (defaultKVSBackend).
Envar(g("KVS")).
String(),
MS: kingpin.Flag("ms", "The message storage backend : file | memory").
Default(defaultMSBackend).
HintOptions("file", "memory").
Envar(g("MS")).
String(),
StoragePath: kingpin.Flag("storage-path", "The path for storing messages and key-value data if 'file' is selected").
Default(defaultStoragePath).
Envar(g("STORAGE_PATH")).
ExistingDir(),
HealthEndpoint: kingpin.Flag("health-endpoint", `The health endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultHealthEndpoint).
Envar(g("HEALTH_ENDPOINT")).
String(),
MetricsEndpoint: kingpin.Flag("metrics-endpoint", `The metrics endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultMetricsEndpoint).
Envar(g("METRICS_ENDPOINT")).
String(),
PrometheusEndpoint: kingpin.Flag("prometheus-endpoint", `The metrics Prometheus endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultPrometheusEndpoint).
Envar(g("PROMETHEUS_ENDPOINT")).
String(),
TogglesEndpoint: kingpin.Flag("toggles-endpoint", `The Feature-Toggles endpoint to be used by the HTTP server (value for disabling it: "")`).
Default(defaultTogglesEndpoint).
Envar(g("TOGGLES_ENDPOINT")).
String(),
Profile: kingpin.Flag("profile", `The profiler to be used (default: none): mem | cpu | block`).
Default("").
Envar(g("PROFILE")).
Enum("mem", "cpu", "block", ""),
Postgres: PostgresConfig{
Host: kingpin.Flag("pg-host", "The PostgreSQL hostname").
Default("localhost").
Envar(g("PG_HOST")).
String(),
Port: kingpin.Flag("pg-port", "The PostgreSQL port").
Default("5432").
Envar(g("PG_PORT")).
Int(),
User: kingpin.Flag("pg-user", "The PostgreSQL user").
Default("guble").
Envar(g("PG_USER")).
String(),
Password: kingpin.Flag("pg-password", "The PostgreSQL password").
Default("guble").
Envar(g("PG_PASSWORD")).
String(),
DbName: kingpin.Flag("pg-dbname", "The PostgreSQL database name").
Default("guble").
Envar(g("PG_DBNAME")).
String(),
},
FCM: fcm.Config{
Enabled: kingpin.Flag("fcm", "Enable the Google Firebase Cloud Messaging connector").
Envar(g("FCM")).
Bool(),
APIKey: kingpin.Flag("fcm-api-key", "The Google API Key for Google Firebase Cloud Messaging").
Envar(g("FCM_API_KEY")).
String(),
Workers: kingpin.Flag("fcm-workers", "The number of workers handling traffic with Firebase Cloud Messaging (default: number of CPUs)").
Default(strconv.Itoa(runtime.NumCPU())).
Envar(g("FCM_WORKERS")).
Int(),
Endpoint: kingpin.Flag("fcm-endpoint", "The Google Firebase Cloud Messaging endpoint").
Default(defaultFCMEndpoint).
Envar(g("FCM_ENDPOINT")).
String(),
Prefix: kingpin.Flag("fcm-prefix", "The FCM prefix / endpoint").
Envar(g("FCM_PREFIX")).
Default("/fcm/").
String(),
IntervalMetrics: &defaultFCMMetrics,
},
APNS: apns.Config{
Enabled: kingpin.Flag("apns", "Enable the APNS connector (by default, in Development mode)").
Envar(g("APNS")).
Bool(),
Production: kingpin.Flag("apns-production", "Enable the APNS connector in Production mode").
Envar(g("APNS_PRODUCTION")).
Bool(),
CertificateFileName: kingpin.Flag("apns-cert-file", "The APNS certificate file name").
Envar(g("APNS_CERT_FILE")).
String(),
CertificateBytes: kingpin.Flag("apns-cert-bytes", "The APNS certificate bytes, as a string of hex-values").
Envar(g("APNS_CERT_BYTES")).
HexBytes(),
CertificatePassword: kingpin.Flag("apns-cert-password", "The APNS certificate password").
Envar(g("APNS_CERT_PASSWORD")).
String(),
AppTopic: kingpin.Flag("apns-app-topic", "The APNS topic (as used by the mobile application)").
Envar(g("APNS_APP_TOPIC")).
String(),
Prefix: kingpin.Flag("apns-prefix", "The APNS prefix / endpoint").
Envar(g("APNS_PREFIX")).
Default("/apns/").
String(),
Workers: kingpin.Flag("apns-workers", "The number of workers handling traffic with APNS (default: number of CPUs)").
Default(strconv.Itoa(runtime.NumCPU())).
Envar(g("APNS_WORKERS")).
Int(),
IntervalMetrics: &defaultAPNSMetrics,
},
Cluster: ClusterConfig{
NodeID: kingpin.Flag("node-id", "(cluster mode) This guble node's own ID: a strictly positive integer number which must be unique in cluster").
Envar(g("NODE_ID")).
Uint8(),
NodePort: kingpin.Flag("node-port", "(cluster mode) This guble node's own local port: a strictly positive integer number").
Default(defaultNodePort).
Envar(g("NODE_PORT")).
Int(),
Remotes: tcpAddrListParser(kingpin.Flag("remotes", `(cluster mode) The list of TCP addresses of some other guble nodes (format: "IP:port")`).
Envar(g("NODE_REMOTES"))),
},
SMS: sms.Config{
Enabled: kingpin.Flag("sms", "Enable the SMS gateway").
Envar(g("SMS")).
Bool(),
APIKey: kingpin.Flag("sms-api-key", "The Nexmo API Key for Sending sms").
Envar(g("SMS_API_KEY")).
String(),
APISecret: kingpin.Flag("sms-api-secret", "The Nexmo API Secret for Sending sms").
Envar(g("SMS_API_SECRET")).
String(),
SMSTopic: kingpin.Flag("sms-topic", "The topic for sms route").
Envar(g("SMS_TOPIC")).
Default(sms.SMSDefaultTopic).
String(),
Toggleable: kingpin.Flag("sms-toggleable", "Whether the SMS gateway can be stopped and restarted at runtime").
Envar(g("SMS_TOGGLEABLE")).
Bool(),
Workers: kingpin.Flag("sms-workers", "The number of workers handling traffic with Nexmo sms endpoint").
Default(strconv.Itoa(runtime.NumCPU())).
Envar(g("SMS_WORKERS")).
Int(),
IntervalMetrics: &defaultSMSMetrics,
},
WS: websocket.Config{
Enabled: kingpin.Flag("ws", "Enable the websocket module").
Envar(g("WS")).
Bool(),
Prefix: kingpin.Flag("ws-prefix", "The Websocket prefix").
Envar(g("WS_PREFIX")).
Default("/stream/").
String(),
},
KafkaProducer: kafka.Config{
Brokers: configstring.NewFromKingpin(
kingpin.Flag("kafka-brokers", `The list Kafka brokers to which Guble should connect (formatted as host:port, separated by spaces or commas)`).
Envar(g("KAFKA_BROKERS"))),
},
KafkaReportingConfig: KafkaReportingConfig{
SmsReportingTopic: kingpin.Flag("sms-kafka-topic", "The name of the SMS-Reporting Kafka topic").
Envar("GUBLE_SMS_KAFKA_TOPIC").
String(),
SubscribeUnsubscribeReportingTopic: kingpin.Flag("subscribe-kafka-topic", "The name of the Subscribe/Unsubscribe Reporting Kafka topic").
Envar("GUBLE_SUBSCRIBE_KAFKA_TOPIC").
String(),
ApnsReportingTopic: kingpin.Flag("apns-kafka-topic", "The name of the Apns-Reporting Kafka topic").
Envar("GUBLE_APNS_KAFKA_TOPIC").
String(),
FcmReportingTopic: kingpin.Flag("fcm-kafka-topic", "The name of the fcm-Reporting Kafka topic").
Envar("GUBLE_FCM_KAFKA_TOPIC").
String(),
},
}
)
func g(s string) string {
return "GUBLE_" + s
}
func logLevels() (levels []string) {
for _, level := range log.AllLevels {
levels = append(levels, level.String())
}
return
}
// parseConfig parses the flags from the command line. Must be called before accessing the config.
// If there are missing or invalid arguments it will exit the application
// and display a message.
func parseConfig() {
if parsed | {
return
} | conditional_block |
|
SelectOneView.js | the field is assigned a value from outside; when the value changes
//* we need to call rc-form's onChange synchronously and resolve the matching label for display
//! so a data_source field cannot be assigned synchronously; avoid modifying the data_source value from outside where possible
const options = this.getOptions();
if (_.isEmpty(options) || !_.isEmpty(this.dataSource)) return;
const selected = _.find(options, { value: currentValue });
//* the passed value matches none of the options
if (_.isEmpty(selected)) return;
this.handleSelect({ selected: [selected] });
}
}
fetchSubordinateData = async (token: string) => {
const url = `${baseURL}${tutorial_query.replace(
'{id}',
global.FC_CRM_USERID,
)}/?restrict=true&token=${token}`;
const resultData = await request(url, 'GET');
const data = _.get(resultData, 'body.result', []);
const renderData = [];
_.map(data, (item) => {
renderData.push({
label: item.name,
value: item.id,
});
});
this.setState({ subordinateList: renderData });
};
isDisabled = () => {
const { disabled, record } = this.props;
//* the layout configuration takes priority for the disabled state
if (disabled) {
return true;
}
//* if the layout does not disable the field, check whether a dependency field is configured
if (!_.isEmpty(this.dependency)) {
//* a dependency is configured, so evaluate it
const { on } = this.dependency;
const dependencyFieldValue = _.get(record, on);
if (_.isUndefined(dependencyFieldValue) || _.isNull(dependencyFieldValue)) {
return true;
}
}
return false;
};
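// When the merged field describe/layout sets need_default_checked, pre-select the matching option.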
checkedDefaultValue = () => {
const { fieldDesc, fieldLayout } = this.props;
const mergedObjectFieldDescribe = Object.assign({}, fieldDesc, fieldLayout);
const need_default_checked = _.get(mergedObjectFieldDescribe, 'need_default_checked');
if (!_.isEmpty(need_default_checked) && _.get(need_default_checked, 'need_checked')) {
const defaultValue = _.get(need_default_checked, 'checked_value');
const options = _.get(mergedObjectFieldDescribe, 'options');
const defaultCheck = _.find(options, (e) => e.value === defaultValue);
if (!_.isEmpty(defaultCheck)) {
this.handleSelect({ selected: [defaultCheck], multipleSelect: false });
| && apiName === 'customer') {
_.set(selected, 'selected', [relatedParentData]);
_.set(selected, 'apiName', apiName);
_.set(selected, 'renderType', renderType);
}
this.setState({
selected: selected.selected,
});
if (onChange) {
const value = _.get(selected, 'selected[0].value', _.get(selected, 'selected[0].id'));
onChange(value);
}
handleCreate(selected);
};
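// Resolves display labels for the stored ids: builds an "in" criteria on the id (or <target_field>.id)
// field, optionally merged with the configured data-source criterias, queries the target object and
// fills the selected state for single or multiple select.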
fetchData = async (token: string, object_api_name: string, criterias: Array<any>) => {
const { renderType, placeholderValue, fieldLayout, value, record } = this.props;
const objectApiName = object_api_name;
const fieldValueList = _.split(_.toString(value), ',');
// normalize query criteria values that are not formatted correctly
const valueList = [];
fieldValueList.forEach((val) => {
if (typeof val === 'string') {
if (val.indexOf('[') > -1) {
val = _.replace(val, '[', '');
}
if (val.indexOf(']') > -1) {
val = _.replace(val, ']', '');
}
}
valueList.push(val);
});
const dataSourceCriterias = processCriterias(_.get(this.dataSource, 'criterias', []), record);
const fieldValueListCriteria = {
field: this.target_field ? `${this.target_field}.id` : 'id',
operator: 'in',
value: value ? valueList : [],
};
// * enablec_async_criterias defaults to false; when true, the dataSource initial query uses the union of the configured criterias and the id (target_field) criteria
const enablecAsyncCriterias = _.get(this.dataSource, 'enablec_async_criterias', false);
const criteria = enablecAsyncCriterias
? _.concat([fieldValueListCriteria], dataSourceCriterias)
: [fieldValueListCriteria];
const payload = {
token,
objectApiName,
criteria,
joiner: 'and',
pageSize: 2500,
pageNo: 1,
};
const data = await HttpRequest.query(payload);
const fetchList = _.get(data, 'result');
const selected = [];
const labelExp = _.get(fieldLayout, 'render_label_expression');
if (renderType === 'select_multiple' && placeholderValue) {
_.each(placeholderValue, (val) => {
_.each(fetchList, (fetchData) => {
const tmpData = this.target_field ? _.get(fetchData, this.target_field) : fetchData;
if (_.get(tmpData, 'id') == val) {
selected.push({
label: labelExp ? executeDetailExp(labelExp, tmpData) : _.get(tmpData, 'name'),
value: val,
});
}
});
});
this.setState({ selected });
}
if (renderType === 'select_one' && placeholderValue) {
const tmpData = this.target_field
? _.get(fetchList, ['0', this.target_field])
: _.get(fetchList, '0');
this.setState({
selected: {
label: labelExp ? executeDetailExp(labelExp, tmpData) : _.get(tmpData, 'name'),
value: _.get(tmpData, 'id'),
},
});
}
};
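// Resolves the option list in priority order: subordinate list, reserveOptions, (empty when a
// data source is configured), layout options, dependency rules, then the field describe options.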
getOptions = () => {
const { fieldDesc, fieldLayout, record, reserveOptions, renderType } = this.props;
if (renderType === 'subordinate') {
return _.get(this.state, 'subordinateList', []);
}
if (!_.isEmpty(reserveOptions)) {
return reserveOptions;
}
let options = [];
const mergedObjectFieldDescribe = Object.assign({}, fieldDesc, fieldLayout);
if (!_.isEmpty(this.dataSource)) {
return options;
}
const { dependency = {} } = mergedObjectFieldDescribe;
//* if the layout configures options, the layout takes priority
if (!_.isEmpty(_.get(fieldLayout, 'options', ''))) {
this.hasLayoutOptions = true;
options = _.get(fieldLayout, 'options');
} else if (!_.isEmpty(dependency)) {
//* a dependency is configured, so evaluate it
const { on, rules, defaults = [] } = dependency;
const dependencyFieldValue = _.get(record, on);
// * when the depended-on field has no value, this field stays disabled
const rule = rules.find((x) => x.when.indexOf(dependencyFieldValue) >= 0);
//* if no matching rule is configured the dropdown options are empty (dependencyFieldValueMap = true)
const optionValues = rule ? rule.then : defaults;
options = _.isEmpty(optionValues) ? [] : _.cloneDeep(_.get(fieldDesc, 'options'));
_.remove(options, (option) => _.indexOf(optionValues, option.value) < 0);
} else if (!_.isEmpty(_.get(fieldDesc, 'options'))) {
//* populate options from the object describe
options = _.cloneDeep(_.get(fieldDesc, 'options'));
}
return options;
};
matchMultipleName = (item, value) => {
let _resultValue = '';
if (this.target_field) {
_.each(value, (e) => {
const _id = _.get(item, `${this.target_field}.id`);
const _name = _.get(item, `${this.target_field}.name`);
if (e == _id && _name) {
_resultValue = _name;
return false;
}
});
} else {
_.each(value, (e) => {
const _id = _.get(item, 'id');
const _name = _.get(item, 'name');
if (e == _id && _name) {
_resultValue = _name;
return false;
}
});
}
return _resultValue;
};
selectedWebCallback = async (messageData) => {
const { handleCreate, fieldDesc, onChange, renderType, multipleSelect } = this.props;
const apiName = _.get(fieldDesc, 'api_name');
const resultData = {
selected: messageData,
apiName,
renderType,
multipleSelect,
};
this.setState({
selected: messageData,
});
if (onChange) {
const value = _.get(messageData, '[0].value') || _.get(messageData, '[0].id');
onChange(value);
}
handleCreate(resultData);
};
navigatePage = (disable, destination, param) => {
const { selected } = this.state;
| }
}
};
// callback invoked when a selection is made
handleSelect = (selected) => {
const { handleCreate, fieldDesc, onChange, renderType, relatedParentData } = this.props;
const apiName = _.get(fieldDesc, 'api_name');
_.set(selected, 'apiName', apiName);
_.set(selected, 'renderType', renderType);
if (selected['selected'] && !selected['selected'][0] | conditional_block |
SelectOneView.js | this.hasLayoutOptions = false;
this.state = {
selected: null,
ModalSelector: false,
subordinateList: [],
};
}
componentDidMount() {
const { multipleSelect, token, renderType, placeholderValue } = this.props;
if (!_.isEmpty(this.dataSource)) {
const { object_api_name, criterias = [], target_field = '' } = this.dataSource;
this.target_field = target_field;
this.fetchData(token, object_api_name, criterias);
}
if (renderType === 'subordinate') {
// ? subordinate-filter component?
this.fetchSubordinateData(token);
}
//* initialize the pre-selected options for multiple select
if (renderType === 'select_multiple' && placeholderValue && _.isEmpty(this.dataSource)) {
const options = this.getOptions();
const selected = [];
_.each(options, (option) => {
_.each(placeholderValue, (val) => {
if (option.value === val) {
selected.push(option);
}
});
});
this.setState({
selected,
});
}
//* apply the default selection for single select
if (!multipleSelect) {
this.checkedDefaultValue();
}
}
componentDidUpdate(prevProps, prevStates) {
const field = _.get(this.props, 'fieldDesc.api_name');
const currentValue = _.get(this.props, `record.${field}`);
const prevValue = _.get(prevProps, `record.${field}`);
if (currentValue != prevValue) {
// * Clear the value
if (_.isUndefined(currentValue)) {
this.handleSelect({ selected: [] });
return;
}
//* The code below handles external assignment to this field: when the value changes,
//* rc-form's onChange must be called in sync and the matching label fetched for display.
//! A data_source field therefore cannot be assigned synchronously; avoid modifying data_source values externally.
const options = this.getOptions();
if (_.isEmpty(options) || !_.isEmpty(this.dataSource)) return;
const selected = _.find(options, { value: currentValue });
//* The passed value matches none of the options
if (_.isEmpty(selected)) return;
this.handleSelect({ selected: [selected] });
}
}
fetchSubordinateData = async (token: string) => {
const url = `${baseURL}${tutorial_query.replace(
'{id}',
global.FC_CRM_USERID,
)}/?restrict=true&token=${token}`;
const resultData = await request(url, 'GET');
const data = _.get(resultData, 'body.result', []);
const renderData = [];
_.map(data, (item) => {
renderData.push({
label: item.name,
value: item.id,
});
});
this.setState({ subordinateList: renderData });
};
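// Note: the subordinate query above is assumed to answer with a body shaped roughly like
// { body: { result: [{ id: '001', name: 'Alice' }, ...] } }; each entry is mapped to a
// { label, value } pair for the dropdown (ids and names here are made up).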
isDisabled = () => {
const { disabled, record } = this.props;
//* The layout's disabled setting takes priority
if (disabled) {
return true;
}
//* If the layout does not disable the field, check whether a dependency field is configured
if (!_.isEmpty(this.dependency)) {
//* A dependency is configured, so evaluate it
const { on } = this.dependency;
const dependencyFieldValue = _.get(record, on);
if (_.isUndefined(dependencyFieldValue) || _.isNull(dependencyFieldValue)) {
return true;
}
}
return false;
};
checkedDefaultValue = () => {
const { fieldDesc, fieldLayout } = this.props;
const mergedObjectFieldDescribe = Object.assign({}, fieldDesc, fieldLayout);
const need_default_checked = _.get(mergedObjectFieldDescribe, 'need_default_checked');
if (!_.isEmpty(need_default_checked) && _.get(need_default_checked, 'need_checked')) {
const defaultValue = _.get(need_default_checked, 'checked_value');
const options = _.get(mergedObjectFieldDescribe, 'options');
const defaultCheck = _.find(options, (e) => e.value === defaultValue);
if (!_.isEmpty(defaultCheck)) {
this.handleSelect({ selected: [defaultCheck], multipleSelect: false });
}
}
};
// Callback invoked when a selection is made
handleSelect = (selected) => {
const { handleCreate, fieldDesc, onChange, renderType, relatedParentData } = this.props;
const apiName = _.get(fieldDesc, 'api_name');
_.set(selected, 'apiName', apiName);
_.set(selected, 'renderType', renderType);
if (selected['selected'] && !selected['selected'][0] && apiName === 'customer') {
_.set(selected, 'selected', [relatedParentData]);
_.set(selected, 'apiName', apiName);
_.set(selected, 'renderType', renderType);
}
this.setState({
selected: selected.selected,
});
if (onChange) {
const value = _.get(selected, 'selected[0].value', _.get(selected, 'selected[0].id'));
onChange(value);
}
handleCreate(selected);
};
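// Example of the payload handed to handleCreate above (shape inferred from this component, values invented):
// { selected: [{ label: 'Option A', value: 'a' }], apiName: 'status', renderType: 'select_one' }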
fetchData = async (token: string, object_api_name: string, criterias: Array<any>) => {
const { renderType, placeholderValue, fieldLayout, value, record } = this.props;
const objectApiName = object_api_name;
const fieldValueList = _.split(_.toString(value), ',');
// Normalize query criteria that are in the wrong format
const valueList = [];
fieldValueList.forEach((val) => {
if (typeof val === 'string') {
if (val.indexOf('[') > -1) {
val = _.replace(val, '[', '');
}
if (val.indexOf(']') > -1) {
val = _.replace(val, ']', '');
}
}
valueList.push(val);
});
const dataSourceCriterias = processCriterias(_.get(this.dataSource, 'criterias', []), record);
const fieldValueListCriteria = {
field: this.target_field ? `${this.target_field}.id` : 'id',
operator: 'in',
value: value ? valueList : [],
};
// * enablec_async_criterias defaults to false; when true, the dataSource's initial query takes the union of the configured criterias and the id (target_field) filter
const enablecAsyncCriterias = _.get(this.dataSource, 'enablec_async_criterias', false);
const criteria = enablecAsyncCriterias
? _.concat([fieldValueListCriteria], dataSourceCriterias)
: [fieldValueListCriteria];
const payload = {
token,
objectApiName,
criteria,
joiner: 'and',
pageSize: 2500,
pageNo: 1,
};
const data = await HttpRequest.query(payload);
const fetchList = _.get(data, 'result');
const selected = [];
const labelExp = _.get(fieldLayout, 'render_label_expression');
if (renderType === 'select_multiple' && placeholderValue) {
_.each(placeholderValue, (val) => {
_.each(fetchList, (fetchData) => {
const tmpData = this.target_field ? _.get(fetchData, this.target_field) : fetchData;
if (_.get(tmpData, 'id') == val) {
selected.push({
label: labelExp ? executeDetailExp(labelExp, tmpData) : _.get(tmpData, 'name'),
value: val,
});
}
});
});
this.setState({ selected });
}
if (renderType === 'select_one' && placeholderValue) {
const tmpData = this.target_field
? _.get(fetchList, ['0', this.target_field])
: _.get(fetchList, '0');
this.setState({
selected: {
label: labelExp ? executeDetailExp(labelExp, tmpData) : _.get(tmpData, 'name'),
value: _.get(tmpData, 'id'),
},
});
}
};
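// For reference, the query payload built above looks roughly like this (illustrative values only):
// {
//   token,
//   objectApiName: 'account',
//   criteria: [{ field: 'id', operator: 'in', value: ['a1', 'a2'] }, ...dataSourceCriterias],
//   joiner: 'and',
//   pageSize: 2500,
//   pageNo: 1,
// }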
getOptions = () => {
const { fieldDesc, fieldLayout, record, reserveOptions, renderType } = this.props;
if (renderType === 'subordinate') {
return _.get(this.state, 'subordinateList', []);
}
if (!_.isEmpty(reserveOptions)) {
return reserveOptions;
}
let options = [];
const mergedObjectFieldDescribe = Object.assign({}, fieldDesc, fieldLayout);
if (!_.isEmpty(this.dataSource)) {
return options;
}
const { dependency = {} } = mergedObjectFieldDescribe;
//* If the layout configures options, the layout's options take priority
if (!_.isEmpty(_.get(fieldLayout, 'options', ''))) {
this.hasLayoutOptions = true;
options = _.get(fieldLayout | fieldLayout, fieldDesc } = props;
const mergedObjectFieldDescribe = Object.assign({}, fieldDesc, fieldLayout);
//* data_source configuration
this.dataSource = _.get(fieldLayout, 'data_source', {});
this.target_field = '';
// * Record-type layout used for single/multi-select lookups
this.targetRecordType =
_.get(fieldLayout, 'target_record_type') ||
_.get(fieldLayout, 'target_layout_record_type') ||
'master';
this.dataRecordType =
_.get(fieldLayout, 'target_data_record_type') || _.get(fieldLayout, 'target_record_type');
//* Check whether a dependency is configured
this.dependency = _.get(mergedObjectFieldDescribe, 'dependency');
this.textColor = '';
//* 布局配置默认options | identifier_body |
|
SelectOneView.js | 是外部赋值给该fiield,在值改变的同时
//* rc-form's onChange must be called in sync and the matching label fetched for display.
//! A data_source field therefore cannot be assigned synchronously; avoid modifying data_source values externally.
const options = this.getOptions();
if (_.isEmpty(options) || !_.isEmpty(this.dataSource)) return;
const selected = _.find(options, { value: currentValue });
//* The passed value matches none of the options
if (_.isEmpty(selected)) return;
this.handleSelect({ selected: [selected] });
}
}
fetchSubordinateData = async (token: string) => {
const url = `${baseURL}${tutorial_query.replace(
'{id}',
global.FC_CRM_USERID,
)}/?restrict=true&token=${token}`;
const resultData = await request(url, 'GET');
const data = _.get(resultData, 'body.result', []);
const renderData = [];
_.map(data, (item) => {
renderData.push({
label: item.name,
value: item.id,
});
});
this.setState({ subordinateList: renderData });
};
isDisabled = () => {
const { disabled, record } = this.props;
//* The layout's disabled setting takes priority
if (disabled) {
return true;
}
//* If the layout does not disable the field, check whether a dependency field is configured
if (!_.isEmpty(this.dependency)) {
//* A dependency is configured, so evaluate it
const { on } = this.dependency;
const dependencyFieldValue = _.get(record, on);
if (_.isUndefined(dependencyFieldValue) || _.isNull(dependencyFieldValue)) {
return true;
}
}
return false;
};
checkedDefaultValue = () => {
const { fieldDesc, fieldLayout } = this.props;
const mergedObjectFieldDescribe = Object.assign({}, fieldDesc, fieldLayout);
const need_default_checked = _.get(mergedObjectFieldDescribe, 'need_default_checked');
if (!_.isEmpty(need_default_checked) && _.get(need_default_checked, 'need_checked')) {
const defaultValue = _.get(need_default_checked, 'checked_value');
const options = _.get(mergedObjectFieldDescribe, 'options');
const defaultCheck = _.find(options, (e) => e.value === defaultValue);
if (!_.isEmpty(defaultCheck)) {
this.handleSelect({ selected: [defaultCheck], multipleSelect: false });
}
}
};
// Callback invoked when a selection is made
handleSelect = (selected) => {
const { handleCreate, fieldDesc, onChange, renderType, relatedParentData } = this.props;
const apiName = _.get(fieldDesc, 'api_name');
_.set(selected, 'apiName', apiName);
_.set(selected, 'renderType', renderType);
if (selected['selected'] && !selected['selected'][0] && apiName === 'customer') {
_.set(selected, 'selected', [relatedParentData]);
_.set(selected, 'apiName', apiName);
_.set(selected, 'renderType', renderType);
}
this.setState({
selected: selected.selected,
});
if (onChange) {
const value = _.get(selected, 'selected[0].value', _.get(selected, 'selected[0].id'));
onChange(value);
}
handleCreate(selected);
};
fetchData = async (token: string, object_api_name: string, criterias: Array<any>) => {
const { renderType, placeholderValue, fieldLayout, value, record } = this.props;
const objectApiName = object_api_name;
const fieldValueList = _.split(_.toString(value), ',');
// Normalize query criteria that are in the wrong format
const valueList = [];
fieldValueList.forEach((val) => {
if (typeof val === 'string') {
if (val.indexOf('[') > -1) {
val = _.replace(val, '[', '');
}
if (val.indexOf(']') > -1) {
val = _.replace(val, ']', '');
}
}
valueList.push(val);
});
const dataSourceCriterias = processCriterias(_.get(this.dataSource, 'criterias', []), record);
const fieldValueListCriteria = {
field: this.target_field ? `${this.target_field}.id` : 'id',
operator: 'in',
value: value ? valueList : [],
};
// * enablec_async_criterias defaults to false; when true, the dataSource's initial query takes the union of the configured criterias and the id (target_field) filter
const enablecAsyncCriterias = _.get(this.dataSource, 'enablec_async_criterias', false);
const criteria = enablecAsyncCriterias
? _.concat([fieldValueListCriteria], dataSourceCriterias)
: [fieldValueListCriteria];
const payload = {
token,
objectApiName,
criteria,
joiner: 'and',
pageSize: 2500,
pageNo: 1,
};
const data = await HttpRequest.query(payload);
const fetchList = _.get(data, 'result');
| const tmpData = this.target_field ? _.get(fetchData, this.target_field) : fetchData;
if (_.get(tmpData, 'id') == val) {
selected.push({
label: labelExp ? executeDetailExp(labelExp, tmpData) : _.get(tmpData, 'name'),
value: val,
});
}
});
});
this.setState({ selected });
}
if (renderType === 'select_one' && placeholderValue) {
const tmpData = this.target_field
? _.get(fetchList, ['0', this.target_field])
: _.get(fetchList, '0');
this.setState({
selected: {
label: labelExp ? executeDetailExp(labelExp, tmpData) : _.get(tmpData, 'name'),
value: _.get(tmpData, 'id'),
},
});
}
};
getOptions = () => {
const { fieldDesc, fieldLayout, record, reserveOptions, renderType } = this.props;
if (renderType === 'subordinate') {
return _.get(this.state, 'subordinateList', []);
}
if (!_.isEmpty(reserveOptions)) {
return reserveOptions;
}
let options = [];
const mergedObjectFieldDescribe = Object.assign({}, fieldDesc, fieldLayout);
if (!_.isEmpty(this.dataSource)) {
return options;
}
const { dependency = {} } = mergedObjectFieldDescribe;
//* If the layout configures options, the layout's options take priority
if (!_.isEmpty(_.get(fieldLayout, 'options', ''))) {
this.hasLayoutOptions = true;
options = _.get(fieldLayout, 'options');
} else if (!_.isEmpty(dependency)) {
//* A dependency is configured, so evaluate it
const { on, rules, defaults = [] } = dependency;
const dependencyFieldValue = _.get(record, on);
// * When the depended-on field has no value yet, this field stays disabled
const rule = rules.find((x) => x.when.indexOf(dependencyFieldValue) >= 0);
//* If no matching rule is configured, the dropdown options are empty, dependencyFieldValueMap = true
const optionValues = rule ? rule.then : defaults;
options = _.isEmpty(optionValues) ? [] : _.cloneDeep(_.get(fieldDesc, 'options'));
_.remove(options, (option) => _.indexOf(optionValues, option.value) < 0);
} else if (!_.isEmpty(_.get(fieldDesc, 'options'))) {
//* Fill options from the object describe
options = _.cloneDeep(_.get(fieldDesc, 'options'));
}
return options;
};
matchMultipleName = (item, value) => {
let _resultValue = '';
if (this.target_field) {
_.each(value, (e) => {
const _id = _.get(item, `${this.target_field}.id`);
const _name = _.get(item, `${this.target_field}.name`);
if (e == _id && _name) {
_resultValue = _name;
return false;
}
});
} else {
_.each(value, (e) => {
const _id = _.get(item, 'id');
const _name = _.get(item, 'name');
if (e == _id && _name) {
_resultValue = _name;
return false;
}
});
}
return _resultValue;
};
selectedWebCallback = async (messageData) => {
const { handleCreate, fieldDesc, onChange, renderType, multipleSelect } = this.props;
const apiName = _.get(fieldDesc, 'api_name');
const resultData = {
selected: messageData,
apiName,
renderType,
multipleSelect,
};
this.setState({
selected: messageData,
});
if (onChange) {
const value = _.get(messageData, '[0].value') || _.get(messageData, '[0].id');
onChange(value);
}
handleCreate(resultData);
};
navigatePage = (disable, destination, param) => {
const { selected } = this.state;
| const selected = [];
const labelExp = _.get(fieldLayout, 'render_label_expression');
if (renderType === 'select_multiple' && placeholderValue) {
_.each(placeholderValue, (val) => {
_.each(fetchList, (fetchData) => { | random_line_split |
SelectOneView.js | const { fieldLayout, fieldDesc } = props;
const mergedObjectFieldDescribe = Object.assign({}, fieldDesc, fieldLayout);
//* data_source configuration
this.dataSource = _.get(fieldLayout, 'data_source', {});
this.target_field = '';
// * Record-type layout used for single/multi-select lookups
this.targetRecordType =
_.get(fieldLayout, 'target_record_type') ||
_.get(fieldLayout, 'target_layout_record_type') ||
'master';
this.dataRecordType =
_.get(fieldLayout, 'target_data_record_type') || _.get(fieldLayout, 'target_record_type');
//* Check whether a dependency is configured
this.dependency = _.get(mergedObjectFieldDescribe, 'dependency');
this.textColor = '';
//* Flag: the layout provides its own options
this.hasLayoutOptions = false;
this.state = {
selected: null,
ModalSelector: false,
subordinateList: [],
};
}
componentDidMount() {
const { multipleSelect, token, renderType, placeholderValue } = this.props;
if (!_.isEmpty(this.dataSource)) {
const { object_api_name, criterias = [], target_field = '' } = this.dataSource;
this.target_field = target_field;
this.fetchData(token, object_api_name, criterias);
}
if (renderType === 'subordinate') {
// ? Subordinate-filtering component?
this.fetchSubordinateData(token);
}
//* Initialize multi-select with the already-selected values
if (renderType === 'select_multiple' && placeholderValue && _.isEmpty(this.dataSource)) {
const options = this.getOptions();
const selected = [];
_.each(options, (option) => {
_.each(placeholderValue, (val) => {
if (option.value === val) {
selected.push(option);
}
});
});
this.setState({
selected,
});
}
//* Default checked value for single-select
if (!multipleSelect) {
this.checkedDefaultValue();
}
}
componentDidUpdate(prevProps, prevStates) {
const field = _.get(this.props, 'fieldDesc.api_name');
const currentValue = _.get(this.props, `record.${field}`);
const prevValue = _.get(prevProps, `record.${field}`);
if (currentValue != prevValue) {
// * Clear the value
if (_.isUndefined(currentValue)) {
this.handleSelect({ selected: [] });
return;
}
//* The code below handles external assignment to this field: when the value changes,
//* rc-form's onChange must be called in sync and the matching label fetched for display.
//! A data_source field therefore cannot be assigned synchronously; avoid modifying data_source values externally.
const options = this.getOptions();
if (_.isEmpty(options) || !_.isEmpty(this.dataSource)) return;
const selected = _.find(options, { value: currentValue });
//* The passed value matches none of the options
if (_.isEmpty(selected)) return;
this.handleSelect({ selected: [selected] });
}
}
fetchSubordinateData = async (token: string) => {
const url = `${baseURL}${tutorial_query.replace(
'{id}',
global.FC_CRM_USERID,
)}/?restrict=true&token=${token}`;
const resultData = await request(url, 'GET');
const data = _.get(resultData, 'body.result', []);
const renderData = [];
_.map(data, (item) => {
renderData.push({
label: item.name,
value: item.id,
});
});
this.setState({ subordinateList: renderData });
};
isDisabled = () => {
const { disabled, record } = this.props;
//* The layout's disabled setting takes priority
if (disabled) {
return true;
}
//* If the layout does not disable the field, check whether a dependency field is configured
if (!_.isEmpty(this.dependency)) {
//* A dependency is configured, so evaluate it
const { on } = this.dependency;
const dependencyFieldValue = _.get(record, on);
if (_.isUndefined(dependencyFieldValue) || _.isNull(dependencyFieldValue)) {
return true;
}
}
return false;
};
checkedDefaultValue = () => {
const { fieldDesc, fieldLayout } = this.props;
const mergedObjectFieldDescribe = Object.assign({}, fieldDesc, fieldLayout);
const need_default_checked = _.get(mergedObjectFieldDescribe, 'need_default_checked');
if (!_.isEmpty(need_default_checked) && _.get(need_default_checked, 'need_checked')) {
const defaultValue = _.get(need_default_checked, 'checked_value');
const options = _.get(mergedObjectFieldDescribe, 'options');
const defaultCheck = _.find(options, (e) => e.value === defaultValue);
if (!_.isEmpty(defaultCheck)) {
this.handleSelect({ selected: [defaultCheck], multipleSelect: false });
}
}
};
// Callback invoked when a selection is made
handleSelect = (selected) => {
const { handleCreate, fieldDesc, onChange, renderType, relatedParentData } = this.props;
const apiName = _.get(fieldDesc, 'api_name');
_.set(selected, 'apiName', apiName);
_.set(selected, 'renderType', renderType);
if (selected['selected'] && !selected['selected'][0] && apiName === 'customer') {
_.set(selected, 'selected', [relatedParentData]);
_.set(selected, 'apiName', apiName);
_.set(selected, 'renderType', renderType);
}
this.setState({
selected: selected.selected,
});
if (onChange) {
const value = _.get(selected, 'selected[0].value', _.get(selected, 'selected[0].id'));
onChange(value);
}
handleCreate(selected);
};
fetchData = async (token: string, object_api_name: string, criterias: Array<any>) => {
const { renderType, placeholderValue, fieldLayout, value, record } = this.props;
const objectApiName = object_api_name;
const fieldValueList = _.split(_.toString(value), ',');
// Normalize query criteria that are in the wrong format
const valueList = [];
fieldValueList.forEach((val) => {
if (typeof val === 'string') {
if (val.indexOf('[') > -1) {
val = _.replace(val, '[', '');
}
if (val.indexOf(']') > -1) {
val = _.replace(val, ']', '');
}
}
valueList.push(val);
});
const dataSourceCriterias = processCriterias(_.get(this.dataSource, 'criterias', []), record);
const fieldValueListCriteria = {
field: this.target_field ? `${this.target_field}.id` : 'id',
operator: 'in',
value: value ? valueList : [],
};
// * enablec_async_criterias defaults to false; when true, the dataSource's initial query takes the union of the configured criterias and the id (target_field) filter
const enablecAsyncCriterias = _.get(this.dataSource, 'enablec_async_criterias', false);
const criteria = enablecAsyncCriterias
? _.concat([fieldValueListCriteria], dataSourceCriterias)
: [fieldValueListCriteria];
const payload = {
token,
objectApiName,
criteria,
joiner: 'and',
pageSize: 2500,
pageNo: 1,
};
const data = await HttpRequest.query(payload);
const fetchList = _.get(data, 'result');
const selected = [];
const labelExp = _.get(fieldLayout, 'render_label_expression');
if (renderType === 'select_multiple' && placeholderValue) {
_.each(placeholderValue, (val) => {
_.each(fetchList, (fetchData) => {
const tmpData = this.target_field ? _.get(fetchData, this.target_field) : fetchData;
if (_.get(tmpData, 'id') == val) {
selected.push({
label: labelExp ? executeDetailExp(labelExp, tmpData) : _.get(tmpData, 'name'),
value: val,
});
}
});
});
this.setState({ selected });
}
if (renderType === 'select_one' && placeholderValue) {
const tmpData = this.target_field
? _.get(fetchList, ['0', this.target_field])
: _.get(fetchList, '0');
this.setState({
selected: {
label: labelExp ? executeDetailExp(labelExp, tmpData) : _.get(tmpData, 'name'),
value: _.get(tmpData, 'id'),
},
});
}
};
getOptions = () => {
const { fieldDesc, fieldLayout, record, reserveOptions, renderType } = this.props;
if (renderType === 'subordinate') {
return _.get(this.state, 'subordinateList', []);
}
if (!_.isEmpty(reserveOptions)) {
return reserveOptions;
}
let options = [];
const mergedObjectFieldDescribe = Object.assign({}, fieldDesc, fieldLayout);
if (!_.isEmpty(this.dataSource)) {
return options;
}
const { dependency = {} } = mergedObjectFieldDescribe;
//* If the layout configures options, the layout's options take priority
if (!_.isEmpty(_.get(fieldLayout, 'options', ''))) {
this.hasLayoutOptions = true;
options = _. | rops);
| identifier_name |
|
GUI test backup.py | _queue = mp.Queue()
# Creating the basic layouts.
root = FloatLayout()
scroll = ScrollView(pos_hint={"x": 0.12, "top": 0.92}, size_hint=(0.9, 1))
layout = GridLayout(cols=5, padding=0, spacing=5)
layout.bind(minimum_height=layout.setter("height"))
# Create the ActionBar with buttons.
actionbar = F.ActionBar(pos_hint={'top': 1})
av = F.ActionView()
av.add_widget(F.ActionPrevious(title='SPAI', with_previous=False, app_icon="icon.png"))
av.add_widget(F.ActionOverflow())
av.add_widget(F.ActionButton(text='Import'.format(), on_press=self._pop))
av.add_widget(F.ActionButton(text='Report'.format()))
actionbar.add_widget(av)
av.use_separator = False
# Adding the layouts to the root layout
root.add_widget(actionbar)
root.add_widget(self._sidepanel())
root.add_widget(scroll)
scroll.add_widget(layout)
return root
def _update_scroll(self, path):
"""
Function to update "showphotos" layout, when scrolling.
Args: | path: The path to the photos shown, in "showphotos" layout.
"""
global layout
#Removes the widgets in the scroll layout, if there is any.
scroll.remove_widget(layout)
#Loads the new updated layout, and updates the showphotos layout.
layout = self._showphotos(path)
scroll.add_widget(layout)
layout.do_layout()
def _sidepanel(self):
"""
Function to create the sidepanel in the root layout. It reads all the folders from "curdir", and
creates a button for each folder in "curdir". The sidepanel layout is then updated to show the buttons.
Returns:
Returs the sidepanel layout to the root layout.
"""
global curdir
global sidepanel_layout
global root
#Create the sidepanel layout.
sidepanel_layout = BoxLayout(orientation="vertical", pos_hint={"x": 0.0, "top": 0.92}, size_hint=(0.1, 0.92))
#If "curdir" contains folders, a button is created for each, and bind the button to update the
# showphotos layout.
if curdir == " ":
return sidepanel_layout
else:
root.remove_widget(sidepanel_layout)
for folders in sorted(glob(join(curdir, "thumb", "*"))):
name = basename(folders)
btn = Button(text=name, on_press=lambda n=name: self._update_scroll(n.text))
sidepanel_layout.add_widget(btn)
root.add_widget(sidepanel_layout)
sidepanel_layout.do_layout()
def pg_bar(self, max: int = 100):
global progress
progress_bar = ProgressBar(max=max)
popup = Popup(title="Filtering and sorting pictures",
content=progress_bar)
progress_bar.value = progress
popup.open()
def _validate(self, fileChooser):
"""
Function to add the path chosen by user to "curdir" and initiate functions that needs to be run.
Args:
fileChooser: Takes the path chosen by the user.
Returns:
None, but initiates several other functions.
"""
global curdir
global progress
global number_of_pictures
curdir = fileChooser.path
progress = 0
#Initiates functions.
self._queue_photos()
mp.freeze_support()
#self.pg_bar(number_of_pictures)
self._multiprocessing(self._filtering_photos, filtering_queue)
self._cnn()
self._multiprocessing(self._sorting_photos, sorting_queue)
self._sidepanel()
def _pop(self, obj):
"""
Function that creates a pop-up window, where the user choses the path of the pictures to be imported.
Args:
obj: Is needed by the FileChooser class.
Returns:
A string containing the path chosen by the user.
"""
# Creates the layouts.
fileChooser = FileChooserIconView(size_hint_y=None)
content = BoxLayout(orientation='vertical', spacing=7)
scrollView = ScrollView()
# Binds the chosen path to the "validate" function.
fileChooser.bind(on_submit=self._validate)
fileChooser.height = 500
scrollView.add_widget(fileChooser)
# Adding the layouts together.
content.add_widget(Widget(size_hint_y=None, height=5))
content.add_widget(scrollView)
content.add_widget(Widget(size_hint_y=None, height=5))
popup = Popup(title='Choose Directory',
content=content,
size_hint=(0.6, 0.6))
# Creates two buttons to sumbit or cancel.
btnlayout = BoxLayout(size_hint_y=None, height=50, spacing=5)
btn = Button(text='Ok')
btn.bind(on_release=lambda x: self._validate(fileChooser))
btn.bind(on_release=popup.dismiss)
btnlayout.add_widget(btn)
btn = Button(text='Cancel')
btn.bind(on_release=popup.dismiss)
btnlayout.add_widget(btn)
content.add_widget(btnlayout)
popup.open()
def _showphotos(self, btn):
"""
Function to load photos and show them in the layout.
Args:
btn: String, name of the folder, containing the photos to be shown.
Returns:
A GridLayout containing the pictures in the path provided.
"""
global layout
global curdir
# Create the layouts.
layout = GridLayout(cols=5, padding=0, spacing=0, size_hint=(1, None))
layout.bind(minimum_height=layout.setter("height"))
foldername = btn
# Args is combined with "curdir" to load the thumbnails, and add them to the Gridlayout.
if foldername == "":
pass
else:
for filename in sorted(glob(join(curdir, "thumb", foldername, "*"))):
try:
canvas = BoxLayout(size_hint=(1, None))
im = Image(source=filename)
canvas.add_widget(im)
layout.add_widget(canvas)
except Exception:
print("Pictures: Unable to load <%s>" % filename)
return layout
def _queue_photos(self):
"""
Function to add photos to the queue of the multiprocessing function.
Returns:
Adds a list containing strings to the queue. Strings of paths to the picture and folder, and
name of the picture.
"""
global filtering_queue
global number_of_pictures
number_of_pictures = 0
for root, dirs, files in walk(curdir):
for file in files:
# Skip files the app itself generated (thumbnail and filtered copies).
if "thumb" in root or "filtered" in root:
pass
else:
file_path = join(root, file)
filtering_data = [file_path, curdir]
filtering_queue.put(filtering_data)
number_of_pictures += 1
print("Queued:", file_path)
try:
mkdir(join(curdir, "thumb"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Alexander"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Bjarke"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Gabrielle"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Monica"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Wenche"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "filtered"))
except FileExistsError:
pass
@staticmethod
def _filtering_photos(queue):
"""
Handles all actions of each picture. Creating a thumbnail, and starts the filtering of each picture.
Args:
queue: Multiprocessing.queue is given, containing a list of strings, with the path to
the picture, the folder and name of the picture.
Returns:
Saves a thumbnail and the filtered picture in separate folders.
"""
while True:
# Retrieves one list from the queue and splits the list.
data = queue.get()
picture = data[0]
curdir = data[1]
picture_name = basename(picture)
# Filters the image.
Filter(picture, join(curdir, "filtered"))
def _cnn(self):
global sorting_queue
filter_path = join(curdir, "filtered")
onlyfiles = [f for f in listdir(filter_path) if isfile(join(filter_path, f))]
for i in range(0, len(onlyfiles)):
j = onlyfiles[i]
onlyfiles[i] = join(filter_path, j)
files = []
for f in onlyfiles:
files.append(f)
image_size = 256
num_channels = 3
## Let us restore the saved model
with tf.Session(graph=tf.Graph()) as sess:
# Step-1: Recreate the network graph. At this step only graph is created.
saver = tf.train.import_meta_graph('model/spai_model.meta')
# Step-2: Now let's load the weights saved using the restore method.
saver.restore(sess, tf.train.latest_checkpoint('model/'))
# tf.initialize_all_variables().run()
# Reading the image using Open | random_line_split |
|
GUI test backup.py | _queue = mp.Queue()
# Creating the basic layouts.
root = FloatLayout()
scroll = ScrollView(pos_hint={"x": 0.12, "top": 0.92}, size_hint=(0.9, 1))
layout = GridLayout(cols=5, padding=0, spacing=5)
layout.bind(minimum_height=layout.setter("height"))
# Create the ActionBar with buttons.
actionbar = F.ActionBar(pos_hint={'top': 1})
av = F.ActionView()
av.add_widget(F.ActionPrevious(title='SPAI', with_previous=False, app_icon="icon.png"))
av.add_widget(F.ActionOverflow())
av.add_widget(F.ActionButton(text='Import'.format(), on_press=self._pop))
av.add_widget(F.ActionButton(text='Report'.format()))
actionbar.add_widget(av)
av.use_separator = False
# Adding the layouts to the root layout
root.add_widget(actionbar)
root.add_widget(self._sidepanel())
root.add_widget(scroll)
scroll.add_widget(layout)
return root
def _update_scroll(self, path):
"""
Function to update "showphotos" layout, when scrolling.
Args:
path: The path to the photos shown, in "showphotos" layout.
"""
global layout
#Removes the widgets in the scroll layout, if there is any.
scroll.remove_widget(layout)
#Loads the new updated layout, and updates the showphotos layout.
layout = self._showphotos(path)
scroll.add_widget(layout)
layout.do_layout()
def _sidepanel(self):
"""
Function to create the sidepanel in the root layout. It reads all the folders from "curdir", and
creates a button for each folder in "curdir". The sidepanel layout is then updated to show the buttons.
Returns:
Returs the sidepanel layout to the root layout.
"""
global curdir
global sidepanel_layout
global root
#Create the sidepanel layout.
sidepanel_layout = BoxLayout(orientation="vertical", pos_hint={"x": 0.0, "top": 0.92}, size_hint=(0.1, 0.92))
#If "curdir" contains folders, a button is created for each, and bind the button to update the
# showphotos layout.
if curdir == " ":
return sidepanel_layout
else:
root.remove_widget(sidepanel_layout)
for folders in sorted(glob(join(curdir, "thumb", "*"))):
name = basename(folders)
btn = Button(text=name, on_press=lambda n=name: self._update_scroll(n.text))
sidepanel_layout.add_widget(btn)
root.add_widget(sidepanel_layout)
sidepanel_layout.do_layout()
def pg_bar(self, max: int = 100):
|
def _validate(self, fileChooser):
"""
Function to add the path chosen by user to "curdir" and initiate functions that needs to be run.
Args:
fileChooser: Takes the path chosen by the user.
Returns:
None, but initiates several other functions.
"""
global curdir
global progress
global number_of_pictures
curdir = fileChooser.path
progress = 0
#Initiates functions.
self._queue_photos()
mp.freeze_support()
#self.pg_bar(number_of_pictures)
self._multiprocessing(self._filtering_photos, filtering_queue)
self._cnn()
self._multiprocessing(self._sorting_photos, sorting_queue)
self._sidepanel()
def _pop(self, obj):
"""
Function that creates a pop-up window, where the user choses the path of the pictures to be imported.
Args:
obj: Is needed by the FileChooser class.
Returns:
A string containing the path chosen by the user.
"""
# Creates the layouts.
fileChooser = FileChooserIconView(size_hint_y=None)
content = BoxLayout(orientation='vertical', spacing=7)
scrollView = ScrollView()
# Binds the chosen path to the "validate" function.
fileChooser.bind(on_submit=self._validate)
fileChooser.height = 500
scrollView.add_widget(fileChooser)
# Adding the layouts together.
content.add_widget(Widget(size_hint_y=None, height=5))
content.add_widget(scrollView)
content.add_widget(Widget(size_hint_y=None, height=5))
popup = Popup(title='Choose Directory',
content=content,
size_hint=(0.6, 0.6))
# Creates two buttons to sumbit or cancel.
btnlayout = BoxLayout(size_hint_y=None, height=50, spacing=5)
btn = Button(text='Ok')
btn.bind(on_release=lambda x: self._validate(fileChooser))
btn.bind(on_release=popup.dismiss)
btnlayout.add_widget(btn)
btn = Button(text='Cancel')
btn.bind(on_release=popup.dismiss)
btnlayout.add_widget(btn)
content.add_widget(btnlayout)
popup.open()
def _showphotos(self, btn):
"""
Function to load photos and show them in the layout.
Args:
btn: String, name of the folder, containing the photos to be shown.
Returns:
A GridLayout containing the pictures in the path provided.
"""
global layout
global curdir
# Create the layouts.
layout = GridLayout(cols=5, padding=0, spacing=0, size_hint=(1, None))
layout.bind(minimum_height=layout.setter("height"))
foldername = btn
# Args is combined with "curdir" to load the thumbnails, and add them to the Gridlayout.
if foldername == "":
pass
else:
for filename in sorted(glob(join(curdir, "thumb", foldername, "*"))):
try:
canvas = BoxLayout(size_hint=(1, None))
im = Image(source=filename)
canvas.add_widget(im)
layout.add_widget(canvas)
except Exception:
print("Pictures: Unable to load <%s>" % filename)
return layout
def _queue_photos(self):
"""
Function to add photos to the queue of the multiprocessing function.
Returns:
Adds a list containing strings to the queue. Strings of paths to the picture and folder, and
name of the picture.
"""
global filtering_queue
global number_of_pictures
number_of_pictures = 0
for root, dirs, files in walk(curdir):
for file in files:
# Skip files the app itself generated (thumbnail and filtered copies).
if "thumb" in root or "filtered" in root:
pass
else:
file_path = join(root, file)
filtering_data = [file_path, curdir]
filtering_queue.put(filtering_data)
number_of_pictures += 1
print("Queued:", file_path)
try:
mkdir(join(curdir, "thumb"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Alexander"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Bjarke"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Gabrielle"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Monica"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Wenche"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "filtered"))
except FileExistsError:
pass
@staticmethod
def _filtering_photos(queue):
"""
Handles all actions of each picture. Creating a thumbnail, and starts the filtering of each picture.
Args:
queue: Multiprocessing.queue is given, containing a list of strings, with the path to
the picture, the folder and name of the picture.
Returns:
Saves a thumbnail and the filtered picture in separate folders.
"""
while True:
# Retrieves one list from the queue and splits the list.
data = queue.get()
picture = data[0]
curdir = data[1]
picture_name = basename(picture)
# Filters the image.
Filter(picture, join(curdir, "filtered"))
def _cnn(self):
global sorting_queue
filter_path = join(curdir, "filtered")
onlyfiles = [f for f in listdir(filter_path) if isfile(join(filter_path, f))]
for i in range(0, len(onlyfiles)):
j = onlyfiles[i]
onlyfiles[i] = join(filter_path, j)
files = []
for f in onlyfiles:
files.append(f)
image_size = 256
num_channels = 3
## Let us restore the saved model
with tf.Session(graph=tf.Graph()) as sess:
# Step-1: Recreate the network graph. At this step only graph is created.
saver = tf.train.import_meta_graph('model/spai_model.meta')
# Step-2: Now let's load the weights saved using the restore method.
saver.restore(sess, tf.train.latest_checkpoint('model/'))
# tf.initialize_all_variables().run()
# Reading the image using | global progress
progress_bar = ProgressBar(max=max)
popup = Popup(title="Filtering and sorting pictures",
content=progress_bar)
progress_bar.value = progress
popup.open() | identifier_body |
GUI test backup.py | _queue = mp.Queue()
# Creating the basic layouts.
root = FloatLayout()
scroll = ScrollView(pos_hint={"x": 0.12, "top": 0.92}, size_hint=(0.9, 1))
layout = GridLayout(cols=5, padding=0, spacing=5)
layout.bind(minimum_height=layout.setter("height"))
# Create the ActionBar with buttons.
actionbar = F.ActionBar(pos_hint={'top': 1})
av = F.ActionView()
av.add_widget(F.ActionPrevious(title='SPAI', with_previous=False, app_icon="icon.png"))
av.add_widget(F.ActionOverflow())
av.add_widget(F.ActionButton(text='Import'.format(), on_press=self._pop))
av.add_widget(F.ActionButton(text='Report'.format()))
actionbar.add_widget(av)
av.use_separator = False
# Adding the layouts to the root layout
root.add_widget(actionbar)
root.add_widget(self._sidepanel())
root.add_widget(scroll)
scroll.add_widget(layout)
return root
def _update_scroll(self, path):
"""
Function to update "showphotos" layout, when scrolling.
Args:
path: The path to the photos shown, in "showphotos" layout.
"""
global layout
#Removes the widgets in the scroll layout, if there is any.
scroll.remove_widget(layout)
#Loads the new updated layout, and updates the showphotos layout.
layout = self._showphotos(path)
scroll.add_widget(layout)
layout.do_layout()
def _sidepanel(self):
"""
Function to create the sidepanel in the root layout. It reads all the folders from "curdir", and
creates a button for each folder in "curdir". The sidepanel layout is then updated to show the buttons.
Returns:
Returs the sidepanel layout to the root layout.
"""
global curdir
global sidepanel_layout
global root
#Create the sidepanel layout.
sidepanel_layout = BoxLayout(orientation="vertical", pos_hint={"x": 0.0, "top": 0.92}, size_hint=(0.1, 0.92))
#If "curdir" contains folders, a button is created for each, and bind the button to update the
# showphotos layout.
if curdir == " ":
return sidepanel_layout
else:
root.remove_widget(sidepanel_layout)
for folders in sorted(glob(join(curdir, "thumb", "*"))):
name = basename(folders)
btn = Button(text=name, on_press=lambda n=name: self._update_scroll(n.text))
sidepanel_layout.add_widget(btn)
root.add_widget(sidepanel_layout)
sidepanel_layout.do_layout()
def pg_bar(self, max: int = 100):
global progress
progress_bar = ProgressBar(max=max)
popup = Popup(title="Filtering and sorting pictures",
content=progress_bar)
progress_bar.value = progress
popup.open()
def _validate(self, fileChooser):
"""
Function to add the path chosen by user to "curdir" and initiate functions that needs to be run.
Args:
fileChooser: Takes the path chosen by the user.
Returns:
None, but initiates several other functions.
"""
global curdir
global progress
global number_of_pictures
curdir = fileChooser.path
progress = 0
#Initiates functions.
self._queue_photos()
mp.freeze_support()
#self.pg_bar(number_of_pictures)
self._multiprocessing(self._filtering_photos, filtering_queue)
self._cnn()
self._multiprocessing(self._sorting_photos, sorting_queue)
self._sidepanel()
def _pop(self, obj):
"""
Function that creates a pop-up window, where the user choses the path of the pictures to be imported.
Args:
obj: Is needed by the FileChooser class.
Returns:
A string containing the path chosen by the user.
"""
# Creates the layouts.
fileChooser = FileChooserIconView(size_hint_y=None)
content = BoxLayout(orientation='vertical', spacing=7)
scrollView = ScrollView()
# Binds the chosen path to the "validate" function.
fileChooser.bind(on_submit=self._validate)
fileChooser.height = 500
scrollView.add_widget(fileChooser)
# Adding the layouts together.
content.add_widget(Widget(size_hint_y=None, height=5))
content.add_widget(scrollView)
content.add_widget(Widget(size_hint_y=None, height=5))
popup = Popup(title='Choose Directory',
content=content,
size_hint=(0.6, 0.6))
# Creates two buttons to sumbit or cancel.
btnlayout = BoxLayout(size_hint_y=None, height=50, spacing=5)
btn = Button(text='Ok')
btn.bind(on_release=lambda x: self._validate(fileChooser))
btn.bind(on_release=popup.dismiss)
btnlayout.add_widget(btn)
btn = Button(text='Cancel')
btn.bind(on_release=popup.dismiss)
btnlayout.add_widget(btn)
content.add_widget(btnlayout)
popup.open()
def _showphotos(self, btn):
"""
Function to load photos and show them in the layout.
Args:
btn: String, name of the folder, containing the photos to be shown.
Returns:
A GridLayout containing the pictures in the path provided.
"""
global layout
global curdir
# Create the layouts.
layout = GridLayout(cols=5, padding=0, spacing=0, size_hint=(1, None))
layout.bind(minimum_height=layout.setter("height"))
foldername = btn
# Args is combined with "curdir" to load the thumbnails, and add them to the Gridlayout.
if foldername == "":
pass
else:
for filename in sorted(glob(join(curdir, "thumb", foldername, "*"))):
try:
canvas = BoxLayout(size_hint=(1, None))
im = Image(source=filename)
canvas.add_widget(im)
layout.add_widget(canvas)
except Exception:
print("Pictures: Unable to load <%s>" % filename)
return layout
def _queue_photos(self):
"""
Function to add photos to the queue of the multiprocessing function.
Returns:
Adds a list containing strings to the queue. Strings of paths to the picture and folder, and
name of the picture.
"""
global filtering_queue
global number_of_pictures
number_of_pictures = 0
for root, dirs, files in walk(curdir):
for file in files:
# Skip files the app itself generated (thumbnail and filtered copies).
if "thumb" in root or "filtered" in root:
pass
else:
file_path = join(root, file)
filtering_data = [file_path, curdir]
filtering_queue.put(filtering_data)
number_of_pictures += 1
print("Queued:", file_path)
try:
mkdir(join(curdir, "thumb"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Alexander"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Bjarke"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Gabrielle"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Monica"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Wenche"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "filtered"))
except FileExistsError:
pass
@staticmethod
def _filtering_photos(queue):
"""
Handles all actions of each picture. Creating a thumbnail, and starts the filtering of each picture.
Args:
queue: Multiprocessing.queue is given, containing a list of strings, with the path to
the picture, the folder and name of the picture.
Returns:
Saves a thumbnail and the filtered picture in separate folders.
"""
while True:
# Retrieves one list from the queue and splits the list.
data = queue.get()
picture = data[0]
curdir = data[1]
picture_name = basename(picture)
# Filters the image.
Filter(picture, join(curdir, "filtered"))
def | (self):
global sorting_queue
filter_path = join(curdir, "filtered")
onlyfiles = [f for f in listdir(filter_path) if isfile(join(filter_path, f))]
for i in range(0, len(onlyfiles)):
j = onlyfiles[i]
onlyfiles[i] = join(filter_path, j)
files = []
for f in onlyfiles:
files.append(f)
image_size = 256
num_channels = 3
## Let us restore the saved model
with tf.Session(graph=tf.Graph()) as sess:
# Step-1: Recreate the network graph. At this step only graph is created.
saver = tf.train.import_meta_graph('model/spai_model.meta')
# Step-2: Now let's load the weights saved using the restore method.
saver.restore(sess, tf.train.latest_checkpoint('model/'))
# tf.initialize_all_variables().run()
# Reading the image using Open | _cnn | identifier_name |
GUI test backup.py | , 0.92))
#If "curdir" contains folders, a button is created for each, and bind the button to update the
# showphotos layout.
if curdir == " ":
return sidepanel_layout
else:
root.remove_widget(sidepanel_layout)
for folders in sorted(glob(join(curdir, "thumb", "*"))):
name = basename(folders)
btn = Button(text=name, on_press=lambda n=name: self._update_scroll(n.text))
sidepanel_layout.add_widget(btn)
root.add_widget(sidepanel_layout)
sidepanel_layout.do_layout()
def pg_bar(self, max: int = 100):
global progress
progress_bar = ProgressBar(max=max)
popup = Popup(title="Filtering and sorting pictures",
content=progress_bar)
progress_bar.value = progress
popup.open()
def _validate(self, fileChooser):
"""
Function to add the path chosen by user to "curdir" and initiate functions that needs to be run.
Args:
fileChooser: Takes the path chosen by the user.
Returns:
None, but initiates several other functions.
"""
global curdir
global progress
global number_of_pictures
curdir = fileChooser.path
progress = 0
#Initiates functions.
self._queue_photos()
mp.freeze_support()
#self.pg_bar(number_of_pictures)
self._multiprocessing(self._filtering_photos, filtering_queue)
self._cnn()
self._multiprocessing(self._sorting_photos, sorting_queue)
self._sidepanel()
def _pop(self, obj):
"""
Function that creates a pop-up window, where the user choses the path of the pictures to be imported.
Args:
obj: Is needed by the FileChooser class.
Returns:
A string containing the path chosen by the user.
"""
# Creates the layouts.
fileChooser = FileChooserIconView(size_hint_y=None)
content = BoxLayout(orientation='vertical', spacing=7)
scrollView = ScrollView()
# Binds the chosen path to the "validate" function.
fileChooser.bind(on_submit=self._validate)
fileChooser.height = 500
scrollView.add_widget(fileChooser)
# Adding the layouts together.
content.add_widget(Widget(size_hint_y=None, height=5))
content.add_widget(scrollView)
content.add_widget(Widget(size_hint_y=None, height=5))
popup = Popup(title='Choose Directory',
content=content,
size_hint=(0.6, 0.6))
# Creates two buttons to sumbit or cancel.
btnlayout = BoxLayout(size_hint_y=None, height=50, spacing=5)
btn = Button(text='Ok')
btn.bind(on_release=lambda x: self._validate(fileChooser))
btn.bind(on_release=popup.dismiss)
btnlayout.add_widget(btn)
btn = Button(text='Cancel')
btn.bind(on_release=popup.dismiss)
btnlayout.add_widget(btn)
content.add_widget(btnlayout)
popup.open()
def _showphotos(self, btn):
"""
Function to load photos and show them in the layout.
Args:
btn: String, name of the folder, containing the photos to be shown.
Returns:
A GridLayout containing the pictures in the path provided.
"""
global layout
global curdir
# Create the layouts.
layout = GridLayout(cols=5, padding=0, spacing=0, size_hint=(1, None))
layout.bind(minimum_height=layout.setter("height"))
foldername = btn
# Args is combined with "curdir" to load the thumbnails, and add them to the Gridlayout.
if foldername == "":
pass
else:
for filename in sorted(glob(join(curdir, "thumb", foldername, "*"))):
try:
canvas = BoxLayout(size_hint=(1, None))
im = Image(source=filename)
canvas.add_widget(im)
layout.add_widget(canvas)
except Exception:
print("Pictures: Unable to load <%s>" % filename)
return layout
def _queue_photos(self):
"""
Function to add photos to the queue of the multiprocessing function.
Returns:
Adds a list containing strings to the queue. Strings of paths to the picture and folder, and
name of the picture.
"""
global filtering_queue
global number_of_pictures
number_of_pictures = 0
for root, dirs, files in walk(curdir):
for file in files:
# Skip files the app itself generated (thumbnail and filtered copies).
if "thumb" in root or "filtered" in root:
pass
else:
file_path = join(root, file)
filtering_data = [file_path, curdir]
filtering_queue.put(filtering_data)
number_of_pictures += 1
print("Queued:", file_path)
try:
mkdir(join(curdir, "thumb"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Alexander"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Bjarke"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Gabrielle"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Monica"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "thumb", "Wenche"))
except FileExistsError:
pass
try:
mkdir(join(curdir, "filtered"))
except FileExistsError:
pass
@staticmethod
def _filtering_photos(queue):
"""
Handles all actions of each picture. Creating a thumbnail, and starts the filtering of each picture.
Args:
queue: Multiprocessing.queue is given, containing a list of strings, with the path to
the picture, the folder and name of the picture.
Returns:
Saves a thumbnail and the filtered picture in separate folders.
"""
while True:
# Retrieves one list from the queue and splits the list.
data = queue.get()
picture = data[0]
curdir = data[1]
picture_name = basename(picture)
# Filters the image.
Filter(picture, join(curdir, "filtered"))
def _cnn(self):
global sorting_queue
filter_path = join(curdir, "filtered")
onlyfiles = [f for f in listdir(filter_path) if isfile(join(filter_path, f))]
for i in range(0, len(onlyfiles)):
j = onlyfiles[i]
onlyfiles[i] = join(filter_path, j)
files = []
for f in onlyfiles:
files.append(f)
image_size = 256
num_channels = 3
## Let us restore the saved model
with tf.Session(graph=tf.Graph()) as sess:
# Step-1: Recreate the network graph. At this step only graph is created.
saver = tf.train.import_meta_graph('model/spai_model.meta')
# Step-2: Now let's load the weights saved using the restore method.
saver.restore(sess, tf.train.latest_checkpoint('model/'))
# tf.initialize_all_variables().run()
# Reading the image using OpenCV
for filename in sorted(listdir(filter_path)):
image = cv2.imread(join(filter_path, filename))
if image is not None:
images = []
# Resizing the image to our desired size and preprocessing will be done exactly as done during training
image = cv2.resize(image, (image_size, image_size), 0, 0, cv2.INTER_LINEAR)
images.append(image)
images = np.array(images, dtype=np.uint8)
images = images.astype('float32')
images = np.multiply(images, 1.0 / 255.0)
# The input to the network is of shape [None image_size image_size num_channels]. Hence we reshape.
x_batch = images.reshape(1, image_size, image_size, num_channels)
graph = tf.get_default_graph()
x = graph.get_tensor_by_name("x:0")
y_pred = graph.get_tensor_by_name("y_pred:0")
result = sess.run(y_pred, feed_dict={x: x_batch})
res = result[0]
combiner = [join(filter_path, filename), res]
sorting_queue.put(combiner)
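# Each item pushed onto sorting_queue is a two-element list:
# [path of the filtered image, softmax scores from the CNN].
# _sorting_photos below takes the highest score to decide whose thumbnail folder the picture goes to.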
def _sorting_photos(self, queue):
while True:
# Saves a thumb of the picture in a folder depending on the values from the CNN
data = queue.get()
picture = data[0]
values = data[1]
picture_name = basename(picture)
try:
size_thumb = 128, 128
thumb = pimage.open(picture)
thumb.thumbnail(size_thumb)
highest_value = max(values)
group = values.index(highest_value)
if group == 1:
print("Pictures belongs to Alexander")
thumb.save(join(curdir, "thumb", "Alexander", picture_name), "JPEG")
elif group == 0:
print("Pictures belongs to Bjarke")
thumb.save(join(curdir, "thumb", "Bjarke", picture_name), "JPEG")
elif group == 4:
| print("Pictures belongs to Gabrielle")
thumb.save(join(curdir, "thumb", "Gabrielle", picture_name), "JPEG") | conditional_block |
|
app.rs | {
name: String,
nunlinked: u64,
nunlinks: u64,
nread: u64,
reads: u64,
nwritten: u64,
writes: u64,
}
impl Snapshot {
fn compute(&self, prev: Option<&Self>, etime: f64) -> Element {
if let Some(prev) = prev {
Element {
name: self.name.clone(),
ops_r: (self.reads - prev.reads) as f64 / etime,
r_s: (self.nread - prev.nread) as f64 / etime,
ops_w: (self.writes - prev.writes) as f64 / etime,
w_s: (self.nwritten - prev.nwritten) as f64 / etime,
ops_d: (self.nunlinks - prev.nunlinks) as f64 / etime,
d_s: (self.nunlinked - prev.nunlinked) as f64 / etime,
}
} else {
Element {
name: self.name.clone(),
ops_r: self.reads as f64 / etime,
r_s: self.nread as f64 / etime,
ops_w: self.writes as f64 / etime,
w_s: self.nwritten as f64 / etime,
ops_d: self.nunlinks as f64 / etime,
d_s: self.nunlinked as f64 / etime,
}
}
}
/// Iterate through ZFS datasets, returning stats for each.
///
/// Iterates through every dataset beneath each of the given pools, or
/// through all datasets if no pool is supplied.
pub fn iter(pool: Option<&str>) -> Result<SnapshotIter, Box<dyn Error>> {
SnapshotIter::new(pool)
}
}
impl AddAssign<&Self> for Snapshot {
fn add_assign(&mut self, other: &Self) {
assert!(
other.name.starts_with(&self.name),
"Why would you want to combine two unrelated datasets?"
);
self.nunlinked += other.nunlinked;
self.nunlinks += other.nunlinks;
self.nread += other.nread;
self.reads += other.reads;
self.nwritten += other.nwritten;
self.writes += other.writes;
}
}
#[derive(Default)]
struct DataSource {
children: bool,
prev: BTreeMap<String, Snapshot>,
prev_ts: Option<TimeSpec>,
cur: BTreeMap<String, Snapshot>,
cur_ts: Option<TimeSpec>,
pools: Vec<String>,
}
impl DataSource {
fn new(children: bool, pools: Vec<String>) -> Self {
DataSource {
children,
pools,
..Default::default()
}
}
/// Iterate through all the datasets, returning current stats
fn iter(&mut self) -> impl Iterator<Item = Element> + '_ {
let etime = if let Some(prev_ts) = self.prev_ts.as_ref() {
let delta = *self.cur_ts.as_ref().unwrap() - *prev_ts;
delta.tv_sec() as f64 + delta.tv_nsec() as f64 * 1e-9
} else {
let boottime = clock_gettime(CLOCK_UPTIME).unwrap();
boottime.tv_sec() as f64 + boottime.tv_nsec() as f64 * 1e-9
};
DataSourceIter {
inner_iter: self.cur.iter(),
ds: self,
etime,
}
}
/// Iterate over all of the names of parent datasets of the argument
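///
/// For example, `with_parents("tank/usr/home")` yields `"tank"`, `"tank/usr"`,
/// and finally `"tank/usr/home"` itself.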
fn with_parents(s: &str) -> impl Iterator<Item = &str> {
s.char_indices().filter_map(move |(idx, c)| {
if c == '/' {
Some(s.split_at(idx).0)
} else if idx == s.len() - 1 {
Some(s)
} else {
None
}
})
}
fn refresh(&mut self) -> Result<(), Box<dyn Error>> {
let now = clock_gettime(ClockId::CLOCK_MONOTONIC)?;
self.prev = mem::take(&mut self.cur);
self.prev_ts = self.cur_ts.replace(now);
if self.pools.is_empty() {
for rss in Snapshot::iter(None).unwrap() {
let ss = rss?;
Self::upsert(&mut self.cur, ss, self.children);
}
} else {
for pool in self.pools.iter() {
for rss in Snapshot::iter(Some(pool)).unwrap() {
let ss = rss?;
Self::upsert(&mut self.cur, ss, self.children);
}
}
}
Ok(())
}
fn toggle_children(&mut self) -> Result<(), Box<dyn Error>> {
self.children ^= true;
// Wipe out previous statistics. The next refresh will report stats
// since boot.
self.refresh()?;
mem::take(&mut self.prev);
self.prev_ts = None;
Ok(())
}
/// Insert a snapshot into `cur`, and/or update it and its parents
fn upsert(
cur: &mut BTreeMap<String, Snapshot>,
ss: Snapshot,
children: bool,
) {
if children {
for dsname in Self::with_parents(&ss.name) {
match cur.entry(dsname.to_string()) {
btree_map::Entry::Vacant(ve) => {
if ss.name == dsname {
ve.insert(ss.clone());
} else {
let mut parent_ss = ss.clone();
parent_ss.name = dsname.to_string();
ve.insert(parent_ss);
}
}
btree_map::Entry::Occupied(mut oe) => {
*oe.get_mut() += &ss;
}
}
}
} else {
match cur.entry(ss.name.clone()) {
btree_map::Entry::Vacant(ve) => {
ve.insert(ss);
}
btree_map::Entry::Occupied(mut oe) => {
*oe.get_mut() += &ss;
}
}
};
}
}
struct DataSourceIter<'a> {
inner_iter: btree_map::Iter<'a, String, Snapshot>,
ds: &'a DataSource,
etime: f64,
}
impl<'a> Iterator for DataSourceIter<'a> {
type Item = Element;
fn next(&mut self) -> Option<Self::Item> {
self.inner_iter
.next()
.map(|(_, ss)| ss.compute(self.ds.prev.get(&ss.name), self.etime))
}
}
/// One thing to display in the table
#[derive(Clone, Debug)]
pub struct Element {
pub name: String,
/// Read IOPs
pub ops_r: f64,
/// Read B/s
pub r_s: f64,
/// Delete IOPs
pub ops_d: f64,
/// Delete B/s
pub d_s: f64,
/// Write IOPs
pub ops_w: f64,
/// Write B/s
pub w_s: f64,
}
#[derive(Default)]
pub struct App {
auto: bool,
data: DataSource,
depth: Option<NonZeroUsize>,
filter: Option<Regex>,
reverse: bool,
should_quit: bool,
/// 0-based index of the column to sort by, if any
sort_idx: Option<usize>,
}
impl App {
pub fn new(
auto: bool,
children: bool,
pools: Vec<String>,
depth: Option<NonZeroUsize>,
filter: Option<Regex>,
reverse: bool,
sort_idx: Option<usize>,
) -> Self {
let mut data = DataSource::new(children, pools);
data.refresh().unwrap();
App {
auto,
data,
depth,
filter,
reverse,
sort_idx,
..Default::default()
}
}
pub fn clear_filter(&mut self) {
self.filter = None;
}
/// Return the elements that should be displayed, in order
#[rustfmt::skip]
pub fn elements(&mut self) -> Vec<Element> {
let auto = self.auto;
let depth = self.depth;
let filter = &self.filter;
let mut v = self.data.iter()
.filter(move |elem| {
if let Some(limit) = depth {
let edepth = elem.name.split('/').count();
edepth <= limit.get()
} else {
true
}
}).filter(|elem|
filter.as_ref()
.map(|f| f.is_match(&elem.name))
.unwrap_or(true)
).filter(|elem| !auto || (elem.r_s + elem.w_s + elem.d_s > 1.0))
.collect::<Vec<_>>();
match (self.reverse, self.sort_idx) {
// TODO: when the total_cmp feature stabilizes, use f64::total_cmp
// instead.
// https://github.com/rust-lang/rust/issues/72599
(false, Some(0)) => v.sort_by(|x, y| Ieee754::total_cmp(&x.ops_r, &y.ops | Snapshot | identifier_name |
|
app.rs | Map<String, Snapshot>,
prev_ts: Option<TimeSpec>,
cur: BTreeMap<String, Snapshot>,
cur_ts: Option<TimeSpec>,
pools: Vec<String>,
}
impl DataSource {
fn new(children: bool, pools: Vec<String>) -> Self {
DataSource {
children,
pools,
..Default::default()
}
}
/// Iterate through all the datasets, returning current stats
fn iter(&mut self) -> impl Iterator<Item = Element> + '_ {
let etime = if let Some(prev_ts) = self.prev_ts.as_ref() {
let delta = *self.cur_ts.as_ref().unwrap() - *prev_ts;
delta.tv_sec() as f64 + delta.tv_nsec() as f64 * 1e-9
} else {
let boottime = clock_gettime(CLOCK_UPTIME).unwrap();
boottime.tv_sec() as f64 + boottime.tv_nsec() as f64 * 1e-9
};
DataSourceIter {
inner_iter: self.cur.iter(),
ds: self,
etime,
}
}
/// Iterate over all of the names of parent datasets of the argument
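///
/// For example, `with_parents("tank/usr/home")` yields `"tank"`, `"tank/usr"`,
/// and finally `"tank/usr/home"` itself.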
fn with_parents(s: &str) -> impl Iterator<Item = &str> {
s.char_indices().filter_map(move |(idx, c)| {
if c == '/' {
Some(s.split_at(idx).0)
} else if idx == s.len() - 1 {
Some(s)
} else {
None
}
})
}
fn refresh(&mut self) -> Result<(), Box<dyn Error>> {
let now = clock_gettime(ClockId::CLOCK_MONOTONIC)?;
self.prev = mem::take(&mut self.cur);
self.prev_ts = self.cur_ts.replace(now);
if self.pools.is_empty() {
for rss in Snapshot::iter(None).unwrap() {
let ss = rss?;
Self::upsert(&mut self.cur, ss, self.children);
}
} else {
for pool in self.pools.iter() {
for rss in Snapshot::iter(Some(pool)).unwrap() {
let ss = rss?;
Self::upsert(&mut self.cur, ss, self.children);
}
}
}
Ok(())
}
fn toggle_children(&mut self) -> Result<(), Box<dyn Error>> {
self.children ^= true;
// Wipe out previous statistics. The next refresh will report stats
// since boot.
self.refresh()?;
mem::take(&mut self.prev);
self.prev_ts = None;
Ok(())
}
/// Insert a snapshot into `cur`, and/or update it and its parents
fn upsert(
cur: &mut BTreeMap<String, Snapshot>,
ss: Snapshot,
children: bool,
) {
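// With children enabled, the snapshot is credited to its own dataset and also accumulated into every parent entry, so parents aggregate their descendants' stats.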
if children {
for dsname in Self::with_parents(&ss.name) {
match cur.entry(dsname.to_string()) {
btree_map::Entry::Vacant(ve) => {
if ss.name == dsname {
ve.insert(ss.clone());
} else {
let mut parent_ss = ss.clone();
parent_ss.name = dsname.to_string();
ve.insert(parent_ss);
}
}
btree_map::Entry::Occupied(mut oe) => {
*oe.get_mut() += &ss;
}
}
}
} else {
match cur.entry(ss.name.clone()) {
btree_map::Entry::Vacant(ve) => {
ve.insert(ss);
}
btree_map::Entry::Occupied(mut oe) => {
*oe.get_mut() += &ss;
}
}
};
}
}
struct DataSourceIter<'a> {
inner_iter: btree_map::Iter<'a, String, Snapshot>,
ds: &'a DataSource,
etime: f64,
}
impl<'a> Iterator for DataSourceIter<'a> {
type Item = Element;
fn next(&mut self) -> Option<Self::Item> {
self.inner_iter
.next()
.map(|(_, ss)| ss.compute(self.ds.prev.get(&ss.name), self.etime))
}
}
/// One thing to display in the table
#[derive(Clone, Debug)]
pub struct Element {
pub name: String,
/// Read IOPs
pub ops_r: f64,
/// Read B/s
pub r_s: f64,
/// Delete IOPs
pub ops_d: f64,
/// Delete B/s
pub d_s: f64,
/// Write IOPs
pub ops_w: f64,
/// Write B/s
pub w_s: f64,
}
#[derive(Default)]
pub struct App {
auto: bool,
data: DataSource,
depth: Option<NonZeroUsize>,
filter: Option<Regex>,
reverse: bool,
should_quit: bool,
/// 0-based index of the column to sort by, if any
sort_idx: Option<usize>,
}
impl App {
pub fn new(
auto: bool,
children: bool,
pools: Vec<String>,
depth: Option<NonZeroUsize>,
filter: Option<Regex>,
reverse: bool,
sort_idx: Option<usize>,
) -> Self {
let mut data = DataSource::new(children, pools);
data.refresh().unwrap();
App {
auto,
data,
depth,
filter,
reverse,
sort_idx,
..Default::default()
}
}
pub fn clear_filter(&mut self) {
self.filter = None;
}
/// Return the elements that should be displayed, in order
#[rustfmt::skip]
pub fn elements(&mut self) -> Vec<Element> {
let auto = self.auto;
let depth = self.depth;
let filter = &self.filter;
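// Three filters are applied below: a depth limit on the dataset name, an optional name regex, and (in auto mode) a minimum of 1 B/s of combined read/write/delete activity.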
let mut v = self.data.iter()
.filter(move |elem| {
if let Some(limit) = depth {
let edepth = elem.name.split('/').count();
edepth <= limit.get()
} else {
true
}
}).filter(|elem|
filter.as_ref()
.map(|f| f.is_match(&elem.name))
.unwrap_or(true)
).filter(|elem| !auto || (elem.r_s + elem.w_s + elem.d_s > 1.0))
.collect::<Vec<_>>();
match (self.reverse, self.sort_idx) {
// TODO: when the total_cmp feature stabilizes, use f64::total_cmp
// instead.
// https://github.com/rust-lang/rust/issues/72599
(false, Some(0)) => v.sort_by(|x, y| Ieee754::total_cmp(&x.ops_r, &y.ops_r)),
(true, Some(0)) => v.sort_by(|x, y| Ieee754::total_cmp(&y.ops_r, &x.ops_r)),
(false, Some(1)) => v.sort_by(|x, y| Ieee754::total_cmp(&x.r_s, &y.r_s)),
(true, Some(1)) => v.sort_by(|x, y| Ieee754::total_cmp(&y.r_s, &x.r_s)),
(false, Some(2)) => v.sort_by(|x, y| Ieee754::total_cmp(&x.ops_w, &y.ops_w)),
(true, Some(2)) => v.sort_by(|x, y| Ieee754::total_cmp(&y.ops_w, &x.ops_w)),
(false, Some(3)) => v.sort_by(|x, y| Ieee754::total_cmp(&x.w_s, &y.w_s)),
(true, Some(3)) => v.sort_by(|x, y| Ieee754::total_cmp(&y.w_s, &x.w_s)),
(false, Some(4)) => v.sort_by(|x, y| Ieee754::total_cmp(&x.ops_d, &y.ops_d)),
(true, Some(4)) => v.sort_by(|x, y| Ieee754::total_cmp(&y.ops_d, &x.ops_d)),
(false, Some(5)) => v.sort_by(|x, y| Ieee754::total_cmp(&x.d_s, &y.d_s)),
(true, Some(5)) => v.sort_by(|x, y| Ieee754::total_cmp(&y.d_s, &x.d_s)),
(false, Some(6)) => v.sort_by(|x, y| x.name.cmp(&y.name)),
(true, Some(6)) => v.sort_by(|x, y| y.name.cmp(&x.name)),
_ => ()
}
v
}
pub fn on_a(&mut self) {
self.auto ^= true;
}
pub fn on_c(&mut self) -> Result<(), Box<dyn Error>> {
self.data.toggle_children()
}
pub fn on_d(&mut self, more_depth: bool) {
self.depth = if more_depth {
match self.depth { | None => NonZeroUsize::new(1),
Some(x) => NonZeroUsize::new(x.get() + 1),
} | random_line_split |
|
vr-party-participant.js | }
function clearMessage() {
$('#layer3').hide();
$('#layer2').show();
}
function launchViewer(urn) {
_baseDir = null;
_leftLoaded = _rightLoaded = false;
_updatingLeft = _updatingRight = false;
//_upVector = new THREE.Vector3(0, 1, 0);
_orbitInitialPosition = null;
if (urn) {
// Remove all event listeners
unwatchTilt();
unwatchProgress();
unwatchCameras();
clearMessage();
urn = urn.ensurePrefix('urn:');
Autodesk.Viewing.Document.load(
urn,
function(documentData) {
var model = getModel(documentData);
if (!model) return;
// Uninitializing the viewers helps with stability
if (_viewerLeft) {
_viewerLeft.finish();
_viewerLeft = null;
}
if (_viewerRight) {
_viewerRight.finish();
_viewerRight = null;
}
if (!_viewerLeft) {
_viewerLeft = new Autodesk.Viewing.Viewer3D($('#viewerLeft')[0]);
_viewerLeft.start();
// The settings are loaded by the 2nd viewer automatically
_viewerLeft.setQualityLevel(false, false);
_viewerLeft.setGroundShadow(true);
_viewerLeft.setGroundReflection(false);
_viewerLeft.setGhosting(false);
}
if (!_viewerRight) {
_viewerRight = new Autodesk.Viewing.Viewer3D($('#viewerRight')[0]);
_viewerRight.start();
}
watchProgress();
forceWidth(_viewerLeft);
loadModel(_viewerLeft, model);
forceWidth(_viewerRight);
loadModel(_viewerRight, model);
}
);
}
else {
showMessage('Disconnected', true);
_viewerLeft.uninitialize();
_viewerRight.uninitialize();
_viewerLeft = new Autodesk.Viewing.Viewer3D($('#viewerLeft')[0]);
_viewerRight = new Autodesk.Viewing.Viewer3D($('#viewerRight')[0]);
}
}
function forceWidth(viewer) {
viewer.container.style.width = '50%';
}
function initConnection() {
_socket.on('lmv-command', function(msg) {
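// Commands from the presenter update the pending model state; viewersApplyState() applies it once both viewers are ready.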
if (msg.name === 'load') {
launchViewer(msg.value, msg.disconnecting);
}
else if (msg.name === 'zoom') {
_model_state.zoom_factor = parseFloat(msg.value);
}
else if (msg.name === 'explode') {
_model_state.explode_factor = parseFloat(msg.value);
}
else if (msg.name === 'isolate') {
_model_state.isolate_ids = msg.value;
}
else if (msg.name === 'hide') {
_model_state.hide_ids = msg.value;
}
else if (msg.name === 'show') {
_model_state.show_ids = msg.value;
}
else if (msg.name == 'section') {
_model_state.cut_planes = msg.value.map(function(vec) {
return new THREE.Vector4(vec.x, vec.y, vec.z, vec.w);
});
}
else if (msg.name === 'render') {
_model_state.lighting = msg.value;
}
viewersApplyState();
});
}
function viewersApplyState() {
var not_ready = false;
if (!_leftLoaded || !_rightLoaded || !_readyToApplyEvents) {
return;
}
if (_model_state.zoom_factor !== undefined) {
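// Zoom by scaling the target-to-eye vector by the zoom factor and repositioning the left camera; the right camera follows via transferCameras().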
unwatchTilt();
var previousUpdatingLeft = _updatingLeft;
var previousUpdatingRight = _updatingRight;
var direction = new THREE.Vector3();
var target = new THREE.Vector3(); //_viewerLeft.navigation.getTarget();
direction.subVectors(_orbitInitialPosition, target);
direction.normalize();
direction.multiplyScalar(_model_state.zoom_factor);
var newPos = direction.add(target);
_viewerLeft.navigation.setPosition(newPos);
transferCameras(true);
_orbitInitialPosition = newPos;
_updatingLeft = previousUpdatingLeft;
_updatingRight = previousUpdatingRight;
_model_state.zoom_factor = undefined;
if (_lastVert && _lastHoriz) {
orbitViews(_lastVert, _lastHoriz);
}
console.log('Applied zoom');
watchTilt();
}
if (_model_state.explode_factor !== undefined) {
viewersApply('explode', _model_state.explode_factor);
_model_state.explode_factor = undefined;
console.log('Applied explode');
}
if (_model_state.isolate_ids !== undefined) {
var worked = tryToApplyIds('isolate', _model_state.isolate_ids);
if (worked) {
_model_state.isolate_ids = undefined;
console.log('Applied isolate');
}
else
console.log('Not ready to isolate');
not_ready = not_ready || !worked;
}
if (!not_ready && _model_state.show_ids !== undefined) {
var worked = tryToApplyIds('show', _model_state.show_ids);
if (worked) {
_model_state.show_ids = undefined;
console.log('Applied show');
}
else
console.log('Not ready to show');
not_ready = not_ready || !worked;
}
if (!not_ready && _model_state.hide_ids !== undefined) {
var worked = tryToApplyIds('hide', _model_state.hide_ids);
if (worked) {
_model_state.hide_ids = undefined;
console.log('Applied hide');
}
else
console.log('Not ready to hide');
not_ready = not_ready || !worked;
}
if (_model_state.cut_planes !== undefined) {
viewersApply('setCutPlanes', _model_state.cut_planes);
_model_state.cut_planes = undefined;
console.log('Applied section');
}
if (_model_state.lighting !== undefined) {
viewersApply('setLightPreset', _model_state.lighting);
_model_state.lighting = undefined;
console.log('Applied lighting');
}
if (not_ready) {
setTimeout(function() { viewersApplyState(); }, 1000);
}
}
function tryToApplyIds(prop, ids) {
var success = true;
if ((LMV_VIEWER_VERSION === '1.2.13' || LMV_VIEWER_VERSION === '1.2.14') &&
ids.length > 0 && typeof ids[0] === 'number') {
// getNodesByIds can throw an exception when the model isn't sufficiently loaded
// Catch it and try to apply the viewer state again in a second
try {
ids = _viewerLeft.model.getNodesByIds(ids);
}
catch (ex) {
success = false;
}
}
if (success) {
try {
viewersApply(prop, ids);
}
catch (ex) {
success = false;
}
}
return success;
}
function viewersApply(func){
//if (_viewerLeft && _viewerRight && _leftLoaded && _rightLoaded) {
var val = Array.prototype.slice.call(arguments, 1);
_viewerLeft[func].apply(_viewerLeft, val);
_viewerRight[func].apply(_viewerRight, val);
//}
}
// Progress listener to set the view once the data has started
// loading properly (we get a 5% notification early on that we
// need to ignore - it comes too soon)
function progressListener(e) {
if (e.percent >= 10) {
if (e.target.clientContainer.id === 'viewerLeft') {
_viewerLeft.getObjectTree(
function() {
_leftLoaded = true;
console.log('Left has an instance tree');
setTimeout(finishProgress, 100);
},
function() {
_leftLoaded = false;
console.log('Cannot get left instance tree');
}
);
_viewerLeft.removeEventListener('progress', progressListener);
}
else if (e.target.clientContainer.id === 'viewerRight') {
_viewerRight.getObjectTree(
function() {
_rightLoaded = true;
console.log('Right has an instance tree');
setTimeout(finishProgress, 100);
},
function() {
_rightLoaded = false;
console.log('Cannot get right instance tree');
}
);
_viewerRight.removeEventListener('progress', progressListener);
}
}
}
function finishProgress() {
if (_leftLoaded && _rightLoaded) {
if (!_orbitInitialPosition) {
_orbitInitialPosition = _viewerLeft.navigation.getPosition();
}
var vec = _viewerLeft.model.getUpVector();
_upVector = new THREE.Vector3(vec[0], vec[1], vec[2]);
//unwatchProgress();
watchCameras();
watchTilt();
_readyToApplyEvents = true;
viewersApplyState();
}
}
function watchProgress() {
_viewerLeft.addEventListener('progress', progressListener);
_viewerRight.addEventListener('progress', progressListener);
}
function unwatchProgress() {
if (_viewerLeft) {
_viewerLeft.removeEventListener('progress', progressListener);
}
if (_viewerRight) {
_viewerRight.removeEventListener('progress', progressListener);
}
}
function watchCameras() |
function unwatchCameras() {
if (_viewerLeft) {
_viewerLeft | {
_viewerLeft.addEventListener('cameraChanged', left2right);
_viewerRight.addEventListener('cameraChanged', right2left);
} | identifier_body |
vr-party-participant.js | else {
Autodesk.Viewing.Initializer(getViewingOptions(), function() {
var avp = Autodesk.Viewing.Private;
avp.GPU_OBJECT_LIMIT = 100000;
avp.onDemandLoading = false;
showMessage('Waiting...');
if (_sessionId === "demo") {
launchViewer("dXJuOmFkc2sub2JqZWN0czpvcy5vYmplY3Q6c3RlYW1idWNrL1JvYm90QXJtLmR3Zng=");
} else {
_socket.emit('join-session', { id: _sessionId });
initConnection();
}
});
}
}
function showMessage(text, removeBlink) {
$('#layer2').hide();
var messages = $('#messageLeft,#messageRight');
if (removeBlink) {
messages.removeClass('blink');
}
messages.html(text);
$('#layer3').show();
}
function clearMessage() {
$('#layer3').hide();
$('#layer2').show();
}
function launchViewer(urn) {
_baseDir = null;
_leftLoaded = _rightLoaded = false;
_updatingLeft = _updatingRight = false;
//_upVector = new THREE.Vector3(0, 1, 0);
_orbitInitialPosition = null;
if (urn) {
// Remove all event listeners
unwatchTilt();
unwatchProgress();
unwatchCameras();
clearMessage();
urn = urn.ensurePrefix('urn:');
Autodesk.Viewing.Document.load(
urn,
function(documentData) {
var model = getModel(documentData);
if (!model) return;
// Uninitializing the viewers helps with stability
if (_viewerLeft) {
_viewerLeft.finish();
_viewerLeft = null;
}
if (_viewerRight) {
_viewerRight.finish();
_viewerRight = null;
}
if (!_viewerLeft) {
_viewerLeft = new Autodesk.Viewing.Viewer3D($('#viewerLeft')[0]);
_viewerLeft.start();
// The settings are loaded by the 2nd viewer automatically
_viewerLeft.setQualityLevel(false, false);
_viewerLeft.setGroundShadow(true);
_viewerLeft.setGroundReflection(false);
_viewerLeft.setGhosting(false);
}
if (!_viewerRight) {
_viewerRight = new Autodesk.Viewing.Viewer3D($('#viewerRight')[0]);
_viewerRight.start();
}
watchProgress();
forceWidth(_viewerLeft);
loadModel(_viewerLeft, model);
forceWidth(_viewerRight);
loadModel(_viewerRight, model);
}
);
}
else {
showMessage('Disconnected', true);
_viewerLeft.uninitialize();
_viewerRight.uninitialize();
_viewerLeft = new Autodesk.Viewing.Viewer3D($('#viewerLeft')[0]);
_viewerRight = new Autodesk.Viewing.Viewer3D($('#viewerRight')[0]);
}
}
function forceWidth(viewer) {
viewer.container.style.width = '50%';
}
function initConnection() {
_socket.on('lmv-command', function(msg) {
if (msg.name === 'load') {
launchViewer(msg.value, msg.disconnecting);
}
else if (msg.name === 'zoom') {
_model_state.zoom_factor = parseFloat(msg.value);
}
else if (msg.name === 'explode') {
_model_state.explode_factor = parseFloat(msg.value);
}
else if (msg.name === 'isolate') {
_model_state.isolate_ids = msg.value;
}
else if (msg.name === 'hide') {
_model_state.hide_ids = msg.value;
}
else if (msg.name === 'show') {
_model_state.show_ids = msg.value;
}
else if (msg.name == 'section') {
_model_state.cut_planes = msg.value.map(function(vec) {
return new THREE.Vector4(vec.x, vec.y, vec.z, vec.w);
});
}
else if (msg.name === 'render') {
_model_state.lighting = msg.value;
}
viewersApplyState();
});
}
function viewersApplyState() {
var not_ready = false;
if (!_leftLoaded || !_rightLoaded || !_readyToApplyEvents) {
return;
}
if (_model_state.zoom_factor !== undefined) {
unwatchTilt();
var previousUpdatingLeft = _updatingLeft;
var previousUpdatingRight = _updatingRight;
var direction = new THREE.Vector3();
var target = new THREE.Vector3(); //_viewerLeft.navigation.getTarget();
direction.subVectors(_orbitInitialPosition, target);
direction.normalize();
direction.multiplyScalar(_model_state.zoom_factor);
var newPos = direction.add(target);
_viewerLeft.navigation.setPosition(newPos);
transferCameras(true);
_orbitInitialPosition = newPos;
_updatingLeft = previousUpdatingLeft;
_updatingRight = previousUpdatingRight;
_model_state.zoom_factor = undefined;
if (_lastVert && _lastHoriz) {
orbitViews(_lastVert, _lastHoriz);
}
console.log('Applied zoom');
watchTilt();
}
if (_model_state.explode_factor !== undefined) {
viewersApply('explode', _model_state.explode_factor);
_model_state.explode_factor = undefined;
console.log('Applied explode');
}
if (_model_state.isolate_ids !== undefined) {
var worked = tryToApplyIds('isolate', _model_state.isolate_ids);
if (worked) {
_model_state.isolate_ids = undefined;
console.log('Applied isolate');
}
else
console.log('Not ready to isolate');
not_ready = not_ready || !worked;
}
if (!not_ready && _model_state.show_ids !== undefined) {
var worked = tryToApplyIds('show', _model_state.show_ids);
if (worked) {
_model_state.show_ids = undefined;
console.log('Applied show');
}
else
console.log('Not ready to show');
not_ready = not_ready || !worked;
}
if (!not_ready && _model_state.hide_ids !== undefined) {
var worked = tryToApplyIds('hide', _model_state.hide_ids);
if (worked) {
_model_state.hide_ids = undefined;
console.log('Applied hide');
}
else
console.log('Not ready to hide');
not_ready = not_ready || !worked;
}
if (_model_state.cut_planes !== undefined) {
viewersApply('setCutPlanes', _model_state.cut_planes);
_model_state.cut_planes = undefined;
console.log('Applied section');
}
if (_model_state.lighting !== undefined) {
viewersApply('setLightPreset', _model_state.lighting);
_model_state.lighting = undefined;
console.log('Applied lighting');
}
if (not_ready) {
setTimeout(function() { viewersApplyState(); }, 1000);
}
}
function tryToApplyIds(prop, ids) {
var success = true;
if ((LMV_VIEWER_VERSION === '1.2.13' || LMV_VIEWER_VERSION === '1.2.14') &&
ids.length > 0 && typeof ids[0] === 'number') {
// getNodesByIds can throw an exception when the model isn't sufficiently loaded
// Catch it and try to apply the viewer state again in a second
try {
ids = _viewerLeft.model.getNodesByIds(ids);
}
catch (ex) {
success = false;
}
}
if (success) {
try {
viewersApply(prop, ids);
}
catch (ex) {
success = false;
}
}
return success;
}
function viewersApply(func){
//if (_viewerLeft && _viewerRight && _leftLoaded && _rightLoaded) {
var val = Array.prototype.slice.call(arguments, 1);
_viewerLeft[func].apply(_viewerLeft, val);
_viewerRight[func].apply(_viewerRight, val);
//}
}
// Progress listener to set the view once the data has started
// loading properly (we get a 5% notification early on that we
// need to ignore - it comes too soon)
function progressListener(e) {
if (e.percent >= 10) {
if (e.target.clientContainer.id === 'viewerLeft') {
_viewerLeft.getObjectTree(
function() {
_leftLoaded = true;
console.log('Left has an instance tree');
setTimeout(finishProgress, 100);
},
function() {
_leftLoaded = false;
console.log('Cannot get left instance tree');
}
);
_viewerLeft.removeEventListener('progress', progressListener);
}
else if (e.target.clientContainer.id === 'viewerRight') {
_viewerRight.getObjectTree(
function() {
_rightLoaded = true;
console.log('Right has an instance tree');
setTimeout(finishProgress, 100);
},
function() {
_rightLoaded = false;
console.log('Cannot get right instance tree');
}
);
_viewerRight.removeEventListener('progress', progressListener);
}
}
}
function finishProgress() {
if (_leftLoaded | initConnection();
});
}
);
} | random_line_split |
|
vr-party-participant.js | ('Disconnected', true);
_viewerLeft.uninitialize();
_viewerRight.uninitialize();
_viewerLeft = new Autodesk.Viewing.Viewer3D($('#viewerLeft')[0]);
_viewerRight = new Autodesk.Viewing.Viewer3D($('#viewerRight')[0]);
}
}
function forceWidth(viewer) {
viewer.container.style.width = '50%';
}
function initConnection() {
_socket.on('lmv-command', function(msg) {
if (msg.name === 'load') {
launchViewer(msg.value, msg.disconnecting);
}
else if (msg.name === 'zoom') {
_model_state.zoom_factor = parseFloat(msg.value);
}
else if (msg.name === 'explode') {
_model_state.explode_factor = parseFloat(msg.value);
}
else if (msg.name === 'isolate') {
_model_state.isolate_ids = msg.value;
}
else if (msg.name === 'hide') {
_model_state.hide_ids = msg.value;
}
else if (msg.name === 'show') {
_model_state.show_ids = msg.value;
}
else if (msg.name == 'section') {
_model_state.cut_planes = msg.value.map(function(vec) {
return new THREE.Vector4(vec.x, vec.y, vec.z, vec.w);
});
}
else if (msg.name === 'render') {
_model_state.lighting = msg.value;
}
viewersApplyState();
});
}
function viewersApplyState() {
var not_ready = false;
if (!_leftLoaded || !_rightLoaded || !_readyToApplyEvents) {
return;
}
if (_model_state.zoom_factor !== undefined) {
unwatchTilt();
var previousUpdatingLeft = _updatingLeft;
var previousUpdatingRight = _updatingRight;
var direction = new THREE.Vector3();
var target = new THREE.Vector3(); //_viewerLeft.navigation.getTarget();
direction.subVectors(_orbitInitialPosition, target);
direction.normalize();
direction.multiplyScalar(_model_state.zoom_factor);
var newPos = direction.add(target);
_viewerLeft.navigation.setPosition(newPos);
transferCameras(true);
_orbitInitialPosition = newPos;
_updatingLeft = previousUpdatingLeft;
_updatingRight = previousUpdatingRight;
_model_state.zoom_factor = undefined;
if (_lastVert && _lastHoriz) {
orbitViews(_lastVert, _lastHoriz);
}
console.log('Applied zoom');
watchTilt();
}
if (_model_state.explode_factor !== undefined) {
viewersApply('explode', _model_state.explode_factor);
_model_state.explode_factor = undefined;
console.log('Applied explode');
}
if (_model_state.isolate_ids !== undefined) {
var worked = tryToApplyIds('isolate', _model_state.isolate_ids);
if (worked) {
_model_state.isolate_ids = undefined;
console.log('Applied isolate');
}
else
console.log('Not ready to isolate');
not_ready = not_ready || !worked;
}
if (!not_ready && _model_state.show_ids !== undefined) {
var worked = tryToApplyIds('show', _model_state.show_ids);
if (worked) {
_model_state.show_ids = undefined;
console.log('Applied show');
}
else
console.log('Not ready to show');
not_ready = not_ready || !worked;
}
if (!not_ready && _model_state.hide_ids !== undefined) {
var worked = tryToApplyIds('hide', _model_state.hide_ids);
if (worked) {
_model_state.hide_ids = undefined;
console.log('Applied hide');
}
else
console.log('Not ready to hide');
not_ready = not_ready || !worked;
}
if (_model_state.cut_planes !== undefined) {
viewersApply('setCutPlanes', _model_state.cut_planes);
_model_state.cut_planes = undefined;
console.log('Applied section');
}
if (_model_state.lighting !== undefined) {
viewersApply('setLightPreset', _model_state.lighting);
_model_state.lighting = undefined;
console.log('Applied lighting');
}
if (not_ready) {
setTimeout(function() { viewersApplyState(); }, 1000);
}
}
function tryToApplyIds(prop, ids) {
var success = true;
if ((LMV_VIEWER_VERSION === '1.2.13' || LMV_VIEWER_VERSION === '1.2.14') &&
ids.length > 0 && typeof ids[0] === 'number') {
// getNodesByIds can throw an exception when the model isn't sufficiently loaded
// Catch it and try to apply the viewer state again in a second
try {
ids = _viewerLeft.model.getNodesByIds(ids);
}
catch (ex) {
success = false;
}
}
if (success) {
try {
viewersApply(prop, ids);
}
catch (ex) {
success = false;
}
}
return success;
}
function viewersApply(func){
//if (_viewerLeft && _viewerRight && _leftLoaded && _rightLoaded) {
var val = Array.prototype.slice.call(arguments, 1);
_viewerLeft[func].apply(_viewerLeft, val);
_viewerRight[func].apply(_viewerRight, val);
//}
}
// Progress listener to set the view once the data has started
// loading properly (we get a 5% notification early on that we
// need to ignore - it comes too soon)
function progressListener(e) {
if (e.percent >= 10) {
if (e.target.clientContainer.id === 'viewerLeft') {
_viewerLeft.getObjectTree(
function() {
_leftLoaded = true;
console.log('Left has an instance tree');
setTimeout(finishProgress, 100);
},
function() {
_leftLoaded = false;
console.log('Cannot get left instance tree');
}
);
_viewerLeft.removeEventListener('progress', progressListener);
}
else if (e.target.clientContainer.id === 'viewerRight') {
_viewerRight.getObjectTree(
function() {
_rightLoaded = true;
console.log('Right has an instance tree');
setTimeout(finishProgress, 100);
},
function() {
_rightLoaded = false;
console.log('Cannot get right instance tree');
}
);
_viewerRight.removeEventListener('progress', progressListener);
}
}
}
function finishProgress() {
if (_leftLoaded && _rightLoaded) {
if (!_orbitInitialPosition) {
_orbitInitialPosition = _viewerLeft.navigation.getPosition();
}
var vec = _viewerLeft.model.getUpVector();
_upVector = new THREE.Vector3(vec[0], vec[1], vec[2]);
//unwatchProgress();
watchCameras();
watchTilt();
_readyToApplyEvents = true;
viewersApplyState();
}
}
function watchProgress() {
_viewerLeft.addEventListener('progress', progressListener);
_viewerRight.addEventListener('progress', progressListener);
}
function unwatchProgress() {
if (_viewerLeft) {
_viewerLeft.removeEventListener('progress', progressListener);
}
if (_viewerRight) {
_viewerRight.removeEventListener('progress', progressListener);
}
}
function watchCameras() {
_viewerLeft.addEventListener('cameraChanged', left2right);
_viewerRight.addEventListener('cameraChanged', right2left);
}
function unwatchCameras() {
if (_viewerLeft) {
_viewerLeft.removeEventListener('cameraChanged', left2right);
}
if (_viewerRight) {
_viewerRight.removeEventListener('cameraChanged', right2left);
}
}
function watchTilt() {
if (window.DeviceOrientationEvent) {
window.addEventListener('deviceorientation', orb);
}
}
function unwatchTilt() {
if (window.DeviceOrientationEvent) {
window.removeEventListener('deviceorientation', orb);
}
}
// Event handlers for the cameraChanged events
function left2right() {
if (_viewerLeft && _viewerRight && !_updatingRight) {
_updatingLeft = true;
transferCameras(true);
setTimeout(function() { _updatingLeft = false; }, 500);
}
}
function right2left() {
if (_viewerLeft && _viewerRight && !_updatingLeft) {
_updatingRight = true;
transferCameras(false);
setTimeout(function() { _updatingRight = false; }, 500);
}
}
function transferCameras(leftToRight) {
// The direction argument dictates the source and target
var source = leftToRight ? _viewerLeft : _viewerRight;
var target = leftToRight ? _viewerRight : _viewerLeft;
var pos = source.navigation.getPosition();
var trg = source.navigation.getTarget();
// Set the up vector manually for both cameras
source.navigation.setWorldUpVector(_upVector);
target.navigation.setWorldUpVector(_upVector);
// Get the new position for the target camera
var up = source.navigation.getCameraUpVector();
// Get the position of the target camera
var newPos = offsetCameraPos(source, pos, trg, leftToRight);
// Zoom to the new camera position in the target
zoom(target, newPos, trg, up);
}
// And for the deviceorientation event
function | orb | identifier_name |
|
vr-party-participant.js | }
function clearMessage() {
$('#layer3').hide();
$('#layer2').show();
}
function launchViewer(urn) {
_baseDir = null;
_leftLoaded = _rightLoaded = false;
_updatingLeft = _updatingRight = false;
//_upVector = new THREE.Vector3(0, 1, 0);
_orbitInitialPosition = null;
if (urn) {
// Remove all event listeners
unwatchTilt();
unwatchProgress();
unwatchCameras();
clearMessage();
urn = urn.ensurePrefix('urn:');
Autodesk.Viewing.Document.load(
urn,
function(documentData) {
var model = getModel(documentData);
if (!model) return;
// Uninitializing the viewers helps with stability
if (_viewerLeft) {
_viewerLeft.finish();
_viewerLeft = null;
}
if (_viewerRight) {
_viewerRight.finish();
_viewerRight = null;
}
if (!_viewerLeft) {
_viewerLeft = new Autodesk.Viewing.Viewer3D($('#viewerLeft')[0]);
_viewerLeft.start();
// The settings are loaded by the 2nd viewer automatically
_viewerLeft.setQualityLevel(false, false);
_viewerLeft.setGroundShadow(true);
_viewerLeft.setGroundReflection(false);
_viewerLeft.setGhosting(false);
}
if (!_viewerRight) {
_viewerRight = new Autodesk.Viewing.Viewer3D($('#viewerRight')[0]);
_viewerRight.start();
}
watchProgress();
forceWidth(_viewerLeft);
loadModel(_viewerLeft, model);
forceWidth(_viewerRight);
loadModel(_viewerRight, model);
}
);
}
else {
showMessage('Disconnected', true);
_viewerLeft.uninitialize();
_viewerRight.uninitialize();
_viewerLeft = new Autodesk.Viewing.Viewer3D($('#viewerLeft')[0]);
_viewerRight = new Autodesk.Viewing.Viewer3D($('#viewerRight')[0]);
}
}
function forceWidth(viewer) {
viewer.container.style.width = '50%';
}
function initConnection() {
_socket.on('lmv-command', function(msg) {
if (msg.name === 'load') {
launchViewer(msg.value, msg.disconnecting);
}
else if (msg.name === 'zoom') {
_model_state.zoom_factor = parseFloat(msg.value);
}
else if (msg.name === 'explode') {
_model_state.explode_factor = parseFloat(msg.value);
}
else if (msg.name === 'isolate') {
_model_state.isolate_ids = msg.value;
}
else if (msg.name === 'hide') {
_model_state.hide_ids = msg.value;
}
else if (msg.name === 'show') {
_model_state.show_ids = msg.value;
}
else if (msg.name == 'section') {
_model_state.cut_planes = msg.value.map(function(vec) {
return new THREE.Vector4(vec.x, vec.y, vec.z, vec.w);
});
}
else if (msg.name === 'render') {
_model_state.lighting = msg.value;
}
viewersApplyState();
});
}
function viewersApplyState() {
var not_ready = false;
if (!_leftLoaded || !_rightLoaded || !_readyToApplyEvents) {
return;
}
if (_model_state.zoom_factor !== undefined) {
unwatchTilt();
var previousUpdatingLeft = _updatingLeft;
var previousUpdatingRight = _updatingRight;
var direction = new THREE.Vector3();
var target = new THREE.Vector3(); //_viewerLeft.navigation.getTarget();
direction.subVectors(_orbitInitialPosition, target);
direction.normalize();
direction.multiplyScalar(_model_state.zoom_factor);
var newPos = direction.add(target);
_viewerLeft.navigation.setPosition(newPos);
transferCameras(true);
_orbitInitialPosition = newPos;
_updatingLeft = previousUpdatingLeft;
_updatingRight = previousUpdatingRight;
_model_state.zoom_factor = undefined;
if (_lastVert && _lastHoriz) |
console.log('Applied zoom');
watchTilt();
}
if (_model_state.explode_factor !== undefined) {
viewersApply('explode', _model_state.explode_factor);
_model_state.explode_factor = undefined;
console.log('Applied explode');
}
if (_model_state.isolate_ids !== undefined) {
var worked = tryToApplyIds('isolate', _model_state.isolate_ids);
if (worked) {
_model_state.isolate_ids = undefined;
console.log('Applied isolate');
}
else
console.log('Not ready to isolate');
not_ready = not_ready || !worked;
}
if (!not_ready && _model_state.show_ids !== undefined) {
var worked = tryToApplyIds('show', _model_state.show_ids);
if (worked) {
_model_state.show_ids = undefined;
console.log('Applied show');
}
else
console.log('Not ready to show');
not_ready = not_ready || !worked;
}
if (!not_ready && _model_state.hide_ids !== undefined) {
var worked = tryToApplyIds('hide', _model_state.hide_ids);
if (worked) {
_model_state.hide_ids = undefined;
console.log('Applied hide');
}
else
console.log('Not ready to hide');
not_ready = not_ready || !worked;
}
if (_model_state.cut_planes !== undefined) {
viewersApply('setCutPlanes', _model_state.cut_planes);
_model_state.cut_planes = undefined;
console.log('Applied section');
}
if (_model_state.lighting !== undefined) {
viewersApply('setLightPreset', _model_state.lighting);
_model_state.lighting = undefined;
console.log('Applied lighting');
}
if (not_ready) {
setTimeout(function() { viewersApplyState(); }, 1000);
}
}
function tryToApplyIds(prop, ids) {
var success = true;
if ((LMV_VIEWER_VERSION === '1.2.13' || LMV_VIEWER_VERSION === '1.2.14') &&
ids.length > 0 && typeof ids[0] === 'number') {
// getNodesByIds can throw an exception when the model isn't sufficiently loaded
// Catch it and try to apply the viewer state again in a second
try {
ids = _viewerLeft.model.getNodesByIds(ids);
}
catch (ex) {
success = false;
}
}
if (success) {
try {
viewersApply(prop, ids);
}
catch (ex) {
success = false;
}
}
return success;
}
function viewersApply(func){
//if (_viewerLeft && _viewerRight && _leftLoaded && _rightLoaded) {
var val = Array.prototype.slice.call(arguments, 1);
_viewerLeft[func].apply(_viewerLeft, val);
_viewerRight[func].apply(_viewerRight, val);
//}
}
// Progress listener to set the view once the data has started
// loading properly (we get a 5% notification early on that we
// need to ignore - it comes too soon)
function progressListener(e) {
if (e.percent >= 10) {
if (e.target.clientContainer.id === 'viewerLeft') {
_viewerLeft.getObjectTree(
function() {
_leftLoaded = true;
console.log('Left has an instance tree');
setTimeout(finishProgress, 100);
},
function() {
_leftLoaded = false;
console.log('Cannot get left instance tree');
}
);
_viewerLeft.removeEventListener('progress', progressListener);
}
else if (e.target.clientContainer.id === 'viewerRight') {
_viewerRight.getObjectTree(
function() {
_rightLoaded = true;
console.log('Right has an instance tree');
setTimeout(finishProgress, 100);
},
function() {
_rightLoaded = false;
console.log('Cannot get right instance tree');
}
);
_viewerRight.removeEventListener('progress', progressListener);
}
}
}
function finishProgress() {
if (_leftLoaded && _rightLoaded) {
if (!_orbitInitialPosition) {
_orbitInitialPosition = _viewerLeft.navigation.getPosition();
}
var vec = _viewerLeft.model.getUpVector();
_upVector = new THREE.Vector3(vec[0], vec[1], vec[2]);
//unwatchProgress();
watchCameras();
watchTilt();
_readyToApplyEvents = true;
viewersApplyState();
}
}
function watchProgress() {
_viewerLeft.addEventListener('progress', progressListener);
_viewerRight.addEventListener('progress', progressListener);
}
function unwatchProgress() {
if (_viewerLeft) {
_viewerLeft.removeEventListener('progress', progressListener);
}
if (_viewerRight) {
_viewerRight.removeEventListener('progress', progressListener);
}
}
function watchCameras() {
_viewerLeft.addEventListener('cameraChanged', left2right);
_viewerRight.addEventListener('cameraChanged', right2left);
}
function unwatchCameras() {
if (_viewerLeft) {
_viewerLeft | {
orbitViews(_lastVert, _lastHoriz);
} | conditional_block |
main.py | = df.drop("birth_year", axis=1) #Drop birth_year for clustering; consider it for interpretation
df = df[df["first_policy"]<50000] #Drop the one case where first_policy year is above 50000
#df = df[~(df["premium_motor"]==0)&~(df["premium_household"]==0)&~(df["premium_health"]==0)&~(df["premium_life"]==0)&~(df["premium_work_comp"]==0)]
df = df[df[["premium_motor","premium_household","premium_health","premium_life","premium_work_comp"]].sum(axis=1)!=0]
#####################################################################################
################# Outlier #################
df.reset_index(inplace=True,drop=True)
df_num = pd.DataFrame(df[['first_policy', 'salary_year','mon_value','claims_rate','premium_motor','premium_household','premium_health','premium_life','premium_work_comp']])
# Define individual thresholds for features
thresholds = {'salary_year': 200000,'mon_value': -200,'claims_rate': 3,'premium_motor': 600,'premium_household': 1600,'premium_health': 400,'premium_life': 300,'premium_work_comp': 300}
outliers = []
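# Thresholds are one-sided: mon_value is flagged below its (negative) threshold, every other feature above its threshold.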
for col, th in thresholds.items():
direct = "pos"
if col == "mon_value":
direct = "neg"
outliers.append(get_outliers_i(df_num, col, th, direct))
df_outlier = df.iloc[list(set([o for l in outliers for o in l]))]
df = df[~df.index.isin(df_outlier.index.values)]
#####################################################################################
################# filling NAN #################
# Filling nan values in premium columns
#Assumption: nan values in premium mean no contract
df[["premium_motor","premium_household","premium_health","premium_life","premium_work_comp"]] = df[["premium_motor","premium_household","premium_health","premium_life","premium_work_comp"]].fillna(0)
# Drop customers with nan values in "salary_year","educ" or "has_children" because they are only a few customers and there is no reasonably correlated feature to impute them from
df_dropped = df[df[["salary_year","educ","has_children"]].isna().any(axis=1)]
df = df.dropna(subset=["salary_year","educ","has_children"])
#######################################################################
######### Feature-engineering and -selection #########
df.reset_index(drop=True, inplace=True)
# Calculate total amount paid for premiums per year per customer
df["premium_total"] = [sum(p for p in premiums if p > 0) for i, premiums in df[['premium_motor','premium_household','premium_health', 'premium_life','premium_work_comp']].iterrows()]
# True if customer cancelled contract this year (has a negative value in the premium-related columns) | temp = [sum(1 for p in premiums if p < 0) for i, premiums in df[['premium_motor','premium_household','premium_health', 'premium_life','premium_work_comp']].iterrows()]
df["cancelled_contracts"] = [1 if i != 0 else 0 for i in temp]
# True if customers has premium for every part
temp = [sum(1 for p in premiums if p > 0) for i, premiums in df[['premium_motor','premium_household','premium_health', 'premium_life','premium_work_comp']].iterrows()]
df["has_all"] = [1 if i == 5 else 0 for i in temp]
#Calculate if customers are profitable
df["is_profit"] = [1 if mon_value > 0 else 0 for mon_value in df.mon_value.values]
# Split the features in customer- and product-related.
customer_related_num = ['salary_year', 'mon_value', 'claims_rate', 'premium_total'] # don't use first_policy because the clusters are clearer without it
customer_related_cat = ['location','has_children', 'educ', 'cancelled_contracts', 'has_all', "is_profit"]
customer_related = customer_related_num + customer_related_cat
product_related = ['premium_motor','premium_household', 'premium_health', 'premium_life','premium_work_comp']
################# Choose algorithm #################
######### Product-related #########
### K-Means ###
# Normalization for product-related variables
scaler = StandardScaler()
prod_norm = scaler.fit_transform(df[product_related])
df_prod_norm = pd.DataFrame(prod_norm, columns = product_related)
### Find number of clusters
# Elbow graph
create_elbowgraph(10, df_prod_norm)
#Silhouette
kmeans = KMeans(n_clusters=2, random_state=1).fit(df_prod_norm)
df["p_cluster"] = kmeans.labels_
create_silgraph(df_prod_norm, df["p_cluster"])
silhouette_avg = silhouette_score(df_prod_norm, kmeans.labels_)
print("For n_clusters =", str(2), "the average silhouette_score is :", silhouette_avg)
# Inverse Normalization for Interpretation
pcluster_centroids_num = pd.DataFrame(scaler.inverse_transform(X=kmeans.cluster_centers_), columns = df_prod_norm.columns)
######### Customer-related ##########
############ SOM and Hierarchical Clustering #####################
scaler = StandardScaler()
cust_norm = scaler.fit_transform(df[customer_related_num])
df_cust_norm = pd.DataFrame(cust_norm, columns = customer_related_num)
X = df_cust_norm.values
sm = SOMFactory().build(data = X,
mapsize=(8,8),
normalization = 'var',
initialization="pca",
component_names=customer_related_num,
lattice="hexa",
training ="batch" )
sm.train(n_job=5,
verbose='info',
train_rough_len=40,
train_finetune_len=100)
final_clusters = pd.DataFrame(sm._data, columns = customer_related_num)
my_labels = pd.DataFrame(sm._bmu[0])
final_clusters = pd.concat([final_clusters,my_labels], axis = 1)
cluster_cols = customer_related_num + ["Labels"]
final_clusters.columns = cluster_cols
view2D = View2DPacked(20,20,"", text_size=9)
view2D.show(sm, col_sz=4, what = 'codebook',)#which_dim="all", denormalize=True)
plt.show()
view2D = View2D(20,20,"", text_size=9)
view2D.show(sm, col_sz=2, what = 'codebook',)#which_dim="all", denormalize=True)
plt.show()
vhts = BmuHitsView(12,12,"Hits Map",text_size=7)
vhts.show(sm, anotate=True, onlyzeros=False, labelsize=10, cmap="autumn", logaritmic=False)
## Hierarchical Clustering ##
som_cluster = final_clusters.groupby("Labels").mean()
dend = shc.dendrogram(shc.linkage(som_cluster, method='ward'))
plt.title("Dendogram with SOM nodes", size=12)
som_cluster["h_cluster"] = AgglomerativeClustering(n_clusters=3).fit_predict(som_cluster)
# Calculate centroids of clusters and inverse scaling for interpretation
h_cluster = som_cluster.groupby("h_cluster").mean()
h_cluster = pd.DataFrame(scaler.inverse_transform(X=h_cluster), columns = customer_related_num)
# Assign customer to cluster generated by hierarchical clustering
final_clusters["h_cluster"] = [som_cluster.loc[label,"h_cluster"] for label in final_clusters["Labels"].values]
# Silhouette graph
create_silgraph(df_cust_norm, final_clusters["h_cluster"])
plt.title("Silhouette graph customer clusters", size=12)
silhouette_avg = silhouette_score(df_cust_norm, final_clusters["h_cluster"])
print("the average silhouette_score is :", silhouette_avg)
df["c_cluster"] = final_clusters["h_cluster"]
#################################################################
################## Decision Tree classifier #####################
# Find most important features
X = df[customer_related_num]
y = df["c_cluster"]
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=1)
clf = DecisionTreeClassifier(max_depth=4)
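# A shallow tree (max_depth=4) keeps the cluster rules readable in the exported graph.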
# Fit model
clf = clf.fit(X_train,y_train)
#Predict the cluster for test data
y_pred = clf.predict(X_test)
print("Accuracy:",metrics.accuracy_score(y_test, y_pred))
dot_data = StringIO()
export_graphviz(clf, out_file=dot_data,
filled=True,
special_characters=True,feature_names = X.columns.values,class_names=['0','1', '3', '4'])
graph = pydotplus.graph_from_dot_data(dot_data.getvalue())
graph.write_png('decision_tree_cluster.png')
# Predict clusters of outliers and dropped customers
# c_cluster
df_add = pd.concat([df_outlier,df_dropped], axis=0)
num_cols = ['salary_year', 'mon_value', 'claims_rate']
df_topredc = pd.DataFrame(df_add[num_cols])
trained_models = {}
pred_cclusters = []
df_topredc.reset_index(drop=True, inplace=True)
df_add.reset_index(drop=True, inplace=True)
for i in df_topredc.index.values:
isna = df_topredc.iloc[i,:].isna()
cols = [num_cols[j] for j in range(0,len(num_cols)) if isna[j] == False]
if ', '.join(cols) in trained_models.keys():
y_pred = trained_models[', '.join(cols)].predict([df_topredc.loc[i,cols]])
pred_cclusters.append(y_pred[0])
continue
else:
X = df[cols | random_line_split |
|
main.py | = df.drop("birth_year", axis=1) #Drop birth_year for clustering; consider it for interpretation
df = df[df["first_policy"]<50000] #Drop the one case where first_policy year is above 50000
#df = df[~(df["premium_motor"]==0)&~(df["premium_household"]==0)&~(df["premium_health"]==0)&~(df["premium_life"]==0)&~(df["premium_work_comp"]==0)]
df = df[df[["premium_motor","premium_household","premium_health","premium_life","premium_work_comp"]].sum(axis=1)!=0]
#####################################################################################
################# Outlier #################
df.reset_index(inplace=True,drop=True)
df_num = pd.DataFrame(df[['first_policy', 'salary_year','mon_value','claims_rate','premium_motor','premium_household','premium_health','premium_life','premium_work_comp']])
# Define individual thresholds for features
thresholds = {'salary_year': 200000,'mon_value': -200,'claims_rate': 3,'premium_motor': 600,'premium_household': 1600,'premium_health': 400,'premium_life': 300,'premium_work_comp': 300}
outliers = []
for col, th in thresholds.items():
direct = "pos"
if col == "mon_value":
direct = "neg"
outliers.append(get_outliers_i(df_num, col, th, direct))
df_outlier = df.iloc[list(set([o for l in outliers for o in l]))]
df = df[~df.index.isin(df_outlier.index.values)]
#####################################################################################
################# filling NAN #################
# Filling nan values in premium columns
#Assumption: nan values in premium mean no contract
df[["premium_motor","premium_household","premium_health","premium_life","premium_work_comp"]] = df[["premium_motor","premium_household","premium_health","premium_life","premium_work_comp"]].fillna(0)
# Drop customers with nan values in "salary_year","educ" or "has_children" because they are only a few customers and there is no reasonably correlated feature to impute them from
df_dropped = df[df[["salary_year","educ","has_children"]].isna().any(axis=1)]
df = df.dropna(subset=["salary_year","educ","has_children"])
#######################################################################
######### Feature-engineering and -selection #########
df.reset_index(drop=True, inplace=True)
# Calculate total amount paid for premiums per year per customer
df["premium_total"] = [sum(p for p in premiums if p > 0) for i, premiums in df[['premium_motor','premium_household','premium_health', 'premium_life','premium_work_comp']].iterrows()]
# True if customer cancelled contract this year (has a negative value in the premium-related columns)
temp = [sum(1 for p in premiums if p < 0) for i, premiums in df[['premium_motor','premium_household','premium_health', 'premium_life','premium_work_comp']].iterrows()]
df["cancelled_contracts"] = [1 if i != 0 else 0 for i in temp]
# True if customers has premium for every part
temp = [sum(1 for p in premiums if p > 0) for i, premiums in df[['premium_motor','premium_household','premium_health', 'premium_life','premium_work_comp']].iterrows()]
df["has_all"] = [1 if i == 5 else 0 for i in temp]
#Calculate if customers are profitable
df["is_profit"] = [1 if mon_value > 0 else 0 for mon_value in df.mon_value.values]
# Split the features in customer- and product-related.
customer_related_num = ['salary_year', 'mon_value', 'claims_rate', 'premium_total'] # don't use first_policy because the clusters are clearer without it
customer_related_cat = ['location','has_children', 'educ', 'cancelled_contracts', 'has_all', "is_profit"]
customer_related = customer_related_num + customer_related_cat
product_related = ['premium_motor','premium_household', 'premium_health', 'premium_life','premium_work_comp']
################# Choose algorithm #################
######### Product-related #########
### K-Means ###
# Normalization for product-related variables
scaler = StandardScaler()
prod_norm = scaler.fit_transform(df[product_related])
df_prod_norm = pd.DataFrame(prod_norm, columns = product_related)
### Find number of clusters
# Elbow graph
create_elbowgraph(10, df_prod_norm)
#Silhouette
kmeans = KMeans(n_clusters=2, random_state=1).fit(df_prod_norm)
df["p_cluster"] = kmeans.labels_
create_silgraph(df_prod_norm, df["p_cluster"])
silhouette_avg = silhouette_score(df_prod_norm, kmeans.labels_)
print("For n_clusters =", str(2), "the average silhouette_score is :", silhouette_avg)
# Inverse Normalization for Interpretation
pcluster_centroids_num = pd.DataFrame(scaler.inverse_transform(X=kmeans.cluster_centers_), columns = df_prod_norm.columns)
######### Customer-related ##########
############ SOM and Hierarchical Clustering #####################
scaler = StandardScaler()
cust_norm = scaler.fit_transform(df[customer_related_num])
df_cust_norm = pd.DataFrame(cust_norm, columns = customer_related_num)
X = df_cust_norm.values
sm = SOMFactory().build(data = X,
mapsize=(8,8),
normalization = 'var',
initialization="pca",
component_names=customer_related_num,
lattice="hexa",
training ="batch" )
sm.train(n_job=5,
verbose='info',
train_rough_len=40,
train_finetune_len=100)
final_clusters = pd.DataFrame(sm._data, columns = customer_related_num)
my_labels = pd.DataFrame(sm._bmu[0])
final_clusters = pd.concat([final_clusters,my_labels], axis = 1)
cluster_cols = customer_related_num + ["Labels"]
final_clusters.columns = cluster_cols
view2D = View2DPacked(20,20,"", text_size=9)
view2D.show(sm, col_sz=4, what = 'codebook',)#which_dim="all", denormalize=True)
plt.show()
view2D = View2D(20,20,"", text_size=9)
view2D.show(sm, col_sz=2, what = 'codebook',)#which_dim="all", denormalize=True)
plt.show()
vhts = BmuHitsView(12,12,"Hits Map",text_size=7)
vhts.show(sm, anotate=True, onlyzeros=False, labelsize=10, cmap="autumn", logaritmic=False)
## Hierarchical Clustering ##
som_cluster = final_clusters.groupby("Labels").mean()
dend = shc.dendrogram(shc.linkage(som_cluster, method='ward'))
plt.title("Dendogram with SOM nodes", size=12)
som_cluster["h_cluster"] = AgglomerativeClustering(n_clusters=3).fit_predict(som_cluster)
# Calculate centroids of clusters and inverse scaling for interpretation
h_cluster = som_cluster.groupby("h_cluster").mean()
h_cluster = pd.DataFrame(scaler.inverse_transform(X=h_cluster), columns = customer_related_num)
# Assign customer to cluster generated by hierarchical clustering
final_clusters["h_cluster"] = [som_cluster.loc[label,"h_cluster"] for label in final_clusters["Labels"].values]
# Silhouette graph
create_silgraph(df_cust_norm, final_clusters["h_cluster"])
plt.title("Silhouette graph customer clusters", size=12)
silhouette_avg = silhouette_score(df_cust_norm, final_clusters["h_cluster"])
print("the average silhouette_score is :", silhouette_avg)
df["c_cluster"] = final_clusters["h_cluster"]
#################################################################
################## Decision Tree classifier #####################
# Find most important features
X = df[customer_related_num]
y = df["c_cluster"]
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=1)
clf = DecisionTreeClassifier(max_depth=4)
# Fit model
clf = clf.fit(X_train,y_train)
#Predict the cluster for test data
y_pred = clf.predict(X_test)
print("Accuracy:",metrics.accuracy_score(y_test, y_pred))
dot_data = StringIO()
export_graphviz(clf, out_file=dot_data,
filled=True,
special_characters=True,feature_names = X.columns.values,class_names=['0','1', '3', '4'])
graph = pydotplus.graph_from_dot_data(dot_data.getvalue())
graph.write_png('decision_tree_cluster.png')
# Predict clusters of outliers and dropped customers
# c_cluster
df_add = pd.concat([df_outlier,df_dropped], axis=0)
num_cols = ['salary_year', 'mon_value', 'claims_rate']
df_topredc = pd.DataFrame(df_add[num_cols])
trained_models = {}
pred_cclusters = []
df_topredc.reset_index(drop=True, inplace=True)
df_add.reset_index(drop=True, inplace=True)
for i in df_topredc.index.values:
isna = df_topredc.iloc[i,:].isna()
cols = [num_cols[j] for j in range(0,len(num_cols)) if isna[j] == False]
if ', '.join(cols) in trained_models.keys():
|
else:
X = df[cols | y_pred = trained_models[', '.join(cols)].predict([df_topredc.loc[i,cols]])
pred_cclusters.append(y_pred[0])
continue | conditional_block |
moc_guestagent_exec.pb.go | 3" json:"Execs,omitempty"`
Result *wrappers.BoolValue `protobuf:"bytes,2,opt,name=Result,proto3" json:"Result,omitempty"`
Error string `protobuf:"bytes,3,opt,name=Error,proto3" json:"Error,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ExecResponse) Reset() { *m = ExecResponse{} }
func (m *ExecResponse) String() string { return proto.CompactTextString(m) }
func (*ExecResponse) ProtoMessage() {}
func (*ExecResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_66e2f35f165c7840, []int{1}
}
func (m *ExecResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ExecResponse.Unmarshal(m, b)
}
func (m *ExecResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ExecResponse.Marshal(b, m, deterministic)
}
func (m *ExecResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_ExecResponse.Merge(m, src)
}
func (m *ExecResponse) | () int {
return xxx_messageInfo_ExecResponse.Size(m)
}
func (m *ExecResponse) XXX_DiscardUnknown() {
xxx_messageInfo_ExecResponse.DiscardUnknown(m)
}
var xxx_messageInfo_ExecResponse proto.InternalMessageInfo
func (m *ExecResponse) GetExecs() []*Exec {
if m != nil {
return m.Execs
}
return nil
}
func (m *ExecResponse) GetResult() *wrappers.BoolValue {
if m != nil {
return m.Result
}
return nil
}
func (m *ExecResponse) GetError() string {
if m != nil {
return m.Error
}
return ""
}
type Exec struct {
Command string `protobuf:"bytes,1,opt,name=command,proto3" json:"command,omitempty"`
Output string `protobuf:"bytes,2,opt,name=output,proto3" json:"output,omitempty"`
Status *common.Status `protobuf:"bytes,3,opt,name=status,proto3" json:"status,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Exec) Reset() { *m = Exec{} }
func (m *Exec) String() string { return proto.CompactTextString(m) }
func (*Exec) ProtoMessage() {}
func (*Exec) Descriptor() ([]byte, []int) {
return fileDescriptor_66e2f35f165c7840, []int{2}
}
func (m *Exec) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Exec.Unmarshal(m, b)
}
func (m *Exec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Exec.Marshal(b, m, deterministic)
}
func (m *Exec) XXX_Merge(src proto.Message) {
xxx_messageInfo_Exec.Merge(m, src)
}
func (m *Exec) XXX_Size() int {
return xxx_messageInfo_Exec.Size(m)
}
func (m *Exec) XXX_DiscardUnknown() {
xxx_messageInfo_Exec.DiscardUnknown(m)
}
var xxx_messageInfo_Exec proto.InternalMessageInfo
func (m *Exec) GetCommand() string {
if m != nil {
return m.Command
}
return ""
}
func (m *Exec) GetOutput() string {
if m != nil {
return m.Output
}
return ""
}
func (m *Exec) GetStatus() *common.Status {
if m != nil {
return m.Status
}
return nil
}
func init() {
proto.RegisterType((*ExecRequest)(nil), "moc.guestagent.admin.ExecRequest")
proto.RegisterType((*ExecResponse)(nil), "moc.guestagent.admin.ExecResponse")
proto.RegisterType((*Exec)(nil), "moc.guestagent.admin.Exec")
}
func init() { proto.RegisterFile("exec/moc_guestagent_exec.proto", fileDescriptor_66e2f35f165c7840) }
var fileDescriptor_66e2f35f165c7840 = []byte{
// 351 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x52, 0xbf, 0x4e, 0xe3, 0x30,
0x1c, 0xbe, 0x5c, 0xaf, 0x39, 0xd5, 0xb9, 0xeb, 0x60, 0x55, 0x77, 0x51, 0x86, 0xaa, 0x84, 0xa5,
0x0b, 0x36, 0x0a, 0xbc, 0x00, 0x95, 0x3a, 0x30, 0x21, 0x02, 0x62, 0x40, 0x82, 0x2a, 0x75, 0xdd,
0x10, 0x91, 0xe4, 0x67, 0xfc, 0x07, 0xca, 0x2b, 0xf0, 0xd4, 0xc8, 0x76, 0x4a, 0x85, 0x84, 0x3a,
0x30, 0x45, 0x9f, 0xbf, 0x7f, 0xf1, 0x97, 0xa0, 0x31, 0xdf, 0x70, 0x46, 0x1b, 0x60, 0x8b, 0xd2,
0x70, 0xa5, 0x8b, 0x92, 0xb7, 0x7a, 0x61, 0xcf, 0x88, 0x90, 0xa0, 0x01, 0x8f, 0x1a, 0x60, 0x64,
0x47, 0x91, 0x62, 0xd5, 0x54, 0x6d, 0x32, 0x2e, 0x01, 0xca, 0x9a, 0x53, 0xa7, 0x59, 0x9a, 0x35,
0x7d, 0x91, 0x85, 0x10, 0x5c, 0x2a, 0xef, 0x4a, 0xfe, 0xdb, 0x40, 0x06, 0x4d, 0x03, 0x6d, 0xf7,
0xf0, 0x44, 0x6a, 0x50, 0x34, 0xdf, 0x70, 0x96, 0xf3, 0x27, 0x1b, 0x89, 0x8f, 0x51, 0xdf, 0x42,
0x15, 0x07, 0x93, 0xde, 0x34, 0xca, 0x12, 0xf2, 0x55, 0x1b, 0x71, 0x0e, 0x2f, 0xc4, 0xa7, 0xe8,
0xef, 0x85, 0xe0, 0xb2, 0xd0, 0x15, 0xb4, 0xd7, 0xaf, 0x82, 0xc7, 0x3f, 0x27, 0xc1, 0x74, 0x98,
0x0d, 0x9d, 0xf3, 0x83, 0xc9, 0x3f, 0x8b, 0xd2, 0xb7, 0x00, 0xfd, 0xf1, 0xbd, 0x4a, 0x40, 0xab,
0xf | XXX_Size | identifier_name |
moc_guestagent_exec.pb.go | 3" json:"Execs,omitempty"`
Result *wrappers.BoolValue `protobuf:"bytes,2,opt,name=Result,proto3" json:"Result,omitempty"`
Error string `protobuf:"bytes,3,opt,name=Error,proto3" json:"Error,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ExecResponse) Reset() { *m = ExecResponse{} }
func (m *ExecResponse) String() string { return proto.CompactTextString(m) }
func (*ExecResponse) ProtoMessage() {}
func (*ExecResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_66e2f35f165c7840, []int{1}
}
func (m *ExecResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ExecResponse.Unmarshal(m, b)
}
func (m *ExecResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ExecResponse.Marshal(b, m, deterministic)
}
func (m *ExecResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_ExecResponse.Merge(m, src)
}
func (m *ExecResponse) XXX_Size() int {
return xxx_messageInfo_ExecResponse.Size(m)
}
func (m *ExecResponse) XXX_DiscardUnknown() {
xxx_messageInfo_ExecResponse.DiscardUnknown(m)
}
var xxx_messageInfo_ExecResponse proto.InternalMessageInfo
func (m *ExecResponse) GetExecs() []*Exec {
if m != nil {
return m.Execs
}
return nil
}
func (m *ExecResponse) GetResult() *wrappers.BoolValue {
if m != nil |
return nil
}
func (m *ExecResponse) GetError() string {
if m != nil {
return m.Error
}
return ""
}
type Exec struct {
Command string `protobuf:"bytes,1,opt,name=command,proto3" json:"command,omitempty"`
Output string `protobuf:"bytes,2,opt,name=output,proto3" json:"output,omitempty"`
Status *common.Status `protobuf:"bytes,3,opt,name=status,proto3" json:"status,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Exec) Reset() { *m = Exec{} }
func (m *Exec) String() string { return proto.CompactTextString(m) }
func (*Exec) ProtoMessage() {}
func (*Exec) Descriptor() ([]byte, []int) {
return fileDescriptor_66e2f35f165c7840, []int{2}
}
func (m *Exec) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Exec.Unmarshal(m, b)
}
func (m *Exec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Exec.Marshal(b, m, deterministic)
}
func (m *Exec) XXX_Merge(src proto.Message) {
xxx_messageInfo_Exec.Merge(m, src)
}
func (m *Exec) XXX_Size() int {
return xxx_messageInfo_Exec.Size(m)
}
func (m *Exec) XXX_DiscardUnknown() {
xxx_messageInfo_Exec.DiscardUnknown(m)
}
var xxx_messageInfo_Exec proto.InternalMessageInfo
func (m *Exec) GetCommand() string {
if m != nil {
return m.Command
}
return ""
}
func (m *Exec) GetOutput() string {
if m != nil {
return m.Output
}
return ""
}
func (m *Exec) GetStatus() *common.Status {
if m != nil {
return m.Status
}
return nil
}
func init() {
proto.RegisterType((*ExecRequest)(nil), "moc.guestagent.admin.ExecRequest")
proto.RegisterType((*ExecResponse)(nil), "moc.guestagent.admin.ExecResponse")
proto.RegisterType((*Exec)(nil), "moc.guestagent.admin.Exec")
}
func init() { proto.RegisterFile("exec/moc_guestagent_exec.proto", fileDescriptor_66e2f35f165c7840) }
var fileDescriptor_66e2f35f165c7840 = []byte{
// 351 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x52, 0xbf, 0x4e, 0xe3, 0x30,
0x1c, 0xbe, 0x5c, 0xaf, 0x39, 0xd5, 0xb9, 0xeb, 0x60, 0x55, 0x77, 0x51, 0x86, 0xaa, 0x84, 0xa5,
0x0b, 0x36, 0x0a, 0xbc, 0x00, 0x95, 0x3a, 0x30, 0x21, 0x02, 0x62, 0x40, 0x82, 0x2a, 0x75, 0xdd,
0x10, 0x91, 0xe4, 0x67, 0xfc, 0x07, 0xca, 0x2b, 0xf0, 0xd4, 0xc8, 0x76, 0x4a, 0x85, 0x84, 0x3a,
0x30, 0x45, 0x9f, 0xbf, 0x7f, 0xf1, 0x97, 0xa0, 0x31, 0xdf, 0x70, 0x46, 0x1b, 0x60, 0x8b, 0xd2,
0x70, 0xa5, 0x8b, 0x92, 0xb7, 0x7a, 0x61, 0xcf, 0x88, 0x90, 0xa0, 0x01, 0x8f, 0x1a, 0x60, 0x64,
0x47, 0x91, 0x62, 0xd5, 0x54, 0x6d, 0x32, 0x2e, 0x01, 0xca, 0x9a, 0x53, 0xa7, 0x59, 0x9a, 0x35,
0x7d, 0x91, 0x85, 0x10, 0x5c, 0x2a, 0xef, 0x4a, 0xfe, 0xdb, 0x40, 0x06, 0x4d, 0x03, 0x6d, 0xf7,
0xf0, 0x44, 0x6a, 0x50, 0x34, 0xdf, 0x70, 0x96, 0xf3, 0x27, 0x1b, 0x89, 0x8f, 0x51, 0xdf, 0x42,
0x15, 0x07, 0x93, 0xde, 0x34, 0xca, 0x12, 0xf2, 0x55, 0x1b, 0x71, 0x0e, 0x2f, 0xc4, 0xa7, 0xe8,
0xef, 0x85, 0xe0, 0xb2, 0xd0, 0x15, 0xb4, 0xd7, 0xaf, 0x82, 0xc7, 0x3f, 0x27, 0xc1, 0x74, 0x98,
0x0d, 0x9d, 0xf3, 0x83, 0xc9, 0x3f, 0x8b, 0xd2, 0xb7, 0x00, 0xfd, 0xf1, 0xbd, 0x4a, 0x40, 0xab,
0 | {
return m.Result
} | conditional_block |
moc_guestagent_exec.pb.go | x95, 0x3a, 0x30, 0x21, 0x02, 0x62, 0x40, 0x82, 0x2a, 0x75, 0xdd,
0x10, 0x91, 0xe4, 0x67, 0xfc, 0x07, 0xca, 0x2b, 0xf0, 0xd4, 0xc8, 0x76, 0x4a, 0x85, 0x84, 0x3a,
0x30, 0x45, 0x9f, 0xbf, 0x7f, 0xf1, 0x97, 0xa0, 0x31, 0xdf, 0x70, 0x46, 0x1b, 0x60, 0x8b, 0xd2,
0x70, 0xa5, 0x8b, 0x92, 0xb7, 0x7a, 0x61, 0xcf, 0x88, 0x90, 0xa0, 0x01, 0x8f, 0x1a, 0x60, 0x64,
0x47, 0x91, 0x62, 0xd5, 0x54, 0x6d, 0x32, 0x2e, 0x01, 0xca, 0x9a, 0x53, 0xa7, 0x59, 0x9a, 0x35,
0x7d, 0x91, 0x85, 0x10, 0x5c, 0x2a, 0xef, 0x4a, 0xfe, 0xdb, 0x40, 0x06, 0x4d, 0x03, 0x6d, 0xf7,
0xf0, 0x44, 0x6a, 0x50, 0x34, 0xdf, 0x70, 0x96, 0xf3, 0x27, 0x1b, 0x89, 0x8f, 0x51, 0xdf, 0x42,
0x15, 0x07, 0x93, 0xde, 0x34, 0xca, 0x12, 0xf2, 0x55, 0x1b, 0x71, 0x0e, 0x2f, 0xc4, 0xa7, 0xe8,
0xef, 0x85, 0xe0, 0xb2, 0xd0, 0x15, 0xb4, 0xd7, 0xaf, 0x82, 0xc7, 0x3f, 0x27, 0xc1, 0x74, 0x98,
0x0d, 0x9d, 0xf3, 0x83, 0xc9, 0x3f, 0x8b, 0xd2, 0xb7, 0x00, 0xfd, 0xf1, 0xbd, 0x4a, 0x40, 0xab,
0xf8, 0x37, 0x8a, 0x33, 0x14, 0xe6, 0x5c, 0x99, 0x5a, 0xbb, 0x46, 0x6b, 0xf1, 0x1b, 0x90, 0xed,
0x06, 0x64, 0x06, 0x50, 0xdf, 0x14, 0xb5, 0xe1, 0x79, 0xa7, 0xc4, 0x23, 0xd4, 0x9f, 0x4b, 0x09,
0x32, 0xee, 0x4d, 0x82, 0xe9, 0x20, 0xf7, 0x20, 0xbd, 0x43, 0xbf, 0x6c, 0x24, 0x8e, 0xd1, 0x6f,
0xbb, 0x4d, 0xd1, 0xae, 0xe2, 0xc0, 0xf1, 0x5b, 0x88, 0xff, 0xa1, 0x10, 0x8c, 0x16, 0xc6, 0x77,
0x0d, 0xf2, 0x0e, 0xe1, 0x43, 0x14, 0x2a, 0x5d, 0x68, 0xa3, 0x5c, 0x60, 0x94, 0x45, 0xee, 0xb5,
0xaf, 0xdc, 0x51, 0xde, 0x51, 0xd9, 0x3d, 0x1a, 0xd8, 0xf8, 0x33, 0x7b, 0x0f, 0x7c, 0x89, 0xc2,
0xf3, 0xf6, 0x19, 0x1e, 0x39, 0x3e, 0xd8, 0x73, 0x45, 0xff, 0x35, 0x92, 0x74, 0x9f, 0xc4, 0x0f,
0x97, 0xfe, 0x98, 0xd1, 0xdb, 0xa3, 0xb2, 0xd2, 0x0f, 0x66, 0x49, 0x18, 0x34, 0xb4, 0xa9, 0x98,
0x04, 0x05, 0x6b, 0x6d, 0xff, 0x21, 0x2a, 0x05, 0xa3, 0x3b, 0x3f, 0x75, 0xfe, 0x65, 0xe8, 0x16,
0x3a, 0x79, 0x0f, 0x00, 0x00, 0xff, 0xff, 0x28, 0xee, 0xb4, 0xa6, 0x6b, 0x02, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// ExecAgentClient is the client API for ExecAgent service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type ExecAgentClient interface {
Invoke(ctx context.Context, in *ExecRequest, opts ...grpc.CallOption) (*ExecResponse, error)
}
type execAgentClient struct {
cc *grpc.ClientConn
}
func NewExecAgentClient(cc *grpc.ClientConn) ExecAgentClient {
return &execAgentClient{cc}
}
func (c *execAgentClient) Invoke(ctx context.Context, in *ExecRequest, opts ...grpc.CallOption) (*ExecResponse, error) {
out := new(ExecResponse)
err := c.cc.Invoke(ctx, "/moc.guestagent.admin.ExecAgent/Invoke", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
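// Illustrative caller code (not part of the generated file): a minimal sketch of how the
// client above might be used. The address, dial options and command are assumptions.
func exampleInvoke(addr string) (*ExecResponse, error) {
	conn, err := grpc.Dial(addr, grpc.WithInsecure())
	if err != nil {
		return nil, err
	}
	defer conn.Close()
	client := NewExecAgentClient(conn)
	// Ask the guest agent to run a single command and return its output.
	return client.Invoke(context.Background(), &ExecRequest{
		Execs: []*Exec{{Command: "hostname"}},
	})
}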
// ExecAgentServer is the server API for ExecAgent service.
type ExecAgentServer interface {
Invoke(context.Context, *ExecRequest) (*ExecResponse, error)
}
// UnimplementedExecAgentServer can be embedded to have forward compatible implementations.
type UnimplementedExecAgentServer struct { | random_line_split |
||
moc_guestagent_exec.pb.go |
func (m *ExecRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_ExecRequest.Merge(m, src)
}
func (m *ExecRequest) XXX_Size() int {
return xxx_messageInfo_ExecRequest.Size(m)
}
func (m *ExecRequest) XXX_DiscardUnknown() {
xxx_messageInfo_ExecRequest.DiscardUnknown(m)
}
var xxx_messageInfo_ExecRequest proto.InternalMessageInfo
func (m *ExecRequest) GetExecs() []*Exec {
if m != nil {
return m.Execs
}
return nil
}
func (m *ExecRequest) GetOperationType() common.Operation {
if m != nil {
return m.OperationType
}
return common.Operation_GET
}
type ExecResponse struct {
Execs []*Exec `protobuf:"bytes,1,rep,name=Execs,proto3" json:"Execs,omitempty"`
Result *wrappers.BoolValue `protobuf:"bytes,2,opt,name=Result,proto3" json:"Result,omitempty"`
Error string `protobuf:"bytes,3,opt,name=Error,proto3" json:"Error,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ExecResponse) Reset() { *m = ExecResponse{} }
func (m *ExecResponse) String() string { return proto.CompactTextString(m) }
func (*ExecResponse) ProtoMessage() {}
func (*ExecResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_66e2f35f165c7840, []int{1}
}
func (m *ExecResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ExecResponse.Unmarshal(m, b)
}
func (m *ExecResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ExecResponse.Marshal(b, m, deterministic)
}
func (m *ExecResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_ExecResponse.Merge(m, src)
}
func (m *ExecResponse) XXX_Size() int {
return xxx_messageInfo_ExecResponse.Size(m)
}
func (m *ExecResponse) XXX_DiscardUnknown() {
xxx_messageInfo_ExecResponse.DiscardUnknown(m)
}
var xxx_messageInfo_ExecResponse proto.InternalMessageInfo
func (m *ExecResponse) GetExecs() []*Exec {
if m != nil {
return m.Execs
}
return nil
}
func (m *ExecResponse) GetResult() *wrappers.BoolValue {
if m != nil {
return m.Result
}
return nil
}
func (m *ExecResponse) GetError() string {
if m != nil {
return m.Error
}
return ""
}
type Exec struct {
Command string `protobuf:"bytes,1,opt,name=command,proto3" json:"command,omitempty"`
Output string `protobuf:"bytes,2,opt,name=output,proto3" json:"output,omitempty"`
Status *common.Status `protobuf:"bytes,3,opt,name=status,proto3" json:"status,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Exec) Reset() { *m = Exec{} }
func (m *Exec) String() string { return proto.CompactTextString(m) }
func (*Exec) ProtoMessage() {}
func (*Exec) Descriptor() ([]byte, []int) {
return fileDescriptor_66e2f35f165c7840, []int{2}
}
func (m *Exec) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Exec.Unmarshal(m, b)
}
func (m *Exec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Exec.Marshal(b, m, deterministic)
}
func (m *Exec) XXX_Merge(src proto.Message) {
xxx_messageInfo_Exec.Merge(m, src)
}
func (m *Exec) XXX_Size() int {
return xxx_messageInfo_Exec.Size(m)
}
func (m *Exec) XXX_DiscardUnknown() {
xxx_messageInfo_Exec.DiscardUnknown(m)
}
var xxx_messageInfo_Exec proto.InternalMessageInfo
func (m *Exec) GetCommand() string {
if m != nil {
return m.Command
}
return ""
}
func (m *Exec) GetOutput() string {
if m != nil {
return m.Output
}
return ""
}
func (m *Exec) GetStatus() *common.Status {
if m != nil {
return m.Status
}
return nil
}
func init() {
proto.RegisterType((*ExecRequest)(nil), "moc.guestagent.admin.ExecRequest")
proto.RegisterType((*ExecResponse)(nil), "moc.guestagent.admin.ExecResponse")
proto.RegisterType((*Exec)(nil), "moc.guestagent.admin.Exec")
}
func init() { proto.RegisterFile("exec/moc_guestagent_exec.proto", fileDescriptor_66e2f35f165c7840) }
var fileDescriptor_66e2f35f165c7840 = []byte{
// 351 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x52, 0xbf, 0x4e, 0xe3, 0x30,
0x1c, 0xbe, 0x5c, 0xaf, 0x39, 0xd5, 0xb9, 0xeb, 0x60, 0x55, 0x77, 0x51, 0x86, 0xaa, 0x84, 0xa5,
0x0b, 0x36, 0x0a, 0xbc, 0x00, 0x95, 0x3a, 0x30, 0x21, 0x02, 0x62, 0x40, 0x82, 0x2a, 0x75, 0xdd,
0x10, 0x91, 0xe4, 0x67, 0xfc, 0x07, 0xca, 0x2b, 0xf0, 0xd4, 0xc8, 0x76, 0x4a, 0x85, 0x84, 0x3a,
0x30, 0x45, 0x9f, 0xbf, 0x7f, 0xf1, 0x97, 0xa0, 0x31, 0xdf, 0x70, 0x46, 0x1b, 0x60, 0x8b, 0xd2,
0x70, 0xa5, 0x8b, 0x92, 0xb7, 0x7a, 0x61, 0xcf, 0x88, 0x90, 0xa0, 0x01, 0x8f, 0x1a, 0x60, 0x64,
0x47, 0x91, 0x62, 0xd5, 0x54, 0x6d, 0x32, 0x2e, 0x01, 0xca, 0x9a, 0x53, 0xa7, 0x59, 0x9a, 0x35,
0x7d, 0x91, 0x85, 0x10, 0x5c, 0x2a, 0xef, 0x4a, 0xfe, 0xdb, 0x40, 0x06, 0x4d, 0x03, 0x6d, 0xf7,
0xf0, 0x44, 0x6a, 0x50, 0x34, 0xdf, 0x70, 0x96, 0xf3, 0x27, 0x1b, 0x89, 0x8f, 0x51, 0xdf, 0x42,
0x15, 0x07, 0x93, 0xde, 0x34, 0xca, 0x12, 0xf2, 0x55, 0x1b, 0x71, 0x0e, | {
return xxx_messageInfo_ExecRequest.Marshal(b, m, deterministic)
} | identifier_body |
|
generate_synthetic_immunoblot_dataset.py | 'norm_EC-RP']],
measured_variables={'norm_IC-RP': 'semi-quantitative',
'norm_EC-RP': 'semi-quantitative'})
dataset.measurement_error_df = fluorescence_data[['nrm_var_IC-RP', 'nrm_var_EC-RP']].\
rename(columns={'nrm_var_IC-RP': 'norm_IC-RP__error',
'nrm_var_EC-RP': 'norm_EC-RP__error'}) # DataSet expects error columns to have "__error" suffix
# ------- Starting Parameters -------
param_names = [p.name for p in model.parameters_rules()][:-6]
true_params = np.load('true_params.npy')[:len(param_names)]
parameters = pd.DataFrame([[10**p for p in true_params]], columns=param_names)
# ------- Simulations -------
sim = Simulator(model=model, param_values=parameters, solver='cupsoda')
sim_results = sim.run(np.linspace(0, fluorescence_data.time.max(), 100))
results = sim_results.opt2q_dataframe.reset_index().rename(columns={'index': 'time'})
cm = plt.get_cmap('tab10')
if __name__ == '__main__':
plt.plot(results['time'], results['cPARP_obs'], label=f'cPARP_obs', alpha=0.8, color=cm.colors[0])
plt.plot(results['time'], results['tBID_obs'], label=f'tBID_obs', alpha=0.8, color=cm.colors[1])
plt.legend()
plt.title('simulations based on "true parameters"')
plt.xlabel('time [seconds]')
plt.ylabel('copies per cell')
plt.show()
# ------- Fluorescence -------
# The "true parameters" are based on best fit to these data.
measurement_model = Pipeline(
steps=[('interpolate', Interpolate('time', ['cPARP_obs', 'tBID_obs'], dataset.data['time'])),
('normalize', ScaleToMinMax(feature_range=(0, 1), columns=['cPARP_obs', 'tBID_obs']))
])
measurement_results = measurement_model.transform(results[['tBID_obs', 'cPARP_obs', 'time']])
if __name__ == '__main__':
plt.plot(measurement_results['time'], measurement_results['cPARP_obs'], label=f'simulated PARP cleavage')
plt.plot(fluorescence_data['time'], fluorescence_data['norm_EC-RP'], '--', label=f'norm_EC-RP data', color=cm.colors[0])
plt.fill_between(fluorescence_data['time'],
fluorescence_data['norm_EC-RP']-np.sqrt(dataset.measurement_error_df['norm_EC-RP__error']),
fluorescence_data['norm_EC-RP']+np.sqrt(dataset.measurement_error_df['norm_EC-RP__error']),
color=cm.colors[0], alpha=0.2)
plt.title('"True Parameters" Compared w/ cPARP Fluorescence Data')
plt.xlabel('time [seconds]')
plt.ylabel('fluorescence [AU]')
plt.legend()
plt.show()
plt.plot(measurement_results['time'], measurement_results['tBID_obs'],
label=f'simulated Bid truncation', color=cm.colors[1])
plt.plot(fluorescence_data['time'], fluorescence_data['norm_IC-RP'], '--',
label=f'norm_IC-RP data', color=cm.colors[1])
plt.fill_between(fluorescence_data['time'],
fluorescence_data['norm_IC-RP']-np.sqrt(dataset.measurement_error_df['norm_IC-RP__error']),
fluorescence_data['norm_IC-RP']+np.sqrt(dataset.measurement_error_df['norm_IC-RP__error']),
color=cm.colors[1], alpha=0.2)
plt.title('"True Parameters" compared w/ tBID Fluorescence Data')
plt.xlabel('time [seconds]')
plt.ylabel('fluorescence [AU]')
plt.legend()
plt.show()
# ------- Immunoblot -------
def immunoblot_number_of_categories(variances, expected_misclassification_rate=0.05, data_range=1):
# Effective Number of Bits in Fluorescence Data
# ref -- https://en.wikipedia.org/wiki/Effective_number_of_bits
# Fluorescence data was normalized to 0-1, hence data_range=1.
|
IC_RP__n_cats = immunoblot_number_of_categories(dataset.measurement_error_df['norm_IC-RP__error'])
EC_RP__n_cats = immunoblot_number_of_categories(dataset.measurement_error_df['norm_EC-RP__error'])
# ------- Immunoblot Data Set -------
ordinal_dataset_size = 14 # 28, 16, 14, 7 divide evenly into the total 112 rows.
len_fl_data = len(fluorescence_data)
# immunoblot_data_0 is necessary to setup the classifier
immunoblot_data_0 = fluorescence_data[['time']].iloc[1::int(len_fl_data / ordinal_dataset_size)]
immunoblot_data_0['tBID_blot'] = np.tile(range(IC_RP__n_cats), int(np.ceil(ordinal_dataset_size/IC_RP__n_cats)))[:ordinal_dataset_size]
immunoblot_data_0['cPARP_blot'] = np.tile(range(EC_RP__n_cats), int(np.ceil(ordinal_dataset_size/EC_RP__n_cats)))[:ordinal_dataset_size]
immunoblot_dataset = DataSet(immunoblot_data_0, measured_variables={'tBID_blot': 'ordinal', 'cPARP_blot': 'ordinal'})
# set up classifier
x_scaled = ScaleToMinMax(columns=['tBID_obs', 'cPARP_obs'])\
.transform(results[['time', 'tBID_obs', 'cPARP_obs']])
x_int = Interpolate('time', ['tBID_obs', 'cPARP_obs'], immunoblot_data_0['time'])\
.transform(x_scaled)
lc = LogisticClassifier(immunoblot_dataset,
column_groups={'tBID_blot': ['tBID_obs'], 'cPARP_blot': ['cPARP_obs']},
do_fit_transform=True,
classifier_type='ordinal_eoc')
lc.set_up(x_int)
# ------- Define Classifier Parameters-------
a = 50
lc.set_params(** {'coefficients__cPARP_blot__coef_': np.array([a]),
'coefficients__cPARP_blot__theta_': np.array([0.03, 0.20, 0.97])*a,
'coefficients__tBID_blot__coef_': np.array([a]),
'coefficients__tBID_blot__theta_': np.array([0.03, 0.4, 0.82, 0.97])*a})
# plot classifier
lc.do_fit_transform = False
plot_domain = pd.DataFrame({'tBID_obs': np.linspace(0, 1, 100), 'cPARP_obs': np.linspace(0, 1, 100)})
lc_results = lc.transform(plot_domain)
cPARP_results = lc_results.filter(regex='cPARP_blot')
tBID_results = lc_results.filter(regex='tBID_blot')
# ------- Synthetic Immunoblot Data -------
n = 180
time_span = list(range(fluorescence_data['time'].max()))[::n]  # [::n] keeps one simulated measurement every n seconds of the fluorescence time span
x_scaled = ScaleToMinMax(columns=['tBID_obs', 'cPARP_obs'])\
.transform(results[['time', 'tBID_obs', 'cPARP_obs']])
x_int = Interpolate('time', ['tBID_obs', 'cPARP_obs'], time_span)\
.transform(x_scaled)
lc_results = lc.transform(x_int)
tBID_blot_cols = lc_results.filter(regex='tBID_blot__').columns
cPARP_blot_cols = lc_results.filter(regex='cPARP_blot__').columns
lc_results['tBID_blot'] = lc_results.apply(lambda x: np.random.choice(
[int(c.split('__')[1]) for c in tBID_blot_cols],
p=[x[c] for c in tBID_blot_cols]), axis=1)
lc_results['cPARP_blot'] = lc_results.apply(lambda x: np.random.choice(
[int(c.split('__')[1]) for c in cPARP_blot_cols],
p=[x[c] for c in cPARP_blot_cols]), axis=1)
immunoblot_data = lc_results[['time', 'tBID_blot', 'cPARP_blot']]
synthetic_immunoblot_data = DataSet(immunoblot_data,
measured_variables={'tBID_blot': 'ordinal', 'cPARP_blot': 'ordinal'})
if __name__ == '__main__' and save_dataset:
import pickle
| data_rms = np.sqrt(variances).mean()
z_stat = norm.ppf(1 - expected_misclassification_rate)
peak_noise = z_stat*data_rms
signal_to_noise_ratio = 20*np.log10(peak_noise/data_range)
effective_number_of_bits = -(signal_to_noise_ratio+1.76)/6.02
return int(np.floor(0.70*(2**effective_number_of_bits))) # No. of categories: 70% of fluorescence data bit capacity | identifier_body |
generate_synthetic_immunoblot_dataset.py | '__main__':
plt.plot(measurement_results['time'], measurement_results['cPARP_obs'], label=f'simulated PARP cleavage')
plt.plot(fluorescence_data['time'], fluorescence_data['norm_EC-RP'], '--', label=f'norm_EC-RP data', color=cm.colors[0])
plt.fill_between(fluorescence_data['time'],
fluorescence_data['norm_EC-RP']-np.sqrt(dataset.measurement_error_df['norm_EC-RP__error']),
fluorescence_data['norm_EC-RP']+np.sqrt(dataset.measurement_error_df['norm_EC-RP__error']),
color=cm.colors[0], alpha=0.2)
plt.title('"True Parameters" Compared w/ cPARP Fluorescence Data')
plt.xlabel('time [seconds]')
plt.ylabel('fluorescence [AU]')
plt.legend()
plt.show()
plt.plot(measurement_results['time'], measurement_results['tBID_obs'],
label=f'simulated Bid truncation', color=cm.colors[1])
plt.plot(fluorescence_data['time'], fluorescence_data['norm_IC-RP'], '--',
label=f'norm_IC-RP data', color=cm.colors[1])
plt.fill_between(fluorescence_data['time'],
fluorescence_data['norm_IC-RP']-np.sqrt(dataset.measurement_error_df['norm_IC-RP__error']),
fluorescence_data['norm_IC-RP']+np.sqrt(dataset.measurement_error_df['norm_IC-RP__error']),
color=cm.colors[1], alpha=0.2)
plt.title('"True Parameters" compared w/ tBID Fluorescence Data')
plt.xlabel('time [seconds]')
plt.ylabel('fluorescence [AU]')
plt.legend()
plt.show()
# ------- Immunoblot -------
def immunoblot_number_of_categories(variances, expected_misclassification_rate=0.05, data_range=1):
# Effective Number of Bits in Fluorescence Data
# ref -- https://en.wikipedia.org/wiki/Effective_number_of_bits
# Fluorescence data was normalized to 0-1, hence data_range=1.
data_rms = np.sqrt(variances).mean()
z_stat = norm.ppf(1 - expected_misclassification_rate)
peak_noise = z_stat*data_rms
signal_to_noise_ratio = 20*np.log10(peak_noise/data_range)
effective_number_of_bits = -(signal_to_noise_ratio+1.76)/6.02
return int(np.floor(0.70*(2**effective_number_of_bits))) # No. of categories: 70% of fluorescence data bit capacity
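# Worked example with hypothetical numbers (not taken from the fluorescence data): a
# normalized variance of 0.0025 gives rms noise 0.05; with a 5% misclassification rate
# z ~ 1.645, so peak_noise ~ 0.082, SNR ~ -21.7 dB and ENOB ~ 3.3 bits, and
# floor(0.7 * 2**3.3) yields about 6 ordinal categories.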
IC_RP__n_cats = immunoblot_number_of_categories(dataset.measurement_error_df['norm_IC-RP__error'])
EC_RP__n_cats = immunoblot_number_of_categories(dataset.measurement_error_df['norm_EC-RP__error'])
# ------- Immunoblot Data Set -------
ordinal_dataset_size = 14 # 28, 16, 14, 7 divide evenly into the total 112 rows.
len_fl_data = len(fluorescence_data)
# immunoblot_data_0 is necessary to setup the classifier
immunoblot_data_0 = fluorescence_data[['time']].iloc[1::int(len_fl_data / ordinal_dataset_size)]
immunoblot_data_0['tBID_blot'] = np.tile(range(IC_RP__n_cats), int(np.ceil(ordinal_dataset_size/IC_RP__n_cats)))[:ordinal_dataset_size]
immunoblot_data_0['cPARP_blot'] = np.tile(range(EC_RP__n_cats), int(np.ceil(ordinal_dataset_size/EC_RP__n_cats)))[:ordinal_dataset_size]
immunoblot_dataset = DataSet(immunoblot_data_0, measured_variables={'tBID_blot': 'ordinal', 'cPARP_blot': 'ordinal'})
# set up classifier
x_scaled = ScaleToMinMax(columns=['tBID_obs', 'cPARP_obs'])\
.transform(results[['time', 'tBID_obs', 'cPARP_obs']])
x_int = Interpolate('time', ['tBID_obs', 'cPARP_obs'], immunoblot_data_0['time'])\
.transform(x_scaled)
lc = LogisticClassifier(immunoblot_dataset,
column_groups={'tBID_blot': ['tBID_obs'], 'cPARP_blot': ['cPARP_obs']},
do_fit_transform=True,
classifier_type='ordinal_eoc')
lc.set_up(x_int)
# ------- Define Classifier Parameters-------
a = 50
lc.set_params(** {'coefficients__cPARP_blot__coef_': np.array([a]),
'coefficients__cPARP_blot__theta_': np.array([0.03, 0.20, 0.97])*a,
'coefficients__tBID_blot__coef_': np.array([a]),
'coefficients__tBID_blot__theta_': np.array([0.03, 0.4, 0.82, 0.97])*a})
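# Here coef_ (the shared slope a) sets how sharp the transitions between adjacent
# categories are, while theta_ places the ordinal cut-points along the normalized 0-1
# signal; both are fixed by hand rather than fitted (do_fit_transform is turned off below).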
# plot classifier
lc.do_fit_transform = False
plot_domain = pd.DataFrame({'tBID_obs': np.linspace(0, 1, 100), 'cPARP_obs': np.linspace(0, 1, 100)})
lc_results = lc.transform(plot_domain)
cPARP_results = lc_results.filter(regex='cPARP_blot')
tBID_results = lc_results.filter(regex='tBID_blot')
# ------- Synthetic Immunoblot Data -------
n = 180
time_span = list(range(fluorescence_data['time'].max()))[::n]  # [::n] keeps one simulated measurement every n seconds of the fluorescence time span
x_scaled = ScaleToMinMax(columns=['tBID_obs', 'cPARP_obs'])\
.transform(results[['time', 'tBID_obs', 'cPARP_obs']])
x_int = Interpolate('time', ['tBID_obs', 'cPARP_obs'], time_span)\
.transform(x_scaled)
lc_results = lc.transform(x_int)
tBID_blot_cols = lc_results.filter(regex='tBID_blot__').columns
cPARP_blot_cols = lc_results.filter(regex='cPARP_blot__').columns
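# For each time point, sample an ordinal category from the classifier's predicted
# category probabilities; these draws become the synthetic immunoblot readings.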
lc_results['tBID_blot'] = lc_results.apply(lambda x: np.random.choice(
[int(c.split('__')[1]) for c in tBID_blot_cols],
p=[x[c] for c in tBID_blot_cols]), axis=1)
lc_results['cPARP_blot'] = lc_results.apply(lambda x: np.random.choice(
[int(c.split('__')[1]) for c in cPARP_blot_cols],
p=[x[c] for c in cPARP_blot_cols]), axis=1)
immunoblot_data = lc_results[['time', 'tBID_blot', 'cPARP_blot']]
synthetic_immunoblot_data = DataSet(immunoblot_data,
measured_variables={'tBID_blot': 'ordinal', 'cPARP_blot': 'ordinal'})
if __name__ == '__main__' and save_dataset:
import pickle
import datetime as dt
now = dt.datetime.now()
with open(f'synthetic_WB_dataset_{n}s_{now.year}_{now.month}_{now.day}.pkl', 'wb') as output:
pickle.dump(synthetic_immunoblot_data, output, pickle.HIGHEST_PROTOCOL)
with open(f'synthetic_WB_dataset_{n}s_{now.year}_{now.month}_{now.day}.pkl', 'rb') as data_input:
loaded_dataset = pickle.load(data_input)
if __name__ == '__main__':
| fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 6), sharey='all', gridspec_kw={'width_ratios': [2, 1]})
ax1.scatter(x=synthetic_immunoblot_data.data['time'],
y=synthetic_immunoblot_data.data['cPARP_blot'].values / (EC_RP__n_cats-1),
s=10, color=cm.colors[0], label=f'cPARP blot data', alpha=0.5)
ax1.plot(x_scaled['time'], x_scaled['cPARP_obs'], color=cm.colors[0], label=f'simulated cPARP')
ax1.legend()
for col in sorted(list(cPARP_results.columns)):
ax2.plot(cPARP_results[col].values, np.linspace(0, 1, 100), label=col)
ax1.set_title('Classification of Simulated cPARP')
ax1.set_xlabel('time [seconds]')
ax1.set_ylabel('fluorescence [AU]')
ax2.set_xlabel('category probability')
ax2.legend()
plt.show()
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 6), sharey='all', gridspec_kw={'width_ratios': [2, 1]})
ax1.plot(x_scaled['time'], x_scaled['tBID_obs'], color=cm.colors[1], label=f'simulated tBID')
ax1.scatter(x=synthetic_immunoblot_data.data['time'], | conditional_block |
|
generate_synthetic_immunoblot_dataset.py | 'norm_EC-RP']],
measured_variables={'norm_IC-RP': 'semi-quantitative',
'norm_EC-RP': 'semi-quantitative'})
dataset.measurement_error_df = fluorescence_data[['nrm_var_IC-RP', 'nrm_var_EC-RP']].\
rename(columns={'nrm_var_IC-RP': 'norm_IC-RP__error',
'nrm_var_EC-RP': 'norm_EC-RP__error'}) # DataSet expects error columns to have "__error" suffix
# ------- Starting Parameters -------
param_names = [p.name for p in model.parameters_rules()][:-6]
true_params = np.load('true_params.npy')[:len(param_names)]
parameters = pd.DataFrame([[10**p for p in true_params]], columns=param_names)
# ------- Simulations -------
sim = Simulator(model=model, param_values=parameters, solver='cupsoda')
sim_results = sim.run(np.linspace(0, fluorescence_data.time.max(), 100))
results = sim_results.opt2q_dataframe.reset_index().rename(columns={'index': 'time'})
cm = plt.get_cmap('tab10')
if __name__ == '__main__':
plt.plot(results['time'], results['cPARP_obs'], label=f'cPARP_obs', alpha=0.8, color=cm.colors[0])
plt.plot(results['time'], results['tBID_obs'], label=f'tBID_obs', alpha=0.8, color=cm.colors[1])
plt.legend()
plt.title('simulations based on "true parameters"')
plt.xlabel('time [seconds]')
plt.ylabel('copies per cell')
plt.show()
# ------- Fluorescence -------
# The "true parameters" are based on best fit to these data.
measurement_model = Pipeline(
steps=[('interpolate', Interpolate('time', ['cPARP_obs', 'tBID_obs'], dataset.data['time'])),
('normalize', ScaleToMinMax(feature_range=(0, 1), columns=['cPARP_obs', 'tBID_obs']))
])
measurement_results = measurement_model.transform(results[['tBID_obs', 'cPARP_obs', 'time']])
if __name__ == '__main__':
plt.plot(measurement_results['time'], measurement_results['cPARP_obs'], label=f'simulated PARP cleavage')
plt.plot(fluorescence_data['time'], fluorescence_data['norm_EC-RP'], '--', label=f'norm_EC-RP data', color=cm.colors[0])
plt.fill_between(fluorescence_data['time'],
fluorescence_data['norm_EC-RP']-np.sqrt(dataset.measurement_error_df['norm_EC-RP__error']),
fluorescence_data['norm_EC-RP']+np.sqrt(dataset.measurement_error_df['norm_EC-RP__error']),
color=cm.colors[0], alpha=0.2)
plt.title('"True Parameters" Compared w/ cPARP Fluorescence Data')
plt.xlabel('time [seconds]')
plt.ylabel('fluorescence [AU]')
plt.legend()
plt.show()
plt.plot(measurement_results['time'], measurement_results['tBID_obs'],
label=f'simulated Bid truncation', color=cm.colors[1])
plt.plot(fluorescence_data['time'], fluorescence_data['norm_IC-RP'], '--',
label=f'norm_IC-RP data', color=cm.colors[1])
plt.fill_between(fluorescence_data['time'],
fluorescence_data['norm_IC-RP']-np.sqrt(dataset.measurement_error_df['norm_IC-RP__error']),
fluorescence_data['norm_IC-RP']+np.sqrt(dataset.measurement_error_df['norm_IC-RP__error']),
color=cm.colors[1], alpha=0.2)
plt.title('"True Parameters" compared w/ tBID Fluorescence Data')
plt.xlabel('time [seconds]')
plt.ylabel('fluorescence [AU]')
plt.legend()
plt.show()
# ------- Immunoblot -------
def immunoblot_number_of_categories(variances, expected_misclassification_rate=0.05, data_range=1):
# Effective Number of Bits in Fluorescence Data
# ref -- https://en.wikipedia.org/wiki/Effective_number_of_bits
# Fluorescence data was normalized to 0-1, hence data_range=1.
data_rms = np.sqrt(variances).mean()
z_stat = norm.ppf(1 - expected_misclassification_rate)
peak_noise = z_stat*data_rms
signal_to_noise_ratio = 20*np.log10(peak_noise/data_range)
effective_number_of_bits = -(signal_to_noise_ratio+1.76)/6.02
return int(np.floor(0.70*(2**effective_number_of_bits))) # No. of categories: 70% of fluorescence data bit capacity
IC_RP__n_cats = immunoblot_number_of_categories(dataset.measurement_error_df['norm_IC-RP__error'])
EC_RP__n_cats = immunoblot_number_of_categories(dataset.measurement_error_df['norm_EC-RP__error'])
# ------- Immunoblot Data Set -------
ordinal_dataset_size = 14 # 28, 16, 14, 7 divide evenly into the total 112 rows.
len_fl_data = len(fluorescence_data)
# immunoblot_data_0 is necessary to setup the classifier
immunoblot_data_0 = fluorescence_data[['time']].iloc[1::int(len_fl_data / ordinal_dataset_size)]
immunoblot_data_0['tBID_blot'] = np.tile(range(IC_RP__n_cats), int(np.ceil(ordinal_dataset_size/IC_RP__n_cats)))[:ordinal_dataset_size]
immunoblot_data_0['cPARP_blot'] = np.tile(range(EC_RP__n_cats), int(np.ceil(ordinal_dataset_size/EC_RP__n_cats)))[:ordinal_dataset_size]
immunoblot_dataset = DataSet(immunoblot_data_0, measured_variables={'tBID_blot': 'ordinal', 'cPARP_blot': 'ordinal'})
# set up classifier
x_scaled = ScaleToMinMax(columns=['tBID_obs', 'cPARP_obs'])\
.transform(results[['time', 'tBID_obs', 'cPARP_obs']])
x_int = Interpolate('time', ['tBID_obs', 'cPARP_obs'], immunoblot_data_0['time'])\
.transform(x_scaled)
lc = LogisticClassifier(immunoblot_dataset,
column_groups={'tBID_blot': ['tBID_obs'], 'cPARP_blot': ['cPARP_obs']},
do_fit_transform=True,
classifier_type='ordinal_eoc')
lc.set_up(x_int)
# ------- Define Classifier Parameters-------
a = 50
lc.set_params(** {'coefficients__cPARP_blot__coef_': np.array([a]),
'coefficients__cPARP_blot__theta_': np.array([0.03, 0.20, 0.97])*a,
'coefficients__tBID_blot__coef_': np.array([a]),
'coefficients__tBID_blot__theta_': np.array([0.03, 0.4, 0.82, 0.97])*a})
| lc_results = lc.transform(plot_domain)
cPARP_results = lc_results.filter(regex='cPARP_blot')
tBID_results = lc_results.filter(regex='tBID_blot')
# ------- Synthetic Immunoblot Data -------
n = 180
time_span = list(range(fluorescence_data['time'].max()))[::n]  # [::n] keeps one simulated measurement every n seconds of the fluorescence time span
x_scaled = ScaleToMinMax(columns=['tBID_obs', 'cPARP_obs'])\
.transform(results[['time', 'tBID_obs', 'cPARP_obs']])
x_int = Interpolate('time', ['tBID_obs', 'cPARP_obs'], time_span)\
.transform(x_scaled)
lc_results = lc.transform(x_int)
tBID_blot_cols = lc_results.filter(regex='tBID_blot__').columns
cPARP_blot_cols = lc_results.filter(regex='cPARP_blot__').columns
lc_results['tBID_blot'] = lc_results.apply(lambda x: np.random.choice(
[int(c.split('__')[1]) for c in tBID_blot_cols],
p=[x[c] for c in tBID_blot_cols]), axis=1)
lc_results['cPARP_blot'] = lc_results.apply(lambda x: np.random.choice(
[int(c.split('__')[1]) for c in cPARP_blot_cols],
p=[x[c] for c in cPARP_blot_cols]), axis=1)
immunoblot_data = lc_results[['time', 'tBID_blot', 'cPARP_blot']]
synthetic_immunoblot_data = DataSet(immunoblot_data,
measured_variables={'tBID_blot': 'ordinal', 'cPARP_blot': 'ordinal'})
if __name__ == '__main__' and save_dataset:
import pickle
|
# plot classifier
lc.do_fit_transform = False
plot_domain = pd.DataFrame({'tBID_obs': np.linspace(0, 1, 100), 'cPARP_obs': np.linspace(0, 1, 100)}) | random_line_split |
generate_synthetic_immunoblot_dataset.py | 'norm_EC-RP']],
measured_variables={'norm_IC-RP': 'semi-quantitative',
'norm_EC-RP': 'semi-quantitative'})
dataset.measurement_error_df = fluorescence_data[['nrm_var_IC-RP', 'nrm_var_EC-RP']].\
rename(columns={'nrm_var_IC-RP': 'norm_IC-RP__error',
'nrm_var_EC-RP': 'norm_EC-RP__error'}) # DataSet expects error columns to have "__error" suffix
# ------- Starting Parameters -------
param_names = [p.name for p in model.parameters_rules()][:-6]
true_params = np.load('true_params.npy')[:len(param_names)]
parameters = pd.DataFrame([[10**p for p in true_params]], columns=param_names)
# ------- Simulations -------
sim = Simulator(model=model, param_values=parameters, solver='cupsoda')
sim_results = sim.run(np.linspace(0, fluorescence_data.time.max(), 100))
results = sim_results.opt2q_dataframe.reset_index().rename(columns={'index': 'time'})
cm = plt.get_cmap('tab10')
if __name__ == '__main__':
plt.plot(results['time'], results['cPARP_obs'], label=f'cPARP_obs', alpha=0.8, color=cm.colors[0])
plt.plot(results['time'], results['tBID_obs'], label=f'tBID_obs', alpha=0.8, color=cm.colors[1])
plt.legend()
plt.title('simulations based on "true parameters"')
plt.xlabel('time [seconds]')
plt.ylabel('copies per cell')
plt.show()
# ------- Fluorescence -------
# The "true parameters" are based on best fit to these data.
measurement_model = Pipeline(
steps=[('interpolate', Interpolate('time', ['cPARP_obs', 'tBID_obs'], dataset.data['time'])),
('normalize', ScaleToMinMax(feature_range=(0, 1), columns=['cPARP_obs', 'tBID_obs']))
])
measurement_results = measurement_model.transform(results[['tBID_obs', 'cPARP_obs', 'time']])
if __name__ == '__main__':
plt.plot(measurement_results['time'], measurement_results['cPARP_obs'], label=f'simulated PARP cleavage')
plt.plot(fluorescence_data['time'], fluorescence_data['norm_EC-RP'], '--', label=f'norm_EC-RP data', color=cm.colors[0])
plt.fill_between(fluorescence_data['time'],
fluorescence_data['norm_EC-RP']-np.sqrt(dataset.measurement_error_df['norm_EC-RP__error']),
fluorescence_data['norm_EC-RP']+np.sqrt(dataset.measurement_error_df['norm_EC-RP__error']),
color=cm.colors[0], alpha=0.2)
plt.title('"True Parameters" Compared w/ cPARP Fluorescence Data')
plt.xlabel('time [seconds]')
plt.ylabel('fluorescence [AU]')
plt.legend()
plt.show()
plt.plot(measurement_results['time'], measurement_results['tBID_obs'],
label=f'simulated Bid truncation', color=cm.colors[1])
plt.plot(fluorescence_data['time'], fluorescence_data['norm_IC-RP'], '--',
label=f'norm_IC-RP data', color=cm.colors[1])
plt.fill_between(fluorescence_data['time'],
fluorescence_data['norm_IC-RP']-np.sqrt(dataset.measurement_error_df['norm_IC-RP__error']),
fluorescence_data['norm_IC-RP']+np.sqrt(dataset.measurement_error_df['norm_IC-RP__error']),
color=cm.colors[1], alpha=0.2)
plt.title('"True Parameters" compared w/ tBID Fluorescence Data')
plt.xlabel('time [seconds]')
plt.ylabel('fluorescence [AU]')
plt.legend()
plt.show()
# ------- Immunoblot -------
def | (variances, expected_misclassification_rate=0.05, data_range=1):
# Effective Number of Bits in Fluorescence Data
# ref -- https://en.wikipedia.org/wiki/Effective_number_of_bits
# Fluorescence data was normalized to 0-1, hence data_range=1.
data_rms = np.sqrt(variances).mean()
z_stat = norm.ppf(1 - expected_misclassification_rate)
peak_noise = z_stat*data_rms
signal_to_noise_ratio = 20*np.log10(peak_noise/data_range)
effective_number_of_bits = -(signal_to_noise_ratio+1.76)/6.02
return int(np.floor(0.70*(2**effective_number_of_bits))) # No. of categories: 70% of fluorescence data bit capacity
IC_RP__n_cats = immunoblot_number_of_categories(dataset.measurement_error_df['norm_IC-RP__error'])
EC_RP__n_cats = immunoblot_number_of_categories(dataset.measurement_error_df['norm_EC-RP__error'])
# ------- Immunoblot Data Set -------
ordinal_dataset_size = 14 # 28, 16, 14, 7 divide evenly into the total 112 rows.
len_fl_data = len(fluorescence_data)
# immunoblot_data_0 is necessary to setup the classifier
immunoblot_data_0 = fluorescence_data[['time']].iloc[1::int(len_fl_data / ordinal_dataset_size)]
immunoblot_data_0['tBID_blot'] = np.tile(range(IC_RP__n_cats), int(np.ceil(ordinal_dataset_size/IC_RP__n_cats)))[:ordinal_dataset_size]
immunoblot_data_0['cPARP_blot'] = np.tile(range(EC_RP__n_cats), int(np.ceil(ordinal_dataset_size/EC_RP__n_cats)))[:ordinal_dataset_size]
immunoblot_dataset = DataSet(immunoblot_data_0, measured_variables={'tBID_blot': 'ordinal', 'cPARP_blot': 'ordinal'})
# set up classifier
x_scaled = ScaleToMinMax(columns=['tBID_obs', 'cPARP_obs'])\
.transform(results[['time', 'tBID_obs', 'cPARP_obs']])
x_int = Interpolate('time', ['tBID_obs', 'cPARP_obs'], immunoblot_data_0['time'])\
.transform(x_scaled)
lc = LogisticClassifier(immunoblot_dataset,
column_groups={'tBID_blot': ['tBID_obs'], 'cPARP_blot': ['cPARP_obs']},
do_fit_transform=True,
classifier_type='ordinal_eoc')
lc.set_up(x_int)
# ------- Define Classifier Parameters-------
a = 50
lc.set_params(** {'coefficients__cPARP_blot__coef_': np.array([a]),
'coefficients__cPARP_blot__theta_': np.array([0.03, 0.20, 0.97])*a,
'coefficients__tBID_blot__coef_': np.array([a]),
'coefficients__tBID_blot__theta_': np.array([0.03, 0.4, 0.82, 0.97])*a})
# plot classifier
lc.do_fit_transform = False
plot_domain = pd.DataFrame({'tBID_obs': np.linspace(0, 1, 100), 'cPARP_obs': np.linspace(0, 1, 100)})
lc_results = lc.transform(plot_domain)
cPARP_results = lc_results.filter(regex='cPARP_blot')
tBID_results = lc_results.filter(regex='tBID_blot')
# ------- Synthetic Immunoblot Data -------
n = 180
time_span = list(range(fluorescence_data['time'].max()))[::n]  # [::n] keeps one simulated measurement every n seconds of the fluorescence time span
x_scaled = ScaleToMinMax(columns=['tBID_obs', 'cPARP_obs'])\
.transform(results[['time', 'tBID_obs', 'cPARP_obs']])
x_int = Interpolate('time', ['tBID_obs', 'cPARP_obs'], time_span)\
.transform(x_scaled)
lc_results = lc.transform(x_int)
tBID_blot_cols = lc_results.filter(regex='tBID_blot__').columns
cPARP_blot_cols = lc_results.filter(regex='cPARP_blot__').columns
lc_results['tBID_blot'] = lc_results.apply(lambda x: np.random.choice(
[int(c.split('__')[1]) for c in tBID_blot_cols],
p=[x[c] for c in tBID_blot_cols]), axis=1)
lc_results['cPARP_blot'] = lc_results.apply(lambda x: np.random.choice(
[int(c.split('__')[1]) for c in cPARP_blot_cols],
p=[x[c] for c in cPARP_blot_cols]), axis=1)
immunoblot_data = lc_results[['time', 'tBID_blot', 'cPARP_blot']]
synthetic_immunoblot_data = DataSet(immunoblot_data,
measured_variables={'tBID_blot': 'ordinal', 'cPARP_blot': 'ordinal'})
if __name__ == '__main__' and save_dataset:
import pickle | immunoblot_number_of_categories | identifier_name |
media.py | 64
import typing
import logging
import pathlib
import zipfile
import functools
import mimetypes
import urllib.parse
import urllib.request
from clldutils.misc import lazyproperty, log_or_raise
import pycldf
from pycldf import orm
from csvw.datatypes import anyURI
__all__ = ['Mimetype', 'MediaTable', 'File']
class File:
"""
A `File` represents a row in a MediaTable, providing functionality to access the contents.
:ivar id: The ID of the item.
:ivar url: The URL (as `str`) to download the content associated with the item.
`File` supports media files within ZIP archives as specified in CLDF 1.2. I.e.
- :meth:`read` will extract the specified file from a downloaded ZIP archive and
- :meth:`save` will write a (deflated) ZIP archive containing the specified file as single \
member.
"""
def __init__(self, media: 'MediaTable', row: dict):
self.row = row
self.id = row[media.id_col.name]
self._mimetype = row[media.mimetype_col.name]
self.url = None
self.scheme = None
self.url_reader = media.url_reader
self.path_in_zip = row.get(media.path_in_zip_col.name) if media.path_in_zip_col else None
if media.url_col:
# 1. Look for a downloadUrl property:
self.url = row[media.url_col.name]
else:
# 2. Expand valueUrl property:
if media.id_col and media.id_col.valueUrl:
self.url = media.id_col.valueUrl.expand(**row)
if self.url:
self.url = anyURI.to_string(self.url)
self.parsed_url = urllib.parse.urlparse(self.url)
self.scheme = self.parsed_url.scheme
@classmethod
def from_dataset(
cls, ds: pycldf.Dataset, row_or_object: typing.Union[dict, orm.Media]) -> 'File':
"""
Factory method to instantiate a `File` bypassing the `Media` wrapper.
"""
return cls(
MediaTable(ds),
row_or_object.data if isinstance(row_or_object, orm.Media) else row_or_object)
def __getitem__(self, item):
"""
Access to the underlying row `dict`.
"""
return self.row[item]
@lazyproperty
def mimetype(self) -> 'Mimetype':
"""
The `Mimetype` object associated with the item.
While the mediaType column is required by the CLDF spec, this might be disabled.
If so, we use "out-of-band" methods to figure out a mimetype for the file.
"""
if self._mimetype:
# We take the mimetype reported in the dataset as authoritative.
return Mimetype(self._mimetype)
# If no mimetype is specified explicitly, we fall back to mimetype detection mechanisms:
if self.scheme in ['file', 'http', 'https']:
mt, _ = mimetypes.guess_type(self.parsed_url.path)
if mt:
return Mimetype(mt)
if self.scheme == 'data':
mt, _, data = self.parsed_url.path.partition(',')
if mt.endswith(';base64'):
mt = mt.replace(';base64', '').strip()
if mt:
return Mimetype(mt)
# There's an explicit default mimetype for data URLs!
return Mimetype('text/plain;charset=US-ASCII')
if self.scheme in ['http', 'https']:
res = urllib.request.urlopen(urllib.request.Request(self.url, method="HEAD"))
mt = res.headers.get('Content-Type')
if mt:
return Mimetype(mt)
return Mimetype('application/octet-stream')
def local_path(self, d: pathlib.Path) -> pathlib.Path:
"""
:return: The expected path of the file in the directory `d`.
"""
return d.joinpath('{}{}'.format(
self.id, '.zip' if self.path_in_zip else (self.mimetype.extension or '')))
def read(self, d=None) -> typing.Union[None, str, bytes]:
"""
:param d: A local directory where the file has been saved before. If `None`, the content \
will be read from the file's URL.
"""
if self.path_in_zip:
zipcontent = None
if d:
zipcontent = self.local_path(d).read_bytes()
if self.url:
zipcontent = self.url_reader[self.scheme](
self.parsed_url, Mimetype('application/zip'))
if zipcontent:
zf = zipfile.ZipFile(io.BytesIO(zipcontent))
return self.mimetype.read(zf.read(self.path_in_zip))
return # pragma: no cover
if d:
return self.mimetype.read(self.local_path(d).read_bytes())
if self.url:
try:
return self.url_reader[self.scheme or 'file'](self.parsed_url, self.mimetype)
except KeyError:
raise ValueError('Unsupported URL scheme: {}'.format(self.scheme))
def save(self, d: pathlib.Path) -> pathlib.Path:
"""
Saves the content of `File` in directory `d`.
:return: Path of the local file where the content has been saved.
.. note::
We use the identifier of the media item (i.e. the content of the ID column of the
associated row) as the stem of the file to be written.
"""
p = self.local_path(d)
if not p.exists():
if self.path_in_zip:
with zipfile.ZipFile(p, 'w', compression=zipfile.ZIP_DEFLATED) as zf:
zf.writestr(self.path_in_zip, self.mimetype.write(self.read()))
else:
self.mimetype.write(self.read(), p)
return p
class MediaTable(pycldf.ComponentWithValidation):
"""
Container class for a `Dataset`'s media items.
"""
def __init__(self, ds: pycldf.Dataset):
super().__init__(ds)
self.url_col = ds.get(('MediaTable', 'http://cldf.clld.org/v1.0/terms.rdf#downloadUrl'))
self.path_in_zip_col = ds.get(
(self.component, 'http://cldf.clld.org/v1.0/terms.rdf#pathInZip'))
if self.table and not self.url_col:
for col in self.table.tableSchema.columns:
if col.propertyUrl and col.propertyUrl == 'http://www.w3.org/ns/dcat#downloadUrl':
self.url_col = col
break
self.id_col = ds[self.component, 'http://cldf.clld.org/v1.0/terms.rdf#id']
self.mimetype_col = ds[self.component, 'http://cldf.clld.org/v1.0/terms.rdf#mediaType']
@lazyproperty
def url_reader(self):
return {
'http': read_http_url,
'https': read_http_url,
'data': read_data_url,
# file: URLs are interpreted relative to the location of the metadata file:
'file': functools.partial(read_file_url, self.ds.directory),
}
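# Each reader above takes the parsed URL and the expected Mimetype and returns the raw content.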
def __iter__(self) -> typing.Generator[File, None, None]:
for row in self.table:
yield File(self, row)
def validate(self, success: bool = True, log: logging.Logger = None) -> bool:
for file in self:
if not file.url:
success = False
log_or_raise('File without URL: {}'.format(file.id), log=log)
elif file.scheme == 'file':
try:
file.read()
except FileNotFoundError:
success = False
log_or_raise('Non-existing local file referenced: {}'.format(file.id), log=log)
except Exception as e: # pragma: no cover
success = False
log_or_raise('Error reading {}: {}'.format(file.id, e), log=log)
elif file.scheme == 'data':
try:
file.read()
except Exception as e: # pragma: no cover
success = False
log_or_raise('Error reading {}: {}'.format(file.id, e), log=log)
return success
Media = MediaTable
class Mimetype:
"""
A media type specification.
:ivar type: The (main) type as `str`.
:ivar subtype: The subtype as `str`.
:ivar encoding: The encoding specified with a "charset" parameter.
"""
def __init__(self, s):
|
def __eq__(self, other):
return self.string == other if isinstance(other, str) else \
(self.type, self.subtype) == (other.type, other.subtype)
@property
def is_text(self) -> bool:
return self.type == 'text'
@property
def extension(self) -> typing.Union[None, str]:
return mimetypes.guess_extension('{}/{}'.format(self.type, self.subtype))
def read(self, data: bytes) -> typing.Union[str, bytes]:
if self.is_text and not isinstance(data, str):
return data.decode(self.encoding)
return data
| self.string = s
mtype, _, param = self.string.partition(';')
param = param.strip()
self.type, _, self.subtype = mtype.partition('/')
if param.startswith('charset='):
self.encoding = param.replace('charset=', '').strip()
else:
self.encoding = 'utf8' | identifier_body |
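# A minimal usage sketch of the media API defined above (not part of media.py itself);
# the metadata path and the download directory below are illustrative assumptions.
import pathlib
from pycldf import Dataset
from pycldf.media import MediaTable

ds = Dataset.from_metadata('cldf/cldf-metadata.json')  # hypothetical CLDF dataset
for f in MediaTable(ds):
    print(f.id, f.mimetype.string, f.url)
    f.save(pathlib.Path('downloads'))  # writes downloads/<id> plus the guessed extension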
media.py | base64
import typing
import logging
import pathlib
import zipfile
import functools
import mimetypes
import urllib.parse
import urllib.request
from clldutils.misc import lazyproperty, log_or_raise
import pycldf
from pycldf import orm
from csvw.datatypes import anyURI
__all__ = ['Mimetype', 'MediaTable', 'File']
class File:
"""
A `File` represents a row in a MediaTable, providing functionality to access the contents.
:ivar id: The ID of the item.
:ivar url: The URL (as `str`) to download the content associated with the item.
`File` supports media files within ZIP archives as specified in CLDF 1.2. I.e.
- :meth:`read` will extract the specified file from a downloaded ZIP archive and
- :meth:`save` will write a (deflated) ZIP archive containing the specified file as single \
member.
"""
def __init__(self, media: 'MediaTable', row: dict):
self.row = row
self.id = row[media.id_col.name]
self._mimetype = row[media.mimetype_col.name]
self.url = None
self.scheme = None
self.url_reader = media.url_reader
self.path_in_zip = row.get(media.path_in_zip_col.name) if media.path_in_zip_col else None
if media.url_col:
# 1. Look for a downloadUrl property:
self.url = row[media.url_col.name]
else:
# 2. Expand valueUrl property:
if media.id_col and media.id_col.valueUrl:
self.url = media.id_col.valueUrl.expand(**row)
if self.url:
self.url = anyURI.to_string(self.url)
self.parsed_url = urllib.parse.urlparse(self.url)
self.scheme = self.parsed_url.scheme
@classmethod
def from_dataset(
cls, ds: pycldf.Dataset, row_or_object: typing.Union[dict, orm.Media]) -> 'File':
"""
Factory method to instantiate a `File` bypassing the `Media` wrapper.
"""
return cls(
MediaTable(ds),
row_or_object.data if isinstance(row_or_object, orm.Media) else row_or_object)
def __getitem__(self, item):
"""
Access to the underlying row `dict`.
"""
return self.row[item]
@lazyproperty
def mimetype(self) -> 'Mimetype':
"""
The `Mimetype` object associated with the item.
While the mediaType column is required by the CLDF spec, this might be disabled.
If so, we use "out-of-band" methods to figure out a mimetype for the file.
"""
if self._mimetype:
# We take the mimetype reported in the dataset as authoritative.
return Mimetype(self._mimetype)
# If no mimetype is specified explicitly, we fall back to mimetype detection mechanisms:
if self.scheme in ['file', 'http', 'https']:
mt, _ = mimetypes.guess_type(self.parsed_url.path)
if mt:
return Mimetype(mt)
if self.scheme == 'data':
mt, _, data = self.parsed_url.path.partition(',')
if mt.endswith(';base64'):
mt = mt.replace(';base64', '').strip()
if mt:
return Mimetype(mt)
# There's an explicit default mimetype for data URLs!
return Mimetype('text/plain;charset=US-ASCII')
if self.scheme in ['http', 'https']:
res = urllib.request.urlopen(urllib.request.Request(self.url, method="HEAD"))
mt = res.headers.get('Content-Type')
if mt:
return Mimetype(mt)
return Mimetype('application/octet-stream')
def local_path(self, d: pathlib.Path) -> pathlib.Path:
"""
:return: The expected path of the file in the directory `d`.
"""
return d.joinpath('{}{}'.format(
self.id, '.zip' if self.path_in_zip else (self.mimetype.extension or '')))
def read(self, d=None) -> typing.Union[None, str, bytes]:
"""
:param d: A local directory where the file has been saved before. If `None`, the content \
will be read from the file's URL.
"""
if self.path_in_zip:
zipcontent = None | zipcontent = self.local_path(d).read_bytes()
if self.url:
zipcontent = self.url_reader[self.scheme](
self.parsed_url, Mimetype('application/zip'))
if zipcontent:
zf = zipfile.ZipFile(io.BytesIO(zipcontent))
return self.mimetype.read(zf.read(self.path_in_zip))
return # pragma: no cover
if d:
return self.mimetype.read(self.local_path(d).read_bytes())
if self.url:
try:
return self.url_reader[self.scheme or 'file'](self.parsed_url, self.mimetype)
except KeyError:
raise ValueError('Unsupported URL scheme: {}'.format(self.scheme))
def save(self, d: pathlib.Path) -> pathlib.Path:
"""
Saves the content of `File` in directory `d`.
:return: Path of the local file where the content has been saved.
.. note::
We use the identifier of the media item (i.e. the content of the ID column of the
associated row) as the stem of the file to be written.
"""
p = self.local_path(d)
if not p.exists():
if self.path_in_zip:
with zipfile.ZipFile(p, 'w', compression=zipfile.ZIP_DEFLATED) as zf:
zf.writestr(self.path_in_zip, self.mimetype.write(self.read()))
else:
self.mimetype.write(self.read(), p)
return p
class MediaTable(pycldf.ComponentWithValidation):
"""
Container class for a `Dataset`'s media items.
"""
def __init__(self, ds: pycldf.Dataset):
super().__init__(ds)
self.url_col = ds.get(('MediaTable', 'http://cldf.clld.org/v1.0/terms.rdf#downloadUrl'))
self.path_in_zip_col = ds.get(
(self.component, 'http://cldf.clld.org/v1.0/terms.rdf#pathInZip'))
if self.table and not self.url_col:
for col in self.table.tableSchema.columns:
if col.propertyUrl and col.propertyUrl == 'http://www.w3.org/ns/dcat#downloadUrl':
self.url_col = col
break
self.id_col = ds[self.component, 'http://cldf.clld.org/v1.0/terms.rdf#id']
self.mimetype_col = ds[self.component, 'http://cldf.clld.org/v1.0/terms.rdf#mediaType']
@lazyproperty
def url_reader(self):
return {
'http': read_http_url,
'https': read_http_url,
'data': read_data_url,
# file: URLs are interpreted relative to the location of the metadata file:
'file': functools.partial(read_file_url, self.ds.directory),
}
def __iter__(self) -> typing.Generator[File, None, None]:
for row in self.table:
yield File(self, row)
def validate(self, success: bool = True, log: logging.Logger = None) -> bool:
for file in self:
if not file.url:
success = False
log_or_raise('File without URL: {}'.format(file.id), log=log)
elif file.scheme == 'file':
try:
file.read()
except FileNotFoundError:
success = False
log_or_raise('Non-existing local file referenced: {}'.format(file.id), log=log)
except Exception as e: # pragma: no cover
success = False
log_or_raise('Error reading {}: {}'.format(file.id, e), log=log)
elif file.scheme == 'data':
try:
file.read()
except Exception as e: # pragma: no cover
success = False
log_or_raise('Error reading {}: {}'.format(file.id, e), log=log)
return success
Media = MediaTable
class Mimetype:
"""
A media type specification.
:ivar type: The (main) type as `str`.
:ivar subtype: The subtype as `str`.
:ivar encoding: The encoding specified with a "charset" parameter.
"""
def __init__(self, s):
self.string = s
mtype, _, param = self.string.partition(';')
param = param.strip()
self.type, _, self.subtype = mtype.partition('/')
if param.startswith('charset='):
self.encoding = param.replace('charset=', '').strip()
else:
self.encoding = 'utf8'
def __eq__(self, other):
return self.string == other if isinstance(other, str) else \
(self.type, self.subtype) == (other.type, other.subtype)
@property
def is_text(self) -> bool:
return self.type == 'text'
@property
def extension(self) -> typing.Union[None, str]:
return mimetypes.guess_extension('{}/{}'.format(self.type, self.subtype))
def read(self, data: bytes) -> typing.Union[str, bytes]:
if self.is_text and not isinstance(data, str):
return data.decode(self.encoding)
return data
def | if d: | random_line_split |
media.py | 64
import typing
import logging
import pathlib
import zipfile
import functools
import mimetypes
import urllib.parse
import urllib.request
from clldutils.misc import lazyproperty, log_or_raise
import pycldf
from pycldf import orm
from csvw.datatypes import anyURI
__all__ = ['Mimetype', 'MediaTable', 'File']
class File:
"""
A `File` represents a row in a MediaTable, providing functionality to access the contents.
:ivar id: The ID of the item.
:ivar url: The URL (as `str`) to download the content associated with the item.
`File` supports media files within ZIP archives as specified in CLDF 1.2. I.e.
- :meth:`read` will extract the specified file from a downloaded ZIP archive and
- :meth:`save` will write a (deflated) ZIP archive containing the specified file as single \
member.
"""
def __init__(self, media: 'MediaTable', row: dict):
self.row = row
self.id = row[media.id_col.name]
self._mimetype = row[media.mimetype_col.name]
self.url = None
self.scheme = None
self.url_reader = media.url_reader
self.path_in_zip = row.get(media.path_in_zip_col.name) if media.path_in_zip_col else None
if media.url_col:
# 1. Look for a downloadUrl property:
self.url = row[media.url_col.name]
else:
# 2. Expand valueUrl property:
if media.id_col and media.id_col.valueUrl:
self.url = media.id_col.valueUrl.expand(**row)
if self.url:
self.url = anyURI.to_string(self.url)
self.parsed_url = urllib.parse.urlparse(self.url)
self.scheme = self.parsed_url.scheme
@classmethod
def from_dataset(
cls, ds: pycldf.Dataset, row_or_object: typing.Union[dict, orm.Media]) -> 'File':
"""
Factory method to instantiate a `File` bypassing the `Media` wrapper.
"""
return cls(
MediaTable(ds),
row_or_object.data if isinstance(row_or_object, orm.Media) else row_or_object)
def __getitem__(self, item):
"""
Access to the underlying row `dict`.
"""
return self.row[item]
@lazyproperty
def mimetype(self) -> 'Mimetype':
"""
The `Mimetype` object associated with the item.
While the mediaType column is required by the CLDF spec, this might be disabled.
If so, we use "out-of-band" methods to figure out a mimetype for the file.
"""
if self._mimetype:
# We take the mimetype reported in the dataset as authoritative.
return Mimetype(self._mimetype)
# If no mimetype is specified explicitly, we fall back to mimetype detection mechanisms:
if self.scheme in ['file', 'http', 'https']:
mt, _ = mimetypes.guess_type(self.parsed_url.path)
if mt:
return Mimetype(mt)
if self.scheme == 'data':
mt, _, data = self.parsed_url.path.partition(',')
if mt.endswith(';base64'):
mt = mt.replace(';base64', '').strip()
if mt:
return Mimetype(mt)
# There's an explicit default mimetype for data URLs!
return Mimetype('text/plain;charset=US-ASCII')
if self.scheme in ['http', 'https']:
res = urllib.request.urlopen(urllib.request.Request(self.url, method="HEAD"))
mt = res.headers.get('Content-Type')
if mt:
return Mimetype(mt)
return Mimetype('application/octet-stream')
def local_path(self, d: pathlib.Path) -> pathlib.Path:
"""
:return: The expected path of the file in the directory `d`.
"""
return d.joinpath('{}{}'.format(
self.id, '.zip' if self.path_in_zip else (self.mimetype.extension or '')))
def | (self, d=None) -> typing.Union[None, str, bytes]:
"""
:param d: A local directory where the file has been saved before. If `None`, the content \
will be read from the file's URL.
"""
if self.path_in_zip:
zipcontent = None
if d:
zipcontent = self.local_path(d).read_bytes()
if self.url:
zipcontent = self.url_reader[self.scheme](
self.parsed_url, Mimetype('application/zip'))
if zipcontent:
zf = zipfile.ZipFile(io.BytesIO(zipcontent))
return self.mimetype.read(zf.read(self.path_in_zip))
return # pragma: no cover
if d:
return self.mimetype.read(self.local_path(d).read_bytes())
if self.url:
try:
return self.url_reader[self.scheme or 'file'](self.parsed_url, self.mimetype)
except KeyError:
raise ValueError('Unsupported URL scheme: {}'.format(self.scheme))
def save(self, d: pathlib.Path) -> pathlib.Path:
"""
Saves the content of `File` in directory `d`.
:return: Path of the local file where the content has been saved.
.. note::
We use the identifier of the media item (i.e. the content of the ID column of the
associated row) as the stem of the file to be written.
"""
p = self.local_path(d)
if not p.exists():
if self.path_in_zip:
with zipfile.ZipFile(p, 'w', compression=zipfile.ZIP_DEFLATED) as zf:
zf.writestr(self.path_in_zip, self.mimetype.write(self.read()))
else:
self.mimetype.write(self.read(), p)
return p
class MediaTable(pycldf.ComponentWithValidation):
"""
Container class for a `Dataset`'s media items.
"""
def __init__(self, ds: pycldf.Dataset):
super().__init__(ds)
self.url_col = ds.get(('MediaTable', 'http://cldf.clld.org/v1.0/terms.rdf#downloadUrl'))
self.path_in_zip_col = ds.get(
(self.component, 'http://cldf.clld.org/v1.0/terms.rdf#pathInZip'))
if self.table and not self.url_col:
for col in self.table.tableSchema.columns:
if col.propertyUrl and col.propertyUrl == 'http://www.w3.org/ns/dcat#downloadUrl':
self.url_col = col
break
self.id_col = ds[self.component, 'http://cldf.clld.org/v1.0/terms.rdf#id']
self.mimetype_col = ds[self.component, 'http://cldf.clld.org/v1.0/terms.rdf#mediaType']
@lazyproperty
def url_reader(self):
return {
'http': read_http_url,
'https': read_http_url,
'data': read_data_url,
# file: URLs are interpreted relative to the location of the metadata file:
'file': functools.partial(read_file_url, self.ds.directory),
}
def __iter__(self) -> typing.Generator[File, None, None]:
for row in self.table:
yield File(self, row)
def validate(self, success: bool = True, log: logging.Logger = None) -> bool:
for file in self:
if not file.url:
success = False
log_or_raise('File without URL: {}'.format(file.id), log=log)
elif file.scheme == 'file':
try:
file.read()
except FileNotFoundError:
success = False
log_or_raise('Non-existing local file referenced: {}'.format(file.id), log=log)
except Exception as e: # pragma: no cover
success = False
log_or_raise('Error reading {}: {}'.format(file.id, e), log=log)
elif file.scheme == 'data':
try:
file.read()
except Exception as e: # pragma: no cover
success = False
log_or_raise('Error reading {}: {}'.format(file.id, e), log=log)
return success
Media = MediaTable
class Mimetype:
"""
A media type specification.
:ivar type: The (main) type as `str`.
:ivar subtype: The subtype as `str`.
:ivar encoding: The encoding specified with a "charset" parameter.
"""
def __init__(self, s):
self.string = s
mtype, _, param = self.string.partition(';')
param = param.strip()
self.type, _, self.subtype = mtype.partition('/')
if param.startswith('charset='):
self.encoding = param.replace('charset=', '').strip()
else:
self.encoding = 'utf8'
def __eq__(self, other):
return self.string == other if isinstance(other, str) else \
(self.type, self.subtype) == (other.type, other.subtype)
@property
def is_text(self) -> bool:
return self.type == 'text'
@property
def extension(self) -> typing.Union[None, str]:
return mimetypes.guess_extension('{}/{}'.format(self.type, self.subtype))
def read(self, data: bytes) -> typing.Union[str, bytes]:
if self.is_text and not isinstance(data, str):
return data.decode(self.encoding)
return data
| read | identifier_name |
media.py | 64
import typing
import logging
import pathlib
import zipfile
import functools
import mimetypes
import urllib.parse
import urllib.request
from clldutils.misc import lazyproperty, log_or_raise
import pycldf
from pycldf import orm
from csvw.datatypes import anyURI
__all__ = ['Mimetype', 'MediaTable', 'File']
class File:
"""
A `File` represents a row in a MediaTable, providing functionality to access the contents.
:ivar id: The ID of the item.
:ivar url: The URL (as `str`) to download the content associated with the item.
`File` supports media files within ZIP archives as specified in CLDF 1.2. I.e.
- :meth:`read` will extract the specified file from a downloaded ZIP archive and
- :meth:`save` will write a (deflated) ZIP archive containing the specified file as single \
member.
"""
def __init__(self, media: 'MediaTable', row: dict):
self.row = row
self.id = row[media.id_col.name]
self._mimetype = row[media.mimetype_col.name]
self.url = None
self.scheme = None
self.url_reader = media.url_reader
self.path_in_zip = row.get(media.path_in_zip_col.name) if media.path_in_zip_col else None
if media.url_col:
# 1. Look for a downloadUrl property:
self.url = row[media.url_col.name]
else:
# 2. Expand valueUrl property:
|
if self.url:
self.url = anyURI.to_string(self.url)
self.parsed_url = urllib.parse.urlparse(self.url)
self.scheme = self.parsed_url.scheme
@classmethod
def from_dataset(
cls, ds: pycldf.Dataset, row_or_object: typing.Union[dict, orm.Media]) -> 'File':
"""
Factory method to instantiate a `File` bypassing the `Media` wrapper.
"""
return cls(
MediaTable(ds),
row_or_object.data if isinstance(row_or_object, orm.Media) else row_or_object)
def __getitem__(self, item):
"""
Access to the underlying row `dict`.
"""
return self.row[item]
@lazyproperty
def mimetype(self) -> 'Mimetype':
"""
The `Mimetype` object associated with the item.
While the mediaType column is required by the CLDF spec, this might be disabled.
If so, we use "out-of-band" methods to figure out a mimetype for the file.
"""
if self._mimetype:
# We take the mimetype reported in the dataset as authoritative.
return Mimetype(self._mimetype)
# If no mimetype is specified explicitly, we fall back to mimetype detection mechanisms:
if self.scheme in ['file', 'http', 'https']:
mt, _ = mimetypes.guess_type(self.parsed_url.path)
if mt:
return Mimetype(mt)
if self.scheme == 'data':
mt, _, data = self.parsed_url.path.partition(',')
if mt.endswith(';base64'):
mt = mt.replace(';base64', '').strip()
if mt:
return Mimetype(mt)
# There's an explicit default mimetype for data URLs!
return Mimetype('text/plain;charset=US-ASCII')
if self.scheme in ['http', 'https']:
res = urllib.request.urlopen(urllib.request.Request(self.url, method="HEAD"))
mt = res.headers.get('Content-Type')
if mt:
return Mimetype(mt)
return Mimetype('application/octet-stream')
def local_path(self, d: pathlib.Path) -> pathlib.Path:
"""
:return: The expected path of the file in the directory `d`.
"""
return d.joinpath('{}{}'.format(
self.id, '.zip' if self.path_in_zip else (self.mimetype.extension or '')))
def read(self, d=None) -> typing.Union[None, str, bytes]:
"""
:param d: A local directory where the file has been saved before. If `None`, the content \
will read from the file's URL.
"""
if self.path_in_zip:
zipcontent = None
if d:
zipcontent = self.local_path(d).read_bytes()
if self.url:
zipcontent = self.url_reader[self.scheme](
self.parsed_url, Mimetype('application/zip'))
if zipcontent:
zf = zipfile.ZipFile(io.BytesIO(zipcontent))
return self.mimetype.read(zf.read(self.path_in_zip))
return # pragma: no cover
if d:
return self.mimetype.read(self.local_path(d).read_bytes())
if self.url:
try:
return self.url_reader[self.scheme or 'file'](self.parsed_url, self.mimetype)
except KeyError:
raise ValueError('Unsupported URL scheme: {}'.format(self.scheme))
def save(self, d: pathlib.Path) -> pathlib.Path:
"""
Saves the content of `File` in directory `d`.
:return: Path of the local file where the content has been saved.
.. note::
We use the identifier of the media item (i.e. the content of the ID column of the
associated row) as stem of the file to be written.
"""
p = self.local_path(d)
if not p.exists():
if self.path_in_zip:
with zipfile.ZipFile(p, 'w', compression=zipfile.ZIP_DEFLATED) as zf:
zf.writestr(self.path_in_zip, self.mimetype.write(self.read()))
else:
self.mimetype.write(self.read(), p)
return p
class MediaTable(pycldf.ComponentWithValidation):
"""
Container class for a `Dataset`'s media items.
"""
def __init__(self, ds: pycldf.Dataset):
super().__init__(ds)
self.url_col = ds.get(('MediaTable', 'http://cldf.clld.org/v1.0/terms.rdf#downloadUrl'))
self.path_in_zip_col = ds.get(
(self.component, 'http://cldf.clld.org/v1.0/terms.rdf#pathInZip'))
if self.table and not self.url_col:
for col in self.table.tableSchema.columns:
if col.propertyUrl and col.propertyUrl == 'http://www.w3.org/ns/dcat#downloadUrl':
self.url_col = col
break
self.id_col = ds[self.component, 'http://cldf.clld.org/v1.0/terms.rdf#id']
self.mimetype_col = ds[self.component, 'http://cldf.clld.org/v1.0/terms.rdf#mediaType']
@lazyproperty
def url_reader(self):
return {
'http': read_http_url,
'https': read_http_url,
'data': read_data_url,
# file: URLs are interpreted relative to the location of the metadata file:
'file': functools.partial(read_file_url, self.ds.directory),
}
def __iter__(self) -> typing.Generator[File, None, None]:
for row in self.table:
yield File(self, row)
def validate(self, success: bool = True, log: logging.Logger = None) -> bool:
for file in self:
if not file.url:
success = False
log_or_raise('File without URL: {}'.format(file.id), log=log)
elif file.scheme == 'file':
try:
file.read()
except FileNotFoundError:
success = False
log_or_raise('Non-existing local file referenced: {}'.format(file.id), log=log)
except Exception as e: # pragma: no cover
success = False
log_or_raise('Error reading {}: {}'.format(file.id, e), log=log)
elif file.scheme == 'data':
try:
file.read()
except Exception as e: # pragma: no cover
success = False
log_or_raise('Error reading {}: {}'.format(file.id, e), log=log)
return success
Media = MediaTable
class Mimetype:
"""
A media type specification.
:ivar type: The (main) type as `str`.
:ivar subtype: The subtype as `str`.
:ivar encoding: The encoding specified with a "charset" parameter.
"""
def __init__(self, s):
self.string = s
mtype, _, param = self.string.partition(';')
param = param.strip()
self.type, _, self.subtype = mtype.partition('/')
if param.startswith('charset='):
self.encoding = param.replace('charset=', '').strip()
else:
self.encoding = 'utf8'
def __eq__(self, other):
return self.string == other if isinstance(other, str) else \
(self.type, self.subtype) == (other.type, other.subtype)
@property
def is_text(self) -> bool:
return self.type == 'text'
@property
def extension(self) -> typing.Union[None, str]:
return mimetypes.guess_extension('{}/{}'.format(self.type, self.subtype))
def read(self, data: bytes) -> typing.Union[str, bytes]:
if self.is_text and not isinstance(data, str):
return data.decode(self.encoding)
return data
| if media.id_col and media.id_col.valueUrl:
self.url = media.id_col.valueUrl.expand(**row) | conditional_block |
ui.js | AVANT DE RECEVOIR-UN-DOIGT()
//
if (doigt.estLeve) {
if (dessus) {
//on a eu un mouseDOWN
if (this.actionOnClick) {
//console.log("mUp YES YES YES ", this)
this.actionOnClick( doigt )
ok = true
}
else {
console.log("mUp (without an action?)")
}
}
else console.log("on MOUSE UP à côté?", doigt, this )
dessus = false
}
else {
ok = true
//
}
//-------------ajustement visuel
if (dessus) {
this.scale.x = this.scale.y = 0.98
this.alpha = 1.0
}
else {
this.scale.x = this.scale.y = 1.0
this.alpha = baseAlpha
}
return ok
}
sp.actionOnClick = actionOnClick
/*
if (!bigw) bigw = w
if (!bigh) bigh = h
sp.interactive = true;
sp.hitArea = new PIXI.Rectangle(-bigw/2, -bigh/2, bigw, bigh);
*/
//sp.inside = false
sp.codeName = "btn_"+icoName
//sp.paused = function(){ return false } //default
/*
sp.pushedOrNot = function( pushed ) {
if (pushed) {
this.inside = true
this.scale.x = this.scale.y = 0.98
this.alpha = 1.0
return
}
this.inside = false
this.scale.x = this.scale.y = 1.0
this.alpha = baseAlpha
}
function myDown( ) {
//konsole.log("mDown!!")
//konsole.log(eventData)
if (!sp.paused()) {
sp.inside = true
sp.scale.x = sp.scale.y = 0.98
sp.alpha = 1.0
}
//eventData.stopPropagation();
}
function myLeave( ) {
sp.inside = false
sp.scale.x = sp.scale.y = 1.0
sp.alpha = baseAlpha
}
function myUp( ) {
if (!sp.paused()) {
if (actionOnClick) {
if (this.inside) actionOnClick( )
}
else {
console.log("mUp (without an action?)")
}
}
myLeave( )
//eventData.stopPropagation();
}
*/
//sp.mousedown = myDown
//sp.mouseup = myUp
//sp.mouseout = myLeave
//sp.touchstart = myDown
//sp.touchend = myUp
//sp.touchendoutside = myLeave
//sp.on('mousedown', sp.myDown);
//sp.on('touchstart', sp.myDown);
//sp.on('mouseup', sp.myUp);
//sp.on('touchend', sp.myUp);
return sp
}
//étaient dans ANIM
function distanceEntre( p1, p2 ) {
return {
x: (p2.x - p1.x),
y: (p2.y - p1.y)
}
}
ui.distanceEntre = distanceEntre
function hypothenuseCarree( pt ){
return (pt.x*pt.x)+(pt.y*pt.y)
}
function calcVitesse( dlst, derniers ) {
if (derniers==null || derniers==undefined) derniers = 10
//
var tot,i,j,d,sum,ponderation, a
ponderation = 0.0
sum = {x:0, y:0}
tot = dlst.length
a = Math.max( 0, tot-derniers )
for(i=a, j=1; i<tot; i++, j++) {
d = dlst[i]
sum.x = sum.x + (d.x * j)
sum.y = sum.y + (d.y * j)
ponderation += j
//onsole.log( " j:", (i+1), "d:",d, "sum:",sum, "ponderation:",ponderation)
}
sum.x = sum.x / ponderation
sum.y = sum.y / ponderation
//onsole.log( " sum:", sum )
return sum
}
ui.installSwipeInteractivity = function( layer ) {
layer.currentPosition = {x:0, y:0}
layer.deltas = []
layer.cumulerUnDelta = function( newPosition ) {
var ceDelta
this.lastPosition = this.currentPosition
this.currentPosition = newPosition
ceDelta = distanceEntre(this.lastPosition, newPosition)
this.deltas.push( ceDelta )
}
layer.doigtQuiPousse = function( doigt ){
//onsole.log("doigtQuiPousse........", this, doigt )
if (doigt.target == null) {
this.startPosition = pt(doigt.dernPosition)
this.currentPosition = this.startPosition
this.deltas = []
//beginSwipe
this.initialPosition = pt( this.position )
//onsole.log("beginSwipe?", doigt, this )
}
else if (doigt.estLeve) {
//fin
this.cumulerUnDelta( pt(doigt.dernPosition) )
//
this.vitesseLachee = calcVitesse( this.deltas )
this.deltas = []
//
var d, delta
delta = distanceEntre( this.startPosition, this.currentPosition )
//d = hypothenuseCarree( delta )
//onsole.log("...endSwipe! delta =", delta, " distance:",d, " vitesseLachee:",this.vitesseLachee)
//if (d < 16) {
// //onsole.log("whatIsReallyInteractive.actionClique() ",ev )
// if (this.actionClique) this.actionClique( doigt )
//}
if (this.endSwipe) {
this.endSwipe( delta, this.vitesseLachee )
}
}
else {
//durant
this.cumulerUnDelta( pt(doigt.dernPosition) )
//
var delta = distanceEntre( this.startPosition, this.currentPosition )
var pos = this.initialPosition
this.position.x = pos.x + delta.x
}
}
}
var path = svp_paths( "app/media/" )
//var doigtCouleurs = [ '#333', '#bbb', '#f00', '#aa0', '#0f0', '#44f', '#b33', '#4f4', '#f72', '#ff7', '#a4a', '#449' ]
//debog doigts
function prepareDoigts( layer, tot ) {
var i,pts,pt,circ,c,h
pts = []
for(i=0;i<tot;i++) {
//h = doigtCouleurs[i]
//var c = MISC.hex2rgb(h)
//circ = ANIM.getPetitCercle( layer, 0, 0, 70, c.r, c.g, c.b, 64 )
circ = PIXI.Sprite.fromImage( path +"doigt.png" )
layer.addChild( circ )
circ.anchor.x = circ.anchor.y = 0.5;
circ.scale.x = circ.scale.y = 1.5;
circ.alpha = 0.35
circ.visible = false
pts.push( circ )
}
return pts
}
var tempPoint
tempPoint = {x:0, y:0}
function showDoigts( luimeme, someFingers ) {
| var i,tot,d,circ,p, zonesDoigts
zonesDoigts = luimeme.zonesDoigts
tot = zonesDoigts.length
for(i=0; i<tot; i++) {
circ = zonesDoigts[i]
circ.visible = false
//c.position.x = -100
//c.position.y = -100
}
var layer = luimeme.layer
tot = someFingers.length
for(i=0; i<tot; i++) {
d = someFingers[i]
if (!d.estLeve) {
circ = zonesDoigts[ d.numero ]
xy = d.dernPosition
layer.worldTransform.applyInverse( xy, tempPoint );
circ.visible = true
circ.position.x = tempPoint.x
circ.position.y = tempPoint.y | identifier_body |
|
ui.js | ==undefined) derniers = 10
//
var tot,i,j,d,sum,ponderation, a
ponderation = 0.0
sum = {x:0, y:0}
tot = dlst.length
a = Math.max( 0, tot-derniers )
for(i=a, j=1; i<tot; i++, j++) {
d = dlst[i]
sum.x = sum.x + (d.x * j)
sum.y = sum.y + (d.y * j)
ponderation += j
//onsole.log( " j:", (i+1), "d:",d, "sum:",sum, "ponderation:",ponderation)
}
sum.x = sum.x / ponderation
sum.y = sum.y / ponderation
//onsole.log( " sum:", sum )
return sum
}
ui.installSwipeInteractivity = function( layer ) {
layer.currentPosition = {x:0, y:0}
layer.deltas = []
layer.cumulerUnDelta = function( newPosition ) {
var ceDelta
this.lastPosition = this.currentPosition
this.currentPosition = newPosition
ceDelta = distanceEntre(this.lastPosition, newPosition)
this.deltas.push( ceDelta )
}
layer.doigtQuiPousse = function( doigt ){
//onsole.log("doigtQuiPousse........", this, doigt )
if (doigt.target == null) {
this.startPosition = pt(doigt.dernPosition)
this.currentPosition = this.startPosition
this.deltas = []
//beginSwipe
this.initialPosition = pt( this.position )
//onsole.log("beginSwipe?", doigt, this )
}
else if (doigt.estLeve) {
//fin
this.cumulerUnDelta( pt(doigt.dernPosition) )
//
this.vitesseLachee = calcVitesse( this.deltas )
this.deltas = []
//
var d, delta
delta = distanceEntre( this.startPosition, this.currentPosition )
//d = hypothenuseCarree( delta )
//onsole.log("...endSwipe! delta =", delta, " distance:",d, " vitesseLachee:",this.vitesseLachee)
//if (d < 16) {
// //onsole.log("whatIsReallyInteractive.actionClique() ",ev )
// if (this.actionClique) this.actionClique( doigt )
//}
if (this.endSwipe) {
this.endSwipe( delta, this.vitesseLachee )
}
}
else {
//durant
this.cumulerUnDelta( pt(doigt.dernPosition) )
//
var delta = distanceEntre( this.startPosition, this.currentPosition )
var pos = this.initialPosition
this.position.x = pos.x + delta.x
}
}
}
var path = svp_paths( "app/media/" )
//var doigtCouleurs = [ '#333', '#bbb', '#f00', '#aa0', '#0f0', '#44f', '#b33', '#4f4', '#f72', '#ff7', '#a4a', '#449' ]
//debog doigts
function prepareDoigts( layer, tot ) {
var i,pts,pt,circ,c,h
pts = []
for(i=0;i<tot;i++) {
//h = doigtCouleurs[i]
//var c = MISC.hex2rgb(h)
//circ = ANIM.getPetitCercle( layer, 0, 0, 70, c.r, c.g, c.b, 64 )
circ = PIXI.Sprite.fromImage( path +"doigt.png" )
layer.addChild( circ )
circ.anchor.x = circ.anchor.y = 0.5;
circ.scale.x = circ.scale.y = 1.5;
circ.alpha = 0.35
circ.visible = false
pts.push( circ )
}
return pts
}
var tempPoint
tempPoint = {x:0, y:0}
function showDoigts( luimeme, someFingers ) {
var i,tot,d,circ,p, zonesDoigts
zonesDoigts = luimeme.zonesDoigts
tot = zonesDoigts.length
for(i=0; i<tot; i++) {
circ = zonesDoigts[i]
circ.visible = false
//c.position.x = -100
//c.position.y = -100
}
var layer = luimeme.layer
tot = someFingers.length
for(i=0; i<tot; i++) {
d = someFingers[i]
if (!d.estLeve) {
circ = zonesDoigts[ d.numero ]
xy = d.dernPosition
layer.worldTransform.applyInverse( xy, tempPoint );
circ.visible = true
circ.position.x = tempPoint.x
circ.position.y = tempPoint.y
}
}
}
function fouilleUnPeu( ceci, tempPoint ) {
//console.log("..fouilleUnPeu() ceci=", ceci)
var children,i,tot,objet, enfant
children = ceci.children
tot = children.length - 1
for(i=tot; i>=0 ; i--) {
objet = children[i]
if (objet.contientDesTouchables) {
enfant = fouilleUnPeu( objet, tempPoint )
if (enfant != null) return enfant
}
if (objet.visible && objet.recoitUnDoigt && (!objet.horsService)) {
if (!objet.hitArea) objet.updateHitArea()
//
if (objet.hitArea.contains( tempPoint.x, tempPoint.y )) {
//onsole.log(".....fouilleUnPeu() objet.recoitUnDoigt !! ", objet.hitArea, tempPoint, objet )
return objet
}
}
}
if (ceci.recoitUnDoigt && ceci.infini) {
console.log("ceci.recoitUnDoigt && ceci.infini : ",ceci)
return ceci
}
//
return null
}
ui.installerGestionDesDoigts = function( ceLayer, avecTracesDeDoigts ) {
if (avecTracesDeDoigts) ceLayer.zonesDoigts = prepareDoigts( ceLayer.layer, 10 )
ceLayer.doigtsDessus = function ( doigtsQuiTouchent ) {
if (avecTracesDeDoigts) showDoigts( this, doigtsQuiTouchent )
//--1--
// si on touche un bouton, on ignore les autres doigts?
// OU si un doigt, les boutons sont consultés
// si deux+ alors on pan/swipe etc
var btn,tot,i,layer,children,objet,doigt, dessus
layer = this.layer
var qty = doigtsQuiTouchent.length
if (qty==0) return //
if (qty==1) {
dessus = true
doigt = doigtsQuiTouchent[0]
//layer.worldTransform.applyInverse( doigt.dernPosition, tempPoint );
tempPoint = {x:doigt.dernPosition.x, y:doigt.dernPosition.y}
if (doigt.target == null) {
//cherche un truc qui ferait l'affaire
//NOTE: LE PREMIER LAYER EST réputé "FOUILLABLE"
btn = fouilleUnPeu( layer, tempPoint )
//
//NOT USEFUL ANYMORE:::: peut-être que oui
if (btn==null) {
//onsole.log("On a trouvé aucun btn, on prends")
if (layer.recoitUnDoigt) btn = layer
if (this.recoitUnDoigt) btn = this
//if (btn!=null) console.log("On a trouvé aucun btn, ...alors on prends ceLayer ou THIS")
}
//onsole.log("on a trouvé cet objet : ", btn)
}
else {
//I | CI ON S'EN FOUT SI INSIDE OU NON?
btn = doigt.target
//
if (btn && btn.hitArea) dessus = btn.hitArea.contains( tempPoint.x, tempPoint.y )
//onsole.log("on connait l'objet, YÉ: ", btn.hitArea, dessus, tempPoint )
}
//secu | conditional_block |
|
ui.js | var w,h,ln, coin, scale
scale = this.scales[ icoScale ]
coin = this.coins[ icoScale ]
ln = this.lineSize[ icoScale ]
w = this.sizes[ icoScale ]
h = w - Math.floor(w/10)
// pale 59b6b9
// dark 2f6771
sp = new PIXI.Graphics()
base.addChild( sp )
sp.lineStyle( 0 ) //ln, this.lineColor, 1 )
//sp.lineWidth = ln
sp.beginFill( this.fillColor, 1 )
sp.drawRoundedRect(-w/2, -h/2, w, h, coin)
sp.endFill()
ico = PIXI.Sprite.fromImage( path+'ico_'+icoName+'.png' )
ico.anchor.x = ico.anchor.y = 0.5
ico.scale.x = ico.scale.y = scale
base.btn = sp
sp = base //TRICK ********************************** TRICK
sp.addChild( ico )
sp.ico = ico
//sp.buttonMode = true //???
//sp.interactive = true;
////////sensibleArea( -w/2, -h/2, w, h )
sp.updateHitArea = function() {
this.hitArea = this.getBounds()
//onsole.log("* * * ", this.hitArea, this.parent.position)
}
sp.sensibleArea = function( sx,sy, sw,sh ) {
this.hitArea = new PIXI.Rectangle( sx, sy, sw, sh );
}
sp.recoitUnDoigt = function( doigt, dessus ){
var ok = false
//PAS CONVAINCU QUE UTILE:::: if (sp.paused()) { doigt.estLeve=true; this.inside=false }
//LE "SYSTEME" DOIT METTRE SES BOUTONS HORS-SERVICE ET LE CHECK EST FAIT AVANT DE RECEVOIR-UN-DOIGT()
//
if (doigt.estLeve) {
if (dessus) {
//on a eu un mouseDOWN
if (this.actionOnClick) {
//console.log("mUp YES YES YES ", this)
this.actionOnClick( doigt )
ok = true
}
else {
console.log("mUp (without an action?)")
}
}
else console.log("on MOUSE UP à côté?", doigt, this )
dessus = false
}
else {
ok = true
//
}
//-------------ajustement visuel
if (dessus) {
this.scale.x = this.scale.y = 0.98
this.alpha = 1.0
}
else {
this.scale.x = this.scale.y = 1.0
this.alpha = baseAlpha
}
return ok
}
sp.actionOnClick = actionOnClick
/*
if (!bigw) bigw = w
if (!bigh) bigh = h
sp.interactive = true;
sp.hitArea = new PIXI.Rectangle(-bigw/2, -bigh/2, bigw, bigh);
*/
//sp.inside = false
sp.codeName = "btn_"+icoName
//sp.paused = function(){ return false } //default
/*
sp.pushedOrNot = function( pushed ) {
if (pushed) {
this.inside = true
this.scale.x = this.scale.y = 0.98
this.alpha = 1.0
return
}
this.inside = false
this.scale.x = this.scale.y = 1.0
this.alpha = baseAlpha
}
function myDown( ) {
//konsole.log("mDown!!")
//konsole.log(eventData)
if (!sp.paused()) {
sp.inside = true
sp.scale.x = sp.scale.y = 0.98
sp.alpha = 1.0
}
//eventData.stopPropagation();
}
function myLeave( ) {
sp.inside = false
sp.scale.x = sp.scale.y = 1.0
sp.alpha = baseAlpha
}
function myUp( ) {
if (!sp.paused()) {
if (actionOnClick) {
if (this.inside) actionOnClick( )
}
else {
console.log("mUp (without an action?)")
}
}
myLeave( )
//eventData.stopPropagation();
}
*/
//sp.mousedown = myDown
//sp.mouseup = myUp
//sp.mouseout = myLeave
//sp.touchstart = myDown
//sp.touchend = myUp
//sp.touchendoutside = myLeave
//sp.on('mousedown', sp.myDown);
//sp.on('touchstart', sp.myDown);
//sp.on('mouseup', sp.myUp);
//sp.on('touchend', sp.myUp);
return sp
}
//étaient dans ANIM
function distanceEntre( p1, p2 ) {
return {
x: (p2.x - p1.x),
y: (p2.y - p1.y)
}
}
ui.distanceEntre = distanceEntre
function hypothenuseCarree( pt ){
return (pt.x*pt.x)+(pt.y*pt.y)
}
function calc | st, derniers ) {
if (derniers==null || derniers==undefined) derniers = 10
//
var tot,i,j,d,sum,ponderation, a
ponderation = 0.0
sum = {x:0, y:0}
tot = dlst.length
a = Math.max( 0, tot-derniers )
for(i=a, j=1; i<tot; i++, j++) {
d = dlst[i]
sum.x = sum.x + (d.x * j)
sum.y = sum.y + (d.y * j)
ponderation += j
//onsole.log( " j:", (i+1), "d:",d, "sum:",sum, "ponderation:",ponderation)
}
sum.x = sum.x / ponderation
sum.y = sum.y / ponderation
//onsole.log( " sum:", sum )
return sum
}
ui.installSwipeInteractivity = function( layer ) {
layer.currentPosition = {x:0, y:0}
layer.deltas = []
layer.cumulerUnDelta = function( newPosition ) {
var ceDelta
this.lastPosition = this.currentPosition
this.currentPosition = newPosition
ceDelta = distanceEntre(this.lastPosition, newPosition)
this.deltas.push( ceDelta )
}
layer.doigtQuiPousse = function( doigt ){
//onsole.log("doigtQuiPousse........", this, doigt )
if (doigt.target == null) {
this.startPosition = pt(doigt.dernPosition)
this.currentPosition = this.startPosition
this.deltas = []
//beginSwipe
this.initialPosition = pt( this.position )
//onsole.log("beginSwipe?", doigt, this )
}
else if (doigt.estLeve) {
//fin
this.cumulerUnDelta( pt(doigt.dernPosition) )
//
this.vitesseLachee = calcVitesse( this.deltas )
this.deltas = []
//
var d, delta
delta = distanceEntre( this.startPosition, this.currentPosition )
//d = hypothenuseCarree( delta )
//onsole.log("...endSwipe! delta =", delta, " distance:",d, " vitesseLachee:",this.vitesseLachee)
//if (d < 16) {
// //onsole.log("whatIsReallyInteractive.actionClique() ",ev )
// if (this.actionClique) this.actionClique( doigt )
//}
if (this.endSwipe) {
this.endSwipe( delta, this.vitesseLachee )
}
}
else {
//durant
this.cumulerUnDelta( pt(doigt.dernPosition) )
//
var delta = distanceEntre( this.startPosition, this.currentPosition )
var pos = this.initialPosition
this.position.x = pos.x + delta.x
}
}
}
var path = svp_paths( "app/media/" )
//var doigtCouleurs = [ '#333', '#bbb', '#f00', '#aa0', '#0f0', '#44f', '#b33', '#4f4', '#f72', '#ff7', '#a4a | Vitesse( dl | identifier_name |
ui.js | textColorLT : 0x95e3ff,
path : svp_paths( "app/media/ui/" )
//app_paths.app + '/media/ui/'
}
ui.addBtn = function( layer, icoName, icoScale, actionOnClick, bigw, bigh ) {
var sp,ico
var baseAlpha = 0.8
var base = new PIXI.Container()
layer.addChild( base )
//sp.anchor.x = sp.anchor.y = 0.5
base.scale.x = base.scale.y = 1 //0.3333;
base.alpha = baseAlpha
//sp.position.x = -stgWd2 + 60
//sp.position.y = -stgHd2 + 80
var path = this.path
var w,h,ln, coin, scale
scale = this.scales[ icoScale ]
coin = this.coins[ icoScale ]
ln = this.lineSize[ icoScale ]
w = this.sizes[ icoScale ]
h = w - Math.floor(w/10)
// pale 59b6b9
// dark 2f6771
sp = new PIXI.Graphics()
base.addChild( sp )
sp.lineStyle( 0 ) //ln, this.lineColor, 1 )
//sp.lineWidth = ln
sp.beginFill( this.fillColor, 1 )
sp.drawRoundedRect(-w/2, -h/2, w, h, coin)
sp.endFill()
ico = PIXI.Sprite.fromImage( path+'ico_'+icoName+'.png' )
ico.anchor.x = ico.anchor.y = 0.5
ico.scale.x = ico.scale.y = scale
base.btn = sp
sp = base //TRICK ********************************** TRICK
sp.addChild( ico )
sp.ico = ico
//sp.buttonMode = true //???
//sp.interactive = true;
////////sensibleArea( -w/2, -h/2, w, h )
sp.updateHitArea = function() {
this.hitArea = this.getBounds()
//onsole.log("* * * ", this.hitArea, this.parent.position)
}
sp.sensibleArea = function( sx,sy, sw,sh ) {
this.hitArea = new PIXI.Rectangle( sx, sy, sw, sh );
}
sp.recoitUnDoigt = function( doigt, dessus ){
var ok = false
//PAS CONVAINCU QUE UTILE:::: if (sp.paused()) { doigt.estLeve=true; this.inside=false }
//LE "SYSTEME" DOIT METTRE SES BOUTONS HORS-SERVICE ET LE CHECK EST FAIT AVANT DE RECEVOIR-UN-DOIGT()
//
if (doigt.estLeve) {
if (dessus) {
//on a eu un mouseDOWN
if (this.actionOnClick) {
//console.log("mUp YES YES YES ", this)
this.actionOnClick( doigt )
ok = true
}
else {
console.log("mUp (without an action?)")
}
}
else console.log("on MOUSE UP à côté?", doigt, this )
dessus = false
}
else {
ok = true
//
}
//-------------ajustement visuel
if (dessus) {
this.scale.x = this.scale.y = 0.98
this.alpha = 1.0
}
else {
this.scale.x = this.scale.y = 1.0
this.alpha = baseAlpha
}
return ok
}
sp.actionOnClick = actionOnClick
/*
if (!bigw) bigw = w
if (!bigh) bigh = h
sp.interactive = true;
sp.hitArea = new PIXI.Rectangle(-bigw/2, -bigh/2, bigw, bigh);
*/
//sp.inside = false
sp.codeName = "btn_"+icoName
//sp.paused = function(){ return false } //default
/*
sp.pushedOrNot = function( pushed ) {
if (pushed) {
this.inside = true
this.scale.x = this.scale.y = 0.98
this.alpha = 1.0
return
}
this.inside = false
this.scale.x = this.scale.y = 1.0
this.alpha = baseAlpha
}
function myDown( ) {
//konsole.log("mDown!!")
//konsole.log(eventData)
if (!sp.paused()) {
sp.inside = true
sp.scale.x = sp.scale.y = 0.98
sp.alpha = 1.0
}
//eventData.stopPropagation();
}
function myLeave( ) {
sp.inside = false
sp.scale.x = sp.scale.y = 1.0
sp.alpha = baseAlpha
}
function myUp( ) {
if (!sp.paused()) {
if (actionOnClick) {
if (this.inside) actionOnClick( )
}
else {
console.log("mUp (without an action?)")
}
}
myLeave( )
//eventData.stopPropagation();
}
*/
//sp.mousedown = myDown
//sp.mouseup = myUp
//sp.mouseout = myLeave
//sp.touchstart = myDown
//sp.touchend = myUp
//sp.touchendoutside = myLeave
//sp.on('mousedown', sp.myDown);
//sp.on('touchstart', sp.myDown);
//sp.on('mouseup', sp.myUp);
//sp.on('touchend', sp.myUp);
return sp
}
//étaient dans ANIM
function distanceEntre( p1, p2 ) {
return {
x: (p2.x - p1.x),
y: (p2.y - p1.y)
}
}
ui.distanceEntre = distanceEntre
function hypothenuseCarree( pt ){
return (pt.x*pt.x)+(pt.y*pt.y)
}
function calcVitesse( dlst, derniers ) {
if (derniers==null || derniers==undefined) derniers = 10
//
var tot,i,j,d,sum,ponderation, a
ponderation = 0.0
sum = {x:0, y:0}
tot = dlst.length
a = Math.max( 0, tot-derniers )
for(i=a, j=1; i<tot; i++, j++) {
d = dlst[i]
sum.x = sum.x + (d.x * j)
sum.y = sum.y + (d.y * j)
ponderation += j
//onsole.log( " j:", (i+1), "d:",d, "sum:",sum, "ponderation:",ponderation)
}
sum.x = sum.x / ponderation
sum.y = sum.y / ponderation
//onsole.log( " sum:", sum )
return sum
}
ui.installSwipeInteractivity = function( layer ) {
layer.currentPosition = {x:0, y:0}
layer.deltas = []
layer.cumulerUnDelta = function( newPosition ) {
var ceDelta
this.lastPosition = this.currentPosition
this.currentPosition = newPosition
ceDelta = distanceEntre(this.lastPosition, newPosition)
this.deltas.push( ceDelta )
}
layer.doigtQuiPousse = function( doigt ){
//onsole.log("doigtQuiPousse........", this, doigt )
if (doigt.target == null) {
this.startPosition = pt(doigt.dernPosition)
this.currentPosition = this.startPosition
this.deltas = []
//beginSwipe
this.initialPosition = pt( this.position )
//onsole.log("beginSwipe?", doigt, this )
}
else if (doigt.estLeve) {
//fin
this.cumulerUnDelta( pt(doigt.dernPosition) )
//
this.vitesseLachee = calcVitesse( this.deltas )
this.deltas = []
| fillColor : 0x95e3ff, //0x77d4f4, //0xb2f1ff, //0x95e4f6, ////0x89d3e4, //0x59b6b9,
textColor : 0x6fcbf0, //0x8dd0d5, //0xb7f2ff, //0xdbfbff, ///0xcef6ff, //b2f8f3, | random_line_split |
|
manterGrupoTaxaControle.js | }else{
consultar = true;
}
if (consultar){
//_this.$el.find('#listaGrupoTaxa').removeClass("hidden");
loadCCR.start();
// var codigo=0;
// var nome="teste";
// lista taxas
_this.getCollection().buscar(codigo,nome)
.done(function sucesso(data) {
_this.$el.find('#divListaGrupoTaxaShow').removeClass("hidden");
Retorno.trataRetorno(data, 'grupoTaxa', null, false);
//Inclui o template inicial no el deste controle
_this.$el.find('#divListaGrupoTaxa').html(_this.manterListaTaxaIOFTemplate({grupoTaxa: data.listaRetorno}));
// configura datatable
_this.$el.find('#gridListaGrupoTaxa').dataTable({
'aoColumns' : [ null, null, null, null, { "bSortable": false } ],
'aaSorting': [],
//'oLanguage' : {'sEmptyTable' : 'Nenhum registro encontrado.', 'sLengthMenu': '_MENU_ registros por página'} //Sobrescreve o sEmptyTable do jquery.dataTable.js do fec-web.
'oLanguage' : {'sEmptyTable' : 'Nenhum registro encontrado.', 'sLengthMenu': '_MENU_ registros por página', 'sInfoFiltered' : '(Filtrado _MAX_ do total de entradas)'},
});
//Carrega as mascaras usadas.
// desabilita os botoes
_this.$el.find('a.disabled').on('click', function(evt) {
evt.preventDefault();
return false;
});
listenToDatepickerChange(_this.$el, _this.changed);
loadCCR.stop();
})
.error(function erro(jqXHR) {
Retorno.trataRetorno({codigo: -1, tipo: "ERRO_EXCECAO", mensagem: "Ocorreu um erro ao listar !"}, 'manterGrupo', jqXHR);
loadCCR.stop();
});
return _this;
}
},
getCollection: function () {
if (_this.collection == null || this.collection == undefined)
_this.collection = new GrupoTaxaModel();
return _this.collection;
},
findGrupoTaxas : function(request, response) {
console.log("Manter Grupo Taxa - findGrupoTaxas");
$.when( _this.getCollection().getAutoCompleteNomeGrupoTaxa(request.term) )
.then(function ( data ) { response( _.map(data.listaRetorno, function ( d ) { return { value: d.nomeModeloDocumento, label: d.nomeGrupoTaxa }; }) ); });
},
sair: function () {
loadCCR.start();
console.log("saindo do CCR...");
window.location = 'index.html';
},
novoGrupoTaxa: function () {
_this.id=null;
//Seta as propriedades padrões dos campos titulo e data de inicio da vigência
_this.$el.find('#divTituloForm').html(' Incluir Grupo Taxa');
_this.$el.find('#tipoOper').val("incluir");
_this.$el.find('#divInputNovoCodigo').val("");
_this.$el.find('#divInputNovoNome').val("");
_this.$el.find('#divInputNovoCodigo').prop('disabled', false);
_this.validator.withForm('divFormulario').cleanErrors();
_this.$el.find('#divFormulario').modal('show');
},
limparForm: function() {
_this.$el.find('#codigo').val("");
_this.$el.find('#nomeGrupoTaxa').val("");
_this.$el.find('#divListaGrupoTaxaShow').addClass("hidden");
/*_this.$el.find('#inicioVigenciaFiltro').prop('disabled', false).attr('validators', 'required,data,dataMenor');
_this.validator.withForm('formCadastroIOF').cleanErrors();
_this.$el.find('#formCadastroIOF')[0].reset(); */
},
limparFormToda: function() {
/*_this.$el.find('#inicioVigenciaFiltro').prop('disabled', false).attr('validators', 'required,data,dataMenor');
_this.validator.withForm('formCadastroIOF').cleanErrors();
_this.$el.find('#formCadastroIOF')[0].reset();
_this.validator.withForm('formFiltroTaxaIOF').cleanErrors();
_this.$el.find('#formFiltroTaxaIOF')[0].reset(); */
},
voltar: function () {
_this.validator.withForm('formCadastroGrupoTaxa').cleanErrors();
_this.$el.find('#divFormulario').modal('hide');
},
salvar: function () {
var codigo = _this.$el.find('#divInputNovoCodigo').val();
if(parseInt(codigo) < 0){
_this.$el.find('#divInputNovoCodigo').val("");
_this.validator.withForm('formCadastroGrupoTaxa').validate();
}else{
if (_this.validator.withForm('formCadastroGrupoTaxa').validate()){
_this.$el.find('#divFormulario').modal('hide');
msgCCR.confirma(EMensagemCCR.manterGrupo.MA0046, function() {
loadCCR.start();
var codigo = _this.$el.find('#divInputNovoCodigo').val().replace(/[_.\-\/]/g, '');
var nome = _this.$el.find('#divInputNovoNome').val();
var tipoOper = _this.$el.find('#tipoOper').val();
var grupoTaxa={};
grupoTaxa.codigo=codigo;
grupoTaxa.nome=nome;
grupoTaxa.tipoOper = tipoOper;
_this.codRet=codigo;
_this.nomeRet=nome;
grupoTaxa.id=_this.id;
_this.getCollection().salvar(grupoTaxa)
.done(function sucesso(data){
loadCCR.stop();
if (data.mensagemRetorno == "MA0093") {
Retorno.trataRetorno({codigo: -1, tipo: "ERRO_NEGOCIAL", mensagem: "", idMsg: 'MA0093'}, 'manterGrupo');
//msgCCR.alerta("Convenente não existe no SICLI.!", function () {});
return;
}else{
Retorno.trataRetorno(data, 'manterGrupo');
_this.consultarGrupoTaxa("true");
}
})
.error(function erro(jqXHR){
Retorno.trataRetorno({codigo: -1, tipo: "ERRO_EXCECAO", mensagem: "Ocorreu um erro ao salvar a Grupo Taxa!"}, 'manterGrupo', jqXHR);
loadCCR.stop();
});
});
}
}
},
alterar: function (evt) {
var id = _this.$el.find(evt.currentTarget).data('id');
var nome = _this.$el.find(evt.currentTarget).data('nome');
var codigo = _this.$el.find(evt.currentTarget).data('codigo');
_this.codRet=codigo;
_this.id=id;
_this.$el.find('#divInputNovoCodigo').val(codigo);
_this.$el.find('#divInputNovoCodigo').prop('disabled', true);
_this.$el.find('#divInputNovoNome').val(nome);
_this.$el.find('#tipoOper').val("alterar");
_this.$el.find('#divTituloForm').html(' Alterar Grupo Taxa');
_this.$el.find('#divFormulario').modal('show');
},
remover: function (evt) {
var id = _this.$el.find(evt.currentTarget).data('codigo');
_this.codRet = _this.$el.find('#codigo').val();
_this.nomeRet = _this.$el.find('#nomeGrupoTaxa').val();
msgCCR.confirma(EMensagemCCR.manterGrupo.MA0046, function() {
loadCCR.start();
_this.getCollection().excluir(id)
.done(function sucesso(data){
loadCCR.stop();
Retorno.trataRetorno(data, 'manterGrupo');
_this.consultarGrupoTaxa("true");
})
.error(function erro (jqXHR) {
Retorno.trataRetorno({codigo: 1, tipo: "ERRO_EXCECAO", mensagem: "Ocorreu um erro ao excluir!"}, 'manterGrupo', jqXHR);
loadCCR.stop();
});
});
}, |
atualizar: function(){
_/*this.consultarTaxaIOF();
$('ul.nav a#manterTaxaIOF.links-menu').trigger('click');*/
} | random_line_split |
|
manterGrupoTaxaControle.js | id : null,
codRet : null,
nomeRet : null,
/**
* Função que faz bind das ações e interações da pagina com as funções
* javascript
*
*/
events : {
'click a#btnConsultarGrupoTaxa' : 'consultarGrupoTaxa',
'click a#btnNovoGrupoTaxa' : 'novoGrupoTaxa',
'click a#btnLimparForm' : 'limparForm',
'focus #inputGrupoTaxa' : 'getAutocomplete',
'keydown #inputGrupoTaxa' : 'invokefetch',
'click a#btnSair' : 'sair',
'click a#btnRemoverGrupoTaxa' : 'remover',
'click a#btnAlterarGrupoTaxa' : 'alterar' ,
'click a#btnSalvar' : 'salvar',
'click a#btnVoltar' : 'voltar'
},
/**
* Função padrão de incialização do template html
*
*/
initialize : function() {
console.log("Manter IOF controle - initialize");
// pra nao ter problema de pegar outro 'this'
_this = this;
_this.validator.withErrorRender(new BootstrapErrorRender());
},
render : function() {
console.log("Manter - render");
//Inclui o template inicial no el deste controle
_this.$el.html(_this.manterGrupoTemplate({TaxaIOF: {}}));
//Alinha os botões do popup à direita, com uma margen de 30px
//_this.$el.find('#divFormulario #divAcoesmanterGrupo').css('text-align', 'right');
_this.$el.find('#btnVoltar').css('margin-right', '30px');
//Carrega as mascaras usadas.
loadMaskEl(_this.$el);
listenToDatepickerChange(_this.$el, _this.changed);
_this.$('#nomeGrupoTaxa').autocomplete({ source: $.proxy( _this.findGrupoTaxas, _this), minLength: 3 });
//Sobreescreve submit da form
return _this;
},
consultarGrupoTaxa: function (retorno) {
console.log("Manter - consultarGrupoTaxa");
_this.validator.withForm('formFiltroTaxaIOF').cleanErrors();
codigo = _this.$el.find('#codigo').val();
nome = _this.$el.find('#nomeGrupoTaxa').val();
consultar = false;
if(retorno=="true"){
codigo= _this.codRet;
nome = _this.nomeRet;
consultar = true;
}else if(codigo == "" && nome == ""){
_this.validator.withForm('formFiltroTaxaIOF').validate();
}else{
consultar = true;
}
if (consultar){
//_this.$el | //'oLanguage' : {'sEmptyTable' : 'Nenhum registro encontrado.', 'sLengthMenu': '_MENU_ registros por página'} //Sobrescreve o sEmptyTable do jquery.dataTable.js do fec-web.
'oLanguage' : {'sEmptyTable' : 'Nenhum registro encontrado.', 'sLengthMenu': '_MENU_ registros por página', 'sInfoFiltered' : '(Filtrado _MAX_ do total de entradas)'},
});
//Carrega as mascaras usadas.
// desabilita os botoes
_this.$el.find('a.disabled').on('click', function(evt) {
evt.preventDefault();
return false;
});
listenToDatepickerChange(_this.$el, _this.changed);
loadCCR.stop();
})
.error(function erro(jqXHR) {
Retorno.trataRetorno({codigo: -1, tipo: "ERRO_EXCECAO", mensagem: "Ocorreu um erro ao listar !"}, 'manterGrupo', jqXHR);
loadCCR.stop();
});
return _this;
}
},
getCollection: function () {
if (_this.collection == null || this.collection == undefined)
_this.collection = new GrupoTaxaModel();
return _this.collection;
},
findGrupoTaxas : function(request, response) {
console.log("Manter Grupo Taxa - findGrupoTaxas");
$.when( _this.getCollection().getAutoCompleteNomeGrupoTaxa(request.term) )
.then(function ( data ) { response( _.map(data.listaRetorno, function ( d ) { return { value: d.nomeModeloDocumento, label: d.nomeGrupoTaxa }; }) ); });
},
sair: function () {
loadCCR.start();
console.log("saindo do CCR...");
window.location = 'index.html';
},
novoGrupoTaxa: function () {
_this.id=null;
//Seta as propriedades padrões dos campos titulo e data de inicio da vigência
_this.$el.find('#divTituloForm').html(' Incluir Grupo Taxa');
_this.$el.find('#tipoOper').val("incluir");
_this.$el.find('#divInputNovoCodigo').val("");
_this.$el.find('#divInputNovoNome').val("");
_this.$el.find('#divInputNovoCodigo').prop('disabled', false);
_this.validator.withForm('divFormulario').cleanErrors();
_this.$el.find('#divFormulario').modal('show');
},
limparForm: function() {
_this.$el.find('#codigo').val("");
_this.$el.find('#nomeGrupoTaxa').val("");
_this.$el.find('#divListaGrupoTaxaShow').addClass("hidden");
/*_this.$el.find('#inicioVigenciaFiltro').prop('disabled', false).attr('validators', 'required,data,dataMenor');
_this.validator.withForm('formCadastroIOF').cleanErrors();
_this.$el.find('#formCadastroIOF')[0].reset(); */
},
limparFormToda: function() {
/*_this.$el.find('#inicioVigenciaFiltro').prop('disabled', false).attr('validators', 'required,data,dataMenor');
_this.validator.withForm('formCadastroIOF').cleanErrors();
_this.$el.find('#formCadastroIOF')[0].reset();
_this.validator.withForm('formFiltroTaxaIOF').cleanErrors();
_this.$el.find('#formFiltroTaxaIOF')[0].reset(); */
},
voltar: function () {
_this.validator.withForm('formCadastroGrupoTaxa').cleanErrors();
_this.$el.find('#divFormulario').modal('hide');
},
salvar: function () {
var codigo = _this.$el.find('#divInputNovoCodigo').val();
if(parseInt(codigo) < 0){
_this.$el.find('#divInputNovoCodigo').val("");
_this.validator.withForm('formCadastroGrupoTaxa').validate();
}else{
if (_this.validator.withForm('formCadastroGrupoTaxa').validate()){
_this.$el.find('#divFormulario').modal('hide');
msgCCR.confirma(EMensagemCCR.manterGrupo.MA0046, function() {
loadCCR.start();
var codigo = _this.$el.find('#divInputNovoCodigo').val().replace(/[_.\-\/]/g, '');
var nome = _this.$el.find('#divInputNovoNome').val();
var tipoOper = _this.$el.find('#tipoOper').val();
var grupoTaxa={};
grupoTaxa.codigo=codigo;
grupoTaxa.nome=nome;
grupoTaxa.tipoOper = tipoOper;
| .find('#listaGrupoTaxa').removeClass("hidden");
loadCCR.start();
// var codigo=0;
// var nome="teste";
// lista taxas
_this.getCollection().buscar(codigo,nome)
.done(function sucesso(data) {
_this.$el.find('#divListaGrupoTaxaShow').removeClass("hidden");
Retorno.trataRetorno(data, 'grupoTaxa', null, false);
//Inclui o template inicial no el deste controle
_this.$el.find('#divListaGrupoTaxa').html(_this.manterListaTaxaIOFTemplate({grupoTaxa: data.listaRetorno}));
// configura datatable
_this.$el.find('#gridListaGrupoTaxa').dataTable({
'aoColumns' : [ null, null, null, null, { "bSortable": false } ],
'aaSorting': [], | conditional_block |
bip32.rs | _be_bytes(n as u64 + (1 << 31), 4, |raw| hmac.input(raw));
}
}
hmac.raw_result(result.as_mut_slice());
let mut sk = try!(SecretKey::from_slice(result.slice_to(32)).map_err(EcdsaError));
try!(sk.add_assign(&self.secret_key).map_err(EcdsaError));
Ok(ExtendedPrivKey {
network: self.network,
depth: self.depth + 1,
parent_fingerprint: self.fingerprint(),
child_number: i,
secret_key: sk,
chain_code: ChainCode::from_slice(result.slice_from(32))
})
}
/// Returns the HASH160 of the chaincode
pub fn identifier(&self) -> [u8, ..20] {
let mut sha2_res = [0, ..32];
let mut ripemd_res = [0, ..20];
// Compute extended public key
let pk = ExtendedPubKey::from_private(self);
// Do SHA256 of just the ECDSA pubkey
let mut sha2 = Sha256::new();
sha2.input(pk.public_key.as_slice());
sha2.result(sha2_res.as_mut_slice());
// do RIPEMD160
let mut ripemd = Ripemd160::new();
ripemd.input(sha2_res.as_slice());
ripemd.result(ripemd_res.as_mut_slice());
// Return
ripemd_res
}
/// Returns the first four bytes of the identifier
pub fn fingerprint(&self) -> Fingerprint {
Fingerprint::from_slice(self.identifier().slice_to(4))
}
}
impl ExtendedPubKey {
/// Derives a public key from a private key
pub fn from_private(sk: &ExtendedPrivKey) -> ExtendedPubKey {
secp256k1::init();
ExtendedPubKey {
network: sk.network,
depth: sk.depth,
parent_fingerprint: sk.parent_fingerprint,
child_number: sk.child_number,
public_key: PublicKey::from_secret_key(&sk.secret_key, true),
chain_code: sk.chain_code
}
}
/// Public->Public child key derivation
pub fn ckd_pub(&self, i: ChildNumber) -> Result<ExtendedPubKey, Error> {
match i {
Hardened(n) => {
if n >= (1 << 31) {
Err(InvalidChildNumber(i))
} else {
Err(CannotDeriveFromHardenedKey)
}
}
Normal(n) => {
let mut hmac = Hmac::new(Sha512::new(), self.chain_code.as_slice());
hmac.input(self.public_key.as_slice());
u64_to_be_bytes(n as u64, 4, |raw| hmac.input(raw));
let mut result = [0, ..64];
hmac.raw_result(result.as_mut_slice());
let sk = try!(SecretKey::from_slice(result.slice_to(32)).map_err(EcdsaError));
let mut pk = self.public_key.clone();
try!(pk.add_exp_assign(&sk).map_err(EcdsaError));
Ok(ExtendedPubKey {
network: self.network,
depth: self.depth + 1,
parent_fingerprint: self.fingerprint(),
child_number: i,
public_key: pk,
chain_code: ChainCode::from_slice(result.slice_from(32))
})
}
}
}
/// Returns the HASH160 of the chaincode
pub fn identifier(&self) -> [u8, ..20] {
let mut sha2_res = [0, ..32];
let mut ripemd_res = [0, ..20];
// Do SHA256 of just the ECDSA pubkey
let mut sha2 = Sha256::new();
sha2.input(self.public_key.as_slice());
sha2.result(sha2_res.as_mut_slice());
// do RIPEMD160
let mut ripemd = Ripemd160::new();
ripemd.input(sha2_res.as_slice());
ripemd.result(ripemd_res.as_mut_slice());
// Return
ripemd_res
}
/// Returns the first four bytes of the identifier
pub fn fingerprint(&self) -> Fingerprint {
Fingerprint::from_slice(self.identifier().slice_to(4))
}
}
impl ToBase58 for ExtendedPrivKey {
fn base58_layout(&self) -> Vec<u8> {
let mut ret = Vec::with_capacity(78);
ret.push_all(match self.network {
Bitcoin => [0x04, 0x88, 0xAD, 0xE4],
BitcoinTestnet => [0x04, 0x35, 0x83, 0x94]
});
ret.push(self.depth as u8);
ret.push_all(self.parent_fingerprint.as_slice());
match self.child_number {
Hardened(n) => {
u64_to_be_bytes(n as u64 + (1 << 31), 4, |raw| ret.push_all(raw));
}
Normal(n) => {
u64_to_be_bytes(n as u64, 4, |raw| ret.push_all(raw));
}
}
ret.push_all(self.chain_code.as_slice());
ret.push(0);
ret.push_all(self.secret_key.as_slice());
ret
}
}
impl FromBase58 for ExtendedPrivKey {
fn from_base58_layout(data: Vec<u8>) -> Result<ExtendedPrivKey, Base58Error> {
if data.len() != 78 {
return Err(InvalidLength(data.len()));
}
let cn_int = u64_from_be_bytes(data.as_slice(), 9, 4) as u32;
let child_number = if cn_int < (1 << 31) { Normal(cn_int) }
else { Hardened(cn_int - (1 << 31)) };
Ok(ExtendedPrivKey {
network: match data.slice_to(4) {
[0x04, 0x88, 0xAD, 0xE4] => Bitcoin,
[0x04, 0x35, 0x83, 0x94] => BitcoinTestnet,
_ => { return Err(InvalidVersion(data.slice_to(4).to_vec())); }
},
depth: data[4] as uint,
parent_fingerprint: Fingerprint::from_slice(data.slice(5, 9)),
child_number: child_number,
chain_code: ChainCode::from_slice(data.slice(13, 45)),
secret_key: try!(SecretKey::from_slice(
data.slice(46, 78)).map_err(|e|
OtherBase58Error(e.to_string())))
})
}
}
impl ToBase58 for ExtendedPubKey {
fn base58_layout(&self) -> Vec<u8> {
assert!(self.public_key.is_compressed());
let mut ret = Vec::with_capacity(78);
ret.push_all(match self.network {
Bitcoin => [0x04, 0x88, 0xB2, 0x1E],
BitcoinTestnet => [0x04, 0x35, 0x87, 0xCF]
});
ret.push(self.depth as u8);
ret.push_all(self.parent_fingerprint.as_slice());
match self.child_number {
Hardened(n) => {
u64_to_be_bytes(n as u64 + (1 << 31), 4, |raw| ret.push_all(raw));
}
Normal(n) => {
u64_to_be_bytes(n as u64, 4, |raw| ret.push_all(raw));
}
}
ret.push_all(self.chain_code.as_slice());
ret.push_all(self.public_key.as_slice());
ret
}
}
impl FromBase58 for ExtendedPubKey {
fn from_base58_layout(data: Vec<u8>) -> Result<ExtendedPubKey, Base58Error> | data.slice(45, 78)).map_err(|e|
OtherBase5 | {
if data.len() != 78 {
return Err(InvalidLength(data.len()));
}
let cn_int = u64_from_be_bytes(data.as_slice(), 9, 4) as u32;
let child_number = if cn_int < (1 << 31) { Normal(cn_int) }
else { Hardened(cn_int - (1 << 31)) };
Ok(ExtendedPubKey {
network: match data.slice_to(4) {
[0x04, 0x88, 0xB2, 0x1E] => Bitcoin,
[0x04, 0x35, 0x87, 0xCF] => BitcoinTestnet,
_ => { return Err(InvalidVersion(data.slice_to(4).to_vec())); }
},
depth: data[4] as uint,
parent_fingerprint: Fingerprint::from_slice(data.slice(5, 9)),
child_number: child_number,
chain_code: ChainCode::from_slice(data.slice(13, 45)),
public_key: try!(PublicKey::from_slice( | identifier_body |
bip32.rs | (master: &ExtendedPrivKey, path: &[ChildNumber])
-> Result<ExtendedPrivKey, Error> {
let mut sk = *master;
for &num in path.iter() {
sk = try!(sk.ckd_priv(num));
}
Ok(sk)
}
/// Private->Private child key derivation
pub fn ckd_priv(&self, i: ChildNumber) -> Result<ExtendedPrivKey, Error> {
let mut result = [0, ..64];
let mut hmac = Hmac::new(Sha512::new(), self.chain_code.as_slice());
match i {
Normal(n) => {
if n >= (1 << 31) { return Err(InvalidChildNumber(i)) }
// Non-hardened key: compute public data and use that
secp256k1::init();
// Note the unwrap: this is fine, we checked the SK when we created it
hmac.input(PublicKey::from_secret_key(&self.secret_key, true).as_slice());
u64_to_be_bytes(n as u64, 4, |raw| hmac.input(raw));
}
Hardened(n) => {
if n >= (1 << 31) { return Err(InvalidChildNumber(i)) }
// Hardened key: use only secret data to prevent public derivation
hmac.input([0]);
hmac.input(self.secret_key.as_slice());
u64_to_be_bytes(n as u64 + (1 << 31), 4, |raw| hmac.input(raw));
}
}
hmac.raw_result(result.as_mut_slice());
let mut sk = try!(SecretKey::from_slice(result.slice_to(32)).map_err(EcdsaError));
try!(sk.add_assign(&self.secret_key).map_err(EcdsaError));
Ok(ExtendedPrivKey {
network: self.network,
depth: self.depth + 1,
parent_fingerprint: self.fingerprint(),
child_number: i,
secret_key: sk,
chain_code: ChainCode::from_slice(result.slice_from(32))
})
}
/// Returns the HASH160 of the chaincode
pub fn identifier(&self) -> [u8, ..20] {
let mut sha2_res = [0, ..32];
let mut ripemd_res = [0, ..20];
// Compute extended public key
let pk = ExtendedPubKey::from_private(self);
// Do SHA256 of just the ECDSA pubkey
let mut sha2 = Sha256::new();
sha2.input(pk.public_key.as_slice());
sha2.result(sha2_res.as_mut_slice());
// do RIPEMD160
let mut ripemd = Ripemd160::new();
ripemd.input(sha2_res.as_slice());
ripemd.result(ripemd_res.as_mut_slice());
// Return
ripemd_res
}
/// Returns the first four bytes of the identifier
pub fn fingerprint(&self) -> Fingerprint {
Fingerprint::from_slice(self.identifier().slice_to(4))
}
}
impl ExtendedPubKey {
/// Derives a public key from a private key
pub fn from_private(sk: &ExtendedPrivKey) -> ExtendedPubKey {
secp256k1::init();
ExtendedPubKey {
network: sk.network,
depth: sk.depth,
parent_fingerprint: sk.parent_fingerprint,
child_number: sk.child_number,
public_key: PublicKey::from_secret_key(&sk.secret_key, true),
chain_code: sk.chain_code
}
}
/// Public->Public child key derivation
pub fn ckd_pub(&self, i: ChildNumber) -> Result<ExtendedPubKey, Error> {
match i {
Hardened(n) => {
if n >= (1 << 31) {
Err(InvalidChildNumber(i))
} else {
Err(CannotDeriveFromHardenedKey)
}
}
Normal(n) => {
let mut hmac = Hmac::new(Sha512::new(), self.chain_code.as_slice());
hmac.input(self.public_key.as_slice());
u64_to_be_bytes(n as u64, 4, |raw| hmac.input(raw));
let mut result = [0, ..64];
hmac.raw_result(result.as_mut_slice());
let sk = try!(SecretKey::from_slice(result.slice_to(32)).map_err(EcdsaError));
let mut pk = self.public_key.clone();
try!(pk.add_exp_assign(&sk).map_err(EcdsaError));
Ok(ExtendedPubKey {
network: self.network,
depth: self.depth + 1,
parent_fingerprint: self.fingerprint(),
child_number: i,
public_key: pk,
chain_code: ChainCode::from_slice(result.slice_from(32))
})
}
}
}
/// Returns the HASH160 of the chaincode
pub fn identifier(&self) -> [u8, ..20] {
let mut sha2_res = [0, ..32];
let mut ripemd_res = [0, ..20];
// Do SHA256 of just the ECDSA pubkey
let mut sha2 = Sha256::new();
sha2.input(self.public_key.as_slice());
sha2.result(sha2_res.as_mut_slice());
// do RIPEMD160
let mut ripemd = Ripemd160::new();
ripemd.input(sha2_res.as_slice());
ripemd.result(ripemd_res.as_mut_slice());
// Return
ripemd_res
}
/// Returns the first four bytes of the identifier
pub fn fingerprint(&self) -> Fingerprint {
Fingerprint::from_slice(self.identifier().slice_to(4))
}
}
impl ToBase58 for ExtendedPrivKey {
fn base58_layout(&self) -> Vec<u8> {
let mut ret = Vec::with_capacity(78);
ret.push_all(match self.network {
Bitcoin => [0x04, 0x88, 0xAD, 0xE4],
BitcoinTestnet => [0x04, 0x35, 0x83, 0x94]
});
ret.push(self.depth as u8);
ret.push_all(self.parent_fingerprint.as_slice());
match self.child_number {
Hardened(n) => {
u64_to_be_bytes(n as u64 + (1 << 31), 4, |raw| ret.push_all(raw));
}
Normal(n) => {
u64_to_be_bytes(n as u64, 4, |raw| ret.push_all(raw));
}
}
ret.push_all(self.chain_code.as_slice());
ret.push(0);
ret.push_all(self.secret_key.as_slice());
ret
}
}
impl FromBase58 for ExtendedPrivKey {
fn from_base58_layout(data: Vec<u8>) -> Result<ExtendedPrivKey, Base58Error> {
if data.len() != 78 {
return Err(InvalidLength(data.len()));
}
let cn_int = u64_from_be_bytes(data.as_slice(), 9, 4) as u32;
let child_number = if cn_int < (1 << 31) { Normal(cn_int) }
else { Hardened(cn_int - (1 << 31)) };
Ok(ExtendedPrivKey {
network: match data.slice_to(4) {
[0x04, 0x88, 0xAD, 0xE4] => Bitcoin,
[0x04, 0x35, 0x83, 0x94] => BitcoinTestnet,
_ => { return Err(InvalidVersion(data.slice_to(4).to_vec())); }
},
depth: data[4] as uint,
parent_fingerprint: Fingerprint::from_slice(data.slice(5, 9)),
child_number: child_number,
chain_code: ChainCode::from_slice(data.slice(13, 45)),
secret_key: try!(SecretKey::from_slice(
data.slice(46, 78)).map_err(|e|
OtherBase58Error(e.to_string())))
})
}
}
impl ToBase58 for ExtendedPubKey {
fn base58_layout(&self) -> Vec<u8> {
assert!(self.public_key.is_compressed());
let mut ret = Vec::with_capacity(78);
ret.push_all(match self.network {
Bitcoin => [0x04, 0x88, 0xB2, 0x1E],
BitcoinTestnet => [0x04, 0x35, 0x87, 0xCF]
});
ret.push(self.depth as u8);
ret.push_all(self.parent_fingerprint.as_slice());
match self.child_number {
Hardened(n) => {
u64_to_be_bytes(n as u64 + (1 << 31), 4, |raw| ret.push_all(raw));
}
Normal(n) => {
u64_to_be_bytes(n as u64, 4, |raw| ret.push_all(raw));
}
}
ret.push_all(self.chain_code.as_slice());
ret.push | from_path | identifier_name |
|
bip32.rs | impl Default for Fingerprint {
fn default() -> Fingerprint { Fingerprint([0, 0, 0, 0]) }
}
/// Extended private key
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Show)]
pub struct ExtendedPrivKey {
/// The network this key is to be used on
pub network: Network,
/// How many derivations this key is from the master (which is 0)
pub depth: uint,
/// Fingerprint of the parent key (0 for master)
pub parent_fingerprint: Fingerprint,
/// Child number of the key used to derive from parent (0 for master)
pub child_number: ChildNumber,
/// Secret key
pub secret_key: SecretKey,
/// Chain code
pub chain_code: ChainCode
}
/// Extended public key
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Show)]
pub struct ExtendedPubKey {
/// The network this key is to be used on
pub network: Network,
/// How many derivations this key is from the master (which is 0)
pub depth: uint,
/// Fingerprint of the parent key
pub parent_fingerprint: Fingerprint,
/// Child number of the key used to derive from parent (0 for master)
pub child_number: ChildNumber,
/// Public key
pub public_key: PublicKey,
/// Chain code
pub chain_code: ChainCode
}
/// A child number for a derived key
#[deriving(Clone, PartialEq, Eq, Show)]
pub enum ChildNumber {
/// Hardened key index, within [0, 2^31 - 1]
Hardened(u32),
/// Non-hardened key, within [0, 2^31 - 1]
Normal(u32),
}
impl<S: Encoder<E>, E> Encodable<S, E> for ChildNumber {
fn encode(&self, s: &mut S) -> Result<(), E> {
match *self {
Hardened(n) => (n + (1 << 31)).encode(s),
Normal(n) => n.encode(s)
}
}
}
impl<D: Decoder<E>, E> Decodable<D, E> for ChildNumber {
fn decode(d: &mut D) -> Result<ChildNumber, E> {
let n: u32 = try!(Decodable::decode(d));
if n < (1 << 31) {
Ok(Normal(n))
} else {
Ok(Hardened(n - (1 << 31)))
}
}
}
/// A BIP32 error
#[deriving(Clone, PartialEq, Eq, Show)]
pub enum Error {
/// A pk->pk derivation was attempted on a hardened key
CannotDeriveFromHardenedKey,
/// A secp256k1 error occured
EcdsaError(secp256k1::Error),
/// A child number was provided that was out of range
InvalidChildNumber(ChildNumber),
/// Error creating a master seed --- for application use
RngError(String)
}
impl ExtendedPrivKey {
/// Construct a new master key from a seed value
pub fn new_master(network: Network, seed: &[u8]) -> Result<ExtendedPrivKey, Error> {
let mut result = [0, ..64];
let mut hmac = Hmac::new(Sha512::new(), b"Bitcoin seed".as_slice());
hmac.input(seed);
hmac.raw_result(result.as_mut_slice());
Ok(ExtendedPrivKey {
network: network,
depth: 0,
parent_fingerprint: Default::default(),
child_number: Normal(0),
secret_key: try!(SecretKey::from_slice(result.slice_to(32)).map_err(EcdsaError)),
chain_code: ChainCode::from_slice(result.slice_from(32))
})
}
/// Creates a privkey from a path
pub fn from_path(master: &ExtendedPrivKey, path: &[ChildNumber])
-> Result<ExtendedPrivKey, Error> {
let mut sk = *master;
for &num in path.iter() {
sk = try!(sk.ckd_priv(num));
}
Ok(sk)
}
/// Private->Private child key derivation
pub fn ckd_priv(&self, i: ChildNumber) -> Result<ExtendedPrivKey, Error> {
let mut result = [0, ..64];
let mut hmac = Hmac::new(Sha512::new(), self.chain_code.as_slice());
match i {
Normal(n) => {
if n >= (1 << 31) { return Err(InvalidChildNumber(i)) }
// Non-hardened key: compute public data and use that
secp256k1::init();
// Note the unwrap: this is fine, we checked the SK when we created it
hmac.input(PublicKey::from_secret_key(&self.secret_key, true).as_slice());
u64_to_be_bytes(n as u64, 4, |raw| hmac.input(raw));
}
Hardened(n) => {
if n >= (1 << 31) { return Err(InvalidChildNumber(i)) }
// Hardened key: use only secret data to prevent public derivation
hmac.input([0]);
hmac.input(self.secret_key.as_slice());
u64_to_be_bytes(n as u64 + (1 << 31), 4, |raw| hmac.input(raw));
}
}
hmac.raw_result(result.as_mut_slice());
let mut sk = try!(SecretKey::from_slice(result.slice_to(32)).map_err(EcdsaError));
try!(sk.add_assign(&self.secret_key).map_err(EcdsaError));
Ok(ExtendedPrivKey {
network: self.network,
depth: self.depth + 1,
parent_fingerprint: self.fingerprint(),
child_number: i,
secret_key: sk,
chain_code: ChainCode::from_slice(result.slice_from(32))
})
}
/// Returns the HASH160 of the chaincode
pub fn identifier(&self) -> [u8, ..20] {
let mut sha2_res = [0, ..32];
let mut ripemd_res = [0, ..20];
// Compute extended public key
let pk = ExtendedPubKey::from_private(self);
// Do SHA256 of just the ECDSA pubkey
let mut sha2 = Sha256::new();
sha2.input(pk.public_key.as_slice());
sha2.result(sha2_res.as_mut_slice());
// do RIPEMD160
let mut ripemd = Ripemd160::new();
ripemd.input(sha2_res.as_slice());
ripemd.result(ripemd_res.as_mut_slice());
// Return
ripemd_res
}
/// Returns the first four bytes of the identifier
pub fn fingerprint(&self) -> Fingerprint {
Fingerprint::from_slice(self.identifier().slice_to(4))
}
}
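// Illustrative usage sketch only; the `network` and `seed` values below are
// assumed to come from the caller and are not defined in this file:
//
//     let master = try!(ExtendedPrivKey::new_master(network, seed));
//     // walk the path m/0'/1 using ckd_priv via from_path
//     let child = try!(ExtendedPrivKey::from_path(&master, [Hardened(0), Normal(1)]));
//     let fp = child.fingerprint(); // first four bytes of the identifier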
impl ExtendedPubKey {
/// Derives a public key from a private key
pub fn from_private(sk: &ExtendedPrivKey) -> ExtendedPubKey {
secp256k1::init();
ExtendedPubKey {
network: sk.network,
depth: sk.depth,
parent_fingerprint: sk.parent_fingerprint,
child_number: sk.child_number,
public_key: PublicKey::from_secret_key(&sk.secret_key, true),
chain_code: sk.chain_code
}
}
/// Public->Public child key derivation
pub fn ckd_pub(&self, i: ChildNumber) -> Result<ExtendedPubKey, Error> {
match i {
Hardened(n) => {
if n >= (1 << 31) {
Err(InvalidChildNumber(i))
} else {
Err(CannotDeriveFromHardenedKey)
}
}
Normal(n) => {
let mut hmac = Hmac::new(Sha512::new(), self.chain_code.as_slice());
hmac.input(self.public_key.as_slice());
u64_to_be_bytes(n as u64, 4, |raw| hmac.input(raw));
let mut result = [0, ..64];
hmac.raw_result(result.as_mut_slice());
let sk = try!(SecretKey::from_slice(result.slice_to(32)).map_err(EcdsaError));
let mut pk = self.public_key.clone();
try!(pk.add_exp_assign(&sk).map_err(EcdsaError));
Ok(ExtendedPubKey {
network: self.network,
depth: self.depth + 1,
parent_fingerprint: self.fingerprint(),
child_number: i,
public_key: pk,
chain_code: ChainCode::from_slice(result.slice_from(32))
})
}
}
}
/// Returns the HASH160 of the public key
pub fn identifier(&self) -> [u8, ..20] {
let mut sha2_res = [0, ..32];
let mut ripemd_res = [0, ..20];
// Do SHA256 of just the ECDSA pubkey
let | impl_array_newtype!(Fingerprint, u8, 4)
impl_array_newtype_show!(Fingerprint)
impl_array_newtype_encodable!(Fingerprint, u8, 4)
| random_line_split |
|
f2s.rs | 0 >> 32) + bits1;
let shifted_sum = sum >> (shift - 32);
debug_assert!(shifted_sum <= u32::max_value() as u64);
shifted_sum as u32
}
#[cfg_attr(feature = "no-panic", inline)]
fn mul_pow5_inv_div_pow2(m: u32, q: u32, j: i32) -> u32 {
debug_assert!(q < FLOAT_POW5_INV_SPLIT.len() as u32);
unsafe { mul_shift(m, *FLOAT_POW5_INV_SPLIT.get_unchecked(q as usize), j) }
}
#[cfg_attr(feature = "no-panic", inline)]
fn mul_pow5_div_pow2(m: u32, i: u32, j: i32) -> u32 {
debug_assert!(i < FLOAT_POW5_SPLIT.len() as u32);
unsafe { mul_shift(m, *FLOAT_POW5_SPLIT.get_unchecked(i as usize), j) }
}
// A floating decimal representing m * 10^e.
pub struct FloatingDecimal32 {
pub mantissa: u32,
// Decimal exponent's range is -45 to 38
// inclusive, and can fit in i16 if needed.
pub exponent: i32,
}
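// Note: f2d below takes the raw IEEE-754 fields rather than an f32. A typical
// caller-side decomposition (assumed, not part of this file) looks like:
//
//     let bits = f.to_bits();                   // f: f32
//     let ieee_mantissa = bits & ((1u32 << 23) - 1);
//     let ieee_exponent = (bits >> 23) & 0xff;  // sign bit handled separately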
#[cfg_attr(feature = "no-panic", inline)]
pub fn f2d(ieee_mantissa: u32, ieee_exponent: u32) -> FloatingDecimal32 {
let (e2, m2) = if ieee_exponent == 0 {
(
// We subtract 2 so that the bounds computation has 2 additional bits.
1 - FLOAT_BIAS - FLOAT_MANTISSA_BITS as i32 - 2,
ieee_mantissa,
)
} else {
(
ieee_exponent as i32 - FLOAT_BIAS - FLOAT_MANTISSA_BITS as i32 - 2,
(1u32 << FLOAT_MANTISSA_BITS) | ieee_mantissa,
)
};
let even = (m2 & 1) == 0;
let accept_bounds = even;
// Step 2: Determine the interval of valid decimal representations.
let mv = 4 * m2;
let mp = 4 * m2 + 2;
// Implicit bool -> int conversion. True is 1, false is 0.
let mm_shift = (ieee_mantissa != 0 || ieee_exponent <= 1) as u32;
let mm = 4 * m2 - 1 - mm_shift;
// Step 3: Convert to a decimal power base using 64-bit arithmetic.
let mut vr: u32;
let mut vp: u32;
let mut vm: u32;
let e10: i32;
let mut vm_is_trailing_zeros = false;
let mut vr_is_trailing_zeros = false;
let mut last_removed_digit = 0u8;
if e2 >= 0 {
let q = log10_pow2(e2);
e10 = q as i32;
let k = FLOAT_POW5_INV_BITCOUNT + pow5bits(q as i32) - 1;
let i = -e2 + q as i32 + k;
vr = mul_pow5_inv_div_pow2(mv, q, i);
vp = mul_pow5_inv_div_pow2(mp, q, i);
vm = mul_pow5_inv_div_pow2(mm, q, i);
if q != 0 && (vp - 1) / 10 <= vm / 10 {
// We need to know one removed digit even if we are not going to loop below. We could use
// q = X - 1 above, except that would require 33 bits for the result, and we've found that
// 32-bit arithmetic is faster even on 64-bit machines.
let l = FLOAT_POW5_INV_BITCOUNT + pow5bits(q as i32 - 1) - 1;
last_removed_digit =
(mul_pow5_inv_div_pow2(mv, q - 1, -e2 + q as i32 - 1 + l) % 10) as u8;
}
if q <= 9 {
// The largest power of 5 that fits in 24 bits is 5^10, but q <= 9 seems to be safe as well.
// Only one of mp, mv, and mm can be a multiple of 5, if any.
if mv % 5 == 0 {
vr_is_trailing_zeros = multiple_of_power_of_5(mv, q);
} else if accept_bounds {
vm_is_trailing_zeros = multiple_of_power_of_5(mm, q);
} else {
vp -= multiple_of_power_of_5(mp, q) as u32;
}
}
} else {
let q = log10_pow5(-e2);
e10 = q as i32 + e2;
let i = -e2 - q as i32;
let k = pow5bits(i) - FLOAT_POW5_BITCOUNT;
let mut j = q as i32 - k;
vr = mul_pow5_div_pow2(mv, i as u32, j);
vp = mul_pow5_div_pow2(mp, i as u32, j);
vm = mul_pow5_div_pow2(mm, i as u32, j);
if q != 0 && (vp - 1) / 10 <= vm / 10 {
j = q as i32 - 1 - (pow5bits(i + 1) - FLOAT_POW5_BITCOUNT);
last_removed_digit = (mul_pow5_div_pow2(mv, (i + 1) as u32, j) % 10) as u8;
}
if q <= 1 {
// {vr,vp,vm} is trailing zeros if {mv,mp,mm} has at least q trailing 0 bits.
// mv = 4 * m2, so it always has at least two trailing 0 bits.
vr_is_trailing_zeros = true;
if accept_bounds {
// mm = mv - 1 - mm_shift, so it has 1 trailing 0 bit iff mm_shift == 1.
vm_is_trailing_zeros = mm_shift == 1;
} else {
// mp = mv + 2, so it always has at least one trailing 0 bit.
vp -= 1;
}
} else if q < 31 {
// TODO(ulfjack): Use a tighter bound here.
vr_is_trailing_zeros = multiple_of_power_of_2(mv, q - 1);
}
}
// Step 4: Find the shortest decimal representation in the interval of valid representations.
let mut removed = 0i32;
let output = if vm_is_trailing_zeros || vr_is_trailing_zeros {
// General case, which happens rarely (~4.0%).
while vp / 10 > vm / 10 {
vm_is_trailing_zeros &= vm - (vm / 10) * 10 == 0;
vr_is_trailing_zeros &= last_removed_digit == 0;
last_removed_digit = (vr % 10) as u8;
vr /= 10;
vp /= 10;
vm /= 10;
removed += 1;
}
if vm_is_trailing_zeros {
while vm % 10 == 0 {
vr_is_trailing_zeros &= last_removed_digit == 0;
last_removed_digit = (vr % 10) as u8;
vr /= 10;
vp /= 10;
vm /= 10;
removed += 1;
}
}
if vr_is_trailing_zeros && last_removed_digit == 5 && vr % 2 == 0 {
// Round even if the exact number is .....50..0.
last_removed_digit = 4;
}
// We need to take vr + 1 if vr is outside bounds or we need to round up.
vr + ((vr == vm && (!accept_bounds || !vm_is_trailing_zeros)) || last_removed_digit >= 5)
as u32
} else | {
// Specialized for the common case (~96.0%). Percentages below are relative to this.
// Loop iterations below (approximately):
// 0: 13.6%, 1: 70.7%, 2: 14.1%, 3: 1.39%, 4: 0.14%, 5+: 0.01%
while vp / 10 > vm / 10 {
last_removed_digit = (vr % 10) as u8;
vr /= 10;
vp /= 10;
vm /= 10;
removed += 1;
}
// We need to take vr + 1 if vr is outside bounds or we need to round up.
vr + (vr == vm || last_removed_digit >= 5) as u32
} | conditional_block |
|
f2s.rs | 958651173080,
340282366920938464,
544451787073501542,
435561429658801234,
348449143727040987,
557518629963265579,
446014903970612463,
356811923176489971,
570899077082383953,
456719261665907162,
365375409332725730,
1 << 63,
];
static FLOAT_POW5_SPLIT: [u64; 47] = [
1152921504606846976,
1441151880758558720,
1801439850948198400,
2251799813685248000,
1407374883553280000,
1759218604441600000,
2199023255552000000,
1374389534720000000,
1717986918400000000,
2147483648000000000,
1342177280000000000,
1677721600000000000,
2097152000000000000,
1310720000000000000,
1638400000000000000,
2048000000000000000,
1280000000000000000,
1600000000000000000,
2000000000000000000,
1250000000000000000,
1562500000000000000,
1953125000000000000,
1220703125000000000,
1525878906250000000,
1907348632812500000,
1192092895507812500,
1490116119384765625,
1862645149230957031,
1164153218269348144,
1455191522836685180,
1818989403545856475,
2273736754432320594,
1421085471520200371,
1776356839400250464,
2220446049250313080,
1387778780781445675,
1734723475976807094,
2168404344971008868,
1355252715606880542,
1694065894508600678,
2117582368135750847,
1323488980084844279,
1654361225106055349,
2067951531382569187,
1292469707114105741,
1615587133892632177,
2019483917365790221,
];
#[cfg_attr(feature = "no-panic", inline)]
fn pow5_factor(mut value: u32) -> u32 {
let mut count = 0u32;
loop {
debug_assert!(value != 0);
let q = value / 5;
let r = value % 5;
if r != 0 {
break;
}
value = q;
count += 1;
}
count
}
// Returns true if value is divisible by 5^p.
#[cfg_attr(feature = "no-panic", inline)]
fn multiple_of_power_of_5(value: u32, p: u32) -> bool {
pow5_factor(value) >= p
}
// Returns true if value is divisible by 2^p.
#[cfg_attr(feature = "no-panic", inline)]
fn multiple_of_power_of_2(value: u32, p: u32) -> bool {
// return __builtin_ctz(value) >= p;
(value & ((1u32 << p) - 1)) == 0
}
// It seems to be slightly faster to avoid uint128_t here, although the
// generated code for uint128_t looks slightly nicer.
#[cfg_attr(feature = "no-panic", inline)]
fn mul_shift(m: u32, factor: u64, shift: i32) -> u32 {
debug_assert!(shift > 32);
// The casts here help MSVC to avoid calls to the __allmul library
// function.
let factor_lo = factor as u32;
let factor_hi = (factor >> 32) as u32;
let bits0 = m as u64 * factor_lo as u64;
let bits1 = m as u64 * factor_hi as u64;
let sum = (bits0 >> 32) + bits1;
let shifted_sum = sum >> (shift - 32);
debug_assert!(shifted_sum <= u32::max_value() as u64);
shifted_sum as u32
}
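// Worked example (not in the original source): for shift > 32 the two-limb
// computation above equals ((m as u128 * factor as u128) >> shift) as u32, e.g.
//
//     mul_shift(10, 3u64 << 32, 33) == 15   // (10 * 3 * 2^32) >> 33 = 15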
#[cfg_attr(feature = "no-panic", inline)]
fn | (m: u32, q: u32, j: i32) -> u32 {
debug_assert!(q < FLOAT_POW5_INV_SPLIT.len() as u32);
unsafe { mul_shift(m, *FLOAT_POW5_INV_SPLIT.get_unchecked(q as usize), j) }
}
#[cfg_attr(feature = "no-panic", inline)]
fn mul_pow5_div_pow2(m: u32, i: u32, j: i32) -> u32 {
debug_assert!(i < FLOAT_POW5_SPLIT.len() as u32);
unsafe { mul_shift(m, *FLOAT_POW5_SPLIT.get_unchecked(i as usize), j) }
}
// A floating decimal representing m * 10^e.
pub struct FloatingDecimal32 {
pub mantissa: u32,
// Decimal exponent's range is -45 to 38
// inclusive, and can fit in i16 if needed.
pub exponent: i32,
}
#[cfg_attr(feature = "no-panic", inline)]
pub fn f2d(ieee_mantissa: u32, ieee_exponent: u32) -> FloatingDecimal32 {
let (e2, m2) = if ieee_exponent == 0 {
(
// We | mul_pow5_inv_div_pow2 | identifier_name |
f2s.rs | 958651173080,
340282366920938464,
544451787073501542,
435561429658801234,
348449143727040987,
557518629963265579,
446014903970612463,
356811923176489971,
570899077082383953,
456719261665907162,
365375409332725730,
1 << 63,
];
static FLOAT_POW5_SPLIT: [u64; 47] = [
1152921504606846976,
1441151880758558720,
1801439850948198400,
2251799813685248000,
1407374883553280000,
1759218604441600000,
2199023255552000000,
1374389534720000000,
1717986918400000000,
2147483648000000000,
1342177280000000000,
1677721600000000000,
2097152000000000000,
1310720000000000000,
1638400000000000000,
2048000000000000000,
1280000000000000000,
1600000000000000000,
2000000000000000000,
1250000000000000000,
1562500000000000000,
1953125000000000000,
1220703125000000000,
1525878906250000000,
1907348632812500000,
1192092895507812500,
1490116119384765625,
1862645149230957031,
1164153218269348144,
1455191522836685180,
1818989403545856475,
2273736754432320594,
1421085471520200371,
1776356839400250464,
2220446049250313080,
1387778780781445675,
1734723475976807094,
2168404344971008868,
1355252715606880542,
1694065894508600678,
2117582368135750847,
1323488980084844279,
1654361225106055349,
2067951531382569187,
1292469707114105741,
1615587133892632177,
2019483917365790221,
];
#[cfg_attr(feature = "no-panic", inline)]
fn pow5_factor(mut value: u32) -> u32 {
let mut count = 0u32;
loop {
debug_assert!(value != 0);
let q = value / 5;
let r = value % 5;
if r != 0 {
break;
}
value = q;
count += 1; |
// Returns true if value is divisible by 5^p.
#[cfg_attr(feature = "no-panic", inline)]
fn multiple_of_power_of_5(value: u32, p: u32) -> bool {
pow5_factor(value) >= p
}
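// Small illustrative checks (not in the original source):
//
//     multiple_of_power_of_5(125, 3) == true    // 125 = 5^3
//     multiple_of_power_of_5(50, 2)  == true    // 50 = 2 * 5^2
//     multiple_of_power_of_5(50, 3)  == false
//
// pow5_factor never terminates for value == 0 (hence its debug_assert), so
// callers only pass non-zero values.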
// Returns true if value is divisible by 2^p.
#[cfg_attr(feature = "no-panic", inline)]
fn multiple_of_power_of_2(value: u32, p: u32) -> bool {
// return __builtin_ctz(value) >= p;
(value & ((1u32 << p) - 1)) == 0
}
// It seems to be slightly faster to avoid uint128_t here, although the
// generated code for uint128_t looks slightly nicer.
#[cfg_attr(feature = "no-panic", inline)]
fn mul_shift(m: u32, factor: u64, shift: i32) -> u32 {
debug_assert!(shift > 32);
// The casts here help MSVC to avoid calls to the __allmul library
// function.
let factor_lo = factor as u32;
let factor_hi = (factor >> 32) as u32;
let bits0 = m as u64 * factor_lo as u64;
let bits1 = m as u64 * factor_hi as u64;
let sum = (bits0 >> 32) + bits1;
let shifted_sum = sum >> (shift - 32);
debug_assert!(shifted_sum <= u32::max_value() as u64);
shifted_sum as u32
}
#[cfg_attr(feature = "no-panic", inline)]
fn mul_pow5_inv_div_pow2(m: u32, q: u32, j: i32) -> u32 {
debug_assert!(q < FLOAT_POW5_INV_SPLIT.len() as u32);
unsafe { mul_shift(m, *FLOAT_POW5_INV_SPLIT.get_unchecked(q as usize), j) }
}
#[cfg_attr(feature = "no-panic", inline)]
fn mul_pow5_div_pow2(m: u32, i: u32, j: i32) -> u32 {
debug_assert!(i < FLOAT_POW5_SPLIT.len() as u32);
unsafe { mul_shift(m, *FLOAT_POW5_SPLIT.get_unchecked(i as usize), j) }
}
// A floating decimal representing m * 10^e.
pub struct FloatingDecimal32 {
pub mantissa: u32,
// Decimal exponent's range is -45 to 38
// inclusive, and can fit in i16 if needed.
pub exponent: i32,
}
#[cfg_attr(feature = "no-panic", inline)]
pub fn f2d(ieee_mantissa: u32, ieee_exponent: u32) -> FloatingDecimal32 {
let (e2, m2) = if ieee_exponent == 0 {
(
// We | }
count
} | random_line_split |
discovery.pb.go | ,proto3,enum=grafeas.v1.NoteKind" json:"analysis_kind,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Discovery) Reset() { *m = Discovery{} }
func (m *Discovery) String() string { return proto.CompactTextString(m) }
func (*Discovery) ProtoMessage() {}
func (*Discovery) Descriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{0}
}
func (m *Discovery) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Discovery.Unmarshal(m, b)
}
func (m *Discovery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Discovery.Marshal(b, m, deterministic)
}
func (m *Discovery) XXX_Merge(src proto.Message) {
xxx_messageInfo_Discovery.Merge(m, src)
}
func (m *Discovery) XXX_Size() int {
return xxx_messageInfo_Discovery.Size(m)
}
func (m *Discovery) XXX_DiscardUnknown() {
xxx_messageInfo_Discovery.DiscardUnknown(m)
}
var xxx_messageInfo_Discovery proto.InternalMessageInfo
func (m *Discovery) GetAnalysisKind() common_go_proto.NoteKind {
if m != nil {
return m.AnalysisKind
}
return common_go_proto.NoteKind_NOTE_KIND_UNSPECIFIED
}
// Details of a discovery occurrence.
type Details struct {
// Required. Analysis status for the discovered resource.
Discovered *Discovered `protobuf:"bytes,1,opt,name=discovered,proto3" json:"discovered,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Details) Reset() { *m = Details{} }
func (m *Details) String() string { return proto.CompactTextString(m) }
func (*Details) ProtoMessage() {}
func (*Details) Descriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{1}
}
func (m *Details) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Details.Unmarshal(m, b)
}
func (m *Details) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Details.Marshal(b, m, deterministic)
}
func (m *Details) XXX_Merge(src proto.Message) {
xxx_messageInfo_Details.Merge(m, src)
}
func (m *Details) XXX_Size() int {
return xxx_messageInfo_Details.Size(m)
}
func (m *Details) XXX_DiscardUnknown() {
xxx_messageInfo_Details.DiscardUnknown(m)
}
var xxx_messageInfo_Details proto.InternalMessageInfo
func (m *Details) GetDiscovered() *Discovered {
if m != nil {
return m.Discovered
}
return nil
}
// Provides information about the analysis status of a discovered resource.
type Discovered struct {
// Whether the resource is continuously analyzed.
ContinuousAnalysis Discovered_ContinuousAnalysis `protobuf:"varint,1,opt,name=continuous_analysis,json=continuousAnalysis,proto3,enum=grafeas.v1.discovery.Discovered_ContinuousAnalysis" json:"continuous_analysis,omitempty"`
// The last time continuous analysis was done for this resource.
// Deprecated, do not use.
LastAnalysisTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=last_analysis_time,json=lastAnalysisTime,proto3" json:"last_analysis_time,omitempty"`
// The status of discovery for the resource.
AnalysisStatus Discovered_AnalysisStatus `protobuf:"varint,3,opt,name=analysis_status,json=analysisStatus,proto3,enum=grafeas.v1.discovery.Discovered_AnalysisStatus" json:"analysis_status,omitempty"`
// When an error is encountered this will contain a LocalizedMessage under
// details to show to the user. The LocalizedMessage is output only and
// populated by the API.
AnalysisStatusError *status.Status `protobuf:"bytes,4,opt,name=analysis_status_error,json=analysisStatusError,proto3" json:"analysis_status_error,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Discovered) Reset() { *m = Discovered{} }
func (m *Discovered) String() string { return proto.CompactTextString(m) }
func (*Discovered) ProtoMessage() {}
func (*Discovered) Descriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{2}
}
func (m *Discovered) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Discovered.Unmarshal(m, b)
}
func (m *Discovered) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Discovered.Marshal(b, m, deterministic)
}
func (m *Discovered) XXX_Merge(src proto.Message) {
xxx_messageInfo_Discovered.Merge(m, src)
}
func (m *Discovered) XXX_Size() int {
return xxx_messageInfo_Discovered.Size(m)
}
func (m *Discovered) XXX_DiscardUnknown() {
xxx_messageInfo_Discovered.DiscardUnknown(m)
}
var xxx_messageInfo_Discovered proto.InternalMessageInfo
func (m *Discovered) GetContinuousAnalysis() Discovered_ContinuousAnalysis {
if m != nil {
return m.ContinuousAnalysis
}
return Discovered_CONTINUOUS_ANALYSIS_UNSPECIFIED
}
func (m *Discovered) GetLastAnalysisTime() *timestamp.Timestamp {
if m != nil {
return m.LastAnalysisTime
}
return nil
}
func (m *Discovered) GetAnalysisStatus() Discovered_AnalysisStatus {
if m != nil {
return m.AnalysisStatus
}
return Discovered_ANALYSIS_STATUS_UNSPECIFIED
}
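// Illustrative consumer-side sketch (not part of the generated file); only
// identifiers declared above are used, and the field values are placeholders:
//
//	d := &Discovered{
//		ContinuousAnalysis: Discovered_CONTINUOUS_ANALYSIS_UNSPECIFIED,
//		AnalysisStatus:     Discovered_ANALYSIS_STATUS_UNSPECIFIED,
//	}
//	if d.GetAnalysisStatus() == Discovered_ANALYSIS_STATUS_UNSPECIFIED {
//		// analysis has not started, or the field was never set
//	}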
func (m *Discovered) GetAnalysisStatusError() *status.Status {
if m != nil {
return m.AnalysisStatusError
} | proto.RegisterEnum("grafeas.v1.discovery.Discovered_ContinuousAnalysis", Discovered_ContinuousAnalysis_name, Discovered_ContinuousAnalysis_value)
proto.RegisterEnum("grafeas.v1.discovery.Discovered_AnalysisStatus", Discovered_AnalysisStatus_name, Discovered_AnalysisStatus_value)
proto.RegisterType((*Discovery)(nil), "grafeas.v1.discovery.Discovery")
proto.RegisterType((*Details)(nil), "grafeas.v1.discovery.Details")
proto.RegisterType((*Discovered)(nil), "grafeas.v1.discovery.Discovered")
}
func init() { proto.RegisterFile("proto/v1/discovery.proto", fileDescriptor_046437f686d0269e) }
var fileDescriptor_046437f686d0269e = []byte{
// 504 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x93, 0xdf, 0x4e, 0xdb, 0x30,
0x18, 0xc5, 0xd7, 0xd2, 0xc1, 0xf6, 0xc1, 0x4a, 0xe4, 0x16, 0x11, 0x75, 0x17, 0x45, 0xdd, 0xcd,
0xae, 0x1c, 0x01, 0xd2, 0xa4, 0xdd, 0x2d, 0x4b, 0x52, 0xb0, 0x40, 0x6e, 0x15, 0x27, 0xd3, 0xd8,
0x4d, 0x94, 0xa6, 0xa1, 0x8b, 0xd6, 0xc6, 0x55, 0xec, 0x54, 0xe2, 0x31, 0xf6, 0x0a, 0x7b, 0x99,
0xbd, 0xd6, 0x94, 0x7f, 0x1e, 0xa5, 0x48, 0x5c, 0x45, 0xdf, 0xf1, 0x77, 0x7e, 0x3e, 0x39, 0x51,
0x40, 0x5f, 0x67, 0x5c, 0x72, 0x63, 0x73, 0x | return nil
}
func init() { | random_line_split |
discovery.pb.go | 3,enum=grafeas.v1.NoteKind" json:"analysis_kind,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Discovery) Reset() { *m = Discovery{} }
func (m *Discovery) String() string { return proto.CompactTextString(m) }
func (*Discovery) ProtoMessage() {}
func (*Discovery) Descriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{0}
}
func (m *Discovery) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Discovery.Unmarshal(m, b)
}
func (m *Discovery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Discovery.Marshal(b, m, deterministic)
}
func (m *Discovery) XXX_Merge(src proto.Message) {
xxx_messageInfo_Discovery.Merge(m, src)
}
func (m *Discovery) XXX_Size() int {
return xxx_messageInfo_Discovery.Size(m)
}
func (m *Discovery) XXX_DiscardUnknown() {
xxx_messageInfo_Discovery.DiscardUnknown(m)
}
var xxx_messageInfo_Discovery proto.InternalMessageInfo
func (m *Discovery) GetAnalysisKind() common_go_proto.NoteKind {
if m != nil {
return m.AnalysisKind
}
return common_go_proto.NoteKind_NOTE_KIND_UNSPECIFIED
}
// Details of a discovery occurrence.
type Details struct {
// Required. Analysis status for the discovered resource.
Discovered *Discovered `protobuf:"bytes,1,opt,name=discovered,proto3" json:"discovered,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Details) Reset() { *m = Details{} }
func (m *Details) String() string { return proto.CompactTextString(m) }
func (*Details) ProtoMessage() {}
func (*Details) Descriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{1}
}
func (m *Details) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Details.Unmarshal(m, b)
}
func (m *Details) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Details.Marshal(b, m, deterministic)
}
func (m *Details) XXX_Merge(src proto.Message) {
xxx_messageInfo_Details.Merge(m, src)
}
func (m *Details) XXX_Size() int {
return xxx_messageInfo_Details.Size(m)
}
func (m *Details) XXX_DiscardUnknown() {
xxx_messageInfo_Details.DiscardUnknown(m)
}
var xxx_messageInfo_Details proto.InternalMessageInfo
func (m *Details) GetDiscovered() *Discovered {
if m != nil {
return m.Discovered
}
return nil
}
// Provides information about the analysis status of a discovered resource.
type Discovered struct {
// Whether the resource is continuously analyzed.
ContinuousAnalysis Discovered_ContinuousAnalysis `protobuf:"varint,1,opt,name=continuous_analysis,json=continuousAnalysis,proto3,enum=grafeas.v1.discovery.Discovered_ContinuousAnalysis" json:"continuous_analysis,omitempty"`
// The last time continuous analysis was done for this resource.
// Deprecated, do not use.
LastAnalysisTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=last_analysis_time,json=lastAnalysisTime,proto3" json:"last_analysis_time,omitempty"`
// The status of discovery for the resource.
AnalysisStatus Discovered_AnalysisStatus `protobuf:"varint,3,opt,name=analysis_status,json=analysisStatus,proto3,enum=grafeas.v1.discovery.Discovered_AnalysisStatus" json:"analysis_status,omitempty"`
// When an error is encountered this will contain a LocalizedMessage under
// details to show to the user. The LocalizedMessage is output only and
// populated by the API.
AnalysisStatusError *status.Status `protobuf:"bytes,4,opt,name=analysis_status_error,json=analysisStatusError,proto3" json:"analysis_status_error,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Discovered) Reset() { *m = Discovered{} }
func (m *Discovered) String() string { return proto.CompactTextString(m) }
func (*Discovered) ProtoMessage() {}
func (*Discovered) Descriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{2}
}
func (m *Discovered) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Discovered.Unmarshal(m, b)
}
func (m *Discovered) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Discovered.Marshal(b, m, deterministic)
}
func (m *Discovered) XXX_Merge(src proto.Message) {
xxx_messageInfo_Discovered.Merge(m, src)
}
func (m *Discovered) XXX_Size() int {
return xxx_messageInfo_Discovered.Size(m)
}
func (m *Discovered) XXX_DiscardUnknown() {
xxx_messageInfo_Discovered.DiscardUnknown(m)
}
var xxx_messageInfo_Discovered proto.InternalMessageInfo
func (m *Discovered) GetContinuousAnalysis() Discovered_ContinuousAnalysis {
if m != nil {
return m.ContinuousAnalysis
}
return Discovered_CONTINUOUS_ANALYSIS_UNSPECIFIED
}
func (m *Discovered) GetLastAnalysisTime() *timestamp.Timestamp {
if m != nil {
return m.LastAnalysisTime
}
return nil
}
func (m *Discovered) GetAnalysisStatus() Discovered_AnalysisStatus {
if m != nil {
return m.AnalysisStatus
}
return Discovered_ANALYSIS_STATUS_UNSPECIFIED
}
func (m *Discovered) GetAnalysisStatusError() *status.Status {
if m != nil |
return nil
}
func init() {
proto.RegisterEnum("grafeas.v1.discovery.Discovered_ContinuousAnalysis", Discovered_ContinuousAnalysis_name, Discovered_ContinuousAnalysis_value)
proto.RegisterEnum("grafeas.v1.discovery.Discovered_AnalysisStatus", Discovered_AnalysisStatus_name, Discovered_AnalysisStatus_value)
proto.RegisterType((*Discovery)(nil), "grafeas.v1.discovery.Discovery")
proto.RegisterType((*Details)(nil), "grafeas.v1.discovery.Details")
proto.RegisterType((*Discovered)(nil), "grafeas.v1.discovery.Discovered")
}
func init() { proto.RegisterFile("proto/v1/discovery.proto", fileDescriptor_046437f686d0269e) }
var fileDescriptor_046437f686d0269e = []byte{
// 504 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x93, 0xdf, 0x4e, 0xdb, 0x30,
0x18, 0xc5, 0xd7, 0xd2, 0xc1, 0xf6, 0xc1, 0x4a, 0xe4, 0x16, 0x11, 0x75, 0x17, 0x45, 0xdd, 0xcd,
0xae, 0x1c, 0x01, 0xd2, 0xa4, 0xdd, 0x2d, 0x4b, 0x52, 0xb0, 0x40, 0x6e, 0x15, 0x27, 0xd3, 0xd8,
0x4d, 0x94, 0xa6, 0xa1, 0x8b, 0xd6, 0xc6, 0x55, 0xec, 0x54, 0xe2, 0x31, 0xf6, 0x0a, 0x7b, 0x99,
0xbd, 0xd6, 0x94, 0x7f, 0x1e, 0xa5, 0x48, 0x5c, 0x45, 0xdf, 0xf1, 0x77, 0x7e, 0x3e, 0x39, 0x51,
0x40, 0x5f, 0x67, 0x5c, 0x72, 0x63, 0x73, 0 | {
return m.AnalysisStatusError
} | conditional_block |
discovery.pb.go | ,proto3,enum=grafeas.v1.NoteKind" json:"analysis_kind,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Discovery) Reset() { *m = Discovery{} }
func (m *Discovery) String() string { return proto.CompactTextString(m) }
func (*Discovery) ProtoMessage() {}
func (*Discovery) Descriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{0}
}
func (m *Discovery) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Discovery.Unmarshal(m, b)
}
func (m *Discovery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Discovery.Marshal(b, m, deterministic)
}
func (m *Discovery) XXX_Merge(src proto.Message) {
xxx_messageInfo_Discovery.Merge(m, src)
}
func (m *Discovery) XXX_Size() int {
return xxx_messageInfo_Discovery.Size(m)
}
func (m *Discovery) XXX_DiscardUnknown() {
xxx_messageInfo_Discovery.DiscardUnknown(m)
}
var xxx_messageInfo_Discovery proto.InternalMessageInfo
func (m *Discovery) GetAnalysisKind() common_go_proto.NoteKind {
if m != nil {
return m.AnalysisKind
}
return common_go_proto.NoteKind_NOTE_KIND_UNSPECIFIED
}
// Details of a discovery occurrence.
type Details struct {
// Required. Analysis status for the discovered resource.
Discovered *Discovered `protobuf:"bytes,1,opt,name=discovered,proto3" json:"discovered,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Details) Reset() { *m = Details{} }
func (m *Details) String() string { return proto.CompactTextString(m) }
func (*Details) ProtoMessage() {}
func (*Details) Descriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{1}
}
func (m *Details) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Details.Unmarshal(m, b)
}
func (m *Details) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Details.Marshal(b, m, deterministic)
}
func (m *Details) XXX_Merge(src proto.Message) {
xxx_messageInfo_Details.Merge(m, src)
}
func (m *Details) XXX_Size() int {
return xxx_messageInfo_Details.Size(m)
}
func (m *Details) XXX_DiscardUnknown() {
xxx_messageInfo_Details.DiscardUnknown(m)
}
var xxx_messageInfo_Details proto.InternalMessageInfo
func (m *Details) | () *Discovered {
if m != nil {
return m.Discovered
}
return nil
}
// Provides information about the analysis status of a discovered resource.
type Discovered struct {
// Whether the resource is continuously analyzed.
ContinuousAnalysis Discovered_ContinuousAnalysis `protobuf:"varint,1,opt,name=continuous_analysis,json=continuousAnalysis,proto3,enum=grafeas.v1.discovery.Discovered_ContinuousAnalysis" json:"continuous_analysis,omitempty"`
// The last time continuous analysis was done for this resource.
// Deprecated, do not use.
LastAnalysisTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=last_analysis_time,json=lastAnalysisTime,proto3" json:"last_analysis_time,omitempty"`
// The status of discovery for the resource.
AnalysisStatus Discovered_AnalysisStatus `protobuf:"varint,3,opt,name=analysis_status,json=analysisStatus,proto3,enum=grafeas.v1.discovery.Discovered_AnalysisStatus" json:"analysis_status,omitempty"`
// When an error is encountered this will contain a LocalizedMessage under
// details to show to the user. The LocalizedMessage is output only and
// populated by the API.
AnalysisStatusError *status.Status `protobuf:"bytes,4,opt,name=analysis_status_error,json=analysisStatusError,proto3" json:"analysis_status_error,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Discovered) Reset() { *m = Discovered{} }
func (m *Discovered) String() string { return proto.CompactTextString(m) }
func (*Discovered) ProtoMessage() {}
func (*Discovered) Descriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{2}
}
func (m *Discovered) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Discovered.Unmarshal(m, b)
}
func (m *Discovered) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Discovered.Marshal(b, m, deterministic)
}
func (m *Discovered) XXX_Merge(src proto.Message) {
xxx_messageInfo_Discovered.Merge(m, src)
}
func (m *Discovered) XXX_Size() int {
return xxx_messageInfo_Discovered.Size(m)
}
func (m *Discovered) XXX_DiscardUnknown() {
xxx_messageInfo_Discovered.DiscardUnknown(m)
}
var xxx_messageInfo_Discovered proto.InternalMessageInfo
func (m *Discovered) GetContinuousAnalysis() Discovered_ContinuousAnalysis {
if m != nil {
return m.ContinuousAnalysis
}
return Discovered_CONTINUOUS_ANALYSIS_UNSPECIFIED
}
func (m *Discovered) GetLastAnalysisTime() *timestamp.Timestamp {
if m != nil {
return m.LastAnalysisTime
}
return nil
}
func (m *Discovered) GetAnalysisStatus() Discovered_AnalysisStatus {
if m != nil {
return m.AnalysisStatus
}
return Discovered_ANALYSIS_STATUS_UNSPECIFIED
}
func (m *Discovered) GetAnalysisStatusError() *status.Status {
if m != nil {
return m.AnalysisStatusError
}
return nil
}
func init() {
proto.RegisterEnum("grafeas.v1.discovery.Discovered_ContinuousAnalysis", Discovered_ContinuousAnalysis_name, Discovered_ContinuousAnalysis_value)
proto.RegisterEnum("grafeas.v1.discovery.Discovered_AnalysisStatus", Discovered_AnalysisStatus_name, Discovered_AnalysisStatus_value)
proto.RegisterType((*Discovery)(nil), "grafeas.v1.discovery.Discovery")
proto.RegisterType((*Details)(nil), "grafeas.v1.discovery.Details")
proto.RegisterType((*Discovered)(nil), "grafeas.v1.discovery.Discovered")
}
func init() { proto.RegisterFile("proto/v1/discovery.proto", fileDescriptor_046437f686d0269e) }
var fileDescriptor_046437f686d0269e = []byte{
// 504 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x93, 0xdf, 0x4e, 0xdb, 0x30,
0x18, 0xc5, 0xd7, 0xd2, 0xc1, 0xf6, 0xc1, 0x4a, 0xe4, 0x16, 0x11, 0x75, 0x17, 0x45, 0xdd, 0xcd,
0xae, 0x1c, 0x01, 0xd2, 0xa4, 0xdd, 0x2d, 0x4b, 0x52, 0xb0, 0x40, 0x6e, 0x15, 0x27, 0xd3, 0xd8,
0x4d, 0x94, 0xa6, 0xa1, 0x8b, 0xd6, 0xc6, 0x55, 0xec, 0x54, 0xe2, 0x31, 0xf6, 0x0a, 0x7b, 0x99,
0xbd, 0xd6, 0x94, 0x7f, 0x1e, 0xa5, 0x48, 0x5c, 0x45, 0xdf, 0xf1, 0x77, 0x7e, 0x3e, 0x39, 0x51,
0x40, 0x5f, 0x67, 0x5c, 0x72, 0x63, 0x73, 0 | GetDiscovered | identifier_name |
discovery.pb.go |
func (Discovered_AnalysisStatus) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{2, 1}
}
// DO NOT USE: UNDER HEAVY DEVELOPMENT.
// TODO(aysylu): finalize this.
//
// A note that indicates a type of analysis a provider would perform. This note
// exists in a provider's project. A `Discovery` occurrence is created in a
// consumer's project at the start of analysis.
type Discovery struct {
// Required. Immutable. The kind of analysis that is handled by this
// discovery.
AnalysisKind common_go_proto.NoteKind `protobuf:"varint,1,opt,name=analysis_kind,json=analysisKind,proto3,enum=grafeas.v1.NoteKind" json:"analysis_kind,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Discovery) Reset() { *m = Discovery{} }
func (m *Discovery) String() string { return proto.CompactTextString(m) }
func (*Discovery) ProtoMessage() {}
func (*Discovery) Descriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{0}
}
func (m *Discovery) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Discovery.Unmarshal(m, b)
}
func (m *Discovery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Discovery.Marshal(b, m, deterministic)
}
func (m *Discovery) XXX_Merge(src proto.Message) {
xxx_messageInfo_Discovery.Merge(m, src)
}
func (m *Discovery) XXX_Size() int {
return xxx_messageInfo_Discovery.Size(m)
}
func (m *Discovery) XXX_DiscardUnknown() {
xxx_messageInfo_Discovery.DiscardUnknown(m)
}
var xxx_messageInfo_Discovery proto.InternalMessageInfo
func (m *Discovery) GetAnalysisKind() common_go_proto.NoteKind {
if m != nil {
return m.AnalysisKind
}
return common_go_proto.NoteKind_NOTE_KIND_UNSPECIFIED
}
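// Aside (illustrative, not part of the generated file): as the nil check above
// shows, these getters follow the usual protobuf-go convention of being safe on
// nil receivers, so callers can use them without guarding:
//
//	var d *Discovery
//	_ = d.GetAnalysisKind() // returns common_go_proto.NoteKind_NOTE_KIND_UNSPECIFIED, no panic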
// Details of a discovery occurrence.
type Details struct {
// Required. Analysis status for the discovered resource.
Discovered *Discovered `protobuf:"bytes,1,opt,name=discovered,proto3" json:"discovered,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Details) Reset() { *m = Details{} }
func (m *Details) String() string { return proto.CompactTextString(m) }
func (*Details) ProtoMessage() {}
func (*Details) Descriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{1}
}
func (m *Details) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Details.Unmarshal(m, b)
}
func (m *Details) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Details.Marshal(b, m, deterministic)
}
func (m *Details) XXX_Merge(src proto.Message) {
xxx_messageInfo_Details.Merge(m, src)
}
func (m *Details) XXX_Size() int {
return xxx_messageInfo_Details.Size(m)
}
func (m *Details) XXX_DiscardUnknown() {
xxx_messageInfo_Details.DiscardUnknown(m)
}
var xxx_messageInfo_Details proto.InternalMessageInfo
func (m *Details) GetDiscovered() *Discovered {
if m != nil {
return m.Discovered
}
return nil
}
// Provides information about the analysis status of a discovered resource.
type Discovered struct {
// Whether the resource is continuously analyzed.
ContinuousAnalysis Discovered_ContinuousAnalysis `protobuf:"varint,1,opt,name=continuous_analysis,json=continuousAnalysis,proto3,enum=grafeas.v1.discovery.Discovered_ContinuousAnalysis" json:"continuous_analysis,omitempty"`
// The last time continuous analysis was done for this resource.
// Deprecated, do not use.
LastAnalysisTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=last_analysis_time,json=lastAnalysisTime,proto3" json:"last_analysis_time,omitempty"`
// The status of discovery for the resource.
AnalysisStatus Discovered_AnalysisStatus `protobuf:"varint,3,opt,name=analysis_status,json=analysisStatus,proto3,enum=grafeas.v1.discovery.Discovered_AnalysisStatus" json:"analysis_status,omitempty"`
// When an error is encountered this will contain a LocalizedMessage under
// details to show to the user. The LocalizedMessage is output only and
// populated by the API.
AnalysisStatusError *status.Status `protobuf:"bytes,4,opt,name=analysis_status_error,json=analysisStatusError,proto3" json:"analysis_status_error,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Discovered) Reset() { *m = Discovered{} }
func (m *Discovered) String() string { return proto.CompactTextString(m) }
func (*Discovered) ProtoMessage() {}
func (*Discovered) Descriptor() ([]byte, []int) {
return fileDescriptor_046437f686d0269e, []int{2}
}
func (m *Discovered) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Discovered.Unmarshal(m, b)
}
func (m *Discovered) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Discovered.Marshal(b, m, deterministic)
}
func (m *Discovered) XXX_Merge(src proto.Message) {
xxx_messageInfo_Discovered.Merge(m, src)
}
func (m *Discovered) XXX_Size() int {
return xxx_messageInfo_Discovered.Size(m)
}
func (m *Discovered) XXX_DiscardUnknown() {
xxx_messageInfo_Discovered.DiscardUnknown(m)
}
var xxx_messageInfo_Discovered proto.InternalMessageInfo
func (m *Discovered) GetContinuousAnalysis() Discovered_ContinuousAnalysis {
if m != nil {
return m.ContinuousAnalysis
}
return Discovered_CONTINUOUS_ANALYSIS_UNSPECIFIED
}
func (m *Discovered) GetLastAnalysisTime() *timestamp.Timestamp {
if m != nil {
return m.LastAnalysisTime
}
return nil
}
func (m *Discovered) GetAnalysisStatus() Discovered_AnalysisStatus {
if m != nil {
return m.AnalysisStatus
}
return Discovered_ANALYSIS_STATUS_UNSPECIFIED
}
func (m *Discovered) GetAnalysisStatusError() *status.Status {
if m != nil {
return m.AnalysisStatusError
}
return nil
}
func init() {
proto.RegisterEnum("grafeas.v1.discovery.Discovered_ContinuousAnalysis", Discovered_ContinuousAnalysis_name, Discovered_ContinuousAnalysis_value)
proto.RegisterEnum("grafeas.v1.discovery.Discovered_AnalysisStatus", Discovered_AnalysisStatus_name, Discovered_AnalysisStatus_value)
proto.RegisterType((*Discovery)(nil), "grafeas.v1.discovery.Discovery")
proto.RegisterType((*Details)(nil), "grafeas.v1.discovery.Details")
proto.RegisterType((*Discovered)(nil), "grafeas.v1.discovery.Discovered")
}
func init() { proto.RegisterFile("proto/v1/discovery.proto", fileDescriptor_046437f686d0269e) }
var fileDescriptor_046437f686d0269e = []byte{
// 504 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x93, 0xdf, 0x4e, 0xdb, 0x30,
0x18, 0xc5, 0xd7, 0xd2, 0xc1, 0xf6, 0xc1, 0x4a, 0xe4, 0x16, 0x11, 0x75, 0x17, 0x45, 0xdd, 0xcd,
0xae, 0x1c, 0x01, 0xd2, 0xa4, 0xdd, 0x2d, 0x4b, 0x52, 0xb0, 0x40, 0x6e, 0x15, 0x27, 0xd3, 0xd8,
0x4d, 0x94, 0xa6, 0xa1, 0x8b, 0xd6, 0xc6 | {
return proto.EnumName(Discovered_AnalysisStatus_name, int32(x))
} | identifier_body |
|
word2vec.py | 1
from mixins import BoardRecorderMixin
class DistributedRepresentations:
|
def inspect(self):
rels = np.dot(self.normalized_vecs, self.normalized_vecs.T)
printoptions = np.get_printoptions()
np.set_printoptions(linewidth=200, precision=6)
for word, vec in zip(self.words, rels):
print(f'{word + ":":8s} {vec}')
np.set_printoptions(**printoptions)
def cos_similarity(self, x, y, eps=1e-8):
return np.dot(x, y) / (np.linalg.norm(x) + eps) \
/ (np.linalg.norm(y) + eps)
def words_similarity(self, word1, word2, eps=1e-8):
x, y = [self.vecs[i]
for i in [self.words.index(word) for word in [word1, word2]]]
return self.cos_similarity(x, y, eps=eps)
def most_similar(self, word, top=5):
try:
word_id = self.words.index(word)
except ValueError:
print(f"'{word}' is not found.")
return
print(f'\n[query]: {word}')
word_vec = self.vecs[word_id]
similarity = [[w, self.cos_similarity(word_vec, self.vecs[i])]
for i, w in enumerate(self.words) if i != word_id]
similarity.sort(key=lambda sim: sim[1], reverse=True)
for s in similarity[:top]:
print(f' {s[0]}: {s[1]}')
def analogy(self, a, b, c, top=5, answer=None):
try:
a_vec, b_vec, c_vec = \
self.vecs[[self.words.index(word) for word in (a, b, c)]]
except ValueError as err:
print(err)
return
print(f'{a}:{b} = {c}:?')
query_vec = b_vec - a_vec + c_vec
if answer is not None:
try:
answer_id = self.words.index(answer)
print(
f' ==> {answer}: '
f'{self.cos_similarity(self.vecs[answer_id], query_vec)}'
)
except ValueError as err:
print(err)
similarity = [[w, self.cos_similarity(query_vec, self.vecs[i])]
for i, w in enumerate(self.words)]
similarity.sort(key=lambda sim: sim[1], reverse=True)
count = 0
for s in similarity:
if s[0] not in (a, b, c):
print(f' {s[0]}: {s[1]}')
count += 1
if top <= count:
print()
break
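# Minimal usage sketch, not part of the original module: the toy words and
# vectors below are invented, and only the DistributedRepresentations API
# defined above is assumed.
def _demo_distributed_representations():
    import numpy as np
    words = ['king', 'queen', 'man', 'woman']
    vecs = np.array([[0.9, 0.1], [0.8, 0.3], [0.4, 0.0], [0.3, 0.2]])
    reps = DistributedRepresentations(words, vecs)
    print(reps.words_similarity('king', 'queen'))  # cosine similarity of two words
    reps.most_similar('king', top=2)               # nearest neighbours by cosine
    reps.analogy('man', 'king', 'woman', top=1, answer='queen')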
class Word2Vec(BoardRecorderMixin):
"""Base class for wor2vec.
Attributes:
words (list): List of words
vocab_size (int): Size of `words`
corpus (list): Corpus
word_reps (:obj:`DistributedRepresentations`): Results of the model. You
can reference it after calling the `train` method.
"""
model_file_name = 'model.chpt'
@classmethod
def create_from_text(cls, text):
"""Create Word2Vec instance form text.
Args:
text (str): text to analyze
Returns:
Word2Vec: Instance created
"""
duplicate_words = text.lower().replace('.', ' .').split(' ')
words = list(set(duplicate_words))
corpus = [words.index(word) for word in duplicate_words]
return cls(words, corpus)
def __init__(self, words, corpus):
"""Create Word2Vec instance form corpus.
Args:
words (list): List of words
corpus (list): Corpus
"""
self.words = words
self.vocab_size = len(words)
self.corpus = corpus
counter = Counter(corpus)
self.counts = np.array([counter[i] for i in range(self.vocab_size)])
def get_contexts(self):
return np.r_[
np.array([
self.corpus[i:(i-self.window_size*2 or len(self.corpus))]
for i in range(self.window_size)
]),
np.array([
self.corpus[i:(i-self.window_size*2 or len(self.corpus))]
for i in range(self.window_size+1, self.window_size*2+1)
]),
].T
def get_targets(self):
return np.array(self.corpus[self.window_size:-self.window_size])
@property
def data_size(self):
if not hasattr(self, '_data_size'):
self._data_size = len(self.corpus) - self.window_size * 2
return self._data_size
def build_model_params(self, window_size, hidden_size):
if hasattr(self, '_data_size'):
del self._data_size
self.window_size = window_size
self.hidden_size = hidden_size
self.learning_rate = tfv1.placeholder(tf.float32, name='learning_rate')
self.W_in = tf.Variable(
tf.random.uniform([self.vocab_size, self.hidden_size],
-1.0, 1.0, dtype=tf.float32),
dtype=tf.float32,
name='W_in',
)
self.W_out = tf.Variable(
tf.random.uniform([self.hidden_size, self.vocab_size],
-1.0, 1.0, dtype=tf.float32),
dtype=tf.float32,
name='W_out',
)
def build_graph(self, window_size=1, hidden_size=5,
ns_count=0, ns_exponent=0.75):
"""Build Word2Vec graph.
Args:
window_size (int): Window size
hidden_size (int): Dimension of a vector encoding the words
ns_count (int): Number of samples using negative sampling.
If you specify 0, this object does not use negative sampling
and use softmax function. Default to 0.
ns_exponent (float): Value of exponent to determine probability of
acquiring each vocabulary word using negative sampling. Defaults to
0.75.
"""
raise NotImplementedError
def get_incomes(self):
raise NotImplementedError
def get_labels(self):
raise NotImplementedError
def fetch_batch(self, incomes, labels, epoch_i, batch_i, batch_size):
raise NotImplementedError
def train(self, log_dir=None, max_epoch=10000, learning_rate=0.001,
batch_size=None, interval_sec=300, restore_step=None,
run_metadata=False):
"""Train model.
Args:
log_dir (str): Log directory where logs and the model are saved.
max_epoch (int): Size of epoch
learning_rate (float): Learning rate
batch_size (int): Batch size when using mini-batch descent method.
If specifying a size larger than the learning data or `None`,
using batch descent.
interval_sec (float): Specify the logging time interval in seconds.
Defaults to 300.
restore_step (int): When you specify this argument, this mixin
restores the model for the specified step.
run_metadata (bool): If true, run metadata and write logs.
"""
if log_dir is None:
log_dir = os.path.join(os.path.dirname(__file__),
'tf_logs',
datetime.utcnow().strftime('%Y%m%d%H%M%S'))
if batch_size is None:
n_batches = 1
else:
n_batches = int(np.ceil(self.data_size / batch_size))
if run_metadata:
options = tfv1.RunOptions(trace_level=tfv1.RunOptions.FULL_TRACE)
metadata = tfv1.RunMetadata()
else:
options = None
metadata = None
with self.open_writer(log_dir) as writer:
with self.open_session(interval_sec=interval_sec,
per_step=n_batches,
restore_step=restore_step) as sess:
incomes = self.get_incomes()
labels = self.get_labels()
self.word_reps = DistributedRepresentations(
self.words,
sess.run(self.W_in))
step = restore_step or 0
if restore_step is None:
writer.add_summary(
self.los_summary.eval(
feed_dict={self.incomes: incomes[:batch_size],
self.labels: labels[:batch_size]},
),
step,
)
for epoch_i in range(step // self.data_size, max_epoch):
for batch_i in range(n_batches):
c, l, b = self.fetch_batch(incomes, labels,
epoch_i, batch_i,
batch_size)
fd = {
self.incomes: c,
self.labels: l,
self.learning_rate: learning_rate,
}
sess.run(self.training_op,
feed_dict=fd,
options=options,
| """Distributed Represendations of the words.
Args:
words (list): List of words
vectors (numpy.array): Vectors encoded words
Attributes:
vecs (numpy.array): Vectors encoded words
words (list): List of words
"""
def __init__(self, words, vectors):
self.words = words
self.vecs = vectors
@property
def normalized_vecs(self):
return self.vecs / \
np.linalg.norm(self.vecs, axis=1).reshape(
[self.vecs.shape[0], 1],
) | identifier_body |
word2vec.py | 1
from mixins import BoardRecorderMixin
class DistributedRepresentations:
"""Distributed Represendations of the words.
Args:
words (list): List of words
vectors (numpy.array): Vectors encoded words
Attributes:
vecs (numpy.array): Vectors encoded words
words (list): List of words
"""
def __init__(self, words, vectors):
self.words = words
self.vecs = vectors
@property
def normalized_vecs(self):
return self.vecs / \
np.linalg.norm(self.vecs, axis=1).reshape(
[self.vecs.shape[0], 1],
)
def inspect(self):
rels = np.dot(self.normalized_vecs, self.normalized_vecs.T)
printoptions = np.get_printoptions()
np.set_printoptions(linewidth=200, precision=6)
for word, vec in zip(self.words, rels):
print(f'{word + ":":8s} {vec}')
np.set_printoptions(**printoptions)
def cos_similarity(self, x, y, eps=1e-8):
return np.dot(x, y) / (np.linalg.norm(x) + eps) \
/ (np.linalg.norm(y) + eps)
def words_similarity(self, word1, word2, eps=1e-8):
x, y = [self.vecs[i]
for i in [self.words.index(word) for word in [word1, word2]]]
return self.cos_similarity(x, y, eps=eps)
def most_similar(self, word, top=5):
try:
word_id = self.words.index(word)
except ValueError:
print(f"'{word}' is not found.")
return
print(f'\n[query]: {word}')
word_vec = self.vecs[word_id]
similarity = [[w, self.cos_similarity(word_vec, self.vecs[i])]
for i, w in enumerate(self.words) if i != word_id]
similarity.sort(key=lambda sim: sim[1], reverse=True)
for s in similarity[:top]:
print(f' {s[0]}: {s[1]}')
def analogy(self, a, b, c, top=5, answer=None):
try:
a_vec, b_vec, c_vec = \
self.vecs[[self.words.index(word) for word in (a, b, c)]]
except ValueError as err:
print(err)
return
print(f'{a}:{b} = {c}:?')
query_vec = b_vec - a_vec + c_vec
if answer is not None:
try:
answer_id = self.words.index(answer)
print(
f' ==> {answer}: '
f'{self.cos_similarity(self.vecs[answer_id], query_vec)}'
)
except ValueError as err:
print(err)
similarity = [[w, self.cos_similarity(query_vec, self.vecs[i])]
for i, w in enumerate(self.words)]
similarity.sort(key=lambda sim: sim[1], reverse=True)
count = 0
for s in similarity:
if s[0] not in (a, b, c):
print(f' {s[0]}: {s[1]}')
count += 1
if top <= count:
print()
break
class Word2Vec(BoardRecorderMixin):
"""Base class for wor2vec.
Attributes:
words (list): List of words
vocab_size (int): Size of `words`
corpus (list): Corpus
word_reps (:obj:`DistributedRepresentations`): Results of the model. You
can reference it after calling the `train` method.
"""
model_file_name = 'model.chpt'
@classmethod
def create_from_text(cls, text):
"""Create Word2Vec instance form text.
Args:
text (str): text to analyze
Returns:
Word2Vec: Instance created
"""
duplicate_words = text.lower().replace('.', ' .').split(' ')
words = list(set(duplicate_words))
corpus = [words.index(word) for word in duplicate_words]
return cls(words, corpus)
def __init__(self, words, corpus):
"""Create Word2Vec instance form corpus.
Args:
words (list): List of words
corpus (list): Corpus
"""
self.words = words
self.vocab_size = len(words)
self.corpus = corpus
counter = Counter(corpus)
self.counts = np.array([counter[i] for i in range(self.vocab_size)])
def get_contexts(self):
return np.r_[
np.array([
self.corpus[i:(i-self.window_size*2 or len(self.corpus))]
for i in range(self.window_size)
]),
np.array([
self.corpus[i:(i-self.window_size*2 or len(self.corpus))]
for i in range(self.window_size+1, self.window_size*2+1)
]),
].T
def get_targets(self):
return np.array(self.corpus[self.window_size:-self.window_size])
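# Illustrative example (not in the original file): with corpus = [0, 1, 2, 3, 4]
# and window_size = 1, get_contexts() returns [[0, 2], [1, 3], [2, 4]] and
# get_targets() returns [1, 2, 3] -- each target paired with its surrounding words.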
@property
def data_size(self):
if not hasattr(self, '_data_size'):
self._data_size = len(self.corpus) - self.window_size * 2
return self._data_size
def build_model_params(self, window_size, hidden_size):
if hasattr(self, '_data_size'):
del self._data_size
self.window_size = window_size
self.hidden_size = hidden_size
self.learning_rate = tfv1.placeholder(tf.float32, name='learning_rate')
self.W_in = tf.Variable(
tf.random.uniform([self.vocab_size, self.hidden_size],
-1.0, 1.0, dtype=tf.float32),
dtype=tf.float32,
name='W_in',
)
self.W_out = tf.Variable(
tf.random.uniform([self.hidden_size, self.vocab_size],
-1.0, 1.0, dtype=tf.float32),
dtype=tf.float32,
name='W_out',
)
def build_graph(self, window_size=1, hidden_size=5,
ns_count=0, ns_exponent=0.75):
"""Build Word2Vec graph.
Args:
window_size (int): Window size
hidden_size (int): Dimension of a vector encoding the words
ns_count (int): Number of samples using negative sampling.
If you specify 0, this object does not use negative sampling
and use softmax function. Default to 0.
ns_exponent (float): Value of exponent to determine probability of
acquiring each vocabulary word using negative sampling. Defaults to
0.75.
"""
raise NotImplementedError
def | (self):
raise NotImplementedError
def get_labels(self):
raise NotImplementedError
def fetch_batch(self, incomes, labels, epoch_i, batch_i, batch_size):
raise NotImplementedError
def train(self, log_dir=None, max_epoch=10000, learning_rate=0.001,
batch_size=None, interval_sec=300, restore_step=None,
run_metadata=False):
"""Train model.
Args:
log_dir (str): Log directory where logs and the model are saved.
max_epoch (int): Size of epoch
learning_rate (float): Learning rate
batch_size (int): Batch size when using mini-batch descent method.
If specifying a size larger than the learning data or `None`,
using batch descent.
interval_sec (float): Specify the logging time interval in seconds.
Defaults to 300.
restore_step (int): When you specify this argument, this mixin
restores the model for the specified step.
run_metadata (bool): If true, run metadata and write logs.
"""
if log_dir is None:
log_dir = os.path.join(os.path.dirname(__file__),
'tf_logs',
datetime.utcnow().strftime('%Y%m%d%H%M%S'))
if batch_size is None:
n_batches = 1
else:
n_batches = int(np.ceil(self.data_size / batch_size))
if run_metadata:
options = tfv1.RunOptions(trace_level=tfv1.RunOptions.FULL_TRACE)
metadata = tfv1.RunMetadata()
else:
options = None
metadata = None
with self.open_writer(log_dir) as writer:
with self.open_session(interval_sec=interval_sec,
per_step=n_batches,
restore_step=restore_step) as sess:
incomes = self.get_incomes()
labels = self.get_labels()
self.word_reps = DistributedRepresentations(
self.words,
sess.run(self.W_in))
step = restore_step or 0
if restore_step is None:
writer.add_summary(
self.los_summary.eval(
feed_dict={self.incomes: incomes[:batch_size],
self.labels: labels[:batch_size]},
),
step,
)
for epoch_i in range(step // self.data_size, max_epoch):
for batch_i in range(n_batches):
c, l, b = self.fetch_batch(incomes, labels,
epoch_i, batch_i,
batch_size)
fd = {
self.incomes: c,
self.labels: l,
self.learning_rate: learning_rate,
}
sess.run(self.training_op,
feed_dict=fd,
options=options,
| get_incomes | identifier_name |
word2vec.py | the words.
Args:
words (list): List of words
vectors (numpy.array): Vectors encoded words
Attributes:
vecs (numpy.array): Vectors encoded words
words (list): List of words
"""
def __init__(self, words, vectors):
self.words = words
self.vecs = vectors
@property
def normalized_vecs(self):
return self.vecs / \
np.linalg.norm(self.vecs, axis=1).reshape(
[self.vecs.shape[0], 1],
)
def inspect(self):
rels = np.dot(self.normalized_vecs, self.normalized_vecs.T)
printoptions = np.get_printoptions()
np.set_printoptions(linewidth=200, precision=6)
for word, vec in zip(self.words, rels):
print(f'{word + ":":8s} {vec}')
np.set_printoptions(**printoptions)
def cos_similarity(self, x, y, eps=1e-8):
return np.dot(x, y) / (np.linalg.norm(x) + eps) \
/ (np.linalg.norm(y) + eps)
def words_similarity(self, word1, word2, eps=1e-8):
x, y = [self.vecs[i]
for i in [self.words.index(word) for word in [word1, word2]]]
return self.cos_similarity(x, y, eps=eps)
def most_similar(self, word, top=5):
try:
word_id = self.words.index(word)
except ValueError:
print(f"'{word}' is not found.")
return
print(f'\n[query]: {word}')
word_vec = self.vecs[word_id]
similarity = [[w, self.cos_similarity(word_vec, self.vecs[i])]
for i, w in enumerate(self.words) if i != word_id]
similarity.sort(key=lambda sim: sim[1], reverse=True)
for s in similarity[:top]:
print(f' {s[0]}: {s[1]}')
def analogy(self, a, b, c, top=5, answer=None):
try:
a_vec, b_vec, c_vec = \
self.vecs[[self.words.index(word) for word in (a, b, c)]]
except ValueError as err:
print(err)
return
print(f'{a}:{b} = {c}:?')
query_vec = b_vec - a_vec + c_vec
if answer is not None:
try:
answer_id = self.words.index(answer)
print(
f' ==> {answer}: '
f'{self.cos_similarity(self.vecs[answer_id], query_vec)}'
)
except ValueError as err:
print(err)
similarity = [[w, self.cos_similarity(query_vec, self.vecs[i])]
for i, w in enumerate(self.words)]
similarity.sort(key=lambda sim: sim[1], reverse=True)
count = 0
for s in similarity:
if s[0] not in (a, b, c):
print(f' {s[0]}: {s[1]}')
count += 1
if top <= count:
print()
break
class Word2Vec(BoardRecorderMixin):
"""Base class for wor2vec.
Attributes:
words (list): List of words
vocab_size (int): Size of `words`
corpus (list): Corpus
word_reps (:obj:`DistributedRepresentations`): Results of the model. You
can reference it after calling the `train` method.
"""
model_file_name = 'model.chpt'
@classmethod
def create_from_text(cls, text):
"""Create Word2Vec instance form text.
Args:
text (str): text to analyze
Returns:
Word2Vec: Instance created
"""
duplicate_words = text.lower().replace('.', ' .').split(' ')
words = list(set(duplicate_words))
corpus = [words.index(word) for word in duplicate_words]
return cls(words, corpus)
def __init__(self, words, corpus):
"""Create Word2Vec instance form corpus.
Args:
words (list): List of words
corpus (list): Corpus
"""
self.words = words
self.vocab_size = len(words)
self.corpus = corpus
counter = Counter(corpus)
self.counts = np.array([counter[i] for i in range(self.vocab_size)])
def get_contexts(self):
return np.r_[
np.array([
self.corpus[i:(i-self.window_size*2 or len(self.corpus))]
for i in range(self.window_size)
]),
np.array([
self.corpus[i:(i-self.window_size*2 or len(self.corpus))]
for i in range(self.window_size+1, self.window_size*2+1)
]),
].T
def get_targets(self):
return np.array(self.corpus[self.window_size:-self.window_size])
@property
def data_size(self):
if not hasattr(self, '_data_size'):
self._data_size = len(self.corpus) - self.window_size * 2
return self._data_size
def build_model_params(self, window_size, hidden_size):
if hasattr(self, '_data_size'):
del self._data_size
self.window_size = window_size
self.hidden_size = hidden_size
self.learning_rate = tfv1.placeholder(tf.float32, name='learning_rate')
self.W_in = tf.Variable(
tf.random.uniform([self.vocab_size, self.hidden_size],
-1.0, 1.0, dtype=tf.float32),
dtype=tf.float32,
name='W_in',
)
self.W_out = tf.Variable(
tf.random.uniform([self.hidden_size, self.vocab_size],
-1.0, 1.0, dtype=tf.float32),
dtype=tf.float32,
name='W_out',
)
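# Added note: W_in has shape [vocab_size, hidden_size] and W_out has shape
# [hidden_size, vocab_size]; after training, the rows of W_in are used as the
# word vectors handed to DistributedRepresentations in `train`.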
def build_graph(self, window_size=1, hidden_size=5,
ns_count=0, ns_exponent=0.75):
"""Build Word2Vec graph.
Args:
window_size (int): Window size
hidden_size (int): Dimension of a vector encoding the words
ns_count (int): Number of samples drawn for negative sampling.
If you specify 0, this object does not use negative sampling
and uses the softmax function instead. Defaults to 0.
ns_exponent (float): Exponent used to determine the probability of
sampling each vocabulary word with negative sampling. Defaults to
0.75.
"""
raise NotImplementedError
def get_incomes(self):
raise NotImplementedError
def get_labels(self):
raise NotImplementedError
def fetch_batch(self, incomes, labels, epoch_i, batch_i, batch_size):
raise NotImplementedError
def train(self, log_dir=None, max_epoch=10000, learning_rate=0.001,
batch_size=None, interval_sec=300, restore_step=None,
run_metadata=False):
"""Train model.
Args:
log_dir (str): Log directory where logs and the model are saved.
max_epoch (int): Maximum number of epochs
learning_rate (float): Learning rate
batch_size (int): Batch size when using the mini-batch descent method.
If you specify a size larger than the training data, or `None`,
full-batch descent is used.
interval_sec (float): Logging time interval in seconds.
Defaults to 300.
restore_step (int): When you specify this argument, this mixin
restores the model at the specified step.
run_metadata (bool): If true, collect run metadata and write it to the logs.
"""
if log_dir is None:
log_dir = os.path.join(os.path.dirname(__file__),
'tf_logs',
datetime.utcnow().strftime('%Y%m%d%H%M%S'))
if batch_size is None:
n_batches = 1
else:
n_batches = int(np.ceil(self.data_size / batch_size))
if run_metadata:
options = tfv1.RunOptions(trace_level=tfv1.RunOptions.FULL_TRACE)
metadata = tfv1.RunMetadata()
else:
options = None
metadata = None
with self.open_writer(log_dir) as writer:
with self.open_session(interval_sec=interval_sec,
per_step=n_batches,
restore_step=restore_step) as sess:
incomes = self.get_incomes()
labels = self.get_labels()
self.word_reps = DistributedRepresentations(
self.words,
sess.run(self.W_in))
step = restore_step or 0
if restore_step is None:
writer.add_summary(
self.los_summary.eval(
feed_dict={self.incomes: incomes[:batch_size],
self.labels: labels[:batch_size]},
),
step,
)
for epoch_i in range(step // self.data_size, max_epoch):
for batch_i in range(n_batches):
c, l, b = self.fetch_batch(incomes, labels,
epoch_i, batch_i,
batch_size)
fd = {
self.incomes: c,
self.labels: l,
self.learning_rate: learning_rate,
}
sess.run(self.training_op,
feed_dict=fd,
options=options,
run_metadata=metadata)
if run_metadata:
| writer.add_run_metadata(metadata, f'step: {step}') | conditional_block |
|
word2vec.py | 1
from mixins import BoardRecorderMixin
class DistributedRepresentations:
"""Distributed Represendations of the words.
Args:
words (list): List of words
vectors (numpy.array): Vectors encoded words
Attributes:
vecs (numpy.array): Vectors encoded words
words (list): List of words
"""
def __init__(self, words, vectors):
self.words = words
self.vecs = vectors
@property
def normalized_vecs(self):
return self.vecs / \
np.linalg.norm(self.vecs, axis=1).reshape(
[self.vecs.shape[0], 1],
)
def inspect(self):
rels = np.dot(self.normalized_vecs, self.normalized_vecs.T)
printoptions = np.get_printoptions()
np.set_printoptions(linewidth=200, precision=6)
for word, vec in zip(self.words, rels):
print(f'{word + ":":8s} {vec}')
np.set_printoptions(**printoptions)
def cos_similarity(self, x, y, eps=1e-8):
return np.dot(x, y) / (np.linalg.norm(x) + eps) \
/ (np.linalg.norm(y) + eps)
def words_similarity(self, word1, word2, eps=1e-8):
x, y = [self.vecs[i]
for i in [self.words.index(word) for word in [word1, word2]]]
return self.cos_similarity(x, y, eps=eps)
def most_similar(self, word, top=5):
try:
word_id = self.words.index(word)
except ValueError:
print(f"'{word}' is not found.")
return
print(f'\n[query]: {word}')
word_vec = self.vecs[word_id]
similarity = [[w, self.cos_similarity(word_vec, self.vecs[i])]
for i, w in enumerate(self.words) if i != word_id]
similarity.sort(key=lambda sim: sim[1], reverse=True)
for s in similarity[:top]:
print(f' {s[0]}: {s[1]}')
def analogy(self, a, b, c, top=5, answer=None):
try:
a_vec, b_vec, c_vec = \
self.vecs[[self.words.index(word) for word in (a, b, c)]]
except ValueError as err:
print(err)
return
print(f'{a}:{b} = {c}:?')
query_vec = b_vec - a_vec + c_vec
if answer is not None:
try:
answer_id = self.words.index(answer)
print(
f' ==> {answer}: '
f'{self.cos_similarity(self.vecs[answer_id], query_vec)}'
)
except ValueError as err:
print(err)
similarity = [[w, self.cos_similarity(query_vec, self.vecs[i])]
for i, w in enumerate(self.words)]
similarity.sort(key=lambda sim: sim[1], reverse=True)
count = 0
for s in similarity:
if s[0] not in (a, b, c):
print(f' {s[0]}: {s[1]}')
count += 1
if top <= count:
print()
break
class Word2Vec(BoardRecorderMixin):
"""Base class for wor2vec.
Attributes:
words (list): List of words
vocab_size (int): Size of `words`
corpus (list): Corpus
word_reps (:obj:`DistributedRepresentations`): Results of the model. You can
reference it after calling the `train` method.
"""
model_file_name = 'model.chpt'
@classmethod
def create_from_text(cls, text):
"""Create Word2Vec instance form text.
Args:
text (str): text to analyze
Returns:
Word2Vec: Instance created
"""
duplicate_words = text.lower().replace('.', ' .').split(' ')
words = list(set(duplicate_words))
corpus = [words.index(word) for word in duplicate_words]
return cls(words, corpus)
def __init__(self, words, corpus):
"""Create Word2Vec instance form corpus.
Args:
words (list): List of words
corpus (list): Corpus
"""
self.words = words
self.vocab_size = len(words)
self.corpus = corpus
counter = Counter(corpus)
self.counts = np.array([counter[i] for i in range(self.vocab_size)])
def get_contexts(self):
return np.r_[
np.array([
self.corpus[i:(i-self.window_size*2 or len(self.corpus))]
for i in range(self.window_size)
]),
np.array([
self.corpus[i:(i-self.window_size*2 or len(self.corpus))]
for i in range(self.window_size+1, self.window_size*2+1)
]),
].T
def get_targets(self):
return np.array(self.corpus[self.window_size:-self.window_size])
@property
def data_size(self):
if not hasattr(self, '_data_size'):
self._data_size = len(self.corpus) - self.window_size * 2
return self._data_size
def build_model_params(self, window_size, hidden_size):
if hasattr(self, '_data_size'):
del self._data_size
self.window_size = window_size
self.hidden_size = hidden_size
self.learning_rate = tfv1.placeholder(tf.float32, name='learning_rate')
self.W_in = tf.Variable(
tf.random.uniform([self.vocab_size, self.hidden_size],
-1.0, 1.0, dtype=tf.float32),
dtype=tf.float32,
name='W_in',
)
self.W_out = tf.Variable(
tf.random.uniform([self.hidden_size, self.vocab_size],
-1.0, 1.0, dtype=tf.float32),
dtype=tf.float32,
name='W_out',
)
def build_graph(self, window_size=1, hidden_size=5,
ns_count=0, ns_exponent=0.75):
"""Build Word2Vec graph.
Args:
window_size (int): Window size
hidden_size (int): Dimension of a vector encoding the words
ns_count (int): Number of samples drawn for negative sampling.
If you specify 0, this object does not use negative sampling
and uses the softmax function instead. Defaults to 0.
ns_exponent (float): Exponent used to determine the probability of
sampling each vocabulary word with negative sampling. Defaults to
0.75.
"""
raise NotImplementedError
def get_incomes(self):
raise NotImplementedError
def get_labels(self):
raise NotImplementedError
def fetch_batch(self, incomes, labels, epoch_i, batch_i, batch_size):
raise NotImplementedError
def train(self, log_dir=None, max_epoch=10000, learning_rate=0.001,
batch_size=None, interval_sec=300, restore_step=None,
run_metadata=False):
"""Train model.
Args:
log_dir (str): Log directory where logs and the model are saved.
max_epoch (int): Maximum number of epochs
learning_rate (float): Learning rate
batch_size (int): Batch size when using the mini-batch descent method.
If you specify a size larger than the training data, or `None`,
full-batch descent is used.
interval_sec (float): Logging time interval in seconds.
Defaults to 300.
restore_step (int): When you specify this argument, this mixin
restores the model at the specified step.
run_metadata (bool): If true, collect run metadata and write it to the logs.
"""
if log_dir is None:
log_dir = os.path.join(os.path.dirname(__file__),
'tf_logs',
datetime.utcnow().strftime('%Y%m%d%H%M%S'))
if batch_size is None:
n_batches = 1
else:
n_batches = int(np.ceil(self.data_size / batch_size))
if run_metadata:
options = tfv1.RunOptions(trace_level=tfv1.RunOptions.FULL_TRACE)
metadata = tfv1.RunMetadata()
else:
options = None
metadata = None
with self.open_writer(log_dir) as writer:
with self.open_session(interval_sec=interval_sec,
per_step=n_batches,
restore_step=restore_step) as sess:
incomes = self.get_incomes()
labels = self.get_labels()
self.word_reps = DistributedRepresentations(
self.words,
sess.run(self.W_in))
step = restore_step or 0
if restore_step is None:
writer.add_summary(
self.los_summary.eval(
feed_dict={self.incomes: incomes[:batch_size],
self.labels: labels[:batch_size]},
),
step,
)
for epoch_i in range(step // self.data_size, max_epoch):
for batch_i in range(n_batches):
c, l, b = self.fetch_batch(incomes, labels,
epoch_i, batch_i,
batch_size)
fd = {
self.incomes: c,
self.labels: l,
self.learning_rate: learning_rate, | feed_dict=fd,
options=options,
run | }
sess.run(self.training_op, | random_line_split |
elasticsearch_adapter.js | Successfully removed indices: "${indicesToRemove}"`);
} catch (err) {
Services.logger.warning(`Error when trying to remove indices: ${err}`);
}
}
}
/**
*
* @param {FilterBuilder} builder
* @return {Object} The result of <code>builder.build()</code> but with a few translations for ES
*/
getQueryObject (builder) {
const translationMappings = {
is: 'term',
not: 'not',
exists: 'exists',
range: 'range',
in_array: 'terms',
like: 'regexp'
};
function Translate (node) {
node.children.forEach(child => {
if (child instanceof BuilderNode) {
Translate(child);
} else {
let replaced = Object.keys(child)[0];
if (translationMappings[replaced]) {
// 'not' contains a filter name
if (replaced === 'not') {
let secondReplaced = Object.keys(child[replaced])[0];
if (translationMappings[secondReplaced] !== secondReplaced) {
child[replaced][translationMappings[secondReplaced]] = cloneObject(child[replaced][secondReplaced]);
delete child[replaced][secondReplaced];
}
} else if (replaced === 'like') {
child[translationMappings[replaced]] = cloneObject(child[replaced]);
let fieldObj = {};
Object.keys(child[translationMappings[replaced]]).forEach(field => {
fieldObj[field] = `.*${escapeRegExp(child[translationMappings[replaced]][field])}.*`;
});
child[translationMappings[replaced]] = fieldObj;
delete child[replaced];
} else if (translationMappings[replaced] !== replaced) {
child[translationMappings[replaced]] = cloneObject(child[replaced]);
delete child[replaced];
}
}
}
});
}
Translate(builder.root);
return builder.build();
}
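// Added illustrative sketch (the filter shapes are assumed from the translation
// mapping above): a builder whose leaf filters look like { is: { name: 'john' } }
// or { like: { email: 'gmail' } } would be rewritten in place to
// { term: { name: 'john' } } and { regexp: { email: '.*gmail.*' } } before
// builder.build() assembles the final Elasticsearch query body.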
async getObjects (items) {
if (!Array.isArray(items) || items.length === 0) {
throw new NexxusError(NexxusError.errors.InvalidFieldValue, 'ElasticSearchDB.getObjects: "ids" should be a non-empty array');
}
const docs = items.map(object => {
let index;
switch (object.type) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${object.type}`;
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${object.application_id}-${object.type}`;
}
}
return {
_id: object.id,
_index: index
};
}, this);
const results = await this.connection.mget({
body: {
docs
}
});
let errors = [];
let objects = [];
let versions = new Map();
results.docs.forEach(result => {
if (result.found) {
objects.push(result._source);
versions.set(result._id, result._version);
} else {
errors.push(new NexxusError(NexxusError.errors.ObjectNotFound, [result._id]));
}
});
return {errors, results: objects, versions};
}
async searchObjects (options) {
let index;
const reqBody = {
query: {
filtered: {
filter: {}
}
}
};
switch (options.modelName) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${options.modelName}`;
break;
}
default: {
if (Array.isArray(options.modelName)) {
index = options.modelName.map(model => {
return `${constants.CHANNEL_KEY_PREFIX}-${options.applicationId}-${model}`;
}).join(',');
} else {
index = `${constants.CHANNEL_KEY_PREFIX}-${options.applicationId}-${options.modelName}`;
}
}
}
if (options.filters && !options.filters.isEmpty()) {
reqBody.query = this.getQueryObject(options.filters);
} else {
reqBody.query = {match_all: {}};
}
if (options.fields) {
if (!(options.scanFunction instanceof Function)) {
throw new NexxusError(NexxusError.errors.ServerFailure, ['searchObjects was provided with fields but no scanFunction']);
}
let hitsCollected = 0;
let response = await this.connection.search({
index,
body: reqBody,
scroll: '10s',
fields: options.fields,
size: 1024
});
do {
let objects = [];
hitsCollected += response.hits.hits.length;
response.hits.hits.forEach(hit => {
let obj = {};
for (const f in hit.fields) {
obj[f] = hit.fields[f][0];
}
objects.push(obj);
});
if (response.hits.hits.length) {
await options.scanFunction(objects);
}
response = await this.connection.scroll({
scrollId: response._scroll_id,
scroll: '10s'
});
} while (response.hits.total !== hitsCollected);
return null;
}
if (options.sort) {
reqBody.sort = [];
Object.keys(options.sort).forEach(field => {
let sortObjectField = {};
if (!options.sort[field].type) {
sortObjectField[field] = { order: options.sort[field].order, unmapped_type: 'long' };
} else if (options.sort[field].type === 'geo') {
sortObjectField._geo_distance = {};
sortObjectField._geo_distance[field] = { lat: options.sort[field].poi.lat || 0.0, lon: options.sort[field].poi.long || 0.0 };
sortObjectField._geo_distance.order = options.sort[field].order;
}
reqBody.sort.push(sortObjectField);
});
}
const results = await this.connection.search({
index,
body: reqBody,
from: options.offset,
size: options.limit
});
return {results: results.hits.hits.map(object => object._source)};
}
async countObjects (modelName, options) {
let index;
let reqBody = {
query: {
filtered: {
filter: {}
}
}
};
switch (modelName) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${modelName}`;
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${options.applicationId}-${modelName}`;
}
}
if (options.filters && !options.filters.isEmpty()) {
reqBody.query.filtered.filter = this.getQueryObject(options.filters);
}
if (options.aggregation) {
reqBody.aggs = { aggregation: options.aggregation };
const result = await this.connection.search({
index,
body: reqBody,
search_type: 'count',
queryCache: true
});
const countResult = { count: result.hits.total };
countResult.aggregation = result.aggregations.aggregation.value;
return countResult;
}
const result = await this.connection.count({
index,
body: reqBody
});
return { count: result.count };
}
async createObjects (objects) {
if (!Array.isArray(objects) || objects.length === 0) {
throw new NexxusError('InvalidFieldValue', ['ElasticSearchDB.createObjects: "objects" should be a non-empty array']);
}
let shouldRefresh = false;
let bulk = [];
let errors = [];
await objects.reduce(async (promise, obj) => {
await promise;
let index;
switch (obj.type) {
case 'admin':
case 'application': {
index = `${constants.CHANNEL_KEY_PREFIX}-${obj.type}`;
shouldRefresh = true;
if (obj.schema) {
await Object.keys(obj.schema).reduce(async (p, modelName) => {
await p;
return this.connection.indices.create({
index: `${constants.CHANNEL_KEY_PREFIX}-${obj.id}-${modelName}`
});
}, Promise.resolve());
}
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${obj.applicationId}-${obj.type}`;
}
}
bulk.push({ index: { _id: obj.id, _index: index, _type: '_doc' } });
bulk.push(obj);
return Promise.resolve();
}, Promise.resolve());
if (bulk.length !== objects.length * 2) {
Services.logger.warning(`ElasticSearchDB.createObjects: some objects were missing their "type" and "id" (${(objects.length - bulk.length / 2)} failed)`);
}
if (!bulk.length) {
return null;
}
const res = await this.connection.bulk({
body: bulk,
refresh: shouldRefresh
});
if (res.errors) | {
res.items.forEach(error => {
errors.push(new NexxusError('ServerFailure', `Error creating ${error.index._type}: ${error.index.error}`));
});
} | conditional_block |
|
elasticsearch_adapter.js | ;
this[tryConnectionMethod]();
}
[tryConnectionMethod] () {
let error = false;
async.doWhilst(callback => {
this.connection.ping({}, (err, res) => {
if (!err) {
Services.logger.info('Connected to ElasticSearch MainDatabase');
this.connected = true;
return setImmediate(callback);
}
if (err.message === 'No Living connections') {
Services.logger.error(`Failed connecting to Elasticsearch "${this.config.host || this.config.hosts.join(', ')}": ${err.message}. Retrying...`);
setTimeout(callback, 2000);
} else if (err.message.startsWith('Request Timeout')) {
Services.logger.error(`Failed connecting to Elasticsearch "${this.config.host || this.config.hosts.join(', ')}": ${err.message}. Retrying...`);
setTimeout(callback, 2000);
} else {
error = err;
Services.logger.emergency(`Connection to ElasticSearch failed: ${err.message}`);
setImmediate(callback);
}
return null;
});
}, () => this.connected === false && error === false, () => {
if (error) {
this.emit('error', error);
} else {
if (this.reconnecting === true) {
this.emit('reconnected');
} else {
this.emit('ready');
}
this.reconnecting = false;
}
});
}
async [processSchemaModificationMethod] (applicationId, modifications) |
if (modifications.deleted.schema) {
const removedModels = Object.keys(modifications.deleted.schema);
const indicesToRemove = removedModels.map(modelName => `${constants.CHANNEL_KEY_PREFIX}-${applicationId}-${modelName}`);
try {
await this.connection.indices.delete({
index: indicesToRemove
});
Services.logger.debug(`Successfully removed indices: "${indicesToRemove}"`);
} catch (err) {
Services.logger.warning(`Error when trying to remove indices: ${err}`);
}
}
}
/**
*
* @param {FilterBuilder} builder
* @return {Object} The result of <code>builder.build()</code> but with a few translations for ES
*/
getQueryObject (builder) {
const translationMappings = {
is: 'term',
not: 'not',
exists: 'exists',
range: 'range',
in_array: 'terms',
like: 'regexp'
};
function Translate (node) {
node.children.forEach(child => {
if (child instanceof BuilderNode) {
Translate(child);
} else {
let replaced = Object.keys(child)[0];
if (translationMappings[replaced]) {
// 'not' contains a filter name
if (replaced === 'not') {
let secondReplaced = Object.keys(child[replaced])[0];
if (translationMappings[secondReplaced] !== secondReplaced) {
child[replaced][translationMappings[secondReplaced]] = cloneObject(child[replaced][secondReplaced]);
delete child[replaced][secondReplaced];
}
} else if (replaced === 'like') {
child[translationMappings[replaced]] = cloneObject(child[replaced]);
let fieldObj = {};
Object.keys(child[translationMappings[replaced]]).forEach(field => {
fieldObj[field] = `.*${escapeRegExp(child[translationMappings[replaced]][field])}.*`;
});
child[translationMappings[replaced]] = fieldObj;
delete child[replaced];
} else if (translationMappings[replaced] !== replaced) {
child[translationMappings[replaced]] = cloneObject(child[replaced]);
delete child[replaced];
}
}
}
});
}
Translate(builder.root);
return builder.build();
}
async getObjects (items) {
if (!Array.isArray(items) || items.length === 0) {
throw new NexxusError(NexxusError.errors.InvalidFieldValue, 'ElasticSearchDB.getObjects: "ids" should be a non-empty array');
}
const docs = items.map(object => {
let index;
switch (object.type) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${object.type}`;
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${object.application_id}-${object.type}`;
}
}
return {
_id: object.id,
_index: index
};
}, this);
const results = await this.connection.mget({
body: {
docs
}
});
let errors = [];
let objects = [];
let versions = new Map();
results.docs.forEach(result => {
if (result.found) {
objects.push(result._source);
versions.set(result._id, result._version);
} else {
errors.push(new NexxusError(NexxusError.errors.ObjectNotFound, [result._id]));
}
});
return {errors, results: objects, versions};
}
async searchObjects (options) {
let index;
const reqBody = {
query: {
filtered: {
filter: {}
}
}
};
switch (options.modelName) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${options.modelName}`;
break;
}
default: {
if (Array.isArray(options.modelName)) {
index = options.modelName.map(model => {
return `${constants.CHANNEL_KEY_PREFIX}-${options.applicationId}-${model}`;
}).join(',');
} else {
index = `${constants.CHANNEL_KEY_PREFIX}-${options.applicationId}-${options.modelName}`;
}
}
}
if (options.filters && !options.filters.isEmpty()) {
reqBody.query = this.getQueryObject(options.filters);
} else {
reqBody.query = {match_all: {}};
}
if (options.fields) {
if (!(options.scanFunction instanceof Function)) {
throw new NexxusError(NexxusError.errors.ServerFailure, ['searchObjects was provided with fields but no scanFunction']);
}
let hitsCollected = 0;
let response = await this.connection.search({
index,
body: reqBody,
scroll: '10s',
fields: options.fields,
size: 1024
});
do {
let objects = [];
hitsCollected += response.hits.hits.length;
response.hits.hits.forEach(hit => {
let obj = {};
for (const f in hit.fields) {
obj[f] = hit.fields[f][0];
}
objects.push(obj);
});
if (response.hits.hits.length) {
await options.scanFunction(objects);
}
response = await this.connection.scroll({
scrollId: response._scroll_id,
scroll: '10s'
});
} while (response.hits.total !== hitsCollected);
return null;
}
if (options.sort) {
reqBody.sort = [];
Object.keys(options.sort).forEach(field => {
let sortObjectField = {};
if (!options.sort[field].type) {
sortObjectField[field] = { order: options.sort[field].order, unmapped_type: 'long' };
} else if (options.sort[field].type === 'geo') {
sortObjectField._geo_distance = {};
sortObjectField._geo_distance[field] = { lat: options.sort[field].poi.lat || 0.0, lon: options.sort[field].poi.long || 0.0 };
sortObjectField._geo_distance.order = options.sort[field].order;
}
reqBody.sort.push(sortObjectField);
});
}
const results = await this.connection.search({
index,
body: reqBody,
from: options.offset,
size: options.limit
});
return {results: results.hits.hits.map(object => object._source)};
}
async countObjects (modelName, options) {
let index;
let reqBody = {
query: {
filtered: {
filter: {}
}
}
};
switch (modelName) {
case 'application':
case 'admin': {
index = `${constants.CHANNEL_KEY_PREFIX}-${modelName}`;
break;
}
default: {
index = `${constants.CHANNEL_KEY_PREFIX}-${options.applicationId}-${modelName}`;
}
}
if (options.filters && !options.filters.isEmpty()) {
reqBody.query.filtered.filter = this.getQueryObject(options.filters);
}
if (options.aggregation) {
| {
if (modifications.added.schema) {
const addedModels = Object.keys(modifications.added.schema);
await addedModels.reduce(async (promise, modelName) => {
await promise;
try {
await this.connection.indices.create({
index: `${constants.CHANNEL_KEY_PREFIX}-${applicationId}-${modelName}`
});
Services.logger.debug(`Successfully created index: "${constants.CHANNEL_KEY_PREFIX}-${applicationId}-${modelName}"`);
} catch (err) {
Services.logger.warning(`Index already exists: "${constants.CHANNEL_KEY_PREFIX}-${applicationId}-${modelName}"`);
}
return Promise.resolve();
}, Promise.resolve());
} | identifier_body |