file_name (stringlengths 3–137) | prefix (stringlengths 0–918k) | suffix (stringlengths 0–962k) | middle (stringlengths 0–812k)
---|---|---|---|
decoder.py
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import torch
import torch.nn as nn
import torch.nn.functional as F
# In[2]:
class Decoder(nn.Module):
def __init__(self, output_dim, emb_dim, enc_hid_dim, dec_hid_dim, dropout_rate, attention):
super().__init__()
self.output_dim = output_dim
self.emb_dim = emb_dim
self.enc_hid_dim = enc_hid_dim
self.dec_hid_dim = dec_hid_dim
self.dropout_rate = dropout_rate
self.attention = attention
self.embedding = nn.Embedding(output_dim, emb_dim)
self.gru = nn.GRU((enc_hid_dim * 2) + emb_dim, dec_hid_dim, batch_first=True)
self.fc = nn.Linear((enc_hid_dim * 2) + dec_hid_dim + emb_dim, output_dim)
self.dropout = nn.Dropout(dropout_rate)
def forward(self, input, hidden, encoder_outputs):
# input = [batch_size]
# hidden = [batch_size, dec_hid_dim]
# encoder_outputs = [batch_size, seq_len, enc_hid_dim * 2]
input = input.unsqueeze(1)
# input = [batch_size, 1]
embedded = self.dropout(self.embedding(input))
# embedded = [batch_size, 1, emb_dim]
a = self.attention(hidden, encoder_outputs)
# a = [batch_size, seq_len]
a = a.unsqueeze(1)
# a = [batch_size, 1, seq_len]
context = torch.bmm(a, encoder_outputs)
# context = [batch_size, 1, enc_hid_dim * 2]
gru_input = torch.cat((embedded, context), dim=2)
# gru_input = [batch_size, 1, (enc_hid_dim * 2) + emb_dim]
output, hidden = self.gru(gru_input, hidden.unsqueeze(0))
# output = [batch_size, seq_len, dec_hid_dim * n_directions]
# hidden = [n_layers * n_directions, batch_size, dec_hid_dim]
# seq_len, n_layers and n_directions will always be 1 in this decoder, therefore:
# output = [batch_size, 1, dec_hid_dim]
# hidden = [1, batch_size, dec_hid_dim]
# this also means that output == hidden
# assert (output == hidden).all()
embedded = embedded.squeeze(1) #[batch_size, emb_dim]
output = output.squeeze(1) # [batch_size, dec_hid_dim] (n_directions == 1)
context = context.squeeze(1) #[batch_size, enc_hid_dim * 2]
output = self.fc(torch.cat((output, context, embedded), dim=1))
# output = [batch_size, output_dim]
return output, hidden.squeeze(0)
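# In[3]:
# A minimal smoke test; illustrative only. `UniformAttention` is a stand-in for
# whatever attention module this decoder is normally paired with, and simply
# returns uniform weights over the source sequence.
class UniformAttention(nn.Module):
    def forward(self, hidden, encoder_outputs):
        batch_size, seq_len, _ = encoder_outputs.shape
        return torch.full((batch_size, seq_len), 1.0 / seq_len)

dec = Decoder(output_dim=10, emb_dim=8, enc_hid_dim=16, dec_hid_dim=32,
              dropout_rate=0.1, attention=UniformAttention())
tokens = torch.zeros(4, dtype=torch.long)   # [batch_size]
hidden = torch.zeros(4, 32)                 # [batch_size, dec_hid_dim]
enc_out = torch.zeros(4, 7, 32)             # [batch_size, seq_len, enc_hid_dim * 2]
out, hid = dec(tokens, hidden, enc_out)
assert out.shape == (4, 10) and hid.shape == (4, 32)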
stubs.rs
// Copyright 2020 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT
use super::*;
pub struct TestRand;
impl Rand for TestRand {
fn get_chain_randomness(
&self,
_: DomainSeparationTag,
_: ChainEpoch,
_: &[u8],
) -> Result<[u8; 32], Box<dyn StdError>> {
Ok(*b"i_am_random_____i_am_random_____")
}
fn get_beacon_randomness(
&self,
_: DomainSeparationTag,
_: ChainEpoch,
_: &[u8],
) -> Result<[u8; 32], Box<dyn StdError>> {
Ok(*b"i_am_random_____i_am_random_____")
}
}
pub struct TestSyscalls;
impl Syscalls for TestSyscalls {
fn verify_signature(
&self,
_: &Signature,
_: &Address,
_: &[u8],
) -> Result<(), Box<dyn StdError>> {
Ok(())
}
fn verify_seal(&self, _: &SealVerifyInfo) -> Result<(), Box<dyn StdError>> {
Ok(())
}
fn verify_post(&self, _: &WindowPoStVerifyInfo) -> Result<(), Box<dyn StdError>> {
Ok(())
}
// TODO check if this should be defaulted as well
fn verify_consensus_fault(
&self,
_: &[u8],
_: &[u8],
_: &[u8],
) -> Result<Option<ConsensusFault>, Box<dyn StdError>> {
Ok(None)
}
}
eaglesong.py
def PrintState( state ):
s = ""
for i in range(0, 16):
s += "0x%08x" % state[i]
s += " "
print(s)
def EaglesongPermutation( state ):
N = 43
#PrintState(state)
for i in range(0, N):
state = EaglesongRound(state, i)
return state
def EaglesongRound( state, index ):
# constants
bitmatrix = [[1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1],
[0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1],
[0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1],
[0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0],
[1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1],
[1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0],
[1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1],
[0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1],
[0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1],
[0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1],
[0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1],
[1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0],
[0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0],
[1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1]]
coefficients = [[0, 2, 4], [0, 13, 22], [0, 4, 19], [0, 3, 14], [0, 27, 31], [0, 3, 8], [0, 17, 26], [0, 3, 12], [0, 18, 22], [0, 12, 18], [0, 4, 7], [0, 4, 31], [0, 12, 27], [0, 7, 17], [0, 7, 8], [0, 1, 13]]
injection_constants = [ 0x6e9e40ae , 0x71927c02 , 0x9a13d3b1 , 0xdaec32ad , 0x3d8951cf , 0xe1c9fe9a , 0xb806b54c , 0xacbbf417 ,
0xd3622b3b , 0xa082762a , 0x9edcf1c0 , 0xa9bada77 , 0x7f91e46c , 0xcb0f6e4f , 0x265d9241 , 0xb7bdeab0 ,
0x6260c9e6 , 0xff50dd2a , 0x9036aa71 , 0xce161879 , 0xd1307cdf , 0x89e456df , 0xf83133e2 , 0x65f55c3d ,
0x94871b01 , 0xb5d204cd , 0x583a3264 , 0x5e165957 , 0x4cbda964 , 0x675fca47 , 0xf4a3033e , 0x2a417322 ,
0x3b61432f , 0x7f5532f2 , 0xb609973b , 0x1a795239 , 0x31b477c9 , 0xd2949d28 , 0x78969712 , 0x0eb87b6e ,
0x7e11d22d , 0xccee88bd , 0xeed07eb8 , 0xe5563a81 , 0xe7cb6bcf , 0x25de953e , 0x4d05653a , 0x0b831557 ,
0x94b9cd77 , 0x13f01579 , 0x794b4a4a , 0x67e7c7dc , 0xc456d8d4 , 0x59689c9b , 0x668456d7 , 0x22d2a2e1 ,
0x38b3a828 , 0x0315ac3c , 0x438d681e , 0xab7109c5 , 0x97ee19a8 , 0xde062b2e , 0x2c76c47b , 0x0084456f ,
0x908f0fd3 , 0xa646551f , 0x3e826725 , 0xd521788e , 0x9f01c2b0 , 0x93180cdc , 0x92ea1df8 , 0x431a9aae ,
0x7c2ea356 , 0xda33ad03 , 0x46926893 , 0x66bde7d7 , 0xb501cc75 , 0x1f6e8a41 , 0x685250f4 , 0x3bb1f318 ,
0xaf238c04 , 0x974ed2ec , 0x5b159e49 , 0xd526f8bf , 0x12085626 , 0x3e2432a9 , 0x6bd20c48 , 0x1f1d59da ,
0x18ab1068 , 0x80f83cf8 , 0x2c8c11c0 , 0x7d548035 , 0x0ff675c3 , 0xfed160bf , 0x74bbbb24 , 0xd98e006b ,
0xdeaa47eb , 0x05f2179e , 0x437b0b71 , 0xa7c95f8f , 0x00a99d3b , 0x3fc3c444 , 0x72686f8e , 0x00fd01a9 ,
0xdedc0787 , 0xc6af7626 , 0x7012fe76 , 0xf2a5f7ce , 0x9a7b2eda , 0x5e57fcf2 , 0x4da0d4ad , 0x5c63b155 ,
0x34117375 , 0xd4134c11 , 0x2ea77435 , 0x5278b6de , 0xab522c4c , 0xbc8fc702 , 0xc94a09e4 , 0xebb93a9e ,
0x91ecb65e , 0x4c52ecc6 , 0x8703bb52 , 0xcb2d60aa , 0x30a0538a , 0x1514f10b , 0x157f6329 , 0x3429dc3d ,
0x5db73eb2 , 0xa7a1a969 , 0x7286bd24 , 0x0df6881e , 0x3785ba5f , 0xcd04623a , 0x02758170 , 0xd827f556 ,
0x99d95191 , 0x84457eb1 , 0x58a7fb22 , 0xd2967c5f , 0x4f0c33f6 , 0x4a02099a , 0xe0904821 , 0x94124036 ,
0x496a031b , 0x780b69c4 , 0xcf1a4927 , 0x87a119b8 , 0xcdfaf4f8 , 0x4cf9cd0f , 0x27c96a84 , 0x6d11117e ,
0x7f8cf847 , 0x74ceede5 , 0xc88905e6 , 0x60215841 , 0x7172875a , 0x736e993a , 0x010aa53c , 0x43d53c2b ,
0xf0d91a93 , 0x0d983b56 , 0xf816663c , 0xe5d13363 , 0x0a61737c , 0x09d51150 , 0x83a5ac2f , 0x3e884905 ,
0x7b01aeb5 , 0x600a6ea7 , 0xb7678f7b , 0x72b38977 , 0x068018f2 , 0xce6ae45b , 0x29188aa8 , 0xe5a0b1e9 ,
0xc04c2b86 , 0x8bd14d75 , 0x648781f3 , 0xdbae1e0a , 0xddcdd8ae , 0xab4d81a3 , 0x446baaba , 0x1cc0c19d ,
0x17be4f90 , 0x82c0e65d , 0x676f9c95 , 0x5c708db2 , 0x6fd4c867 , 0xa5106ef0 , 0x19dde49d , 0x78182f95 ,
0xd089cd81 , 0xa32e98fe , 0xbe306c82 , 0x6cd83d8c , 0x037f1bde , 0x0b15722d , 0xeddc1e22 , 0x93c76559 ,
0x8a2f571b , 0x92cc81b4 , 0x021b7477 , 0x67523904 , 0xc95dbccc , 0xac17ee9d , 0x944e46bc , 0x0781867e ,
0xc854dd9d , 0x26e2c30c , 0x858c0416 , 0x6d397708 , 0xebe29c58 , 0xc80ced86 , 0xd496b4ab , 0xbe45e6f5 ,
0x10d24706 , 0xacf8187a , 0x96f523cb , 0x2227e143 , 0x78c36564 , 0x4643adc2 , 0x4729d97a , 0xcff93e0d ,
0x25484bbd , 0x91c6798e , 0x95f773f4 , 0x44204675 , 0x2eda57ba , 0x06d313ef , 0xeeaa4466 , 0x2dfa7530 ,
0xa8af0c9b , 0x39f1535e , 0x0cc2b7bd , 0x38a76c0e , 0x4f41071d , 0xcdaf2475 , 0x49a6eff8 , 0x01621748 ,
0x36ebacab , 0xbd6d9a29 , 0x44d1cd65 , 0x40815dfd , 0x55fa5a1a , 0x87cce9e9 , 0xae559b45 , 0xd76b4c26 ,
0x637d60ad , 0xde29f5f9 , 0x97491cbb , 0xfb350040 , 0xffe7f997 , 0x201c9dcd , 0xe61320e9 , 0xa90987a3 ,
0xe24afa83 , 0x61c1e6fc , 0xcc87ff62 , 0xf1c9d8fa , 0x4fd04546 , 0x90ecc76e , 0x46e456b9 , 0x305dceb8 ,
0xf627e68c , 0x2d286815 , 0xc705bbfd , 0x101b6df3 , 0x892dae62 , 0xd5b7fb44 , 0xea1d5c94 , 0x5332e3cb ,
0xf856f88a , 0xb341b0e9 , 0x28408d9d , 0x5421bc17 , 0xeb9af9bc , 0x602371c5 , 0x67985a91 , 0xd774907f ,
0x7c4d697d , 0x9370b0b8 , 0x6ff5cebb , 0x7d465744 , 0x674ceac0 , 0xea9102fc , 0x0de94784 , 0xc793de69 ,
0xfe599bb1 , 0xc6ad952f , 0x6d6ca9c3 , 0x928c3f91 , 0xf9022f05 , 0x24a164dc , 0xe5e98cd3 , 0x7649efdb ,
0x6df3bcdb , 0x5d1e9ff1 , 0x17f5d010 , 0xe2686ea1 , 0x6eac77fe , 0x7bb5c585 , 0x88d90cbb , 0x18689163 ,
0x67c9efa5 , 0xc0b76d9b , 0x960efbab , 0xbd872807 , 0x70f4c474 , 0x56c29d20 , 0xd1541d15 , 0x88137033 ,
0xe3f02b3e , 0xb6d9b28d , 0x53a077ba , 0xeedcd29e , 0xa50a6c1d , 0x12c2801e , 0x52ba335b , 0x35984614 ,
0xe2599aa8 , 0xaf94ed1d , 0xd90d4767 , 0x202c7d07 , 0x77bec4f4 , 0xfa71bc80 , 0xfc5c8b76 , 0x8d0fbbfc ,
0xda366dc6 , 0x8b32a0c7 , 0x1b36f7fc , 0x6642dcbc , 0x6fe7e724 , 0x8b5fa782 , 0xc4227404 , 0x3a7d1da7 ,
0x517ed658 , 0x8a18df6d , 0x3e5c9b23 , 0x1fbd51ef , 0x1470601d , 0x3400389c , 0x676b065d , 0x8864ad80 ,
0xea6f1a9c , 0x2db484e1 , 0x608785f0 , 0x8dd384af , 0x69d26699 , 0x409c4e16 , 0x77f9986a , 0x7f491266 ,
0x883ea6cf , 0xeaa06072 , 0xfa2e5db5 , 0x352594b4 , 0x9156bb89 , 0xa2fbbbfb , 0xac3989c7 , 0x6e2422b1 ,
0x581f3560 , 0x1009a9b5 , 0x7e5ad9cd , 0xa9fc0a6e , 0x43e5998e , 0x7f8778f9 , 0xf038f8e1 , 0x5415c2e8 ,
0x6499b731 , 0xb82389ae , 0x05d4d819 , 0x0f06440e , 0xf1735aa0 , 0x986430ee , 0x47ec952c , 0xbf149cc5 ,
0xb3cb2cb6 , 0x3f41e8c2 , 0x271ac51b , 0x48ac5ded , 0xf76a0469 , 0x717bba4d , 0x4f5c90d6 , 0x3b74f756 ,
0x1824110a , 0xa4fd43e3 , 0x1eb0507c , 0xa9375c08 , 0x157c59a7 , 0x0cad8f51 , 0xd66031a0 , 0xabb5343f ,
0xe533fa43 , 0x1996e2bb , 0xd7953a71 , 0xd2529b94 , 0x58f0fa07 , 0x4c9b1877 , 0x057e990d , 0x8bfe19c4 ,
0xa8e2c0c9 , 0x99fcaada , 0x69d2aaca , 0xdc1c4642 , 0xf4d22307 , 0x7fe27e8c , 0x1366aa07 , 0x1594e637 ,
0xce1066bf , 0xdb922552 , 0x9930b52a , 0xaeaa9a3e , 0x31ff7eb4 , 0x5e1f945a , 0x150ac49c , 0x0ccdac2d ,
0xd8a8a217 , 0xb82ea6e5 , 0xd6a74659 , 0x67b7e3e6 , 0x836eef4a , 0xb6f90074 , 0x7fa3ea4b , 0xcb038123 ,
0xbf069f55 , 0x1fa83fc4 , 0xd6ebdb23 , 0x16f0a137 , 0x19a7110d , 0x5ff3b55f , 0xfb633868 , 0xb466f845 ,
0xbce0c198 , 0x88404296 , 0xddbdd88b , 0x7fc52546 , 0x63a553f8 , 0xa728405a , 0x378a2bce , 0x6862e570 ,
0xefb77e7d , 0xc611625e , 0x32515c15 , 0x6984b765 , 0xe8405976 , 0x9ba386fd , 0xd4eed4d9 , 0xf8fe0309 ,
0x0ce54601 , 0xbaf879c2 , 0xd8524057 , 0x1d8c1d7a , 0x72c0a3a9 , 0x5a1ffbde , 0x82f33a45 , 0x5143f446 ,
0x29c7e182 , 0xe536c32f , 0x5a6f245b , 0x44272adb , 0xcb701d9c , 0xf76137ec , 0x0841f145 , 0xe7042ecc ,
0xf1277dd7 , 0x745cf92c , 0xa8fe65fe , 0xd3e2d7cf , 0x54c513ef , 0x6079bc2d , 0xb66336b0 , 0x101e383b ,
0xbcd75753 , 0x25be238a , 0x56a6f0be , 0xeeffcc17 , 0x5ea31f3d , 0x0ae772f5 , 0xf76de3de , 0x1bbecdad ,
0xc9107d43 , 0xf7e38dce , 0x618358cd , 0x5c833f04 , 0xf6975906 , 0xde4177e5 , 0x67d314dc , 0xb4760f3e ,
0x56ce5888 , 0x0e8345a8 , 0xbff6b1bf , 0x78dfb112 , 0xf1709c1e , 0x7bb8ed8b , 0x902402b9 , 0xdaa64ae0 ,
0x46b71d89 , 0x7eee035f , 0xbe376509 , 0x99648f3a , 0x0863ea1f , 0x49ad8887 , 0x79bdecc5 , 0x3c10b568 ,
0x5f2e4bae , 0x04ef20ab , 0x72f8ce7b , 0x521e1ebe , 0x14525535 , 0x2e8af95b , 0x9094ccfd , 0xbcf36713 ,
0xc73953ef , 0xd4b91474 , 0x6554ec2d , 0xe3885c96 , 0x03dc73b7 , 0x931688a9 , 0xcbbef182 , 0x2b77cfc9 ,
0x632a32bd , 0xd2115dcc , 0x1ae5533d , 0x32684e13 , 0x4cc5a004 , 0x13321bde , 0x62cbd38d , 0x78383a3b ,
0xd00686f1 , 0x9f601ee7 , 0x7eaf23de , 0x3110c492 , 0x9c351209 , 0x7eb89d52 , 0x6d566eac , 0xc2efd226 ,
0x32e9fac5 , 0x52227274 , 0x09f84725 , 0xb8d0b605 , 0x72291f02 , 0x71b5c34b , 0x3dbfcbb8 , 0x04a02263 ,
0x55ba597f , 0xd4e4037d , 0xc813e1be , 0xffddeefa , 0xc3c058f3 , 0x87010f2e , 0x1dfcf55f , 0xc694eeeb ,
0xa9c01a74 , 0x98c2fc6b , 0xe57e1428 , 0xdd265a71 , 0x836b956d , 0x7e46ab1a , 0x5835d541 , 0x50b32505 ,
0xe640913c , 0xbb486079 , 0xfe496263 , 0x113c5b69 , 0x93cd6620 , 0x5efe823b , 0x2d657b40 , 0xb46dfc6c ,
0x57710c69 , 0xfe9fadeb , 0xb5f8728a , 0xe3224170 , 0xca28b751 , 0xfdabae56 , 0x5ab12c3c , 0xa697c457 ,
0xd28fa2b7 , 0x056579f2 , 0x9fd9d810 , 0xe3557478 , 0xd88d89ab , 0xa72a9422 , 0x6d47abd0 , 0x405bcbd9 ,
0x6f83ebaf , 0x13caec76 , 0xfceb9ee2 , 0x2e922df7 , 0xce9856df , 0xc05e9322 , 0x2772c854 , 0xb67f2a32 ,
0x6d1af28d , 0x3a78cf77 , 0xdff411e4 , 0x61c74ca9 , 0xed8b842e , 0x72880845 , 0x6e857085 , 0xc6404932 ,
0xee37f6bc , 0x27116f48 , 0x5e9ec45a , 0x8ea2a51f , 0xa5573db7 , 0xa746d036 , 0x486b4768 , 0x5b438f3b ,
0x18c54a5c , 0x64fcf08e , 0xe993cdc1 , 0x35c1ead3 , 0x9de07de7 , 0x321b841c , 0x87423c5e , 0x071aa0f6 ,
0x962eb75b , 0xbb06bdd2 , 0xdcdb5363 , 0x389752f2 , 0x83d9cc88 , 0xd014adc6 , 0xc71121bb , 0x2372f938 ,
0xcaff2650 , 0x62be8951 , 0x56dccaff , 0xac4084c0 , 0x09712e95 , 0x1d3c288f , 0x1b085744 , 0xe1d3cfef ,
0x5c9a812e , 0x6611fd59 , 0x85e46044 , 0x1981d885 , 0x5a4c903f , 0x43f30d4b , 0x7d1d601b , 0xdd3c3391 ,
0x030ec65e , 0xc12878cd , 0x72e795fe , 0xd0c76abd , 0x1ec085db , 0x7cbb61fa , 0x93e8dd1e , 0x8582eb06 ,
0x73563144 , 0x049d4e7e , 0x5fd5aefe , 0x7b842a00 , 0x75ced665 , 0xbb32d458 , 0x4e83bba7 , 0x8f15151f ,
0x7795a125 , 0xf0842455 , 0x499af99d , 0x565cc7fa , 0xa3b1278d , 0x3f27ce74 , 0x96ca058e , 0x8a497443 ,
0xa6fb8cae , 0xc115aa21 , 0x17504923 , 0xe4932402 , 0xaea886c2 , 0x8eb79af5 , 0xebd5ea6b , 0xc7980d3b ,
0x71369315 , 0x796e6a66 , 0x3a7ec708 , 0xb05175c8 , 0xe02b74e7 , 0xeb377ad3 , 0x6c8c1f54 , 0xb980c374 ,
0x59aee281 , 0x449cb799 , 0xe01f5605 , 0xed0e085e , 0xc9a1a3b4 , 0xaac481b1 , 0xc935c39c , 0xb7d8ce7f ]
# bit matrix
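# new[j] is the XOR of every state[k] for which bitmatrix[k][j] == 1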
new = [0 for i in range(0,16)]
for j in range(0, 16):
for k in range(0, 16):
new[j] = new[j] ^ (state[k] * bitmatrix[k][j])
new[j] = new[j] & 0xffffffff # truncate to 32 bits, if necessary
state = new
# circulant multiplication
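# each word becomes the XOR of three left-rotations of itself (a coefficient of 0 selects the word unrotated)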
for i in range(0, 16):
acc = 0
for j in range(0, 3):
acc = acc ^ (state[i] << coefficients[i][j]) ^ (state[i] >> (32-coefficients[i][j]))
state[i] = acc & 0xffffffff # truncate to 32 bits, if necessary
# constants injection
for i in range(0, 16):
state[i] = state[i] ^ injection_constants[index*16 + i]
# add / rotate / add
for i in range(0, 8):
state[2*i] = (state[2*i] + state[2*i+1]) & 0xffffffff # truncate to 32 bits, if necessary
state[2*i] = (state[2*i] >> 24) ^ ((state[2*i] << 8) & 0xffffffff) # shift bytes
state[2*i+1] = (state[2*i+1] >> 8) ^ ((state[2*i+1] << 24) & 0xffffffff) # shift bytes
state[2*i+1] = (state[2*i] + state[2*i+1]) & 0xffffffff # truncate to 32 bits, if necessary
return state
def EaglesongSponge( input_bytes, num_output_bytes, delimiter ):
# parameters
capacity = 256 # must be multiple of 32
rate = 256 # must be multiple of 32
state = [0 for i in range(0, 16)]
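# 16 words x 32 bits = 512-bit state = rate (256) + capacity (256)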
# absorbing
for i in range(0, ((len(input_bytes)+1)*8+rate-1) // rate):
for j in range(0, rate//32):
integer = 0
for k in range(0, 4):
if i*rate//8 + j*4 + k < len(input_bytes):
integer = (integer << 8) ^ input_bytes[i*rate//8 + j*4 + k]
elif i*rate//8 + j*4 + k == len(input_bytes):
integer = (integer << 8) ^ delimiter
state[j] = state[j] ^ integer
state = EaglesongPermutation(state)
# squeezing
output_bytes = [0] * num_output_bytes
for i in range(0, num_output_bytes//(rate//8)):
for j in range(0, rate//32):
for k in range(0, 4):
output_bytes[i*rate//8 + j*4 + k] = (state[j] >> (8*k)) & 0xff
state = EaglesongPermutation(state)
return output_bytes
def EaglesongHash( input_bytes ):
# just run the sponge (with delimiter 0x06 -- hashing mode) and truncate to 32 bytes == 256 bits
return EaglesongSponge(bytearray(input_bytes), 32, 0x06)
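# Example usage; illustrative only (the digest is printed, not asserted; compare
# against the official Eaglesong test vectors if needed):
if __name__ == "__main__":
    digest = EaglesongHash(b"Hello, world!")
    print("".join("%02x" % byte for byte in digest))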
elasticsearch.go
package main
import (
"bytes"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
)
func elasticsearchHandler(w http.ResponseWriter, r *http.Request) {
	tls := r.FormValue("tls") != "false"
	err := testElasticsearchConnection(tls)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	writeJson(w, map[string]interface{}{
		"success": true,
	})
}
func testElasticsearchConnection(tls bool) error {
var credentials struct {
URI string `json:"uri"`
}
err := getVCAPServiceCredentials("elasticsearch", &credentials)
if err != nil {
return err
}
if !tls {
credentials.URI, err = forcePlainHTTP(credentials.URI)
if err != nil {
return err
}
}
client := &esClient{
client: &http.Client{},
baseURL: credentials.URI,
}
// Insert document
err = client.InsertDocument("test_index", "test_type", "42", map[string]string{"title": "Test document"})
if err != nil {
return err
}
// Read document
doc, err := client.GetDocument("test_index", "test_type", "42")
if err != nil {
return err
}
if doc.Source["title"] != "Test document" {
return fmt.Errorf("Unexpected data back from ES: %#v", doc)
}
// Delete document
err = client.DeleteDocument("test_index", "test_type", "42")
if err != nil {
return err
}
return nil
}
func forcePlainHTTP(uri string) (string, error) {
u, err := url.Parse(uri)
if err != nil {
return "", err
}
u.Scheme = "http"
return u.String(), nil
}
type esDocument struct {
Index string `json:"_index"`
Type string `json:"_type"`
ID string `json:"_id"`
Source map[string]interface{} `json:"_source"`
}
type esClient struct {
client *http.Client
baseURL string
}
func (e *esClient) DocumentUrl(index, kind, id string) string {
esURL := &url.URL{}
esURL, _ = esURL.Parse(e.baseURL)
path := fmt.Sprintf("/%s/%s/%s", index, kind, id)
esURL.Path = path
return esURL.String()
}
func (e *esClient) GetDocument(index, kind, id string) (*esDocument, error) {
resp, err := e.doRequest("GET", e.DocumentUrl(index, kind, id), nil, 200)
if err != nil {
return nil, err
}
defer resp.Body.Close()
var doc esDocument
err = json.NewDecoder(resp.Body).Decode(&doc)
return &doc, err
}
func (e *esClient) InsertDocument(index, kind, id string, data interface{}) error {
var body bytes.Buffer
err := json.NewEncoder(&body).Encode(data)
if err != nil {
return err
}
_, err = e.doRequest("PUT", e.DocumentUrl(index, kind, id), &body, 201)
return err
}
func (e *esClient) DeleteDocument(index, kind, id string) error {
_, err := e.doRequest("DELETE", e.DocumentUrl(index, kind, id), nil, 200)
return err
}
func (e *esClient) doRequest(method, url string, body io.Reader, expectedStatus int) (*http.Response, error) {
req, err := http.NewRequest(method, url, body)
if err != nil {
return nil, err
}
req.Header.Add("Content-Type", "application/json")
resp, err := e.client.Do(req)
if err != nil {
return nil, err
}
if resp.StatusCode != expectedStatus {
respBody, _ := ioutil.ReadAll(resp.Body)
resp.Body.Close()
return nil, fmt.Errorf("Expected %d, got %d response\n%s\n", expectedStatus, resp.StatusCode, string(respBody))
}
return resp, nil
}
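// Note: writeJson is defined elsewhere in this package. A hypothetical wiring of
// the handler into the server (not shown in this file) would look like:
//
//	http.HandleFunc("/elasticsearch", elasticsearchHandler)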
device_plugin_service.rs
use super::constants::{
DEVICE_PLUGIN_PATH, HEALTHY, K8S_DEVICE_PLUGIN_VERSION, KUBELET_SOCKET,
LIST_AND_WATCH_SLEEP_SECS, UNHEALTHY,
};
use super::v1beta1;
use super::v1beta1::{
device_plugin_server::{DevicePlugin, DevicePluginServer},
registration_client, AllocateRequest, AllocateResponse, DevicePluginOptions, Empty,
ListAndWatchResponse, PreStartContainerRequest, PreStartContainerResponse,
};
use akri_shared::{
akri::{
configuration::{Configuration, ProtocolHandler},
instance::Instance,
retry::{random_delay, MAX_INSTANCE_UPDATE_TRIES},
AKRI_PREFIX, AKRI_SLOT_ANNOTATION_NAME,
},
k8s,
k8s::KubeInterface,
};
use futures::stream::TryStreamExt;
use log::{error, info, trace};
use std::{
collections::HashMap,
convert::TryFrom,
env,
path::Path,
sync::Arc,
time::{Duration, Instant, SystemTime, UNIX_EPOCH},
};
use tokio::{
net::UnixListener,
net::UnixStream,
sync::{broadcast, mpsc, Mutex},
task,
time::{delay_for, timeout},
};
use tonic::{
transport::{Endpoint, Server, Uri},
Code, Request, Response, Status,
};
use tower::service_fn;
/// Message sent in channel to `list_and_watch`.
/// Dictates what action `list_and_watch` should take upon being awoken.
#[derive(PartialEq, Clone, Debug)]
pub enum ListAndWatchMessageKind {
/// Prematurely continue looping
Continue,
/// Stop looping
End,
}
/// Describes the discoverability of an instance for this node
#[derive(PartialEq, Debug, Clone)]
pub enum ConnectivityStatus {
/// Was discovered
Online,
/// Could not be discovered. Instant contains time at which it was no longer discovered.
Offline(Instant),
}
/// Contains an Instance's state
#[derive(Clone, Debug)]
pub struct InstanceInfo {
/// Sender to tell `list_and_watch` to either prematurely continue looping or end
pub list_and_watch_message_sender: broadcast::Sender<ListAndWatchMessageKind>,
/// Instance's `ConnectivityStatus`
pub connectivity_status: ConnectivityStatus,
}
pub type InstanceMap = Arc<Mutex<HashMap<String, InstanceInfo>>>;
/// Kubernetes Device-Plugin for an Instance.
///
/// `DevicePluginService` implements the Kubernetes Device-Plugin v1beta1 API specification
/// defined in a public proto file (imported here at agent/proto/pluginapi.proto).
/// The code generated from pluginapi.proto can be found in `agent/src/util/v1beta1.rs`.
/// Each `DevicePluginService` has an associated Instance and Configuration.
/// Serves a unix domain socket, sending and receiving messages to/from kubelet.
/// Kubelet is its client, calling each of its methods.
#[derive(Clone)]
pub struct DevicePluginService {
/// Instance CRD name
instance_name: String,
/// Socket endpoint
endpoint: String,
/// Instance's Configuration
config: Configuration,
/// Name of Instance's Configuration CRD
config_name: String,
/// UID of Instance's Configuration CRD
config_uid: String,
/// Namespace of Instance's Configuration CRD
config_namespace: String,
/// Whether the Instance is shared
shared: bool,
/// Hostname of node this Device Plugin is running on
node_name: String,
/// Information that must be communicated with the broker. Stored in the Instance CRD as metadata.
instance_properties: HashMap<String, String>,
/// Map of all Instances that have the same Configuration CRD as this one
instance_map: InstanceMap,
/// Receiver for list_and_watch continue or end messages
/// Note: since the tonic-generated list_and_watch definition takes `&self`,
/// a broadcast sender is stored instead of an mpsc receiver.
/// The broadcast sender can be cloned and a receiver subscribed to for use in the task spawned in `list_and_watch`.
list_and_watch_message_sender: broadcast::Sender<ListAndWatchMessageKind>,
/// Upon send, terminates function that acts as the shutdown signal for this service
server_ender_sender: mpsc::Sender<()>,
}
#[tonic::async_trait]
impl DevicePlugin for DevicePluginService {
/// Returns options to be communicated with kubelet Device Manager
async fn get_device_plugin_options(
&self,
_request: Request<Empty>,
) -> Result<Response<DevicePluginOptions>, Status>
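{
    // Body not present in this excerpt; a minimal sketch, assuming the options
    // mirror those sent during registration below (pre_start_required: false).
    Ok(Response::new(DevicePluginOptions {
        pre_start_required: false,
    }))
}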
type ListAndWatchStream = mpsc::Receiver<Result<ListAndWatchResponse, Status>>;
/// Called by Kubelet right after the DevicePluginService registers with Kubelet.
/// Returns a stream of List of "virtual" Devices over a channel.
/// Since Kubernetes designed the Device-Plugin API so that multiple consumers can use a Device,
/// "virtual" Devices are reservation slots for using the Device (the Instance, in Akri terms).
/// The number of "virtual" Devices (the length of `ListAndWatchResponse`) is determined by `Instance.capacity`.
/// Whenever the Instance state changes or an Instance disappears, `list_and_watch` returns the new list.
/// Runs until it receives a message to end, due to the Instance disappearing or the Configuration being deleted.
async fn list_and_watch(
&self,
_request: Request<Empty>,
) -> Result<Response<Self::ListAndWatchStream>, Status> {
info!(
"list_and_watch - kubelet called list_and_watch for instance {}",
self.instance_name
);
let dps = Arc::new(self.clone());
let mut list_and_watch_message_receiver = self.list_and_watch_message_sender.subscribe();
// Create a channel that list_and_watch can periodically send updates to kubelet on
let (mut kubelet_update_sender, kubelet_update_receiver) = mpsc::channel(4);
// Spawn a task so that the receiving end of the channel can be sent to kubelet to listen on
tokio::spawn(async move {
let mut keep_looping = true;
#[cfg(not(test))]
let kube_interface = Arc::new(k8s::create_kube_interface());
// Try to create an Instance CRD for this plugin and add it to the global InstanceMap; otherwise shut down
#[cfg(not(test))]
{
if let Err(e) = try_create_instance(dps.clone(), kube_interface.clone()).await {
error!(
"list_and_watch - ending service because could not create instance {} with error {}",
dps.instance_name,
e
);
dps.server_ender_sender.clone().send(()).await.unwrap();
keep_looping = false;
}
}
while keep_looping {
trace!(
"list_and_watch - loop iteration for Instance {}",
dps.instance_name
);
let virtual_devices: Vec<v1beta1::Device>;
#[cfg(test)]
{
virtual_devices =
build_unhealthy_virtual_devices(dps.config.capacity, &dps.instance_name);
}
#[cfg(not(test))]
{
virtual_devices =
build_list_and_watch_response(dps.clone(), kube_interface.clone())
.await
.unwrap();
}
let resp = v1beta1::ListAndWatchResponse {
devices: virtual_devices,
};
// Send virtual devices list back to kubelet
if let Err(e) = kubelet_update_sender.send(Ok(resp)).await {
trace!(
"list_and_watch - for Instance {} kubelet no longer receiving with error {}",
dps.instance_name,
e
);
// This means kubelet is down/has been restarted. Remove instance from instance map so
// do_periodic_discovery will create a new device plugin service for this instance.
dps.instance_map.lock().await.remove(&dps.instance_name);
dps.server_ender_sender.clone().send(()).await.unwrap();
keep_looping = false;
}
// Sleep for LIST_AND_WATCH_SLEEP_SECS unless receive message to shutdown the server
// or continue (and send another list of devices)
match timeout(
Duration::from_secs(LIST_AND_WATCH_SLEEP_SECS),
list_and_watch_message_receiver.recv(),
)
.await
{
Ok(message) => {
// If receive message to end list_and_watch, send list of unhealthy devices
// and shutdown the server by sending message on server_ender_sender channel
if message == Ok(ListAndWatchMessageKind::End) {
trace!(
"list_and_watch - for Instance {} received message to end",
dps.instance_name
);
let devices = build_unhealthy_virtual_devices(
dps.config.capacity,
&dps.instance_name,
);
kubelet_update_sender.send(Ok(v1beta1::ListAndWatchResponse { devices }))
.await
.unwrap();
dps.server_ender_sender.clone().send(()).await.unwrap();
keep_looping = false;
}
}
Err(_) => trace!(
"list_and_watch - for Instance {} did not receive a message for {} seconds ... continuing", dps.instance_name, LIST_AND_WATCH_SLEEP_SECS
),
}
}
trace!("list_and_watch - for Instance {} ending", dps.instance_name);
});
Ok(Response::new(kubelet_update_receiver))
}
/// Kubelet calls allocate during pod creation.
/// This means kubelet is trying to reserve a usage slot (virtual Device) of the Instance for this node.
/// Returns error if cannot reserve that slot.
async fn allocate(
&self,
requests: Request<AllocateRequest>,
) -> Result<Response<AllocateResponse>, Status> {
info!(
"allocate - kubelet called allocate for Instance {}",
self.instance_name
);
let kube_interface = Arc::new(k8s::create_kube_interface());
match self.internal_allocate(requests, kube_interface).await {
Ok(resp) => Ok(resp),
Err(e) => Err(e),
}
}
/// Should never be called, as indicated by DevicePluginService during registration.
async fn pre_start_container(
&self,
_request: Request<PreStartContainerRequest>,
) -> Result<Response<PreStartContainerResponse>, Status> {
error!(
"pre_start_container - kubelet called pre_start_container for Instance {}",
self.instance_name
);
Ok(Response::new(v1beta1::PreStartContainerResponse {}))
}
}
impl DevicePluginService {
/// Called when kubelet is trying to reserve for this node a usage slot (or virtual device) of the Instance.
/// Tries to update Instance CRD to reserve the requested slot. If cannot reserve that slot, forces `list_and_watch` to continue
/// (sending kubelet the latest list of slots) and returns error, so kubelet will not schedule the pod to this node.
async fn internal_allocate(
&self,
requests: Request<AllocateRequest>,
kube_interface: Arc<impl KubeInterface>,
) -> Result<Response<AllocateResponse>, Status> {
let mut container_responses: Vec<v1beta1::ContainerAllocateResponse> = Vec::new();
for request in requests.into_inner().container_requests {
trace!(
"internal_allocate - for Instance {} handling request {:?}",
&self.instance_name,
request,
);
let mut akri_annotations = std::collections::HashMap::new();
for device_usage_id in request.devices_i_ds {
trace!(
"internal_allocate - for Instance {} processing request for device usage slot id {}",
&self.instance_name,
device_usage_id
);
akri_annotations.insert(
AKRI_SLOT_ANNOTATION_NAME.to_string(),
device_usage_id.clone(),
);
if let Err(e) = try_update_instance_device_usage(
&device_usage_id,
&self.node_name,
&self.instance_name,
&self.config_namespace,
kube_interface.clone(),
)
.await
{
trace!("internal_allocate - could not assign {} slot to {} node ... forcing list_and_watch to continue", device_usage_id, &self.node_name);
self.list_and_watch_message_sender
.send(ListAndWatchMessageKind::Continue)
.unwrap();
return Err(e);
}
trace!(
"internal_allocate - finished processing device_usage_id {}",
device_usage_id
);
}
// Successfully reserved device_usage_slot[s] for this node.
// Add response to list of responses
let response = build_container_allocate_response(
akri_annotations,
&self.instance_properties,
&self.config.protocol,
);
container_responses.push(response);
}
trace!(
"internal_allocate - for Instance {} returning responses",
&self.instance_name
);
Ok(Response::new(v1beta1::AllocateResponse {
container_responses,
}))
}
}
/// This returns the value that should be inserted at the `device_usage_id` slot for an instance, or an error.
/// # More details
/// Cases based on the usage slot (`device_usage_id`) value
/// 1. device_usage[id] == "" ... this means that the device is available for use
/// * <ACTION> return this node name
/// 2. device_usage[id] == self.nodeName ... this means THIS node previously used id, but the DevicePluginManager knows that this is no longer true
/// * <ACTION> return ""
/// 3. device_usage[id] == <some other node> ... this means that we believe this device is in use by another node and should be marked unhealthy
/// * <ACTION> return error
/// 4. No corresponding id found ... this is an unknown error condition (BAD)
/// * <ACTION> return error
fn get_slot_value(
device_usage_id: &str,
node_name: &str,
instance: &Instance,
) -> Result<String, Status> {
if let Some(allocated_node) = instance.device_usage.get(device_usage_id) {
if allocated_node == "" {
Ok(node_name.to_string())
} else if allocated_node == node_name {
Ok("".to_string())
} else {
trace!("internal_allocate - request for device slot {} previously claimed by a diff node {} than this one {} ... indicates the device on THIS node must be marked unhealthy, invoking ListAndWatch ... returning failure, next scheduling should succeed!", device_usage_id, allocated_node, node_name);
Err(Status::new(
Code::Unknown,
"Requested device already in use",
))
}
} else {
// No corresponding id found
trace!(
"internal_allocate - could not find {} id in device_usage",
device_usage_id
);
Err(Status::new(
Code::Unknown,
"Could not find device usage slot",
))
}
}
/// This tries up to `MAX_INSTANCE_UPDATE_TRIES` times to update the requested slot of the Instance with the appropriate value (either "" to clear the slot, or node_name).
/// It cannot be assumed that this will successfully update the Instance on the first try, since Device Plugins on other nodes may be simultaneously trying to update the Instance.
/// This returns an error if the slot did not need to be updated or if all `MAX_INSTANCE_UPDATE_TRIES` attempts fail.
async fn try_update_instance_device_usage(
device_usage_id: &str,
node_name: &str,
instance_name: &str,
instance_namespace: &str,
kube_interface: Arc<impl KubeInterface>,
) -> Result<(), Status> {
let mut instance: Instance;
for x in 0..MAX_INSTANCE_UPDATE_TRIES {
// Grab latest instance
match kube_interface
.find_instance(&instance_name, &instance_namespace)
.await
{
Ok(instance_object) => instance = instance_object.spec,
Err(_) => {
trace!(
"internal_allocate - could not find Instance {}",
instance_name
);
return Err(Status::new(
Code::Unknown,
format!("Could not find Instance {}", instance_name),
));
}
}
// at this point, `value` should either be:
// * `node_name`: meaning that this node is claiming this slot
// * "": meaning this node previously claimed this slot, but kubelet
// knows that claim is no longer valid. In this case, reset the
// slot (which triggers each node to set the slot as Healthy) to
// allow a fair rescheduling of the workload
let value = get_slot_value(device_usage_id, node_name, &instance)?;
instance
.device_usage
.insert(device_usage_id.to_string(), value.clone());
match kube_interface
.update_instance(&instance, &instance_name, &instance_namespace)
.await
{
Ok(()) => {
if value == node_name {
return Ok(());
} else {
return Err(Status::new(Code::Unknown, "Devices are in inconsistent state, updated device usage, please retry scheduling"));
}
}
Err(e) => {
if x == (MAX_INSTANCE_UPDATE_TRIES - 1) {
trace!("internal_allocate - update_instance returned error [{}] after max tries ... returning error", e);
return Err(Status::new(Code::Unknown, "Could not update Instance"));
}
}
}
random_delay().await;
}
Ok(())
}
/// This sets the volume mounts and environment variables according to the instance's protocol.
fn build_container_allocate_response(
annotations: HashMap<String, String>,
instance_properties: &HashMap<String, String>,
protocol: &ProtocolHandler,
) -> v1beta1::ContainerAllocateResponse {
let mut mounts: Vec<v1beta1::Mount> = Vec::new();
// Set mounts according to protocol
match protocol {
ProtocolHandler::udev(_handler_config) => {
trace!("get_volumes_and_mounts - setting volumes and mounts for udev protocol");
mounts = instance_properties
.iter()
.map(|(_id, devpath)| v1beta1::Mount {
container_path: devpath.clone(),
host_path: devpath.clone(),
read_only: true,
})
.collect();
}
_ => trace!("get_volumes_and_mounts - no mounts or volumes required by this protocol"),
}
// Create response, setting environment variables to be an instance's properties (specified by protocol)
v1beta1::ContainerAllocateResponse {
annotations,
mounts,
envs: instance_properties.clone(),
..Default::default()
}
}
/// Try to find Instance CRD for this instance or create one and add it to the global InstanceMap
/// If a Config does not exist for this instance, return error.
/// This is most likely caused by deletion of a Config right after adding it, in which case
/// `handle_config_delete` fails to delete this instance because kubelet has yet to call `list_and_watch`
async fn try_create_instance(
dps: Arc<DevicePluginService>,
kube_interface: Arc<impl KubeInterface>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
// Make sure Configuration exists for instance
if let Err(e) = kube_interface
.find_configuration(&dps.config_name, &dps.config_namespace)
.await
{
error!(
"try_create_instance - no Configuration for device {} ... returning error",
dps.instance_name
);
return Err(e);
}
let device_usage: std::collections::HashMap<String, String> = (0..dps.config.capacity)
.map(|x| (format!("{}-{}", dps.instance_name, x), "".to_string()))
.collect();
let instance = Instance {
configuration_name: dps.config_name.clone(),
shared: dps.shared,
nodes: vec![dps.node_name.clone()],
device_usage,
metadata: dps.instance_properties.clone(),
rbac: "rbac".to_string(),
};
// Try up to MAX_INSTANCE_UPDATE_TRIES to create or update instance, breaking on success
for x in 0..MAX_INSTANCE_UPDATE_TRIES {
// First check if instance already exists
match kube_interface
.find_instance(&dps.instance_name, &dps.config_namespace)
.await
{
Ok(mut instance_object) => {
trace!(
"try_create_instance - discovered Instance {} already created",
dps.instance_name
);
// Check if instance's node list already contains this node, possibly due to device plugin failure and restart
if !instance_object.spec.nodes.contains(&dps.node_name) {
instance_object.spec.nodes.push(dps.node_name.clone());
match kube_interface
.update_instance(
&instance_object.spec,
&instance_object.metadata.name,
&dps.config_namespace,
)
.await
{
Ok(()) => {
trace!(
"try_create_instance - updated Instance {} to include {}",
dps.instance_name,
dps.node_name
);
break;
}
Err(e) => {
trace!("try_create_instance - call to update_instance returned with error {} on try # {} of {}", e, x, MAX_INSTANCE_UPDATE_TRIES);
if x == (MAX_INSTANCE_UPDATE_TRIES - 1) {
return Err(e);
}
}
};
} else {
break;
}
}
Err(_) => {
match kube_interface
.create_instance(
&instance,
&dps.instance_name,
&dps.config_namespace,
&dps.config_name,
&dps.config_uid,
)
.await
{
Ok(()) => {
trace!(
"try_create_instance - created Instance with name {}",
dps.instance_name
);
break;
}
Err(e) => {
trace!("try_create_instance - couldn't create instance with error {} on try # {} of {}", e, x, MAX_INSTANCE_UPDATE_TRIES);
if x == MAX_INSTANCE_UPDATE_TRIES - 1 {
return Err(e);
}
}
}
}
}
random_delay().await;
}
// Successfully created or updated instance. Add it to instance_map.
dps.instance_map.lock().await.insert(
dps.instance_name.clone(),
InstanceInfo {
list_and_watch_message_sender: dps.list_and_watch_message_sender.clone(),
connectivity_status: ConnectivityStatus::Online,
},
);
Ok(())
}
/// Returns list of "virtual" Devices and their health.
/// If the instance is offline, returns all unhealthy virtual Devices.
async fn build_list_and_watch_response(
dps: Arc<DevicePluginService>,
kube_interface: Arc<impl KubeInterface>,
) -> Result<Vec<v1beta1::Device>, Box<dyn std::error::Error + Send + Sync + 'static>> {
info!(
"build_list_and_watch_response -- for Instance {} entered",
dps.instance_name
);
// If instance has been removed from map, send back all unhealthy device slots
if !dps
.instance_map
.lock()
.await
.contains_key(&dps.instance_name)
{
trace!("build_list_and_watch_response - Instance {} removed from map ... returning unhealthy devices", dps.instance_name);
return Ok(build_unhealthy_virtual_devices(
dps.config.capacity,
&dps.instance_name,
));
}
// If instance is offline, send back all unhealthy device slots
if dps
.instance_map
.lock()
.await
.get(&dps.instance_name)
.unwrap()
.connectivity_status
!= ConnectivityStatus::Online
{
trace!("build_list_and_watch_response - device for Instance {} is offline ... returning unhealthy devices", dps.instance_name);
return Ok(build_unhealthy_virtual_devices(
dps.config.capacity,
&dps.instance_name,
));
}
trace!(
"build_list_and_watch_response -- device for Instance {} is online",
dps.instance_name
);
match kube_interface
.find_instance(&dps.instance_name, &dps.config_namespace)
.await
{
Ok(kube_akri_instance) => Ok(build_virtual_devices(
&kube_akri_instance.spec.device_usage,
kube_akri_instance.spec.shared,
&dps.node_name,
)),
Err(_) => {
trace!("build_list_and_watch_response - could not find instance {} so returning unhealthy devices", dps.instance_name);
Ok(build_unhealthy_virtual_devices(
dps.config.capacity,
&dps.instance_name,
))
}
}
}
/// This builds a list of unhealthy virtual Devices.
fn build_unhealthy_virtual_devices(capacity: i32, instance_name: &str) -> Vec<v1beta1::Device> {
let mut devices: Vec<v1beta1::Device> = Vec::new();
for x in 0..capacity {
let device = v1beta1::Device {
id: format!("{}-{}", instance_name, x),
health: UNHEALTHY.to_string(),
};
trace!(
"build_unhealthy_virtual_devices -- for Instance {} reporting unhealthy devices for device with name [{}] and health: [{}]",
instance_name,
device.id,
device.health,
);
devices.push(device);
}
devices
}
/// This builds a list of virtual Devices, determining the health of each virtual Device as follows:
/// Healthy if it is available to be used by this node or Unhealthy if it is already taken by another node.
fn build_virtual_devices(
device_usage: &HashMap<String, String>,
shared: bool,
node_name: &str,
) -> Vec<v1beta1::Device> {
let mut devices: Vec<v1beta1::Device> = Vec::new();
for (device_name, allocated_node) in device_usage {
// Throw error if unshared resource is reserved by another node
if !shared && allocated_node != "" && allocated_node != node_name {
panic!("build_virtual_devices - unshared device reserved by a different node");
}
// Advertise the device as Unhealthy if it is
// USED by !this_node && SHARED
let unhealthy = shared && allocated_node != "" && allocated_node != node_name;
let health = if unhealthy {
UNHEALTHY.to_string()
} else {
HEALTHY.to_string()
};
trace!(
"build_virtual_devices - [shared = {}] device with name [{}] and health: [{}]",
shared,
device_name,
health
);
devices.push(v1beta1::Device {
id: device_name.clone(),
health,
});
}
devices
}
/// This sends message to end `list_and_watch` and removes instance from InstanceMap.
/// Called when an instance has been offline for too long.
pub async fn terminate_device_plugin_service(
instance_name: &str,
instance_map: InstanceMap,
) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
let mut instance_map = instance_map.lock().await;
trace!(
"terminate_device_plugin_service -- forcing list_and_watch to end for Instance {}",
instance_name
);
instance_map
.get(instance_name)
.unwrap()
.list_and_watch_message_sender
.send(ListAndWatchMessageKind::End)
.unwrap();
trace!(
"terminate_device_plugin_service -- removing Instance {} from instance_map",
instance_name
);
instance_map.remove(instance_name);
Ok(())
}
/// This creates a new DevicePluginService for an instance and registers it with kubelet
pub async fn build_device_plugin(
instance_name: String,
config_name: String,
config_uid: String,
config_namespace: String,
config: Configuration,
shared: bool,
instance_properties: HashMap<String, String>,
instance_map: InstanceMap,
) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
info!("build_device_plugin - entered for device {}", instance_name);
let capability_id: String = format!("{}/{}", AKRI_PREFIX, instance_name);
let unique_time = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH)?;
let device_endpoint: String = format!("{}-{}.sock", instance_name, unique_time.as_secs());
let socket_path: String = format!(
"{}{}",
DEVICE_PLUGIN_PATH.to_string(),
device_endpoint.clone()
);
// Channel capacity set to 6 because there are 3 possible senders (allocate, update_connectivity_status, and handle_config_delete)
// and the receiver only periodically checks the channel
let (list_and_watch_message_sender, _) = broadcast::channel(6);
// Channel capacity set to 2 because in the worst case both register and list_and_watch send messages at the same time, and the receiver is always listening
let (server_ender_sender, server_ender_receiver) = mpsc::channel(2);
let device_plugin_service = DevicePluginService {
instance_name: instance_name.clone(),
endpoint: device_endpoint.clone(),
config,
config_name: config_name.clone(),
config_uid: config_uid.clone(),
config_namespace: config_namespace.clone(),
shared,
node_name: env::var("AGENT_NODE_NAME")?,
instance_properties,
instance_map: instance_map.clone(),
list_and_watch_message_sender: list_and_watch_message_sender.clone(),
server_ender_sender: server_ender_sender.clone(),
};
serve(
device_plugin_service,
socket_path.clone(),
server_ender_receiver,
)
.await?;
register(
capability_id,
device_endpoint,
&instance_name,
server_ender_sender,
)
.await?;
Ok(())
}
/// This acts as a signal future to gracefully shut down the DevicePluginServer upon its completion.
/// Completes when it receives a message from `list_and_watch`.
async fn shutdown_signal(mut server_ender_receiver: mpsc::Receiver<()>) {
match server_ender_receiver.recv().await {
Some(_) => trace!(
"shutdown_signal - received signal ... device plugin service gracefully shutting down"
),
None => trace!("shutdown_signal - connection to server_ender_sender closed ... error"),
}
}
/// This serves the DevicePluginServer.
async fn serve(
device_plugin_service: DevicePluginService,
socket_path: String,
server_ender_receiver: mpsc::Receiver<()>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
info!(
"serve - creating a device plugin server that will listen at: {}",
socket_path
);
tokio::fs::create_dir_all(Path::new(&socket_path[..]).parent().unwrap())
.await
.expect("Failed to create dir at socket path");
let mut uds = UnixListener::bind(socket_path.clone()).expect("Failed to bind to socket path");
let service = DevicePluginServer::new(device_plugin_service);
let socket_path_to_delete = socket_path.clone();
task::spawn(async move {
Server::builder()
.add_service(service)
.serve_with_incoming_shutdown(
uds.incoming().map_ok(unix::UnixStream),
shutdown_signal(server_ender_receiver),
)
.await
.unwrap();
trace!(
"serve - gracefully shutdown ... deleting socket {}",
socket_path_to_delete
);
// Socket may already be deleted in the case of kubelet restart
std::fs::remove_file(socket_path_to_delete).unwrap_or(());
});
// Test that server is running, trying for at most 10 seconds
// Similar to grpc.timeout, which is yet to be implemented for tonic
// See issue: https://github.com/hyperium/tonic/issues/75
let mut connected = false;
let start = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("Time went backwards")
.as_secs();
let start_plus_10 = start + 10;
while (SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("Time went backwards")
.as_secs()
< start_plus_10)
&& !connected
{
let path = socket_path.clone();
if let Ok(_v) = Endpoint::try_from("lttp://[::]:50051")?
.connect_with_connector(service_fn(move |_: Uri| UnixStream::connect(path.clone())))
.await
{
connected = true
} else {
delay_for(Duration::from_secs(1)).await
}
}
if !connected {
error!(
"serve - could not connect to Device Plugin server on socket {}",
socket_path
);
}
Ok(())
}
/// This registers DevicePlugin with kubelet.
/// During registration, the device plugin must send
/// (1) name of unix socket,
/// (2) Device-Plugin API it was built against (v1beta1),
/// (3) resource name akri.sh/device_id.
/// If registration request to kubelet fails, terminates DevicePluginService.
async fn register(
capability_id: String,
socket_name: String,
instance_name: &str,
mut server_ender_sender: mpsc::Sender<()>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
info!(
"register - entered for Instance {} and socket_name: {}",
capability_id, socket_name
);
let op = DevicePluginOptions {
pre_start_required: false,
};
// lttp://... is a fake URI that is unused (in service_fn) but necessary for the UDS connection
let channel = Endpoint::try_from("lttp://[::]:50051")?
.connect_with_connector(service_fn(|_: Uri| UnixStream::connect(KUBELET_SOCKET)))
.await?;
let mut registration_client = registration_client::RegistrationClient::new(channel);
let register_request = tonic::Request::new(v1beta1::RegisterRequest {
version: K8S_DEVICE_PLUGIN_VERSION.into(),
endpoint: socket_name,
resource_name: capability_id,
options: Some(op),
});
trace!(
"register - before call to register with Kubelet at socket {}",
KUBELET_SOCKET
);
// If fail to register with kubelet, terminate device plugin
if registration_client
.register(register_request)
.await
.is_err()
{
trace!(
"register - failed to register Instance {} with kubelet ... terminating device plugin",
instance_name
);
server_ender_sender.send(()).await?;
}
Ok(())
}
/// This creates an Instance's unique name
pub fn get_device_instance_name(id: &str, config_name: &str) -> String {
format!("{}-{}", config_name, &id)
.replace(".", "-")
.replace("/", "-")
}
/// Module to enable UDS with tonic grpc.
/// This is unix only since the underlying UnixStream and UnixListener libraries are unix only.
#[cfg(unix)]
mod unix {
use std::{
pin::Pin,
task::{Context, Poll},
};
use tokio::io::{AsyncRead, AsyncWrite};
use tonic::transport::server::Connected;
#[derive(Debug)]
pub struct UnixStream(pub tokio::net::UnixStream);
impl Connected for UnixStream {}
impl AsyncRead for UnixStream {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<std::io::Result<usize>> {
Pin::new(&mut self.0).poll_read(cx, buf)
}
}
impl AsyncWrite for UnixStream {
fn poll_write(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &[u8],
) -> Poll<std::io::Result<usize>> {
Pin::new(&mut self.0).poll_write(cx, buf)
}
fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<std::io::Result<()>> {
Pin::new(&mut self.0).poll_flush(cx)
}
fn poll_shutdown(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<std::io::Result<()>> {
Pin::new(&mut self.0).poll_shutdown(cx)
}
}
}
#[cfg(test)]
mod device_plugin_service_tests {
use super::super::v1beta1::device_plugin_client::DevicePluginClient;
use super::*;
use akri_shared::akri::configuration::KubeAkriConfig;
use akri_shared::{
akri::instance::{Instance, KubeAkriInstance},
k8s::test_kube::MockKubeImpl,
};
use mockall::predicate::*;
use std::{
fs,
io::{Error, ErrorKind},
};
enum NodeName {
ThisNode,
OtherNode,
}
// Need to be kept alive during tests
struct DevicePluginServiceReceivers {
list_and_watch_message_receiver: broadcast::Receiver<ListAndWatchMessageKind>,
server_ender_receiver: mpsc::Receiver<()>,
}
fn configure_find_instance(
mock: &mut MockKubeImpl,
result_file: &'static str,
instance_name: String,
instance_namespace: String,
device_usage_node: &'static str,
node_name: NodeName,
) {
let instance_name_clone = instance_name.clone();
mock.expect_find_instance()
.times(1)
.withf(move |name: &str, namespace: &str| {
namespace == instance_namespace && name == instance_name
})
.returning(move |_, _| {
let mut instance_json =
fs::read_to_string(result_file).expect("Unable to read file");
let host_name = match node_name {
NodeName::ThisNode => "node-a",
NodeName::OtherNode => "other",
};
instance_json = instance_json.replace("node-a", &host_name);
instance_json = instance_json.replace("config-a-b494b6", &instance_name_clone);
instance_json =
instance_json.replace("\":\"\"", &format!("\":\"{}\"", device_usage_node));
let instance: KubeAkriInstance = serde_json::from_str(&instance_json).unwrap();
Ok(instance)
});
}
fn create_device_plugin_service(
connectivity_status: ConnectivityStatus,
add_to_instance_map: bool,
) -> (DevicePluginService, DevicePluginServiceReceivers) {
let path_to_config = "../test/json/config-a.json";
let kube_akri_config_json =
fs::read_to_string(path_to_config).expect("Unable to read file");
let kube_akri_config: KubeAkriConfig =
serde_json::from_str(&kube_akri_config_json).unwrap();
let device_instance_name =
get_device_instance_name("b494b6", &kube_akri_config.metadata.name);
let unique_time = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH);
let device_endpoint: String = format!(
"{}-{}.sock",
device_instance_name,
unique_time.unwrap_or_default().as_secs()
);
let (list_and_watch_message_sender, list_and_watch_message_receiver) =
broadcast::channel(4);
let (server_ender_sender, server_ender_receiver) = mpsc::channel(1);
let mut map = HashMap::new();
if add_to_instance_map {
let instance_info: InstanceInfo = InstanceInfo {
list_and_watch_message_sender: list_and_watch_message_sender.clone(),
connectivity_status,
};
map.insert(device_instance_name.clone(), instance_info);
}
let instance_map: InstanceMap = Arc::new(Mutex::new(map));
let dps = DevicePluginService {
instance_name: device_instance_name,
endpoint: device_endpoint,
config: kube_akri_config.spec.clone(),
config_name: kube_akri_config.metadata.name,
config_uid: kube_akri_config.metadata.uid.unwrap(),
config_namespace: kube_akri_config.metadata.namespace.unwrap(),
shared: false,
node_name: "node-a".to_string(),
instance_properties: HashMap::new(),
instance_map,
list_and_watch_message_sender,
server_ender_sender,
};
(
dps,
DevicePluginServiceReceivers {
list_and_watch_message_receiver,
server_ender_receiver,
},
)
}
fn check_devices(instance_name: String, devices: Vec<v1beta1::Device>) {
let capacity: usize = 5;
// update_virtual_devices_health returns devices in jumbled order (e.g. 2, 4, 1, 5, 3)
let expected_device_ids: Vec<String> = (0..capacity)
.map(|x| format!("{}-{}", instance_name, x))
.collect();
assert_eq!(devices.len(), capacity);
// Collect device ids so contents can be compared regardless of order
let device_ids: Vec<String> = devices.into_iter().map(|device| device.id).collect();
for device in expected_device_ids {
assert!(device_ids.contains(&device));
}
}
// Tests that instance names are formatted correctly
#[test]
fn test_get_device_instance_name() {
let instance_name1: String = "/dev/video0".to_string();
let instance_name2: String = "10.1.2.3".to_string();
assert_eq!(
"usb-camera--dev-video0",
get_device_instance_name(&instance_name1, &"usb-camera".to_string())
);
assert_eq!(
"ip-camera-10-1-2-3".to_string(),
get_device_instance_name(&instance_name2, &"ip-camera".to_string())
);
}
fn configure_find_configuration(
mock: &mut MockKubeImpl,
config_name: String,
config_namespace: String,
) {
mock.expect_find_configuration()
.times(1)
.withf(move |name: &str, namespace: &str| {
namespace == config_namespace && name == config_name
})
.returning(move |_, _| {
let path_to_config = "../test/json/config-a.json";
let kube_akri_config_json =
fs::read_to_string(path_to_config).expect("Unable to read file");
let kube_akri_config: KubeAkriConfig =
serde_json::from_str(&kube_akri_config_json).unwrap();
Ok(kube_akri_config)
});
}
// Tests that try_create_instance creates an instance
#[tokio::test]
async fn test_try_create_instance() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, _device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Online, false);
let mut mock = MockKubeImpl::new();
configure_find_configuration(
&mut mock,
device_plugin_service.config_name.clone(),
device_plugin_service.config_namespace.clone(),
);
let instance_name = device_plugin_service.instance_name.clone();
let config_name = device_plugin_service.config_name.clone();
let config_uid = device_plugin_service.config_uid.clone();
let config_namespace = device_plugin_service.config_namespace.clone();
mock.expect_find_instance()
.times(1)
.withf(move |name: &str, namespace: &str| {
namespace == config_namespace && name == instance_name
})
.returning(move |_, _| {
let error = Error::new(ErrorKind::InvalidInput, "Configuration doesn't exist");
Err(Box::new(error))
});
let instance_name = device_plugin_service.instance_name.clone();
let config_namespace = device_plugin_service.config_namespace.clone();
mock.expect_create_instance()
.withf(move |instance, name, namespace, owner_name, owner_uid| {
namespace == config_namespace
&& name == instance_name
&& instance.nodes.contains(&"node-a".to_string())
&& owner_name == config_name
&& owner_uid == config_uid
})
.returning(move |_, _, _, _, _| Ok(()));
let dps = Arc::new(device_plugin_service);
assert!(try_create_instance(dps.clone(), Arc::new(mock))
.await
.is_ok());
assert!(dps
.instance_map
.lock()
.await
.contains_key(&dps.instance_name));
}
// Tests that try_create_instance updates already existing instance with this node
#[tokio::test]
async fn test_try_create_instance_already_created() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, _device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Online, false);
let mut mock = MockKubeImpl::new();
configure_find_configuration(
&mut mock,
device_plugin_service.config_name.clone(),
device_plugin_service.config_namespace.clone(),
);
configure_find_instance(
&mut mock,
"../test/json/local-instance.json",
device_plugin_service.instance_name.clone(),
device_plugin_service.config_namespace.clone(),
"",
NodeName::OtherNode,
);
let instance_name = device_plugin_service.instance_name.clone();
let config_namespace = device_plugin_service.config_namespace.clone();
mock.expect_update_instance()
.times(1)
.withf(move |instance, name, namespace| {
namespace == config_namespace
&& name == instance_name
&& instance.nodes.contains(&"node-a".to_string())
})
.returning(move |_, _, _| Ok(()));
let dps = Arc::new(device_plugin_service);
assert!(try_create_instance(dps.clone(), Arc::new(mock))
.await
.is_ok());
assert!(dps
.instance_map
.lock()
.await
.contains_key(&dps.instance_name));
}
// Test when instance already created and already contains this node.
// Should find the instance but not update it.
#[tokio::test]
async fn test_try_create_instance_already_created_no_update() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, _device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Online, false);
let mut mock = MockKubeImpl::new();
configure_find_configuration(
&mut mock,
device_plugin_service.config_name.clone(),
device_plugin_service.config_namespace.clone(),
);
configure_find_instance(
&mut mock,
"../test/json/local-instance.json",
device_plugin_service.instance_name.clone(),
device_plugin_service.config_namespace.clone(),
"",
NodeName::ThisNode,
);
let dps = Arc::new(device_plugin_service);
assert!(try_create_instance(dps.clone(), Arc::new(mock))
.await
.is_ok());
assert!(dps
.instance_map
.lock()
.await
.contains_key(&dps.instance_name));
}
    // Tests that try_create_instance returns an error when trying to create an Instance for a Config that does not exist
#[tokio::test]
async fn test_try_create_instance_no_config() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, _device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Online, false);
let config_name = device_plugin_service.config_name.clone();
let config_namespace = device_plugin_service.config_namespace.clone();
let mut mock = MockKubeImpl::new();
mock.expect_find_configuration()
.times(1)
.withf(move |name: &str, namespace: &str| {
namespace == config_namespace && name == config_name
})
.returning(move |_, _| {
let error = Error::new(ErrorKind::InvalidInput, "Configuration doesn't exist");
Err(Box::new(error))
});
assert!(
try_create_instance(Arc::new(device_plugin_service), Arc::new(mock))
.await
.is_err()
);
}
    // Tests that try_create_instance returns an error after MAX_INSTANCE_UPDATE_TRIES failed attempts to find or create the Instance
#[tokio::test]
async fn test_try_create_instance_error() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, _device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Online, false);
let mut mock = MockKubeImpl::new();
configure_find_configuration(
&mut mock,
device_plugin_service.config_name.clone(),
device_plugin_service.config_namespace.clone(),
);
let instance_name = device_plugin_service.instance_name.clone();
let config_name = device_plugin_service.config_name.clone();
let config_uid = device_plugin_service.config_uid.clone();
let config_namespace = device_plugin_service.config_namespace.clone();
mock.expect_find_instance()
.times(MAX_INSTANCE_UPDATE_TRIES as usize)
.withf(move |name: &str, namespace: &str| {
namespace == config_namespace && name == instance_name
})
.returning(move |_, _| Err(None.ok_or("failure")?));
let instance_name = device_plugin_service.instance_name.clone();
let config_namespace = device_plugin_service.config_namespace.clone();
mock.expect_create_instance()
.times(MAX_INSTANCE_UPDATE_TRIES as usize)
.withf(move |instance, name, namespace, owner_name, owner_uid| {
namespace == config_namespace
&& name == instance_name
&& instance.nodes.contains(&"node-a".to_string())
&& owner_name == config_name
&& owner_uid == config_uid
})
.returning(move |_, _, _, _, _| Err(None.ok_or("failure")?));
let dps = Arc::new(device_plugin_service);
assert!(try_create_instance(dps.clone(), Arc::new(mock))
.await
.is_err());
assert!(!dps
.instance_map
.lock()
.await
.contains_key(&dps.instance_name));
}
// Tests list_and_watch by creating DevicePluginService and DevicePlugin client (emulating kubelet)
#[tokio::test]
async fn test_list_and_watch() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Online, false);
let socket_path: String = format!(
"{}{}",
DEVICE_PLUGIN_PATH.to_string(),
device_plugin_service.endpoint.clone()
);
let list_and_watch_message_sender =
device_plugin_service.list_and_watch_message_sender.clone();
let instance_name = device_plugin_service.instance_name.clone();
serve(
device_plugin_service,
socket_path.clone(),
device_plugin_service_receivers.server_ender_receiver,
)
.await
.unwrap();
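        // The URI below is a placeholder: connect_with_connector hands it to the
        // connector closure, which ignores it and dials the Unix socket path instead.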
let channel = Endpoint::try_from("lttp://[::]:50051")
.unwrap()
.connect_with_connector(service_fn(move |_: Uri| {
UnixStream::connect(socket_path.clone())
}))
.await
.unwrap();
let mut client = DevicePluginClient::new(channel);
let mut stream = client
.list_and_watch(Request::new(Empty {}))
.await
.unwrap()
.into_inner();
list_and_watch_message_sender
.send(ListAndWatchMessageKind::End)
.unwrap();
if let Some(list_and_watch_response) = stream.message().await.unwrap() {
assert_eq!(
list_and_watch_response.devices[0].id,
format!("{}-0", instance_name)
);
};
}
#[tokio::test]
async fn test_build_virtual_devices() {
let mut device_usage: HashMap<String, String> = HashMap::new();
let mut expected_devices_nodea: HashMap<String, String> = HashMap::new();
let mut expected_devices_nodeb: HashMap<String, String> = HashMap::new();
let instance_name = "s0meH@sH";
for x in 0..5 {
if x % 2 == 0 {
device_usage.insert(format!("{}-{}", instance_name, x), "nodeA".to_string());
expected_devices_nodea
.insert(format!("{}-{}", instance_name, x), HEALTHY.to_string());
expected_devices_nodeb
.insert(format!("{}-{}", instance_name, x), UNHEALTHY.to_string());
} else {
device_usage.insert(format!("{}-{}", instance_name, x), "".to_string());
expected_devices_nodea
.insert(format!("{}-{}", instance_name, x), HEALTHY.to_string());
expected_devices_nodeb
.insert(format!("{}-{}", instance_name, x), HEALTHY.to_string());
}
}
// Test shared all healthy
let mut devices: Vec<v1beta1::Device> =
build_virtual_devices(&device_usage, true, &"nodeA".to_string());
for device in devices {
assert_eq!(
expected_devices_nodea.get(&device.id).unwrap(),
&device.health
);
}
// Test unshared all healthy
devices = build_virtual_devices(&device_usage, false, &"nodeA".to_string());
for device in devices {
assert_eq!(
expected_devices_nodea.get(&device.id).unwrap(),
&device.health
);
}
// Test shared some unhealthy (taken by another node)
devices = build_virtual_devices(&device_usage, true, &"nodeB".to_string());
for device in devices {
assert_eq!(
expected_devices_nodeb.get(&device.id).unwrap(),
&device.health
);
}
// Test unshared panic. A different node should never be listed under any device usage slots
let result = std::panic::catch_unwind(|| {
build_virtual_devices(&device_usage, false, &"nodeB".to_string())
});
assert!(result.is_err());
}
// Tests when ConnectivityStatus is offline and unhealthy devices are returned
#[tokio::test]
async fn test_build_list_and_watch_response_offline() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, _device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Offline(Instant::now()), true);
let mock = MockKubeImpl::new();
let devices =
build_list_and_watch_response(Arc::new(device_plugin_service), Arc::new(mock))
.await
.unwrap();
devices
.into_iter()
.for_each(|device| assert!(device.health == UNHEALTHY));
}
// Tests when instance has not yet been created for this device, all devices are returned as UNHEALTHY
#[tokio::test]
async fn test_build_list_and_watch_response_no_instance() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, _device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Online, true);
let instance_name = device_plugin_service.instance_name.clone();
let instance_namespace = device_plugin_service.config_namespace.clone();
let mut mock = MockKubeImpl::new();
mock.expect_find_instance()
.times(1)
.withf(move |name: &str, namespace: &str| {
namespace == instance_namespace && name == instance_name
})
.returning(move |_, _| {
let error = Error::new(ErrorKind::InvalidInput, "Instance doesn't exist");
Err(Box::new(error))
});
let devices =
build_list_and_watch_response(Arc::new(device_plugin_service), Arc::new(mock))
.await
.unwrap();
devices
.into_iter()
.for_each(|device| assert!(device.health == UNHEALTHY));
}
// Test when instance has already been created and includes this node
#[tokio::test]
async fn test_build_list_and_watch_response_no_instance_update() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, _device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Online, true);
let instance_name = device_plugin_service.instance_name.clone();
let instance_namespace = device_plugin_service.config_namespace.clone();
let mut mock = MockKubeImpl::new();
configure_find_instance(
&mut mock,
"../test/json/local-instance.json",
instance_name.clone(),
instance_namespace.clone(),
"",
NodeName::ThisNode,
);
let devices =
build_list_and_watch_response(Arc::new(device_plugin_service), Arc::new(mock))
.await
.unwrap();
check_devices(instance_name, devices);
}
    // Test when device_usage[id] == ""
    // Expected behavior: internal_allocate should set device_usage[id] = self.node_name and return successfully
#[tokio::test]
async fn test_internal_allocate_success() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, mut device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Online, true);
let device_usage_id_slot = format!("{}-0", device_plugin_service.instance_name);
let device_usage_id_slot_2 = device_usage_id_slot.clone();
let node_name = device_plugin_service.node_name.clone();
let mut mock = MockKubeImpl::new();
configure_find_instance(
&mut mock,
"../test/json/local-instance.json",
device_plugin_service.instance_name.clone(),
device_plugin_service.config_namespace.clone(),
"",
NodeName::ThisNode,
);
mock.expect_update_instance()
.times(1)
.withf(move |instance_to_update: &Instance, _, _| {
instance_to_update
.device_usage
.get(&device_usage_id_slot)
.unwrap()
== &node_name
})
.returning(move |_, _, _| Ok(()));
let devices_i_ds = vec![device_usage_id_slot_2];
let container_requests = vec![v1beta1::ContainerAllocateRequest { devices_i_ds }];
let requests = Request::new(AllocateRequest { container_requests });
assert!(device_plugin_service
.internal_allocate(requests, Arc::new(mock),)
.await
.is_ok());
assert!(device_plugin_service_receivers
.list_and_watch_message_receiver
.try_recv()
.is_err());
}
// Test when device_usage[id] == self.nodeName
// Expected behavior: internal_allocate should set device_usage[id] == "", invoke list_and_watch, and return error
#[tokio::test]
async fn test_internal_allocate_deallocate() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, mut device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Online, true);
let device_usage_id_slot = format!("{}-0", device_plugin_service.instance_name);
let device_usage_id_slot_2 = device_usage_id_slot.clone();
let mut mock = MockKubeImpl::new();
configure_find_instance(
&mut mock,
"../test/json/local-instance.json",
device_plugin_service.instance_name.clone(),
device_plugin_service.config_namespace.clone(),
"node-a",
NodeName::ThisNode,
);
mock.expect_update_instance()
.times(1)
.withf(move |instance_to_update: &Instance, _, _| {
instance_to_update
.device_usage
.get(&device_usage_id_slot)
.unwrap()
== ""
})
.returning(move |_, _, _| Ok(()));
let devices_i_ds = vec![device_usage_id_slot_2];
let container_requests = vec![v1beta1::ContainerAllocateRequest { devices_i_ds }];
let requests = Request::new(AllocateRequest { container_requests });
match device_plugin_service
.internal_allocate(requests, Arc::new(mock))
.await
{
Ok(_) => {
panic!("internal allocate is expected to fail due to devices being in bad state")
}
Err(e) => assert_eq!(
e.message(),
"Devices are in inconsistent state, updated device usage, please retry scheduling"
),
}
assert_eq!(
device_plugin_service_receivers
.list_and_watch_message_receiver
.recv()
.await
.unwrap(),
ListAndWatchMessageKind::Continue
);
}
// Tests when device_usage[id] == <another node>
// Expected behavior: should invoke list_and_watch, and return error
#[tokio::test]
async fn test_internal_allocate_taken() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, mut device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Online, true);
let device_usage_id_slot = format!("{}-0", device_plugin_service.instance_name);
let mut mock = MockKubeImpl::new();
configure_find_instance(
&mut mock,
"../test/json/local-instance.json",
device_plugin_service.instance_name.clone(),
device_plugin_service.config_namespace.clone(),
"other",
NodeName::ThisNode,
);
let devices_i_ds = vec![device_usage_id_slot];
let container_requests = vec![v1beta1::ContainerAllocateRequest { devices_i_ds }];
let requests = Request::new(AllocateRequest { container_requests });
match device_plugin_service
.internal_allocate(requests, Arc::new(mock))
.await
{
Ok(_) => panic!(
"internal allocate is expected to fail due to requested device already being used"
),
Err(e) => assert_eq!(e.message(), "Requested device already in use"),
}
assert_eq!(
device_plugin_service_receivers
.list_and_watch_message_receiver
.recv()
.await
.unwrap(),
ListAndWatchMessageKind::Continue
);
}
// Tests when instance does not have the requested device usage id
// Expected behavior: should invoke list_and_watch, and return error
#[tokio::test]
async fn test_internal_allocate_no_id() {
let _ = env_logger::builder().is_test(true).try_init();
let (device_plugin_service, mut device_plugin_service_receivers) =
create_device_plugin_service(ConnectivityStatus::Online, true);
let device_usage_id_slot = format!("{}-100", device_plugin_service.instance_name);
let mut mock = MockKubeImpl::new();
configure_find_instance(
&mut mock,
"../test/json/local-instance.json",
device_plugin_service.instance_name.clone(),
device_plugin_service.config_namespace.clone(),
"other",
NodeName::ThisNode,
);
let devices_i_ds = vec![device_usage_id_slot];
let container_requests = vec![v1beta1::ContainerAllocateRequest { devices_i_ds }];
let requests = Request::new(AllocateRequest { container_requests });
match device_plugin_service
.internal_allocate(requests, Arc::new(mock))
.await
{
Ok(_) => {
panic!("internal allocate is expected to fail due to invalid device usage slot")
}
Err(e) => assert_eq!(e.message(), "Could not find device usage slot"),
}
assert_eq!(
device_plugin_service_receivers
.list_and_watch_message_receiver
.recv()
.await
.unwrap(),
ListAndWatchMessageKind::Continue
);
}
}
|
{
trace!("get_device_plugin_options - kubelet called get_device_plugin_options");
let resp = DevicePluginOptions {
pre_start_required: true,
};
Ok(Response::new(resp))
}
|
preinit.rs
|
use std::path::{Path, PathBuf};
use termion::event::Key;
use tui::{
backend::Backend,
layout::{Alignment, Constraint, Direction, Layout, Rect},
style::{Color, Style},
text::Text,
widgets::{Block, Borders, Paragraph, Wrap},
Frame,
};
use crate::{
project::{read::NonInitializedRootError, NonInitializedRoot},
CargoFuzzcheckError,
};
use super::framework::{AnyView, HorizontalMove, Theme, ViewState};
pub struct PreInitView {
pub root_path: PathBuf,
pub non_initialized_root: NonInitializedRoot,
focus: Focus,
}
impl PreInitView {
pub fn new(root_path: &Path) -> Result<Self, NonInitializedRootError> {
let non_initialized_root = NonInitializedRoot::from_path(root_path)?;
Ok(Self {
root_path: root_path.to_path_buf(),
non_initialized_root,
focus: Focus::Quit,
})
}
}
enum Focus {
Initialize,
Quit,
}
pub enum Update {
Initialize(Option<String>),
Move(HorizontalMove),
Quit,
}
pub enum OutMessage {
Initialized,
Error(CargoFuzzcheckError),
Quit,
}
impl AnyView for PreInitView {
fn focus(&mut self) {}
fn unfocus(&mut self) {}
fn key_bindings(&self) -> Vec<(Key, String)> {
let mut map = Vec::new();
map.push((Key::Char('\n'), "confirm choice".to_string()));
map
}
}
impl ViewState for PreInitView {
type Update = self::Update;
type InMessage = Key;
type OutMessage = self::OutMessage;
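    // For example, pressing Enter while Focus::Initialize is selected maps to
    // Update::Initialize(None); horizontal-movement keys map to Update::Move.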
fn convert_in_message(&self, input: Key) -> Option<Update> {
if let Some(mv) = HorizontalMove::from(&input) {
return Some(Update::Move(mv));
}
match input {
Key::Char('\n') => match self.focus {
Focus::Initialize => Some(Update::Initialize(None)),
Focus::Quit => Some(Update::Quit),
},
_ => None,
}
}
fn update(&mut self, u: Update) -> Option<OutMessage> {
match u {
Update::Initialize(fuzzcheck_path) => {
                let fuzzcheck_path = fuzzcheck_path.unwrap_or_else(|| env!("CARGO_PKG_VERSION").to_string());
let result = self.non_initialized_root.init_command(&fuzzcheck_path);
match result {
Ok(_) => Some(OutMessage::Initialized),
Err(err) => Some(OutMessage::Error(err)),
}
}
Update::Move(HorizontalMove::Left) => match self.focus {
Focus::Quit => {
self.focus = Focus::Initialize;
None
}
_ => None,
},
Update::Move(HorizontalMove::Right) => match self.focus {
Focus::Initialize =>
|
_ => None,
},
Update::Quit => Some(OutMessage::Quit),
}
}
fn draw<B>(&self, frame: &mut Frame<B>, theme: &Theme, area: Rect)
where
B: Backend,
{
let chunks = Layout::default()
.direction(Direction::Vertical)
.constraints([Constraint::Length(5), Constraint::Min(0)].as_ref())
.split(area);
let block = Block::default().style(Style::default().bg(Color::Black));
frame.render_widget(block, area);
let bottom_chunks = Layout::default()
.direction(Direction::Vertical)
.constraints([Constraint::Length(3), Constraint::Min(0)].as_ref())
.split(chunks[1]);
let button_areas = Layout::default()
.direction(Direction::Horizontal)
.constraints([Constraint::Percentage(50), Constraint::Percentage(50)].as_ref())
.split(bottom_chunks[0]);
let text = Text::from("The fuzz folder has not been created yet. Would you like to create it?");
let p = Paragraph::new(text)
.block(Block::default().borders(Borders::ALL))
.style(theme.default)
.alignment(Alignment::Center)
.wrap(Wrap { trim: true });
frame.render_widget(p, chunks[0]);
let mut initialize_button = Paragraph::new(Text::raw("Create"))
.block(Block::default().borders(Borders::ALL))
.alignment(Alignment::Center)
.style(theme.default);
let mut quit_button = Paragraph::new(Text::raw("Quit"))
.block(Block::default().borders(Borders::ALL))
.alignment(Alignment::Center)
.style(theme.default);
match self.focus {
Focus::Initialize => {
initialize_button = initialize_button.style(theme.highlight);
}
Focus::Quit => {
quit_button = quit_button.style(theme.highlight);
}
}
frame.render_widget(initialize_button, button_areas[0]);
frame.render_widget(quit_button, button_areas[1]);
}
}
|
{
self.focus = Focus::Quit;
None
}
|
rpc.go
|
// Package rpc is a go-micro rpc handler.
package rpc
import (
"encoding/json"
"io"
"net/http"
"net/textproto"
"strconv"
"strings"
jsonpatch "github.com/evanphx/json-patch/v5"
"github.com/oxtoacart/bpool"
"xinhari.com/xinhari/api"
"xinhari.com/xinhari/api/handler"
"xinhari.com/xinhari/api/internal/proto"
"xinhari.com/xinhari/client"
"xinhari.com/xinhari/client/selector"
"xinhari.com/xinhari/codec"
"xinhari.com/xinhari/codec/jsonrpc"
"xinhari.com/xinhari/codec/protorpc"
"xinhari.com/xinhari/errors"
"xinhari.com/xinhari/logger"
"xinhari.com/xinhari/metadata"
"xinhari.com/xinhari/registry"
"xinhari.com/xinhari/util/ctx"
"xinhari.com/xinhari/util/qson"
)
const (
Handler = "rpc"
)
var (
// supported json codecs
jsonCodecs = []string{
"application/grpc+json",
"application/json",
"application/json-rpc",
}
	// supported proto codecs
protoCodecs = []string{
"application/grpc",
"application/grpc+proto",
"application/proto",
"application/protobuf",
"application/proto-rpc",
"application/octet-stream",
}
bufferPool = bpool.NewSizedBufferPool(1024, 8)
)
type rpcHandler struct {
opts handler.Options
s *api.Service
}
type buffer struct {
io.ReadCloser
}
func (b *buffer) Write(_ []byte) (int, error) {
return 0, nil
}
// strategy is a hack for selection
func strategy(services []*registry.Service) selector.Strategy {
return func(_ []*registry.Service) selector.Next {
// ignore input to this function, use services above
return selector.Random(services)
}
}
func (h *rpcHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
bsize := handler.DefaultMaxRecvSize
if h.opts.MaxRecvSize > 0 {
bsize = h.opts.MaxRecvSize
}
r.Body = http.MaxBytesReader(w, r.Body, bsize)
defer r.Body.Close()
var service *api.Service
if h.s != nil {
// we were given the service
service = h.s
} else if h.opts.Router != nil {
// try get service from router
s, err := h.opts.Router.Route(r)
if err != nil {
writeError(w, r, errors.InternalServerError("go.micro.api", err.Error()))
return
}
service = s
} else {
// we have no way of routing the request
writeError(w, r, errors.InternalServerError("go.micro.api", "no route found"))
return
}
ct := r.Header.Get("Content-Type")
// Strip charset from Content-Type (like `application/json; charset=UTF-8`)
if idx := strings.IndexRune(ct, ';'); idx >= 0 {
ct = ct[:idx]
}
// micro client
c := h.opts.Client
// create context
cx := ctx.FromRequest(r)
// get context from http handler wrappers
md, ok := metadata.FromContext(r.Context())
if !ok {
md = make(metadata.Metadata)
}
	// fill context with http headers
md["Host"] = r.Host
md["Method"] = r.Method
// get canonical headers
	for k := range r.Header {
		// may need to get all values for a key, like r.Header.Values() provides in Go 1.14
md[textproto.CanonicalMIMEHeaderKey(k)] = r.Header.Get(k)
}
// merge context with overwrite
cx = metadata.MergeContext(cx, md, true)
// set merged context to request
*r = *r.Clone(cx)
// if stream we currently only support json
if isStream(r, service) {
// drop older context as it can have timeouts and create new
// md, _ := metadata.FromContext(cx)
//serveWebsocket(context.TODO(), w, r, service, c)
serveWebsocket(cx, w, r, service, c)
return
}
// create strategy
so := selector.WithStrategy(strategy(service.Services))
// walk the standard call path
// get payload
br, err := requestPayload(r)
if err != nil {
writeError(w, r, err)
return
}
var rsp []byte
switch {
// proto codecs
case hasCodec(ct, protoCodecs):
request := &proto.Message{}
// if the extracted payload isn't empty lets use it
if len(br) > 0 {
request = proto.NewMessage(br)
}
// create request/response
response := &proto.Message{}
req := c.NewRequest(
service.Name,
service.Endpoint.Name,
request,
client.WithContentType(ct),
)
// make the call
if err := c.Call(cx, req, response, client.WithSelectOption(so)); err != nil {
writeError(w, r, err)
return
}
		// marshal response
rsp, err = response.Marshal()
if err != nil {
writeError(w, r, err)
return
}
default:
// if json codec is not present set to json
if !hasCodec(ct, jsonCodecs) {
ct = "application/json"
}
// default to trying json
var request json.RawMessage
// if the extracted payload isn't empty lets use it
if len(br) > 0 {
request = json.RawMessage(br)
}
// create request/response
var response json.RawMessage
req := c.NewRequest(
service.Name,
service.Endpoint.Name,
&request,
client.WithContentType(ct),
)
// make the call
if err := c.Call(cx, req, &response, client.WithSelectOption(so)); err != nil {
writeError(w, r, err)
return
}
		// marshal response
rsp, err = response.MarshalJSON()
if err != nil {
writeError(w, r, err)
return
}
}
// write the response
writeResponse(w, r, rsp)
}
func (rh *rpcHandler) String() string {
return "rpc"
}
func hasCodec(ct string, codecs []string) bool {
for _, codec := range codecs {
if ct == codec {
return true
}
}
return false
}
// requestPayload takes a *http.Request.
// If the request is a GET, the query string parameters are extracted, marshaled to JSON and the raw bytes are returned.
// If the request method is a POST (or PATCH, PUT or DELETE), the request body is read and returned.
func requestPayload(r *http.Request) ([]byte, error) {
var err error
	// we have to decode json-rpc and proto-rpc here because there's no proxy codec right now
ct := r.Header.Get("Content-Type")
switch {
case strings.Contains(ct, "application/json-rpc"):
msg := codec.Message{
Type: codec.Request,
Header: make(map[string]string),
}
c := jsonrpc.NewCodec(&buffer{r.Body})
if err = c.ReadHeader(&msg, codec.Request); err != nil {
return nil, err
}
var raw json.RawMessage
if err = c.ReadBody(&raw); err != nil {
return nil, err
}
return ([]byte)(raw), nil
case strings.Contains(ct, "application/proto-rpc"), strings.Contains(ct, "application/octet-stream"):
msg := codec.Message{
Type: codec.Request,
Header: make(map[string]string),
}
c := protorpc.NewCodec(&buffer{r.Body})
if err = c.ReadHeader(&msg, codec.Request); err != nil {
return nil, err
}
var raw proto.Message
if err = c.ReadBody(&raw); err != nil {
return nil, err
}
return raw.Marshal()
case strings.Contains(ct, "application/www-x-form-urlencoded"):
r.ParseForm()
// generate a new set of values from the form
vals := make(map[string]string)
for k, v := range r.Form {
vals[k] = strings.Join(v, ",")
}
// marshal
return json.Marshal(vals)
// TODO: application/grpc
}
// otherwise as per usual
ctx := r.Context()
	// don't use metadata.FromContext as it mangles names
md, ok := metadata.FromContext(ctx)
if !ok {
md = make(map[string]string)
}
// allocate maximum
matches := make(map[string]interface{}, len(md))
bodydst := ""
// get fields from url path
for k, v := range md {
k = strings.ToLower(k)
// filter own keys
if strings.HasPrefix(k, "x-api-field-") {
matches[strings.TrimPrefix(k, "x-api-field-")] = v
delete(md, k)
} else if k == "x-api-body" {
bodydst = v
delete(md, k)
}
}
// map of all fields
req := make(map[string]interface{}, len(md))
// get fields from url values
if len(r.URL.RawQuery) > 0 {
umd := make(map[string]interface{})
err = qson.Unmarshal(&umd, r.URL.RawQuery)
if err != nil {
return nil, err
}
for k, v := range umd {
matches[k] = v
}
}
// restore context without fields
*r = *r.Clone(metadata.NewContext(ctx, md))
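	// For illustration (hypothetical header): "X-Api-Field-User.Name: alice"
	// lands in matches as matches["user.name"] = "alice", and the loop below
	// expands it into req["user"] = map[string]interface{}{"name": "alice"}.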
for k, v := range matches {
ps := strings.Split(k, ".")
if len(ps) == 1 {
req[k] = v
continue
}
em := make(map[string]interface{})
em[ps[len(ps)-1]] = v
for i := len(ps) - 2; i > 0; i-- {
nm := make(map[string]interface{})
nm[ps[i]] = em
em = nm
}
if vm, ok := req[ps[0]]; ok {
// nested map
nm := vm.(map[string]interface{})
for vk, vv := range em {
nm[vk] = vv
}
req[ps[0]] = nm
} else {
req[ps[0]] = em
}
}
pathbuf := []byte("{}")
if len(req) > 0 {
pathbuf, err = json.Marshal(req)
if err != nil {
return nil, err
}
}
urlbuf := []byte("{}")
out, err := jsonpatch.MergeMergePatches(urlbuf, pathbuf)
if err != nil {
return nil, err
}
switch r.Method {
case "GET":
		// empty request payload
if strings.Contains(ct, "application/json") && string(out) == "{}" {
return out, nil
} else if string(out) == "{}" && !strings.Contains(ct, "application/json") {
return []byte{}, nil
}
return out, nil
case "PATCH", "POST", "PUT", "DELETE":
bodybuf := []byte("{}")
buf := bufferPool.Get()
defer bufferPool.Put(buf)
if _, err := buf.ReadFrom(r.Body); err != nil {
return nil, err
}
if b := buf.Bytes(); len(b) > 0 {
bodybuf = b
}
if bodydst == "" || bodydst == "*" {
if out, err = jsonpatch.MergeMergePatches(out, bodybuf); err == nil {
return out, nil
}
}
var jsonbody map[string]interface{}
if json.Valid(bodybuf) {
if err = json.Unmarshal(bodybuf, &jsonbody); err != nil {
return nil, err
}
}
dstmap := make(map[string]interface{})
ps := strings.Split(bodydst, ".")
if len(ps) == 1 {
if jsonbody != nil
|
else {
// old unexpected behaviour
dstmap[ps[0]] = bodybuf
}
} else {
em := make(map[string]interface{})
if jsonbody != nil {
em[ps[len(ps)-1]] = jsonbody
} else {
// old unexpected behaviour
em[ps[len(ps)-1]] = bodybuf
}
for i := len(ps) - 2; i > 0; i-- {
nm := make(map[string]interface{})
nm[ps[i]] = em
em = nm
}
dstmap[ps[0]] = em
}
bodyout, err := json.Marshal(dstmap)
if err != nil {
return nil, err
}
if out, err = jsonpatch.MergeMergePatches(out, bodyout); err == nil {
return out, nil
}
		// fallback to previous unknown behaviour
return bodybuf, nil
}
return []byte{}, nil
}
func writeError(w http.ResponseWriter, r *http.Request, err error) {
ce := errors.Parse(err.Error())
	switch ce.Code {
	case 0:
		// assuming it's totally screwed
		ce.Code = 500
		ce.Id = "go.micro.api"
		ce.Status = http.StatusText(500)
		ce.Detail = "error during request: " + ce.Detail
	}
	// response content type; headers must be set before WriteHeader is called
	w.Header().Set("Content-Type", "application/json")
	// Set trailers
	if strings.Contains(r.Header.Get("Content-Type"), "application/grpc") {
		w.Header().Add("Trailer", "grpc-status")
		w.Header().Add("Trailer", "grpc-message")
		w.Header().Set("grpc-status", "13")
		w.Header().Set("grpc-message", ce.Detail)
	}
	w.WriteHeader(int(ce.Code))
_, werr := w.Write([]byte(ce.Error()))
if werr != nil {
if logger.V(logger.ErrorLevel, logger.DefaultLogger) {
logger.Error(werr)
}
}
}
func writeResponse(w http.ResponseWriter, r *http.Request, rsp []byte) {
w.Header().Set("Content-Type", r.Header.Get("Content-Type"))
w.Header().Set("Content-Length", strconv.Itoa(len(rsp)))
// Set trailers
if strings.Contains(r.Header.Get("Content-Type"), "application/grpc") {
w.Header().Set("Trailer", "grpc-status")
w.Header().Set("Trailer", "grpc-message")
w.Header().Set("grpc-status", "0")
w.Header().Set("grpc-message", "")
}
// write 204 status if rsp is nil
if len(rsp) == 0 {
w.WriteHeader(http.StatusNoContent)
}
// write response
_, err := w.Write(rsp)
if err != nil {
if logger.V(logger.ErrorLevel, logger.DefaultLogger) {
logger.Error(err)
}
}
}
func NewHandler(opts ...handler.Option) handler.Handler {
options := handler.NewOptions(opts...)
return &rpcHandler{
opts: options,
}
}
func WithService(s *api.Service, opts ...handler.Option) handler.Handler {
options := handler.NewOptions(opts...)
return &rpcHandler{
opts: options,
s: s,
}
}
|
{
dstmap[ps[0]] = jsonbody
}
|
jquery.gridList.js
|
// It does not try to register in a CommonJS environment since jQuery is not
// likely to run in those environments.
(function (factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(['jquery', 'grid-list'], factory);
} else {
factory(jQuery, GridList);
}
}(function($, GridList) {
var DraggableGridList = function(element, options, draggableOptions) {
this.options = $.extend({}, this.defaults, options);
this.draggableOptions = $.extend(
{}, this.draggableDefaults, draggableOptions);
this.$element = $(element);
this._init();
this._bindEvents();
};
DraggableGridList.prototype = {
defaults: {
lanes: 5,
direction: "horizontal",
itemSelector: 'li[data-w]',
widthHeightRatio: 1,
dragAndDrop: true
},
draggableDefaults: {
zIndex: 2,
scroll: false,
containment: "parent"
},
destroy: function() {
this._unbindEvents();
},
resize: function(lanes) {
if (lanes) {
this.options.lanes = lanes;
}
this._createGridSnapshot();
this.gridList.resizeGrid(this.options.lanes);
this._updateGridSnapshot();
this.reflow();
},
|
*
* @param {Object} size
* @param {Number} [size.w]
 * @param {Number} [size.h]
*/
this._createGridSnapshot();
this.gridList.resizeItem(this._getItemByElement(element), size);
this._updateGridSnapshot();
this.render();
},
reflow: function() {
this._calculateCellSize();
this.render();
},
render: function() {
this._applySizeToItems();
this._applyPositionToItems();
},
_bindMethod: function(fn) {
/**
* Bind prototype method to instance scope (similar to CoffeeScript's fat
* arrow)
*/
var that = this;
return function() {
return fn.apply(that, arguments);
};
},
_init: function() {
// Read items and their meta data. Ignore other list elements (like the
// position highlight)
this.$items = this.$element.children(this.options.itemSelector);
this.items = this._generateItemsFromDOM();
this._widestItem = Math.max.apply(
null, this.items.map(function(item) { return item.w; }));
this._tallestItem = Math.max.apply(
null, this.items.map(function(item) { return item.h; }));
// Used to highlight a position an element will land on upon drop
this.$positionHighlight = this.$element.find('.position-highlight').hide();
this._initGridList();
this.reflow();
if (this.options.dragAndDrop) {
// Init Draggable JQuery UI plugin for each of the list items
// http://api.jqueryui.com/draggable/
this.$items.draggable(this.draggableOptions);
}
},
_initGridList: function() {
// Create instance of GridList (decoupled lib for handling the grid
// positioning and sorting post-drag and dropping)
this.gridList = new GridList(this.items, {
lanes: this.options.lanes,
direction: this.options.direction
});
},
_bindEvents: function() {
this._onStart = this._bindMethod(this._onStart);
this._onDrag = this._bindMethod(this._onDrag);
this._onStop = this._bindMethod(this._onStop);
this.$items.on('dragstart', this._onStart);
this.$items.on('drag', this._onDrag);
this.$items.on('dragstop', this._onStop);
},
_unbindEvents: function() {
this.$items.off('dragstart', this._onStart);
this.$items.off('drag', this._onDrag);
this.$items.off('dragstop', this._onStop);
},
_onStart: function(event, ui) {
      // Create a deep copy of the items; we use them to revert the item
      // positions after each drag change, making an entire drag operation less
      // destructive
this._createGridSnapshot();
// Since dragging actually alters the grid, we need to establish the number
// of cols (+1 extra) before the drag starts
this._maxGridCols = this.gridList.grid.length;
},
_onDrag: function(event, ui) {
var item = this._getItemByElement(ui.helper),
newPosition = this._snapItemPositionToGrid(item);
if (this._dragPositionChanged(newPosition)) {
this._previousDragPosition = newPosition;
// Regenerate the grid with the positions from when the drag started
GridList.cloneItems(this._items, this.items);
this.gridList.generateGrid();
// Since the items list is a deep copy, we need to fetch the item
// corresponding to this drag action again
item = this._getItemByElement(ui.helper);
this.gridList.moveItemToPosition(item, newPosition);
// Visually update item positions and highlight shape
this._applyPositionToItems();
this._highlightPositionForItem(item);
}
},
_onStop: function(event, ui) {
this._updateGridSnapshot();
this._previousDragPosition = null;
// HACK: jQuery.draggable removes this class after the dragstop callback,
// and we need it removed before the drop, to re-enable CSS transitions
$(ui.helper).removeClass('ui-draggable-dragging');
this._applyPositionToItems();
this._removePositionHighlight();
},
_generateItemsFromDOM: function() {
/**
* Generate the structure of items used by the GridList lib, using the DOM
* data of the children of the targeted element. The items will have an
* additional reference to the initial DOM element attached, in order to
* trace back to it and re-render it once its properties are changed by the
* GridList lib
*/
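      // Expected markup per item (attribute values here are illustrative):
      //   <li data-id="1" data-x="0" data-y="0" data-w="2" data-h="1">...</li>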
var _this = this,
items = [],
item;
this.$items.each(function(i, element) {
items.push({
$element: $(element),
x: Number($(element).attr('data-x')),
y: Number($(element).attr('data-y')),
w: Number($(element).attr('data-w')),
h: Number($(element).attr('data-h')),
id: Number($(element).attr('data-id'))
});
});
return items;
},
_getItemByElement: function(element) {
// XXX: this could be optimized by storing the item reference inside the
// meta data of the DOM element
for (var i = 0; i < this.items.length; i++) {
if (this.items[i].$element.is(element)) {
return this.items[i];
}
}
},
_calculateCellSize: function() {
if (this.options.direction === "horizontal") {
this._cellHeight = Math.floor(this.$element.height() / this.options.lanes);
this._cellWidth = this._cellHeight * this.options.widthHeightRatio;
} else {
this._cellWidth = Math.floor(this.$element.width() / this.options.lanes);
this._cellHeight = this._cellWidth / this.options.widthHeightRatio;
}
if (this.options.heightToFontSizeRatio) {
this._fontSize = this._cellHeight * this.options.heightToFontSizeRatio;
}
},
_getItemWidth: function(item) {
return item.w * this._cellWidth;
},
_getItemHeight: function(item) {
return item.h * this._cellHeight;
},
_applySizeToItems: function() {
for (var i = 0; i < this.items.length; i++) {
this.items[i].$element.css({
width: this._getItemWidth(this.items[i]),
height: this._getItemHeight(this.items[i])
});
}
if (this.options.heightToFontSizeRatio) {
this.$items.css('font-size', this._fontSize);
}
},
_applyPositionToItems: function() {
// TODO: Implement group separators
for (var i = 0; i < this.items.length; i++) {
// Don't interfere with the positions of the dragged items
if (this.items[i].move) {
continue;
}
this.items[i].$element.css({
left: this.items[i].x * this._cellWidth,
top: this.items[i].y * this._cellHeight
});
}
// Update the width of the entire grid container with enough room on the
// right to allow dragging items to the end of the grid.
if (this.options.direction === "horizontal") {
this.$element.width(
(this.gridList.grid.length + (this.options.dragAndDrop ? this._widestItem : 0)) * this._cellWidth);
} else {
this.$element.height(
(this.gridList.grid.length + (this.options.dragAndDrop ? this._tallestItem : 0)) * this._cellHeight);
}
},
_dragPositionChanged: function(newPosition) {
if (!this._previousDragPosition) {
return true;
}
return (newPosition[0] != this._previousDragPosition[0] ||
newPosition[1] != this._previousDragPosition[1]);
},
_snapItemPositionToGrid: function(item) {
var position = item.$element.position();
position[0] -= this.$element.position().left;
var col = Math.round(position.left / this._cellWidth),
row = Math.round(position.top / this._cellHeight);
// Keep item position within the grid and don't let the item create more
// than one extra column
col = Math.max(col, 0);
row = Math.max(row, 0);
if (this.options.direction === "horizontal") {
col = Math.min(col, this._maxGridCols);
row = Math.min(row, this.options.lanes - item.h);
} else {
col = Math.min(col, this.options.lanes - item.w);
row = Math.min(row, this._maxGridCols);
}
return [col, row];
},
_highlightPositionForItem: function(item) {
this.$positionHighlight.css({
width: this._getItemWidth(item),
height: this._getItemHeight(item),
left: item.x * this._cellWidth,
top: item.y * this._cellHeight
}).show();
if (this.options.heightToFontSizeRatio) {
this.$positionHighlight.css('font-size', this._fontSize);
}
},
_removePositionHighlight: function() {
this.$positionHighlight.hide();
},
_createGridSnapshot: function() {
this._items = GridList.cloneItems(this.items);
},
_updateGridSnapshot: function() {
// Notify the user with the items that changed since the previous snapshot
this._triggerOnChange();
GridList.cloneItems(this.items, this._items);
},
_triggerOnChange: function() {
if (typeof(this.options.onChange) != 'function') {
return;
}
this.options.onChange.call(
this, this.gridList.getChangedItems(this._items, '$element'));
}
};
$.fn.gridList = function(options, draggableOptions) {
var instance,
method,
args;
if (typeof(options) == 'string') {
method = options;
args = Array.prototype.slice.call(arguments, 1);
}
this.each(function() {
instance = $(this).data('_gridList');
      // The plugin can be called with no method on an existing GridList
      // instance to re-initialize it
if (instance && !method) {
instance.destroy();
instance = null;
}
if (!instance) {
instance = new DraggableGridList(this, options, draggableOptions);
$(this).data('_gridList', instance);
}
if (method) {
instance[method].apply(instance, args);
}
});
// Maintain jQuery chain
return this;
};
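  // A usage sketch (the selector, option values and the draggable `handle`
  // option are assumptions, not part of this file):
  //
  //   $('#grid').gridList({lanes: 4, direction: 'horizontal'},
  //                       {handle: '.item-header'});
  //   $('#grid').gridList('resize', 6);  // invoke a method on the instance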
}));
|
resizeItem: function(element, size) {
/**
* Resize an item.
|
finalizers_accessor.go
|
/*
Copyright 2018 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sdk
import (
"errors"
"fmt"
"reflect"
"k8s.io/apimachinery/pkg/util/sets"
)
// FinalizersAccessor is the interface for a Resource that implements the getter and setter for
// accessing its Finalizer set.
// +k8s:deepcopy-gen=true
type FinalizersAccessor interface {
GetFinalizers() sets.String
SetFinalizers(finalizers sets.String)
}
// NewReflectedFinalizersAccessor uses reflection to return a FinalizersAccessor to access the field
// called "Finalizers".
func NewReflectedFinalizersAccessor(object interface{}) (FinalizersAccessor, error) {
objectValue := reflect.Indirect(reflect.ValueOf(object))
// If object is not a struct, don't even try to use it.
if objectValue.Kind() != reflect.Struct
|
finalizersField := objectValue.FieldByName("Finalizers")
if finalizersField.IsValid() && finalizersField.CanSet() && finalizersField.Kind() == reflect.Slice {
finalizers := sets.NewString()
for i := 0; i < finalizersField.Len(); i++ {
finalizer := finalizersField.Index(i)
if finalizer.IsValid() && finalizer.Kind() == reflect.String {
finalizers.Insert(finalizer.String())
} else {
return nil, fmt.Errorf("element in the Finalizer slice was not a string: %v", finalizer.Kind())
}
}
return &reflectedFinalizersAccessor{
finalizersField: finalizersField,
finalizersSet: finalizers,
}, nil
}
return nil, fmt.Errorf("finalizer was not a slice: %v", finalizersField.Kind())
}
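// A usage sketch (the pod variable is hypothetical; any pointer to a struct
// with a []string "Finalizers" field works):
//
//	accessor, err := NewReflectedFinalizersAccessor(&pod)
//	if err == nil {
//		finalizers := accessor.GetFinalizers()
//		finalizers.Insert("example.com/my-finalizer")
//		accessor.SetFinalizers(finalizers)
//	}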
// reflectedFinalizersAccessor is an internal wrapper object to act as the FinalizersAccessor for
// objects that do not implement FinalizersAccessor directly, but do expose the field using the
// name "Finalizers".
type reflectedFinalizersAccessor struct {
finalizersField reflect.Value
finalizersSet sets.String
}
// GetFinalizers uses reflection to return the Finalizers set from the held object.
func (r *reflectedFinalizersAccessor) GetFinalizers() sets.String {
return r.finalizersSet
}
// SetFinalizers uses reflection to set Finalizers on the held object.
func (r *reflectedFinalizersAccessor) SetFinalizers(finalizers sets.String) {
r.finalizersSet = finalizers
r.finalizersField.Set(reflect.ValueOf(finalizers.List()))
}
|
{
return nil, errors.New("object is not a struct")
}
|
example2.py
|
def reverseLists(list1):
    """
    Reverse a list in place using recursion.
    :param list1: the list to reverse
    :return: None (the list is modified in place)
    """
    def helper(list1, left, right):
        if left < right:
            list1[left], list1[right] = list1[right], list1[left]
|
            helper(list1, left + 1, right - 1)
    helper(list1, 0, len(list1) - 1)
if __name__ == "__main__" :
list1 = ["a", "b", "c" , "d" , "d","e"]
reverseLists(list1)
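    # list1 is reversed in place; it should now read ['e', 'd', 'd', 'c', 'b', 'a']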
    print(list1)
| |
dynamic-lookup-relation-search-tab.component.ts
|
import { Component, EventEmitter, Input, OnDestroy, OnInit, Output } from '@angular/core';
import { SEARCH_CONFIG_SERVICE } from '../../../../../../my-dspace-page/my-dspace-page.component';
import { SearchConfigurationService } from '../../../../../../core/shared/search/search-configuration.service';
import { Item } from '../../../../../../core/shared/item.model';
import { SearchResult } from '../../../../../search/models/search-result.model';
import { PaginatedList } from '../../../../../../core/data/paginated-list.model';
import { Observable } from 'rxjs';
import { RelationshipOptions } from '../../../models/relationship-options.model';
import { PaginationComponentOptions } from '../../../../../pagination/pagination-component-options.model';
import { ListableObject } from '../../../../../object-collection/shared/listable-object.model';
import { SearchService } from '../../../../../../core/shared/search/search.service';
import { SelectableListService } from '../../../../../object-list/selectable-list/selectable-list.service';
import { hasValue } from '../../../../../empty.util';
import { map, mapTo, switchMap, take, tap } from 'rxjs/operators';
import { getFirstSucceededRemoteData, getRemoteDataPayload } from '../../../../../../core/shared/operators';
import { CollectionElementLinkType } from '../../../../../object-collection/collection-element-link.type';
import { Context } from '../../../../../../core/shared/context.model';
import { LookupRelationService } from '../../../../../../core/data/lookup-relation.service';
import { PaginationService } from '../../../../../../core/pagination/pagination.service';
import { RelationshipService } from '../../../../../../core/data/relationship.service';
import { RelationshipType } from '../../../../../../core/shared/item-relationships/relationship-type.model';
import { Relationship } from '../../../../../../core/shared/item-relationships/relationship.model';
import { SearchObjects } from '../../../../../search/models/search-objects.model';
import { DSpaceObject } from '../../../../../../core/shared/dspace-object.model';
import { BehaviorSubject } from 'rxjs';
@Component({
selector: 'ds-dynamic-lookup-relation-search-tab',
styleUrls: ['./dynamic-lookup-relation-search-tab.component.scss'],
templateUrl: './dynamic-lookup-relation-search-tab.component.html',
providers: [
{
provide: SEARCH_CONFIG_SERVICE,
useClass: SearchConfigurationService
}
]
})
/**
 * Tab inside the lookup modal that represents the items that can be used as a relationship in this submission
 */
export class
|
implements OnInit, OnDestroy {
/**
* Options for searching related items
*/
@Input() relationship: RelationshipOptions;
/**
* The ID of the list to add/remove selected items to/from
*/
@Input() listId: string;
@Input() query: string;
/**
* Is the selection repeatable?
*/
@Input() repeatable: boolean;
/**
* The list of selected items
*/
@Input() selection$: Observable<ListableObject[]>;
/**
* The context to display lists
*/
@Input() context: Context;
/**
* The type of relationship
*/
@Input() relationshipType: RelationshipType;
/**
* The item being viewed
*/
@Input() item: Item;
  /**
   * Whether this is the left or the right side of the relationship type
   */
@Input() isLeft: boolean;
  /**
   * The search results that are marked for removal and should not be selected
   */
@Input() toRemove: SearchResult<Item>[];
  /**
   * Whether this tab is being used by the edit-relationship component
   */
@Input() isEditRelationship: boolean;
/**
* Send an event to deselect an object from the list
*/
@Output() deselectObject: EventEmitter<ListableObject> = new EventEmitter<ListableObject>();
/**
* Send an event to select an object from the list
*/
@Output() selectObject: EventEmitter<ListableObject> = new EventEmitter<ListableObject>();
/**
* Search results
*/
resultsRD$: BehaviorSubject<SearchObjects<DSpaceObject>> = new BehaviorSubject<SearchObjects<DSpaceObject>>(null);
/**
* Are all results selected?
*/
allSelected: boolean;
/**
* Are some results selected?
*/
someSelected$: Observable<boolean>;
/**
* Is it currently loading to select all results?
*/
selectAllLoading: boolean;
/**
* Subscription to unsubscribe from
*/
subscription;
/**
* The initial pagination to use
*/
initialPagination = {
page: 1,
pageSize: 5
};
/**
* The type of links to display
*/
linkTypes = CollectionElementLinkType;
/**
* Emits an event with the current search result entries
*/
@Output() resultFound: EventEmitter<SearchObjects<DSpaceObject>> = new EventEmitter<SearchObjects<DSpaceObject>>();
constructor(
private searchService: SearchService,
private selectableListService: SelectableListService,
public searchConfigService: SearchConfigurationService,
public lookupRelationService: LookupRelationService,
private relationshipService: RelationshipService,
private paginationService: PaginationService
) {
}
/**
* Sets up the pagination and fixed query parameters
*/
ngOnInit(): void {
this.resetRoute();
}
  /**
   * Resets the route when the window is opened, to make sure no strange pagination issues appear
   */
resetRoute() {
this.paginationService.updateRoute(this.searchConfigService.paginationID, this.initialPagination);
}
/**
* Selects a page in the store
* @param page The page to select
*/
selectPage(page: SearchResult<DSpaceObject>[]) {
this.selection$
.pipe(take(1))
.subscribe((selection: SearchResult<Item>[]) => {
const filteredPage = page.filter((pageItem) => selection.findIndex((selected) => selected.equals(pageItem)) < 0);
this.selectObject.emit(...filteredPage);
});
this.selectableListService.select(this.listId, page);
}
/**
* Deselects a page in the store
* @param page the page to deselect
*/
deselectPage(page: SearchResult<DSpaceObject>[]) {
this.allSelected = false;
this.selection$
.pipe(take(1))
.subscribe((selection: SearchResult<Item>[]) => {
const filteredPage = page.filter((pageItem) => selection.findIndex((selected) => selected.equals(pageItem)) >= 0);
this.deselectObject.emit(...filteredPage);
});
this.selectableListService.deselect(this.listId, page);
}
/**
* Select all items that were found using the current search query
*/
selectAll() {
this.allSelected = true;
this.selectAllLoading = true;
const fullPagination = Object.assign(new PaginationComponentOptions(), {
currentPage: 1,
pageSize: 9999
});
const fullSearchConfig = Object.assign(this.lookupRelationService.searchConfig, { pagination: fullPagination });
const results$ = this.searchService.search<Item>(fullSearchConfig);
results$.pipe(
getFirstSucceededRemoteData(),
map((resultsRD) => resultsRD.payload.page),
tap(() => this.selectAllLoading = false),
switchMap((results) => this.selection$.pipe(
take(1),
tap((selection: SearchResult<Item>[]) => {
const filteredResults = results.filter((pageItem) => selection.findIndex((selected) => selected.equals(pageItem)) < 0);
this.selectObject.emit(...filteredResults);
}),
mapTo(results)
))
).subscribe((results) => {
this.selectableListService.select(this.listId, results);
});
}
  /**
   * setSelectedIds selects all the items from the results that already have a relationship
   * @param idOfItems the uuids of the items that are being checked
   * @param resultListOfItems the list of search results for those items
   */
setSelectedIds(idOfItems: string[], resultListOfItems: SearchResult<DSpaceObject>[]) {
let relationType = this.relationshipType.rightwardType;
if ( this.isLeft ) {
relationType = this.relationshipType.leftwardType;
}
this.relationshipService.searchByItemsAndType( this.relationshipType.id, this.item.uuid, relationType ,idOfItems ).pipe(
getFirstSucceededRemoteData(),
getRemoteDataPayload(),
).subscribe( (res: PaginatedList<Relationship>) => {
let selectableObject = res.page.map( (relationship: any) => {
let arrUrl = [];
if ( this.isLeft ) {
arrUrl = relationship._links.rightItem.href.split('/');
} else {
arrUrl = relationship._links.leftItem.href.split('/');
}
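          // e.g. a _links href ending in '.../items/<uuid>' (an assumed URL
          // shape) leaves the uuid as the last path segment extracted below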
const uuid = arrUrl[ arrUrl.length - 1 ];
return this.getRelatedItem(uuid, resultListOfItems);
});
selectableObject = selectableObject.filter( (selObject) => {
return !this.getIfInRemove(selObject.indexableObject.uuid);
});
if ( selectableObject.length > 0 ) {
this.selectableListService.select(this.listId, selectableObject);
}
});
}
/**
* Deselect all items
*/
deselectAll() {
this.allSelected = false;
this.selection$
.pipe(take(1))
.subscribe((selection: SearchResult<DSpaceObject>[]) => this.deselectObject.emit(...selection));
this.selectableListService.deselectAll(this.listId);
}
getRelatedItem(uuid: string, resultList: SearchResult<DSpaceObject>[]) {
return resultList.find( (resultItem) => {
return resultItem.indexableObject.uuid === uuid;
});
}
getIfInRemove(uuid: string) {
return !!this.toRemove.find( (searchResult) => searchResult.indexableObject.uuid === uuid);
}
ngOnDestroy(): void {
if (hasValue(this.subscription)) {
this.subscription.unsubscribe();
}
}
onResultFound($event: SearchObjects<DSpaceObject>) {
this.resultsRD$.next($event);
this.resultFound.emit($event);
if (this.isEditRelationship ) {
const idOfItems = $event.page.map( itemSearchResult => {
return itemSearchResult.indexableObject.uuid;
});
this.setSelectedIds(idOfItems, $event.page);
}
}
}
|
DsDynamicLookupRelationSearchTabComponent
|
rsid.rs
|
#[doc = "Reader of register RSID"]
pub type R = crate::R<u32, super::RSID>;
#[doc = "Writer for register RSID"]
pub type W = crate::W<u32, super::RSID>;
#[doc = "Register RSID `reset()`'s with value 0"]
impl crate::ResetValue for super::RSID {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `POR`"]
pub type POR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `POR`"]
pub struct POR_W<'a> {
w: &'a mut W,
}
impl<'a> POR_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn
|
(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
self.w
}
}
#[doc = "Reader of field `EXTR`"]
pub type EXTR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EXTR`"]
pub struct EXTR_W<'a> {
w: &'a mut W,
}
impl<'a> EXTR_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "Reader of field `WDTR`"]
pub type WDTR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `WDTR`"]
pub struct WDTR_W<'a> {
w: &'a mut W,
}
impl<'a> WDTR_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "Reader of field `BODR`"]
pub type BODR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BODR`"]
pub struct BODR_W<'a> {
w: &'a mut W,
}
impl<'a> BODR_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
#[doc = "Reader of field `SYSRESET`"]
pub type SYSRESET_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SYSRESET`"]
pub struct SYSRESET_W<'a> {
w: &'a mut W,
}
impl<'a> SYSRESET_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Reader of field `LOCKUP`"]
pub type LOCKUP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LOCKUP`"]
pub struct LOCKUP_W<'a> {
w: &'a mut W,
}
impl<'a> LOCKUP_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
impl R {
#[doc = "Bit 0 - Assertion of the POR signal sets this bit, and clears all of the other bits in this register. But if another Reset signal (e.g., External Reset) remains asserted after the POR signal is negated, then its bit is set. This bit is not affected by any of the other sources of Reset."]
#[inline(always)]
pub fn por(&self) -> POR_R {
POR_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Assertion of the external RESET signal sets this bit. This bit is cleared only by software or POR."]
#[inline(always)]
pub fn extr(&self) -> EXTR_R {
EXTR_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - This bit is set when the Watchdog Timer times out and the WDTRESET bit in the Watchdog Mode Register is 1. This bit is cleared only by software or POR."]
#[inline(always)]
pub fn wdtr(&self) -> WDTR_R {
WDTR_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - This bit is set when the VDD(REG)(3V3) voltage reaches a level below the BOD reset trip level (typically 1.85 V under nominal room temperature conditions). If the VDD(REG)(3V3) voltage dips from the normal operating range to below the BOD reset trip level and recovers, the BODR bit will be set to 1. If the VDD(REG)(3V3) voltage dips from the normal operating range to below the BOD reset trip level and continues to decline to the level at which POR is asserted (nominally 1 V), the BODR bit is cleared. If the VDD(REG)(3V3) voltage rises continuously from below 1 V to a level above the BOD reset trip level, the BODR will be set to 1. This bit is cleared only by software or POR. Note: Only in the case where a reset occurs and the POR = 0, the BODR bit indicates if the VDD(REG)(3V3) voltage was below the BOD reset trip level or not."]
#[inline(always)]
pub fn bodr(&self) -> BODR_R {
BODR_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - This bit is set if the processor has been reset due to a system reset request, as described in Section 40.4.3.6 Application Interrupt and Reset Control Register. Setting the SYSRESETREQ bit in the Cortex-M3 AIRCR register causes a chip reset in the LPC178x/177x. This bit is cleared only by software or POR."]
#[inline(always)]
pub fn sysreset(&self) -> SYSRESET_R {
SYSRESET_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - This bit is set if the processor has been reset due to a lockup, as described in Section 40.3.4.4 Lockup. The lockup state causes a chip reset in the LPC178x/177x. This bit is cleared only by software or POR."]
#[inline(always)]
pub fn lockup(&self) -> LOCKUP_R {
LOCKUP_R::new(((self.bits >> 5) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 0 - Assertion of the POR signal sets this bit, and clears all of the other bits in this register. But if another Reset signal (e.g., External Reset) remains asserted after the POR signal is negated, then its bit is set. This bit is not affected by any of the other sources of Reset."]
#[inline(always)]
pub fn por(&mut self) -> POR_W {
POR_W { w: self }
}
#[doc = "Bit 1 - Assertion of the external RESET signal sets this bit. This bit is cleared only by software or POR."]
#[inline(always)]
pub fn extr(&mut self) -> EXTR_W {
EXTR_W { w: self }
}
#[doc = "Bit 2 - This bit is set when the Watchdog Timer times out and the WDTRESET bit in the Watchdog Mode Register is 1. This bit is cleared only by software or POR."]
#[inline(always)]
pub fn wdtr(&mut self) -> WDTR_W {
WDTR_W { w: self }
}
#[doc = "Bit 3 - This bit is set when the VDD(REG)(3V3) voltage reaches a level below the BOD reset trip level (typically 1.85 V under nominal room temperature conditions). If the VDD(REG)(3V3) voltage dips from the normal operating range to below the BOD reset trip level and recovers, the BODR bit will be set to 1. If the VDD(REG)(3V3) voltage dips from the normal operating range to below the BOD reset trip level and continues to decline to the level at which POR is asserted (nominally 1 V), the BODR bit is cleared. If the VDD(REG)(3V3) voltage rises continuously from below 1 V to a level above the BOD reset trip level, the BODR will be set to 1. This bit is cleared only by software or POR. Note: Only in the case where a reset occurs and the POR = 0, the BODR bit indicates if the VDD(REG)(3V3) voltage was below the BOD reset trip level or not."]
#[inline(always)]
pub fn bodr(&mut self) -> BODR_W {
BODR_W { w: self }
}
#[doc = "Bit 4 - This bit is set if the processor has been reset due to a system reset request, as described in Section 40.4.3.6 Application Interrupt and Reset Control Register. Setting the SYSRESETREQ bit in the Cortex-M3 AIRCR register causes a chip reset in the LPC178x/177x. This bit is cleared only by software or POR."]
#[inline(always)]
pub fn sysreset(&mut self) -> SYSRESET_W {
SYSRESET_W { w: self }
}
#[doc = "Bit 5 - This bit is set if the processor has been reset due to a lockup, as described in Section 40.3.4.4 Lockup. The lockup state causes a chip reset in the LPC178x/177x. This bit is cleared only by software or POR."]
#[inline(always)]
pub fn lockup(&mut self) -> LOCKUP_W {
LOCKUP_W { w: self }
}
}
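// Hedged usage sketch (the peripheral handle `syscon` and register accessor
// `rsid` are assumed for illustration; they are not defined in this file):
//
//     let r = syscon.rsid.read();
//     if r.sysreset().bit_is_set() { /* last reset came from SYSRESETREQ */ }
//     // The field docs say these bits are "cleared only by software"; whether
//     // that means writing 0 or 1 is device-specific, so consult the manual:
//     syscon.rsid.write(|w| w.sysreset().set_bit()); // assuming write-one-to-clear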
|
clear_bit
|
timer.rs
|
// Copyright 2017 TiKV Project Authors. Licensed under Apache-2.0.
use crate::time::{monotonic_raw_now, Instant};
use lazy_static::lazy_static;
use std::cmp::{Ord, Ordering, Reverse};
use std::collections::BinaryHeap;
use std::sync::{mpsc, Arc};
use std::thread::Builder;
use std::time::Duration;
use time::Timespec;
use tokio_executor::park::ParkThread;
use tokio_timer::{self, clock::Clock, clock::Now, timer::Handle, Delay};
pub struct Timer<T> {
pending: BinaryHeap<Reverse<TimeoutTask<T>>>,
}
impl<T> Timer<T> {
pub fn new(capacity: usize) -> Self {
Timer {
pending: BinaryHeap::with_capacity(capacity),
}
}
/// Adds a periodic task into the `Timer`.
pub fn add_task(&mut self, timeout: Duration, task: T) {
let task = TimeoutTask {
next_tick: Instant::now() + timeout,
task,
};
self.pending.push(Reverse(task));
}
/// Gets the next `timeout` from the timer.
pub fn next_timeout(&mut self) -> Option<Instant> {
self.pending.peek().map(|task| task.0.next_tick)
}
/// Pops a `TimeoutTask` from the `Timer`, which should be ticked before `instant`.
/// Returns `None` if no tasks should be ticked any more.
///
    /// The normal use case is to keep calling `pop_task_before` until it returns
    /// `None`, in order to retrieve all available events.
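    /// A minimal drain-loop sketch (the `handle` function is illustrative):
    ///
    /// ```ignore
    /// while let Some(task) = timer.pop_task_before(Instant::now()) {
    ///     handle(task);
    /// }
    /// ```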
pub fn pop_task_before(&mut self, instant: Instant) -> Option<T> {
if self
.pending
.peek()
.map_or(false, |t| t.0.next_tick <= instant)
{
return self.pending.pop().map(|t| t.0.task);
}
None
}
}
#[derive(Debug)]
struct TimeoutTask<T> {
next_tick: Instant,
task: T,
}
impl<T> PartialEq for TimeoutTask<T> {
fn eq(&self, other: &TimeoutTask<T>) -> bool {
self.next_tick == other.next_tick
}
}
impl<T> Eq for TimeoutTask<T> {}
impl<T> PartialOrd for TimeoutTask<T> {
fn partial_cmp(&self, other: &TimeoutTask<T>) -> Option<Ordering> {
self.next_tick.partial_cmp(&other.next_tick)
}
}
impl<T> Ord for TimeoutTask<T> {
fn cmp(&self, other: &TimeoutTask<T>) -> Ordering {
// TimeoutTask.next_tick must have same type of instants.
self.partial_cmp(other).unwrap()
}
}
lazy_static! {
pub static ref GLOBAL_TIMER_HANDLE: Handle = start_global_timer();
}
fn start_global_timer() -> Handle {
let (tx, rx) = mpsc::channel();
let props = crate::thread_group::current_properties();
Builder::new()
.name(thd_name!("timer"))
.spawn(move || {
crate::thread_group::set_properties(props);
tikv_alloc::add_thread_memory_accessor();
let mut timer = tokio_timer::Timer::default();
tx.send(timer.handle()).unwrap();
loop {
timer.turn(None).unwrap();
}
})
.unwrap();
rx.recv().unwrap()
}
/// A struct that marks the *zero* time.
///
/// A *zero* time can be any time, as what it represents is an `Instant`,
/// which is opaque.
struct TimeZero {
/// An arbitrary time used as the zero time.
///
/// Note that `zero` doesn't have to be related to `steady_time_point`, as what's
/// observed here is elapsed time instead of time point.
zero: std::time::Instant,
/// A base time point.
///
    /// The source of this time point should grow steadily.
steady_time_point: Timespec,
}
/// A clock that produces time in a steady speed.
///
/// Time produced by the clock is not affected by clock jump or time adjustment.
/// Internally it uses CLOCK_MONOTONIC_RAW to get a steady time source.
///
/// `Instant`s produced by this clock can't be compared or used to calculate elapsed
/// time unless they are produced using the same zero time.
#[derive(Clone)]
pub struct SteadyClock {
zero: Arc<TimeZero>,
}
lazy_static! {
static ref STEADY_CLOCK: SteadyClock = SteadyClock {
zero: Arc::new(TimeZero {
zero: std::time::Instant::now(),
steady_time_point: monotonic_raw_now(),
}),
};
}
impl Default for SteadyClock {
#[inline]
fn default() -> SteadyClock {
STEADY_CLOCK.clone()
}
}
impl Now for SteadyClock {
#[inline]
fn now(&self) -> std::time::Instant {
let n = monotonic_raw_now();
let dur = Instant::elapsed_duration(n, self.zero.steady_time_point);
self.zero.zero + dur
}
}
/// A timer that creates steady delays.
///
/// Delay created by this timer will not be affected by time adjustment.
#[derive(Clone)]
pub struct SteadyTimer {
clock: SteadyClock,
handle: Handle,
}
impl SteadyTimer {
/// Creates a delay future that will be notified after the given duration.
pub fn delay(&self, dur: Duration) -> Delay {
self.handle.delay(self.clock.now() + dur)
}
}
lazy_static! {
static ref GLOBAL_STEADY_TIMER: SteadyTimer = start_global_steady_timer();
}
impl Default for SteadyTimer {
#[inline]
fn default() -> SteadyTimer {
GLOBAL_STEADY_TIMER.clone()
}
}
fn
|
() -> SteadyTimer {
let (tx, rx) = mpsc::channel();
let clock = SteadyClock::default();
let clock_ = clock.clone();
Builder::new()
.name(thd_name!("steady-timer"))
.spawn(move || {
let c = Clock::new_with_now(clock_);
let mut timer = tokio_timer::Timer::new_with_now(ParkThread::new(), c);
tx.send(timer.handle()).unwrap();
loop {
timer.turn(None).unwrap();
}
})
.unwrap();
SteadyTimer {
clock,
handle: rx.recv().unwrap(),
}
}
#[cfg(test)]
mod tests {
use super::*;
use futures::compat::Future01CompatExt;
use futures::executor::block_on;
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
enum Task {
A,
B,
C,
}
#[test]
fn test_timer() {
let mut timer = Timer::new(10);
timer.add_task(Duration::from_millis(20), Task::A);
timer.add_task(Duration::from_millis(150), Task::C);
timer.add_task(Duration::from_millis(100), Task::B);
assert_eq!(timer.pending.len(), 3);
let tick_time = timer.next_timeout().unwrap();
assert_eq!(timer.pop_task_before(tick_time).unwrap(), Task::A);
assert_eq!(timer.pop_task_before(tick_time), None);
let tick_time = timer.next_timeout().unwrap();
assert_eq!(timer.pop_task_before(tick_time).unwrap(), Task::B);
assert_eq!(timer.pop_task_before(tick_time), None);
let tick_time = timer.next_timeout().unwrap();
assert_eq!(timer.pop_task_before(tick_time).unwrap(), Task::C);
assert_eq!(timer.pop_task_before(tick_time), None);
}
#[test]
fn test_global_timer() {
let handle = super::GLOBAL_TIMER_HANDLE.clone();
let delay =
handle.delay(::std::time::Instant::now() + std::time::Duration::from_millis(100));
let timer = Instant::now();
block_on(delay.compat()).unwrap();
assert!(timer.saturating_elapsed() >= Duration::from_millis(100));
}
#[test]
fn test_global_steady_timer() {
let t = SteadyTimer::default();
let timer = t.clock.now();
let delay = t.delay(Duration::from_millis(100));
block_on(delay.compat()).unwrap();
assert!(timer.elapsed() >= Duration::from_millis(100));
}
}
|
start_global_steady_timer
|
run_script_snpla.py
|
# Imports
import sys
import torch
import os
import time
import numpy as np
from torch.distributions.multivariate_normal import MultivariateNormal
# Initial set up
lunarc = int(sys.argv[1])
dim = int(sys.argv[2])
seed = int(sys.argv[3])
seed_data = int(sys.argv[4])
hp_tuning = int(sys.argv[5]) # if hp_tuning = 0, no hyper-param tuning, else hp_tuning for that sample of the hp
lambda_val = float(sys.argv[6])  # if lambda_val > 0, it overrides the default lambda hyper-parameter
print("Input args:")
print("Dim: " + str(dim))
print("seed: " + str(seed))
print("seed_data: " + str(seed_data))
id_job = str(dim) + '_' + str(seed) + '_' + str(seed_data)
if hp_tuning > 0:
id_job = id_job + "_" + str(hp_tuning)
if lambda_val > 0:
id_job = id_job + "_" + str(lambda_val)
# Set wd
print(os.getcwd())
# set the wd to the base folder for the project
if lunarc == 1:
os.chdir('/home/samwiq/snpla/seq-posterior-approx-w-nf-dev')
else:
os.chdir('/home/samuel/Documents/projects/seq posterior approx w nf/seq posterior approx w nf dev')
sys.path.append('./')
print(os.getcwd())
# Load all utility functions for all methods
import mv_gaussian.low_dim_w_summary_stats.functions as func
import algorithms.snpla as snpla
# Set model and generate data
x_o, conj_model, analytical_posterior = func.set_up_model(seed)
# set up posterior network
flow_lik, flow_post = func.set_up_networks()
## Generate test data
N_prior_pred_test = 1000
x_test, theta_test = func.run_model_sim(N_prior_pred_test, seed + 2, conj_model, analytical_posterior,
conj_model.model.covariance_matrix, dim, True)
# Generate test data for obs data set
print(conj_model.model_sim(theta_test).shape)
N_test_obs_data = 1000
x_test_obs_data = torch.zeros(N_test_obs_data, 5)
theta_test_obs_data = torch.zeros(N_test_obs_data, dim)
for i in range(N_test_obs_data):
x_test_obs_data[i, :] = func.calc_summary_stats(x_o)
theta_test_obs_data[i, :] = conj_model.model.loc
# Set up networks for the likelihood model
# Base dist for posterior model
flow_lik, flow_post = func.set_up_networks()
hyper_params = [0.001, 0.002, 0.95, 0.7] # lr_like, lr_post, gamma_post, gamma
if lambda_val > 0:
hyper_params[-1] = lambda_val
if hp_tuning >= 2:
hyper_params = func.sample_hp("snpla", hp_tuning)
optimizer_lik = torch.optim.Adam(flow_lik.parameters(), lr=hyper_params[0])
optimizer_post = torch.optim.Adam(flow_post.parameters(), lr=hyper_params[1])
decay_rate_post = hyper_params[2]  # gamma_post: lr decay rate for the posterior optimizer's scheduler
nbr_rounds = 10
prob_prior_decay_rate = hyper_params[3]
prob_prior = snpla.calc_prob_prior(nbr_rounds, prob_prior_decay_rate)
print(prob_prior)
#nbr_lik = [2000, 2000, 2000, 2000]
#nbr_epochs_lik = [25, 25, 25, 25]
#batch_size = 50
#batch_size_post = 50
#nbr_post = [10000, 10000, 10000, 10000]
#nbr_epochs_post = [25, 25, 25, 25]
nbr_lik = [2500 for _ in range(nbr_rounds)] # [1000, 1000, 1000, 1000, 1000] # , 2000, 2000]
nbr_epochs_lik = [75 for _ in range(nbr_rounds)] # [100, 100, 100, 100, 100]
batch_size = 50
batch_size_post = 1000
nbr_post = [10000 for _ in range(nbr_rounds)] # [10000, 10000, 10000, 10000, 10000] # , 10000, 10000]
nbr_epochs_post = [75 for _ in range(nbr_rounds)] # [50, 50, 50, 50, 50, 50]
x_o_batch_post = torch.zeros(batch_size_post, 5)
for i in range(batch_size_post):
x_o_batch_post[i, :] = func.calc_summary_stats(x_o)
torch.manual_seed(seed)
np.random.seed(seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
start = time.time()
# TODO check prior and simulator
models_lik, models_post = snpla.inference_snpla(flow_lik,
flow_post,
conj_model.prior,
conj_model.model_sim,
optimizer_lik,
optimizer_post,
decay_rate_post,
func.calc_summary_stats(x_o),
x_o_batch_post,
dim,
prob_prior,
nbr_lik,
nbr_epochs_lik,
nbr_post,
nbr_epochs_post,
batch_size,
batch_size_post)
end = time.time()
run_time = end - start
print("")
print("Runtime:" + str(round(run_time, 2)))
kl_divs_trained = []
start = time.time()
torch.manual_seed(seed)
for i in range(nbr_rounds):
print(i)
posterior_sample = models_post[i].sample(1000, context=func.calc_summary_stats(x_o))
posterior_sample = posterior_sample.reshape((1000, 2))
kl_divs_trained.append(conj_model.kl_div(analytical_posterior, posterior_sample))
if hp_tuning == 0 and lambda_val > 0:
np.savetxt('mv_gaussian/low_dim_w_summary_stats/lambda_val/post_samples_snpla_' + str(i + 1) + "_" + id_job + '.csv',
posterior_sample.detach().numpy(), delimiter=",")
elif hp_tuning == 0:
np.savetxt('mv_gaussian/low_dim_w_summary_stats/data/post_samples_snpla_' + str(i + 1) + "_" + id_job + '.csv',
posterior_sample.detach().numpy(), delimiter=",")
else:
np.savetxt('mv_gaussian/low_dim_w_summary_stats/hp_tuning/post_samples_snpla_' + str(i + 1) + "_" + id_job + '.csv',
posterior_sample.detach().numpy(), delimiter=",")
end = time.time()
run_time_inference = (end - start) / nbr_rounds
if hp_tuning == 0 and lambda_val > 0:
with open('mv_gaussian/low_dim_w_summary_stats/lambda_val/snpla_' + id_job + '.txt', 'w') as f:
for h in hyper_params:
f.write('%.6f\n' % h)
for p in prob_prior:
f.write('%.6f\n' % p)
f.write('%.4f\n' % run_time)
f.write('%.4f\n' % run_time_inference)
for i in range(nbr_rounds):
f.write('%.4f\n' % kl_divs_trained[i])
elif hp_tuning == 0:
with open('mv_gaussian/low_dim_w_summary_stats/results/snpla_' + id_job + '.txt', 'w') as f:
f.write('%.4f\n' % run_time)
f.write('%.4f\n' % run_time_inference)
for i in range(nbr_rounds):
f.write('%.4f\n' % kl_divs_trained[i])
else:
|
with open('mv_gaussian/low_dim_w_summary_stats/hp_tuning/snpla_' + id_job + '.txt', 'w') as f:
f.write('%.4f\n' % hp_tuning)
for h in hyper_params:
f.write('%.6f\n' % h)
f.write('%.4f\n' % run_time)
f.write('%.4f\n' % run_time_inference)
for i in range(nbr_rounds):
f.write('%.4f\n' % kl_divs_trained[i])
if hp_tuning == 0:
# Inference
# Sample data from post pred
N_post_pred_test = 1000
x_post_pred, theta_post_pred = func.run_model_sim(N_post_pred_test, seed + 3, conj_model, analytical_posterior,
conj_model.model.covariance_matrix, dim, False)
torch.manual_seed(seed)
x_prior = flow_lik.sample(1, context=theta_test)
x_theta_true = flow_lik.sample(1, context=theta_test_obs_data)
x_post = flow_lik.sample(1, context=theta_post_pred)
x_prior = x_prior.reshape(x_test.shape)
x_theta_true = x_theta_true.reshape(x_test_obs_data.shape)
x_post = x_post.reshape(x_post_pred.shape)
# Write results
np.savetxt('mv_gaussian/low_dim_w_summary_stats/data/data_recon_snpla_' + id_job +
'.csv', x_theta_true.detach().numpy(), delimiter=",")
np.savetxt('mv_gaussian/low_dim_w_summary_stats/data/data_recon_prior_snpla_' + id_job + '.csv',
x_prior.detach().numpy(), delimiter=",")
np.savetxt('mv_gaussian/low_dim_w_summary_stats/data/data_recon_post_snpla_' + id_job + '.csv',
x_post.detach().numpy(), delimiter=",")
| |
mod.rs
|
pub mod device;
pub mod framer;
pub mod mac;
|
pub mod xmac;
mod driver;
pub use self::driver::RadioDriver;
pub use self::driver::DRIVER_NUM;
|
pub mod virtual_mac;
|
operations.rs
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
pub mod marketplace_agreements {
use crate::models::*;
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
offer_type: &str,
publisher_id: &str,
offer_id: &str,
plan_id: &str,
) -> std::result::Result<AgreementTerms, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MarketplaceOrdering/offerTypes/{}/publishers/{}/offers/{}/plans/{}/agreements/current",
operation_config.base_path(),
subscription_id,
offer_type,
publisher_id,
offer_id,
plan_id
);
let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AgreementTerms =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
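    // Call-site sketch (constructing `operation_config` is SDK-specific, and the
    // argument values below are illustrative, e.g. "virtualmachine" as offer type):
    //
    //     let terms = marketplace_agreements::get(
    //         &operation_config,
    //         subscription_id,
    //         "virtualmachine",
    //         publisher_id,
    //         offer_id,
    //         plan_id,
    //     )
    //     .await?;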
pub async fn create(
operation_config: &crate::OperationConfig,
offer_type: &str,
subscription_id: &str,
publisher_id: &str,
offer_id: &str,
plan_id: &str,
parameters: &AgreementTerms,
) -> std::result::Result<AgreementTerms, create::Error>
|
pub mod create {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn sign(
operation_config: &crate::OperationConfig,
subscription_id: &str,
publisher_id: &str,
offer_id: &str,
plan_id: &str,
) -> std::result::Result<AgreementTerms, sign::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MarketplaceOrdering/agreements/{}/offers/{}/plans/{}/sign",
operation_config.base_path(),
subscription_id,
publisher_id,
offer_id,
plan_id
);
let mut url = url::Url::parse(url_str).map_err(sign::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(sign::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(sign::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(sign::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AgreementTerms =
serde_json::from_slice(rsp_body).map_err(|source| sign::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| sign::Error::DeserializeError(source, rsp_body.clone()))?;
Err(sign::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod sign {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn cancel(
operation_config: &crate::OperationConfig,
subscription_id: &str,
publisher_id: &str,
offer_id: &str,
plan_id: &str,
) -> std::result::Result<AgreementTerms, cancel::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MarketplaceOrdering/agreements/{}/offers/{}/plans/{}/cancel",
operation_config.base_path(),
subscription_id,
publisher_id,
offer_id,
plan_id
);
let mut url = url::Url::parse(url_str).map_err(cancel::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(cancel::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(cancel::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(cancel::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AgreementTerms =
serde_json::from_slice(rsp_body).map_err(|source| cancel::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| cancel::Error::DeserializeError(source, rsp_body.clone()))?;
Err(cancel::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod cancel {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_agreement(
operation_config: &crate::OperationConfig,
subscription_id: &str,
publisher_id: &str,
offer_id: &str,
plan_id: &str,
) -> std::result::Result<AgreementTerms, get_agreement::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MarketplaceOrdering/agreements/{}/offers/{}/plans/{}",
operation_config.base_path(),
subscription_id,
publisher_id,
offer_id,
plan_id
);
let mut url = url::Url::parse(url_str).map_err(get_agreement::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_agreement::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_agreement::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_agreement::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AgreementTerms =
serde_json::from_slice(rsp_body).map_err(|source| get_agreement::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| get_agreement::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_agreement::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_agreement {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<Vec<AgreementTerms>, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MarketplaceOrdering/agreements",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<AgreementTerms> =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod operations {
use crate::models::*;
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.MarketplaceOrdering/operations",
operation_config.base_path(),
);
let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationListResult =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
|
{
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MarketplaceOrdering/offerTypes/{}/publishers/{}/offers/{}/plans/{}/agreements/current",
operation_config.base_path(),
subscription_id,
offer_type,
publisher_id,
offer_id,
plan_id
);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(create::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AgreementTerms =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: ErrorResponse =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
|
options.go
|
package memcached
// Option is an optional configuration of this Gnomock preset. Use available
// Options to configure the container
type Option func(*P)
// WithValues initializes Memcached with the provided key/value pairs. These values
// never expire. Only strings are supported.
func WithValues(vs map[string]string) Option {
return func(p *P) {
p.Values = vs
}
}
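// A usage sketch (assuming the usual gnomock preset constructor, e.g.
// memcached.Preset, which is not defined in this file):
//
//	p := memcached.Preset(
//		memcached.WithValues(map[string]string{"color": "blue"}),
//		memcached.WithVersion("1.6.9"),
//	)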
// WithByteValues initializes Memcached with the provided key/value pairs. These values
// never expire. Only byte slices are supported.
func WithByteValues(vs map[string][]byte) Option
|
// WithVersion sets image version.
func WithVersion(version string) Option {
return func(o *P) {
o.Version = version
}
}
|
{
return func(p *P) {
p.ByteValues = vs
}
}
|
uploader-demo.js
|
$(document).ready(function() {
|
var uploader = new qq.FineUploader({
element: $('#basicUploadSuccessExample')[0],
debug: true,
request: {
endpoint: "http://ray.test:8080/upload/receiver"
},
callbacks: {
onError: errorHandler
},
deleteFile: {
enabled: true
}
});
var uploader2 = new qq.FineUploader({
element: $('#manualUploadModeExample')[0],
autoUpload: false,
uploadButtonText: "Select Files",
request: {
endpoint: "/upload/receiver"
},
callbacks: {
onError: errorHandler
}
});
$('#triggerUpload').click(function() {
uploader2.uploadStoredFiles();
});
var uploader3 = new qq.FineUploader({
element: $('#basicUploadFailureExample')[0],
callbacks: {
onError: errorHandler
},
request: {
endpoint: "/upload/receiver",
params: {"generateError": true}
},
failedUploadTextDisplay: {
mode: 'custom',
maxChars: 5
}
});
var uploader4 = new qq.FineUploader({
element: $('#uploadWithVariousOptionsExample')[0],
multiple: false,
request: {
endpoint: "/upload/receiver"
},
validation: {
allowedExtensions: ['jpeg', 'jpg', 'txt'],
sizeLimit: 50000
},
text: {
uploadButton: "Click Or Drop"
},
callbacks: {
onError: errorHandler
}
});
    var uploader5 = new qq.FineUploaderBasic({
multiple: false,
autoUpload: false,
button: $("#fubUploadButton")[0],
request: {
endpoint: "/upload/receiver"
},
callbacks: {
onError: errorHandler
}
});
});
|
var errorHandler = function(event, id, fileName, reason) {
qq.log("id: " + id + ", fileName: " + fileName + ", reason: " + reason);
};
|
final_rrl.py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 28 00:33:10 2019
@author: Aalap
"""
import numpy as np
import matplotlib.pyplot as plt
import math
class Node:
def
|
(self, nodex, nodey,nodetheta, cost, parentnode,vx,vy,vt):
self.nodex = nodex
self.nodey = nodey
self.nodetheta=nodetheta
self.cost = cost
self.parentnode = parentnode
self.vx=vx
self.vy=vy
self.vt=vt
def get_nodex(self):
return self.nodex
def get_nodey(self):
return self.nodey
def get_nodetheta(self):
return self.nodetheta
    def get_vx(self):
        return self.vx
    def get_vy(self):
        return self.vy
    def get_vt(self):
        return self.vt
def motion(current_node,ur,ul,time):
r=3.8
l=23
ur=0.104666667*ur
ul=0.104666667*ul
thetadot=(r/l)*(ur-ul)
newnodetheta=thetadot*time+current_node.nodetheta
xdot=(r/2)*(ur+ul)*(math.cos(current_node.nodetheta))
ydot=(r/2)*(ur+ul)*(math.sin(current_node.nodetheta))
d=math.sqrt((ydot)**2+(xdot)**2)
#delta_x=d*math.cos(newnodetheta)
#delta_y=d*math.sin(newnodetheta)
cost=math.sqrt((xdot*time)**2+(ydot*time)**2)
newcost=round(cost+current_node.cost)
newnodex=round(xdot*time+current_node.nodex)
newnodey=round(ydot*time+current_node.nodey)
xvelocity=(ur)
yvelocity=(ul)
thetavelocity=thetadot
    return newnodex,newnodey,newnodetheta,newcost,xvelocity,yvelocity,thetavelocity
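# Worked example for motion() (units assumed to be cm and seconds): with
# ur = ul = 60, both wheel speeds become 60*0.104666667 ~ 6.28 rad/s, so
# thetadot = (3.8/23)*(6.28-6.28) = 0 (a straight line) and
# xdot = (3.8/2)*(6.28+6.28)*cos(theta) ~ 23.9 along the current heading.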
def shortest_path(goalnode, visited, reso):
    # reconstruct the shortest path by backtracking until parent id is -1
    path_x = []  # stores path x coordinates
    path_y = []  # stores path y coordinates
    xvelocity = []
    yvelocity = []
    thetavelocity = []
path_x.append((goalnode.nodex))
path_y.append((goalnode.nodey))
xvelocity.append((goalnode.vx))
yvelocity.append((goalnode.vy))
thetavelocity.append((goalnode.vt))
p = goalnode.parentnode
print(p)
    while (p != -1):
        # backtrack through parent ids until the start node (parent id -1)
        tracknode = visited[p]
path_x.append((tracknode.nodex))
path_y.append((tracknode.nodey))
xvelocity.append((tracknode.vx))
yvelocity.append((tracknode.vy))
thetavelocity.append((tracknode.vt))
p = tracknode.parentnode
return path_x, path_y,xvelocity,yvelocity,thetavelocity
def node_key(node):
    # unique key per grid cell; the multiplier must exceed the maximum y index (1010)
    node_key = (node.nodex) * 2000 + node.nodey
    return node_key
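# e.g. node (x=3, y=7) -> key 3*2000 + 7 = 6007, unique while y stays below 2000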
def hd(node,goalnode):
d=math.sqrt((node.nodex-goalnode.nodex)**2+(node.nodey-goalnode.nodey)**2)#cost to go
return d
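# e.g. hd between nodes at (0,0) and (3,4) is sqrt(3**2 + 4**2) = 5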
def check_node(node,obsmap,obs_x,obs_y):
#check of node correctness
if (node.nodex < (min(obs_x)) or node.nodex > (max(obs_x)) or node.nodey < (min(obs_y)) or node.nodey > (max(obs_y))):
return False
if (obsmap[node.nodex][node.nodey]):
return False
if (node.nodex < 0):
return False
if (node.nodex > 1110):
return False
if (node.nodey < 0):
return False
if (node.nodey > 1011):
return False
return True
def check_goal_node(node,goalnode):
    d=math.sqrt((node.nodex-goalnode.nodex)**2+(node.nodey-goalnode.nodey)**2)
    # goal is considered reached within a 10-cell radius
    if(d<10):
        return True
    return False
def obstacle_map(obs_x, obs_y):
max_x = round(max(obs_x))
max_y = round(max(obs_y))
min_x = round(min(obs_x))
min_y = round(min(obs_y))
obsmap = np.zeros((1111,1011))#make a world space which is all false
for i in range(min_x,max_x):
for j in range(min_y,max_y):
obsmap[i][j]=False#make a obstacle space that is all false
for index,i in enumerate(obs_x):
obsmap[obs_x[index]][obs_y[index]] = True#update the obstacle space at points where there is obstacle to true
return obsmap
def obstacle_space(r,c):
points=[]#stores points of obstacle space
obs_x=[]#stores x coordinates of obstacle space
obs_y=[]#stores y coordinates of obstacle space
e=r+c
##circular obstacle space
print("computing circle1 obstacle")
k = 40.5 + (r) + c
for i in range(e,(1111-e)):
for j in range(e,(1011-e)):
if (((i - 390) ** 2 + (j - 45) ** 2 - (k ** 2)) <= 0):
obs_x.append(i)
obs_y.append(j)
points.append([i,j])
print("circle1 obstacle computed")
#print("c1x",obs_x)
#print("c1y",obs_y)
print("computing circle2 obstacle")
k = 40.5 + (r) + c
for i in range(e,(1111-e)):
for j in range(e,(1011-e)):
if (((i - 438) ** 2 + (j - 274) ** 2 - (k ** 2)) <= 0):
obs_x.append(i)
obs_y.append(j)
points.append([i,j])
print("circle2 obstacle computed")
#print("c2x",obs_x)
#print("c2y",obs_y)
print("computing circle3 obstacle")
k = 40.5 + (r) + c
for i in range(e,(1111-e)):
for j in range(e,(1011-e)):
if (((i - 438) ** 2 + (j - 736) ** 2 - (k ** 2)) <= 0):
obs_x.append(i)
obs_y.append(j)
points.append([i,j])
print("circle3 obstacle computed")
#print("c3x",obs_x)
#print("c3y",obs_y)
print("computing circle4 obstacle")
k = 40.5 + (r) + c
for i in range(e,(1111-e)):
for j in range(e,(1011-e)):
if (((i - 390) ** 2 + (j - 965) ** 2 - (k ** 2)) <= 0):
obs_x.append(i)
obs_y.append(j)
points.append([i,j])
print("circle4 obstacle computed")
#print("c4x",obs_x)
#print("c4y",obs_y)
print("computing rectangle1 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i - 1110-r-c <= 0) & (j - 35+r+c >= 0) & (j - 111-r-c <= 0) &(i -927+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle1 obstacle")
print("computing rectangle2 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i - 896-r-c <= 0) & (j - 35+r+c >= 0) & (j - 93-r-c <= 0) &(i -779+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle2 obstacle")
print("computing rectangle3 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i - 748-r-c <= 0) & (j - 35+r+c >= 0) & (j - 187-r-c <= 0) &(i -474+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle3 obstacle")
print("computing rectangle4 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i - 1110-r-c <= 0) & (j - 621+r+c >= 0) & (j - 697-r-c <= 0) &(i -744+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle4 obstacle")
print("computing rectangle5 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i - 1110-r-c <= 0) & (j - 448.5+r+c >= 0) & (j - 565.5-r-c <= 0) &(i -1052+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle5 obstacle")
print("computing rectangle6 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i - 1110-r-c <= 0) & (j - 362.5+r+c >= 0) & (j - 448.5-r-c <= 0) &(i -1019+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle6 obstacle")
print("computing rectangle7 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i - 1110-r-c <= 0) & (j - 178.25+r+c >= 0) & (j - 295.25-r-c <= 0) &(i -1052+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle7 obstacle")
print("computing rectangle8 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i - 529-r-c <= 0) & (j - 314.5+r+c >= 0) & (j - 497.5-r-c <= 0) &(i -438+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle8 obstacle")
print("computing rectangle9 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i - 712-r-c <= 0) & (j - 256+r+c >= 0) & (j - 332-r-c <= 0) &(i -529+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle9 obstacle")
print("computing rectangle10 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i -1026 -r-c <= 0) & (j -919+r+c >= 0) & (j - 1010-r-c <= 0) &(i -983+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle10 obstacle")
print("computing rectangle11 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i -918 -r-c <= 0) & (j -827+r+c >= 0) & (j - 1010-r-c <= 0) &(i -832+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle11 obstacle")
print("computing rectangle12 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i -1110 -r-c <= 0) & (j -0+r+c >= 0) & (j - 58-r-c <= 0) &(i -585+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle12 obstacle")
print("computing rectangle13 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i -936 -r-c <= 0) & (j -267+r+c >= 0) & (j - 384-r-c <= 0) &(i -784+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle13 obstacle")
print("computing rectangle14 obstacle")
for i in range(e,1111-e):
for j in range(e,1011-e):
if ((i -309 -r-c <= 0) & (j -750+r+c >= 0) & (j - 910-r-c <= 0) &(i -150+r+c >= 0)):
obs_x.append(i)
obs_y.append(j)
points.append([i, j])
print("computed rectangle14 obstacle")
#semi circle
print("computing semicircle5 obstacle")
k = 80 + (r) + c
for i in range(e,(1111-e)):
for j in range(e,(1011-e)):
if (((i - 150) ** 2 + (j - 830) ** 2 - (k ** 2)) <= 0):
obs_x.append(i)
obs_y.append(j)
points.append([i,j])
print("semicircle5 obstacle computed")
print("computing semicircle6 obstacle")
k = 80 + (r) + c
for i in range(e,(1111-e)):
for j in range(e,(1011-e)):
if (((i - 310) ** 2 + (j - 830) ** 2 - (k ** 2)) <= 0):
obs_x.append(i)
obs_y.append(j)
points.append([i,j])
print("semicircle6 obstacle computed")
#boundary obstacle space
print("computing boundary ")
if(r==0 and c==0):
for i in range(1111):
for j in range(1011):
if(i==0 or i==1110 or j==1010 or j==0):
obs_x.append(i)
obs_y.append(j)
points.append([i,j])
else:
e=r+c
for i in range(e,1111-e):
for j in range(e,1011-e):
if(i==r+c or i==1110-r-c or j==1010-r-c or j==r+c):
obs_x.append(i)
obs_y.append(j)
points.append([i,j])
print("boundary computed")
print(min(obs_x))
print(max(obs_x))
print(min(obs_y))
print(max(obs_y))
return obs_x,obs_y
def a_algo(startx,starty,starttheta,goalx,goaly,goaltheta,reso,r,c,time):
show=True
lx = []#used to store all explored node x
ly = []#used to store all explored node y
flag=0
    unvisited=dict()  # dictionary to store unvisited nodes
    visited=dict()  # dictionary to store visited nodes for back tracking
moves = [[60, 0], [40, 0], [60, 40], [40, 60], [60, 60], [40, 40],
[0,60], [0, 40]]#all possible moves allowed
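    # each entry is a (ur, ul) wheel-speed pair fed to motion() during node expansion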
startnode = Node(round(startx / reso), round(starty / reso), 0,0, -1,0,0,0)#start node formation
goalnode = Node(round(goalx / reso), round(goaly / reso), 0,1000, 0,0,0,0)#goal node formation
    obs_x, obs_y = obstacle_space(r, c)  # obstacle space formed
    # obstacle space in discretized format
obs_x = [round(x / reso) for x in obs_x]
obs_y = [round(y / reso) for y in obs_y]
#obstacle space converted to true false obstacle map
obsmap= obstacle_map(obs_x,obs_y)
#checking if the startnode or goalnode is not in obstacle or out of world space
if not(startnode.nodex < min(obs_x) or startnode.nodex > max(obs_x) or startnode.nodey < min(obs_y) or startnode.nodey > max(obs_y)):
if not(goalnode.nodex < min(obs_x) or goalnode.nodex > max(obs_x) or goalnode.nodey < min(obs_y) or goalnode.nodey > max(obs_y)):
if not obsmap[startnode.nodex][startnode.nodey] and not obsmap[goalnode.nodex][goalnode.nodey]:
flag = 1
unvisited[node_key(startnode)] = startnode
while (flag):
current_node_id = min(unvisited, key=lambda o: unvisited[o].cost+hd(goalnode,unvisited[o]))#finding minimum cost node
current_node = unvisited[current_node_id]#making it the current node
visited[current_node_id] = current_node#putting current node to visited dictionary
del unvisited[current_node_id]#removing current node from unvisited dictionary
for i, _ in enumerate(moves):#node exploration
newnodex,newnodey,newnodetheta,newcost,xvelocity,yvelocity,thetavelocity = motion(current_node , moves[i][0], moves[i][1],time)
node=Node(newnodex,newnodey,newnodetheta,newcost,current_node_id,xvelocity,yvelocity,thetavelocity)
lx.append(Node.get_nodex(node))#used get node to store new nodex in lx
ly.append(Node.get_nodey(node))#used get node to store new nodey in ly
if (len(lx)%1000==0):
if(show):
plt.plot(lx,ly,".r")
plt.plot(obs_x, obs_y,".k")#obstacle space
plt.show()
plt.grid()
if (check_goal_node(node, goalnode)):
goalnode.nodex=node.nodex
goalnode.parentnode=node.parentnode
goalnode.nodey=node.nodey
goalnode.cost=node.cost
goalnode.vt=node.vt
goalnode.vx=node.vx
goalnode.vy=node.vy
goalnode.nodetheta=node.nodetheta
                print("goal reached; parent id:", node.parentnode)
flag=False
break
f = node_key(node)
if not check_node(node, obsmap,obs_x,obs_y):#check the new node is not in obstacle
continue
if f in visited:#check new node in visited
continue
if f in unvisited:#check node in unvisited and update the parameters
if (unvisited[f].cost > node.cost):
unvisited[f].cost = node.cost
unvisited[f].parentnode = node.parentnode
else:
unvisited[f] = node#add new node to unvisited dictionary
print(visited)
a, b,xvelocity,yvelocity,thetavelocity = shortest_path(goalnode, visited, reso)#return shortest path
if(flag):
print("shortest path aaya")
else:
print("end")
return a, b, obs_x, obs_y, lx,ly,xvelocity,yvelocity,thetavelocity
def main():
print( "astar algorithm start!!")
show=True#flag used to display the result
startx = 50.0 # startx coordinate
starty = 50.0 # starty coordinate
starttheta=0
goalx = 250.0 # goalx coordinate
goaly = 250.0 # goaly coordinate
goaltheta=0
reso = 1 # resolution
r = 24 #robot radius
c= 0# clearance
time=1
if show:
plt.plot(startx/reso, starty/reso, "xc")
plt.plot(goalx/reso, goaly/reso, "xb")
a,b, obs_x, obs_y, lx,ly,xvelocity,yvelocity,thetavelocity =a_algo(startx,starty,starttheta,goalx,goaly,goaltheta,reso,r,c,time)
print(a)
print(b)
print(xvelocity)
print(yvelocity)
print(thetavelocity)
if show:
#displaying the result
#if input or output is incorrect then only obstacle and start and goal is displayed
print("final output for astar!!!!")
plt.plot(lx,ly,".g")#node explored
plt.plot(obs_x, obs_y,".k")#obstacle space
plt.plot(a, b, "-r")#shortest path
plt.grid()
plt.show()
if __name__ == '__main__':
    main()
|
__init__
|
main.rs
|
fn main() {
|
yew::start_app::<node_refs_web_sys::Model>();
}
|
|
recordsdotconfig.go
|
package cfgfile
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import (
"github.com/apache/trafficcontrol/lib/go-atscfg"
"github.com/apache/trafficcontrol/traffic_ops/ort/atstccfg/config"
)
func GetConfigFileProfileRecordsDotConfig(toData *config.TOData) (string, string, string, error)
|
{
params := ParamsToMap(FilterParams(toData.ServerParams, atscfg.RecordsFileName, "", "", "location"))
return atscfg.MakeRecordsDotConfig(toData.Server.Profile, params, toData.TOToolName, toData.TOURL), atscfg.ContentTypeRecordsDotConfig, atscfg.LineCommentRecordsDotConfig, nil
}
|
|
jp_all.pac
|
function base64decode(str) {
var c1, c2, c3, c4;
var i, len, out;
var base64DecodeChars = new Array(
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63,
52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -1, -1, -1,
-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1,
-1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1);
len = str.length;
i = 0;
out = "";
while (i < len) {
do {
c1 = base64DecodeChars[str.charCodeAt(i++) & 0xff];
} while (i < len && c1 == -1);
if (c1 == -1)
break;
do {
c2 = base64DecodeChars[str.charCodeAt(i++) & 0xff];
} while (i < len && c2 == -1);
if (c2 == -1)
break;
out += String.fromCharCode((c1 << 2) | ((c2 & 0x30) >> 4));
do {
c3 = str.charCodeAt(i++) & 0xff;
if (c3 == 61)
return out;
c3 = base64DecodeChars[c3];
} while (i < len && c3 == -1);
if (c3 == -1)
break;
out += String.fromCharCode(((c2 & 0XF) << 4) | ((c3 & 0x3C) >> 2));
do {
c4 = str.charCodeAt(i++) & 0xff;
if (c4 == 61)
return out;
c4 = base64DecodeChars[c4];
} while (i < len && c4 == -1);
if (c4 == -1)
break;
out += String.fromCharCode(((c3 & 0x03) << 6) | c4);
}
return out;
}
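// Quick sanity check (standard base64): base64decode("aGk=") === "hi".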
function suffix(s1, s2) {
return s1.indexOf(s2, s1.length - s2.length) !== -1;
}
function isHTTPS(s1) {
return s1.indexOf('https://', 0) !== -1;
}
function check_ipv4(host) {
    var re_ipv4 = /^\d+\.\d+\.\d+\.\d+$/;
    return re_ipv4.test(host);
}
function loopc(List, host, Rex) {
for (var i in List) {
if (suffix(host,List[i])) {
return Rex;
}
}
return false;
}
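// e.g. loopc(['example.com'], 'www.example.com', P) returns P (domain-suffix match).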
function loopn(List, ip, Rex) {
for (var i in List) {
if (isInNet(ip, List[i][0], List[i][1])) {
return Rex;
}
}
return false;
}
function FindProxyForURL(url, host){
var L_LAN = [['10.0.0.0', '255.0.0.0'], ['172.16.0.0', '255.240.0.0'], ['192.168.0.0', '255.255.0.0'], ['127.0.0.1', '255.255.255.255']];
var D = 'DIRECT';
//ServerList
if(isHTTPS(url)===false){
var P = 'HTTPS onenet-jp.vnet.link:211;HTTPS onenet-jp.vnet.link:221;HTTPS onenet-jp.vnet.link:231;PROXY onenet-jp.vnet.link:210;PROXY onenet-jp.vnet.link:220;PROXY onenet-jp.vnet.link:230;';
}else{
var P = 'HTTPS onenet-jp.vnet.link:211;HTTPS onenet-jp.vnet.link:221;HTTPS onenet-jp.vnet.link:231;PROXY onenet-jp.vnet.link:210;PROXY onenet-jp.vnet.link:220;PROXY onenet-jp.vnet.link:230;';
}
//Preload-DirectGo
if(suffix(host,'vnet.link')||suffix(host,'getpac.tk')){
return D;
}
//Preload-DMM-JP
if(suffix(host,'dmm.com')||suffix(host,'openx.net')||suffix(host,'jp')){
return 'HTTPS node-jp.vnet.link:111;PROXY node-jp.vnet.link:101;';
|
}
//Preload-Out
var L_service_out = eval(base64decode('WyJ5b3VrdS5jb20iLCAidHVkb3UuY29tIiwgInNjb3JlY2FyZHJlc2VhcmNoLmNvbSAiLCAiYWRtYXN0ZXIuY29tLmNuIiwgImlyczAxLmNvbSIsICJhbGltYW1hLmNuIiwgInRhbnguY29tIiwgInphbXBkc3AuY29tIiwgIm1tc3RhdC5jb20iLCAiYWxpY2RuLmNvbSIsICJtaWFvemhlbi5jb20iLCAieWtpbWcuY29tIiwgImd0YWdzLm5ldCIsICJjci1uaWVsc2VuLmNvbSIsICJ0ZGltZy5jb20iLCAidGFvYmFvY2RuLmNvbSIsICJtZWRpYXYuY29tIiwgInFpeWkuY29tIiwgInAweS5jbiIsICJxbG9nby5jbiIsICJzaW5haW1nLmNuIiwgImlwaW55b3UuY29tIiwgImd0aW1nLmNuIiwgIjM2MGJ1eWltZy5jb20iLCAidGVuY2VudG1pbmQuY29tIiwgImd0aW1nLmNvbSIsICIzLmNuIiwgInNvaHUuY29tIiwgImlyczAxLm5ldCIsICJpdGMuY24iLCAid3JhdGluZy5jb20iLCAic29nb3UuY29tIiwgIm9wdGFpbS5jb20iLCAiYmFpZHVzdGF0aWMuY29tIiwgImJhaWR1LmNvbSIsICJwYWlwYWlpbWcuY29tIiwgIm1tY2RuLmNuIiwgIm1sdDAxLmNvbSIsICJhY3M4Ni5jb20iLCAieHVubGVpLmNvbSIsICJrYW5rYW4uY29tIiwgInNhbmRhaS5uZXQiLCAia2FuaW1nLmNvbSIsICJyZXZzY2kubmV0IiwgInNjb3JlY2FyZHJlc2VhcmNoLmNvbSIsICJiaWxpYmlsaS5jb20iLCAiYWNndmlkZW8uY29tIiwgImhkc2xiLmNvbSIsICJmdW5zaGlvbi5jb20iLCAiZnVuc2hpb24ubmV0IiwgImJhaWR1c3RhaWMuY29tIiwgImRvdWJsZWNsaWNrLm5ldCIsICJ6aGl6aXl1bi5jb20iLCAiNnJvb21zLmNvbSIsICI2LmNuIiwgImxldHYuY29tIiwgImxldHZjZG4uY29tIiwgImFkbWFzdGVyLmNvbSIsICJsZXR2LmNuIiwgIm1tMTExLm5ldCIsICJhY2Z1bi50diIsICJsZXR2Y2xvdWQuY29tIiwgImlzdHJlYW1zY2hlLmNvbSIsInRvdWRvdXVpLmNvbSJd'));
var L2x_out = loopc(L_service_out,host,P);
if(L2x_out!==false){return L2x_out;}
//Default
return P;}
| |
v8.gyp
|
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{
'includes': ['../../build/common.gypi'],
'conditions': [
['use_system_v8==0', {
'targets': [
{
'target_name': 'v8',
'dependencies_traverse': 1,
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}, {
'toolsets': ['target'],
}],
['v8_use_snapshot=="true"', {
# The dependency on v8_base should come from a transitive
# dependency however the Android toolchain requires libv8_base.a
# to appear before libv8_snapshot.a so it's listed explicitly.
'dependencies': ['v8_base', 'v8_snapshot'],
},
{
# The dependency on v8_base should come from a transitive
# dependency however the Android toolchain requires libv8_base.a
# to appear before libv8_snapshot.a so it's listed explicitly.
'dependencies': ['v8_base', 'v8_nosnapshot'],
}],
['component=="shared_library"', {
'type': '<(component)',
'sources': [
# Note: on non-Windows we still build this file so that gyp
# has some sources to link into the component.
'../../src/v8dll-main.cc',
],
'defines': [
'V8_SHARED',
'BUILDING_V8_SHARED',
],
'direct_dependent_settings': {
'defines': [
'V8_SHARED',
'USING_V8_SHARED',
],
},
'conditions': [
['OS=="mac"', {
'xcode_settings': {
'OTHER_LDFLAGS': ['-dynamiclib', '-all_load']
},
}],
['soname_version!=""', {
'product_extension': 'so.<(soname_version)',
}],
],
},
{
'type': 'none',
}],
],
'direct_dependent_settings': {
'include_dirs': [
'../../include',
],
},
},
{
'target_name': 'v8_snapshot',
'type': '<(library)',
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
'dependencies': ['mksnapshot#host', 'js2c#host'],
}, {
'toolsets': ['target'],
'dependencies': ['mksnapshot', 'js2c'],
}],
['component=="shared_library"', {
'defines': [
'V8_SHARED',
'BUILDING_V8_SHARED',
],
'direct_dependent_settings': {
'defines': [
'V8_SHARED',
'USING_V8_SHARED',
],
},
}],
],
'dependencies': [
'v8_base',
],
'include_dirs+': [
'../../src',
],
'sources': [
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'<(INTERMEDIATE_DIR)/snapshot.cc',
],
'actions': [
{
'action_name': 'run_mksnapshot',
'inputs': [
'<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)mksnapshot<(EXECUTABLE_SUFFIX)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/snapshot.cc',
],
'variables': {
'mksnapshot_flags': [
'--log-snapshot-positions',
'--logfile', '<(INTERMEDIATE_DIR)/snapshot.log',
],
},
'conditions': [
['v8_target_arch=="arm"', {
# The following rules should be consistent with chromium's
# common.gypi and V8's runtime rule to ensure they all generate
# the same correct machine code. The following issue is about
# V8's runtime rule about vfpv3 and neon:
# http://code.google.com/p/v8/issues/detail?id=914
'conditions': [
['armv7==1', {
# The ARM Architecture Manual mandates VFPv3 if NEON is
# available.
                # The current V8 doesn't use d16-d31, so for vfpv3-d16 we can
                # also enable vfp3 for better performance.
'conditions': [
['arm_neon!=1 and arm_fpu!="vfpv3" and arm_fpu!="vfpv3-d16"', {
'variables': {
'mksnapshot_flags': [
'--noenable_vfp3',
],
},
}],
],
},{ # else: armv7!=1
'variables': {
'mksnapshot_flags': [
'--noenable_armv7',
'--noenable_vfp3',
],
},
}],
],
}],
],
'action': [
'<@(_inputs)',
'<@(mksnapshot_flags)',
'<@(_outputs)'
],
},
],
},
{
'target_name': 'v8_nosnapshot',
'type': '<(library)',
'dependencies': [
'v8_base',
],
'include_dirs+': [
'../../src',
],
'sources': [
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'../../src/snapshot-empty.cc',
],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
'dependencies': ['js2c#host'],
}, {
'toolsets': ['target'],
'dependencies': ['js2c'],
}],
['component=="shared_library"', {
'defines': [
'BUILDING_V8_SHARED',
'V8_SHARED',
],
}],
]
},
{
'target_name': 'v8_base',
'type': '<(library)',
'variables': {
'optimize': 'max',
},
'include_dirs+': [
'../../src',
],
'sources': [
'../../src/accessors.cc',
'../../src/accessors.h',
'../../src/allocation.cc',
'../../src/allocation.h',
'../../src/api.cc',
'../../src/api.h',
'../../src/apiutils.h',
'../../src/arguments.h',
'../../src/assembler.cc',
'../../src/assembler.h',
'../../src/ast.cc',
'../../src/ast.h',
'../../src/atomicops.h',
'../../src/atomicops_internals_x86_gcc.cc',
'../../src/bignum.cc',
'../../src/bignum.h',
'../../src/bignum-dtoa.cc',
'../../src/bignum-dtoa.h',
'../../src/bootstrapper.cc',
'../../src/bootstrapper.h',
'../../src/builtins.cc',
'../../src/builtins.h',
'../../src/bytecodes-irregexp.h',
'../../src/cached-powers.cc',
'../../src/cached-powers.h',
'../../src/char-predicates-inl.h',
'../../src/char-predicates.h',
'../../src/checks.cc',
'../../src/checks.h',
'../../src/circular-queue-inl.h',
'../../src/circular-queue.cc',
'../../src/circular-queue.h',
'../../src/code-stubs.cc',
'../../src/code-stubs.h',
'../../src/code.h',
'../../src/codegen.cc',
'../../src/codegen.h',
'../../src/compilation-cache.cc',
'../../src/compilation-cache.h',
'../../src/compiler.cc',
'../../src/compiler.h',
'../../src/contexts.cc',
'../../src/contexts.h',
'../../src/conversions-inl.h',
'../../src/conversions.cc',
'../../src/conversions.h',
'../../src/counters.cc',
'../../src/counters.h',
'../../src/cpu.h',
'../../src/cpu-profiler-inl.h',
'../../src/cpu-profiler.cc',
'../../src/cpu-profiler.h',
'../../src/data-flow.cc',
'../../src/data-flow.h',
'../../src/date.cc',
'../../src/date.h',
'../../src/dateparser.cc',
'../../src/dateparser.h',
'../../src/dateparser-inl.h',
'../../src/debug.cc',
'../../src/debug.h',
'../../src/debug-agent.cc',
'../../src/debug-agent.h',
'../../src/deoptimizer.cc',
'../../src/deoptimizer.h',
'../../src/disasm.h',
'../../src/disassembler.cc',
'../../src/disassembler.h',
'../../src/diy-fp.cc',
'../../src/diy-fp.h',
'../../src/double.h',
'../../src/dtoa.cc',
'../../src/dtoa.h',
'../../src/elements.cc',
'../../src/elements.h',
'../../src/elements-kind.cc',
'../../src/elements-kind.h',
'../../src/execution.cc',
'../../src/execution.h',
'../../src/factory.cc',
'../../src/factory.h',
'../../src/fast-dtoa.cc',
'../../src/fast-dtoa.h',
'../../src/flag-definitions.h',
'../../src/fixed-dtoa.cc',
'../../src/fixed-dtoa.h',
'../../src/flags.cc',
'../../src/flags.h',
'../../src/frames-inl.h',
'../../src/frames.cc',
'../../src/frames.h',
'../../src/full-codegen.cc',
'../../src/full-codegen.h',
'../../src/func-name-inferrer.cc',
'../../src/func-name-inferrer.h',
'../../src/global-handles.cc',
'../../src/global-handles.h',
'../../src/globals.h',
'../../src/handles-inl.h',
'../../src/handles.cc',
'../../src/handles.h',
'../../src/hashmap.h',
'../../src/heap-inl.h',
'../../src/heap.cc',
'../../src/heap.h',
'../../src/heap-profiler.cc',
'../../src/heap-profiler.h',
'../../src/hydrogen.cc',
'../../src/hydrogen.h',
'../../src/hydrogen-instructions.cc',
'../../src/hydrogen-instructions.h',
'../../src/ic-inl.h',
'../../src/ic.cc',
'../../src/ic.h',
'../../src/incremental-marking.cc',
'../../src/incremental-marking.h',
'../../src/inspector.cc',
'../../src/inspector.h',
'../../src/interface.cc',
'../../src/interface.h',
'../../src/interpreter-irregexp.cc',
'../../src/interpreter-irregexp.h',
'../../src/json-parser.h',
'../../src/jsregexp.cc',
'../../src/jsregexp.h',
'../../src/isolate.cc',
'../../src/isolate.h',
'../../src/lazy-instance.h',
'../../src/list-inl.h',
'../../src/list.h',
'../../src/lithium.cc',
'../../src/lithium.h',
'../../src/lithium-allocator.cc',
'../../src/lithium-allocator.h',
'../../src/lithium-allocator-inl.h',
'../../src/liveedit.cc',
'../../src/liveedit.h',
'../../src/liveobjectlist-inl.h',
'../../src/liveobjectlist.cc',
'../../src/liveobjectlist.h',
'../../src/log-inl.h',
'../../src/log-utils.cc',
'../../src/log-utils.h',
'../../src/log.cc',
'../../src/log.h',
'../../src/macro-assembler.h',
'../../src/mark-compact.cc',
'../../src/mark-compact.h',
'../../src/messages.cc',
'../../src/messages.h',
'../../src/natives.h',
'../../src/objects-debug.cc',
'../../src/objects-printer.cc',
'../../src/objects-inl.h',
'../../src/objects-visiting.cc',
'../../src/objects-visiting.h',
'../../src/objects.cc',
'../../src/objects.h',
'../../src/once.cc',
'../../src/once.h',
'../../src/parser.cc',
'../../src/parser.h',
'../../src/platform-posix.h',
'../../src/platform-tls-mac.h',
'../../src/platform-tls-win32.h',
'../../src/platform-tls.h',
'../../src/platform.h',
'../../src/preparse-data-format.h',
'../../src/preparse-data.cc',
'../../src/preparse-data.h',
'../../src/preparser.cc',
'../../src/preparser.h',
'../../src/prettyprinter.cc',
'../../src/prettyprinter.h',
'../../src/property.cc',
'../../src/property.h',
'../../src/property-details.h',
'../../src/profile-generator-inl.h',
'../../src/profile-generator.cc',
'../../src/profile-generator.h',
'../../src/regexp-macro-assembler-irregexp-inl.h',
'../../src/regexp-macro-assembler-irregexp.cc',
'../../src/regexp-macro-assembler-irregexp.h',
'../../src/regexp-macro-assembler-tracer.cc',
'../../src/regexp-macro-assembler-tracer.h',
'../../src/regexp-macro-assembler.cc',
'../../src/regexp-macro-assembler.h',
'../../src/regexp-stack.cc',
'../../src/regexp-stack.h',
'../../src/rewriter.cc',
'../../src/rewriter.h',
'../../src/runtime.cc',
'../../src/runtime.h',
'../../src/runtime-profiler.cc',
'../../src/runtime-profiler.h',
'../../src/safepoint-table.cc',
'../../src/safepoint-table.h',
'../../src/scanner.cc',
'../../src/scanner.h',
'../../src/scanner-character-streams.cc',
'../../src/scanner-character-streams.h',
'../../src/scopeinfo.cc',
'../../src/scopeinfo.h',
'../../src/scopes.cc',
'../../src/scopes.h',
'../../src/serialize.cc',
'../../src/serialize.h',
'../../src/small-pointer-list.h',
'../../src/smart-array-pointer.h',
'../../src/snapshot-common.cc',
'../../src/snapshot.h',
'../../src/spaces-inl.h',
'../../src/spaces.cc',
'../../src/spaces.h',
'../../src/store-buffer-inl.h',
'../../src/store-buffer.cc',
'../../src/store-buffer.h',
'../../src/string-search.cc',
'../../src/string-search.h',
'../../src/string-stream.cc',
'../../src/string-stream.h',
'../../src/strtod.cc',
'../../src/strtod.h',
'../../src/stub-cache.cc',
'../../src/stub-cache.h',
'../../src/token.cc',
'../../src/token.h',
'../../src/type-info.cc',
'../../src/type-info.h',
'../../src/unbound-queue-inl.h',
'../../src/unbound-queue.h',
'../../src/unicode-inl.h',
'../../src/unicode.cc',
'../../src/unicode.h',
'../../src/utils-inl.h',
'../../src/utils.cc',
'../../src/utils.h',
'../../src/v8-counters.cc',
'../../src/v8-counters.h',
'../../src/v8.cc',
'../../src/v8.h',
'../../src/v8checks.h',
'../../src/v8conversions.cc',
'../../src/v8conversions.h',
'../../src/v8globals.h',
'../../src/v8memory.h',
'../../src/v8threads.cc',
'../../src/v8threads.h',
'../../src/v8utils.cc',
'../../src/v8utils.h',
'../../src/variables.cc',
'../../src/variables.h',
'../../src/version.cc',
'../../src/version.h',
'../../src/vm-state-inl.h',
'../../src/vm-state.h',
'../../src/zone-inl.h',
'../../src/zone.cc',
'../../src/zone.h',
'../../src/extensions/externalize-string-extension.cc',
'../../src/extensions/externalize-string-extension.h',
'../../src/extensions/gc-extension.cc',
'../../src/extensions/gc-extension.h',
],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}, {
'toolsets': ['target'],
}],
['v8_target_arch=="arm"', {
'sources': [
'../../src/arm/assembler-arm-inl.h',
'../../src/arm/assembler-arm.cc',
'../../src/arm/assembler-arm.h',
'../../src/arm/builtins-arm.cc',
'../../src/arm/code-stubs-arm.cc',
'../../src/arm/code-stubs-arm.h',
'../../src/arm/codegen-arm.cc',
'../../src/arm/codegen-arm.h',
'../../src/arm/constants-arm.h',
'../../src/arm/constants-arm.cc',
'../../src/arm/cpu-arm.cc',
'../../src/arm/debug-arm.cc',
'../../src/arm/deoptimizer-arm.cc',
'../../src/arm/disasm-arm.cc',
'../../src/arm/frames-arm.cc',
'../../src/arm/frames-arm.h',
'../../src/arm/full-codegen-arm.cc',
'../../src/arm/ic-arm.cc',
'../../src/arm/lithium-arm.cc',
'../../src/arm/lithium-arm.h',
'../../src/arm/lithium-codegen-arm.cc',
'../../src/arm/lithium-codegen-arm.h',
'../../src/arm/lithium-gap-resolver-arm.cc',
'../../src/arm/lithium-gap-resolver-arm.h',
'../../src/arm/macro-assembler-arm.cc',
'../../src/arm/macro-assembler-arm.h',
'../../src/arm/regexp-macro-assembler-arm.cc',
'../../src/arm/regexp-macro-assembler-arm.h',
'../../src/arm/simulator-arm.cc',
'../../src/arm/stub-cache-arm.cc',
],
}],
['v8_target_arch=="ia32" or v8_target_arch=="mac" or OS=="mac"', {
'sources': [
'../../src/ia32/assembler-ia32-inl.h',
'../../src/ia32/assembler-ia32.cc',
'../../src/ia32/assembler-ia32.h',
'../../src/ia32/builtins-ia32.cc',
'../../src/ia32/code-stubs-ia32.cc',
'../../src/ia32/code-stubs-ia32.h',
'../../src/ia32/codegen-ia32.cc',
'../../src/ia32/codegen-ia32.h',
'../../src/ia32/cpu-ia32.cc',
'../../src/ia32/debug-ia32.cc',
'../../src/ia32/deoptimizer-ia32.cc',
'../../src/ia32/disasm-ia32.cc',
'../../src/ia32/frames-ia32.cc',
'../../src/ia32/frames-ia32.h',
'../../src/ia32/full-codegen-ia32.cc',
'../../src/ia32/ic-ia32.cc',
'../../src/ia32/lithium-codegen-ia32.cc',
'../../src/ia32/lithium-codegen-ia32.h',
'../../src/ia32/lithium-gap-resolver-ia32.cc',
'../../src/ia32/lithium-gap-resolver-ia32.h',
'../../src/ia32/lithium-ia32.cc',
'../../src/ia32/lithium-ia32.h',
'../../src/ia32/macro-assembler-ia32.cc',
'../../src/ia32/macro-assembler-ia32.h',
'../../src/ia32/regexp-macro-assembler-ia32.cc',
'../../src/ia32/regexp-macro-assembler-ia32.h',
'../../src/ia32/stub-cache-ia32.cc',
],
}],
['v8_target_arch=="mips"', {
'sources': [
'../../src/mips/assembler-mips.cc',
'../../src/mips/assembler-mips.h',
'../../src/mips/assembler-mips-inl.h',
'../../src/mips/builtins-mips.cc',
'../../src/mips/codegen-mips.cc',
'../../src/mips/codegen-mips.h',
'../../src/mips/code-stubs-mips.cc',
'../../src/mips/code-stubs-mips.h',
'../../src/mips/constants-mips.cc',
'../../src/mips/constants-mips.h',
'../../src/mips/cpu-mips.cc',
'../../src/mips/debug-mips.cc',
'../../src/mips/deoptimizer-mips.cc',
'../../src/mips/disasm-mips.cc',
'../../src/mips/frames-mips.cc',
'../../src/mips/frames-mips.h',
'../../src/mips/full-codegen-mips.cc',
'../../src/mips/ic-mips.cc',
'../../src/mips/lithium-codegen-mips.cc',
'../../src/mips/lithium-codegen-mips.h',
'../../src/mips/lithium-gap-resolver-mips.cc',
'../../src/mips/lithium-gap-resolver-mips.h',
'../../src/mips/lithium-mips.cc',
'../../src/mips/lithium-mips.h',
'../../src/mips/macro-assembler-mips.cc',
'../../src/mips/macro-assembler-mips.h',
'../../src/mips/regexp-macro-assembler-mips.cc',
'../../src/mips/regexp-macro-assembler-mips.h',
'../../src/mips/simulator-mips.cc',
'../../src/mips/stub-cache-mips.cc',
],
}],
['v8_target_arch=="x64" or v8_target_arch=="mac" or OS=="mac"', {
'sources': [
'../../src/x64/assembler-x64-inl.h',
'../../src/x64/assembler-x64.cc',
'../../src/x64/assembler-x64.h',
'../../src/x64/builtins-x64.cc',
'../../src/x64/code-stubs-x64.cc',
'../../src/x64/code-stubs-x64.h',
'../../src/x64/codegen-x64.cc',
'../../src/x64/codegen-x64.h',
'../../src/x64/cpu-x64.cc',
'../../src/x64/debug-x64.cc',
'../../src/x64/deoptimizer-x64.cc',
'../../src/x64/disasm-x64.cc',
'../../src/x64/frames-x64.cc',
'../../src/x64/frames-x64.h',
'../../src/x64/full-codegen-x64.cc',
'../../src/x64/ic-x64.cc',
'../../src/x64/lithium-codegen-x64.cc',
'../../src/x64/lithium-codegen-x64.h',
'../../src/x64/lithium-gap-resolver-x64.cc',
'../../src/x64/lithium-gap-resolver-x64.h',
'../../src/x64/lithium-x64.cc',
'../../src/x64/lithium-x64.h',
'../../src/x64/macro-assembler-x64.cc',
'../../src/x64/macro-assembler-x64.h',
'../../src/x64/regexp-macro-assembler-x64.cc',
'../../src/x64/regexp-macro-assembler-x64.h',
'../../src/x64/stub-cache-x64.cc',
],
}],
['OS=="linux"', {
'link_settings': {
'conditions': [
['v8_compress_startup_data=="bz2"', {
'libraries': [
'-lbz2',
]
}],
],
},
'sources': [
'../../src/platform-linux.cc',
'../../src/platform-posix.cc'
],
}
],
['OS=="android"', {
'defines': [
'CAN_USE_VFP_INSTRUCTIONS',
],
'sources': [
'../../src/platform-posix.cc',
],
'conditions': [
['host_os=="mac"', {
'target_conditions': [
['_toolset=="host"', {
'sources': [
'../../src/platform-macos.cc'
]
}, {
'sources': [
'../../src/platform-linux.cc'
]
}],
],
}, {
'sources': [
'../../src/platform-linux.cc'
]
}],
],
},
],
['OS=="freebsd"', {
'link_settings': {
'libraries': [
'-L/usr/local/lib -lexecinfo',
]},
'sources': [
'../../src/platform-freebsd.cc',
'../../src/platform-posix.cc'
],
}
],
['OS=="openbsd"', {
'link_settings': {
'libraries': [
'-L/usr/local/lib -lexecinfo',
]},
'sources': [
'../../src/platform-openbsd.cc',
'../../src/platform-posix.cc'
],
}
],
['OS=="netbsd"', {
'link_settings': {
'libraries': [
'-L/usr/pkg/lib -Wl,-R/usr/pkg/lib -lexecinfo',
]},
'sources': [
'../../src/platform-openbsd.cc',
'../../src/platform-posix.cc'
],
}
],
['OS=="solaris"', {
'link_settings': {
'libraries': [
'-lsocket -lnsl',
]},
'sources': [
'../../src/platform-solaris.cc',
'../../src/platform-posix.cc',
],
}
],
['OS=="mac"', {
'sources': [
'../../src/platform-macos.cc',
'../../src/platform-posix.cc'
]},
],
['OS=="win"', {
'sources': [
'../../src/platform-win32.cc',
'../../src/win32-math.cc',
'../../src/win32-math.h',
],
'msvs_disabled_warnings': [4351, 4355, 4800],
'direct_dependent_settings': {
'msvs_disabled_warnings': [4351, 4355, 4800],
},
'link_settings': {
'libraries': [ '-lwinmm.lib', '-lws2_32.lib' ],
},
}],
['component=="shared_library"', {
'defines': [
'BUILDING_V8_SHARED',
'V8_SHARED',
],
}],
['v8_postmortem_support=="true"', {
'sources': [
'<(SHARED_INTERMEDIATE_DIR)/debug-support.cc',
]
}],
],
},
{
'target_name': 'js2c',
'type': 'none',
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host'],
}, {
'toolsets': ['target'],
}],
],
'variables': {
'library_files': [
'../../src/runtime.js',
'../../src/v8natives.js',
'../../src/array.js',
'../../src/string.js',
'../../src/uri.js',
'../../src/math.js',
'../../src/messages.js',
'../../src/apinatives.js',
'../../src/debug-debugger.js',
'../../src/mirror-debugger.js',
'../../src/liveedit-debugger.js',
'../../src/date.js',
'../../src/json.js',
'../../src/regexp.js',
'../../src/macros.py',
],
'experimental_library_files': [
'../../src/macros.py',
'../../src/proxy.js',
'../../src/collection.js',
],
},
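      # For reference, the js2c action below expands to roughly this command
      # (illustrative; gyp assembles the real invocation and paths):
      #   python ../../tools/js2c.py <gen>/libraries.cc CORE \
      #     <(v8_compress_startup_data) <library and macro files>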
'actions': [
{
'action_name': 'js2c',
'inputs': [
'../../tools/js2c.py',
'<@(library_files)',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
],
'action': [
'python',
'../../tools/js2c.py',
'<@(_outputs)',
'CORE',
'<(v8_compress_startup_data)',
'<@(library_files)'
],
},
{
'action_name': 'js2c_experimental',
'inputs': [
'../../tools/js2c.py',
            '<@(experimental_library_files)',
          ],
          'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
],
'action': [
'python',
'../../tools/js2c.py',
'<@(_outputs)',
'EXPERIMENTAL',
'<(v8_compress_startup_data)',
'<@(experimental_library_files)'
],
},
],
},
{
'target_name': 'postmortem-metadata',
'type': 'none',
'variables': {
'heapobject_files': [
'../../src/objects.h',
'../../src/objects-inl.h',
],
},
'actions': [
{
'action_name': 'gen-postmortem-metadata',
'inputs': [
'../../tools/gen-postmortem-metadata.py',
'<@(heapobject_files)',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/debug-support.cc',
],
'action': [
'python',
'../../tools/gen-postmortem-metadata.py',
'<@(_outputs)',
'<@(heapobject_files)'
]
}
]
},
{
'target_name': 'mksnapshot',
'type': 'executable',
'dependencies': [
'v8_base',
'v8_nosnapshot',
],
'include_dirs+': [
'../../src',
],
'sources': [
'../../src/mksnapshot.cc',
],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host'],
}, {
'toolsets': ['target'],
}],
['v8_compress_startup_data=="bz2"', {
'libraries': [
'-lbz2',
]
}],
],
},
{
'target_name': 'v8_shell',
'type': 'executable',
'dependencies': [
'v8'
],
'sources': [
'../../samples/shell.cc',
],
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host'],
}, {
'toolsets': ['target'],
}],
['OS=="win"', {
# This could be gotten by not setting chromium_code, if that's OK.
'defines': ['_CRT_SECURE_NO_WARNINGS'],
}],
['v8_compress_startup_data=="bz2"', {
'libraries': [
'-lbz2',
]
}],
],
},
{
'target_name': 'preparser_lib',
'type': '<(library)',
'include_dirs+': [
'../../src',
],
'sources': [
'../../include/v8-preparser.h',
'../../include/v8stdint.h',
'../../src/allocation.cc',
'../../src/allocation.h',
'../../src/atomicops.h',
'../../src/atomicops_internals_x86_gcc.cc',
'../../src/bignum.cc',
'../../src/bignum.h',
'../../src/bignum-dtoa.cc',
'../../src/bignum-dtoa.h',
'../../src/cached-powers.cc',
'../../src/cached-powers.h',
'../../src/char-predicates-inl.h',
'../../src/char-predicates.h',
'../../src/checks.h',
'../../src/conversions-inl.h',
'../../src/conversions.cc',
'../../src/conversions.h',
'../../src/diy-fp.cc',
'../../src/diy-fp.h',
'../../src/double.h',
'../../src/dtoa.cc',
'../../src/dtoa.h',
'../../src/fast-dtoa.cc',
'../../src/fast-dtoa.h',
'../../src/fixed-dtoa.cc',
'../../src/fixed-dtoa.h',
'../../src/globals.h',
'../../src/hashmap.h',
'../../src/list-inl.h',
'../../src/list.h',
'../../src/once.cc',
'../../src/once.h',
'../../src/preparse-data-format.h',
'../../src/preparse-data.cc',
'../../src/preparse-data.h',
'../../src/preparser.cc',
'../../src/preparser.h',
'../../src/preparser-api.cc',
'../../src/scanner.cc',
'../../src/scanner.h',
'../../src/strtod.cc',
'../../src/strtod.h',
'../../src/token.cc',
'../../src/token.h',
'../../src/unicode-inl.h',
'../../src/unicode.cc',
'../../src/unicode.h',
'../../src/utils-inl.h',
'../../src/utils.cc',
'../../src/utils.h',
],
'conditions': [
['OS=="win"', {
'sources': [
'../../src/win32-math.cc',
'../../src/win32-math.h',
]}],
],
},
],
}, { # use_system_v8 != 0
'targets': [
{
'target_name': 'v8',
'type': 'none',
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}, {
'toolsets': ['target'],
}],
],
'link_settings': {
'libraries': [
'-lv8',
],
},
},
{
'target_name': 'v8_shell',
'type': 'none',
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host'],
}, {
'toolsets': ['target'],
}],
],
'dependencies': [
'v8'
],
},
],
}],
],
}
gen_boundary_data.py
# -*- coding: utf-8 -*-
# Copyright 2010-2021, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A tool to generate boundary data.
Boundary data binary image is an array of uint16 whose length is 2N, where N is
the number of POS IDs including special POS. The array has the following
structure:
-------------------------------------
prefix penalty of POS ID 0 (2 bytes)
-------------------------------------
suffix penalty of POS ID 0 (2 bytes)
-------------------------------------
prefix penalty of POS ID 1 (2 bytes)
-------------------------------------
suffix penalty of POS ID 1 (2 bytes)
-------------------------------------
.
.
.
-------------------------------------
prefix penalty of POS ID N (2 bytes)
-------------------------------------
suffix penalty of POS ID N (2 bytes)
-------------------------------------
See converter/segmenter.cc for how it's used.
"""
from __future__ import absolute_import
from __future__ import print_function
import codecs
import optparse
import re
import struct
import sys
from six.moves import range
def PatternToRegexp(pattern):
return '^' + pattern.replace('*', '[^,]+')
def LoadPatterns(file):
prefix = []
suffix = []
for line in codecs.open(file, 'r', encoding='utf-8'):
if len(line) <= 1 or line[0] == '#':
continue
fields = line.split()
label = fields[0]
feature = fields[1]
cost = int(fields[2])
if cost < 0 or cost > 0xffff:
sys.exit(-1)
if label == 'PREFIX':
prefix.append([re.compile(PatternToRegexp(feature)), cost])
elif label == 'SUFFIX':
suffix.append([re.compile(PatternToRegexp(feature)), cost])
else:
print('format error %s' % (line))
      sys.exit(1)  # non-zero exit: the input line was malformed
return (prefix, suffix)
def GetCost(patterns, feature):
  for p in patterns:
    pat = p[0]
    cost = p[1]
    if pat.match(feature):
      return cost
  return 0
def LoadFeatures(filename):
features = []
for line in codecs.open(filename, 'r', encoding='utf-8'):
fields = line.split()
features.append(fields[1])
return features
def CountSpecialPos(filename):
count = 0
for line in codecs.open(filename, 'r', encoding='utf-8'):
line = line.rstrip()
if not line or line[0] == '#':
continue
count += 1
return count
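# Illustrative helper (added for clarity; not part of the original tool).
# Decodes the binary written by main() back into (prefix, suffix) penalty
# pairs, assuming the little-endian uint16 layout from the module docstring.
def ReadBoundaryData(filename):
  with open(filename, 'rb') as f:
    return list(struct.iter_unpack('<HH', f.read()))  # [(prefix, suffix), ...]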
def ParseOptions():
parser = optparse.OptionParser()
parser.add_option('--boundary_def', dest='boundary_def',
help='Boundary definition file')
parser.add_option('--id_def', dest='id_def',
help='Boundary definition file')
parser.add_option('--special_pos', dest='special_pos',
help='Special POS definition file')
parser.add_option('--output', dest='output',
help='Output binary file')
return parser.parse_args()[0]
def main():
opts = ParseOptions()
prefix, suffix = LoadPatterns(opts.boundary_def)
features = LoadFeatures(opts.id_def)
num_special_pos = CountSpecialPos(opts.special_pos)
with open(opts.output, 'wb') as f:
for feature in features:
f.write(struct.pack('<H', GetCost(prefix, feature)))
f.write(struct.pack('<H', GetCost(suffix, feature)))
for _ in range(num_special_pos):
f.write(struct.pack('<H', 0))
f.write(struct.pack('<H', 0))
if __name__ == '__main__':
main()
sha256bruteforce.py
#!/usr/bin/env python3
import hashlib
def main():
print(hashlib.sha256("hugh,13145820,20193833".encode("ascii")).hexdigest())
# 13145820
guess_flag = True
digits = 1
while guess_flag:
bound = 10**digits
guess = 0
while guess < bound:
guess_str = ("hugh,{:0" + str(digits) +
"d},20193833").format(guess)
print(guess_str, end='\r')
result = hashlib.sha256(guess_str.encode("ascii")).hexdigest()
if result == "ee688ca24c201a27fcc94ebd46e87ae6a7c4f54b445fccfc0727a70332353f7f":
print("Right! %s" % guess)
guess_flag = False
break
guess += 1
digits += 1
if __name__ == "__main__":
main()
__init__.py
# U S Σ R Δ T O R / Ümüd
""" U S Σ R Δ T O R """
from userbot import LOGS
from telethon.tl.types import DocumentAttributeFilename
def __list_all_modules():
from os.path import dirname, basename, isfile
import glob
mod_paths = glob.glob(dirname(__file__) + "/*.py")
all_modules = [
basename(f)[:-3] for f in mod_paths
if isfile(f) and f.endswith(".py") and not f.endswith("__init__.py")
]
return all_modules
ALL_MODULES = sorted(__list_all_modules())
LOGS.info("Modules to load: %s", str(ALL_MODULES))
__all__ = ALL_MODULES + ["ALL_MODULES"]
async def MEDIACHECK(reply):
type = "img"
if reply and reply.media:
if reply.photo:
data = reply.photo
elif reply.document:
if DocumentAttributeFilename(file_name='AnimatedSticker.tgs') in reply.media.document.attributes:
return False
if reply.gif or reply.video:
type = "vid"
if reply.audio or reply.voice:
return False
data = reply.media.document
else:
return False
else:
return False
    if not data or data is None:
        return False
else:
return (data, type)
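# Illustrative usage from an event handler (hypothetical; not part of this
# module):
#     media = await MEDIACHECK(reply)
#     if media:
#         data, media_type = media  # media_type is "img" or "vid"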
__init__.py
import sys
import os
import re
import importlib
import warnings
is_pypy = '__pypy__' in sys.builtin_module_names
warnings.filterwarnings('ignore',
r'.+ distutils\b.+ deprecated',
DeprecationWarning)
def warn_distutils_present():
if 'distutils' not in sys.modules:
return
if is_pypy and sys.version_info < (3, 7):
# PyPy for 3.6 unconditionally imports distutils, so bypass the warning
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
return
warnings.warn(
"Distutils was imported before Setuptools, but importing Setuptools "
"also replaces the `distutils` module in `sys.modules`. This may lead "
"to undesirable behaviors or errors. To avoid these issues, avoid "
"using distutils directly, ensure that setuptools is installed in the "
"traditional way (e.g. not an editable install), and/or make sure "
"that setuptools is always imported before distutils.")
def clear_distutils():
if 'distutils' not in sys.modules:
return
warnings.warn("Setuptools is replacing distutils.")
mods = [name for name in sys.modules if re.match(r'distutils\b', name)]
for name in mods:
del sys.modules[name]
def enabled():
"""
Allow selection of distutils by environment variable.
"""
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')
return which == 'local'
def ensure_local_distutils():
clear_distutils()
# With the DistutilsMetaFinder in place,
# perform an import to cause distutils to be
# loaded from setuptools._distutils. Ref #2906.
add_shim()
importlib.import_module('distutils')
remove_shim()
# check that submodules load as expected
core = importlib.import_module('distutils.core')
assert '_distutils' in core.__file__, core.__file__
def do_override():
"""
Ensure that the local copy of distutils is preferred over stdlib.
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
for more motivation.
"""
if enabled():
warn_distutils_present()
ensure_local_distutils()
class DistutilsMetaFinder:
def find_spec(self, fullname, path, target=None):
if path is not None:
return
method_name = 'spec_for_{fullname}'.format(**locals())
method = getattr(self, method_name, lambda: None)
return method()
def spec_for_distutils(self):
import importlib.abc
import importlib.util
class DistutilsLoader(importlib.abc.Loader):
def create_module(self, spec):
return importlib.import_module('setuptools._distutils')
def exec_module(self, module):
pass
return importlib.util.spec_from_loader('distutils', DistutilsLoader())
def spec_for_pip(self):
"""
Ensure stdlib distutils when running under pip.
See pypa/pip#8761 for rationale.
"""
if self.pip_imported_during_build():
return
clear_distutils()
self.spec_for_distutils = lambda: None
@staticmethod
def pip_imported_during_build():
"""
Detect if pip is being imported in a build script. Ref #2355.
"""
import traceback
return any(
frame.f_globals['__file__'].endswith('setup.py')
for frame, line in traceback.walk_stack(None)
)
DISTUTILS_FINDER = DistutilsMetaFinder()
def add_shim():
    sys.meta_path.insert(0, DISTUTILS_FINDER)
def remove_shim():
try:
sys.meta_path.remove(DISTUTILS_FINDER)
except ValueError:
pass
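# Minimal usage sketch (illustrative; setuptools itself triggers this via a
# .pth file, but the effect is the same when SETUPTOOLS_USE_DISTUTILS=local):
#
#     import _distutils_hack
#     _distutils_hack.do_override()   # installs the shim if enabled()
#     import distutils.core           # now loaded from setuptools._distutils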
version.go
package cmd
import "github.com/spf13/cobra"
func NewCmdVersion() *cobra.Command {
var cmd = &cobra.Command{
Use: "version",
Short: "version",
		Long:  `version`,
		RunE: func(cmd *cobra.Command, args []string) error {
			cmd.Println(version)
return nil
},
}
return cmd
}
method.go
package method
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"strings"
"github.com/kirillkuprii/gotest/app/contract"
"github.com/kirillkuprii/gotest/app/filters"
"github.com/kirillkuprii/gotest/app/storage"
)
// GetAllCoupons returns list of all coupons
func GetAllCoupons(db storage.Storage, writer http.ResponseWriter, requestPtr *http.Request) {
data, err := json.Marshal(db.GetAllItems())
if err != nil {
http.Error(writer, err.Error(), http.StatusInternalServerError)
return
}
fmt.Fprintf(writer, "%s", data)
}
// GetCouponsCount returns count of all coupons
func GetCouponsCount(db storage.Storage, writer http.ResponseWriter, requestPtr *http.Request) {
number := len(db.GetAllItems())
writer.Header().Set("Content-Length", fmt.Sprintf("%v", number))
}
// GetFilteredCoupons applies a filter to coupons
func GetFilteredCoupons(db storage.Storage, writer http.ResponseWriter, requestPtr *http.Request) {
filters, err := getFilters(requestPtr.URL.String())
if err != nil {
http.Error(writer, err.Error(), http.StatusBadRequest)
return
}
items := make(map[int]*contract.Coupon)
for key, item := range db.GetAllItems() {
bFiltered := false
for _, filter := range filters {
if !filter.IsPassing(item) {
bFiltered = true
break
}
}
if !bFiltered {
items[key] = item
}
}
data, err := json.Marshal(items)
if err != nil {
http.Error(writer, err.Error(), http.StatusInternalServerError)
return
}
fmt.Fprintf(writer, "%s", data)
}
// DeleteCoupons deletes selected coupon(s)
func DeleteCoupons(db storage.Storage, writer http.ResponseWriter, requestPtr *http.Request) {
items := []storage.UID{}
body, err := ioutil.ReadAll(requestPtr.Body)
if err != nil {
http.Error(writer, err.Error(), http.StatusBadRequest)
return
}
if err = json.Unmarshal(body, &items); err != nil {
http.Error(writer, err.Error(), http.StatusBadRequest)
return
}
db.DeleteItems(items)
writer.WriteHeader(http.StatusNoContent)
}
// PutCoupons adds new coupon(s) or modifies existing
func PutCoupons(db storage.Storage, writer http.ResponseWriter, requestPtr *http.Request) {
var PutList struct {
Update map[storage.UID]contract.Coupon `json:"update"`
Add []contract.Coupon `json:"add"`
}
body, err := ioutil.ReadAll(requestPtr.Body)
if err != nil {
http.Error(writer, err.Error(), http.StatusBadRequest)
return
}
if err = json.Unmarshal(body, &PutList); err != nil {
http.Error(writer, err.Error(), http.StatusBadRequest)
return
}
db.UpdateItems(&PutList.Update)
itemsToAdd := []*contract.Coupon{}
	for i := range PutList.Add {
		// Take the address of the slice element rather than the loop
		// variable: before Go 1.22, &coupon aliases one reused variable,
		// so every appended pointer would point at the last coupon.
		itemsToAdd = append(itemsToAdd, &PutList.Add[i])
	}
db.AddItems(itemsToAdd)
}
func getFilters(url string) ([]filters.Filter, error) {
parsed := strings.Split(strings.Replace(url, "%20", " ", -1), "?")
params := parsed[1:]
filtersList := []filters.Filter{}
for _, param := range params {
filter, err := filters.CreateFilter(param)
if err != nil {
			return nil, err
		}
		filtersList = append(filtersList, filter)
}
return filtersList, nil
}
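// Illustrative request shape (the token grammar itself is defined by
// filters.CreateFilter, so the operators here are hypothetical):
//
//	/coupons?brand=acme?discount>10
//
// getFilters splits on '?', drops the path, and turns each remaining token
// into one Filter; '%20' sequences are decoded to spaces first.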
fst_path.rs
use std::hash::{Hash, Hasher};
use anyhow::Result;
use crate::semirings::Semiring;
use crate::{Label, EPS_LABEL};
/// Structure representing a path in a FST
/// (list of input labels, list of output labels and total weight).
#[derive(PartialEq, Debug, Clone, PartialOrd)]
pub struct FstPath<W: Semiring> {
/// List of input labels.
pub ilabels: Vec<Label>,
/// List of output labels.
pub olabels: Vec<Label>,
/// Total weight of the path computed by multiplying the weight of each transition.
pub weight: W,
}
impl<W: Semiring> FstPath<W> {
/// Creates a new Path.
pub fn new(ilabels: Vec<Label>, olabels: Vec<Label>, weight: W) -> Self {
FstPath {
ilabels,
olabels,
weight,
}
}
/// Adds the content of an FST transition to the Path.
/// Labels are added at the end of the corresponding vectors and the weight
/// is multiplied by the total weight already stored in the Path.
pub fn add_to_path(&mut self, ilabel: Label, olabel: Label, weight: &W) -> Result<()> {
if ilabel != EPS_LABEL {
self.ilabels.push(ilabel);
}
if olabel != EPS_LABEL {
self.olabels.push(olabel);
}
self.weight.times_assign(weight)
}
/// Add a single weight to the Path by multiplying the weight by the total weight of the path.
pub fn add_weight(&mut self, weight: &W) -> Result<()> {
self.weight.times_assign(weight)
}
/// Append a Path to the current Path. Labels are appended and weights multiplied.
pub fn concat(&mut self, other: FstPath<W>) -> Result<()> {
self.ilabels.extend(other.ilabels);
self.olabels.extend(other.olabels);
self.weight.times_assign(other.weight)
}
}
impl<W: Semiring> Default for FstPath<W> {
/// Creates an empty path with a weight one.
fn default() -> Self {
FstPath {
ilabels: vec![],
olabels: vec![],
weight: W::one(),
}
}
}
#[allow(clippy::derive_hash_xor_eq)]
impl<W: Semiring + Hash + Eq> Hash for FstPath<W> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.ilabels.hash(state);
self.olabels.hash(state);
self.weight.hash(state);
}
}
impl<W: Semiring + Hash + Eq> Eq for FstPath<W> {}
/// Creates a Path containing the arguments.
///
/// There are multiple forms to this macro :
///
/// - Create an unweighted acceptor path :
///
/// ```
/// # #[macro_use] extern crate rustfst; fn main() {
/// # use rustfst::semirings::{IntegerWeight, Semiring};
/// # use rustfst::FstPath;
/// let path : FstPath<IntegerWeight> = fst_path![1,2,3];
/// assert_eq!(path.ilabels, vec![1,2,3]);
/// assert_eq!(path.olabels, vec![1,2,3]);
/// assert_eq!(path.weight, IntegerWeight::one());
/// # }
/// ```
///
/// - Create an unweighted transducer path :
///
/// ```
/// # #[macro_use] extern crate rustfst; fn main() {
/// # use rustfst::semirings::{IntegerWeight, Semiring};
/// # use rustfst::FstPath;
/// let path : FstPath<IntegerWeight> = fst_path![1,2,3 => 1,2,4];
/// assert_eq!(path.ilabels, vec![1,2,3]);
/// assert_eq!(path.olabels, vec![1,2,4]);
/// assert_eq!(path.weight, IntegerWeight::one());
/// # }
/// ```
///
/// - Create a weighted acceptor path :
///
/// ```
/// # #[macro_use] extern crate rustfst; fn main() {
/// # use rustfst::semirings::{IntegerWeight, Semiring};
/// # use rustfst::FstPath;
/// let path : FstPath<IntegerWeight> = fst_path![1,2,3; 18];
/// assert_eq!(path.ilabels, vec![1,2,3]);
/// assert_eq!(path.olabels, vec![1,2,3]);
/// assert_eq!(path.weight, IntegerWeight::new(18));
/// # }
/// ```
///
/// - Create a weighted transducer path :
///
/// ```
/// # #[macro_use] extern crate rustfst; fn main() {
/// # use rustfst::semirings::{IntegerWeight, Semiring};
/// # use rustfst::FstPath;
/// let path : FstPath<IntegerWeight> = fst_path![1,2,3 => 1,2,4; 18];
/// assert_eq!(path.ilabels, vec![1,2,3]);
/// assert_eq!(path.olabels, vec![1,2,4]);
/// assert_eq!(path.weight, IntegerWeight::new(18));
/// # }
/// ```
///
#[macro_export]
macro_rules! fst_path {
    ( $( $x:expr ),*) => {
{
fn semiring_one<W: Semiring>() -> W {
W::one()
}
FstPath::new(
vec![$($x),*],
vec![$($x),*],
semiring_one()
)
}
};
( $( $x:expr ),* => $( $y:expr ),* ) => {
{
fn semiring_one<W: Semiring>() -> W {
W::one()
}
FstPath::new(
vec![$($x),*],
vec![$($y),*],
semiring_one()
)
}
};
( $( $x:expr ),* ; $weight:expr) => {
{
fn semiring_new<W: Semiring>(v: W::Type) -> W {
W::new(v)
}
FstPath::new(
vec![$($x),*],
vec![$($x),*],
semiring_new($weight)
)
}
};
( $( $x:expr ),* => $( $y:expr ),* ; $weight:expr) => {
{
fn semiring_new<W: Semiring>(v: W::Type) -> W {
W::new(v)
}
FstPath::new(
vec![$($x),*],
vec![$($y),*],
semiring_new($weight)
)
}
};
}
test_conventions.py
import numpy as np
import pandas as pd
from datetime import datetime
import warnings
from xray import conventions
from . import TestCase, requires_netCDF4
class TestMaskedAndScaledArray(TestCase):
def test(self):
x = conventions.MaskedAndScaledArray(np.arange(3), fill_value=0)
self.assertEqual(x.dtype, np.dtype('float'))
self.assertEqual(x.shape, (3,))
self.assertEqual(x.size, 3)
self.assertEqual(x.ndim, 1)
self.assertEqual(len(x), 3)
self.assertArrayEqual([np.nan, 1, 2], x)
x = conventions.MaskedAndScaledArray(np.arange(3), add_offset=1)
self.assertArrayEqual(np.arange(3) + 1, x)
x = conventions.MaskedAndScaledArray(np.arange(3), scale_factor=2)
self.assertArrayEqual(2 * np.arange(3), x)
x = conventions.MaskedAndScaledArray(np.array([-99, -1, 0, 1, 2]), -99, 0.01, 1)
expected = np.array([np.nan, 0.99, 1, 1.01, 1.02])
self.assertArrayEqual(expected, x)
def test_0d(self):
x = conventions.MaskedAndScaledArray(np.array(0), fill_value=0)
self.assertTrue(np.isnan(x))
self.assertTrue(np.isnan(x[...]))
x = conventions.MaskedAndScaledArray(np.array(0), fill_value=10)
self.assertEqual(0, x[...])
class TestCharToStringArray(TestCase):
def test(self):
array = np.array(list('abc'))
actual = conventions.CharToStringArray(array)
expected = np.array('abc')
self.assertEqual(actual.dtype, expected.dtype)
self.assertEqual(actual.shape, expected.shape)
self.assertEqual(actual.size, expected.size)
self.assertEqual(actual.ndim, expected.ndim)
with self.assertRaises(TypeError):
len(actual)
self.assertArrayEqual(expected, actual)
with self.assertRaises(IndexError):
actual[:2]
self.assertEqual(str(actual), 'abc')
array = np.array([list('abc'), list('cdf')])
actual = conventions.CharToStringArray(array)
expected = np.array(['abc', 'cdf'])
self.assertEqual(actual.dtype, expected.dtype)
self.assertEqual(actual.shape, expected.shape)
self.assertEqual(actual.size, expected.size)
self.assertEqual(actual.ndim, expected.ndim)
self.assertEqual(len(actual), len(expected))
self.assertArrayEqual(expected, actual)
self.assertArrayEqual(expected[:1], actual[:1])
with self.assertRaises(IndexError):
actual[:, :2]
class TestDatetime(TestCase):
@requires_netCDF4
def test_cf_datetime(self):
import netCDF4 as nc4
for num_dates, units in [
(np.arange(100), 'days since 2000-01-01'),
(np.arange(100).reshape(10, 10), 'days since 2000-01-01'),
(12300 + np.arange(50), 'hours since 1680-01-01 00:00:00'),
(10, 'days since 2000-01-01'),
([10], 'days since 2000-01-01'),
([[10]], 'days since 2000-01-01'),
([10, 10], 'days since 2000-01-01'),
(0, 'days since 1000-01-01'),
([0], 'days since 1000-01-01'),
([[0]], 'days since 1000-01-01'),
(np.arange(20), 'days since 1000-01-01'),
(np.arange(0, 100000, 10000), 'days since 1900-01-01')
]:
for calendar in ['standard', 'gregorian', 'proleptic_gregorian']:
expected = nc4.num2date(num_dates, units, calendar)
actual = conventions.decode_cf_datetime(num_dates, units, calendar)
if (isinstance(actual, np.ndarray)
and np.issubdtype(actual.dtype, np.datetime64)):
self.assertEqual(actual.dtype, np.dtype('M8[ns]'))
# For some reason, numpy 1.8 does not compare ns precision
# datetime64 arrays as equal to arrays of datetime objects,
# but it works for us precision. Thus, convert to us
# precision for the actual array equal comparison...
actual_cmp = actual.astype('M8[us]')
else:
actual_cmp = actual
self.assertArrayEqual(expected, actual_cmp)
encoded, _, _ = conventions.encode_cf_datetime(actual, units, calendar)
self.assertArrayEqual(num_dates, np.around(encoded))
if (hasattr(num_dates, 'ndim') and num_dates.ndim == 1
and '1000' not in units):
# verify that wrapping with a pandas.Index works
# note that it *does not* currently work to even put
# non-datetime64 compatible dates into a pandas.Index :(
encoded, _, _ = conventions.encode_cf_datetime(
pd.Index(actual), units, calendar)
self.assertArrayEqual(num_dates, np.around(encoded))
@requires_netCDF4
def test_cf_datetime_nan(self):
for num_dates, units, expected_list in [
([np.nan], 'days since 2000-01-01', ['NaT']),
([np.nan, 0], 'days since 2000-01-01',
['NaT', '2000-01-01T00:00:00Z']),
([np.nan, 0, 1], 'days since 2000-01-01',
['NaT', '2000-01-01T00:00:00Z', '2000-01-02T00:00:00Z']),
]:
with warnings.catch_warnings():
warnings.filterwarnings('ignore', 'All-NaN')
actual = conventions.decode_cf_datetime(num_dates, units)
expected = np.array(expected_list, dtype='datetime64[ns]')
self.assertArrayEqual(expected, actual)
def test_guess_time_units(self):
for dates, expected in [(pd.date_range('1900-01-01', periods=5),
'days since 1900-01-01 00:00:00'),
(pd.date_range('1900-01-01 12:00:00', freq='H',
periods=2),
'hours since 1900-01-01 12:00:00'),
(['1900-01-01', '1900-01-02',
'1900-01-02 00:00:01'],
'seconds since 1900-01-01 00:00:00')]:
            self.assertEqual(expected, conventions.guess_time_units(dates))
redhat.py
# -*- coding: utf-8 -*-
import re
import os
import unix
import weakref
from .. import Linux, Chroot, LinuxError
from unix.linux.services import Initd, Upstart, Systemd
DISTRIBS = ('RedHat', 'CentOS')
_CONFDIR = '/etc/sysconfig'
_NETFILE = os.path.join(_CONFDIR, 'network')
def RedHat(host, force=False):
unix.isvalid(host)
root = host.__dict__.get('root', None)
instances = unix.instances(host)
if len(instances) >= 1:
host = Linux(getattr(unix, instances[0]).clone(host))
if root:
host = Chroot(host, root)
if host.distrib[0] not in DISTRIBS and not force:
raise LinuxError('invalid distrib')
class RedHatHost(host.__class__):
def __init__(self):
kwargs = {'root': root} if root else {}
host.__class__.__init__(self, **kwargs)
self.__dict__.update(host.__dict__)
        def list_packages(self):
            # RHEL-family hosts use rpm; 'dpkg -l' is the Debian equivalent
            # and would fail here.
            return self.execute('rpm -qa')
@property
def hostname(self):
with self.open(_NETFILE) as fhandler:
for line in fhandler.read().splitlines():
attr, value = line.split('=')
if attr == 'HOSTNAME':
return value
@hostname.setter
def hostname(self, value):
            with self.open(_NETFILE) as fhandler:
                # The pattern stops before the newline, so the replacement
                # must not add one (the original inserted a duplicate '\n').
                content = re.sub(r'HOSTNAME=[^\n]*',
                                 'HOSTNAME=%s' % value,
                                 fhandler.read())
with self.open(_NETFILE, 'w') as fhandler:
fhandler.write(content)
@property
def services(self):
major_version = int(self.distrib[1][0])
if major_version <= 5:
service_handler = Initd
elif major_version == 6:
service_handler = Upstart
elif major_version >= 7:
service_handler = Systemd
return service_handler(weakref.ref(self)())
return RedHatHost()
sys_email.go
package api
import (
"github.com/gin-gonic/gin"
"github.com/qingfeng777/owls/server/global"
"github.com/qingfeng777/owls/server/model/common/response"
email_response "github.com/qingfeng777/owls/server/plugin/email/model/response"
"github.com/qingfeng777/owls/server/plugin/email/service"
"go.uber.org/zap"
)
type EmailApi struct{}
// @Tags System
// @Summary Send a test email
// @Security ApiKeyAuth
// @Produce application/json
// @Success 200 {string} string "{"success":true,"data":{},"msg":"sent successfully"}"
// @Router /email/emailTest [post]
func (s *EmailApi) EmailTest(c *gin.Context) {
if err := service.ServiceGroupApp.EmailTest(); err != nil {
		global.GVA_LOG.Error("failed to send!", zap.Error(err))
		response.FailWithMessage("failed to send", c)
	} else {
		response.OkWithData("sent successfully", c)
}
}
// @Tags System
// @Summary Send an email
// @Security ApiKeyAuth
// @Produce application/json
// @Param data body email_response.Email true "parameters required to send an email"
// @Success 200 {string} string "{"success":true,"data":{},"msg":"sent successfully"}"
// @Router /email/sendEmail [post]
func (s *EmailApi) SendEmail(c *gin.Context) {
var email email_response.Email
	// Surface malformed request bodies instead of silently ignoring them.
	if err := c.ShouldBindJSON(&email); err != nil {
		response.FailWithMessage(err.Error(), c)
		return
	}
if err := service.ServiceGroupApp.SendEmail(email.To, email.Subject, email.Body); err != nil {
		global.GVA_LOG.Error("failed to send!", zap.Error(err))
		response.FailWithMessage("failed to send", c)
} else {
		response.OkWithData("sent successfully", c)
}
}
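// Illustrative request (assumes the email_response.Email model uses
// lower-case `to`/`subject`/`body` JSON tags; adjust to the actual model):
//
//	curl -X POST /email/sendEmail \
//	     -H 'Content-Type: application/json' \
//	     -d '{"to":"ops@example.com","subject":"hi","body":"test"}'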
button.tsx
import React, {FC, ReactElement} from 'react';
import {classNameFactory,classes} from '../helpers/classes'
import './button.scss'
interface ButtonProps extends React.ButtonHTMLAttributes<HTMLButtonElement>{
buttonType?: string,
size?:string,
shape?:string,
icon?: ReactElement,
loading?:boolean,
disabled?:boolean,
danger?:boolean
}
const cm=classNameFactory('button')
const Button:FC<ButtonProps> = (props) => {
const {className,buttonType,size,shape,icon,loading=false,disabled=false,danger=false,...rest}=props
const hasIcon=icon && 'hasIcon' || undefined
const _btnType=buttonType ||'default'
const isLoading=(loading && 'loading') ||undefined
const isDisabled=(disabled && 'disabled') ||undefined
const isDanger=(danger && 'danger') || undefined;
return (
<button className={classes(cm(),className,cm(_btnType),size,shape,hasIcon,isLoading,isDisabled,isDanger)} {...rest}>
{icon}
{props.children}
        </button>
    );
};
export default Button;
lib.rs
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use analyze::get_top_level_decls;
use anyhow::anyhow;
use anyhow::Context;
use anyhow::Result;
use analyze::get_ignore_line_indexes;
use anyhow::bail;
use deno_ast::apply_text_changes;
use deno_ast::TextChange;
use deno_graph::ModuleKind;
use graph::ModuleGraphOptions;
use mappings::Mappings;
use mappings::SYNTHETIC_SPECIFIERS;
use mappings::SYNTHETIC_TEST_SPECIFIERS;
use polyfills::build_polyfill_file;
use polyfills::polyfills_for_target;
use polyfills::Polyfill;
use specifiers::Specifiers;
use utils::get_relative_specifier;
use utils::prepend_statement_to_text;
use visitors::fill_polyfills;
use visitors::get_deno_comment_directive_text_changes;
use visitors::get_global_text_changes;
use visitors::get_import_exports_text_changes;
use visitors::FillPolyfillsParams;
use visitors::GetGlobalTextChangesParams;
use visitors::GetImportExportsTextChangesParams;
pub use deno_ast::ModuleSpecifier;
pub use loader::LoadResponse;
pub use loader::Loader;
pub use utils::url_to_file_path;
use crate::declaration_file_resolution::TypesDependency;
use crate::utils::strip_bom;
mod analyze;
mod declaration_file_resolution;
mod graph;
mod loader;
mod mappings;
mod parser;
mod polyfills;
mod specifiers;
mod utils;
mod visitors;
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
#[cfg_attr(feature = "serialization", serde(rename_all = "camelCase"))]
#[derive(Debug, PartialEq)]
pub struct OutputFile {
pub file_path: PathBuf,
pub file_text: String,
}
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
#[cfg_attr(feature = "serialization", derive(serde::Deserialize))]
#[cfg_attr(feature = "serialization", serde(rename_all = "camelCase"))]
#[derive(Clone, Debug, PartialEq)]
pub struct Dependency {
pub name: String,
pub version: String,
}
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
#[cfg_attr(feature = "serialization", serde(rename_all = "camelCase"))]
#[derive(Debug, PartialEq)]
pub struct TransformOutput {
pub main: TransformOutputEnvironment,
pub test: TransformOutputEnvironment,
pub warnings: Vec<String>,
}
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
#[cfg_attr(feature = "serialization", serde(rename_all = "camelCase"))]
#[derive(Debug, PartialEq, Default)]
pub struct TransformOutputEnvironment {
pub entry_points: Vec<PathBuf>,
pub files: Vec<OutputFile>,
pub dependencies: Vec<Dependency>,
}
#[cfg_attr(feature = "serialization", derive(serde::Deserialize))]
#[cfg_attr(
feature = "serialization",
serde(tag = "kind", content = "value", rename_all = "camelCase")
)]
#[derive(Clone, Debug)]
pub enum MappedSpecifier {
Package(PackageMappedSpecifier),
Module(ModuleSpecifier),
}
#[cfg_attr(feature = "serialization", derive(serde::Deserialize))]
#[cfg_attr(feature = "serialization", serde(rename_all = "camelCase"))]
#[derive(Clone, Debug)]
pub struct PackageMappedSpecifier {
/// Name being mapped to.
pub name: String,
/// Version of the specifier. Leave this blank to not have a
/// dependency (ex. Node modules like "fs")
pub version: Option<String>,
/// Sub path of the npm package to use in the module specifier.
pub sub_path: Option<String>,
}
impl PackageMappedSpecifier {
pub(crate) fn module_specifier_text(&self) -> String {
if let Some(path) = &self.sub_path {
format!("{}/{}", self.name, path)
} else {
self.name.clone()
}
}
}
#[cfg_attr(feature = "serialization", derive(serde::Deserialize))]
#[cfg_attr(feature = "serialization", serde(rename_all = "camelCase"))]
#[derive(Clone, Debug)]
pub struct GlobalName {
/// Name to use as the global name.
pub name: String,
/// Optional name of the export from the package.
pub export_name: Option<String>,
/// Whether this is a name that only exists as a type declaration.
pub type_only: bool,
}
#[cfg_attr(feature = "serialization", derive(serde::Deserialize))]
#[cfg_attr(
feature = "serialization",
serde(tag = "kind", content = "value", rename_all = "camelCase")
)]
#[derive(Clone, Debug)]
pub enum Shim {
Package(PackageShim),
Module(ModuleShim),
}
impl Shim {
pub fn global_names(&self) -> &Vec<GlobalName> {
match self {
Shim::Package(shim) => &shim.global_names,
Shim::Module(shim) => &shim.global_names,
}
}
pub(crate) fn maybe_specifier(&self) -> Option<ModuleSpecifier> {
match self {
Shim::Package(_) => None,
Shim::Module(module) => module.maybe_specifier(),
}
}
}
#[cfg_attr(feature = "serialization", derive(serde::Deserialize))]
#[cfg_attr(feature = "serialization", serde(rename_all = "camelCase"))]
#[derive(Clone, Debug)]
pub struct PackageShim {
/// Information about the npm package to use for this shim.
pub package: PackageMappedSpecifier,
  /// Npm package to include in the dev dependencies that has the type declarations.
pub types_package: Option<Dependency>,
/// Names this shim provides that will be injected in global contexts.
pub global_names: Vec<GlobalName>,
}
#[cfg_attr(feature = "serialization", derive(serde::Deserialize))]
#[cfg_attr(feature = "serialization", serde(rename_all = "camelCase"))]
#[derive(Clone, Debug)]
pub struct ModuleShim {
/// Information about the module or bare specifier to use for this shim.
pub module: String,
/// Names this shim provides that will be injected in global contexts.
pub global_names: Vec<GlobalName>,
}
impl ModuleShim {
pub fn maybe_specifier(&self) -> Option<ModuleSpecifier> {
if self.module.starts_with("node:") {
None
} else {
ModuleSpecifier::parse(&self.module).ok()
}
}
}
// make sure to update `ScriptTarget` in the TS code when changing the names on this
#[cfg_attr(feature = "serialization", derive(serde::Deserialize))]
#[derive(Clone, Copy, Debug)]
pub enum ScriptTarget {
ES3 = 0,
ES5 = 1,
ES2015 = 2,
ES2016 = 3,
ES2017 = 4,
ES2018 = 5,
ES2019 = 6,
ES2020 = 7,
ES2021 = 8,
Latest = 9,
}
pub struct TransformOptions {
pub entry_points: Vec<ModuleSpecifier>,
pub test_entry_points: Vec<ModuleSpecifier>,
pub shims: Vec<Shim>,
pub test_shims: Vec<Shim>,
pub loader: Option<Box<dyn Loader>>,
/// Maps specifiers to an npm package or module.
pub specifier_mappings: HashMap<ModuleSpecifier, MappedSpecifier>,
/// Version of ECMAScript that the final code will target.
/// This controls whether certain polyfills should occur.
pub target: ScriptTarget,
/// Optional import map.
pub import_map: Option<ModuleSpecifier>,
}
struct EnvironmentContext<'a> {
environment: TransformOutputEnvironment,
searching_polyfills: Vec<Box<dyn Polyfill>>,
found_polyfills: Vec<Box<dyn Polyfill>>,
shim_file_specifier: &'a ModuleSpecifier,
shim_global_names: HashSet<&'a str>,
shims: &'a Vec<Shim>,
used_shim: bool,
}
pub async fn transform(options: TransformOptions) -> Result<TransformOutput> {
if options.entry_points.is_empty() {
anyhow::bail!("at least one entry point must be specified");
}
let (module_graph, specifiers) =
crate::graph::ModuleGraph::build_with_specifiers(ModuleGraphOptions {
entry_points: options
.entry_points
.iter()
.cloned()
.chain(options.shims.iter().filter_map(|s| s.maybe_specifier()))
.collect(),
test_entry_points: options
.test_entry_points
.iter()
.cloned()
.chain(
options
.test_shims
.iter()
.filter_map(|s| s.maybe_specifier()),
)
.collect(),
specifier_mappings: &options.specifier_mappings,
loader: options.loader,
import_map: options.import_map,
})
.await?;
let mappings = Mappings::new(&module_graph, &specifiers)?;
let all_package_specifier_mappings: HashMap<ModuleSpecifier, String> =
specifiers
.main
.mapped
.iter()
.chain(specifiers.test.mapped.iter())
.map(|m| (m.0.clone(), m.1.module_specifier_text()))
.collect();
let mut warnings = get_declaration_warnings(&specifiers);
let mut main_env_context = EnvironmentContext {
environment: TransformOutputEnvironment {
entry_points: options
.entry_points
.iter()
.map(|p| mappings.get_file_path(p).to_owned())
.collect(),
dependencies: get_dependencies(specifiers.main.mapped),
..Default::default()
},
searching_polyfills: polyfills_for_target(options.target),
found_polyfills: Default::default(),
shim_file_specifier: &SYNTHETIC_SPECIFIERS.shims,
shim_global_names: options
.shims
.iter()
.flat_map(|s| s.global_names().iter().map(|s| s.name.as_str()))
.collect(),
shims: &options.shims,
used_shim: false,
};
let mut test_env_context = EnvironmentContext {
environment: TransformOutputEnvironment {
entry_points: options
.test_entry_points
.iter()
.map(|p| mappings.get_file_path(p).to_owned())
.collect(),
dependencies: get_dependencies(specifiers.test.mapped),
..Default::default()
},
searching_polyfills: polyfills_for_target(options.target),
found_polyfills: Default::default(),
shim_file_specifier: &SYNTHETIC_TEST_SPECIFIERS.shims,
shim_global_names: options
.test_shims
.iter()
.flat_map(|s| s.global_names().iter().map(|s| s.name.as_str()))
.collect(),
shims: &options.test_shims,
used_shim: false,
};
for specifier in specifiers
.local
.iter()
.chain(specifiers.remote.iter())
.chain(specifiers.types.iter().map(|(_, d)| &d.selected.specifier))
{
let module = module_graph.get(specifier);
let env_context = if specifiers.test_modules.contains(specifier) {
&mut test_env_context
} else {
&mut main_env_context
};
let file_text = match module.kind {
ModuleKind::Esm => {
let parsed_source = module
.maybe_parsed_source
.as_ref()
.ok_or_else(|| anyhow!("Expected source for: {}", module.specifier))?
.clone();
let text_changes = parsed_source
.with_view(|program| -> Result<Vec<TextChange>> {
let top_level_context = parsed_source.top_level_context();
let ignore_line_indexes =
get_ignore_line_indexes(parsed_source.specifier(), &program);
let top_level_decls =
get_top_level_decls(&program, top_level_context);
warnings.extend(ignore_line_indexes.warnings);
fill_polyfills(&mut FillPolyfillsParams {
found_polyfills: &mut env_context.found_polyfills,
searching_polyfills: &mut env_context.searching_polyfills,
program: &program,
top_level_context: parsed_source.top_level_context(),
top_level_decls: &top_level_decls,
});
let mut text_changes = Vec::new();
// shim changes
{
let shim_relative_specifier = get_relative_specifier(
mappings.get_file_path(specifier),
mappings.get_file_path(env_context.shim_file_specifier),
);
let result =
get_global_text_changes(&GetGlobalTextChangesParams {
program: &program,
top_level_context,
shim_specifier: &shim_relative_specifier,
shim_global_names: &env_context.shim_global_names,
ignore_line_indexes: &ignore_line_indexes.line_indexes,
top_level_decls: &top_level_decls,
});
text_changes.extend(result.text_changes);
if result.imported_shim {
env_context.used_shim = true;
}
}
text_changes
.extend(get_deno_comment_directive_text_changes(&program));
text_changes.extend(get_import_exports_text_changes(
&GetImportExportsTextChangesParams {
specifier,
module_graph: &module_graph,
mappings: &mappings,
program: &program,
package_specifier_mappings: &all_package_specifier_mappings,
},
)?);
Ok(text_changes)
})
.with_context(|| {
format!(
"Issue getting text changes from {}",
parsed_source.specifier()
)
})?;
apply_text_changes(parsed_source.source().text_str(), text_changes)
}
ModuleKind::Asserted => {
if let Some(source) = &module.maybe_source {
format!(
"export default JSON.parse(`{}`);",
strip_bom(&source.replace('`', "\\`").replace("${", "\\${"))
)
} else {
continue;
}
}
_ => bail!(
"Not implemented module kind {:?} for {}",
module.kind,
module.specifier
),
};
let file_path = mappings.get_file_path(specifier).to_owned();
env_context.environment.files.push(OutputFile {
file_path,
file_text,
});
}
check_add_polyfill_file_to_environment(
&mut main_env_context,
mappings.get_file_path(&SYNTHETIC_SPECIFIERS.polyfills),
);
check_add_polyfill_file_to_environment(
&mut test_env_context,
mappings.get_file_path(&SYNTHETIC_TEST_SPECIFIERS.polyfills),
);
check_add_shim_file_to_environment(
&mut main_env_context,
mappings.get_file_path(&SYNTHETIC_SPECIFIERS.shims),
&mappings,
);
check_add_shim_file_to_environment(
&mut test_env_context,
mappings.get_file_path(&SYNTHETIC_TEST_SPECIFIERS.shims),
&mappings,
);
add_shim_types_packages_to_test_environment(
&mut test_env_context.environment,
options.shims.iter().chain(options.test_shims.iter()),
);
// Remove any dependencies from the test environment that
// are found in the main environment. Only check for exact
// matches in order to cause an npm install error if there
// are two dependencies with the same name, but different versions.
test_env_context.environment.dependencies = test_env_context
.environment
.dependencies
.into_iter()
.filter(|d| !main_env_context.environment.dependencies.contains(d))
.collect();
Ok(TransformOutput {
main: main_env_context.environment,
test: test_env_context.environment,
warnings,
})
}
fn add_shim_types_packages_to_test_environment<'a>(
test_output_env: &mut TransformOutputEnvironment,
all_shims: impl Iterator<Item = &'a Shim>,
) {
for shim in all_shims {
if let Shim::Package(shim) = shim {
if let Some(types_package) = &shim.types_package {
test_output_env.dependencies.push(types_package.clone())
}
}
}
}
fn check_add_polyfill_file_to_environment(
env_context: &mut EnvironmentContext,
polyfill_file_path: &Path,
) {
if let Some(polyfill_file_text) =
build_polyfill_file(&env_context.found_polyfills)
{
env_context.environment.files.push(OutputFile {
file_path: polyfill_file_path.to_path_buf(),
file_text: polyfill_file_text,
});
for entry_point in env_context.environment.entry_points.iter() {
if let Some(file) = env_context
.environment
.files
.iter_mut()
.find(|f| &f.file_path == entry_point)
{
prepend_statement_to_text(
&file.file_path,
&mut file.file_text,
&format!(
"import \"{}\";",
get_relative_specifier(&file.file_path, &polyfill_file_path)
),
);
}
}
}
}
fn check_add_shim_file_to_environment(
env_context: &mut EnvironmentContext,
shim_file_path: &Path,
mappings: &Mappings,
) {
if env_context.used_shim {
let shim_file_text =
build_shim_file(env_context.shims, shim_file_path, mappings);
env_context.environment.files.push(OutputFile {
file_path: shim_file_path.to_path_buf(),
file_text: shim_file_text,
});
for shim in env_context.shims.iter() {
if let Shim::Package(shim) = shim {
if !env_context
.environment
.dependencies
.iter()
.any(|d| d.name == shim.package.name)
{
if let Some(version) = &shim.package.version {
env_context.environment.dependencies.push(Dependency {
name: shim.package.name.to_string(),
version: version.clone(),
});
}
}
}
}
}
fn build_shim_file(
shims: &[Shim],
shim_file_path: &Path,
mappings: &Mappings,
) -> String {
    fn get_specifier_text(n: &GlobalName) -> String {
let name_text = if let Some(export_name) = &n.export_name {
format!("{} as {}", export_name, n.name)
} else {
n.name.to_string()
};
if n.type_only {
format!("type {}", name_text)
} else {
name_text
}
}
fn get_module_specifier_text(
shim: &Shim,
shim_file_path: &Path,
mappings: &Mappings,
) -> String {
match shim {
Shim::Package(shim) => shim.package.module_specifier_text(),
Shim::Module(shim) => match shim.maybe_specifier() {
Some(specifier) => {
let to = mappings.get_file_path(&specifier);
get_relative_specifier(shim_file_path, to)
}
None => shim.module.clone(),
},
}
}
let mut text = String::new();
for shim in shims.iter() {
let declaration_names = shim
.global_names()
.iter()
.filter(|n| !n.type_only)
.collect::<Vec<_>>();
let module_specifier_text =
get_module_specifier_text(shim, shim_file_path, mappings);
if !declaration_names.is_empty() {
text.push_str(&format!(
"import {{ {} }} from \"{}\";\n",
declaration_names
.into_iter()
          .map(get_specifier_text)
.collect::<Vec<_>>()
.join(", "),
&module_specifier_text,
));
}
text.push_str(&format!(
"export {{ {} }} from \"{}\";\n",
shim
.global_names()
.iter()
          .map(get_specifier_text)
.collect::<Vec<_>>()
.join(", "),
&module_specifier_text,
));
}
if !text.is_empty() {
text.push('\n');
}
text.push_str("const dntGlobals = {\n");
for global_name in shims.iter().flat_map(|s| s.global_names().iter()) {
if !global_name.type_only {
text.push_str(&format!(" {},\n", global_name.name));
}
}
text.push_str("};\n");
text.push_str("export const dntGlobalThis = createMergeProxy(globalThis, dntGlobals);\n\n");
text.push_str(
&include_str!("scripts/createMergeProxy.ts")
.replace("export function", "function"),
);
text
}
}
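
// Illustrative sketch (not part of the original source): for a
// hypothetical shim exposing `setTimeout` from a package named
// "@deno/shim-timers", `build_shim_file` above would emit roughly:
//
//   import { setTimeout } from "@deno/shim-timers";
//   export { setTimeout } from "@deno/shim-timers";
//
//   const dntGlobals = {
//     setTimeout,
//   };
//   export const dntGlobalThis = createMergeProxy(globalThis, dntGlobals);
//
// followed by the inlined `createMergeProxy` helper.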
fn get_dependencies(
mappings: BTreeMap<ModuleSpecifier, PackageMappedSpecifier>,
) -> Vec<Dependency> {
let mut dependencies = mappings
.into_iter()
.filter_map(|entry| {
if let Some(version) = entry.1.version {
Some(Dependency {
name: entry.1.name,
version,
})
} else {
None
}
})
.collect::<Vec<_>>();
dependencies.sort_by(|a, b| a.name.cmp(&b.name));
  dependencies.dedup(); // dedup only removes consecutive duplicates, hence the sort above
dependencies
}
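
#[cfg(test)]
mod get_dependencies_examples {
  // Illustrative sketch (not part of the original tests): `Vec::dedup`
  // only removes *consecutive* duplicates, which is why `get_dependencies`
  // sorts by name first. Equal name/version pairs collapse, while the same
  // name with different versions survives so that `npm install` surfaces
  // the conflict. Assumes `Dependency` derives `PartialEq`, as the `dedup`
  // call above requires.
  use super::Dependency;

  #[test]
  fn dedup_requires_sorted_input() {
    let mut deps = vec![
      Dependency {
        name: "b".to_string(),
        version: "1.0.0".to_string(),
      },
      Dependency {
        name: "a".to_string(),
        version: "1.0.0".to_string(),
      },
      Dependency {
        name: "a".to_string(),
        version: "1.0.0".to_string(),
      },
    ];
    deps.sort_by(|a, b| a.name.cmp(&b.name));
    deps.dedup(); // only removes consecutive duplicates
    assert_eq!(deps.len(), 2);
  }
}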
fn get_declaration_warnings(specifiers: &Specifiers) -> Vec<String> {
let mut messages = Vec::new();
for (code_specifier, d) in specifiers.types.iter() {
if d.selected.referrer.scheme() == "file" {
let local_referrers =
d.ignored.iter().filter(|d| d.referrer.scheme() == "file");
for dep in local_referrers {
        messages.push(get_dep_warning(
          code_specifier,
          dep,
          &d.selected,
          "Suppress this warning by having only one local file specify the declaration file for this module.",
        ));
}
} else {
for dep in d.ignored.iter() {
        messages.push(get_dep_warning(
          code_specifier,
          dep,
          &d.selected,
          "Suppress this warning by specifying a declaration file for this module locally via `@deno-types`.",
        ));
}
}
}
return messages;
fn get_dep_warning(
code_specifier: &ModuleSpecifier,
dep: &TypesDependency,
selected_dep: &TypesDependency,
post_message: &str,
) -> String {
format!("Duplicate declaration file found for {}\n Specified {} in {}\n Selected {}\n {}", code_specifier, dep.specifier, dep.referrer, selected_dep.specifier, post_message)
}
}
|
}
|
logout.go
|
package cmd
import (
"github.com/spf13/cobra"
"github.com/hookdeck/hookdeck-cli/pkg/logout"
"github.com/hookdeck/hookdeck-cli/pkg/validators"
)
type logoutCmd struct {
cmd *cobra.Command
all bool
}
func newLogoutCmd() *logoutCmd {
lc := &logoutCmd{}
lc.cmd = &cobra.Command{
Use: "logout",
Args: validators.NoArgs,
Short: "Logout of your Hookdeck account",
Long: `Logout of your Hookdeck account to setup the CLI`,
RunE: lc.runLogoutCmd,
}
lc.cmd.Flags().BoolVarP(&lc.all, "all", "a", false, "Clear credentials for all projects you are currently logged into.")
return lc
}
func (lc *logoutCmd) runLogoutCmd(cmd *cobra.Command, args []string) error {
if lc.all {
|
}
|
return logout.All(&Config)
}
return logout.Logout(&Config)
|
priorityjobqueue.go
|
package queue
import "log"
// priorityJobQueue is a priority queue of jobs.
type priorityJobQueue struct {
statusQueues map[string]*jobQueue
operations chan priorityQueueOperation
}
// priorityQueueOperation defines the interface for an operation on a priorityJobQueue
// e.g. reserve, delete etc.
type priorityQueueOperation interface {
doOperation(*priorityJobQueue)
}
// A priorityQueueOperationReponse is a response object from a priority queue operation.
type priorityQueueOperationReponse struct {
success bool
job *job
}
// newPriorityJobQueue creates a new priorityJobQueue.
func newPriorityJobQueue() *priorityJobQueue {
queue := &priorityJobQueue{
statusQueues: map[string]*jobQueue{
"reserved": nil,
"ready": nil,
"delayed": nil,
"buried": nil,
},
operations: make(chan priorityQueueOperation),
}
go queue.doOperations()
return queue
}
// getStatusQueue gets the correct job queue for the current status of the job.
func (p *priorityJobQueue) getStatusQueue(job *job) *jobQueue {
queue, ok := p.statusQueues[job.status]
if !ok {
log.Fatalf("Job %v has unknown status: %v\n", job.id, job.status)
}
if queue == nil {
// No queue yet made for the status so initialise one
p.statusQueues[job.status] = newJobQueue(job.priority)
queue = p.statusQueues[job.status]
}
return queue
}
// doOperations performs all operations in the queue one at a time reading from the operations channel.
func (p *priorityJobQueue) doOperations() {
for {
op, ok := <-p.operations
if !ok {
break
}
op.doOperation(p)
}
}
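
// usageExample is an illustrative sketch (not part of the original
// package): because doOperations serialises every operation onto a single
// goroutine, this call pattern is safe from any number of concurrent
// callers without extra locking. The *job value is assumed to be
// constructed elsewhere, since the job type lives in another file.
func usageExample(j *job) {
	q := newPriorityJobQueue()
	q.addJob(j) // enqueued under the job's current status
	if reserved, ok := q.reserveJob(); ok {
		q.deleteJob(reserved) // processing finished; drop the job
	}
}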
// addJob adds the given job to the queue.
func (p *priorityJobQueue) addJob(job *job) {
op := &priorityQueueAdd{
jobToAdd: job,
response: make(chan *priorityQueueOperationReponse),
}
p.operations <- op
// Wait for response before returning
_ = <-op.response
}
// A priorityQueueAdd encapsulates an add operation
type priorityQueueAdd struct {
jobToAdd *job
response chan *priorityQueueOperationReponse
}
// doOperation does the operation to add the job to the queue
func (o *priorityQueueAdd) doOperation(q *priorityJobQueue) {
statusQueue := q.getStatusQueue(o.jobToAdd)
statusQueue.addJob(o.jobToAdd)
o.response <- &priorityQueueOperationReponse{success: true}
}
// reserveJob gets the next ready job in the queue and reserves it.
// Second returned value is false if there is no job that can be reserved.
func (p *priorityJobQueue) reserveJob() (*job, bool) {
op := &priorityQueueReserve{
response: make(chan *priorityQueueOperationReponse),
}
p.operations <- op
// Wait for response before returning
opResponse := <-op.response
return opResponse.job, opResponse.success
}
// A priorityQueueReserve encapsulates a reserve operation
type priorityQueueReserve struct {
response chan *priorityQueueOperationReponse
}
// doOperation does the operation to reserve a job
func (o *priorityQueueReserve) doOperation(q *priorityJobQueue) {
statusQueue := q.statusQueues["ready"]
if statusQueue == nil {
o.response <- &priorityQueueOperationReponse{success: false}
return
}
reservedJob, ok := statusQueue.getNextJob()
if ok {
err := reservedJob.reserve()
if err != nil
|
newQueue := q.getStatusQueue(reservedJob)
newQueue.addJob(reservedJob)
o.response <- &priorityQueueOperationReponse{success: true, job: reservedJob}
return
}
o.response <- &priorityQueueOperationReponse{success: false}
return
}
// deleteJob deletes the given job from the queue
func (p *priorityJobQueue) deleteJob(job *job) {
op := &priorityQueueDelete{
jobToDelete: job,
response: make(chan *priorityQueueOperationReponse),
}
p.operations <- op
// Wait for response before returning
_ = <-op.response
}
// A priorityQueueDelete encapsulates a delete operation
type priorityQueueDelete struct {
jobToDelete *job
response chan *priorityQueueOperationReponse
}
// doOperation does the operation to delete a job
func (o *priorityQueueDelete) doOperation(q *priorityJobQueue) {
statusQueue := q.getStatusQueue(o.jobToDelete)
statusQueue.removeJob(o.jobToDelete)
o.response <- &priorityQueueOperationReponse{success: true}
}
|
{
log.Fatalf("Failed to reserve job %v from ready queue: %v\n", reservedJob.id, err.Error())
}
|
__init__.py
|
__title__ = 'saga_requests'
__description__ = 'Saga pattern implementation for sequential HTTP requests.'
__author__ = 'Kutay Aslan'
__author_email__ = '[email protected]'
__license__ = 'MIT'
|
from .saga_requests import SagaBuilder, SagaAction, SagaRequest, SagaRequestKwargs, SagaContext
|
__copyright__ = 'Copyright 2021 Kutay Aslan'
|
voucher.rs
|
use crate::{println, Vec, debug_println};
use super::attr::*;
use super::sid::{self, Sid, SidDisc};
use super::sid_data::SidData;
use super::cose_sig;
use super::cose_data::{CoseError, CborError, CoseData, COSE_SIGN_ONE_TAG};
pub use super::cose_data::SignatureAlgorithm;
use core::convert::TryFrom;
/// Errors that can be returned from `Voucher` functions.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum VoucherError {
CborFailure(CborError),
CoseFailure(CoseError),
InvalidArgument,
MalformedInput,
MissingAttributes,
SigningFailed,
UnexpectedCborType,
ValidationFailed,
}
/// A structure implementing both
/// ["Voucher Request"](https://www.ietf.org/archive/id/draft-ietf-anima-constrained-voucher-15.html#name-voucher-request-artifact)
/// and
/// ["Voucher"](https://www.ietf.org/archive/id/draft-ietf-anima-constrained-voucher-15.html#name-voucher-artifact)
/// artifacts of
/// [Constrained BRSKI](https://www.ietf.org/archive/id/draft-ietf-anima-constrained-voucher-15.html).
#[derive(PartialEq, Debug)]
pub struct Voucher {
sd: SidData,
cd: CoseData,
}
#[derive(Copy, Clone, PartialEq, Debug)]
enum VoucherType {
Vch, // 'voucher'
Vrq, // 'voucher request'
}
impl Voucher {
/// Creates a new, empty ["Voucher Request"](https://www.ietf.org/archive/id/draft-ietf-anima-constrained-voucher-15.html#name-voucher-request-artifact) instance.
///
/// # Examples
///
/// ```
/// use minerva_voucher::Voucher;
///
/// let mut vrq = Voucher::new_vrq();
/// ```
pub fn new_vrq() -> Self {
Self::new(VoucherType::Vrq)
}
/// Creates a new, empty ["Voucher"](https://www.ietf.org/archive/id/draft-ietf-anima-constrained-voucher-15.html#name-voucher-artifact) instance.
///
/// # Examples
///
/// ```
/// use minerva_voucher::Voucher;
///
/// let mut vch = Voucher::new_vch();
/// ```
pub fn new_vch() -> Self {
Self::new(VoucherType::Vch)
}
/// Returns `true` if the voucher is a ["Voucher Request"](https://www.ietf.org/archive/id/draft-ietf-anima-constrained-voucher-15.html#name-voucher-request-artifact) instance.
///
/// # Examples
///
/// ```
/// use minerva_voucher::Voucher;
///
/// let mut v = Voucher::new_vrq();
/// assert!(v.is_vrq());
/// ```
pub fn is_vrq(&self) -> bool {
self.sd.is_vrq()
}
/// Returns `true` if the voucher is a ["Voucher"](https://www.ietf.org/archive/id/draft-ietf-anima-constrained-voucher-15.html#name-voucher-artifact) instance.
///
/// # Examples
///
/// ```
/// use minerva_voucher::Voucher;
///
/// let mut v = Voucher::new_vch();
/// assert!(v.is_vch());
/// ```
pub fn is_vch(&self) -> bool {
!self.is_vrq()
}
fn new(ty: VoucherType) -> Self {
Self {
sd: match ty {
VoucherType::Vrq => SidData::new_vrq_cbor(),
VoucherType::Vch => SidData::new_vch_cbor(),
},
cd: CoseData::new(true),
}
}
/// Returns a reference to the attribute in the voucher, if any, that corresponds to the given attribute discriminant value.
///
/// # Examples
///
/// ```
/// use minerva_voucher::{Voucher, attr::*};
///
/// let mut vrq = Voucher::new_vrq();
/// vrq.set(Attr::CreatedOn(1475868702));
///
/// assert_eq!(vrq.get(ATTR_CREATED_ON), Some(&Attr::CreatedOn(1475868702)));
/// assert_eq!(vrq.get(ATTR_SERIAL_NUMBER), None);
/// ```
pub fn get(&self, adisc: AttrDisc) -> Option<&Attr> {
let sdisc = self.to_sid_disc(adisc)?;
for sid in self.sd.iter() {
if sid.disc() == sdisc {
return sid.as_attr();
}
}
None
}
/// Adds an attribute to the voucher, replacing the existing attribute, if any, that corresponds to the given one. Returns a `mut` reference to the voucher.
///
/// # Panics
///
/// Panics if an invalid voucher attribute is being set.
///
/// # Examples
///
/// ```
/// use minerva_voucher::{Voucher, attr::*};
///
/// let mut vrq = Voucher::new_vrq();
/// assert_eq!(vrq.get(ATTR_CREATED_ON), None);
///
/// vrq.set(Attr::CreatedOn(1475868702));
/// assert_eq!(vrq.get(ATTR_CREATED_ON), Some(&Attr::CreatedOn(1475868702)));
///
/// vrq.set(Attr::CreatedOn(1599086034));
/// assert_eq!(vrq.get(ATTR_CREATED_ON), Some(&Attr::CreatedOn(1599086034)));
///
/// // Panics because `Attr::PinnedDomainPubk` is invalid for a "voucher request".
/// // vrq.set(Attr::PinnedDomainPubk(vec![]));
/// ```
pub fn set(&mut self, attr: Attr) -> &mut Self {
let sdisc = self.to_sid_disc(attr.disc())
.ok_or(VoucherError::InvalidArgument)
.unwrap();
self.set_sid(Sid::try_from((attr.into_yang(), sdisc)).unwrap());
self
}
fn set_sid(&mut self, sid: Sid) -> &mut Self {
self.sd.replace(sid);
self
}
/// Removes an attribute from the voucher. Returns whether the attribute was present in the voucher.
///
/// # Examples
///
/// ```
/// use minerva_voucher::{Voucher, attr::*};
///
/// let mut vrq = Voucher::new_vrq();
/// vrq.set(Attr::CreatedOn(1475868702));
///
/// assert_eq!(vrq.remove(ATTR_CREATED_ON), true);
/// assert_eq!(vrq.remove(ATTR_CREATED_ON), false);
/// ```
pub fn remove(&mut self, adisc: AttrDisc) -> bool {
if let Some(sdisc) = self.to_sid_disc(adisc) {
self.sd.remove(sdisc)
} else {
false
}
}
/// Removes and returns the attribute in the voucher, if any, that corresponds to the given attribute discriminant value.
///
/// # Examples
///
/// ```
/// use minerva_voucher::{Voucher, attr::*};
///
/// let mut vrq = Voucher::new_vrq();
///
/// vrq.set(Attr::CreatedOn(1475868702));
/// assert_eq!(vrq.take(ATTR_CREATED_ON), Some(Attr::CreatedOn(1475868702)));
/// assert_eq!(vrq.take(ATTR_CREATED_ON), None);
///
/// let sn = b"00-D0-E5-F2-00-02";
/// vrq.set(Attr::SerialNumber(sn.to_vec()));
/// assert_eq!(vrq.take(ATTR_SERIAL_NUMBER), Some(Attr::SerialNumber(sn.to_vec())));
/// assert_eq!(vrq.take(ATTR_SERIAL_NUMBER), None);
/// ```
pub fn take(&mut self, adisc: AttrDisc) -> Option<Attr> {
self.sd
.take(self.to_sid_disc(adisc)?)
.and_then(|sid| sid.into_attr())
}
fn to_sid_disc(&self, adisc: AttrDisc) -> Option<SidDisc> {
Attr::to_sid_disc(adisc, self.is_vrq())
}
/// Returns the number of attributes in the voucher.
///
/// # Examples
///
/// ```
/// use minerva_voucher::{Voucher, attr::Attr};
///
/// let mut vrq = Voucher::new_vrq();
/// assert_eq!(vrq.len(), 0);
/// vrq.set(Attr::CreatedOn(1475868702));
/// assert_eq!(vrq.len(), 1);
/// ```
pub fn len(&self) -> usize {
self.iter().count()
}
/// Gets an iterator that visits the attributes in the voucher.
///
/// # Examples
///
/// ```
/// use minerva_voucher::{Voucher, attr::{Attr, Assertion}};
///
/// let mut vrq = Voucher::new_vrq();
///
/// vrq.set(Attr::Assertion(Assertion::Proximity))
/// .set(Attr::CreatedOn(1599086034))
/// .set(Attr::SerialNumber(b"00-D0-E5-F2-00-02".to_vec()));
///
/// let mut vrq_iter = vrq.iter();
/// assert!(vrq_iter.next().is_some());
/// assert!(vrq_iter.next().is_some());
/// assert!(vrq_iter.next().is_some());
/// assert!(vrq_iter.next().is_none());
/// ```
pub fn iter(&self) -> impl Iterator<Item = &Attr> + '_
|
fn iter_with_sid(&self) -> impl Iterator<Item = (&Attr, sid::SidDisc)> + '_ {
self.sd.iter()
.filter_map(|sid| Some((sid.as_attr()?, sid.disc())))
}
/// Returns a tuple of references to the signature and its corresponding algorithm in the voucher, if any.
///
/// # Examples
///
/// ```
/// use minerva_voucher::{Voucher, SignatureAlgorithm};
/// use core::convert::TryFrom;
///
/// static VCH_F2_00_02: &[u8] = core::include_bytes!(
/// concat!(env!("CARGO_MANIFEST_DIR"), "/data/00-D0-E5-F2-00-02/voucher_00-D0-E5-F2-00-02.vch"));
///
/// let vch = Voucher::new_vch();
/// assert_eq!(vch.get_signature(), None);
///
/// let vch = Voucher::try_from(VCH_F2_00_02).unwrap();
/// let (signature, alg) = vch.get_signature().unwrap();
/// assert_eq!(signature.len(), 64);
/// assert_eq!(*alg, SignatureAlgorithm::ES256);
/// ```
pub fn get_signature(&self) -> Option<(&[u8], &SignatureAlgorithm)> {
let sig = self.cd.sig();
        if !sig.signature.is_empty() {
Some((&sig.signature, &sig.signature_type))
} else {
None
}
}
/// Encodes the voucher into CBOR. Returns a CBOR byte string.
///
/// # Errors
///
/// If the voucher is missing any mandatory attributes, then an error is returned.
///
/// # Examples
///
/// See [Encoding a `Voucher` into CBOR](index.html#2-encoding-a-voucher-into-cbor).
pub fn serialize(&self) -> Result<Vec<u8>, VoucherError> {
if self.get(ATTR_ASSERTION).is_none() {
debug_println!("serialize(): `Attr::Assertion` is mandatory; but missing");
return Err(VoucherError::MissingAttributes);
}
if self.get(ATTR_SERIAL_NUMBER).is_none() {
debug_println!("serialize(): `Attr::SerialNumber` is mandatory; but missing");
return Err(VoucherError::MissingAttributes);
}
let content = self.cd.get_content().ok().or_else(|| {
use sid::Cbor;
self.cd.generate_content(&self.sd.serialize().unwrap()).ok()
});
self.cd.encode(content)
.or_else(|ce| Err(VoucherError::CoseFailure(ce)))
}
/// Returns a reference to the signer certificate in the voucher, if any.
///
/// # Examples
///
/// ```
/// use minerva_voucher::Voucher;
///
/// let mut vrq = Voucher::new_vrq();
///
/// assert_eq!(vrq.get_signer_cert(), None);
/// vrq.set_signer_cert(&[4, 186, 197, 177, 28, 173, 143, 153, 249, 199, 43, 5, 207, 75, 158, 38, 210, 68, 220, 24, 159, 116, 82, 40, 37, 90, 33, 154, 134, 214, 160, 158, 255, 32, 19, 139, 248, 45, 193, 182, 213, 98, 190, 15, 165, 74, 183, 128, 74, 58, 100, 182, 215, 44, 207, 237, 107, 111, 182, 237, 40, 187, 252, 17, 126]);
/// assert_eq!(vrq.get_signer_cert().unwrap().len(), 65);
/// ```
pub fn get_signer_cert(&self) -> Option<&[u8]> {
self.cd.get_signer_cert()
}
    /// Adds a signer certificate to the voucher. Returns a `mut` reference to the voucher.
///
/// # Examples
///
/// ```
/// use minerva_voucher::Voucher;
///
/// let mut vrq = Voucher::new_vrq();
///
/// assert_eq!(vrq.get_signer_cert(), None);
/// vrq.set_signer_cert(&[4, 186, 197, 177, 28, 173, 143, 153, 249, 199, 43, 5, 207, 75, 158, 38, 210, 68, 220, 24, 159, 116, 82, 40, 37, 90, 33, 154, 134, 214, 160, 158, 255, 32, 19, 139, 248, 45, 193, 182, 213, 98, 190, 15, 165, 74, 183, 128, 74, 58, 100, 182, 215, 44, 207, 237, 107, 111, 182, 237, 40, 187, 252, 17, 126]);
/// assert_eq!(vrq.get_signer_cert().unwrap().len(), 65);
/// ```
pub fn set_signer_cert(&mut self, cert: &[u8]) -> &mut Self {
self.cd.set_signer_cert(cert);
self
}
#[cfg(test)]
pub fn get_cose_content(&self) -> Option<Vec<u8>> {
self.cd.get_content().ok()
}
fn update_cose_content(&mut self) -> &mut Self {
use sid::Cbor;
self.cd.set_content(&self.sd.serialize().unwrap());
self
}
/// Interfaces with meta data required for signing the voucher.
/// This method needs to be used when implementing the [`Sign`](crate::Sign) trait.
///
/// Returns a tuple of
/// - a `mut` reference to the `Vec<u8>` data where a new signature is being written, and
/// - a reference to [the CBOR-encoded `COSE_Sign1` structure](https://datatracker.ietf.org/doc/html/rfc8152#section-4.2) for which signing is performed.
///
/// # Examples
///
/// See [the default implementation of the `Sign` trait](../src/minerva_voucher/sign.rs.html).
pub fn to_sign(&mut self, alg: SignatureAlgorithm) -> (&mut Vec<u8>, &[u8]) {
self.cd.set_alg(alg);
use core::ops::DerefMut;
let sig = self.update_cose_content()
.cd.sig_mut().deref_mut();
(&mut sig.signature, &sig.to_verify)
}
/// Interfaces with meta data required for validating the voucher.
/// This method needs to be used when implementing the [`Validate`](crate::Validate) trait.
///
/// Returns a tuple of
/// - a reference to the signer certificate in the voucher, if any,
/// - a tuple of references to the signature and its corresponding algorithm in the voucher, if any, and
/// - a reference to [the CBOR-encoded `COSE_Sign1` structure](https://datatracker.ietf.org/doc/html/rfc8152#section-4.2) for which validation is performed.
///
/// # Examples
///
/// See [the default implementation of the `Validate` trait](../src/minerva_voucher/validate.rs.html).
pub fn to_validate(&self) -> (Option<&[u8]>, Option<(&[u8], &SignatureAlgorithm)>, &[u8]) {
(self.get_signer_cert(), self.get_signature(), &self.cd.sig().to_verify)
}
/// Prints internal representation of the voucher for debugging purposes.
///
/// # Examples
///
/// ```
/// use minerva_voucher::Voucher;
/// use core::convert::TryFrom;
///
/// static VCH_JADA: &[u8] = core::include_bytes!(
/// concat!(env!("CARGO_MANIFEST_DIR"), "/data/jada/voucher_jada123456789.vch"));
///
/// let vch = Voucher::try_from(VCH_JADA).unwrap();
///
/// vch.dump();
/// /* stdout:
/// ======== Voucher::dump()
/// ==== SidData::dump()
/// Voucher({VchTopLevel(VoucherVoucher), VchAssertion(Enumeration(Assertion(Proximity))), VchCreatedOn(DateAndTime(CreatedOn(1475868702))), VchExpiresOn(DateAndTime(ExpiresOn(1506816000))), VchNonce(Binary(Nonce([97, 98, 99, 100, 49, 50, 51, 52, 53]))), VchPinnedDomainPubk(Binary(PinnedDomainPubk([77, 70, 107, 119, 69, 119, 89, 72, 75, 111, 90, 73, 122, 106, 48, 67, 65, 81, 89, 73, 75, 111, 90, 73, 122, 106, 48, 68, 65, 81, 99, 68, 81, 103, 65, 69, 108, 109, 86, 81, 99, 106, 83, 54, 110, 43, 88, 100, 53, 108, 47, 50, 56, 73, 70, 118, 54, 85, 105, 101, 103, 81, 119, 83, 66, 122, 116, 71, 106, 53, 100, 107, 75, 50, 77, 65, 106, 81, 73, 80, 86, 56, 108, 56, 108, 72, 43, 69, 106, 76, 73, 79, 89, 100, 98, 74, 105, 73, 48, 86, 116, 69, 73, 102, 49, 47, 74, 113, 116, 43, 84, 79, 66, 102, 105, 110, 84, 78, 79, 76, 79, 103, 61, 61]))), VchSerialNumber(String(SerialNumber([74, 65, 68, 65, 49, 50, 51, 52, 53, 54, 55, 56, 57])))})
/// ====
/// ==== CoseSig::dump()
/// signature_type: ES256
/// signature: [len=64] [234, 232, 104, 236, 193, 118, 136, 55, 102, 197, 220, 91, 165, 184, 220, 162, 93, 171, 60, 46, 86, 165, 81, 206, 87, 5, 183, 147, 145, 67, 72, 225, 217, 85, 56, 95, 66, 111, 229, 137, 148, 12, 142, 214, 58, 86, 83, 68, 254, 186, 154, 162, 228, 175, 25, 168, 102, 60, 251, 36, 170, 105, 99, 194]
/// signer_cert: [len=65] [4, 186, 197, 177, 28, 173, 143, 153, 249, 199, 43, 5, 207, 75, 158, 38, 210, 68, 220, 24, 159, 116, 82, 40, 37, 90, 33, 154, 134, 214, 160, 158, 255, 32, 19, 139, 248, 45, 193, 182, 213, 98, 190, 15, 165, 74, 183, 128, 74, 58, 100, 182, 215, 44, 207, 237, 107, 111, 182, 237, 40, 187, 252, 17, 126]
/// to_verify: [len=202] [132, 106, 83, 105, 103, 110, 97, 116, 117, 114, 101, 49, 67, 161, 1, 38, 64, 88, 183, 161, 25, 9, 147, 166, 1, 105, 112, 114, 111, 120, 105, 109, 105, 116, 121, 2, 193, 26, 87, 247, 248, 30, 4, 193, 26, 89, 208, 48, 0, 11, 109, 74, 65, 68, 65, 49, 50, 51, 52, 53, 54, 55, 56, 57, 7, 105, 97, 98, 99, 100, 49, 50, 51, 52, 53, 9, 120, 124, 77, 70, 107, 119, 69, 119, 89, 72, 75, 111, 90, 73, 122, 106, 48, 67, 65, 81, 89, 73, 75, 111, 90, 73, 122, 106, 48, 68, 65, 81, 99, 68, 81, 103, 65, 69, 108, 109, 86, 81, 99, 106, 83, 54, 110, 43, 88, 100, 53, 108, 47, 50, 56, 73, 70, 118, 54, 85, 105, 101, 103, 81, 119, 83, 66, 122, 116, 71, 106, 53, 100, 107, 75, 50, 77, 65, 106, 81, 73, 80, 86, 56, 108, 56, 108, 72, 43, 69, 106, 76, 73, 79, 89, 100, 98, 74, 105, 73, 48, 86, 116, 69, 73, 102, 49, 47, 74, 113, 116, 43, 84, 79, 66, 102, 105, 110, 84, 78, 79, 76, 79, 103, 61, 61]
/// ====
/// ========
/// */
/// ```
pub fn dump(&self) {
println!("======== Voucher::dump()");
self.sd.dump();
self.cd.dump();
println!("========");
}
#[cfg(test)]
pub fn dump_and_panic(&self) {
self.dump();
panic!();
}
}
impl TryFrom<&[u8]> for Voucher {
type Error = VoucherError;
/// Decodes a CBOR-encoded voucher. Returns a `Voucher`.
///
/// # Errors
///
/// If the voucher cannot be decoded, then an error is returned.
///
/// # Examples
///
/// See [Decoding a CBOR-encoded voucher into a `Voucher`](index.html#3-decoding-a-cbor-encoded-voucher-into-a-voucher).
fn try_from(raw: &[u8]) -> Result<Self, Self::Error> {
let (tag, cd) = CoseData::decode(raw).or_else(|ce| {
debug_println!("Failed to decode raw voucher");
Err(VoucherError::CoseFailure(ce))
})?;
if tag != COSE_SIGN_ONE_TAG {
debug_println!("Only `CoseSign1` vouchers are supported");
return Err(VoucherError::CoseFailure(CoseError::UnexpectedTag));
}
let content = cd.get_content().or_else(|ce| {
debug_println!("Failed to get `content`");
Err(VoucherError::CoseFailure(ce))
})?;
let sidhash = cose_sig::decode(&content).or_else(|ce| {
debug_println!("Failed to decode `content`");
Err(VoucherError::CborFailure(ce))
})?;
SidData::try_from(sidhash)
.and_then(|sd| Ok(Self { sd, cd }))
}
}
|
{
self.iter_with_sid()
.map(|(attr, _)| attr)
}
|
apply.go
|
// Package rules contains all of the functions for hashcat rules
package rules
//TODO add documentation
// https://godoc.org/github.com/coolbry95/passutils/ruleprocessor/rules
import (
"unicode"
)
// reason for `var temp []rune` and `copy(temp, word)`:
// slices are passed by value but reference their underlying array,
// so if a passed slice is manipulated like word[i] = 'j', the
// caller's word variable changes too.
// may change to not using copy; then changes would be applied to the
// underlying slice
// global memorized word
var Saved []rune
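
// aliasingExample is an illustrative sketch (not part of the original
// package): it shows why the copy-first pattern described above matters.
// Slice headers share their backing array, so an in-place write through
// one header is visible through every header that aliases it.
func aliasingExample() {
	word := []rune("abc")
	alias := word  // alias shares word's backing array
	alias[0] = 'j' // word now reads "jbc" as well
	independent := make([]rune, len(word))
	copy(independent, word) // fresh backing array
	independent[0] = 'k'    // word is unaffected
}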
// ToNum converts '0'-'9' and 'A'-'Z' to ints (0-35)
func ToNum(char rune) int {
if char <= 57 && char >= 48 {
return int(char) - 48
}
return int(char) - 65 + 10
}
func ToNumByte(num uint8) int {
return ToNum(rune(num))
}
func ToAlpha(num int) rune {
if num < 10 {
return rune(48 + num)
}
return rune(65 + num - 10)
}
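
// positionEncodingExample is an illustrative sketch (not part of the
// original package): hashcat rules encode positions 0-35 as the characters
// '0'-'9' and 'A'-'Z', and ToNum/ToAlpha are inverses over that range.
func positionEncodingExample() {
	_ = ToNum('7')  // 7
	_ = ToNum('A')  // 10
	_ = ToAlpha(7)  // '7'
	_ = ToAlpha(10) // 'A'
}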
// :
func Nothing(word []rune) []rune {
return word
}
// l
func Lowercase(word []rune) []rune {
if len(word) == 0 {
return word
}
// keep strings.Tolower() and then convert back to rune?
// only operates on single rune
for i, v := range word {
word[i] = unicode.ToLower(v)
}
return word
}
// u
func Uppercase(word []rune) []rune {
if len(word) == 0 {
return word
}
for i, v := range word {
word[i] = unicode.ToUpper(v)
}
return word
}
// c
func Capitalize(word []rune) []rune {
if len(word) == 0 {
return word
}
for i, v := range word {
if i == 0 {
word[i] = unicode.ToUpper(v)
continue
}
word[i] = unicode.ToLower(v)
}
return word
}
// C
func InvertCapitalize(word []rune) []rune {
if len(word) == 0 {
return word
}
for i, v := range word {
if i == 0 {
word[i] = unicode.ToLower(v)
continue
}
word[i] = unicode.ToUpper(v)
}
return word
}
// t
func ToggleCase(word []rune) []rune {
if len(word) == 0
|
for i, v := range word {
switch {
case unicode.IsUpper(v):
word[i] = unicode.ToLower(v)
case unicode.IsLower(v):
word[i] = unicode.ToUpper(v)
default:
word[i] = v
}
}
return word
}
// TN
func ToggleAt(word []rune, n int) []rune {
if len(word) == 0 {
return word
}
if len(word)-1 < n {
return word
}
switch {
case unicode.IsUpper(word[n]):
word[n] = unicode.ToLower(word[n])
case unicode.IsLower(word[n]):
word[n] = unicode.ToUpper(word[n])
default:
return word
}
return word
}
// https://stackoverflow.com/questions/1752414/how-to-reverse-a-string-in-go
// r
func Reverse(word []rune) []rune {
if len(word) == 0 {
return word
}
for i, j := 0, len(word)-1; i < j; i, j = i+1, j-1 {
word[i], word[j] = word[j], word[i]
}
return word
}
// d
func Duplicate(word []rune) []rune {
if len(word) == 0 {
return word
}
temp := make([]rune, len(word), len(word)*2)
copy(temp, word)
temp = append(temp, word[:]...)
return temp
}
// pN
func DuplicateN(word []rune, n int) []rune {
if len(word) == 0 {
return word
}
temp := make([]rune, 0, len(word)*n)
for i := 0; i <= n; i++ {
temp = append(temp, word[:]...)
}
return temp
}
// f didn't change yet
func Reflect(word []rune) []rune {
if len(word) == 0 {
return word
}
firstHalf := make([]rune, len(word))
copy(firstHalf, word)
temp := make([]rune, len(word), len(word))
for i, j := len(word)-1, 0; i >= 0; i, j = i-1, j+1 {
temp[j] = firstHalf[i]
}
firstHalf = append(firstHalf, temp[:]...)
return firstHalf
}
// {
func RotateLeft(word []rune) []rune {
if len(word) == 0 {
return word
}
if len(word)-1 < 0 {
return word
}
temp := make([]rune, 0, len(word))
temp = append(temp, word[1:]...)
temp = append(temp, word[0])
return temp
}
// }
func RotateRight(word []rune) []rune {
if len(word) == 0 {
return word
}
if len(word)-1 == 0 {
return word
}
temp := make([]rune, 0, len(word))
temp = append(temp, word[len(word)-1])
temp = append(temp, word[:len(word)-1]...)
return temp
}
// $X
func AppendCharacter(word []rune, char rune) []rune {
if len(word) == 0 {
return word
}
word = append(word, char)
return word
}
// ^X
func PrependCharacter(word []rune, char rune) []rune {
if len(word) == 0 {
return word
}
temp := make([]rune, 1, len(word)+1)
temp[0] = char
temp = append(temp, word[:]...)
return temp
}
// [
func TruncateLeft(word []rune) []rune {
if len(word) == 0 {
return word
}
return word[1:]
}
// ]
func TruncateRight(word []rune) []rune {
if len(word) == 0 {
return word
}
return word[:len(word)-1]
}
// DN
func DeleteN(word []rune, n int) []rune {
if len(word) == 0 {
return word
}
if len(word)-1 < n {
return word
}
/*
temp := make([]rune, 0, len(word)-n)
temp = append(temp, word[:n]...)
temp = append(temp, word[n+1:]...)
*/
word = append(word[:n], word[n+1:]...)
return word
}
// xNM
func ExtractRange(word []rune, n, m int) []rune {
if len(word) == 0 {
return word
}
	if n+m > len(word) {
return nil
}
word = word[n : m+n]
return word
}
// ONM
func OmitRange(word []rune, n, m int) []rune {
if len(word) == 0 {
return word
}
	if n+m > len(word) || n == m {
return word
}
	// FIXME: behaviour when the range reaches the end of the word is
	// unclear; should deleting through the last rune empty the word?
/*
if m == len(word) -1 {
return ""
}
*/
/*
temp := make([]rune, 0, n+m)
temp = append(temp, word[:n]...)
temp = append(temp, word[m+n:]...)
*/
word = append(word[:n], word[m+n:]...)
return word
}
// iNX
func InsertAtN(word []rune, n int, char rune) []rune {
if len(word) == 0 {
return word
}
	// note: the guard must allow n == len(word), since inserting at the
	// very end is valid; using len(word)-1 here breaks this function
if len(word) < n {
return word
}
temp := make([]rune, 0, len(word)+1)
temp = append(temp, word[:n]...)
temp = append(temp, char)
temp = append(temp, word[n:]...)
//word = append(word[:n], char, word[n:]...)
//word = append(word[:n], append([]rune{char}, word[n:]...)...)
return temp
}
// oNX
func OverwriteAtN(word []rune, n int, char rune) []rune {
if len(word) == 0 {
return word
}
if len(word)-1 < n {
return word
}
/*
temp := make([]rune, 0, len(word)+1)
temp = append(temp, word[:n]...)
temp = append(temp, char)
temp = append(temp, word[n+1:]...)
*/
word[n] = char
return word
}
// 'N
func TruncateAtN(word []rune, n int) []rune {
if len(word) == 0 {
return word
}
if len(word)-1 < n {
return word
}
	// FIXME: truncating at 0 arguably ought to return an empty word,
	// but we currently leave it unchanged
	if n == 0 {
		return word
	}
	// FIXME: unclear whether a copy should be returned instead
	// (see the commented-out variant below)
/*
temp := make([]rune, len(word))
copy(temp, word)
return temp[:n]
*/
word = word[:n]
return word
}
// sXY
func Replace(word []rune, x, y rune) []rune {
if len(word) == 0 {
return word
}
for i := range word {
if word[i] == x {
word[i] = y
}
}
return word
}
// @X
func Purge(word []rune, x rune) []rune {
if len(word) == 0 {
return word
}
// use cap len(word) because worse case is nothing is purged
temp := make([]rune, 0, len(word))
for i := 0; i < len(word); i++ {
if word[i] == x {
continue
}
temp = append(temp, word[i])
}
return temp
}
// zN
func DuplicateFirstN(word []rune, n int) []rune {
if len(word) == 0 {
return word
}
temp := make([]rune, 0, len(word)+n)
for i := 0; i < n; i++ {
temp = append(temp, word[0])
}
temp = append(temp, word[:]...)
return temp
}
// ZN
func DuplicateLastN(word []rune, n int) []rune {
if len(word) == 0 {
return word
}
temp := make([]rune, len(word), len(word)+n)
copy(temp, word)
for i := 0; i < n; i++ {
temp = append(temp, word[len(word)-1])
}
return temp
}
// q
func DuplicateAll(word []rune) []rune {
if len(word) == 0 {
return word
}
temp := make([]rune, 0, len(word)*2)
for i := 0; i < len(word); i++ {
temp = append(temp, word[i])
temp = append(temp, word[i])
}
return temp
}
// XNMI
func ExtractMemory(word []rune, n, m, i int) []rune {
if len(word) == 0 {
return word
}
	// bounds: the extracted range must fit within the saved word
	if n+m > len(Saved) || i > len(word) {
return nil
}
temp := make([]rune, len(word[:i]), len(word)+(n+m))
//temp = append(temp, word[:i]...)
copy(temp, word[:i])
temp = append(temp, Saved[n:m+n]...)
temp = append(temp, word[i:]...)
return temp
}
// 4
func AppendMemory(word []rune) []rune {
if len(word) == 0 {
return word
}
//temp := make([]rune, 0, len(word)+len(Saved))
// use copy instead??
// append to word instead of making new variable
/*
temp = append(temp, word[:]...)
temp = append(temp, Saved[:]...)
*/
word = append(word, Saved[:]...)
return word
}
// 6
func PrependMemory(word []rune) []rune {
if len(word) == 0 {
return word
}
temp := make([]rune, 0, len(word)+len(Saved))
// use copy instead??
temp = append(temp, Saved[:]...)
temp = append(temp, word[:]...)
return temp
}
// M
func Memorize(word []rune) {
	// note: `Saved = word` would alias the caller's slice,
	// so allocate a fresh slice and copy instead
Saved = make([]rune, len(word), len(word))
copy(Saved, word)
}
// the reject rules (greater/less) use the 0-9/A-Z position encoding
// need to change because hashcat does not use this
// <N
func RejectLess(word []rune, n int) bool {
if len(word) > n {
return true
}
return false
}
// >N
func RejectGreater(word []rune, n int) bool {
if len(word) < n {
return true
}
return false
}
// use index instead?
// !X
func RejectContain(word []rune, char rune) bool {
for _, v := range word {
if v == char {
return true
}
}
return false
}
// /X
func RejectNotContain(word []rune, char rune) bool {
for _, v := range word {
if v == char {
return false
}
}
return true
}
// (X
func RejectEqualFirst(word []rune, char rune) bool {
if word[0] != char {
return true
}
return false
}
// )X
func RejectEqualLast(word []rune, char rune) bool {
if word[len(word)-1] != char {
return true
}
return false
}
// =NX
func RejectEqualAt(word []rune, char rune, n int) bool {
if word[n] != char {
return true
}
return false
}
// %NX
func RejectContains(word []rune, char rune, n int) bool {
count := 0
for _, v := range word {
if v == char {
count++
}
}
if count < n {
return true
}
return false
}
// Q
func RejectMemory(word []rune) bool {
if word == nil && Saved == nil {
return false
}
if word == nil || Saved == nil {
return false
}
	if len(word) != len(Saved) {
return false
}
for i := range word {
if word[i] != Saved[i] {
return false
}
}
return true
}
// k
func SwapFront(word []rune) []rune {
if len(word) == 1 || len(word) == 0 {
return word
}
word[0], word[1] = word[1], word[0]
return word
}
// K
func SwapBack(word []rune) []rune {
if len(word) == 1 || len(word) == 0 {
return word
}
word[len(word)-1], word[len(word)-2] = word[len(word)-2], word[len(word)-1]
return word
}
// *XY
func SwapAtN(word []rune, x, y int) []rune {
if len(word) == 0 {
return word
}
if x == 0 || y == 0 {
return word
}
	if len(word)-1 < x || len(word)-1 < y {
return word
}
word[x], word[y] = word[y], word[x]
return word
}
// LN
func BitwiseShiftLeft(word []rune, n int) []rune {
if len(word) == 0 {
return word
	}
	if len(word)-1 < n {
		return word
	}
	word[n] = word[n] << 1
return word
}
// RN
func BitwiseShiftRight(word []rune, n int) []rune {
if len(word) == 0 {
return word
	}
	if len(word)-1 < n {
		return word
	}
	word[n] = word[n] >> 1
return word
}
// +N
func ASCIIIncrementPlus(word []rune, n int) []rune {
if len(word) == 0 {
return word
	}
	if len(word)-1 < n {
		return word
	}
	word[n] = word[n] + 1
return word
}
// -N
func ASCIIIncrementMinus(word []rune, n int) []rune {
if len(word) == 0 {
return word
	}
	if len(word)-1 < n {
		return word
	}
	word[n] = word[n] - 1
return word
}
// .N
func ReplaceNPlus(word []rune, n int) []rune {
if len(word) == 0 {
return word
}
if n >= len(word)-1 {
return word
}
/*
temp := make([]rune, 0, len(word))
temp = append(temp, word[:n]...)
temp = append(temp, word[n+1])
temp = append(temp, word[n+1:]...)
*/
word[n] = word[n+1]
return word
}
// ,N
func ReplaceNMinus(word []rune, n int) []rune {
if len(word) == 0 {
return word
}
if n > len(word)-1 {
return word
}
	// guard: word[n-1] would index out of range when n == 0
if n == 0 {
return word
}
temp := make([]rune, 0, len(word))
temp = append(temp, word[:n]...)
temp = append(temp, word[n-1])
temp = append(temp, word[n+1:]...)
return temp
}
// yN
func DuplicateBlockFront(word []rune, n int) []rune {
if len(word) == 0 {
return word
	}
	if n > len(word) {
		return word
	}
	temp := make([]rune, len(word[:n]), len(word)+n)
copy(temp, word[:n])
temp = append(temp, word[:]...)
return temp
}
// YN
func DuplicateBlockBack(word []rune, n int) []rune {
if len(word) == 0 {
return word
	}
	if n > len(word) {
		return word
	}
	temp := make([]rune, len(word), len(word)+n)
copy(temp, word)
temp = append(temp, word[len(word)-n:]...)
return temp
}
// E
func Title(word []rune) []rune {
if len(word) == 0 {
return word
}
word[0] = unicode.ToUpper(word[0])
for i := 1; i <= len(word)-1; i++ {
if word[i] == ' ' {
			// guard: a trailing space would otherwise index past the end
			if i == len(word)-1 {
				break
			}
word[i+1] = unicode.ToUpper(word[i+1])
i++
} else {
word[i] = unicode.ToLower(word[i])
}
}
return word
/*
temp := make([]rune, len(word))
copy(temp, word)
temp[0] = unicode.ToUpper(temp[0])
for i := 1; i <= len(temp)-1; i++ {
if temp[i] == ' ' {
//
if i == len(word)-1 {
break
}
//
temp[i+1] = unicode.ToUpper(temp[i+1])
i++
} else {
temp[i] = unicode.ToLower(temp[i])
}
}
return temp
*/
}
|
{
return word
}
|
objects.go
|
package main
import (
"fmt"
"github.com/minio/minio-go"
"io"
"log"
"net/http"
)
var conn *minio.Client
func connectDHO() (*minio.Client, error) {
ssl := true
	var err error
if conn == nil {
conn, err = minio.NewV2(params.Endpoint, params.Access, params.Secret, ssl)
}
return conn, err
}
func objectList() (map[string]bool, error) {
isRecursive := true
doneCh := make(chan struct{})
defer close(doneCh)
minioClient, err := connectDHO()
if err != nil {
return nil, err
}
receiver := minioClient.ListObjects(params.bucket, params.prefix, isRecursive, doneCh)
filenames := make(map[string]bool)
for info := range receiver {
filenames[info.Key] = true
}
return filenames, nil
}
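
// alreadyUploaded is an illustrative sketch (not part of the original
// file): it shows the intended use of objectList as an existence check
// before uploading. pathToObject is assumed to map a local path to an
// object key, based on its use below; it is defined elsewhere in this
// package.
func alreadyUploaded(name string) (bool, error) {
	existing, err := objectList()
	if err != nil {
		return false, err
	}
	return existing[pathToObject(name)], nil
}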
//func uploadImages(original string, resized *os.File) error {
func
|
(r io.ReadSeeker, name string) {
minioClient, err := connectDHO()
if err != nil {
		log.Printf(" **there was an error connecting to DHO: %s", err)
return
}
buffer := make([]byte, 512)
_, err = r.Read(buffer)
if err != nil {
log.Printf(" **there was an error reading from %s: %s", name, err)
}
r.Seek(0, 0)
mime := http.DetectContentType(buffer)
objName := pathToObject(name)
fmt.Printf(" Uploading %s:%s\n", params.bucket, objName)
_, err = minioClient.PutObject(params.bucket, objName, r, mime)
if err != nil {
log.Printf(" **there was an error uploading %s: %s", objName, err)
return
}
//TODO: change permissions on object to public-read
}
// Remove after setting up repo
func test_dho() error {
ssl := false
fmt.Println("Testing connection to DHO")
_ = "breakpoint"
minioClient, err := minio.NewV2(params.Endpoint, params.Access, params.Secret, ssl)
if err != nil {
return err
}
//Other libraries...
//svc := s3.New(session.New(), &aws.Config{Endpoint: aws.String(params.Endpoint)})
//resp, err := svc.HeadBucket(&s3.HeadBucketInput{
//Bucket: aws.String(params.bucket),
//})
//auth := aws.Auth{
//AccessKey: params.Access,
//SecretKey: params.Secret,
//}
//conn := s3.New(auth, aws.Region{Name: "dho", S3Endpoint: params.Endpoint})
//b := conn.Bucket("kmarsh")
//res, err := b.List("", "", "", 1000)
fmt.Println("Bucket exists?")
err = minioClient.BucketExists(params.bucket)
if err != nil {
fmt.Println("error checking bucket")
return err
}
fmt.Println("bucket exists")
return nil
}
|
uploadImages
|
__init__.py
|
# SPDX-License-Identifier: MIT
#
# Copyright (c) 2021 The Anvil Extras project team members listed at
# https://github.com/anvilistas/anvil-extras/graphs/contributors
#
# This software is published at https://github.com/anvilistas/anvil-extras
import anvil.js
from anvil import HtmlPanel as _HtmlPanel
from ..utils._component_helpers import _get_color, _html_injector, _spacing_property
from ._anvil_designer import SliderTemplate
__version__ = "1.7.1"
noui_version = "15.4.0"
_html_injector.cdn(
f"https://cdn.jsdelivr.net/npm/nouislider@{noui_version}/dist/nouislider.min.css"
)
_html_injector.css(
"""
.anvil-slider-container {
padding: 10px 0;
}
.anvil-slider-container.has-pips {
padding-bottom: 40px;
}
.anvil-container-overflow, .anvil-panel-col {
overflow: visible;
}
.noUi-connect {
background: var(--primary);
}
.noUi-horizontal .noUi-handle {
width: 34px;
height: 34px;
right: -17px;
top: -10px;
border-radius: 50%;
}
.noUi-handle::before, .noUi-handle::after {
content: none
}
"""
)
_Slider = anvil.js.import_from(
f"https://cdn.skypack.dev/nouislider@{noui_version}"
).default
import json
def _as_list(x):
return x if isinstance(x, list) else [x]
def _from_list(x):
return x[0] if isinstance(x, list) else x
def _parse(s, force_list=False):
if not isinstance(s, str):
return s
s = s.lower().strip()
if not s:
return None if not force_list else []
if ("," in s or force_list) and s[0] != "[":
s = "[" + s + "]"
try:
return json.loads(s)
except Exception:
return [] if force_list else s
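
def _parse_examples():
    # Illustrative sketch (not part of the original module): _parse turns
    # designer-friendly strings into Python values.
    assert _parse("20") == 20
    assert _parse("20, 80") == [20, 80]
    assert _parse("true") is True
    assert _parse("", force_list=True) == []
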
try:
    # added in Python 3.9; not currently available in Skulpt
_removeprefix = str.removeprefix
_removesuffix = str.removesuffix
except AttributeError:
def
|
(s, prefix):
return s[len(prefix) :] if s.startswith(prefix) else s
def _removesuffix(s, suffix):
return s[: len(s) - len(suffix)] if s.endswith(suffix) else s
def _wrap_formatter(formatter):
fto = formatter["to"]
ffrom = formatter["from"]
def wrap_to(f: float, *args) -> str:
s = fto(f)
if not isinstance(s, str):
raise TypeError(
f"Custom formatter returned {type(s).__name__} (expected str)"
)
return s
def wrap_from(s: str, *args) -> float:
        # This function is called from javascript, so accept *args
if not isinstance(s, str):
raise TypeError(
f"got an unexpected value when trying to assign a value to the slider, (got {s})"
)
try:
return ffrom(s)
except Exception as e:
try:
                # we may have just been given a number so do the obvious thing
res = float(s)
return int(res) if res.is_integer() else res
except Exception:
raise RuntimeError(f"your custom formatter raised an exception: {e!r}")
return {"to": wrap_to, "from": wrap_from, "format_spec": formatter}
def _get_formatter(formatspec: str) -> dict:
"""
Expecting a format spec e.g. '.2f'
Or a simple string '£{:.2f}'
"""
if isinstance(formatspec, dict):
return _wrap_formatter(formatspec)
if not isinstance(formatspec, str):
raise TypeError("expected property format to be of type str")
first = formatspec.find("{")
last = formatspec.find("}")
prefix = "" if first == -1 else formatspec[:first]
suffix = "" if last == -1 else formatspec[last + 1 :]
type = formatspec[len(formatspec) - 1] if last == -1 else formatspec[last - 1]
def to_format(f: float, *args) -> str:
# Used in javascript world so expects extra args
try:
return format(f, formatspec) if first == -1 else formatspec.format(f)
except Exception:
return f # better just to return what was passed to us
# this will raise an error if we have an invalid spec
format(1.1, formatspec) if first == -1 else formatspec.format(1.1)
def from_format(s: str, *args) -> float:
# Used in javascript world so expects extra args
if not isinstance(s, str):
raise TypeError(
f"got an unexpected value when trying to assign a value to the slider, (got {s})"
)
s = (
_removesuffix(_removeprefix(s, prefix), suffix)
.strip()
.replace(",", "")
.replace("_", "")
)
has_percent = type == "%" and s[len(s) - 1] == "%"
if has_percent:
s = s[: len(s) - 1]
try:
f = float(s)
except Exception:
return False
if has_percent:
f = f / 100
return int(f) if f.is_integer() else f
    # noUiSlider requires a formatter like {to: (float) => str, from: (str) => float}
return {"from": from_format, "to": to_format, "format_spec": formatspec}
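
def _formatter_examples():
    # Illustrative sketch (not part of the original module): both formatter
    # styles accepted by _get_formatter round-trip between float values and
    # display strings. The '£{:.2f}' spec comes from the docstring above.
    f = _get_formatter(".2f")
    assert f["to"](1.5) == "1.50"
    assert f["from"]("1.50") == 1.5
    g = _get_formatter("£{:.2f}")
    assert g["to"](1.5) == "£1.50"
    assert g["from"]("£1.50") == 1.5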
def _prop_getter(prop, fget=None):
return lambda self: self._props[prop] if fget is None else fget(self._props[prop])
def _slider_prop(prop, fset=None, fget=None):
def setter(self, value):
value = value if fset is None else fset(value)
self._props[prop] = value
if prop == "format":
pips = self._make_pips()
self._slider.updateOptions({prop: value, "pips": pips})
else:
self._slider.updateOptions({prop: value})
return property(_prop_getter(prop, fget), setter)
def _min_max_prop(prop):
def getter(self):
return self._props["range"][prop]
def setter(self, value):
r = self._props["range"]
r[prop] = value
self._slider.updateOptions({"range": r})
return property(getter, setter)
def _pips_prop(prop):
def setter(self, value):
self._props[prop] = value
pips = self._make_pips()
self._toggle_has_pips(pips)
self._slider.updateOptions({"pips": pips})
return property(_prop_getter(prop), setter)
_defaults = {
"animate": True,
"start": 20,
"step": None,
"tooltips": False,
"connect": False,
"behaviour": "tap",
"format": None,
"pips": None,
"pips_mode": None,
"pips_values": [],
"pips_density": -1,
"pips_stepped": True,
"margin": None,
"padding": None,
"limit": None,
"range": None,
"min": 0,
"max": 100,
"visible": True,
"enabled": True,
"spacing_above": "small",
"spacing_below": "small",
"value": None,
"values": None,
"formatted_value": None,
"formatted_values": None,
}
class Slider(SliderTemplate):
def __init__(self, **properties):
# Any code you write here will run when the form opens.
dom_node = self._dom_node = anvil.js.get_dom_node(self)
dom_node.classList.add("anvil-slider-container")
self._slider_node = dom_node.querySelector(".anvil-slider")
# remove the script to stop them loading
while dom_node.firstElementChild:
dom_node.removeChild(dom_node.firstElementChild)
dom_node.append(self._slider_node)
props = self._props = _defaults | properties
for prop in (
"start",
"connect",
"margin",
"padding",
"limit",
"pips_values",
):
props[prop] = _parse(props[prop], prop == "pips_values")
props["range"] = props["range"] or {"min": props["min"], "max": props["max"]}
props["format"] = _get_formatter(props["format"] or ".2f")
pips = self._make_pips()
self._toggle_has_pips(pips)
try:
self._slider = _Slider.create(self._slider_node, props | {"pips": pips})
except Exception as e:
raise RuntimeError(repr(e).replace("noUiSlider", "Slider"))
###### EVENTS ######
self._slider.on("slide", lambda a, h, *e: self.raise_event("slide", handle=h))
self._slider.on("change", lambda a, h, *e: self.raise_event("change", handle=h))
###### PROPS TO INIT ######
always = {p: props[p] for p in ("color", "spacing_above", "spacing_below")}
if_true = {
p: props[p]
for p in ("formatted_value", "formatted_values", "value", "values")
if props[p] is not None
}
if_false = {p: props[p] for p in ("enabled", "visible") if not props[p]}
self.init_components(**always, **if_false, **if_true)
###### VALUE PROPERTIES ######
def _value_setter(self, val):
self._slider.set(val)
def _value(self):
return _from_list(self._slider.get(True))
def _values(self):
return _as_list(self._slider.get(True))
def _formatted_value(self):
return _from_list(self._slider.get())
def _formatted_values(self):
return _as_list(self._slider.get())
value = property(_value, _value_setter)
values = property(_values, _value_setter)
formatted_value = property(_formatted_value, _value_setter)
formatted_values = property(_formatted_values, _value_setter)
###### noUiSlider PROPS ######
connect = _slider_prop("connect") # not dynamic
behaviour = _slider_prop("behaviour") # not dynamic
margin = _slider_prop("margin")
padding = _slider_prop("padding")
limit = _slider_prop("limit")
step = _slider_prop("step")
start = _slider_prop("start")
range = _slider_prop("range")
min = _min_max_prop("min")
max = _min_max_prop("max")
tooltips = _slider_prop("tooltips")
animate = _slider_prop("animate")
format = _slider_prop(
"format", fset=lambda s: _get_formatter(s), fget=lambda d: d["format_spec"]
)
###### PIPS PROPS ######
pips = _pips_prop("pips")
pips_mode = _pips_prop("pips_mode")
pips_values = _pips_prop("pips_values")
pips_density = _pips_prop("pips_density")
pips_stepped = _pips_prop("pips_stepped")
def _toggle_has_pips(self, pips):
self._dom_node.classList.toggle("has-pips", bool(pips))
def _make_pips(self):
props = self._props
pips = props["pips"]
if not pips:
return None
elif pips is True:
return {
"format": props["format"],
"mode": props["pips_mode"],
"values": props["pips_values"],
"density": props["pips_density"],
"stepped": props["pips_stepped"],
}
elif isinstance(pips, dict):
return pips
else:
raise TypeError(f"pips should be a bool or a dict, got {type(pips)}")
###### VISUAL PROPS ######
@property
def enabled(self):
return not self._slider_node.getAttribute("disabled")
@enabled.setter
def enabled(self, value):
if value:
self._slider_node.removeAttribute("disabled")
else:
self._slider_node.setAttribute("disabled", True)
@property
def color(self):
return self._color
@color.setter
def color(self, value):
self._color = value
self._dom_node.style.setProperty("--primary", _get_color(value))
spacing_above = _spacing_property("above")
spacing_below = _spacing_property("below")
visible = _HtmlPanel.visible
###### METHODS ######
def reset(self):
self._slider.reset()
self.raise_event("x-writeback")
|
_removeprefix
|
mod.rs
|
#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::F12R2 {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct FB0R {
bits: bool,
}
impl FB0R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB1R {
bits: bool,
}
impl FB1R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB2R {
bits: bool,
}
impl FB2R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB3R {
bits: bool,
}
impl FB3R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB4R {
bits: bool,
}
impl FB4R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB5R {
bits: bool,
}
impl FB5R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB6R {
bits: bool,
}
impl FB6R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB7R {
bits: bool,
}
impl FB7R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB8R {
bits: bool,
}
impl FB8R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB9R {
bits: bool,
}
impl FB9R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB10R {
bits: bool,
}
impl FB10R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB11R {
bits: bool,
}
impl FB11R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB12R {
bits: bool,
}
impl FB12R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB13R {
bits: bool,
}
impl FB13R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB14R {
bits: bool,
}
impl FB14R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB15R {
bits: bool,
}
impl FB15R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB16R {
bits: bool,
}
impl FB16R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB17R {
bits: bool,
}
impl FB17R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB18R {
bits: bool,
}
impl FB18R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB19R {
bits: bool,
}
impl FB19R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB20R {
bits: bool,
}
impl FB20R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB21R {
bits: bool,
}
impl FB21R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB22R {
bits: bool,
}
impl FB22R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB23R {
bits: bool,
}
impl FB23R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB24R {
bits: bool,
}
impl FB24R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB25R {
bits: bool,
}
impl FB25R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB26R {
bits: bool,
}
impl FB26R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB27R {
bits: bool,
}
impl FB27R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB28R {
bits: bool,
}
impl FB28R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB29R {
bits: bool,
}
impl FB29R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB30R {
bits: bool,
}
impl FB30R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FB31R {
bits: bool,
}
impl FB31R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Proxy"]
pub struct _FB0W<'a> {
w: &'a mut W,
}
impl<'a> _FB0W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB1W<'a> {
w: &'a mut W,
}
impl<'a> _FB1W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB2W<'a> {
w: &'a mut W,
}
impl<'a> _FB2W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB3W<'a> {
w: &'a mut W,
}
impl<'a> _FB3W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB4W<'a> {
w: &'a mut W,
}
impl<'a> _FB4W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB5W<'a> {
w: &'a mut W,
}
impl<'a> _FB5W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB6W<'a> {
w: &'a mut W,
}
impl<'a> _FB6W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB7W<'a> {
w: &'a mut W,
}
impl<'a> _FB7W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB8W<'a> {
w: &'a mut W,
}
impl<'a> _FB8W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 8;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB9W<'a> {
w: &'a mut W,
}
impl<'a> _FB9W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 9;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB10W<'a> {
w: &'a mut W,
}
impl<'a> _FB10W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 10;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB11W<'a> {
w: &'a mut W,
}
impl<'a> _FB11W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 11;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB12W<'a> {
w: &'a mut W,
}
impl<'a> _FB12W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 12;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB13W<'a> {
w: &'a mut W,
}
impl<'a> _FB13W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 13;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB14W<'a> {
w: &'a mut W,
}
impl<'a> _FB14W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 14;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB15W<'a> {
w: &'a mut W,
}
impl<'a> _FB15W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 15;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB16W<'a> {
w: &'a mut W,
}
impl<'a> _FB16W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 16;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB17W<'a> {
w: &'a mut W,
}
impl<'a> _FB17W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 17;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB18W<'a> {
w: &'a mut W,
}
impl<'a> _FB18W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 18;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB19W<'a> {
w: &'a mut W,
}
impl<'a> _FB19W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 19;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB20W<'a> {
w: &'a mut W,
}
impl<'a> _FB20W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 20;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB21W<'a> {
w: &'a mut W,
}
impl<'a> _FB21W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 21;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB22W<'a> {
w: &'a mut W,
}
impl<'a> _FB22W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 22;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB23W<'a> {
w: &'a mut W,
}
impl<'a> _FB23W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 23;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB24W<'a> {
w: &'a mut W,
}
impl<'a> _FB24W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 24;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB25W<'a> {
w: &'a mut W,
}
impl<'a> _FB25W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 25;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB26W<'a> {
w: &'a mut W,
}
impl<'a> _FB26W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 26;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB27W<'a> {
w: &'a mut W,
}
impl<'a> _FB27W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 27;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB28W<'a> {
w: &'a mut W,
}
impl<'a> _FB28W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 28;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB29W<'a> {
w: &'a mut W,
}
impl<'a> _FB29W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 29;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB30W<'a> {
w: &'a mut W,
}
impl<'a> _FB30W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 30;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FB31W<'a> {
w: &'a mut W,
}
impl<'a> _FB31W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 31;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 0 - Filter bits"]
#[inline]
pub fn fb0(&self) -> FB0R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB0R { bits }
}
#[doc = "Bit 1 - Filter bits"]
#[inline]
pub fn fb1(&self) -> FB1R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB1R { bits }
}
#[doc = "Bit 2 - Filter bits"]
#[inline]
pub fn fb2(&self) -> FB2R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 2;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB2R { bits }
}
#[doc = "Bit 3 - Filter bits"]
#[inline]
pub fn fb3(&self) -> FB3R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 3;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB3R { bits }
}
#[doc = "Bit 4 - Filter bits"]
#[inline]
pub fn fb4(&self) -> FB4R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB4R { bits }
}
#[doc = "Bit 5 - Filter bits"]
#[inline]
pub fn fb5(&self) -> FB5R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB5R { bits }
}
#[doc = "Bit 6 - Filter bits"]
#[inline]
pub fn fb6(&self) -> FB6R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB6R { bits }
}
#[doc = "Bit 7 - Filter bits"]
#[inline]
pub fn fb7(&self) -> FB7R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB7R { bits }
}
#[doc = "Bit 8 - Filter bits"]
#[inline]
pub fn fb8(&self) -> FB8R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 8;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB8R { bits }
}
#[doc = "Bit 9 - Filter bits"]
#[inline]
pub fn fb9(&self) -> FB9R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 9;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB9R { bits }
}
#[doc = "Bit 10 - Filter bits"]
#[inline]
pub fn fb10(&self) -> FB10R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 10;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB10R { bits }
}
#[doc = "Bit 11 - Filter bits"]
#[inline]
pub fn fb11(&self) -> FB11R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 11;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB11R { bits }
}
#[doc = "Bit 12 - Filter bits"]
#[inline]
pub fn fb12(&self) -> FB12R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 12;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB12R { bits }
}
#[doc = "Bit 13 - Filter bits"]
#[inline]
pub fn fb13(&self) -> FB13R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 13;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB13R { bits }
}
#[doc = "Bit 14 - Filter bits"]
#[inline]
pub fn fb14(&self) -> FB14R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 14;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB14R { bits }
}
#[doc = "Bit 15 - Filter bits"]
#[inline]
pub fn fb15(&self) -> FB15R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 15;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB15R { bits }
}
#[doc = "Bit 16 - Filter bits"]
#[inline]
pub fn fb16(&self) -> FB16R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 16;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB16R { bits }
}
#[doc = "Bit 17 - Filter bits"]
#[inline]
pub fn fb17(&self) -> FB17R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 17;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB17R { bits }
}
#[doc = "Bit 18 - Filter bits"]
#[inline]
pub fn fb18(&self) -> FB18R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 18;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB18R { bits }
}
#[doc = "Bit 19 - Filter bits"]
#[inline]
pub fn fb19(&self) -> FB19R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 19;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB19R { bits }
}
#[doc = "Bit 20 - Filter bits"]
#[inline]
pub fn fb20(&self) -> FB20R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 20;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB20R { bits }
}
#[doc = "Bit 21 - Filter bits"]
#[inline]
pub fn fb21(&self) -> FB21R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 21;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB21R { bits }
}
#[doc = "Bit 22 - Filter bits"]
#[inline]
pub fn fb22(&self) -> FB22R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 22;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB22R { bits }
}
#[doc = "Bit 23 - Filter bits"]
#[inline]
pub fn fb23(&self) -> FB23R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 23;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB23R { bits }
}
#[doc = "Bit 24 - Filter bits"]
#[inline]
pub fn fb24(&self) -> FB24R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 24;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB24R { bits }
}
#[doc = "Bit 25 - Filter bits"]
#[inline]
pub fn fb25(&self) -> FB25R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 25;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB25R { bits }
}
#[doc = "Bit 26 - Filter bits"]
#[inline]
pub fn fb26(&self) -> FB26R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 26;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB26R { bits }
}
#[doc = "Bit 27 - Filter bits"]
#[inline]
pub fn fb27(&self) -> FB27R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 27;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB27R { bits }
}
#[doc = "Bit 28 - Filter bits"]
#[inline]
pub fn fb28(&self) -> FB28R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 28;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB28R { bits }
}
#[doc = "Bit 29 - Filter bits"]
#[inline]
pub fn fb29(&self) -> FB29R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 29;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB29R { bits }
}
#[doc = "Bit 30 - Filter bits"]
#[inline]
pub fn fb30(&self) -> FB30R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 30;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB30R { bits }
}
#[doc = "Bit 31 - Filter bits"]
#[inline]
pub fn fb31(&self) -> FB31R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 31;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FB31R { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 0 - Filter bits"]
#[inline]
pub fn fb0(&mut self) -> _FB0W {
_FB0W { w: self }
}
#[doc = "Bit 1 - Filter bits"]
#[inline]
pub fn fb1(&mut self) -> _FB1W {
_FB1W { w: self }
}
#[doc = "Bit 2 - Filter bits"]
#[inline]
pub fn fb2(&mut self) -> _FB2W {
_FB2W { w: self }
}
#[doc = "Bit 3 - Filter bits"]
#[inline]
pub fn fb3(&mut self) -> _FB3W {
_FB3W { w: self }
}
#[doc = "Bit 4 - Filter bits"]
#[inline]
pub fn fb4(&mut self) -> _FB4W {
_FB4W { w: self }
}
#[doc = "Bit 5 - Filter bits"]
#[inline]
pub fn fb5(&mut self) -> _FB5W {
_FB5W { w: self }
}
#[doc = "Bit 6 - Filter bits"]
#[inline]
pub fn fb6(&mut self) -> _FB6W {
_FB6W { w: self }
}
#[doc = "Bit 7 - Filter bits"]
#[inline]
pub fn fb7(&mut self) -> _FB7W {
_FB7W { w: self }
}
#[doc = "Bit 8 - Filter bits"]
#[inline]
pub fn fb8(&mut self) -> _FB8W {
_FB8W { w: self }
}
#[doc = "Bit 9 - Filter bits"]
#[inline]
pub fn fb9(&mut self) -> _FB9W {
_FB9W { w: self }
}
#[doc = "Bit 10 - Filter bits"]
#[inline]
pub fn fb10(&mut self) -> _FB10W {
_FB10W { w: self }
}
#[doc = "Bit 11 - Filter bits"]
#[inline]
pub fn fb11(&mut self) -> _FB11W {
_FB11W { w: self }
}
#[doc = "Bit 12 - Filter bits"]
#[inline]
pub fn fb12(&mut self) -> _FB12W {
_FB12W { w: self }
}
#[doc = "Bit 13 - Filter bits"]
#[inline]
pub fn fb13(&mut self) -> _FB13W {
_FB13W { w: self }
}
#[doc = "Bit 14 - Filter bits"]
#[inline]
pub fn fb14(&mut self) -> _FB14W {
_FB14W { w: self }
}
#[doc = "Bit 15 - Filter bits"]
#[inline]
pub fn fb15(&mut self) -> _FB15W {
_FB15W { w: self }
}
#[doc = "Bit 16 - Filter bits"]
#[inline]
pub fn fb16(&mut self) -> _FB16W {
_FB16W { w: self }
}
#[doc = "Bit 17 - Filter bits"]
#[inline]
pub fn fb17(&mut self) -> _FB17W {
_FB17W { w: self }
}
#[doc = "Bit 18 - Filter bits"]
#[inline]
pub fn fb18(&mut self) -> _FB18W {
_FB18W { w: self }
}
#[doc = "Bit 19 - Filter bits"]
#[inline]
pub fn fb19(&mut self) -> _FB19W {
_FB19W { w: self }
}
#[doc = "Bit 20 - Filter bits"]
#[inline]
pub fn fb20(&mut self) -> _FB20W {
_FB20W { w: self }
}
#[doc = "Bit 21 - Filter bits"]
#[inline]
pub fn fb21(&mut self) -> _FB21W {
_FB21W { w: self }
}
#[doc = "Bit 22 - Filter bits"]
#[inline]
pub fn fb22(&mut self) -> _FB22W {
_FB22W { w: self }
}
#[doc = "Bit 23 - Filter bits"]
#[inline]
pub fn fb23(&mut self) -> _FB23W {
_FB23W { w: self }
}
#[doc = "Bit 24 - Filter bits"]
#[inline]
pub fn fb24(&mut self) -> _FB24W {
_FB24W { w: self }
}
#[doc = "Bit 25 - Filter bits"]
#[inline]
pub fn fb25(&mut self) -> _FB25W {
_FB25W { w: self }
}
#[doc = "Bit 26 - Filter bits"]
#[inline]
pub fn fb26(&mut self) -> _FB26W {
_FB26W { w: self }
}
#[doc = "Bit 27 - Filter bits"]
#[inline]
pub fn fb27(&mut self) -> _FB27W {
_FB27W { w: self }
}
#[doc = "Bit 28 - Filter bits"]
#[inline]
pub fn fb28(&mut self) -> _FB28W {
_FB28W { w: self }
}
#[doc = "Bit 29 - Filter bits"]
#[inline]
pub fn fb29(&mut self) -> _FB29W {
_FB29W { w: self }
}
#[doc = "Bit 30 - Filter bits"]
#[inline]
pub fn fb30(&mut self) -> _FB30W {
_FB30W { w: self }
}
#[doc = "Bit 31 - Filter bits"]
#[inline]
pub fn fb31(&mut self) -> _FB31W {
_FB31W { w: self }
}
}
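// Usage sketch (illustrative, not part of the generated file): the
// closure-based API above is typically driven like this, where `f12r2`
// is a hypothetical handle to this register on the peripheral:
//
//     f12r2.modify(|_r, w| w.fb0().set_bit().fb31().clear_bit()); // read-modify-write
//     let fb3_set = f12r2.read().fb3().bit_is_set();              // read a single field
//     f12r2.write(|w| unsafe { w.bits(0xFFFF_0000) });            // raw write of all bits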
|
keymap_key.rs
|
// Take a look at the license at the top of the repository in the LICENSE file.
use glib::translate::*;
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub struct KeymapKey {
group: i32,
keycode: u32,
level: i32,
}
impl KeymapKey {
pub fn new(keycode: u32, group: i32, level: i32) -> KeymapKey {
assert_initialized_main_thread!();
KeymapKey {
keycode,
group,
level,
}
}
pub fn get_keycode(&self) -> u32 {
self.keycode
}
pub fn get_group(&self) -> i32 {
self.group
}
pub fn get_level(&self) -> i32 {
self.level
}
}
#[doc(hidden)]
impl<'a> ToGlibPtr<'a, *const crate::ffi::GdkKeymapKey> for KeymapKey {
type Storage = &'a Self;
#[inline]
fn to_glib_none(&'a self) -> Stash<'a, *const crate::ffi::GdkKeymapKey, Self> {
let ptr: *const KeymapKey = &*self;
Stash(ptr as *const crate::ffi::GdkKeymapKey, self)
}
}
#[doc(hidden)]
impl<'a> ToGlibPtrMut<'a, *mut crate::ffi::GdkKeymapKey> for KeymapKey {
type Storage = &'a mut Self;
#[inline]
fn to_glib_none_mut(&'a mut self) -> StashMut<'a, *mut crate::ffi::GdkKeymapKey, Self> {
let ptr: *mut KeymapKey = &mut *self;
StashMut(ptr as *mut crate::ffi::GdkKeymapKey, self)
}
}
#[doc(hidden)]
impl FromGlibPtrNone<*const crate::ffi::GdkKeymapKey> for KeymapKey {
unsafe fn from_glib_none(ptr: *const crate::ffi::GdkKeymapKey) -> Self {
*(ptr as *const KeymapKey)
}
}
#[doc(hidden)]
impl FromGlibPtrNone<*mut crate::ffi::GdkKeymapKey> for KeymapKey {
    unsafe fn from_glib_none(ptr: *mut crate::ffi::GdkKeymapKey) -> Self {
*(ptr as *mut KeymapKey)
}
}
#[doc(hidden)]
impl FromGlibPtrFull<*mut crate::ffi::GdkKeymapKey> for KeymapKey {
#[inline]
unsafe fn from_glib_full(ptr: *mut crate::ffi::GdkKeymapKey) -> Self {
        let key = *(ptr as *mut KeymapKey);
        glib::ffi::g_free(ptr as *mut _);
        key
}
}
#[doc(hidden)]
impl FromGlibContainerAsVec<ffi::GdkKeymapKey, *mut ffi::GdkKeymapKey> for KeymapKey {
unsafe fn from_glib_none_num_as_vec(ptr: *mut ffi::GdkKeymapKey, num: usize) -> Vec<Self> {
if num == 0 || ptr.is_null() {
return Vec::new();
}
let mut res = Vec::with_capacity(num);
for i in 0..num {
res.push(from_glib_none(ptr.add(i)));
}
res
}
unsafe fn from_glib_container_num_as_vec(ptr: *mut ffi::GdkKeymapKey, num: usize) -> Vec<Self> {
let res = FromGlibContainerAsVec::from_glib_none_num_as_vec(ptr, num);
glib::ffi::g_free(ptr as *mut _);
res
}
unsafe fn from_glib_full_num_as_vec(ptr: *mut ffi::GdkKeymapKey, num: usize) -> Vec<Self> {
FromGlibContainerAsVec::from_glib_container_num_as_vec(ptr, num)
}
}
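// Usage sketch (illustrative): the translate impls above are what allow a
// binding method to hand back an owned Vec<KeymapKey> built from a
// C-allocated array, e.g. (assuming a Keymap::get_entries_for_keyval-style
// binding exists):
//
//     let keys: Vec<KeymapKey> = keymap.get_entries_for_keyval(keyval);
//     for key in &keys {
//         println!("keycode={} group={} level={}",
//                  key.get_keycode(), key.get_group(), key.get_level());
//     }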
|
TKGetLocalValue.spec.ts
|
// import { shallowMount } from "@vue/test-utils";
import { TKGetLocalValue, TKLabel } from "@/domain/utils/TKLabel";
describe("TKGetLocalValue", () => {
it("renders props.msg when passed", () => {
const labelWithEnglish: TKLabel = {
en: "english",
fr: "french"
};
expect(TKGetLocalValue(labelWithEnglish, "en")).toMatch("english");
expect(TKGetLocalValue(labelWithEnglish, "fr")).toMatch("french");
expect(TKGetLocalValue(labelWithEnglish, "FR")).toMatch("english");
expect(TKGetLocalValue(labelWithEnglish, "blablabla")).toMatch("english");
const labelWithoutEnglish: TKLabel = {
fr: "french",
it: "italian"
};
expect(TKGetLocalValue(labelWithoutEnglish, "it")).toMatch("italian");
expect(TKGetLocalValue(labelWithoutEnglish, "fr")).toMatch("french");
expect(TKGetLocalValue(labelWithoutEnglish, "blablabla")).toMatch("");
});
});
|
config.py
|
# -*- coding: utf-8 -*-
import datetime
from collections import OrderedDict
from gluon import current
from gluon.storage import Storage
from s3 import S3Method
from .controllers import deploy_index
RED_CROSS = "Red Cross / Red Crescent"
def config(settings):
"""
Template settings for IFRC's Resource Management System
- Americas Zone
http://eden.sahanafoundation.org/wiki/Deployments/IFRC
"""
T = current.T
# -------------------------------------------------------------------------
# System Name
#
settings.base.system_name = T("Resource Management System")
settings.base.system_name_short = T("RMS")
# -------------------------------------------------------------------------
# Pre-Populate
#
settings.base.prepopulate.append("RMSAmericas")
settings.base.prepopulate_demo.append("RMSAmericas/Demo")
# -------------------------------------------------------------------------
# Theme (folder to use for views/layout.html)
#
settings.base.theme = "RMSAmericas"
# Uncomment to disable responsive behavior of datatables
#settings.ui.datatables_responsive = False
# Uncomment to show a default cancel button in standalone create/update forms
settings.ui.default_cancel_button = True
# @todo: configure custom icons
#settings.ui.custom_icons = {
# "male": "icon-male",
# "female": "icon-female",
# "medical": "icon-plus-sign-alt",
#}
# =========================================================================
# System Settings
# -------------------------------------------------------------------------
# Security Policy
settings.security.policy = 8 # Delegations
settings.security.map = True
# Authorization Settings
settings.auth.registration_requires_approval = True
settings.auth.registration_requires_verification = True
settings.auth.registration_requests_organisation = True
settings.auth.registration_organisation_required = True
settings.auth.registration_requests_site = True
settings.auth.registration_link_user_to = {"staff": T("Staff"),
"volunteer": T("Volunteer"),
#"member": T("Member")
}
# This hides the options from the UI
#settings.auth.registration_link_user_to_default = ["volunteer"]
#settings.auth.record_approval = True
# @ToDo: Should we fallback to organisation_id if site_id is None?
settings.auth.registration_roles = {"site_id": ["reader",
],
}
# Owner Entity
settings.auth.person_realm_human_resource_site_then_org = True
settings.auth.person_realm_member_org = True
# Activate entity role manager tabs for OrgAdmins
settings.auth.entity_role_manager = True
def ifrc_realm_entity(table, row):
"""
Assign a Realm Entity to records
"""
tablename = table._tablename
# Do not apply realms for Master Data
# @ToDo: Restore Realms and add a role/functionality support for Master Data
if tablename in ("hrm_certificate",
"hrm_department",
"hrm_job_title",
"hrm_course",
"hrm_programme",
"member_membership_type",
"vol_award",
):
return None
db = current.db
s3db = current.s3db
# Entity reference fields
EID = "pe_id"
OID = "organisation_id"
SID = "site_id"
#GID = "group_id"
PID = "person_id"
# Owner Entity Foreign Key
realm_entity_fks = {"pr_contact": [("org_organisation", EID),
#("po_household", EID),
("pr_person", EID),
],
"pr_contact_emergency": EID,
"pr_physical_description": EID,
"pr_address": [("org_organisation", EID),
("pr_person", EID),
],
"pr_image": EID,
"pr_identity": PID,
"pr_education": PID,
"pr_note": PID,
"hrm_human_resource": SID,
"hrm_training": PID,
"hrm_training_event": OID,
"inv_adj": SID,
"inv_recv": SID,
"inv_send": SID,
"inv_inv_item": SID,
"inv_track_item": "track_org_id",
"inv_adj_item": "adj_id",
"req_req_item": "req_id",
#"po_household": "area_id",
#"po_organisation_area": "area_id",
}
# Default Foreign Keys (ordered by priority)
default_fks = (#"household_id",
"catalog_id",
"project_id",
"project_location_id",
)
# Link Tables
#realm_entity_link_table = {
# "project_task": Storage(tablename = "project_task_project",
# link_key = "task_id"
# )
# }
#if tablename in realm_entity_link_table:
# # Replace row with the record from the link table
# link_table = realm_entity_link_table[tablename]
# table = s3db[link_table.tablename]
# rows = db(table[link_table.link_key] == row.id).select(table.id,
# limitby=(0, 1))
# if rows:
# # Update not Create
# row = rows.first()
# Check if there is a FK to inherit the realm_entity
realm_entity = 0
fk = realm_entity_fks.get(tablename, None)
fks = [fk] if not isinstance(fk, list) else list(fk)
fks.extend(default_fks)
for default_fk in fks:
if isinstance(default_fk, tuple):
instance_type, fk = default_fk
else:
instance_type, fk = None, default_fk
if fk not in table.fields:
continue
# Inherit realm_entity from parent record
if fk == EID:
if instance_type:
ftable = s3db.table(instance_type)
if not ftable:
continue
else:
ftable = s3db.pr_person
query = (ftable[EID] == row[EID])
else:
ftablename = table[fk].type[10:] # reference tablename
ftable = s3db[ftablename]
query = (table.id == row["id"]) & \
(table[fk] == ftable.id)
record = db(query).select(ftable.realm_entity,
limitby = (0, 1)
).first()
if record:
realm_entity = record.realm_entity
break
#else:
# Continue to loop through the rest of the default_fks
# Fall back to default get_realm_entity function
use_user_organisation = False
#use_user_root_organisation = False
# Suppliers & Partners are owned by the user's organisation
if realm_entity == 0 and tablename == "org_organisation":
ottable = s3db.org_organisation_type
ltable = db.org_organisation_organisation_type
query = (ltable.organisation_id == row["id"]) & \
(ltable.organisation_type_id == ottable.id)
otype = db(query).select(ottable.name,
limitby = (0, 1)
).first()
if not otype or otype.name != RED_CROSS:
use_user_organisation = True
# Facilities, Forums & Requisitions are owned by the user's organisation
elif tablename in ("org_facility", "pr_forum", "req_req"):
use_user_organisation = True
elif tablename == "hrm_training":
# Inherit realm entity from the related HR record
htable = s3db.hrm_human_resource
query = (table.id == row["id"]) & \
(htable.person_id == table.person_id) & \
(htable.deleted != True)
rows = db(query).select(htable.realm_entity,
limitby = (0, 2)
)
if len(rows) == 1:
realm_entity = rows.first().realm_entity
else:
# Ambiguous => try course organisation
ctable = s3db.hrm_course
otable = s3db.org_organisation
query = (table.id == row["id"]) & \
(ctable.id == table.course_id) & \
(otable.id == ctable.organisation_id)
org = db(query).select(otable.pe_id,
limitby = (0, 1)
).first()
if org:
realm_entity = org.pe_id
# otherwise: inherit from the person record
# Groups are owned by the user's organisation
#elif tablename in ("pr_group",):
elif tablename == "pr_group":
use_user_organisation = True
auth = current.auth
user = auth.user
if user:
if use_user_organisation:
# @ToDo - this might cause issues if the user's org is different from the realm that gave them permissions to create the Org
realm_entity = s3db.pr_get_pe_id("org_organisation",
user.organisation_id)
#elif use_user_root_organisation:
# realm_entity = s3db.pr_get_pe_id("org_organisation",
# auth.root_org())
return realm_entity
settings.auth.realm_entity = ifrc_realm_entity
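    # Illustrative examples of the lookup order above (derived from the
    # function itself, not additional configuration):
    # - an inv_inv_item record inherits its realm from its site_id FK
    #   (see realm_entity_fks)
    # - an org_facility record falls through to the user's organisation
    #   (use_user_organisation = True)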
# -------------------------------------------------------------------------
# L10n (Localization) settings
#
settings.L10n.languages = OrderedDict([
("en", "English"),
("pt-br", "Portuguese (Brazil)"),
("es", "Spanish"),
])
# Default Language
settings.L10n.default_language = "en"
# Default timezone for users
settings.L10n.timezone = "America/Bogota"
# Number formats (defaults to ISO 31-0)
# Decimal separator for numbers (defaults to ,)
settings.L10n.decimal_separator = "."
# Thousands separator for numbers (defaults to space)
settings.L10n.thousands_separator = ","
# Unsortable 'pretty' date format (for use in English)
settings.L10n.date_format = "%d-%b-%Y"
# Make last name in person/user records mandatory
#settings.L10n.mandatory_lastname = True # mother's surname
settings.L10n.mandatory_middlename = True # father's surname
# Uncomment this to Translate Layer Names
settings.L10n.translate_gis_layer = True
# Translate Location Names
settings.L10n.translate_gis_location = True
# Uncomment this for Alternate Location Names
settings.L10n.name_alt_gis_location = True
# Uncomment this to Translate Organisation Names/Acronyms
settings.L10n.translate_org_organisation = True
# Names of Orgs with specific settings
HNRC = "Honduran Red Cross"
# -------------------------------------------------------------------------
# Finance settings
#
def currencies(default):
""" RMS- and NS-specific currencies (lazy setting) """
# Currencies that are common for all NS
currencies = {"EUR" : "Euros",
"CHF" : "Swiss Francs",
"USD" : "United States Dollars",
}
# NS-specific currencies
root_org = current.auth.root_org_name()
if root_org == HNRC:
currencies["HNL"] = "Honduran Lempira"
return currencies
settings.fin.currencies = currencies
def currency_default(default):
""" NS-specific default currencies (lazy setting) """
root_org = current.auth.root_org_name()
if root_org == HNRC:
default = "HNL"
#else:
# default = "USD"
return default
settings.fin.currency_default = currency_default
def currency_represent(currency):
""" NS-specific currency represent """
if currency == "HNL":
root_org = current.auth.root_org_name()
if root_org == HNRC:
return "L"
return currency
# -------------------------------------------------------------------------
# Map Settings
# Display Resources recorded to Admin-Level Locations on the map
# @ToDo: Move into gis_config?
settings.gis.display_L0 = True
# Uncomment to display the Map Legend as a floating DIV
settings.gis.legend = "float"
# GeoNames username
settings.gis.geonames_username = "rms_dev"
# @ToDo: Lazy fn once we have NS to enable this for
# (off for HN & off by default)
settings.gis.postcode_selector = False
# -------------------------------------------------------------------------
# Use the label 'Camp' instead of 'Shelter'
#
settings.ui.camp = True
# -------------------------------------------------------------------------
# Filter Manager
#
#settings.search.filter_manager = False
# -------------------------------------------------------------------------
# Default Summary
#
settings.ui.summary = ({"common": True,
"name": "add",
"widgets": [{"method": "create"}],
},
{"name": "table",
"label": "Table",
"widgets": [{"method": "datatable"}],
},
{"name": "charts",
"label": "Report",
"widgets": [{"method": "report", "ajax_init": True}],
},
{"name": "map",
"label": "Map",
"widgets": [{"method": "map", "ajax_init": True}],
},
)
# -------------------------------------------------------------------------
# Content Management
#
#settings.cms.hide_index = True
settings.cms.richtext = True
# -------------------------------------------------------------------------
# Messaging
# Parser
#settings.msg.parser = "IFRC"
# =========================================================================
# Module Settings
# -------------------------------------------------------------------------
# Members
#
settings.member.cv_tab = True
# -------------------------------------------------------------------------
# Organisations
#
# Enable the use of Organisation Branches
settings.org.branches = True
    # Set the length of the auto-generated org/site code (default is 10)
#settings.org.site_code_len = 3
# Set the label for Sites
settings.org.site_label = "Office/Warehouse/Facility"
# Enable certain fields just for specific Organisations
#settings.org.dependent_fields = \
# {"pr_person.middle_name" : (CVTL, VNRC),
# "pr_person_details.mother_name" : (BRCS, ),
# "pr_person_details.father_name" : (ARCS, BRCS),
# "pr_person_details.grandfather_name" : (ARCS, ),
# "pr_person_details.affiliations" : (PRC, ),
# "pr_person_details.company" : (PRC, ),
# "vol_details.availability" : (VNRC, ),
# "vol_details.card" : (ARCS, ),
# "vol_volunteer_cluster.vol_cluster_type_id" : (PRC, ),
# "vol_volunteer_cluster.vol_cluster_id" : (PRC, ),
# "vol_volunteer_cluster.vol_cluster_position_id" : (PRC, ),
# }
# -------------------------------------------------------------------------
# Human Resource Management
#
    # Staff & Volunteers must be registered with an email address
settings.hrm.email_required = True
settings.hrm.mix_staff = True
# Uncomment to show the Organisation name in HR represents
settings.hrm.show_organisation = True
# Uncomment to allow HRs to have multiple Job Titles
#settings.hrm.multiple_job_titles = True
# Uncomment to have each root Org use a different Job Title Catalog
settings.hrm.org_dependent_job_titles = True
settings.hrm.staff_departments = False
settings.hrm.teams = False
# Uncomment to disable the use of HR Credentials
settings.hrm.use_credentials = False
# Uncomment to disable the use of HR Certificates
#settings.hrm.use_certificates = False
# Uncomment to filter certificates by (root) Organisation & hence not allow Certificates from other orgs to be added to a profile (except by Admin)
#settings.hrm.filter_certificates = True
# Uncomment to auto-create certificates for courses
settings.hrm.create_certificates_from_courses = "organisation_id"
settings.hrm.use_code = True
settings.hrm.use_description = None # Replaced by Medical Information
# Uncomment to enable the use of HR Education
settings.hrm.use_education = True
# Uncomment to hide Job Titles
settings.hrm.use_job_titles = False
settings.hrm.use_medical = "Medical Information"
settings.hrm.use_national_id = True
settings.hrm.use_skills = True
# Custom label for Organisations in HR module
settings.hrm.organisation_label = "National Society / Branch"
# Custom label for Top-level Organisations in HR module
settings.hrm.root_organisation_label = "National Society"
# Uncomment to consolidate tabs into a single CV
settings.hrm.cv_tab = True
settings.hrm.vol_experience = "programme"
# Uncomment to consolidate tabs into Staff Record (set to False to hide the tab)
settings.hrm.record_tab = "record"
# Use Locations for Training Events, not Facilities
settings.hrm.event_site = False
# Training Instructors are Multiple
settings.hrm.training_instructors = "multiple"
# Training Filters are Contains
settings.hrm.training_filter_and = True
settings.hrm.record_label = "Information"
# Pass marks are defined by Course
settings.hrm.course_pass_marks = True
# Work History & Missions
settings.hrm.staff_experience = "both"
# Uncomment to do a search for duplicates in the new AddPersonWidget2
settings.pr.lookup_duplicates = True
settings.pr.separate_name_fields = 3
#def dob_required(default):
# """ NS-specific dob_required (lazy setting) """
# if current.auth.override is True:
# default = False
# else:
# root_org = current.auth.root_org_name()
# if root_org == HNRC:
# default = False
# else:
# # Human Talent module for zone
# default = True
# return default
#settings.pr.dob_required = dob_required
def hrm_course_grades(default):
""" Course Grades """
default = {0: T("No Show"),
1: T("Left Early"),
#2: T("Attendance"),
8: T("Pass"),
9: T("Fail"),
}
return default
settings.hrm.course_grades = hrm_course_grades
# =========================================================================
def vol_programme_active(person_id):
"""
Whether a Volunteer counts as 'Active' based on the number of hours
they've done (both Trainings & Programmes) per month, averaged over
the last year.
            If nothing has been recorded for the last 3 months, don't penalise,
            as we assume that data entry simply hasn't been done yet.
@ToDo: This should be based on the HRM record, not Person record
- could be active with Org1 but not with Org2
"""
now = current.request.utcnow
# Time spent on Programme work
htable = current.s3db.hrm_programme_hours
query = (htable.deleted == False) & \
(htable.person_id == person_id) & \
(htable.date != None)
programmes = current.db(query).select(htable.hours,
htable.date,
orderby=htable.date)
if programmes:
# Ignore up to 3 months of records
three_months_prior = (now - datetime.timedelta(days=92))
end = max(programmes.last().date, three_months_prior.date())
last_year = end - datetime.timedelta(days=365)
# Is this the Volunteer's first year?
if programmes.first().date > last_year:
# Only start counting from their first month
start = programmes.first().date
else:
# Start from a year before the latest record
start = last_year
# Total hours between start and end
programme_hours = 0
for programme in programmes:
if programme.date >= start and programme.date <= end and programme.hours:
programme_hours += programme.hours
# Average hours per month
months = max(1, (end - start).days / 30.5)
average = programme_hours / months
# Active?
if average >= 8:
return True
return False
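# Worked example of the averaging rule above (illustrative numbers only,
# not real data): 110 programme hours recorded between 2019-01-15 (first
# record) and 2019-12-31 (last record) span 350 days ~= 11.5 months
# (350 / 30.5), giving an average of 110 / 11.5 ~= 9.6 hours/month,
# which is >= 8, so the volunteer counts as 'Active'.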
def hrm_vol_active(default):
""" Whether & How to track Volunteers as Active """
#root_org = current.auth.root_org_name()
#if root_org in (ARCS, IRCS):
# # Simple checkbox
# return True
#elif root_org in (CVTL, PMI, PRC):
# # Use formula based on hrm_programme
# return vol_programme_active
#elif root_org in (CRMADA, ):
# # Use formula based on vol_activity
# return vol_activity_active
#return False
# Use formula based on hrm_programme
return vol_programme_active
settings.hrm.vol_active = hrm_vol_active
settings.hrm.vol_active_tooltip = "A volunteer is defined as active if they've participated in an average of 8 or more hours of Program work or Trainings per month in the last year"
# Roles which are permitted to export ID cards
ID_CARD_EXPORT_ROLES = ("ORG_ADMIN", "hr_manager", "hr_assistant")
# -------------------------------------------------------------------------
# RIT
settings.deploy.team_label = "RIT"
settings.customise_deploy_home = deploy_index
# Alerts get sent to all recipients
settings.deploy.manual_recipients = False
settings.deploy.post_to_twitter = True
# -------------------------------------------------------------------------
# Projects
settings.project.assign_staff_tab = False
# Uncomment this to use settings suitable for a global/regional organisation (e.g. DRR)
settings.project.mode_3w = True
# Uncomment this to use DRR (Disaster Risk Reduction) extensions
settings.project.mode_drr = True
# Uncomment this to use Activity Types for Activities & Projects
#settings.project.activity_types = True
# Uncomment this to use Codes for projects
settings.project.codes = True
# Uncomment this to call project locations 'Communities'
#settings.project.community = True
# Uncomment this to enable Demographics in 3W projects
#settings.project.demographics = True
# Uncomment this to enable Hazards in 3W projects
settings.project.hazards = True
# Uncomment this to enable Indicators in projects
# Just HNRC
#settings.project.indicators = True
# Uncomment this to use multiple Budgets per project
settings.project.multiple_budgets = True
# Uncomment this to use multiple Organisations per project
settings.project.multiple_organisations = True
# Ondelete behaviour for ProjectPlanningModel
settings.project.planning_ondelete = "RESTRICT"
# Uncomment this to enable Programmes in projects
settings.project.programmes = True
# Uncomment this to enable Themes in 3W projects
settings.project.themes = True
# Uncomment this to customise
# Links to Filtered Components for Donors & Partners
settings.project.organisation_roles = {
1: T("Host National Society"),
2: T("Partner"),
3: T("Donor"),
#4: T("Customer"), # T("Beneficiary")?
#5: T("Supplier"),
9: T("Partner National Society"),
}
# -------------------------------------------------------------------------
# Inventory Management
# Hide Staff Management Tabs for Facilities in Inventory Module
settings.inv.facility_manage_staff = False
settings.inv.show_mode_of_transport = True
settings.inv.send_show_time_in = True
#settings.inv.collapse_tabs = True
# Uncomment if you need a simpler (but less accountable) process for managing stock levels
#settings.inv.direct_stock_edits = True
#settings.inv.org_dependent_warehouse_types = True
# Settings for HNRC:
settings.inv.stock_count = False
settings.inv.item_status = {#0: current.messages["NONE"], # Not defined yet
0: T("Good"),
1: T("Damaged"),
#1: T("Dump"),
#2: T("Sale"),
#3: T("Reject"),
#4: T("Surplus")
}
settings.inv.recv_types = {#0: current.messages["NONE"], In Shipment Types
#11: T("Internal Shipment"), In Shipment Types
32: T("Donation"),
34: T("Purchase"),
36: T("Consignment"), # Borrowed
37: T("In Transit"), # Loaning warehouse space to another agency
}
# -------------------------------------------------------------------------
# Request Management
# Uncomment to disable Inline Forms in Requests module
settings.req.inline_forms = False
settings.req.req_type = ["Stock"]
settings.req.use_commit = False
# Should Requests ask whether Transportation is required?
settings.req.ask_transport = True
settings.req.pack_values = False
# Disable Request Matching as we don't want users making requests to see what stock is available
settings.req.prompt_match = False
# Uncomment to disable Recurring Request
settings.req.recurring = False # HNRC
# =========================================================================
# Template Modules
#
settings.modules = OrderedDict([
# Core modules which shouldn't be disabled
("default", Storage(
name_nice = "RMS",
restricted = False, # Use ACLs to control access to this module
access = None, # All Users (inc Anonymous) can see this module in the default menu & access the controller
#module_type = None # This item is not shown in the menu
)),
("admin", Storage(
name_nice = T("Administration"),
#description = "Site Administration",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
#module_type = None # This item is handled separately for the menu
)),
("appadmin", Storage(
name_nice = T("Administration"),
#description = "Site Administration",
restricted = True,
#module_type = None # No Menu
)),
("errors", Storage(
name_nice = T("Ticket Viewer"),
#description = "Needed for Breadcrumbs",
restricted = False,
#module_type = None # No Menu
)),
("setup", Storage(
name_nice = T("Setup"),
#description = "WebSetup",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # No Menu
)),
("sync", Storage(
name_nice = T("Synchronization"),
#description = "Synchronization",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
#module_type = None # This item is handled separately for the menu
)),
("translate", Storage(
name_nice = T("Translation Functionality"),
#description = "Selective translation of strings based on module.",
#module_type = None,
)),
# Uncomment to enable internal support requests
("support", Storage(
name_nice = T("Support"),
#description = "Support Requests",
restricted = True,
#module_type = None # This item is handled separately for the menu
)),
("gis", Storage(
name_nice = T("Map"),
#description = "Situation Awareness & Geospatial Analysis",
restricted = True,
#module_type = 6, # 6th item in the menu
)),
("pr", Storage(
name_nice = T("Person Registry"),
#description = "Central point to record details on People",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu (access to controller is possible to all still)
#module_type = 10
)),
("org", Storage(
name_nice = T("Organizations"),
#description = 'Lists "who is doing what & where". Allows relief agencies to coordinate their activities',
restricted = True,
#module_type = 1
)),
# All modules below here should be possible to disable safely
("hrm", Storage(
name_nice = T("Staff"),
#description = "Human Resources Management",
restricted = True,
#module_type = 2,
)),
("vol", Storage(
name_nice = T("Volunteers"),
#description = "Human Resources Management",
restricted = True,
#module_type = 2,
)),
("cms", Storage(
name_nice = T("Content Management"),
#description = "Content Management System",
restricted = True,
module_type = None,
)),
("doc", Storage(
name_nice = T("Documents"),
#description = "A library of digital resources, such as photos, documents and reports",
restricted = True,
#module_type = 10,
)),
("msg", Storage(
name_nice = T("Messaging"),
#description = "Sends & Receives Alerts via Email & SMS",
restricted = True,
# The user-visible functionality of this module isn't normally required. Rather, its main purpose is to be accessed from other modules.
#module_type = None,
)),
("supply", Storage(
name_nice = T("Supply Chain Management"),
#description = "Used within Inventory Management, Request Management and Asset Management",
restricted = True,
#module_type = None, # Not displayed
)),
("inv", Storage(
name_nice = T("Warehouses"),
#description = "Receiving and Sending Items",
restricted = True,
#module_type = 4
)),
#("asset", Storage(
# name_nice = T("Assets"),
# #description = "Recording and Assigning Assets",
# restricted = True,
# #module_type = 5,
# )),
("req", Storage(
name_nice = T("Requests"),
#description = "Manage requests for supplies, assets, staff or other resources. Matches against Inventories where supplies are requested.",
restricted = True,
#module_type = 10,
)),
("project", Storage(
name_nice = T("Projects"),
#description = "Tracking of Projects, Activities and Tasks",
restricted = True,
#module_type = 2
)),
("budget", Storage(
name_nice = T("Budgets"),
#description = "Tracking of Budgets",
restricted = True,
#module_type = None
)),
#("survey", Storage(
# name_nice = T("Assessments"),
# #description = "Create, enter, and manage surveys.",
# restricted = True,
# #module_type = 5,
# )),
# Used by RIT
("event", Storage(
name_nice = T("Events"),
#description = "Events",
restricted = True,
#module_type = 10
)),
("member", Storage(
name_nice = T("Partners"),
#description = "Membership Management System",
restricted = True,
#module_type = 10,
)),
("deploy", Storage(
name_nice = T("Regional Intervention Teams"),
#description = "Alerting and Deployment of Disaster Response Teams",
restricted = True,
#module_type = 10,
)),
#("po", Storage(
# name_nice = T("Recovery Outreach"),
# #description = "Population Outreach",
# restricted = True,
# #module_type = 10,
# )),
("stats", Storage(
name_nice = T("Statistics"),
#description = "Manages statistics",
restricted = True,
#module_type = None,
)),
#("vulnerability", Storage(
# name_nice = T("Vulnerability"),
# #description = "Manages vulnerability indicators",
# restricted = True,
# #module_type = 10,
# )),
])
# -------------------------------------------------------------------------
# Functions which are local to this Template
# -------------------------------------------------------------------------
def ns_only(tablename,
fieldname = "organisation_id",
required = True,
branches = True,
updateable = True,
limit_filter_opts = True
):
"""
Function to configure an organisation_id field to be restricted to just
NS/Branch
@param required: Field is mandatory
@param branches: Include Branches
@param updateable: Limit to Orgs which the user can update
@param limit_filter_opts: Also limit the Filter options
NB If limit_filter_opts=True, apply in customise_xx_controller inside prep,
after standard_prep is run
"""
# Lookup organisation_type_id for Red Cross
db = current.db
s3db = current.s3db
ttable = s3db.org_organisation_type
try:
type_id = db(ttable.name == RED_CROSS).select(ttable.id,
limitby=(0, 1),
cache = s3db.cache,
).first().id
except AttributeError:
# No IFRC prepop done - skip (e.g. testing impacts of CSS changes in this theme)
return
# Load standard model
f = s3db[tablename][fieldname]
if limit_filter_opts:
# Find the relevant filter widget & limit its options
filter_widgets = s3db.get_config(tablename, "filter_widgets")
filter_widget = None
if filter_widgets:
from s3 import FS, S3HierarchyFilter
for w in filter_widgets:
if isinstance(w, S3HierarchyFilter) and \
w.field == "organisation_id":
filter_widget = w
break
if filter_widget is not None:
selector = FS("organisation_organisation_type.organisation_type_id")
filter_widget.opts["filter"] = (selector == type_id)
# Label
if branches:
f.label = T("National Society / Branch")
else:
f.label = T("National Society")
# Requires
# Filter by type
ltable = db.org_organisation_organisation_type
rows = db(ltable.organisation_type_id == type_id).select(ltable.organisation_id)
filter_opts = [row.organisation_id for row in rows]
auth = current.auth
s3_has_role = auth.s3_has_role
Admin = s3_has_role("ADMIN")
if branches:
if Admin:
parent = True
else:
# @ToDo: Set the represent according to whether the user can see resources of just a single NS or multiple
# @ToDo: Consider porting this into core
user = auth.user
if user:
realms = user.realms
#delegations = user.delegations
if realms:
parent = True
else:
parent = False
else:
parent = True
else:
# Keep the represent function as simple as possible
parent = False
# Exclude branches
btable = s3db.org_organisation_branch
rows = db((btable.deleted != True) &
(btable.branch_id.belongs(filter_opts))).select(btable.branch_id)
filter_opts = list(set(filter_opts) - set(row.branch_id for row in rows))
organisation_represent = s3db.org_OrganisationRepresent
represent = organisation_represent(parent=parent)
f.represent = represent
from s3 import IS_ONE_OF
requires = IS_ONE_OF(db, "org_organisation.id",
represent,
filterby = "id",
filter_opts = filter_opts,
updateable = updateable,
orderby = "org_organisation.name",
sort = True)
if not required:
from gluon import IS_EMPTY_OR
requires = IS_EMPTY_OR(requires)
f.requires = requires
if parent:
# Use hierarchy-widget
from s3 import FS, S3HierarchyWidget
# No need for parent in represent (it's a hierarchy view)
node_represent = organisation_represent(parent = False)
# Filter by type
# (no need to exclude branches - we wouldn't be here if we didn't use branches)
selector = FS("organisation_organisation_type.organisation_type_id")
f.widget = S3HierarchyWidget(lookup = "org_organisation",
filter = (selector == type_id),
represent = node_represent,
multiple = False,
leafonly = False,
)
else:
# Dropdown not Autocomplete
f.widget = None
# Comment
if (Admin or s3_has_role("ORG_ADMIN")):
# Need to do import after setting Theme
from s3layouts import S3PopupLink
from s3 import S3ScriptItem
add_link = S3PopupLink(c = "org",
f = "organisation",
vars = {"organisation_type.name": RED_CROSS},
label = T("Create National Society"),
title = T("National Society"),
)
comment = f.comment
if not comment or isinstance(comment, S3PopupLink):
f.comment = add_link
elif isinstance(comment[1], S3ScriptItem):
# Don't overwrite scripts
f.comment[0] = add_link
else:
f.comment = add_link
else:
# Not allowed to add NS/Branch
f.comment = ""
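# Example usage (matching the calls later in this file), restricting the
# organisation_id of user accounts to NS/Branch organisations:
#
#   ns_only("auth_user",
#           required = True,
#           branches = True,
#           updateable = False, # Need to see all Orgs in Registration screens
#           )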
# -------------------------------------------------------------------------
def user_org_default_filter(selector, tablename=None):
"""
Default filter for organisation_id:
* Use the user's organisation if logged-in and associated with an
organisation.
"""
auth = current.auth
user_org_id = auth.is_logged_in() and auth.user.organisation_id
if user_org_id:
return user_org_id
else:
# no default
return {}
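# e.g. user_org_default_filter("~.organisation_id") returns the id of the
# logged-in user's organisation (say 42, a hypothetical id), or {} so that
# no default filter is applied for anonymous users.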
# -------------------------------------------------------------------------
#def user_org_and_children_default_filter(selector, tablename=None):
# """
# Default filter for organisation_id:
# * Use the user's organisation if logged-in and associated with an
# organisation.
# """
# auth = current.auth
# user_org_id = auth.is_logged_in() and auth.user.organisation_id
# if user_org_id:
# db = current.db
# s3db = current.s3db
# otable = s3db.org_organisation
# org = db(otable.id == user_org_id).select(otable.pe_id,
# limitby=(0, 1)
# ).first()
# if org:
# pe_id = org.pe_id
# pe_ids = s3db.pr_get_descendants((pe_id,),
# entity_types=("org_organisation",))
# rows = db(otable.pe_id.belongs(pe_ids)).select(otable.id)
# ids = [row.id for row in rows]
# ids.append(user_org_id)
# return ids
# else:
# return user_org_id
# else:
# # no default
# return {}
# -------------------------------------------------------------------------
def customise_auth_user_controller(**attr):
"""
Customise admin/user() and default/user() controllers
"""
# Organisation needs to be an NS/Branch
ns_only("auth_user",
required = True,
branches = True,
updateable = False, # Need to see all Orgs in Registration screens
)
table = current.db.auth_user
table.first_name.label = T("Forenames")
table.last_name.label = T("Father's Surname")
return attr
settings.customise_auth_user_controller = customise_auth_user_controller
# -------------------------------------------------------------------------
def customise_deploy_alert_resource(r, tablename):
s3db = current.s3db
# Only send Alerts via Email
# @ToDo: Also send via Twitter
f = s3db[tablename].contact_method
f.readable = f.writable = False
#from s3 import S3SQLCustomForm
#crud_form = S3SQLCustomForm("mission_id",
# "subject",
# "body",
# "modified_on",
# )
#s3db.configure(tablename,
# crud_form = crud_form,
# list_fields = ["mission_id",
# "subject",
# "body",
# ],
# )
settings.customise_deploy_alert_resource = customise_deploy_alert_resource
# -------------------------------------------------------------------------
def deploy_application_onaccept(form):
"""
RIT Members should be added to the RIT Role
"""
db = current.db
s3db = current.s3db
htable = db.hrm_human_resource
ptable = db.pr_person
# Find the Person
human_resource_id = form.vars.get("human_resource_id")
if human_resource_id:
query = (htable.id == human_resource_id)
else:
table = db.deploy_application
query = (table.id == form.vars.get("id")) & \
(table.human_resource_id == htable.id)
hr = db(query).select(htable.person_id,
                      limitby=(0, 1)
                      ).first()
try:
    person_id = hr.person_id
except AttributeError:
    # No matching HR record: nothing we can do!
    return
# Do they have a User Account?
ltable = s3db.pr_person_user
query = (ptable.id == person_id) & \
(ltable.pe_id == ptable.pe_id)
link = db(query).select(ltable.user_id,
limitby=(0, 1)
).first()
if link:
# Add them to the RIT role
current.auth.s3_assign_role(link.user_id, "RIT_MEMBER")
# -------------------------------------------------------------------------
def customise_deploy_application_resource(r, tablename):
current.s3db.configure(tablename,
create_onaccept = deploy_application_onaccept,
)
settings.customise_deploy_application_resource = customise_deploy_application_resource
# -------------------------------------------------------------------------
def customise_deploy_mission_resource(r, tablename):
s3db = current.s3db
s3db[tablename].event_type_id.label = T("Disaster Type")
COUNTRY = current.messages.COUNTRY
from s3 import S3SQLCustomForm
crud_form = S3SQLCustomForm("name",
"date",
"location_id",
"event_type_id",
)
#from s3 import S3DateFilter, S3LocationFilter, S3OptionsFilter, S3TextFilter
#filter_widgets = [S3TextFilter(["name",
# "event_type_id$name",
# "location_id",
# ],
# label=T("Search")
# ),
# S3LocationFilter("location_id",
# label=COUNTRY,
# widget="multiselect",
# levels=["L0"],
# hidden=True
# ),
# S3OptionsFilter("event_type_id",
# widget="multiselect",
# hidden=True
# ),
# #S3OptionsFilter("status",
# # options=s3db.deploy_mission_status_opts,
# # hidden=True
# # ),
# S3DateFilter("date",
# hide_time=True,
# hidden=True
# ),
# ]
list_fields = ["name",
"date",
"event_type_id",
(COUNTRY, "location_id"),
(T("Responses"), "response_count"),
(T("Members Deployed"), "hrquantity"),
]
s3db.configure(tablename,
crud_form = crud_form,
list_fields = list_fields,
)
settings.customise_deploy_mission_resource = customise_deploy_mission_resource
# -------------------------------------------------------------------------
def customise_event_event_type_resource(r, tablename):
current.response.s3.crud_strings[tablename] = Storage(
label_create = T("Create Disaster Type"),
title_display = T("Disaster Type Details"),
title_list = T("Disaster Types"),
title_update = T("Edit Disaster Type Details"),
title_upload = T("Import Disaster Types"),
label_list_button = T("List Disaster Types"),
label_delete_button = T("Delete Disaster Type"),
msg_record_created = T("Disaster Type added"),
msg_record_modified = T("Disaster Type Details updated"),
msg_record_deleted = T("Disaster Type deleted"),
msg_list_empty = T("No Disaster Types currently defined"))
settings.customise_event_event_type_resource = customise_event_event_type_resource
# -------------------------------------------------------------------------
def customise_hrm_certificate_controller(**attr):
table = current.s3db.hrm_course
auth = current.auth
if auth.s3_has_role("ADMIN"):
# See all Certificates
pass
elif auth.s3_has_roles(("training_coordinator",
"training_assistant",
)):
# Only show this Center's Certificates
organisation_id = auth.user.organisation_id
current.response.s3.filter = (table.organisation_id == organisation_id) | \
(table.organisation_id == None)
# Default to this Training Center
table.organisation_id.default = organisation_id
else:
# See NS Certificates
organisation_id = auth.root_org()
current.response.s3.filter = (table.organisation_id == organisation_id) | \
(table.organisation_id == None)
# Default to this NS
table.organisation_id.default = organisation_id
return attr
settings.customise_hrm_certificate_controller = customise_hrm_certificate_controller
# -------------------------------------------------------------------------
def customise_hrm_course_controller(**attr):
table = current.s3db.hrm_course
auth = current.auth
if auth.s3_has_role("ADMIN"):
# See all Courses
pass
elif auth.s3_has_roles(("training_coordinator",
"training_assistant",
)):
# Only show this Center's courses
current.response.s3.filter = (table.organisation_id == auth.user.organisation_id) | (table.organisation_id == None)
else:
# See NS Courses
current.response.s3.filter = (table.organisation_id == auth.root_org()) | (table.organisation_id == None)
return attr
settings.customise_hrm_course_controller = customise_hrm_course_controller
# -------------------------------------------------------------------------
def customise_hrm_course_resource(r, tablename):
from gluon import IS_EMPTY_OR, IS_NOT_IN_DB
from s3 import S3SQLCustomForm
db = current.db
auth = current.auth
s3db = current.s3db
table = s3db[tablename]
# Code should be Unique
f = table.code
f.requires = IS_EMPTY_OR(IS_NOT_IN_DB(db, "hrm_course.code"))
if auth.s3_has_roles(("training_coordinator",
"training_assistant",
)):
f = table.organisation_id
f.label = T("Training Center")
f.comment = False # Don't create here
org_represent = s3db.org_OrganisationRepresent(parent=False)
f.represent = org_represent
list_fields = ["code",
"name",
]
has_role = auth.s3_has_role
if has_role("ADMIN"):
table.organisation_id.label = T("National Society / Training Center")
list_fields.insert(0, "organisation_id")
#f.readable = f.writable = True
#ttable = s3db.org_organisation_type
#try:
# type_id = db(ttable.name == "Training Center").select(ttable.id,
# limitby=(0, 1),
# ).first().id
#except:
# # No/incorrect prepop done - skip (e.g. testing impacts of CSS changes in this theme)
# pass
#else:
# ltable = s3db.org_organisation_organisation_type
# rows = db(ltable.organisation_type_id == type_id).select(ltable.organisation_id)
# filter_opts = [row.organisation_id for row in rows]
# f.requires = IS_ONE_OF(db, "org_organisation.id",
# org_represent,
# orderby = "org_organisation.name",
# sort = True,
# filterby = "id",
# filter_opts = filter_opts,
# )
elif has_role("training_coordinator"):
f.default = auth.user.organisation_id
crud_form = S3SQLCustomForm("organisation_id",
"code",
"name",
"comments",
)
s3db.configure(tablename,
crud_form = crud_form,
list_fields = list_fields,
orderby = "hrm_course.code",
)
settings.customise_hrm_course_resource = customise_hrm_course_resource
# -------------------------------------------------------------------------
#def customise_hrm_department_controller(**attr):
# # Organisation needs to be an NS/Branch
# ns_only("hrm_department",
# required = False,
# branches = False,
# )
# return attr
#settings.customise_hrm_department_controller = customise_hrm_department_controller
# -------------------------------------------------------------------------
#def emergency_contact_represent(row):
# """
# Representation of Emergency Contacts (S3Represent label renderer)
# @param row: the row
# """
# items = [row["pr_contact_emergency.name"]]
# relationship = row["pr_contact_emergency.relationship"]
# if relationship:
# items.append(" (%s)" % relationship)
# phone_number = row["pr_contact_emergency.phone"]
# if phone_number:
# items.append(": %s" % phone_number)
# return "".join(items)
# -------------------------------------------------------------------------
def customise_hrm_home():
from gluon import URL
from s3 import s3_redirect_default
has_role = current.auth.s3_has_role
len_roles = len(current.session.s3.roles)
if (len_roles <= 2) or \
(len_roles == 3 and has_role("RIT_MEMBER") and not has_role("ADMIN")):
# No specific Roles
# Go to Personal Profile
s3_redirect_default(URL(f="person"))
else:
# Bypass home page & go direct to searchable list of Staff
s3_redirect_default(URL(f="human_resource", args="summary"))
settings.customise_hrm_home = customise_hrm_home
# -------------------------------------------------------------------------
def customise_hrm_experience_resource(r, tablename):
current.response.s3.crud_strings[tablename] = Storage(
label_create = T("Add Work History"),
title_display = T("Work History Details"),
title_list = T("Work History"),
title_update = T("Edit Work History"),
label_list_button = T("List Work History"),
label_delete_button = T("Delete Work History"),
msg_record_created = T("Work History added"),
msg_record_modified = T("Work History updated"),
msg_record_deleted = T("Work History deleted"),
msg_list_empty = T("No entries currently registered"))
settings.customise_hrm_experience_resource = customise_hrm_experience_resource
# -------------------------------------------------------------------------
def hrm_human_resource_create_onaccept(form):
"""
        If the Staff/Volunteer is RC then create a user account for them with a random password
"""
db = current.db
s3db = current.s3db
form_vars = form.vars
# Call normal onaccept
s3db.hrm_human_resource_onaccept(form)
# Is the person RC?
organisation_id = form_vars.get("organisation_id")
if not organisation_id:
hr_id = form_vars.get("id")
if not hr_id:
# Nothing we can do!
current.log.warning("Cannot create user for HR as no id in the form")
return
htable = s3db.hrm_human_resource
hr = db(htable.id == hr_id).select(htable.id,
htable.person_id,
htable.type,
htable.organisation_id,
limitby = (0, 1),
).first()
try:
organisation_id = hr.organisation_id
except AttributeError:
# Nothing we can do!
current.log.warning("Cannot create user for HR %s as cannot find HR record" % hr_id)
return
else:
hr = None
ttable = s3db.org_organisation_type
ltable = s3db.org_organisation_organisation_type
query = (ttable.name == RED_CROSS) & \
(ltable.organisation_type_id == ttable.id) & \
(ltable.organisation_id == organisation_id)
RC = db(query).select(ltable.id,
limitby=(0, 1),
).first()
if not RC:
return
# Collect the Details needed
person_id = form_vars.get("person_id")
if not person_id:
if not hr:
hr_id = form_vars.get("id")
if not hr_id:
# Nothing we can do!
current.log.warning("Cannot create user for HR as no id in the form")
return
htable = s3db.hrm_human_resource
hr = db(htable.id == hr_id).select(htable.id,
htable.person_id,
htable.type,
limitby = (0, 1),
).first()
try:
person_id = hr.person_id
except AttributeError:
current.log.warning("Cannot create user for HR %s as cannot find HR record" % hr_id)
return
ptable = s3db.pr_person
person = db(ptable.id == person_id).select(ptable.id,
ptable.first_name,
ptable.middle_name, # NB We use middle_name for User in RMS Americas!
ptable.pe_id,
limitby = (0, 1),
).first()
try:
pe_id = person.pe_id
except AttributeError:
# Nothing we can do!
return
ctable = s3db.pr_contact
query = (ctable.pe_id == pe_id) & \
(ctable.contact_method == "EMAIL")
contact = db(query).select(ctable.value,
limitby = (0, 1),
).first()
try:
email = contact.value
except AttributeError:
# Nothing we can do!
hr_id = form_vars.get("id")
current.log.warning("Cannot create user for HR %s as cannot find Email" % hr_id)
return
hr_type = form_vars.get("type")
if not hr_type:
if not hr:
hr_id = form_vars.get("id")
if not hr_id:
# Nothing we can do!
current.log.warning("Cannot create user for HR as no id in the form")
return
htable = s3db.hrm_human_resource
hr = db(htable.id == hr_id).select(htable.id,
htable.type,
limitby = (0, 1),
).first()
try:
hr_type = str(hr.type)
except AttributeError:
# Nothing we can do!
current.log.warning("Cannot create user for HR %s as cannot find HR record" % hr_id)
return
if hr_type == "1":
link_user_to = "staff"
else:
link_user_to = "volunteer"
# This field has been manually added to the form
language = current.request.post_vars.get("language")
auth = current.auth
# Generate a password
password, crypted = auth.s3_password(8)
# Create User
user = Storage(organisation_id = organisation_id,
language = language,
first_name = person.first_name,
last_name = person.middle_name, # NB We use middle_name for User in RMS Americas!
email = email,
link_user_to = link_user_to,
password = str(crypted),
)
#user = auth.get_or_create_user(user, login=False)
user_id = db.auth_user.insert(**user)
# Set the HR record to be owned by this user
if hr:
hr.update_record(owned_by_user=user_id)
else:
hr_id = form_vars.get("id")
db(s3db.hrm_human_resource.id == hr_id).update(owned_by_user=user_id)
# Set the Person record to be owned by this user
person.update_record(owned_by_user=user_id)
# Cascade down to components
# pr_address
atable = s3db.pr_address
db(atable.pe_id == pe_id).update(owned_by_user=user_id)
# pr_contact
db(ctable.pe_id == pe_id).update(owned_by_user=user_id)
# Link to Person so that we find this in the 'Link'
ltable = s3db.pr_person_user
ltable.insert(pe_id = pe_id,
user_id = user_id,
)
# Approve User, link to Person & send them a Welcome email
user.update(id = user_id)
messages = auth.messages
messages.lock_keys = False
messages.welcome_email = \
"""Welcome to %(system_name)s
- You can start using %(system_name)s at: %(url)s
- Your password is: %(password)s
- To edit your profile go to: %(url)s%(profile)s
Thank you"""
messages.lock_keys = True
auth.s3_approve_user(user, password=password)
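# Summary of the flow above: generate a random 8-character password,
# insert the auth_user record directly, hand ownership of the HR, Person,
# Address & Contact records to the new user, link the Person to the user
# account, then approve the user, which sends the Welcome email containing
# the plaintext password.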
# -------------------------------------------------------------------------
def customise_hrm_insurance_resource(r, tablename):
table = current.s3db.hrm_insurance
table.type.default = "HEALTH"
table.insurance_number.label = T("Affiliate Number")
table.phone.label = T("Emergency Number")
table.insurer.label = "%s / %s" % (T("Insurance Company"),
T("Social Work or Prepaid"),
)
settings.customise_hrm_insurance_resource = customise_hrm_insurance_resource
# -------------------------------------------------------------------------
def hrm_human_resource_onvalidation(form):
"""
Check that the Organization ID is unique per NS
"""
# Read Code
form_vars_get = form.vars.get
code = form_vars_get("code")
if code is None:
return
db = current.db
s3db = current.s3db
# Lookup Root Org
organisation_id = form_vars_get("organisation_id")
otable = s3db.org_organisation
root_org = db(otable.id == organisation_id).select(otable.root_organisation,
limitby = (0, 1)
).first()
root_organisation = root_org.root_organisation
# Check for another HR in the same NS with same code
htable = s3db.hrm_human_resource
query = (htable.code == code) & \
(htable.organisation_id == otable.id) & \
(otable.root_organisation == root_organisation)
human_resource_id = form_vars_get("id")
if human_resource_id:
# Update Form: Skip our own record
query &= (htable.id != human_resource_id)
match = db(query).select(htable.id,
limitby = (0, 1)
).first()
if match:
# Error
form.errors["code"] = current.T("Organization ID already in use")
return
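# e.g. if an HR record in one branch of an NS already has code "123",
# saving another HR record with code "123" anywhere under the same NS
# fails validation with "Organization ID already in use" (hypothetical
# code value).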
# -------------------------------------------------------------------------
def customise_hrm_human_resource_resource(r, tablename):
# Organization ID needs to be unique per NS
current.s3db.configure(tablename,
onvalidation = hrm_human_resource_onvalidation,
)
settings.customise_hrm_human_resource_resource = customise_hrm_human_resource_resource
# -------------------------------------------------------------------------
def customise_hrm_human_resource_controller(**attr):
#controller = current.request.controller
#if controller != "deploy":
# # Default Filter
# from s3 import s3_set_default_filter
# s3_set_default_filter("~.organisation_id",
# user_org_and_children_default_filter,
# tablename = "hrm_human_resource")
s3 = current.response.s3
# Enable scalability-optimized strategies
settings.base.bigtable = True
if current.request.function == "trainee":
EXTERNAL = True
else:
EXTERNAL = False
def add_language(form):
from gluon import LABEL, OPTION, SELECT
from s3 import s3_addrow
formstyle = settings.get_ui_formstyle()
language_opts = [OPTION(T("Spanish"), _value="es", _selected="selected"),
OPTION(T("French"), _value="fr"),
OPTION(T("English"), _value="en"),
]
s3_addrow(form,
LABEL("%s:" % T("Language"),
_id = "auth_user_language__label",
_for = "auth_user_language",
),
SELECT(_id = "auth_user_language",
_name = "language",
*language_opts
),
"",
formstyle,
"auth_user_language__row",
position = 3,
)
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
if not result:
return False
from s3 import FS
db = current.db
s3db = current.s3db
auth = current.auth
resource = r.resource
table = r.table
if EXTERNAL:
f = table.organisation_id
f.label = T("Organization")
# Organisation cannot be an NS/Branch
# Lookup organisation_type_id for Red Cross
ttable = s3db.org_organisation_type
type_ids = db(ttable.name.belongs((RED_CROSS, "Training Center"))).select(ttable.id,
limitby = (0, 2),
cache = s3db.cache,
)
if type_ids:
from s3 import IS_ONE_OF
ltable = db.org_organisation_organisation_type
type_ids = [t.id for t in type_ids]
rows = db(ltable.organisation_type_id.belongs(type_ids)).select(ltable.organisation_id)
not_filter_opts = [row.organisation_id for row in rows]
f.requires = IS_ONE_OF(db, "org_organisation.id",
f.represent,
not_filterby = "id",
not_filter_opts = not_filter_opts,
updateable = True,
orderby = "org_organisation.name",
sort = True)
resource.add_filter(~FS("organisation_id").belongs(not_filter_opts))
# Find the relevant filter widget & limit its options
filter_widgets = s3db.get_config("hrm_human_resource", "filter_widgets")
filter_widget = None
if filter_widgets:
from s3 import S3HierarchyFilter
for w in filter_widgets:
if isinstance(w, S3HierarchyFilter) and \
w.field == "organisation_id":
filter_widget = w
break
if filter_widget is not None:
filter_widget.opts["filter"] = (~FS("id").belongs(not_filter_opts))
else:
otable = s3db.org_organisation
otable.root_organisation.label = T("National Society")
# Organisation needs to be an NS/Branch
ns_only("hrm_human_resource",
required = True,
branches = True,
# default
#limit_filter_opts = True,
)
export_formats = list(settings.get_ui_export_formats())
if r.method in ("create", "summary", None):
# Provide a default Organization ID
organisation_id = auth.user.organisation_id
if organisation_id:
org = db(otable.id == organisation_id).select(otable.root_organisation,
limitby = (0, 1)
).first()
root_organisation_id = org.root_organisation
f = table.code
query = (otable.root_organisation == root_organisation_id) & \
(otable.id == table.organisation_id)
last_code = db(query).select(f,
                             limitby = (0, 1),
                             orderby = ~f
                             ).first()
# Guard against there being no HR records for this NS yet
last_code = last_code.code if last_code else None
if last_code:
    f.default = int(last_code) + 1
else:
    f.default = 1
if not r.id:
# Filter to just RC people
resource.add_filter(FS("organisation_id$organisation_type.name") == RED_CROSS)
resource.configure(create_onaccept = hrm_human_resource_create_onaccept,
form_postp = add_language,
)
# Custom list_fields
list_fields = [(T("Full Name"), "person_id"),
"organisation_id",
(T("Program"), "person_id$hours.programme_id"),
(T("National ID"), "person_id$national_id.value"),
"code",
(T("Email"), "email.value"),
(settings.get_ui_label_mobile_phone(), "phone.value"),
]
r.resource.configure(list_fields = list_fields)
# Bind method for signature list export + add export icon
from templates.RMSAmericas.siglist import HRSignatureList
s3db.set_method("hrm", "human_resource",
method = "siglist",
action = HRSignatureList,
)
export_formats.append(("siglist.pdf", "fa fa-list", T("Export Signature List")))
s3.formats["siglist.pdf"] = r.url(method="siglist")
if auth.s3_has_roles(ID_CARD_EXPORT_ROLES):
if r.representation == "card":
# Configure ID card layout
from templates.RMSAmericas.idcards import IDCardLayout
resource.configure(pdf_card_layout = IDCardLayout)
if not r.id and not r.component:
# Add export-icon for ID cards
export_formats.append(("card", "fa fa-id-card", T("Export ID Cards")))
s3.formats["card"] = r.url(method="")
settings.ui.export_formats = export_formats
if not auth.s3_has_role("ADMIN") and \
auth.s3_has_roles(("training_coordinator", "training_assistant")):
# Filter People to just those trained by this Reference Center
resource.add_filter(FS("training.training_event_id$organisation_id") == auth.user.organisation_id)
# Default to Volunteers
table.type.default = 2
# Hide Venues from the list of Offices
from gluon import IS_EMPTY_OR
ttable = s3db.org_facility_type
ltable = s3db.org_site_facility_type
query = (ltable.facility_type_id == ttable.id) & \
(ttable.name == "Venue")
venues = db(query).select(ltable.site_id)
venues = [v.site_id for v in venues]
stable = s3db.org_site
dbset = db(~stable.site_id.belongs(venues))
f = table.site_id
new_requires = f.requires.other
new_requires.dbset = dbset
f.requires = IS_EMPTY_OR(new_requires)
table = s3db.pr_person
table.first_name.label = T("Forenames")
table.middle_name.label = T("Father's Surname")
table.last_name.label = T("Mother's Surname")
# For the filter
s3db.hrm_competency.skill_id.label = T("Language")
return True
s3.prep = custom_prep
# Custom postp
standard_postp = s3.postp
def custom_postp(r, output):
# Call standard postp
if callable(standard_postp):
output = standard_postp(r, output)
if not EXTERNAL and \
r.method in (None, "create") and \
isinstance(output, dict):
form = output.get("form")
if form:
add_language(form)
return output
s3.postp = custom_postp
return attr
settings.customise_hrm_human_resource_controller = customise_hrm_human_resource_controller
# -------------------------------------------------------------------------
def customise_hrm_job_title_resource(r, tablename):
s3db = current.s3db
f = s3db.hrm_job_title.type
f.default = 3 # Both
#f.readable = f.writable = False
label = T("Position")
label_create = T("Create Position")
current.response.s3.crud_strings[tablename] = Storage(
label_create = label_create,
title_display = T("Position Details"),
title_list = T("Position Catalog"),
title_update = T("Edit Position"),
title_upload = T("Import Positions"),
label_list_button = T("List Positions"),
label_delete_button = T("Delete Position"),
msg_record_created = T("Position added"),
msg_record_modified = T("Position updated"),
msg_record_deleted = T("Position deleted"),
msg_list_empty = T("Currently no entries in the catalog"))
from s3layouts import S3PopupLink
f = s3db.hrm_job_title_id.attr
f.label = label
f.comment = S3PopupLink(c = "hrm",
f = "job_title",
label = label_create,
title = label,
)
settings.customise_hrm_job_title_resource = customise_hrm_job_title_resource
# -------------------------------------------------------------------------
def customise_hrm_job_title_controller(**attr):
s3 = current.response.s3
# Organisation needs to be an NS
ns_only("hrm_job_title",
required = False,
branches = False,
)
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
    result = standard_prep(r)
else:
    result = True
return result
s3.prep = custom_prep
return attr
settings.customise_hrm_job_title_controller = customise_hrm_job_title_controller
# -------------------------------------------------------------------------
def customise_hrm_programme_controller(**attr):
table = current.s3db.hrm_programme
# Organisation needs to be an NS/Branch
ns_only("hrm_programme",
required = False,
branches = False,
)
# non-Admins should only see programmes for their NS
auth = current.auth
if not auth.s3_has_role("ADMIN"):
current.response.s3.filter = (table.organisation_id == auth.root_org())
f = table.name_long
f.readable = f.writable = False
return attr
settings.customise_hrm_programme_controller = customise_hrm_programme_controller
# -------------------------------------------------------------------------
def customise_hrm_programme_hours_controller(**attr):
# Default Filter
from s3 import s3_set_default_filter
s3_set_default_filter("~.person_id$human_resource.organisation_id",
user_org_default_filter,
tablename = "hrm_programme_hours")
attr["csv_template"] = ("../../themes/RMSAmericas/formats", "hrm_programme_hours")
return attr
settings.customise_hrm_programme_hours_controller = customise_hrm_programme_hours_controller
# -------------------------------------------------------------------------
def skip_create(deduplicate):
""" Decorator for deduplicators to prevent creation of new records """
def wrapped(item):
if callable(deduplicate):
deduplicate(item)
item.strategy = [item.METHOD.UPDATE]
return wrapped
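# Example (a sketch): wrapping an existing deduplicator so that an import
# can only update matched records, never create new ones:
#
#   s3db.configure("pr_person",
#                  deduplicate = skip_create(s3db.get_config("pr_person", "deduplicate")),
#                  )
#
# After wrapping, item.strategy is restricted to METHOD.UPDATE, so import
# rows without an existing match are skipped rather than inserted.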
def customise_hrm_programme_hours_resource(r, tablename):
from s3 import S3SQLCustomForm
s3db = current.s3db
phtable = s3db.hrm_programme_hours
current.response.s3.crud_strings[tablename] = Storage(
label_create = T("Add Hours of Service"),
title_display = T("Hours Details"),
title_list = T("Hours of Service"),
title_update = T("Edit Hours"),
label_list_button = T("List Hours"),
label_delete_button = T("Delete Hours"),
msg_record_created = T("Hours added"),
msg_record_modified = T("Hours updated"),
msg_record_deleted = T("Hours deleted"),
msg_list_empty = T("Currently no hours recorded"))
# Show new custom fields
phtable.event.readable = phtable.event.writable = True
phtable.place.readable = phtable.place.writable = True
# Hide old fields so they don't appear in list_fields in hrm_Record
#phtable.programme_id.readable = phtable.programme_id.writable = False
phtable.job_title_id.readable = phtable.job_title_id.writable = False
crud_form = S3SQLCustomForm("date",
"programme_id",
"place",
"event",
"hours",
)
# Only visible in hrm_Record which controls list_fields itself
#list_fields = ["date",
# "programme_id",
# "place",
# "event",
# "training_id$training_event_id$location_id",
# "training_id$training_event_id$course_id",
# "hours",
# ]
configure = s3db.configure
configure("hrm_programme_hours",
crud_form = crud_form,
#list_fields = list_fields,
)
# Prevent create during imports
get_config = s3db.get_config
configure("pr_person",
deduplicate = skip_create(get_config("pr_person", "deduplicate")),
)
configure("org_organisation",
deduplicate = skip_create(get_config("org_organisation", "deduplicate")),
)
configure("hrm_programme",
deduplicate = skip_create(get_config("hrm_programme", "deduplicate")),
)
settings.customise_hrm_programme_hours_resource = customise_hrm_programme_hours_resource
# -------------------------------------------------------------------------
def customise_hrm_skill_resource(r, tablename):
#label = T("Language")
label_create = T("Create Language")
current.response.s3.crud_strings[tablename] = Storage(
label_create = label_create,
title_display = T("Language Details"),
title_list = T("Language Catalog"),
title_update = T("Edit Language"),
label_list_button = T("List Languages"),
label_delete_button = T("Delete Language"),
msg_record_created = T("Language added"),
msg_record_modified = T("Language updated"),
msg_record_deleted = T("Language deleted"),
msg_list_empty = T("Currently no entries in the catalog"))
# No use, since we cannot be sure this runs before the hrm_competency table is loaded
#from s3layouts import S3PopupLink
#f = current.s3db.hrm_skill_id.attr
#f.label = label
#f.comment = S3PopupLink(c = "hrm",
# f = "skill",
# label = label_create,
# title = label,
# )
settings.customise_hrm_skill_resource = customise_hrm_skill_resource
# -------------------------------------------------------------------------
def customise_hrm_competency_resource(r, tablename):
current.response.s3.crud_strings[tablename] = Storage(
label_create = T("Add Language"),
title_display = T("Language Details"),
title_list = T("Languages"),
title_update = T("Edit Language"),
label_list_button = T("List Languages"),
label_delete_button = T("Delete Language"),
msg_record_created = T("Language added"),
msg_record_modified = T("Language updated"),
msg_record_deleted = T("Language deleted"),
msg_list_empty = T("No entries currently registered"))
label = T("Language")
from s3layouts import S3PopupLink
f = current.s3db.hrm_competency.skill_id
f.label = label
f.comment = S3PopupLink(c = "hrm",
f = "skill",
label = T("Create Language"),
title = label,
)
settings.customise_hrm_competency_resource = customise_hrm_competency_resource
# -------------------------------------------------------------------------
def hrm_training_onaccept(form):
"""
Add People to the RIT Alert List if they have passed the RIT course
"""
db = current.db
s3db = current.s3db
form_vars = form.vars
# Lookup full record
table = db.hrm_training
record = db(table.id == form_vars.id).select(table.id,
table.person_id,
table.course_id,
table.grade,
limitby=(0, 1)).first()
try:
course_id = record.course_id
except AttributeError:
current.log.error("Cannot find Training record")
return
# Lookup the RIT Course ID
ctable = db.hrm_course
row = db(ctable.name == "Regional Intervention Teams").select(ctable.id,
cache = s3db.cache,
limitby=(0, 1)
).first()
try:
rit_course_id = row.id
except AttributeError:
current.log.error("Cannot find RIT Course: Prepop not done?")
return
if course_id != rit_course_id:
# Nothing to do
return
if record.grade != 8:
# Not passed: Nothing to do
return
# Is person already a RIT Member?
person_id = record.person_id
htable = s3db.hrm_human_resource
hr = db(htable.person_id == person_id).select(htable.id,
limitby=(0, 1)
).first()
try:
human_resource_id = hr.id
except AttributeError:
current.log.error("Cannot find Human Resource record")
return
dtable = s3db.deploy_application
exists = db(dtable.human_resource_id == human_resource_id).select(dtable.id,
limitby=(0, 1)
).first()
if not exists:
# Add them to the list
dtable.insert(human_resource_id = human_resource_id)
# Add them to the RIT role
ltable = s3db.pr_person_user
ptable = db.pr_person
query = (ptable.id == person_id) & \
(ltable.pe_id == ptable.pe_id)
link = db(query).select(ltable.user_id,
limitby=(0, 1)
).first()
if link:
current.auth.s3_assign_role(link.user_id, "RIT_MEMBER")
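# e.g. saving a training record for the "Regional Intervention Teams"
# course with grade 8 ("Pass") adds the person to the RIT alert list
# (deploy_application) and, if they have a user account, assigns them the
# RIT_MEMBER role.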
# -------------------------------------------------------------------------
def hrm_training_postimport(import_info):
"""
Create Users for Persons created
"""
training_ids = import_info["created"]
if not training_ids:
# No new people created
return
db = current.db
s3db = current.s3db
# Find all the Persons
ttable = s3db.hrm_training
ptable = s3db.pr_person
query = (ttable.id.belongs(training_ids)) & \
(ttable.person_id == ptable.id)
trainings = db(query).select(ptable.pe_id)
person_pe_ids = {p.pe_id for p in trainings}
if not person_pe_ids:
# No people?
return
# Remove those with a User Account
ltable = s3db.pr_person_user
users = db(ltable.pe_id.belongs(person_pe_ids)).select(ltable.pe_id)
user_pe_ids = [u.pe_id for u in users]
discard = person_pe_ids.discard
for pe_id in user_pe_ids:
discard(pe_id)
if not person_pe_ids:
# Nobody without a User Account already
return
# Read Person Details
ctable = s3db.pr_contact
dtable = s3db.pr_person_details
htable = s3db.hrm_human_resource
left = [ctable.on((ctable.pe_id == ptable.pe_id) & \
(ctable.contact_method == "EMAIL")
),
dtable.on(dtable.person_id == ptable.id),
htable.on(htable.person_id == ptable.id),
]
persons = db(ptable.pe_id.belongs(person_pe_ids)).select(ptable.id,
ptable.first_name,
# RMSAmericas uses Apellido Paterno for Last Name
ptable.middle_name,
#ptable.last_name,
ctable.value,
dtable.language,
htable.type,
htable.organisation_id,
left=left,
)
auth = current.auth
utable = db.auth_user
create_user = utable.insert
approve_user = auth.s3_approve_user
cert_table = s3db.hrm_certification
# For each Person
for p in persons:
person = p["pr_person"]
hr = p["hrm_human_resource"]
if hr.type == 1:
link_user_to = "staff"
else:
link_user_to = "volunteer"
# Set random password
password, crypted = auth.s3_password(8)
# Create a User Account
user = Storage(first_name = person.first_name,
last_name = person.middle_name,
#last_name = person.last_name,
email = p["pr_contact.value"],
language = p["pr_person_details.language"],
password = crypted,
organisation_id = hr.organisation_id,
link_user_to = link_user_to,
)
user_id = create_user(**user)
# Standard Approval (inc Link to Person/HR and Send out Welcome Email with password)
user["id"] = user_id
approve_user(user, password)
# Fixup permissions
person_id = person.id
db(htable.person_id == person_id).update(owned_by_user = user_id)
db(ttable.person_id == person_id).update(owned_by_user = user_id)
db(cert_table.person_id == person_id).update(owned_by_user = user_id)
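# Note: imported participants without a user account get one auto-created
# here with a random password; s3_approve_user then links it to the
# Person/HR record and sends the Welcome email.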
# -------------------------------------------------------------------------
def customise_hrm_training_controller(**attr):
s3 = current.response.s3
# Default Filter
#from s3 import s3_set_default_filter
#s3_set_default_filter("~.person_id$human_resource.organisation_id",
# user_org_default_filter,
# tablename = "hrm_training")
auth = current.auth
if not auth.s3_has_role("ADMIN") and \
auth.s3_has_roles(("training_coordinator", "training_assistant")):
TC = True
# Filter Trainings to just those done by this Reference Center
from s3 import FS
query = FS("~.training_event_id$organisation_id") == auth.user.organisation_id
s3.filter = query
else:
TC = False
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
if not result:
return False
if r.method == "import":
# HR records may be created via importing them as participants
s3db = current.s3db
# Default to Volunteers
s3db.hrm_human_resource.type.default = 2
if TC:
# Doesn't work, as the email is only created after the human_resource record
#s3db.configure("hrm_human_resource",
# create_onaccept = hrm_human_resource_create_onaccept,
# )
# Create User Accounts for those Persons without them
s3db.configure("hrm_training",
postimport = hrm_training_postimport,
)
return True
s3.prep = custom_prep
return attr
settings.customise_hrm_training_controller = customise_hrm_training_controller
# -------------------------------------------------------------------------
def customise_hrm_training_resource(r, tablename):
s3db = current.s3db
table = s3db.hrm_training
f = table.grade
f.readable = f.writable = True
f = table.qualitative_feedback
f.readable = f.writable = True
s3db.hrm_certification.number.label = T("Registration Number")
from s3 import S3SQLCustomForm, S3TextFilter, S3OptionsFilter, S3DateFilter
if r.function == "person":
crud_form = S3SQLCustomForm("course_id",
"end_date",
"grade",
"grade_details",
"qualitative_feedback",
"certification_from_training.number",
)
else:
crud_form = S3SQLCustomForm("person_id",
"end_date",
"grade",
"grade_details",
"qualitative_feedback",
"certification_from_training.number",
)
filter_widgets = [
S3TextFilter(["person_id$first_name",
"person_id$last_name",
"course_id$name",
"comments",
],
label = T("Search"),
comment = T("You can search by trainee name, course name or comments. You may use % as wildcard. Press 'Search' without input to list all trainees."),
_class="filter-search",
),
S3OptionsFilter("training_event_id$site_id",
label = T("Country"),
represent = s3db.org_SiteRepresent(show_type=False),
),
S3OptionsFilter("person_id$human_resource.organisation_id",
label = T("Organization"),
),
S3OptionsFilter("course_id",
),
S3OptionsFilter("grade",
),
S3DateFilter("date",
hide_time=True,
),
]
default_onaccept = s3db.get_config(tablename, "onaccept")
if default_onaccept and not isinstance(default_onaccept, list): # Catch running twice
onaccept = [default_onaccept,
hrm_training_onaccept,
]
else:
onaccept = hrm_training_onaccept
s3db.configure(tablename,
crud_form = crud_form,
filter_widgets = filter_widgets,
onaccept = onaccept,
)
settings.customise_hrm_training_resource = customise_hrm_training_resource
# -------------------------------------------------------------------------
def customise_hrm_training_event_resource(r, tablename):
from s3 import IS_ONE_OF, S3SQLCustomForm, S3SQLInlineComponent
db = current.db
auth = current.auth
s3db = current.s3db
table = s3db.hrm_training_event
org_represent = s3db.org_OrganisationRepresent(parent=False)
f = table.organisation_id
f.label = T("Training Center")
f.comment = False # Don't create here
f.represent = org_represent
list_fields = ["organisation_id",
"course_id",
#"site_id",
"location_id",
"start_date",
"training_event_instructor.person_id",
"comments",
]
if auth.s3_has_role("ADMIN"):
#f.readable = f.writable = True
ttable = s3db.org_organisation_type
try:
type_id = db(ttable.name == "Training Center").select(ttable.id,
limitby=(0, 1),
).first().id
except AttributeError:
# No/incorrect prepop done - skip (e.g. testing impacts of CSS changes in this theme)
pass
else:
ltable = s3db.org_organisation_organisation_type
rows = db(ltable.organisation_type_id == type_id).select(ltable.organisation_id)
filter_opts = [row.organisation_id for row in rows]
f.requires = IS_ONE_OF(db, "org_organisation.id",
org_represent,
orderby = "org_organisation.name",
sort = True,
filterby = "id",
filter_opts = filter_opts,
)
elif auth.s3_has_roles(("training_coordinator", "training_assistant")):
organisation_id = auth.user.organisation_id
f.default = organisation_id
f.writable = False
list_fields.pop(0) # organisation_id
table.course_id.requires.set_filter(filterby = "organisation_id",
filter_opts = [organisation_id],
)
# Hours are Optional
from gluon import IS_EMPTY_OR
table.hours.requires = IS_EMPTY_OR(table.hours)
#site_represent = S3Represent(lookup = "org_site")
# Filter list of Venues
#f = table.site_id
#f.default = None
#f.label = T("Country")
#f.represent = site_represent
#ftable = s3db.org_facility
#ltable = s3db.org_site_facility_type
#ttable = s3db.org_facility_type
#query = (ftable.deleted == False) & \
# (ftable.site_id == ltable.site_id) & \
# (ltable.facility_type_id == ttable.id) & \
# (ttable.name == "Venue")
#rows = db(query).select(ftable.site_id)
#filter_opts = [row.site_id for row in rows]
#f.requires = IS_ONE_OF(db, "org_site.site_id",
# site_represent,
# filterby="site_id",
# filter_opts=filter_opts,
# )
# Multiple Instructors
crud_form = S3SQLCustomForm("organisation_id",
# @ToDo: Filter Courses by Training Center
"course_id",
#"site_id",
"location_id",
"start_date",
"end_date",
S3SQLInlineComponent("training_event_instructor",
label = T("Instructor"),
fields = [("", "person_id")],
# @ToDo: Filter to HRMs (this should be done through AC?)
#filterby = ({"field": "type",
# "options": 3,
# },),
),
"comments",
)
s3db.configure(tablename,
crud_form = crud_form,
list_fields = list_fields,
)
settings.customise_hrm_training_event_resource = customise_hrm_training_event_resource
# -------------------------------------------------------------------------
def hrm_training_event_report_pdf_export(r, **attr):
"""
Generate a PDF Export of a training Event Report
"""
from s3 import s3_fullname, s3_str
record = r.record
T = current.T
db = current.db
s3db = current.s3db
current_language = T.accepted_language
if current_language == "es":
# Use a different source string so that Spanish picks up its own translation
title = s3_str(T("Training Event Report"))
else:
title = s3_str(T("Training Report"))
if record.course_id:
course_name = s3db.hrm_training_event.course_id.represent(record.course_id)
title = "%s: %s" % (title, course_name)
def callback(r):
from gluon.html import DIV, TABLE, TD, TH, TR
rtable = s3db.hrm_training_event_report
date_represent = rtable.date.represent
org_represent = s3db.org_OrganisationRepresent(parent = False,
acronym = False)
# Logo
otable = db.org_organisation
org_id = record.organisation_id
org = db(otable.id == org_id).select(otable.name,
otable.acronym, # Present for consistent cache key
otable.logo,
limitby=(0, 1),
).first()
#if settings.get_L10n_translate_org_organisation():
#org_name = org_represent(org_id)
#else:
# org_name = org.name
logo = org.logo
if logo:
logo = s3db.org_organisation_logo(org)
elif current.deployment_settings.get_org_branches():
root_org = current.cache.ram(
# Common key with auth.root_org
"root_org_%s" % org_id,
lambda: s3db.org_root_organisation(org_id),
time_expire=120
)
logo = s3db.org_organisation_logo(root_org)
# Read the report
report = db(rtable.training_event_id == r.id).select(limitby = (0, 1),
).first()
# Header
header = TABLE(TR(TH("%s:" % T("Name")),
TD(s3_fullname(report.person_id)),
TH("%s:" % T("Training Date")),
TD(date_represent(record.start_date)),
),
TR(TH("%s:" % T("Position")),
TD(rtable.job_title_id.represent(report.job_title_id)),
TH("%s:" % T("Finance Codes")),
TD(report.code),
),
TR(TH("%s:" % T("National Society Visited")),
TD(org_represent(report.organisation_id)),
TH("%s:" % T("Report Date")),
TD(date_represent(report.date)),
),
TR(TH("%s:" % T("Training Purpose")),
TD(report.purpose,
_colspan = 3,
),
),
)
# Main
main = TABLE(TR(TH("1. %s" % T("Objectives"))),
TR(TD(report.objectives)),
TR(TH("2. %s" % T("Methodology"))),
TR(TD(report.methodology)),
TR(TH("3. %s" % T("Implemented Actions"))),
TR(TD(report.actions)),
TR(TH("4. %s" % T("About the participants"))),
TR(TD(report.participants)),
TR(TH("5. %s" % T("Results and Lessons Learned"))),
TR(TD(report.results)),
TR(TH("6. %s" % T("Follow-up Required"))),
TR(TD(report.followup)),
TR(TH("7. %s" % T("Additional relevant information"))),
TR(TD(report.additional)),
TR(TH("8. %s" % T("General Comments"))),
TR(TD(report.comments)),
)
output = DIV(TABLE(TR(TD(logo),
#TD(org_name), # This isn't rtl-proof; check vol_service_record for how to handle that if required
)),
TABLE(TR(TD(title))),
TABLE(header),
TABLE(main),
)
return output
attr["rheader"] = None
from s3.s3export import S3Exporter
exporter = S3Exporter().pdf
pdf_title = title
return exporter(r.resource,
request = r,
method = "list",
pdf_title = pdf_title,
pdf_table_autogrow = "B",
pdf_callback = callback,
**attr
)
# -------------------------------------------------------------------------
def customise_hrm_training_event_controller(**attr):
T = current.T
auth = current.auth
s3db = current.s3db
s3 = current.response.s3
if not auth.s3_has_role("ADMIN") and \
auth.s3_has_roles(("training_coordinator", "training_assistant")):
# Filter People to just those trained by this Reference Center
from s3 import FS
query = FS("~.organisation_id") == auth.user.organisation_id
s3.filter = query
s3db.set_method("hrm", "training_event",
method = "report_pdf_export",
action = hrm_training_event_report_pdf_export,
)
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if r.component_name == "training_event_report" and r.component_id:
from gluon.html import A, DIV, URL
from s3 import ICON
s3.rfooter = DIV(A(ICON("print"),
" ",
T("PDF Report"),
_href=URL(args=[r.id, "report_pdf_export"]),#, extension="pdf"),
_class="action-btn",
),
)
return result
s3.prep = custom_prep
attr["rheader"] = lambda r: \
s3db.hrm_rheader(r, tabs=[(T("Training Event Details"), None),
(T("Participants"), "participant"),
(T("Report"), "training_event_report"),
])
return attr
settings.customise_hrm_training_event_controller = customise_hrm_training_event_controller
# -------------------------------------------------------------------------
def customise_hrm_training_event_report_resource(r, tablename):
s3db = current.s3db
table = s3db.hrm_training_event_report
table.person_id.default = current.auth.s3_logged_in_person()
table.person_id.label = T("Name")
ns_only("hrm_training_event_report",
required = False,
branches = False,
updateable = False,
)
table.organisation_id.label = T("National Society Visited")
table.code.label = T("Finance Codes")
from s3 import S3SQLCustomForm, S3SQLInlineComponent
crud_form = S3SQLCustomForm("person_id",
"job_title_id",
"organisation_id",
"purpose",
"code",
"date",
(("1. %s" % table.objectives.label), "objectives"),
(("2. %s" % table.methodology.label), "methodology"),
(("3. %s" % table.actions.label), "actions"),
(("4. %s" % table.participants.label), "participants"),
(("5. %s" % table.results.label), "results"),
(("6. %s" % table.followup.label), "followup"),
(("7. %s" % table.additional.label), "additional"),
(("8. %s" % table.comments.label), "comments"),
S3SQLInlineComponent("document",
label = "9. %s" % T("Supporting Documentation"),
link = False,
fields = ["file"],
),
"comments",
)
s3db.configure(tablename,
crud_form = crud_form,
)
settings.customise_hrm_training_event_report_resource = customise_hrm_training_event_report_resource
# -------------------------------------------------------------------------
def customise_member_membership_resource(r, tablename):
from s3layouts import S3PopupLink
ADD_MEMBERSHIP_TYPE = T("Create Partner Type")
s3db = current.s3db
table = s3db.member_membership
table.code.label = T("Partner ID")
table.membership_type_id.comment = S3PopupLink(f = "membership_type",
label = ADD_MEMBERSHIP_TYPE,
title = ADD_MEMBERSHIP_TYPE,
tooltip = T("Add a new partner type to the catalog."),
)
list_fields = [(T("Full Name"), "person_id"),
"organisation_id",
"membership_type_id",
"code",
(T("National ID"), "person_id$national_id.value"),
(T("Email"), "email.value"),
(T("Mobile Phone"), "phone.value"),
"membership_fee",
(T("Paid"), "paid"),
]
s3db.configure(tablename,
list_fields = list_fields,
)
current.response.s3.crud_strings[tablename] = Storage(
label_create = T("Create Partner"),
title_display = T("Partner Details"),
title_list = T("Partners"),
title_update = T("Edit Partner Details"),
title_upload = T("Import Partners"),
label_list_button = T("List Partners"),
label_delete_button = T("Delete Partner"),
msg_record_created = T("Partner added"),
msg_record_modified = T("Partner updated"),
msg_record_deleted = T("Partner deleted"),
msg_list_empty = T("No Partners currently defined"))
settings.customise_member_membership_resource = customise_member_membership_resource
# -------------------------------------------------------------------------
def customise_member_membership_controller(**attr):
ns_only("member_membership",
required = True,
branches = True,
updateable = True,
)
return attr
settings.customise_member_membership_controller = customise_member_membership_controller
# -------------------------------------------------------------------------
def customise_member_membership_type_resource(r, tablename):
current.response.s3.crud_strings[tablename] = Storage(
label_create = T("Create Partner Type"),
title_display = T("Partner Type Details"),
title_list = T("Partner Types"),
title_update = T("Edit Partner Type Details"),
title_upload = T("Import Partner Types"),
label_list_button = T("List Partner Types"),
label_delete_button = T("Delete Partner Type"),
msg_record_created = T("Partner Type added"),
msg_record_modified = T("Partner Type updated"),
msg_record_deleted = T("Partner Type deleted"),
msg_list_empty = T("No Partner Types currently defined"))
settings.customise_member_membership_type_resource = customise_member_membership_type_resource
# -------------------------------------------------------------------------
def customise_member_membership_type_controller(**attr):
ns_only("member_membership_type",
required = False,
branches = False,
updateable = True,
)
return attr
settings.customise_member_membership_type_controller = customise_member_membership_type_controller
# -------------------------------------------------------------------------
def customise_inv_home():
"""
Homepage for the Inventory module
"""
from gluon import URL
from s3 import s3_redirect_default
auth = current.auth
if auth.user and auth.user.site_id:
has_role = auth.s3_has_role
if has_role("national_wh_manager") or \
has_role(current.session.s3.system_roles.ORG_ADMIN):
pass
else:
# Redirect to this Warehouse
table = current.s3db.inv_warehouse
wh = current.db(table.site_id == auth.user.site_id).select(table.id,
limitby = (0, 1)
).first()
if wh:
s3_redirect_default(URL(c="inv", f="warehouse",
args = [wh.id, "inv_item"],
))
# Redirect to Warehouse Summary Page
s3_redirect_default(URL(c="inv", f="warehouse", args="summary"))
settings.customise_inv_home = customise_inv_home
# -------------------------------------------------------------------------
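# inv_pdf_header below is wired into the grouped stock reports via the
# "pdf_header" key in customise_inv_inv_item_resource; the grouped-items
# PDF exporter is expected to call it as inv_pdf_header(r, title=...)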
def inv_pdf_header(r, title=None):
"""
PDF header for Stock Reports
@param r: the S3Request
@param title: the report title
"""
# Get organisation name and logo
from .layouts import OM
name, logo = OM().render()
from gluon.html import DIV, H2, H4, P, TABLE, TR, TD
# Report title and subtitle
title = H2(title) if title else ""
subtitle = ""
get_vars = r.get_vars
report = get_vars.get("report")
if report == "movements":
from s3 import S3TypeConverter, S3DateTime
# Get earliest/latest date from filter
convert = S3TypeConverter.convert
dtstr = get_vars.get("_transaction.date__ge")
earliest = convert(datetime.datetime, dtstr) if dtstr else ""
dtstr = get_vars.get("_transaction.date__le")
latest = convert(datetime.datetime, dtstr) if dtstr else ""
# Convert into local calendar/format
if earliest:
earliest = S3DateTime.date_represent(earliest, utc=True)
if latest:
latest = S3DateTime.date_represent(latest, utc=True)
# Add as subtitle
if earliest or latest:
subtitle = P(" - ".join((earliest, latest)))
output = TABLE(TR(TD(DIV(logo, H4(name))),
TD(DIV(title, subtitle)),
),
)
return output
# -------------------------------------------------------------------------
def customise_inv_inv_item_resource(r, tablename):
s3db = current.s3db
resource = r.resource
if resource.tablename == "inv_inv_item" and r.method == "grouped":
report = r.get_vars.get("report")
if report == "weight_and_volume":
# Add field methods for total weight and volume
from gluon import Field
table = s3db.inv_inv_item
table.total_weight = Field.Method("total_weight",
s3db.inv_item_total_weight,
)
table.total_volume = Field.Method("total_volume",
s3db.inv_item_total_volume,
)
s3db.configure("inv_inv_item",
extra_fields = ["item_id$weight",
"item_id$volume",
],
)
elif report == "movements":
# Inject a date filter for transactions
filter_widgets = resource.get_config("filter_widgets")
from s3 import S3DateFilter
date_filter = S3DateFilter("transaction_date",
label = T("Date"),
fieldtype = "date",
selector = "_transaction.date",
)
filter_widgets.insert(1, date_filter)
# Stock Reports
stock_reports = {"default": {
"title": T("Stock Position Report"),
"fields": [(T("Warehouse"), "site_id$name"),
"item_id$item_category_id",
"bin",
"item_id$name",
"quantity",
"pack_value",
"total_value",
],
"groupby": ["site_id",
],
"orderby": ["site_id$name",
"item_id$name",
],
"aggregate": [("sum", "quantity"),
("sum", "total_value"),
],
"pdf_header": inv_pdf_header,
},
"weight_and_volume": {
"title": T("Weight and Volume Report"),
"fields": [(T("Warehouse"), "site_id$name"),
"item_id$item_category_id",
"bin",
"item_id$name",
"quantity",
"item_id$weight",
"item_id$volume",
"total_weight",
"total_volume",
],
"groupby": ["site_id",
],
"orderby": ["site_id$name",
"item_id$name",
],
"aggregate": [("sum", "quantity"),
("sum", "total_weight"),
("sum", "total_volume"),
],
"pdf_header": inv_pdf_header,
},
"movements": {
"title": T("Stock Movements Report"),
"fields": [(T("Warehouse"), "site_id$name"),
"item_id$item_category_id",
"bin",
"item_id$name",
(T("Origin/Destination"), "sites"),
(T("Documents"), "documents"),
(T("Initial Quantity"), "original_quantity"),
(T("Incoming"), "quantity_in"),
(T("Outgoing"), "quantity_out"),
(T("Final Quantity"), "quantity"),
],
"groupby": ["site_id",
],
"orderby": ["site_id$name",
"item_id$name",
],
"aggregate": [("sum", "original_quantity"),
("sum", "quantity_in"),
("sum", "quantity_out"),
("sum", "quantity"),
],
"extract": s3db.inv_stock_movements,
"pdf_header": inv_pdf_header,
},
}
current.s3db.configure("inv_inv_item",
create = False,
deletable = False,
editable = False,
listadd = False,
grouped = stock_reports,
)
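# The report variants above are selected via the "report" GET variable,
# e.g. a grouped request like inv/inv_item/grouped?report=movements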
settings.customise_inv_inv_item_resource = customise_inv_inv_item_resource
# -------------------------------------------------------------------------
def customise_inv_send_resource(r, tablename):
s3db = current.s3db
s3db.configure("inv_send",
list_fields = ["id",
"send_ref",
"req_ref",
#"sender_id",
"site_id",
"date",
"recipient_id",
"delivery_date",
"to_site_id",
"status",
#"driver_name",
#"driver_phone",
#"vehicle_plate_no",
#"time_out",
"comments",
],
)
# Custom Waybill
s3db.set_method("inv", "send",
method = "form",
action = PrintableShipmentForm,
)
settings.customise_inv_send_resource = customise_inv_send_resource
# -------------------------------------------------------------------------
def customise_inv_recv_resource(r, tablename):
# Custom GRN
current.s3db.set_method("inv", "recv",
method = "form",
action = PrintableShipmentForm,
)
settings.customise_inv_recv_resource = customise_inv_recv_resource
# -------------------------------------------------------------------------
def customise_inv_warehouse_resource(r, tablename):
settings.inv.recv_tab_label = "Received/Incoming Shipments"
settings.inv.send_tab_label = "Sent Shipments"
# Only Nepal RC use Warehouse Types
s3db = current.s3db
field = s3db.inv_warehouse.warehouse_type_id
field.readable = field.writable = False
list_fields = s3db.get_config("inv_warehouse", "list_fields")
try:
list_fields.remove("warehouse_type_id")
except ValueError:
# Already removed
pass
settings.customise_inv_warehouse_resource = customise_inv_warehouse_resource
# -------------------------------------------------------------------------
def customise_org_facility_resource(r, tablename):
#root_org = current.auth.root_org_name()
#if root_org != HNRC:
# return
# Simplify Form
s3db = current.s3db
table = s3db.org_facility
table.code.readable = table.code.writable = False
table.opening_times.readable = table.opening_times.writable = False
table.website.readable = table.website.writable = False
field = s3db.org_site_facility_type.facility_type_id
field.readable = field.writable = False
# Simplify Search Fields
from s3 import S3TextFilter, S3OptionsFilter, S3LocationFilter
# Which levels of Hierarchy are we using?
levels = current.gis.get_relevant_hierarchy_levels()
text_fields = ["name",
#"code",
"comments",
"organisation_id$name",
"organisation_id$acronym",
]
for level in levels:
lfield = "location_id$%s" % level
text_fields.append(lfield)
s3db.configure("org_facility",
filter_widgets = [
S3TextFilter(text_fields,
label = T("Search"),
),
S3OptionsFilter("organisation_id"),
S3LocationFilter("location_id",
levels = levels,
),
]
)
settings.customise_org_facility_resource = customise_org_facility_resource
# -------------------------------------------------------------------------
def customise_org_office_controller(**attr):
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
# Organisation needs to be an NS/Branch
ns_only("org_office",
required = True,
branches = True,
# default
#limit_filter_opts = True,
)
return result
s3.prep = custom_prep
return attr
settings.customise_org_office_controller = customise_org_office_controller
# -------------------------------------------------------------------------
def customise_org_organisation_controller(**attr):
s3 = current.response.s3
type_filter = current.request.get_vars.get("organisation_type.name")
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if r.interactive or r.representation == "aadata":
if not r.component or r.component_name == "branch":
resource = r.resource
table = resource.table
if r.function == "training_center":
auth = current.auth
if not auth.s3_has_role("ADMIN"):
# See NS Training Centers only
resource.add_filter(table.root_organisation == auth.root_org())
if not auth.s3_has_role("ORG_ADMIN"):
resource.configure(insertable = False)
type_label = T("Type")
if r.get_vars.get("caller") == "org_facility_organisation_id":
# Simplify
from s3 import S3SQLCustomForm
crud_form = S3SQLCustomForm("name",
"acronym",
"phone",
"comments",
)
resource.configure(crud_form = crud_form,
)
else:
list_fields = ["name",
"acronym",
"organisation_organisation_type.organisation_type_id",
"country",
"website",
]
if type_filter:
type_names = type_filter.split(",")
if len(type_names) == 1:
# Strip Type from list_fields
try:
list_fields.remove("organisation_organisation_type.organisation_type_id")
except ValueError:
# Already removed
pass
type_label = ""
if type_filter == RED_CROSS:
# Modify filter_widgets
filter_widgets = resource.get_config("filter_widgets")
# Remove type (always 'RC')
filter_widgets.pop(1)
# Modify CRUD Strings
s3.crud_strings.org_organisation = Storage(
label_create = T("Create National Society"),
title_display = T("National Society Details"),
title_list = T("Red Cross & Red Crescent National Societies"),
title_update = T("Edit National Society"),
title_upload = T("Import Red Cross & Red Crescent National Societies"),
label_list_button = T("List Red Cross & Red Crescent National Societies"),
label_delete_button = T("Delete National Society"),
msg_record_created = T("National Society added"),
msg_record_modified = T("National Society updated"),
msg_record_deleted = T("National Society deleted"),
msg_list_empty = T("No Red Cross & Red Crescent National Societies currently registered")
)
# Add Region to list_fields
list_fields.insert(-1, "region_id")
# Region is required
table.region_id.requires = table.region_id.requires.other
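# (.requires.other unwraps the IS_EMPTY_OR wrapper, leaving the
#  underlying validator, so the field becomes mandatory)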
else:
table.region_id.readable = table.region_id.writable = False
if type_filter == "Supplier":
# Show simple free-text contact field
contact_field = table.contact
contact_field.readable = True
contact_field.writable = True
# Include contact information in list_fields
list_fields = ["name",
"acronym",
"country",
"contact",
"phone",
"website",
]
resource.configure(list_fields=list_fields)
if r.interactive:
table.country.label = T("Country")
from s3 import S3SQLCustomForm, S3SQLInlineLink
crud_form = S3SQLCustomForm(
"name",
"acronym",
S3SQLInlineLink("organisation_type",
field = "organisation_type_id",
label = type_label,
multiple = False,
#widget = "hierarchy",
),
"region_id",
"country",
"contact",
"phone",
"website",
"logo",
"comments",
)
resource.configure(crud_form=crud_form)
return result
s3.prep = custom_prep
settings = current.deployment_settings
if type_filter == "Supplier":
# Suppliers have simpler Tabs (hide Offices, Warehouses and Contacts)
tabs = [(T("Basic Details"), None, {"native": 1}),
]
if settings.get_L10n_translate_org_organisation():
tabs.append((T("Local Names"), "name"))
attr["rheader"] = lambda r: current.s3db.org_rheader(r, tabs=tabs)
elif type_filter == "Academic,Bilateral,Government,Intergovernmental,NGO,UN agency":
# Partners have simpler Tabs (hide Offices, Warehouses and Contacts)
tabs = [(T("Basic Details"), None, {"native": 1}),
(T("Projects"), "project"),
]
if settings.get_L10n_translate_org_organisation():
tabs.insert(1, (T("Local Names"), "name"))
attr["rheader"] = lambda r: current.s3db.org_rheader(r, tabs=tabs)
else:
# Enable tab for PDF card configurations
settings.org.pdf_card_configs = True
return attr
settings.customise_org_organisation_controller = customise_org_organisation_controller
# -------------------------------------------------------------------------
def customise_pr_address_resource(r, tablename):
#if current.auth.root_org_name() in ("Honduran Red Cross",
# "Paraguayan Red Cross",
# ):
#    # Location Hierarchy is loaded: leave things as they are
#    pass
#else:
s3db = current.s3db
s3db.gis_location.addr_street.label = T("Address")
s3db.configure("pr_address",
list_fields = ["type",
(current.messages.COUNTRY, "location_id$L0"),
(T("Address"), "location_id$addr_street"),
#(settings.get_ui_label_postcode(),
# "location_id$addr_postcode")
],
)
settings.customise_pr_address_resource = customise_pr_address_resource
# -------------------------------------------------------------------------
def customise_pr_contact_resource(r, tablename):
table = current.s3db[tablename]
table.comments.readable = table.comments.writable = False
table.contact_description.readable = table.contact_description.writable = False
table.priority.readable = table.priority.writable = False
settings.customise_pr_contact_resource = customise_pr_contact_resource
# -------------------------------------------------------------------------
def customise_pr_education_resource(r, tablename):
s3db = current.s3db
table = s3db[tablename]
table.country.readable = table.country.writable = True
table.grade.readable = table.grade.writable = False
table.major.readable = table.major.writable = False
s3db.configure(tablename,
list_fields = [# Normally accessed via component
#"person_id",
"year",
"level_id",
"award",
#"major",
#"grade",
"institute",
],
)
settings.customise_pr_education_resource = customise_pr_education_resource
# -------------------------------------------------------------------------
def customise_pr_forum_resource(r, tablename):
table = current.s3db.pr_forum
table.forum_type.readable = table.forum_type.writable = False
settings.customise_pr_forum_resource = customise_pr_forum_resource
# -------------------------------------------------------------------------
def customise_pr_forum_controller(**attr):
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if r.method == "assign":
auth = current.auth
has_role = auth.s3_has_role
if not has_role("ADMIN") and has_role("training_coordinator"):
# Filter people to just those Trained by this Reference Center or Staff of this Reference Center
from s3 import FS
organisation_id = auth.user.organisation_id
query = (FS("training.training_event_id$organisation_id") == organisation_id) | \
(FS("user.organisation_id") == organisation_id)
s3.filter = query
return result
s3.prep = custom_prep
return attr
settings.customise_pr_forum_controller = customise_pr_forum_controller
# -------------------------------------------------------------------------
#def customise_pr_group_controller(**attr):
# # Organisation needs to be an NS/Branch
# ns_only("org_organisation_team",
# required = False,
# branches = True,
# )
# return attr
#settings.customise_pr_group_controller = customise_pr_group_controller
# -------------------------------------------------------------------------
def customise_pr_person_resource(r, tablename):
table = current.s3db[tablename]
table.first_name.label = T("Forenames")
table.middle_name.label = T("Father's Surname")
table.last_name.label = T("Mother's Surname")
settings.customise_pr_person_resource = customise_pr_person_resource
# -------------------------------------------------------------------------
def customise_pr_person_controller(**attr):
s3db = current.s3db
s3 = current.response.s3
# Enable scalability-optimized strategies
settings.base.bigtable = True
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
if not result:
return False
auth = current.auth
has_role = auth.s3_has_role
EXTERNAL = False
if "profile" in current.request.get_vars:
profile = True
else:
len_roles = len(current.session.s3.roles)
if (len_roles <= 2) or \
(len_roles == 3 and has_role("RIT_MEMBER") and not has_role("ADMIN")):
profile = True
else:
profile = False
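# (heuristic: users with no functional roles beyond the defaults, or
#  RIT members with a single extra role, get the personal-profile view)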
if r.function == "trainee_person":
EXTERNAL = True
s3.crud_strings["pr_person"].update(
title_display = T("External Trainee Details"),
title_update = T("External Trainee Details")
)
if profile:
# Configure for personal mode
# People can edit their own HR data
configure = s3db.configure
configure("hrm_human_resource",
deletable = False,
#editable = True,
insertable = False,
)
if not has_role("RIT_MEMBER"):
#configure("hrm_certification",
# deletable = True,
# editable = True,
# insertable = True,
# )
configure("hrm_training",
deletable = False,
editable = False,
insertable = False,
)
component_name = r.component_name
method = r.method
if method == "import":
# HR records may be created via import
# Default to Volunteers
s3db.hrm_human_resource.type.default = 2
# Doesn't work, as the email is only created after the human_resource record
#s3db.configure("hrm_human_resource",
# create_onaccept = hrm_human_resource_create_onaccept,
# )
elif method == "record" or component_name == "human_resource":
table = s3db.hrm_human_resource
if EXTERNAL:
db = current.db
f = table.organisation_id
f.label = T("Organization")
# Organisation cannot be an NS/Branch
# Lookup organisation_type_id for Red Cross
ttable = s3db.org_organisation_type
type_ids = db(ttable.name.belongs((RED_CROSS, "Training Center"))).select(ttable.id,
limitby=(0, 2),
cache = s3db.cache,
)
if type_ids:
from s3 import IS_ONE_OF
ltable = db.org_organisation_organisation_type
rows = db(ltable.organisation_type_id.belongs(type_ids)).select(ltable.organisation_id)
not_filter_opts = [row.organisation_id for row in rows]
f.requires = IS_ONE_OF(db, "org_organisation.id",
f.represent,
not_filterby = "id",
not_filter_opts = not_filter_opts,
updateable = True,
orderby = "org_organisation.name",
sort = True)
else:
# Organisation needs to be an NS/Branch
if auth.s3_has_roles(("surge_capacity_manager",
"ns_training_manager",
"ns_training_assistant",
"training_coordinator",
"training_assistant",
)):
updateable = False
else:
updateable = True
ns_only("hrm_human_resource",
required = True,
branches = True,
updateable = updateable,
)
f = table.essential
f.readable = f.writable = False
f = table.site_contact
f.readable = f.writable = False
if method == "record":
if not auth.s3_has_roles(("ORG_ADMIN",
"hr_manager",
"hr_assistant",
)):
table.organisation_id.writable = False
# Hide the Site field as this data isn't loaded & we want to keep things simple
# @ToDo: Re-enable for specific NS as-required
f = table.site_id
f.readable = f.writable = False
# Use default form (legacy)
#s3db.clear_config("hrm_human_resource", "crud_form")
elif not component_name:
s3db.configure("pr_person",
listadd = True,
)
# Basic Details tab
f = s3db.pr_person.middle_name
f.readable = f.writable = True
f = s3db.pr_person_details.nationality2
f.readable = f.writable = True
from s3 import S3SQLCustomForm
crud_form = S3SQLCustomForm("first_name",
"middle_name",
"last_name",
"date_of_birth",
"gender",
"person_details.marital_status",
"person_details.nationality",
"person_details.nationality2",
"comments",
)
s3db.configure("pr_person",
crud_form = crud_form,
)
elif component_name == "appraisal":
atable = r.component.table
atable.organisation_id.readable = atable.organisation_id.writable = False
# Organisation needs to be an NS
#ns_only("hrm_appraisal",
# required = True,
# branches = False,
# )
field = atable.supervisor_id
field.readable = field.writable = False
field = atable.job_title_id
field.comment = None
field.label = T("Sector") # RDRT-specific
from s3 import IS_ONE_OF
field.requires = IS_ONE_OF(current.db, "hrm_job_title.id",
field.represent,
filterby = "type",
filter_opts = (4,),
)
elif component_name == "certification":
ctable = r.component.table
ctable.organisation_id.readable = False
elif component_name == "competency":
ctable = r.component.table
ctable.skill_id.label = T("Language")
ctable.organisation_id.readable = False
elif component_name == "experience":
# 2 options here: Work Experience & Missions
# These have very different views
# Work Experience
etable = r.component.table
etable.organisation_id.readable = etable.organisation_id.writable = False
etable.job_title_id.readable = etable.job_title_id.writable = False
etable.responsibilities.readable = etable.responsibilities.writable = False
etable.hours.readable = etable.hours.writable = False
etable.supervisor_id.readable = etable.supervisor_id.writable = False
etable.organisation.readable = etable.organisation.writable = True
etable.job_title.readable = etable.job_title.writable = True
from s3 import S3LocationSelector
etable.location_id.label = T("Country")
etable.location_id.widget = S3LocationSelector(levels=("L0",),
show_map=False,
show_postcode=False,
)
elif component_name == "identity":
#itable = r.component.table
# Default
#itable.country_code.readable = itable.country_code.writable = False
#itable.ia_name.readable = itable.ia_name.writable = False
f = r.component.table.ia_name
f.readable = f.writable = False
list_fields = ["type",
"value",
"valid_until",
]
s3db.configure("pr_identity",
list_fields = list_fields,
)
# Moved to MedicalTab
#elif component_name == "physical_description":
# from gluon import DIV
# dtable = r.component.table
# dtable.medical_conditions.comment = DIV(_class="tooltip",
# _title="%s|%s" % (T("Medical Conditions"),
# T("Chronic Illness, Disabilities, Mental/Psychological Condition etc.")))
# dtable.allergic.writable = dtable.allergic.readable = True
# dtable.allergies.writable = dtable.allergies.readable = True
# dtable.ethnicity.writable = dtable.ethnicity.readable = False
# dtable.other_details.writable = dtable.other_details.readable = False
# import json
# SEPARATORS = (",", ":")
# s3.jquery_ready.append('''S3.showHidden('%s',%s,'%s')''' % \
# ("allergic", json.dumps(["allergies"], separators=SEPARATORS), "pr_physical_description"))
if not EXTERNAL and \
auth.s3_has_roles(ID_CARD_EXPORT_ROLES):
# Show button to export ID card
settings.hrm.id_cards = True
return True
s3.prep = custom_prep
if current.request.controller in ("hrm", "vol"):
attr["csv_template"] = ("../../themes/RMSAmericas/formats", "hrm_person")
# Common rheader for all views
attr["rheader"] = s3db.hrm_rheader
return attr
settings.customise_pr_person_controller = customise_pr_person_controller
# -------------------------------------------------------------------------
def customise_pr_physical_description_resource(r, tablename):
from gluon import DIV
from s3 import S3SQLCustomForm
s3db = current.s3db
#s3db.pr_physical_description.medical_conditions.comment = DIV(_class="tooltip",
# _title="%s|%s" % (T("Medical Conditions"),
# T("Chronic Illness, Disabilities, Mental/Psychological Condition etc.")))
s3db.pr_physical_description.medical_conditions.comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Medical Conditions"),
T("It is important to include, if they exist: surgical history, medical restrictions, vaccines, etc.")))
s3db.configure(tablename,
crud_form = S3SQLCustomForm("blood_type",
"medical_conditions",
"medication",
"diseases",
"allergic",
"allergies",
),
)
settings.customise_pr_physical_description_resource = customise_pr_physical_description_resource
# -------------------------------------------------------------------------
def customise_supply_item_category_resource(r, tablename):
#root_org = current.auth.root_org_name()
#if root_org == HNRC:
# Not using Assets Module
field = current.s3db.supply_item_category.can_be_asset
field.readable = field.writable = False
settings.customise_supply_item_category_resource = customise_supply_item_category_resource
# -------------------------------------------------------------------------
def customise_project_window_resource(r, tablename):
r.resource.configure(deletable = False,
insertable = False,
)
settings.customise_project_window_resource = customise_project_window_resource
# -------------------------------------------------------------------------
def customise_project_activity_data_resource(r, tablename):
if current.auth.s3_has_roles(("monitoring_evaluation", "ORG_ADMIN")):
# Normal Access
return
# Project Manager
if r.method == "update":
table = current.s3db.project_activity_data
if r.tablename == "project_activity_data":
record_id = r.id
else:
record_id = r.component_id
record = current.db(table.id == record_id).select(table.value,
limitby=(0, 1)
).first()
if record and record.value:
# Redirect to Read-only mode
from gluon import redirect
redirect(r.url(method="read"))
else:
# Cannot edit anything
for f in table.fields:
table[f].writable = False
# Except add a Real value
table.value.writable = True
# Or Amend the Comments
table.comments.writable = True
else:
s3db = current.s3db
table = s3db.project_window
record = current.db(table.deleted == False).select(table.start_date,
table.end_date,
limitby = (0, 1)
).first()
if record:
if record.start_date <= r.utcnow.date() <= record.end_date:
# Inside the time window: Project Manager may update Actuals
return
# Outside the time window: Project Manager cannot add the Actual value
s3db.project_activity_data.value.writable = False
s3db.configure("project_activity_data",
updateable = False,
)
settings.customise_project_activity_data_resource = customise_project_activity_data_resource
# -------------------------------------------------------------------------
def customise_project_organisation_resource(r, tablename):
root_org = current.auth.root_org_name()
if root_org == HNRC:
from gluon import IS_IN_SET
currency_opts = {"EUR" : "EUR",
"CHF" : "CHF",
"HNL" : "L",
"USD" : "USD",
}
f = current.s3db.project_organisation.currency
f.represent = currency_represent
f.requires = IS_IN_SET(currency_opts)
settings.customise_project_organisation_resource = customise_project_organisation_resource
# -------------------------------------------------------------------------
def project_project_postprocess(form):
"""
When using Budget Monitoring (i.e. HNRC), create the monthly monitoring entries
"""
db = current.db
s3db = current.s3db
project_id = form.vars.id
# Read Budget Entity ID, Start Date and End Date
ptable = s3db.project_project
project = db(ptable.id == project_id).select(ptable.budget_entity_id,
ptable.name,
ptable.start_date,
ptable.end_date,
limitby=(0, 1)
).first()
if not project:
return
# Copy Project Name to Budget Name
budget_entity_id = project.budget_entity_id
btable = s3db.budget_budget
query = (btable.budget_entity_id == budget_entity_id)
budget = db(query).select(btable.id, # Needed for update_record
# If we want to provide smoothed default expected values
#btable.total_budget,
btable.currency,
# Assume Monthly
#btable.monitoring_frequency,
limitby=(0, 1)
).first()
if not budget:
return
# Build Budget Name from Project Name
project_name = project.name
# Check for duplicates
query = (btable.name == project_name) & \
(btable.id != budget.id)
duplicate = db(query).select(btable.id,
limitby=(0, 1)
).first()
if not duplicate:
budget_name = project_name[:128]
else:
# Need another Unique name
import uuid
budget_name = "%s %s" % (project_name[:91], uuid.uuid4())
budget.update_record(name = budget_name)
mtable = s3db.budget_monitoring
exists = db(mtable.budget_entity_id == budget_entity_id).select(mtable.id,
limitby=(0, 1))
if not exists:
# Create Monitoring Data entries
start_date = project.start_date
end_date = project.end_date
if not start_date or not end_date:
return
# Assume Monthly
#monitoring_frequency = budget.monitoring_frequency
#if not monitoring_frequency:
# return
#total_budget = budget.total_budget
currency = budget.currency
# Create entries for the 1st of every month between start_date and end_date
from dateutil import rrule
dates = list(rrule.rrule(rrule.MONTHLY, bymonthday=1, dtstart=start_date, until=end_date))
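# e.g. dtstart=2019-01-15, until=2019-04-10 yields
# [2019-02-01, 2019-03-01, 2019-04-01]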
for d in dates:
mtable.insert(budget_entity_id = budget_entity_id,
# @ToDo: This needs to be modified whenever entries are manually edited
# Set/update this in budget_monitoring_onaccept
# - also check here that we don't exceed overall budget
start_date = start_date,
end_date = d,
currency = currency,
)
# Start date relates to previous entry
start_date = d
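# (attached as the crud_form postprocess for HNRC in
#  customise_project_project_controller below)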
# -------------------------------------------------------------------------
def customise_project_programme_controller(**attr):
# Organisation needs to be an NS/Branch
ns_only("project_programme",
required = True,
branches = False,
updateable = True,
)
return attr
settings.customise_project_programme_controller = customise_project_programme_controller
# -------------------------------------------------------------------------
def customise_project_project_controller(**attr):
tablename = "project_project"
# Default Filter
from s3 import s3_set_default_filter
s3_set_default_filter("~.organisation_id",
user_org_default_filter,
tablename = "project_project")
# Load standard model
s3db = current.s3db
table = s3db[tablename]
# Disable Map Tab on Summary View
# - until we can support multiple Points per Record
settings.ui.summary = ({"common": True,
"name": "add",
"widgets": [{"method": "create"}],
},
#{"common": True,
# "name": "cms",
# "widgets": [{"method": "cms"}]
# },
{"name": "table",
"label": "Table",
"widgets": [{"method": "datatable"}]
},
{"name": "charts",
"label": "Report",
"widgets": [{"method": "report",
"ajax_init": True}]
},
#{"name": "map",
# "label": "Map",
# "widgets": [{"method": "map",
# "ajax_init": True}],
# },
)
# @ToDo: S3SQLInlineComponent for Project orgs
# Get IDs for Partner NS/Partner Donor
# db = current.db
# ttable = db.org_organisation_type
# rows = db(ttable.deleted != True).select(ttable.id,
# ttable.name,
# )
# rc = []
# not_rc = []
# nappend = not_rc.append
# for row in rows:
# if row.name == RED_CROSS:
# rc.append(row.id)
# elif row.name == "Supplier":
# pass
# else:
# nappend(row.id)
# Custom Fields
table.organisation_id.label = T("Host National Society")
# Custom Crud Form
from s3 import S3SQLCustomForm, S3SQLInlineComponent, S3SQLInlineLink
# Special cases for different NS
root_org = current.auth.root_org_name()
if root_org == HNRC:
# @ToDo: Use Inter-American Framework instead (when extending to Zone office)
# @ToDo: Add 'Business Line' (when extending to Zone office)
project_settings = settings.project
project_settings.details_tab = True
#project_settings.community_volunteers = True
# Done in a more structured way instead
objectives = None
outputs = None
project_settings.goals = True
project_settings.outcomes = True
project_settings.outputs = True
project_settings.indicators = True
project_settings.indicator_criteria = True
project_settings.status_from_activities = True
table.human_resource_id.label = T("Coordinator")
# Use Budget module instead of ProjectAnnualBudget
project_settings.multiple_budgets = False
project_settings.budget_monitoring = True
# Require start/end dates
table.start_date.requires = table.start_date.requires.other
table.end_date.requires = table.end_date.requires.other
budget = S3SQLInlineComponent("budget",
label = T("Budget"),
#link = False,
multiple = False,
fields = ["total_budget",
"currency",
#"monitoring_frequency",
],
)
btable = s3db.budget_budget
# Need to provide a name
import random, string
btable.name.default = "".join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(16))
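# (random placeholder only; project_project_postprocess overwrites it
#  with the project name on save)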
btable.monitoring_frequency.default = 3 # Monthly
btable.currency.represent = currency_represent
currency_opts = {"EUR" : "EUR",
"CHF" : "CHF",
"HNL" : "L",
"USD" : "USD",
}
from gluon import IS_IN_SET
btable.currency.requires = IS_IN_SET(currency_opts)
s3db.budget_monitoring.currency.represent = currency_represent
postprocess = project_project_postprocess
list_fields = s3db.get_config("project_project", "list_fields")
list_fields += [(T("Actual Progress"), "actual_progress_by_activities"),
(T("Planned Progress"), "planned_progress_by_activities"),
]
else:
objectives = "objectives"
outputs = S3SQLInlineComponent(
"output",
label = T("Outputs"),
fields = ["name", "status"],
)
budget = None
postprocess = None
if settings.get_project_programmes():
# Inject inline link for programmes including AddResourceLink
#from s3layouts import S3PopupLink
comment = s3db.project_programme_id.attr.comment
comment.vars = {"caller": "link_defaultprogramme",
"prefix": "project",
"parent": "programme_project",
}
programme = S3SQLInlineLink("programme",
label = T("Program"),
field = "programme_id",
multiple = False,
comment = comment,
)
else:
programme = None
crud_form = S3SQLCustomForm(
"organisation_id",
programme,
"name",
"code",
"description",
"status_id",
"start_date",
"end_date",
budget,
#S3SQLInlineComponent(
# "location",
# label = T("Locations"),
# fields = ["location_id"],
#),
# Outputs
outputs,
S3SQLInlineLink(
"hazard",
label = T("Hazards"),
field = "hazard_id",
help_field = s3db.project_hazard_help_fields,
cols = 4,
translate = True,
),
S3SQLInlineLink(
"sector",
label = T("Sectors"),
field = "sector_id",
cols = 4,
translate = True,
),
S3SQLInlineLink(
"theme",
label = T("Themes"),
field = "theme_id",
help_field = s3db.project_theme_help_fields,
cols = 4,
translate = True,
# Filter Theme by Sector
filterby = "theme_id:project_theme_sector.sector_id",
match = "sector_project.sector_id",
script = '''
$.filterOptionsS3({
'trigger':{'alias':'sector','name':'sector_id','inlineType':'link'},
'target':{'alias':'theme','name':'theme_id','inlineType':'link'},
'lookupPrefix':'project',
'lookupResource':'theme',
'lookupKey':'theme_id:project_theme_sector.sector_id',
'showEmptyField':false,
'tooltip':'project_theme_help_fields(id,name)'
})'''
),
objectives,
"human_resource_id",
# Disabled since we would need organisation_id filtering by
# organisation_type_id (RC or not-RC), hiding Branches from RCs,
# and rewriting for organisation_type_id via the link table
# Partner NS
# S3SQLInlineComponent(
# "organisation",
# name = "partnerns",
# label = T("Partner National Societies"),
# fields = ["organisation_id",
# "comments",
# ],
# Filter Organisation by Type
# filter = ["organisation_id": {"filterby": "organisation_type_id",
# "filterfor": rc,
# }],
# filterby = dict(field = "role",
# options = [9])
# ),
# Partner Orgs
# S3SQLInlineComponent(
# "organisation",
# name = "partner",
# label = T("Partner Organizations"),
# fields = ["organisation_id",
# "comments",
# ],
# Filter Organisation by Type
# filter = ["organisation_id": {"filterby": "organisation_type_id",
# "filterfor": not_rc,
# }],
# filterby = dict(field = "role",
# options = [2])
# ),
# Donors
# S3SQLInlineComponent(
# "organisation",
# name = "donor",
# label = T("Donor(s)"),
# fields = ["organisation_id",
# "amount",
# "currency"],
# Filter Organisation by Type
# filter = ["organisation_id": {"filterby": "organisation_type_id",
# "filterfor": not_rc,
# }],
# filterby = dict(field = "role",
# options = [3])
# ),
#"budget",
#"currency",
"comments",
postprocess = postprocess,
)
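# (for non-HNRC NS, budget & postprocess are None above, so no budget
#  monitoring entries are created on save)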
s3db.configure(tablename,
crud_form = crud_form,
)
s3 = current.response.s3
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard prep
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if r.method == "grouped":
grouped = {"default":
{"title": T("Global Report of Projects Status"),
"fields": [(T("Project"), "name"),
(T("Program"), "programme.name"),
(T("Donor"), "donor.organisation_id"),
(T("Budget"), "budget.total_budget"),
(T("Location"), "location.location_id"),
"start_date",
"end_date",
],
"orderby": ["name",
],
"aggregate": [("sum", "budget.total_budget"),
],
},
}
from s3 import S3DateFilter, S3OptionsFilter
filter_widgets = [S3DateFilter("date",
label = T("Time Period"),
hide_time = True,
),
S3OptionsFilter("programme_project.programme_id",
label = T("Programs"),
),
S3OptionsFilter("theme_project.theme_id",
label = T("Themes"),
),
S3OptionsFilter("sector_project.sector_id",
label = T("Sectors"),
),
S3OptionsFilter("beneficiary.parameter_id",
label = T("Beneficiaries"),
),
S3OptionsFilter("hazard_project.hazard_id",
label = T("Hazards"),
),
]
s3db.configure(tablename,
filter_widgets = filter_widgets,
grouped = grouped,
)
elif r.component:
if r.component_name == "organisation":
component_id = r.component_id
if component_id:
# No r.component.record :/
ctable = s3db.project_organisation
crecord = current.db(ctable.id == component_id).select(ctable.role,
limitby=(0, 1)
).first()
if crecord and crecord.role == settings.get_project_organisation_lead_role():
ns_only("project_organisation",
required = True,
branches = False,
updateable = True,
)
#ctable.organisation_id.requires = \
# s3db.org_organisation_requires(required = True,
# # Only allowed to add Projects for Orgs
# # that the user has write access to
# updateable = True,
# )
else:
# Lead Organisation needs to be an NS (not a branch)
ns_only(tablename,
required = True,
branches = False,
# default
#limit_filter_opts = True,
)
# Set the Host NS filter as Visible so that the default filter works
filter_widgets = s3db.get_config(tablename, "filter_widgets")
for widget in filter_widgets:
if widget.field == "organisation_id":
widget.opts.hidden = False
break
return result
s3.prep = custom_prep
return attr
settings.customise_project_project_controller = customise_project_project_controller
# -------------------------------------------------------------------------
#def customise_project_beneficiary_resource(r, tablename):
# """
# Link Project Beneficiaries to Activity Type
# """
# if r.interactive and r.component:
# if r.tablename == "project_project":
# # We are a component of the Project
# project_id = r.id
# elif r.tablename == "project_location":
# # We are a component of the Project Location
# project_id = r.record.project_id
# else:
# # Unknown!
# return
# db = current.db
# s3db = current.s3db
# # Filter Activity Type by Sector
# ltable = s3db.project_sector_project
# rows = db(ltable.project_id == project_id).select(ltable.sector_id)
# sectors = [row.sector_id for row in rows]
# ltable = s3db.project_activity_type_sector
# rows = db(ltable.sector_id.belongs(sectors)).select(ltable.activity_type_id)
# filteropts = [row.activity_type_id for row in rows]
# def postprocess(form):
# # Update project_location.activity_type
# beneficiary_id = form.vars.get("id", None)
# table = db.project_beneficiary
# row = db(table.id == beneficiary_id).select(table.project_location_id,
# limitby = (0, 1)
# ).first()
# if not row:
# return
# project_location_id = row.project_location_id
# if not project_location_id:
# return
# ltable = db.project_beneficiary_activity_type
# row = db(ltable.beneficiary_id == beneficiary_id).select(ltable.activity_type_id,
# limitby = (0, 1)
# ).first()
# if not row:
# return
# activity_type_id = row.activity_type_id
# ltable = s3db.project_activity_type_location
# query = (ltable.project_location_id == project_location_id) & \
# (ltable.activity_type_id == activity_type_id)
# exists = db(query).select(ltable.id,
# limitby = (0, 1)
# ).first()
# if not exists:
# ltable.insert(project_location_id = project_location_id,
# activity_type_id = activity_type_id,
# )
# from s3 import S3SQLCustomForm, S3SQLInlineLink
# crud_form = S3SQLCustomForm(#"project_id",
# "project_location_id",
# S3SQLInlineLink("activity_type",
# field = "activity_type_id",
# filterby = "id",
# options = filteropts,
# label = T("Activity Type"),
# multiple = False,
# ),
# "parameter_id",
# "value",
# "target_value",
# "date",
# "end_date",
# "comments",
# postprocess = postprocess,
# )
# s3db.configure(tablename,
# crud_form = crud_form,
# )
# elif not r.component:
# # Report
# from s3 import S3OptionsFilter
# resource = r.resource
# filter_widgets = resource.get_config("filter_widgets")
# filter_widgets.insert(1,
# S3OptionsFilter("beneficiary_activity_type.activity_type_id",
# label = T("Activity Type"),
# ))
# report_options = resource.get_config("report_options")
# report_options.rows.append("beneficiary_activity_type.activity_type_id")
# # Same object so would be added twice
# #report_options.cols.append("beneficiary_activity_type.activity_type_id")
# resource.configure(filter_widgets = filter_widgets,
# report_options = report_options,
# )
# Only used for activity_types which aren't used by HNRC
#settings.customise_project_beneficiary_resource = customise_project_beneficiary_resource
# -------------------------------------------------------------------------
#def customise_project_indicator_resource(r, tablename):
# table = current.s3db.project_indicator
# table.definition.label = T("Indicator Definition")
# table.measures.label = T("Indicator Criteria")
#settings.customise_project_indicator_resource = customise_project_indicator_resource
# -------------------------------------------------------------------------
def customise_project_indicator_data_resource(r, tablename):
table = current.s3db.project_indicator_data
f = table.start_date
f.readable = f.writable = True
f.label = T("Start Date")
table.end_date.label = T("End Date")
if r.method == "update":
has_role = current.auth.s3_has_role
if has_role("monitoring_evaluation") or has_role("ORG_ADMIN"):
# Normal Access
return
# Project Manager
if r.tablename == "project_indicator_data":
record_id = r.id
else:
record_id = r.component_id
record = current.db(table.id == record_id).select(table.value,
limitby=(0, 1)
).first()
if record and record.value:
# Redirect to Read-only mode
# @ToDo: Remove 'Update' button from the read-only page
from gluon import redirect
redirect(r.url(method="read"))
else:
# Cannot edit anything
for f in table.fields:
table[f].writable = False
# Except add a Real value
table.value.writable = True
# Or Amend the Comments
table.comments.writable = True
settings.customise_project_indicator_data_resource = customise_project_indicator_data_resource
# -------------------------------------------------------------------------
def customise_project_location_resource(r, tablename):
s3db = current.s3db
table = s3db.project_location
table.name.readable = False
table.percentage.readable = table.percentage.writable = False
#list_fields = s3db.get_config(tablename, "list_fields")
#try:
#    list_fields.remove((T("Activity Types"), "activity_type.name"))
#except ValueError:
#    # Already removed
#    pass
settings.customise_project_location_resource = customise_project_location_resource
# -------------------------------------------------------------------------
def customise_project_location_controller(**attr):
s3 = current.response.s3
# Custom postp
#standard_postp = s3.postp
def custom_postp(r, output):
# Call standard postp (just does same thing but different)
#if callable(standard_postp):
# output = standard_postp(r, output)
if r.representation == "plain":
# Map Popup
from gluon import A, TABLE, TR, TD, B, URL
s3db = current.s3db
table = s3db.project_project
project_id = r.record.project_id
resource = s3db.resource("project_project", id=project_id)
list_fields = ("name",
"status_id",
"start_date",
"end_date",
"budget.total_budget",
"budget.currency",
"hazard_project.hazard_id",
"sector_project.sector_id",
"theme_project.theme_id",
# Contact
"human_resource_id",
"overall_status_by_indicators",
)
data = resource.select(list_fields, represent=True)
record = data.rows[0]
item = TABLE(TR(TD(B("%s:" % table.name.label)),
TD(record["project_project.name"]),
),
TR(TD(B("%s:" % table.status_id.label)),
TD(record["project_project.status_id"]),
),
TR(TD(B("%s:" % table.start_date.label)),
TD(record["project_project.start_date"]),
),
TR(TD(B("%s:" % table.end_date.label)),
TD(record["project_project.end_date"]),
),
TR(TD(B("%s:" % T("Budget"))),
TD("%s %s" % (record["budget_budget.currency"],
record["budget_budget.total_budget"])),
),
TR(TD(B("%s:" % s3db.project_hazard_project.hazard_id.label)),
TD(record["project_hazard_project.hazard_id"]),
),
TR(TD(B("%s:" % s3db.project_sector_project.sector_id.label)),
TD(record["project_sector_project.sector_id"]),
),
TR(TD(B("%s:" % s3db.project_theme_project.theme_id.label)),
TD(record["project_theme_project.theme_id"]),
),
TR(TD(B("%s:" % table.human_resource_id.label)),
TD(record["project_project.human_resource_id"]),
),
TR(TD(B("%s:" % T("Cumulative Status"))),
TD(record["project_project.overall_status_by_indicators"]),
),
)
title = s3.crud_strings["project_project"].title_display
# Assume authorised to see details
popup_url = URL(f="project", args=[project_id])
details_btn = A(T("Open"),
_href=popup_url,
_class="btn",
_id="details-btn",
_target="_blank")
output = dict(item = item,
title = title,
details_btn = details_btn,
)
return output
s3.postp = custom_postp
return attr
settings.customise_project_location_controller = customise_project_location_controller
# -------------------------------------------------------------------------
def customise_req_commit_controller(**attr):
# Request is mandatory
field = current.s3db.req_commit.req_id
field.requires = field.requires.other
return attr
settings.customise_req_commit_controller = customise_req_commit_controller
# -------------------------------------------------------------------------
def customise_req_req_resource(r, tablename):
s3db = current.s3db
# Request is mandatory
field = s3db.req_commit.req_id
field.requires = field.requires.other
table = s3db.req_req
table.req_ref.represent = lambda v, show_link=True, pdf=True: \
s3db.req_ref_represent(v, show_link, pdf)
table.site_id.label = T("Deliver To")
# Hide Drivers list_field
list_fields = s3db.get_config("req_req", "list_fields")
try:
list_fields.remove((T("Drivers"), "drivers"))
except ValueError:
# Already removed
pass
# Custom Request Form
s3db.set_method("req", "req",
method = "form",
action = PrintableShipmentForm,
)
settings.customise_req_req_resource = customise_req_req_resource
# =============================================================================
class PrintableShipmentForm(S3Method):
""" REST Method Handler for Printable Shipment Forms """
# -------------------------------------------------------------------------
def apply_method(self, r, **attr):
"""
Entry point for REST interface.
@param r: the S3Request instance
@param attr: controller attributes
@note: always returns PDF, disregarding the requested format
"""
output = {}
if r.http == "GET":
if r.id:
tablename = r.tablename
if tablename == "req_req":
output = self.request_form(r, **attr)
elif tablename == "inv_send":
output = self.waybill(r, **attr)
elif tablename == "inv_recv":
output = self.goods_received_note(r, **attr)
else:
# Not supported
r.error(405, current.ERROR.BAD_METHOD)
else:
# Record ID is required
r.error(400, current.ERROR.BAD_REQUEST)
else:
r.error(405, current.ERROR.BAD_METHOD)
return output
# -------------------------------------------------------------------------
def request_form(self, r, **attr):
"""
Request Form
@param r: the S3Request instance
@param attr: controller attributes
"""
T = current.T
s3db = current.s3db
# Master record (=req_req)
resource = s3db.resource(r.tablename,
id = r.id,
components = ["req_item"],
)
# Columns and data for the form header
header_fields = ["req_ref",
"date",
"date_required",
(T("Deliver to"), "site_id"),
(T("Reason for Request"), "purpose"),
"requester_id",
"site_id$site_id:inv_warehouse.contact",
"comments",
]
header_data = resource.select(header_fields,
start = 0,
limit = 1,
represent = True,
show_links = False,
raw_data = True,
)
if not header_data:
r.error(404, current.ERROR.BAD_RECORD)
# Generate PDF header
pdf_header = self.request_form_header(header_data)
# Filename from req_ref
header_row = header_data.rows[0]
pdf_filename = header_row["_row"]["req_req.req_ref"]
# Component (=inv_track_item)
component = resource.components["req_item"]
body_fields = ["item_id",
"item_pack_id",
"quantity",
"comments",
]
# Aggregate methods and column names
aggregate = [("sum", "req_req_item.quantity"),
]
# Generate the JSON data dict
json_data = self._json_data(component,
body_fields,
aggregate = aggregate,
)
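# (_json_data is a helper presumably defined further down in this class:
#  it runs the component select and groups the rows for S3GroupedItemsTable)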
# Generate the grouped items table
from s3 import S3GroupedItemsTable
output = S3GroupedItemsTable(component,
data = json_data,
totals_label = T("Total"),
title = T("Logistics Requisition"),
pdf_header = pdf_header,
pdf_footer = self.request_form_footer,
)
# ...and export it as PDF
return output.pdf(r, filename=pdf_filename)
# -------------------------------------------------------------------------
@classmethod
def request_form_header(cls, data):
"""
Header for Request Forms
@param data: the S3ResourceData for the req_req
"""
row = data.rows[0]
labels = {rfield.colname: rfield.label for rfield in data.rfields}
def row_(left, right):
return cls._header_row(left, right, row=row, labels=labels)
from gluon import DIV, H2, H4, TABLE, TD, TH, TR, P
T = current.T
# Get organisation name and logo
from .layouts import OM
name, logo = OM().render()
# The title
title = H2(T("Logistics Requisition"))
# Waybill details
dtable = TABLE(
TR(TD(DIV(logo, H4(name)), _colspan = 2),
TD(DIV(title), _colspan = 2),
),
row_("req_req.req_ref", None),
row_("req_req.date", "req_req.date_required"),
row_("req_req.site_id", "req_req.purpose"),
row_("req_req.requester_id", "inv_warehouse.contact"),
)
# Waybill comments
ctable = TABLE(TR(TH(T("Comments"))),
TR(TD(row["req_req.comments"])),
)
return DIV(dtable, P(" "), ctable)
# -------------------------------------------------------------------------
@staticmethod
def request_form_footer(r):
"""
Footer for Request Forms
@param r: the S3Request
"""
T = current.T
from gluon import TABLE, TH, TR
return TABLE(TR(TH(" "),
TH(T("Name")),
TH(T("Signature")),
TH(T("Date")),
),
TR(TH(T("Requester"))),
TR(TH(T("Budget Administrator"))),
TR(TH(T("Finance"))),
)
# -------------------------------------------------------------------------
def waybill(self, r, **attr):
"""
Waybill
@param r: the S3Request instance
@param attr: controller attributes
"""
T = current.T
s3db = current.s3db
# Component declarations to distinguish between the
# origin and destination warehouses
s3db.add_components("inv_send",
inv_warehouse = ({"name": "origin",
"joinby": "site_id",
"pkey": "site_id",
"filterby": False,
"multiple": False,
},
{"name": "destination",
"joinby": "site_id",
"pkey": "to_site_id",
"filterby": False,
"multiple": False,
},
),
)
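# (the "origin"/"destination" aliases surface in selected data as
#  inv_origin_warehouse.* / inv_destination_warehouse.* columns, as used
#  in waybill_header below)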
# Master record (=inv_send)
resource = s3db.resource(r.tablename,
id = r.id,
components = ["origin",
"destination",
"track_item",
],
)
# Columns and data for the form header
header_fields = ["send_ref",
"req_ref",
"date",
"delivery_date",
(T("Origin"), "site_id"),
(T("Destination"), "to_site_id"),
"sender_id",
"origin.contact",
"recipient_id",
"destination.contact",
"transported_by",
"transport_ref",
(T("Delivery Address"), "destination.location_id"),
"comments",
]
header_data = resource.select(header_fields,
start = 0,
limit = 1,
represent = True,
show_links = False,
raw_data = True,
)
if not header_data:
r.error(404, current.ERROR.BAD_RECORD)
# Generate PDF header
pdf_header = self.waybill_header(header_data)
# Filename from send_ref
header_row = header_data.rows[0]
pdf_filename = header_row["_row"]["inv_send.send_ref"]
# Component (=inv_track_item)
component = resource.components["track_item"]
body_fields = ["bin",
"item_id",
"item_pack_id",
"quantity",
(T("Total Volume (m3)"), "total_volume"),
(T("Total Weight (kg)"), "total_weight"),
"supply_org_id",
"inv_item_status",
]
# Any extra fields needed for virtual fields
component.configure(extra_fields = ["item_id$weight",
"item_id$volume",
],
)
# Aggregate methods and column names
aggregate = [("sum", "inv_track_item.quantity"),
("sum", "inv_track_item.total_volume"),
("sum", "inv_track_item.total_weight"),
]
# Generate the JSON data dict
json_data = self._json_data(component,
body_fields,
aggregate = aggregate,
)
# Generate the grouped items table
from s3 import S3GroupedItemsTable
output = S3GroupedItemsTable(component,
data = json_data,
totals_label = T("Total"),
title = T("Waybill"),
pdf_header = pdf_header,
pdf_footer = self.waybill_footer,
)
# ...and export it as PDF
return output.pdf(r, filename=pdf_filename)
# -------------------------------------------------------------------------
@classmethod
def waybill_header(cls, data):
"""
Header for Waybills
@param data: the S3ResourceData for the inv_send
"""
row = data.rows[0]
labels = dict((rfield.colname, rfield.label) for rfield in data.rfields)
def row_(left, right):
return cls._header_row(left, right, row=row, labels=labels)
from gluon import DIV, H2, H4, TABLE, TD, TH, TR, P
T = current.T
# Get organisation name and logo
from .layouts import OM
name, logo = OM().render()
# The title
title = H2(T("Waybill"))
# Waybill details
dtable = TABLE(
TR(TD(DIV(logo, H4(name)), _colspan = 2),
TD(DIV(title), _colspan = 2),
),
row_("inv_send.send_ref", "inv_send.req_ref"),
row_("inv_send.date", "inv_send.delivery_date"),
row_("inv_send.site_id", "inv_send.to_site_id"),
row_("inv_send.sender_id", "inv_send.recipient_id"),
row_("inv_origin_warehouse.contact",
"inv_destination_warehouse.contact",
),
row_("inv_send.transported_by", "inv_send.transport_ref"),
row_("inv_destination_warehouse.location_id", None),
)
# Waybill comments
ctable = TABLE(TR(TH(T("Comments"))),
TR(TD(row["inv_send.comments"])),
)
return DIV(dtable, P(" "), ctable)
# -------------------------------------------------------------------------
@staticmethod
def waybill_footer(r):
"""
Footer for Waybills
@param r: the S3Request
"""
T = current.T
from gluon import TABLE, TD, TH, TR
return TABLE(TR(TH(T("Shipment")),
TH(T("Date")),
TH(T("Function")),
TH(T("Name")),
TH(T("Signature")),
TH(T("Status")),
),
TR(TD(T("Sent by"))),
TR(TD(T("Transported by"))),
TR(TH(T("Received by")),
TH(T("Date")),
TH(T("Function")),
TH(T("Name")),
TH(T("Signature")),
TH(T("Status")),
),
TR(TD(" ")),
)
# -------------------------------------------------------------------------
def goods_received_note(self, r, **attr):
"""
GRN (Goods Received Note)
@param r: the S3Request instance
@param attr: controller attributes
"""
T = current.T
s3db = current.s3db
# Master record (=inv_recv)
resource = s3db.resource(r.tablename,
id = r.id,
components = ["track_item"],
)
# Columns and data for the form header
header_fields = ["eta",
"date",
(T("Origin"), "from_site_id"),
(T("Destination"), "site_id"),
"sender_id",
"recipient_id",
"send_ref",
"recv_ref",
"comments",
]
header_data = resource.select(header_fields,
start = 0,
limit = 1,
represent = True,
show_links = False,
raw_data = True,
)
if not header_data:
r.error(404, current.ERROR.BAD_RECORD)
# Generate PDF header
pdf_header = self.goods_received_note_header(header_data)
        # Filename from recv_ref
header_row = header_data.rows[0]
pdf_filename = header_row["_row"]["inv_recv.recv_ref"]
# Component (=inv_track_item)
component = resource.components["track_item"]
body_fields = ["recv_bin",
"item_id",
"item_pack_id",
"recv_quantity",
(T("Total Volume (m3)"), "total_recv_volume"),
(T("Total Weight (kg)"), "total_recv_weight"),
"supply_org_id",
"inv_item_status",
]
# Any extra fields needed for virtual fields
component.configure(extra_fields = ["item_id$weight",
"item_id$volume",
],
)
# Aggregate methods and column names
aggregate = [("sum", "inv_track_item.recv_quantity"),
("sum", "inv_track_item.total_recv_volume"),
("sum", "inv_track_item.total_recv_weight"),
]
# Generate the JSON data dict
json_data = self._json_data(component,
body_fields,
aggregate = aggregate,
)
# Generate the grouped items table
from s3 import S3GroupedItemsTable
output = S3GroupedItemsTable(component,
data = json_data,
totals_label = T("Total"),
title = T("Goods Received Note"),
pdf_header = pdf_header,
pdf_footer = self.goods_received_note_footer,
)
# ...and export it as PDF
return output.pdf(r, filename=pdf_filename)
# -------------------------------------------------------------------------
@classmethod
def goods_received_note_header(cls, data):
"""
Header for Goods Received Notes
@param data: the S3ResourceData for the inv_recv
"""
row = data.rows[0]
labels = dict((rfield.colname, rfield.label) for rfield in data.rfields)
def row_(left, right):
return cls._header_row(left, right, row=row, labels=labels)
from gluon import DIV, H2, H4, TABLE, TD, TH, TR, P
T = current.T
# Get organisation name and logo
from .layouts import OM
name, logo = OM().render()
# The title
title = H2(T("Goods Received Note"))
# GRN details
dtable = TABLE(TR(TD(DIV(logo, H4(name)), _colspan = 2),
TD(DIV(title), _colspan = 2),
),
row_("inv_recv.eta", "inv_recv.date"),
row_("inv_recv.from_site_id", "inv_recv.site_id"),
row_("inv_recv.sender_id", "inv_recv.recipient_id"),
row_("inv_recv.send_ref", "inv_recv.recv_ref"),
)
# GRN comments
ctable = TABLE(TR(TH(T("Comments"))),
TR(TD(row["inv_recv.comments"])),
)
return DIV(dtable, P(" "), ctable)
# -------------------------------------------------------------------------
@staticmethod
def goods_received_note_footer(r):
"""
Footer for Goods Received Notes
@param r: the S3Request
"""
T = current.T
from gluon import TABLE, TD, TH, TR
return TABLE(TR(TH(T("Delivered by")),
TH(T("Date")),
TH(T("Function")),
TH(T("Name")),
TH(T("Signature")),
TH(T("Status")),
),
TR(TD(T(" "))),
TR(TH(T("Received by")),
TH(T("Date")),
TH(T("Function")),
TH(T("Name")),
TH(T("Signature")),
TH(T("Status")),
),
TR(TD(" ")),
)
# -------------------------------------------------------------------------
@staticmethod
def _header_row(left, right, row=None, labels=None):
"""
Helper function to generate a 2-column table row
for the PDF header
@param left: the column name for the left column
@param right: the column name for the right column,
or None for an empty column
@param row: the S3ResourceData row
@param labels: dict of labels {colname: label}
"""
from gluon import TD, TH, TR
if right:
header_row = TR(TH(labels[left]),
TD(row[left]),
TH(labels[right]),
TD(row[right]),
)
else:
header_row = TR(TH(labels[left]),
TD(row[left], _colspan = 3),
)
return header_row
# -------------------------------------------------------------------------
@staticmethod
def _json_data(component, list_fields, aggregate=None):
"""
Extract, group and aggregate the data for the form body
@param component: the component (S3Resource)
@param list_fields: the columns for the form body
(list of field selectors)
@param aggregate: aggregation methods and fields,
a list of tuples (method, column name)
"""
# Extract the data
data = component.select(list_fields,
limit = None,
raw_data = True,
represent = True,
show_links = False,
)
# Get the column names and labels
columns = []
append_column = columns.append
labels = {}
for rfield in data.rfields:
colname = rfield.colname
append_column(colname)
labels[colname] = rfield.label
# Group and aggregate the items
from s3 import S3GroupedItems
gi = S3GroupedItems(data.rows,
aggregate = aggregate,
)
# Convert into JSON-serializable dict for S3GroupedItemsTable
json_data = gi.json(fields = columns,
labels = labels,
as_dict = True,
)
return json_data
# END =========================================================================
|
if callable(standard_prep):
result = standard_prep(r)
else:
result = True
if current.auth.s3_has_role("ADMIN"):
from s3 import S3OptionsFilter, S3TextFilter
filter_widgets = [S3TextFilter(["name",
],
label=T("Search")
),
S3OptionsFilter("organisation_id",
),
]
current.s3db.configure("hrm_job_title",
filter_widgets = filter_widgets,
)
return result
|
LogIn.js
|
import React, { Component } from 'react';
import $ from 'jquery';
class Login extends Component {
constructor(props){
super(props);
|
error: null,
}
}
componentDidMount(){
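        // Wire up jQuery handlers after mount: the back button returns to
        // the 'Get Started' stage, and form submission kicks off the auth
        // flow (intent, verification, then the 'Password' stage).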
$('.Login--back-button').click(() => this.props.changeStage('Get Started'))
$('form').on("submit", ((event) => {
event.preventDefault();
const submission = $('form').serializeArray()
if (submission[0].value){
this.props.authProcessor.setIntent('login')
this.props.authProcessor.verifyAndSaveValues(submission)
.then(() => this.props.changeStage('Password'))
.catch(error => this.setState({error}))
} else {
this.setState({error: 'Please fill in your email.' })
}
}));
}
render(){
return (
<div className="Login">
<div className="page--center">
{this.state.error}
<form>
<input
type="email"
placeholder="Email"
name="email"
/>
<button type="submit">Next</button>
</form>
OAuth options
<button className="Login--back-button">Back</button>
</div>
</div>
);
}
}
export default Login
|
this.state = {
|
mod.rs
|
pub mod chunks;
pub mod popups;
pub mod statics;
use chrono::offset::Local;
use tui::{
backend::Backend,
layout::{Constraint, Direction, Layout, Rect},
style::{Color, Modifier, Style},
terminal::Frame,
text::Spans,
widgets::{Block, Borders, Cell, Row, Table},
};
use crate::{
handlers::{
app::{App, BufferName, State},
config::CompleteConfig,
data::PayLoad,
},
utils::{styles, text::title_spans},
};
#[derive(Debug, Clone)]
pub struct Verticals {
pub chunks: Vec<Rect>,
pub constraints: Vec<Constraint>,
}
impl Verticals {
pub fn
|
(chunks: Vec<Rect>, constraints: Vec<Constraint>) -> Self {
Self {
chunks,
constraints,
}
}
}
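/// Draw the full UI for one frame: the chat table, plus (depending on
/// `app.state`) the message input/search chunk or a popup.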
pub fn draw_ui<T: Backend>(frame: &mut Frame<T>, app: &mut App, config: &CompleteConfig) {
let table_widths = app.table_constraints.as_ref().unwrap();
let mut vertical_chunk_constraints = vec![Constraint::Min(1)];
// Allowing the input box to exist in different modes
if let State::MessageInput | State::MessageSearch = app.state {
        vertical_chunk_constraints.push(Constraint::Length(3));
}
let margin = if config.frontend.padding { 1 } else { 0 };
let vertical_chunks = Layout::default()
.direction(Direction::Vertical)
.margin(margin)
.constraints(vertical_chunk_constraints.as_ref())
.split(frame.size());
let verticals = Verticals::new(vertical_chunks, vertical_chunk_constraints);
let horizontal_chunks = Layout::default()
.direction(Direction::Horizontal)
.margin(margin)
.constraints(table_widths.as_ref())
.split(frame.size());
    // Index 0 is arbitrary here: whichever chunk we pick, the height is the same.
let general_chunk_height = verticals.chunks[0].height as usize - 3;
    // The chunk furthest to the right holds the messages; that's the one we want.
let message_chunk_width = horizontal_chunks[table_widths.len() - 1].width as usize - 4;
// Making sure that messages do have a limit and don't eat up all the RAM.
app.messages.truncate(config.terminal.maximum_messages);
    // Row heights vary due to text wrapping, so we track the space actually
    // used in order to scroll correctly.
let mut total_row_height: usize = 0;
let mut display_rows = std::collections::VecDeque::new();
let mut scroll_offset = app.scroll_offset;
'outer: for data in app.messages.iter() {
if let PayLoad::Message(msg) = data.payload.clone() {
if app.filters.contaminated(msg) {
continue;
}
} else if scroll_offset > 0 {
scroll_offset -= 1;
continue;
}
let buffer = app.current_buffer();
let rows = if !buffer.is_empty() {
data.to_row(
&config.frontend,
&message_chunk_width,
match app.selected_buffer {
BufferName::MessageHighlighter => Some(buffer.to_string()),
_ => None,
},
)
} else {
data.to_row(&config.frontend, &message_chunk_width, None)
};
for row in rows.iter().rev() {
if total_row_height < general_chunk_height {
display_rows.push_front(row.to_owned());
total_row_height += 1;
} else {
break 'outer;
}
}
}
// Padding with empty rows so chat can go from bottom to top.
if general_chunk_height > total_row_height {
for _ in 0..(general_chunk_height - total_row_height) {
display_rows.push_front(Row::new(vec![Cell::from("")]));
}
}
let chat_title_format = || -> Spans {
if config.frontend.title_shown {
title_spans(
vec![
vec![
"Time",
&Local::now()
.format(config.frontend.date_format.as_str())
.to_string(),
],
vec!["Channel", config.twitch.channel.as_str()],
vec![
"Filters",
format!(
"{} / {}",
if app.filters.enabled() {
"enabled"
} else {
"disabled"
},
if app.filters.reversed() {
"reversed"
} else {
"static"
}
)
.as_str(),
],
],
Style::default().fg(Color::Red).add_modifier(Modifier::BOLD),
)
} else {
Spans::default()
}
};
let table = Table::new(display_rows)
.header(
Row::new(app.column_titles.as_ref().unwrap().to_owned()).style(styles::COLUMN_TITLE),
)
.block(
Block::default()
.borders(Borders::ALL)
.title(chat_title_format())
.style(styles::BORDER_NAME),
)
.widths(table_widths.as_ref())
.column_spacing(1);
frame.render_widget(table, verticals.chunks[0]);
match app.state {
// States of the application that require a chunk of the main window
State::MessageInput => {
chunks::chatting::message_input(frame, app, verticals, config.storage.mentions)
}
State::MessageSearch => chunks::message_search::search_messages(frame, app, verticals),
// States that require popups
State::Help => popups::help::show_keybinds(frame),
State::ChannelSwitch => {
popups::channels::switch_channels(frame, app, config.storage.channels)
}
_ => {}
}
}
|
new
|
_wait_cluster.py
|
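"""Block until the Ray cluster reports the requested number of nodes.

Prints progress every feedback_interval_s seconds and raises a
RuntimeError once max_time_s is exceeded.
"""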
import argparse
import time
import ray
ray.init(address="auto")
parser = argparse.ArgumentParser()
parser.add_argument(
"num_nodes", type=int, help="Wait for this number of nodes (includes head)"
)
parser.add_argument("max_time_s", type=int, help="Wait for this number of seconds")
|
default=10,
help="Wait for this number of seconds",
)
args = parser.parse_args()
curr_nodes = 0
start = time.time()
next_feedback = start
max_time = start + args.max_time_s
while curr_nodes < args.num_nodes:
now = time.time()
if now >= max_time:
raise RuntimeError(
f"Maximum wait time reached, but only "
f"{curr_nodes}/{args.num_nodes} nodes came up. Aborting."
)
if now >= next_feedback:
passed = now - start
print(
f"Waiting for more nodes to come up: "
f"{curr_nodes}/{args.num_nodes} "
f"({passed:.0f} seconds passed)"
)
next_feedback = now + args.feedback_interval_s
time.sleep(5)
curr_nodes = len(ray.nodes())
passed = time.time() - start
print(
f"Cluster is up: {curr_nodes}/{args.num_nodes} nodes online after "
f"{passed:.0f} seconds"
)
|
parser.add_argument(
"--feedback_interval_s",
type=int,
|
pipeline.rs
|
use std::{collections::HashMap, path::PathBuf};
use super::types::Document;
use super::types::MergedPiece;
use crate::error::Error;
use crate::identifiers::FastText;
use crate::io::writer::WriterTrait;
use crate::lang::LANG;
use crate::sources::commoncrawl::Wet;
use log::Level::Debug;
use log::{debug, error, info, log_enabled, warn};
use rayon::prelude::*;
use warc::BufferedBody;
use warc::Record;
use crate::io::LangFiles;
use crate::pipelines::pipeline::Pipeline;
use super::types::WarcHeaders;
/// OSCAR v1.5 generation pipeline
///
/// OSCAR v1.5 is a backward-compatible corpus
/// enhanced with metadata coming from CommonCrawl.
///
/// The CommonCrawl dump is composed of shards,
/// each shard is composed of records,
/// and each record is composed of a metadata header and a body containing sentences.
///
/// # Processing
/// _Every scope is concurrent: green threads are created for shards, records and sentences._
/// - We process each record separately, getting a list of sentence-language pairs, along with metadata from the document.
/// - Once we've treated each record of a given shard, we
/// transform our list of sentence-language pairs into chunks of contiguous same-language sentences
/// and store shard-level line offsets on the metadata.
/// Then we group chunks by language (at shard level) and write them to disk.
/// - We also keep track of disk-level line offsets to sync shard-level offsets between writes.
///
/// TODO: Better document this step.
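///
/// # Example
///
/// A minimal sketch (paths below are placeholders, not real data; `run`
/// comes from the [`Pipeline`] trait):
///
/// ```ignore
/// let pipeline = OscarMetadata::new(
///     PathBuf::from("shards/"),     // src: folder of gzipped WET shards
///     PathBuf::from("corpus/"),     // dst: per-language corpus output
///     PathBuf::from("lid.176.bin"), // fasttext language identification model
/// );
/// pipeline.run()?;
/// ```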
pub struct OscarMetadata {
src: PathBuf,
dst: PathBuf,
lid_path: PathBuf,
}
impl OscarMetadata {
pub fn new(src: PathBuf, dst: PathBuf, lid_path: PathBuf) -> Self {
Self { src, dst, lid_path }
}
    /// Attempts to predict the language of the provided sentence.
///
/// Returns [None] if no language is detected.
// why return the sentence itself?
// TODO: change return type to Option<&'static str>.
fn identify_sentence(sentence: &str, cls: &FastText) -> Option<(String, &'static str)> {
let prediction = cls.predict(sentence).ok();
if let Some(Some(lang)) = prediction {
//TODO: rewrite these two lines more elegantly
// we can unwrap since predict returns None if no predictions are
// found
let lang = lang.get(0).unwrap();
// check if fasttext provided lang exists
// return None if not
match LANG.get(lang.label.as_str()) {
Some(lang) => Some((sentence.to_string(), *lang)),
None => {
warn!("lang {} does not exist!", lang.label);
None
}
}
} else {
None
}
}
/// Process a provided record.
///
/// Here, sentences that are >100 chars are processed,
/// and the others are discarded.
/// See [String::chars::count].
///
/// Then, we identify language for each sentence
/// and return (sentence, language) along with headers
/// extracted from the WARC.
fn process_record(
record: Record<BufferedBody>,
cls: &FastText,
) -> Option<(Vec<(String, &'static str)>, WarcHeaders)> {
if log_enabled!(Debug) {
debug!("processing record {}", record.warc_id());
};
let body = String::from_utf8(record.body().to_vec()).ok();
// process record if body is utf8-valid
if let Some(sentences) = body {
            // keep only lines longer than 100 characters,
// then convert into a parallel iterator
let sentences = sentences
.lines()
.filter(|line| line.chars().count() > 100)
.par_bridge();
let results: Vec<(String, &'static str)> = sentences
// predict for each sentence, discarding
                // predictions that do not meet the threshold
.filter_map(|sentence| Self::identify_sentence(sentence, cls))
.collect();
Some((results, record.into_raw_parts().0.headers))
} else {
error!("body not UTF-8 valid: {:?}", record.warc_id());
None
}
}
}
impl Pipeline<()> for OscarMetadata {
fn version() -> &'static str {
"1.1.0"
}
/// Run the whole pipeline
fn
|
(&self) -> Result<(), Error> {
// let errors;
let cls = FastText::new(&self.lid_path, 1, 0.8)?;
// list files in source folder,
// filter out errors from fs and from gzip/wet.
// This means that invalid gz files and invalid
// wet files are discarded silently
let results = std::fs::read_dir(&self.src)?
.filter_map(|shard| {
shard.map_or_else(
|e| {
error!("error reading shard directory: {}", e);
None
},
Some,
)
})
.map(|shard| shard.path());
// convert to parallel iterator
// /!\: We use par_bridge, that is suboptimal
// compared to implementing IntoParallelIterator
// ourselves.
let results = results.enumerate().par_bridge();
// holds file handles
// let langfiles = match self.part_size {
// Some(ps) => LangFiles::new(&self.dst, Some(ps * 1_000_000))?,
// None => LangFiles::new(&self.dst, None)?,
// };
let langfiles = LangFiles::new(&self.dst, None)?;
// iterate over shards
let r: Vec<Error> = results
.filter_map(|(idx, shard)| {
// holds merged pieces by lang
let mut lang_pieces: HashMap<&'static str, Vec<MergedPiece>> = HashMap::new();
// get an atomic reference to global offsets
// let offsets_global_arc = offsets_global.clone();
info!("processing shard {}: {:?}", idx, &shard);
let shard = Wet::from_path_gzip(&shard);
if shard.is_err() {
error!("Could not read/open shard {}", idx);
return shard.err();
}
let shard = shard.unwrap();
// convert into a parallel iterator
let wetfile = shard.iter.enumerate().par_bridge();
let shard_results: Vec<(Vec<(String, &'static str)>, WarcHeaders)> = wetfile
.filter_map(|(idx_record, record)| match record {
Ok(record) => OscarMetadata::process_record(record, &cls),
Err(e) => {
warn!("Error on record {} of shard {}: {:?}", idx_record, idx, e);
None
}
})
// collect here is blocking
// because we can't write concurrently into a HashMap
// and using Mutexes might ruin performance.
.collect(); //TODO: test with a for_each and a channel to send?
// Iterate over (record, header) tuples
let shard_results = shard_results.into_iter().filter_map(|(record, header)| {
// split between langs and sentences
let langs: Vec<&str> = record.iter().map(|(_, lang)| *lang).collect();
let sentences: Vec<String> =
record.into_iter().map(|(sentences, _)| sentences).collect();
// create new document for current record
let doc = Document::new(header, sentences, langs);
match doc {
Ok(doc) => Some(doc),
Err(e) => {
warn!("{:?}", e);
None
}
}
});
// merge all documents together
// get a vector of merged pieces of difference languages
let docs_merged = shard_results
                    .flat_map(|doc| doc.into_merged_pieces_lang())
.collect::<Vec<MergedPiece>>();
// sort merged pieces into different langs
// now there's a hashmap that points each lang
// to a vector of merged pieces
for piece in docs_merged {
let e = lang_pieces
.entry(piece.identification())
.or_insert_with(Vec::new);
e.push(piece);
}
// write concurrently
lang_pieces.into_par_iter().for_each(|(lang, pieces)| {
let writer = langfiles.writers().get(lang).unwrap();
let mut writer_lock = writer.lock().unwrap();
writer_lock.write(pieces).unwrap();
});
None
})
.collect();
// fix trailing comma
// langfiles.close_meta()?;
for err in r {
error!("{:?}", err);
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use warc::{EmptyBody, Record};
use crate::identifiers::FastText;
use super::OscarMetadata;
#[test]
fn test_process_record() {
let cls = FastText::new_lid().unwrap();
// let oscar_metadata =
// OscarMetadata::new(temp_dir(), temp_dir(), PathBuf::from("lid.176.bin"));
let record: Record<EmptyBody> = Record::default();
let body = "english test that is longer than one hundred characters. english test that is longer than one hundred characters.
phrase française de plus de cent caractères. Ceci est une phrase française de plus de cent caractères.";
println!("{}", body.len());
let record = record.add_body(body);
let (identifications, _) = OscarMetadata::process_record(record, &cls).unwrap();
for (sentence, id) in identifications {
if id == "en" {
assert_eq!(sentence, "english test that is longer than one hundred characters. english test that is longer than one hundred characters.");
} else if id == "fr" {
assert_eq!(sentence, "phrase française de plus de cent caractères. Ceci est une phrase française de plus de cent caractères.");
}
}
}
}
|
run
|
http.js
|
import axios from 'axios'
import qs from 'qs'
import serverConfig from '../config/server-config'
const Axios = axios.create({
  baseURL: serverConfig.api + '/blog/nuxt/', // reverse-proxied locally
timeout: 5000,
responseType: 'json',
  withCredentials: true, // whether to send cookies with requests
headers: {
'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8'
},
proxy: serverConfig.baseProxy
})
// Request interceptor: serialize the payload of outgoing requests
Axios.interceptors.request.use(
config => {
if (
config.method === 'post' ||
config.method === 'put' ||
config.method === 'delete'
) {
      // serialize form data
config.data = qs.stringify(config.data)
}
return config
},
error => {
return Promise.reject(error)
}
)
// Response interceptor: handle incoming responses
Axios.interceptors.response.use(
function(response) {
    // handle the response data
return response
},
function(error) {
    // handle response errors
return Promise.reject(error)
}
)
/**
 * GET request helper
* @param url
* @param params
* @returns {Promise}
*/
export function get(url, params = {}) {
return new Promise((resolve, reject) => {
Axios.get(url, {
params
})
.then(response => {
resolve(response.data)
})
.catch(err => {
reject(err)
})
})
}
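// Usage sketch (endpoint and params here are hypothetical):
//   get('/article/list', { page: 1 }).then(data => console.log(data))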
/**
 * POST request helper
* @param url
* @param params
* @returns {Promise}
*/
export function post(url, params = {}) {
return new Promise((resolve, reject) => {
Axios.post(url, params).then(
response => {
resolve(response.data)
},
err => {
reject(err)
}
)
})
}
/**
 * PUT request helper
* @param url
* @param params
* @returns {Promise}
*/
export function put(url, params = {}) {
return new Promise((resolve, reject) => {
Axios.put(url, params).then(
response => {
resolve(response.data)
},
err => {
reject(err)
}
|
/**
 * DELETE request helper
* @param url
* @param params
* @returns {Promise}
*/
export function del(url, params = {}) {
return new Promise((resolve, reject) => {
Axios.delete(url, {
params
})
.then(response => {
resolve(response.data)
})
.catch(err => {
reject(err)
})
})
}
|
)
})
}
|
lib.rs
|
use std::env;
use std::error::Error;
pub mod scoop;
use self::scoop::Scoop;
mod bucket;
use self::bucket::Bucket;
mod app;
use self::app::App;
#[derive(Debug, PartialEq)]
pub struct Args {
pub query: String,
pub exclude_bin: bool,
}
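/// Parses CLI arguments: an optional `--bin` flag (also search app binaries)
/// followed by an optional query, where `*` or no query matches everything.
/// E.g. `prog --bin 7zip` yields `Args { query: "7zip", exclude_bin: false }`.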
pub fn parse_args(args: env::Args) -> Result<Args, &'static str> {
let args: Vec<String> = args.collect();
let query: String;
let mut exclude_bin = true;
match &args.len() {
1 => query = String::new(),
2 => {
if args[1] == "--bin" {
exclude_bin = false;
query = String::new();
} else if args[1] == "*" {
query = String::new();
} else {
query = args[1].clone();
}
}
3 => {
if args[1] == "--bin" {
exclude_bin = false;
if args[2] == "*" {
query = String::new();
} else {
query = args[2].clone();
}
} else {
return Err("option is not valid");
}
}
_ => return Err("args number incorrect."),
}
Ok(Args {
query: query.to_lowercase(),
exclude_bin,
})
}
/// Displays its results directly, so the caller doesn't need to handle them.
fn search_include_bin(scoop: &Scoop, query: &str) -> Result<(), Box<dyn Error>> {
let bucket_paths = Bucket::get_bucket_paths(scoop);
if Bucket::search_include_bin(&bucket_paths, query).is_none() {
let local_bucket_names = &bucket_paths
.iter()
            .map(|path| Bucket::get_name(path).unwrap_or_default())
.collect();
match Bucket::search_remote_buckets(scoop, local_bucket_names, query) {
Some(buckets) => {
println!("Results from other known buckets...");
println!("(add them using 'scoop bucket add <name>')");
println!("");
display_buckets(&buckets);
}
None => println!("No matches found."),
}
}
Ok(())
}
fn search_exclude_bin(scoop: &Scoop, query: &str) -> Result<(), Box<dyn Error>> {
let bucket_paths = Bucket::get_bucket_paths(scoop);
match Bucket::search_exclude_bin(&bucket_paths, query) {
Some(buckets) => display_buckets(&buckets),
None => {
let local_bucket_names = &bucket_paths
.iter()
                .map(|path| Bucket::get_name(path).unwrap_or_default())
.collect();
match Bucket::search_remote_buckets(scoop, local_bucket_names, query) {
Some(buckets) => {
println!("Results from other known buckets...");
println!("(add them using 'scoop bucket add <name>')");
println!("");
display_buckets(&buckets);
}
None => match Bucket::search_include_bin(&bucket_paths, query) {
Some(_) => {}
None => println!("No matches found."),
},
}
}
}
Ok(())
}
pub fn run(scoop: &Scoop, args: &Args) -> Result<(), Box<dyn Error>> {
    if args.exclude_bin {
search_exclude_bin(scoop, &args.query)
} else {
search_include_bin(scoop, &args.query)
}
}
fn display_apps(bucket_name: &str, apps: &[App]) {
    if !apps.is_empty() {
        println!("'{}' bucket: ", bucket_name);
        for app in apps {
            if !app.version.is_empty() {
                if !app.bin.is_empty() {
println!(
" {} ({}) --> includes '{}'",
app.name, app.version, app.bin[0]
);
} else {
println!(" {} ({})", app.name, app.version);
}
} else {
println!(" {}", app.name);
}
}
println!("");
}
}
fn display_buckets(buckets: &[Bucket]) {
for bucket in buckets {
display_apps(&bucket.name, &bucket.apps);
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_search_apps()
|
#[test]
fn test_search_remote_apps() {
let remote_url =
"https://api.github.com/repos/ScoopInstaller/Main/git/trees/HEAD?recursive=1";
let query = "7zip";
let actual = App::search_remote_apps(remote_url, query).unwrap();
let expect = vec![App {
name: String::from("bucket/7zip"),
version: String::new(),
bin: Vec::new(),
}];
assert_eq!(expect, actual);
}
#[test]
fn test_search_exclude_bin() {
let scoop = Scoop::new();
let bucket_paths = Bucket::get_bucket_paths(&scoop);
let query = "7zip";
let actual = Bucket::search_exclude_bin(&bucket_paths, query);
let expect = Some(vec![
Bucket {
name: String::from("extras"),
apps: Vec::new(),
},
Bucket {
name: String::from("games"),
apps: Vec::new(),
},
Bucket {
name: String::from("java"),
apps: Vec::new(),
},
Bucket {
name: String::from("main"),
apps: vec![App {
name: String::from("7zip"),
version: String::from("19.00"),
bin: Vec::new(),
}],
},
Bucket {
name: String::from("nerd-fonts"),
apps: Vec::new(),
},
]);
assert_eq!(expect, actual);
}
}
|
{
let apps = vec![App {
name: String::from("test_app"),
version: String::from("test_version"),
bin: vec![String::from("test_bin")],
}];
let query = String::from("test");
let expect = vec![App {
name: String::from("test_app"),
version: String::from("test_version"),
bin: Vec::new(),
}];
let actual = App::search_apps(&apps, &query);
assert_eq!(expect, actual);
}
|
test_backup.py
|
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for Backup code."""
import copy
import ddt
import os
import uuid
import mock
from os_brick.initiator.connectors import fake as fake_connectors
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_utils import importutils
from oslo_utils import timeutils
import cinder
from cinder.backup import api
from cinder.backup import manager
from cinder import context
from cinder import db
from cinder import exception
from cinder import objects
from cinder.objects import fields
from cinder import test
from cinder.tests import fake_driver
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import utils
from cinder.volume import rpcapi as volume_rpcapi
CONF = cfg.CONF
class FakeBackupException(Exception):
pass
class BaseBackupTest(test.TestCase):
def setUp(self):
super(BaseBackupTest, self).setUp()
self.backup_mgr = importutils.import_object(CONF.backup_manager)
self.backup_mgr.host = 'testhost'
self.backup_mgr.is_initialized = True
self.ctxt = context.get_admin_context()
paths = ['cinder.volume.rpcapi.VolumeAPI.delete_snapshot',
'cinder.volume.rpcapi.VolumeAPI.delete_volume',
'cinder.volume.rpcapi.VolumeAPI.detach_volume',
'cinder.volume.rpcapi.VolumeAPI.'
'secure_file_operations_enabled']
self.volume_patches = {}
self.volume_mocks = {}
for path in paths:
name = path.split('.')[-1]
self.volume_patches[name] = mock.patch(path)
self.volume_mocks[name] = self.volume_patches[name].start()
self.addCleanup(self.volume_patches[name].stop)
def _create_backup_db_entry(self, volume_id=str(uuid.uuid4()),
restore_volume_id=None,
display_name='test_backup',
display_description='this is a test backup',
container='volumebackups',
status=fields.BackupStatus.CREATING,
size=1,
object_count=0,
project_id=str(uuid.uuid4()),
service=None,
temp_volume_id=None,
temp_snapshot_id=None,
snapshot_id=None,
metadata=None,
parent_id=None,
encryption_key_id=None):
"""Create a backup entry in the DB.
Return the entry ID
"""
kwargs = {}
kwargs['volume_id'] = volume_id
kwargs['restore_volume_id'] = restore_volume_id
kwargs['user_id'] = str(uuid.uuid4())
kwargs['project_id'] = project_id
kwargs['host'] = 'testhost'
kwargs['availability_zone'] = '1'
kwargs['display_name'] = display_name
kwargs['display_description'] = display_description
kwargs['container'] = container
kwargs['status'] = status
kwargs['fail_reason'] = ''
kwargs['service'] = service or CONF.backup_driver
kwargs['snapshot_id'] = snapshot_id
kwargs['parent_id'] = parent_id
kwargs['size'] = size
kwargs['object_count'] = object_count
kwargs['temp_volume_id'] = temp_volume_id
kwargs['temp_snapshot_id'] = temp_snapshot_id
kwargs['metadata'] = metadata or {}
kwargs['encryption_key_id'] = encryption_key_id
backup = objects.Backup(context=self.ctxt, **kwargs)
backup.create()
return backup
def _create_volume_db_entry(self, display_name='test_volume',
display_description='this is a test volume',
status='backing-up',
previous_status='available',
size=1,
host='testhost',
encryption_key_id=None):
"""Create a volume entry in the DB.
Return the entry ID
"""
vol = {}
vol['size'] = size
vol['host'] = host
vol['user_id'] = str(uuid.uuid4())
vol['project_id'] = str(uuid.uuid4())
vol['status'] = status
vol['display_name'] = display_name
vol['display_description'] = display_description
vol['attach_status'] = fields.VolumeAttachStatus.DETACHED
vol['availability_zone'] = '1'
vol['previous_status'] = previous_status
vol['encryption_key_id'] = encryption_key_id
volume = objects.Volume(context=self.ctxt, **vol)
volume.create()
return volume.id
def _create_snapshot_db_entry(self, display_name='test_snapshot',
display_description='test snapshot',
status=fields.SnapshotStatus.AVAILABLE,
size=1,
volume_id=str(uuid.uuid4()),
provider_location=None):
"""Create a snapshot entry in the DB.
Return the entry ID.
"""
kwargs = {}
kwargs['size'] = size
kwargs['user_id'] = str(uuid.uuid4())
kwargs['project_id'] = str(uuid.uuid4())
kwargs['status'] = status
kwargs['display_name'] = display_name
kwargs['display_description'] = display_description
kwargs['volume_id'] = volume_id
kwargs['cgsnapshot_id'] = None
kwargs['volume_size'] = size
kwargs['metadata'] = {}
kwargs['provider_location'] = provider_location
snapshot_obj = objects.Snapshot(context=self.ctxt, **kwargs)
snapshot_obj.create()
return snapshot_obj
def _create_volume_attach(self, volume_id):
values = {'volume_id': volume_id,
'attach_status': fields.VolumeAttachStatus.ATTACHED, }
attachment = db.volume_attach(self.ctxt, values)
db.volume_attached(self.ctxt, attachment['id'], None, 'testhost',
'/dev/vd0')
def _create_exported_record_entry(self, vol_size=1, exported_id=None):
"""Create backup metadata export entry."""
vol_id = self._create_volume_db_entry(status='available',
size=vol_size)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.AVAILABLE, volume_id=vol_id)
if exported_id is not None:
backup.id = exported_id
export = self.backup_mgr.export_record(self.ctxt, backup)
return export
def _create_export_record_db_entry(self,
volume_id=str(uuid.uuid4()),
status=fields.BackupStatus.CREATING,
project_id=str(uuid.uuid4()),
backup_id=None):
"""Create a backup entry in the DB.
Return the entry ID
"""
kwargs = {}
kwargs['volume_id'] = volume_id
kwargs['user_id'] = str(uuid.uuid4())
kwargs['project_id'] = project_id
kwargs['status'] = status
if backup_id:
kwargs['id'] = backup_id
backup = objects.BackupImport(context=self.ctxt, **kwargs)
backup.create()
return backup
@ddt.ddt
class BackupTestCase(BaseBackupTest):
"""Test Case for backups."""
@mock.patch.object(cinder.tests.fake_driver.FakeLoggingVolumeDriver,
'set_initialized')
@mock.patch.object(cinder.tests.fake_driver.FakeLoggingVolumeDriver,
'do_setup')
@mock.patch.object(cinder.tests.fake_driver.FakeLoggingVolumeDriver,
'check_for_setup_error')
@mock.patch('cinder.context.get_admin_context')
def test_init_host(self, mock_get_admin_context, mock_check, mock_setup,
mock_set_initialized):
"""Test stuck volumes and backups.
Make sure stuck volumes and backups are reset to correct
states when backup_manager.init_host() is called
"""
def get_admin_context():
return self.ctxt
self.override_config('backup_service_inithost_offload', False)
self.override_config('periodic_interval', 0)
vol1_id = self._create_volume_db_entry()
self._create_volume_attach(vol1_id)
db.volume_update(self.ctxt, vol1_id, {'status': 'backing-up'})
vol2_id = self._create_volume_db_entry()
self._create_volume_attach(vol2_id)
db.volume_update(self.ctxt, vol2_id, {'status': 'restoring-backup'})
vol3_id = self._create_volume_db_entry()
db.volume_update(self.ctxt, vol3_id, {'status': 'available'})
vol4_id = self._create_volume_db_entry()
db.volume_update(self.ctxt, vol4_id, {'status': 'backing-up'})
temp_vol_id = self._create_volume_db_entry()
db.volume_update(self.ctxt, temp_vol_id, {'status': 'available'})
vol5_id = self._create_volume_db_entry()
db.volume_update(self.ctxt, vol5_id, {'status': 'backing-up'})
temp_snap = self._create_snapshot_db_entry()
temp_snap.status = fields.SnapshotStatus.AVAILABLE
temp_snap.save()
backup1 = self._create_backup_db_entry(
status=fields.BackupStatus.CREATING, volume_id=vol1_id)
backup2 = self._create_backup_db_entry(
status=fields.BackupStatus.RESTORING,
restore_volume_id=vol2_id)
backup3 = self._create_backup_db_entry(
status=fields.BackupStatus.DELETING, volume_id=vol3_id)
self._create_backup_db_entry(status=fields.BackupStatus.CREATING,
volume_id=vol4_id,
temp_volume_id=temp_vol_id)
self._create_backup_db_entry(status=fields.BackupStatus.CREATING,
volume_id=vol5_id,
temp_snapshot_id=temp_snap.id)
mock_get_admin_context.side_effect = get_admin_context
self.volume = importutils.import_object(CONF.volume_manager)
self.backup_mgr.init_host()
vol1 = db.volume_get(self.ctxt, vol1_id)
self.assertEqual('available', vol1['status'])
vol2 = db.volume_get(self.ctxt, vol2_id)
self.assertEqual('error_restoring', vol2['status'])
vol3 = db.volume_get(self.ctxt, vol3_id)
self.assertEqual('available', vol3['status'])
vol4 = db.volume_get(self.ctxt, vol4_id)
self.assertEqual('available', vol4['status'])
vol5 = db.volume_get(self.ctxt, vol5_id)
self.assertEqual('available', vol5['status'])
backup1 = db.backup_get(self.ctxt, backup1.id)
self.assertEqual(fields.BackupStatus.ERROR, backup1['status'])
backup2 = db.backup_get(self.ctxt, backup2.id)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup2['status'])
self.assertRaises(exception.BackupNotFound,
db.backup_get,
self.ctxt,
backup3.id)
temp_vol = objects.Volume.get_by_id(self.ctxt, temp_vol_id)
self.volume_mocks['delete_volume'].assert_called_once_with(
self.ctxt, temp_vol)
self.assertTrue(self.volume_mocks['detach_volume'].called)
@mock.patch('cinder.objects.backup.BackupList.get_all_by_host')
@mock.patch('cinder.manager.ThreadPoolManager._add_to_threadpool')
def test_init_host_with_service_inithost_offload(self,
mock_add_threadpool,
mock_get_all_by_host):
vol1_id = self._create_volume_db_entry()
db.volume_update(self.ctxt, vol1_id, {'status': 'available'})
backup1 = self._create_backup_db_entry(
status=fields.BackupStatus.DELETING, volume_id=vol1_id)
vol2_id = self._create_volume_db_entry()
db.volume_update(self.ctxt, vol2_id, {'status': 'available'})
backup2 = self._create_backup_db_entry(
status=fields.BackupStatus.DELETING, volume_id=vol2_id)
mock_get_all_by_host.return_value = [backup1, backup2]
self.backup_mgr.init_host()
calls = [mock.call(self.backup_mgr.delete_backup, mock.ANY, backup1),
mock.call(self.backup_mgr.delete_backup, mock.ANY, backup2)]
mock_add_threadpool.assert_has_calls(calls, any_order=True)
self.assertEqual(2, mock_add_threadpool.call_count)
@mock.patch('cinder.objects.service.Service.get_minimum_rpc_version')
@mock.patch('cinder.objects.service.Service.get_minimum_obj_version')
@mock.patch('cinder.rpc.LAST_RPC_VERSIONS', {'cinder-backup': '1.3',
'cinder-volume': '1.7'})
@mock.patch('cinder.rpc.LAST_OBJ_VERSIONS', {'cinder-backup': '1.2',
'cinder-volume': '1.4'})
def test_reset(self, get_min_obj, get_min_rpc):
get_min_obj.return_value = 'liberty'
backup_mgr = manager.BackupManager()
backup_rpcapi = backup_mgr.backup_rpcapi
volume_rpcapi = backup_mgr.volume_rpcapi
self.assertEqual('1.3', backup_rpcapi.client.version_cap)
self.assertEqual('1.2',
backup_rpcapi.client.serializer._base.version_cap)
self.assertEqual('1.7', volume_rpcapi.client.version_cap)
self.assertEqual('1.4',
volume_rpcapi.client.serializer._base.version_cap)
get_min_obj.return_value = objects.base.OBJ_VERSIONS.get_current()
backup_mgr.reset()
backup_rpcapi = backup_mgr.backup_rpcapi
volume_rpcapi = backup_mgr.volume_rpcapi
self.assertEqual(get_min_rpc.return_value,
backup_rpcapi.client.version_cap)
self.assertEqual(get_min_obj.return_value,
backup_rpcapi.client.serializer._base.version_cap)
self.assertIsNone(backup_rpcapi.client.serializer._base.manifest)
self.assertEqual(get_min_rpc.return_value,
volume_rpcapi.client.version_cap)
self.assertEqual(get_min_obj.return_value,
volume_rpcapi.client.serializer._base.version_cap)
self.assertIsNone(volume_rpcapi.client.serializer._base.manifest)
@ddt.data(True, False)
def test_is_working(self, initialized):
self.backup_mgr.is_initialized = initialized
self.assertEqual(initialized, self.backup_mgr.is_working())
def test_cleanup_incomplete_backup_operations_with_exceptions(self):
"""Test cleanup resilience in the face of exceptions."""
fake_backup_list = [{'id': str(uuid.uuid4())},
{'id': str(uuid.uuid4())},
{'id': str(uuid.uuid4())}]
mock_backup_get_by_host = self.mock_object(
objects.BackupList, 'get_all_by_host')
mock_backup_get_by_host.return_value = fake_backup_list
mock_backup_cleanup = self.mock_object(
self.backup_mgr, '_cleanup_one_backup')
mock_backup_cleanup.side_effect = [Exception]
mock_temp_cleanup = self.mock_object(
self.backup_mgr, '_cleanup_temp_volumes_snapshots_for_one_backup')
mock_temp_cleanup.side_effect = [Exception]
self.assertIsNone(
self.backup_mgr._cleanup_incomplete_backup_operations(
self.ctxt))
self.assertEqual(len(fake_backup_list), mock_backup_cleanup.call_count)
self.assertEqual(len(fake_backup_list), mock_temp_cleanup.call_count)
def test_cleanup_one_backing_up_volume(self):
"""Test cleanup_one_volume for volume status 'backing-up'."""
volume_id = self._create_volume_db_entry(status='backing-up',
previous_status='available')
volume = db.volume_get(self.ctxt, volume_id)
self.backup_mgr._cleanup_one_volume(self.ctxt, volume)
volume = db.volume_get(self.ctxt, volume_id)
self.assertEqual('available', volume['status'])
def test_cleanup_one_restoring_backup_volume(self):
"""Test cleanup_one_volume for volume status 'restoring-backup'."""
volume_id = self._create_volume_db_entry(status='restoring-backup')
volume = db.volume_get(self.ctxt, volume_id)
self.backup_mgr._cleanup_one_volume(self.ctxt, volume)
volume = db.volume_get(self.ctxt, volume_id)
self.assertEqual('error_restoring', volume['status'])
def test_cleanup_one_creating_backup(self):
"""Test cleanup_one_backup for volume status 'creating'."""
vol1_id = self._create_volume_db_entry()
self._create_volume_attach(vol1_id)
db.volume_update(self.ctxt, vol1_id, {'status': 'backing-up', })
backup = self._create_backup_db_entry(
status=fields.BackupStatus.CREATING,
volume_id=vol1_id)
self.backup_mgr._cleanup_one_backup(self.ctxt, backup)
self.assertEqual(fields.BackupStatus.ERROR, backup.status)
volume = objects.Volume.get_by_id(self.ctxt, vol1_id)
self.assertEqual('available', volume.status)
def test_cleanup_one_restoring_backup(self):
"""Test cleanup_one_backup for volume status 'restoring'."""
vol1_id = self._create_volume_db_entry()
db.volume_update(self.ctxt, vol1_id, {'status': 'restoring-backup', })
backup = self._create_backup_db_entry(
status=fields.BackupStatus.RESTORING,
restore_volume_id=vol1_id)
self.backup_mgr._cleanup_one_backup(self.ctxt, backup)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup.status)
volume = objects.Volume.get_by_id(self.ctxt, vol1_id)
self.assertEqual('error_restoring', volume.status)
def test_cleanup_one_deleting_backup(self):
"""Test cleanup_one_backup for backup status 'deleting'."""
self.override_config('backup_service_inithost_offload', False)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.DELETING)
self.backup_mgr._cleanup_one_backup(self.ctxt, backup)
self.assertRaises(exception.BackupNotFound,
db.backup_get,
self.ctxt,
backup.id)
def test_cleanup_one_deleting_encrypted_backup(self):
"""Test cleanup of backup status 'deleting' (encrypted)."""
self.override_config('backup_service_inithost_offload', False)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.DELETING,
encryption_key_id=fake.ENCRYPTION_KEY_ID)
self.backup_mgr._cleanup_one_backup(self.ctxt, backup)
backup = db.backup_get(self.ctxt, backup.id)
self.assertIsNotNone(backup)
self.assertEqual(fields.BackupStatus.ERROR_DELETING,
backup.status)
def test_detach_all_attachments_handles_exceptions(self):
"""Test detach_all_attachments with exceptions."""
mock_log = self.mock_object(manager, 'LOG')
self.volume_mocks['detach_volume'].side_effect = [Exception]
fake_attachments = [
{
'id': str(uuid.uuid4()),
'attached_host': 'testhost',
'instance_uuid': None,
},
{
'id': str(uuid.uuid4()),
'attached_host': 'testhost',
'instance_uuid': None,
}
]
fake_volume = {
'id': str(uuid.uuid4()),
'volume_attachment': fake_attachments
}
self.backup_mgr._detach_all_attachments(self.ctxt,
fake_volume)
self.assertEqual(len(fake_attachments), mock_log.exception.call_count)
@ddt.data(KeyError, exception.VolumeNotFound)
def test_cleanup_temp_volumes_snapshots_for_one_backup_volume_not_found(
self, err):
"""Ensure we handle missing volume for a backup."""
mock_volume_get = self.mock_object(db, 'volume_get')
mock_volume_get.side_effect = [err]
backup = self._create_backup_db_entry(
status=fields.BackupStatus.CREATING)
self.assertIsNone(
self.backup_mgr._cleanup_temp_volumes_snapshots_for_one_backup(
self.ctxt,
backup))
def test_cleanup_temp_snapshot_for_one_backup_not_found(self):
"""Ensure we handle missing temp snapshot for a backup."""
vol1_id = self._create_volume_db_entry()
self._create_volume_attach(vol1_id)
db.volume_update(self.ctxt, vol1_id, {'status': 'backing-up'})
backup = self._create_backup_db_entry(
status=fields.BackupStatus.ERROR,
volume_id=vol1_id,
temp_snapshot_id=str(uuid.uuid4()))
self.assertIsNone(
self.backup_mgr._cleanup_temp_volumes_snapshots_for_one_backup(
self.ctxt,
backup))
self.assertFalse(self.volume_mocks['delete_snapshot'].called)
self.assertIsNone(backup.temp_snapshot_id)
backup.destroy()
db.volume_destroy(self.ctxt, vol1_id)
def test_cleanup_temp_volume_for_one_backup_not_found(self):
"""Ensure we handle missing temp volume for a backup."""
vol1_id = self._create_volume_db_entry()
self._create_volume_attach(vol1_id)
db.volume_update(self.ctxt, vol1_id, {'status': 'backing-up'})
backup = self._create_backup_db_entry(status=fields.BackupStatus.ERROR,
volume_id=vol1_id,
temp_volume_id=str(uuid.uuid4()))
self.assertIsNone(
self.backup_mgr._cleanup_temp_volumes_snapshots_for_one_backup(
self.ctxt,
backup))
self.assertFalse(self.volume_mocks['delete_volume'].called)
self.assertIsNone(backup.temp_volume_id)
backup.destroy()
db.volume_destroy(self.ctxt, vol1_id)
def test_create_backup_with_bad_volume_status(self):
"""Test creating a backup from a volume with a bad status."""
vol_id = self._create_volume_db_entry(status='restoring', size=1)
backup = self._create_backup_db_entry(volume_id=vol_id)
self.assertRaises(exception.InvalidVolume,
self.backup_mgr.create_backup,
self.ctxt,
backup)
def test_create_backup_with_bad_backup_status(self):
"""Test creating a backup with a backup with a bad status."""
vol_id = self._create_volume_db_entry(size=1)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.AVAILABLE, volume_id=vol_id)
self.assertRaises(exception.InvalidBackup,
self.backup_mgr.create_backup,
self.ctxt,
backup)
def test_create_backup_with_error(self):
"""Test error handling when error occurs during backup creation."""
vol_id = self._create_volume_db_entry(size=1)
backup = self._create_backup_db_entry(volume_id=vol_id)
mock_run_backup = self.mock_object(self.backup_mgr, '_run_backup')
mock_run_backup.side_effect = FakeBackupException(str(uuid.uuid4()))
self.assertRaises(FakeBackupException,
self.backup_mgr.create_backup,
self.ctxt,
backup)
vol = db.volume_get(self.ctxt, vol_id)
self.assertEqual('available', vol['status'])
self.assertEqual('error_backing-up', vol['previous_status'])
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.ERROR, backup['status'])
self.assertTrue(mock_run_backup.called)
@mock.patch('cinder.backup.manager.BackupManager._run_backup',
side_effect=FakeBackupException(str(uuid.uuid4())))
def test_create_backup_with_snapshot_error(self, mock_run_backup):
"""Test error handling when error occurs during backup creation."""
vol_id = self._create_volume_db_entry(size=1)
snapshot = self._create_snapshot_db_entry(status='backing-up',
volume_id=vol_id)
backup = self._create_backup_db_entry(volume_id=vol_id,
snapshot_id=snapshot.id)
self.assertRaises(FakeBackupException,
self.backup_mgr.create_backup,
self.ctxt,
backup)
snapshot.refresh()
self.assertEqual('available', snapshot.status)
backup.refresh()
self.assertEqual(fields.BackupStatus.ERROR, backup.status)
self.assertTrue(mock_run_backup.called)
@mock.patch('cinder.utils.brick_get_connector_properties')
@mock.patch('cinder.volume.rpcapi.VolumeAPI.get_backup_device')
@mock.patch('cinder.utils.temporary_chown')
@mock.patch('six.moves.builtins.open')
@mock.patch.object(os.path, 'isdir', return_value=False)
def test_create_backup(self, mock_isdir, mock_open, mock_temporary_chown,
mock_get_backup_device, mock_get_conn):
"""Test normal backup creation."""
vol_size = 1
vol_id = self._create_volume_db_entry(size=vol_size)
backup = self._create_backup_db_entry(volume_id=vol_id)
vol = objects.Volume.get_by_id(self.ctxt, vol_id)
backup_device_dict = {'backup_device': vol, 'secure_enabled': False,
'is_snapshot': False, }
mock_get_backup_device.return_value = (
objects.BackupDeviceInfo.from_primitive(backup_device_dict,
self.ctxt,
['admin_metadata',
'metadata']))
attach_info = {'device': {'path': '/dev/null'}}
mock_detach_device = self.mock_object(self.backup_mgr,
'_detach_device')
mock_attach_device = self.mock_object(self.backup_mgr,
'_attach_device')
mock_attach_device.return_value = attach_info
properties = {}
mock_get_conn.return_value = properties
mock_open.return_value = open('/dev/null', 'rb')
self.backup_mgr.create_backup(self.ctxt, backup)
mock_temporary_chown.assert_called_once_with('/dev/null')
mock_attach_device.assert_called_once_with(self.ctxt, vol,
properties, False)
mock_get_backup_device.assert_called_once_with(self.ctxt, backup, vol)
mock_get_conn.assert_called_once_with()
mock_detach_device.assert_called_once_with(self.ctxt, attach_info,
vol, properties, False,
force=True,
ignore_errors=True)
vol = objects.Volume.get_by_id(self.ctxt, vol_id)
self.assertEqual('available', vol['status'])
self.assertEqual('backing-up', vol['previous_status'])
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup['status'])
self.assertEqual(vol_size, backup['size'])
self.assertIsNone(backup.encryption_key_id)
@mock.patch('cinder.utils.brick_get_connector_properties')
@mock.patch('cinder.volume.rpcapi.VolumeAPI.get_backup_device')
@mock.patch('cinder.utils.temporary_chown')
@mock.patch('six.moves.builtins.open')
@mock.patch.object(os.path, 'isdir', return_value=True)
def test_create_backup_set_parent_id_to_none(self, mock_isdir, mock_open,
mock_chown,
mock_backup_device,
mock_brick):
vol_size = 1
vol_id = self._create_volume_db_entry(size=vol_size)
backup = self._create_backup_db_entry(volume_id=vol_id,
parent_id='mock')
with mock.patch.object(self.backup_mgr, 'get_backup_driver') as \
mock_get_backup_driver:
mock_get_backup_driver.return_value.backup.return_value = (
{'parent_id': None})
with mock.patch.object(self.backup_mgr, '_detach_device'):
device_path = '/fake/disk/path/'
attach_info = {'device': {'path': device_path}}
mock_attach_device = self.mock_object(self.backup_mgr,
'_attach_device')
mock_attach_device.return_value = attach_info
properties = {}
mock_brick.return_value = properties
mock_open.return_value = open('/dev/null', 'rb')
mock_brick.return_value = properties
self.backup_mgr.create_backup(self.ctxt, backup)
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup.status)
self.assertEqual(vol_size, backup.size)
self.assertIsNone(backup.parent_id)
@mock.patch('cinder.utils.brick_get_connector_properties')
@mock.patch('cinder.volume.rpcapi.VolumeAPI.get_backup_device')
@mock.patch('cinder.utils.temporary_chown')
@mock.patch('six.moves.builtins.open')
@mock.patch.object(os.path, 'isdir', return_value=True)
def test_create_backup_set_parent_id(self, mock_isdir, mock_open,
mock_chown, mock_backup_device,
mock_brick):
vol_size = 1
vol_id = self._create_volume_db_entry(size=vol_size)
backup = self._create_backup_db_entry(volume_id=vol_id)
parent_backup = self._create_backup_db_entry(size=vol_size)
with mock.patch.object(self.backup_mgr, 'get_backup_driver') as \
mock_get_backup_driver:
mock_get_backup_driver.return_value.backup.return_value = (
{'parent_id': parent_backup.id})
with mock.patch.object(self.backup_mgr, '_detach_device'):
device_path = '/fake/disk/path/'
attach_info = {'device': {'path': device_path}}
mock_attach_device = self.mock_object(self.backup_mgr,
'_attach_device')
mock_attach_device.return_value = attach_info
properties = {}
mock_brick.return_value = properties
mock_open.return_value = open('/dev/null', 'rb')
mock_brick.return_value = properties
self.backup_mgr.create_backup(self.ctxt, backup)
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup.status)
self.assertEqual(vol_size, backup.size)
self.assertEqual(parent_backup.id, backup.parent_id)
@mock.patch('cinder.utils.brick_get_connector_properties')
@mock.patch('cinder.volume.rpcapi.VolumeAPI.get_backup_device')
@mock.patch('cinder.utils.temporary_chown')
@mock.patch('six.moves.builtins.open')
@mock.patch.object(os.path, 'isdir', return_value=True)
def test_create_backup_fail_with_excep(self, mock_isdir, mock_open,
mock_chown, mock_backup_device,
mock_brick):
vol_id = self._create_volume_db_entry()
backup = self._create_backup_db_entry(volume_id=vol_id)
with mock.patch.object(self.backup_mgr, 'get_backup_driver') as \
mock_get_backup_driver:
mock_get_backup_driver.return_value.backup.side_effect = (
FakeBackupException('fake'))
with mock.patch.object(self.backup_mgr, '_detach_device'):
device_path = '/fake/disk/path/'
attach_info = {'device': {'path': device_path}}
mock_attach_device = self.mock_object(self.backup_mgr,
'_attach_device')
mock_attach_device.return_value = attach_info
properties = {}
mock_brick.return_value = properties
mock_open.return_value = open('/dev/null', 'rb')
mock_brick.return_value = properties
self.assertRaises(FakeBackupException,
self.backup_mgr.create_backup,
self.ctxt, backup)
vol = db.volume_get(self.ctxt, vol_id)
self.assertEqual('available', vol.status)
self.assertEqual('error_backing-up', vol.previous_status)
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.ERROR, backup.status)
@mock.patch('cinder.utils.brick_get_connector_properties')
@mock.patch('cinder.volume.rpcapi.VolumeAPI.get_backup_device')
@mock.patch('cinder.utils.temporary_chown')
@mock.patch('six.moves.builtins.open')
@mock.patch.object(os.path, 'isdir', return_value=True)
def test_run_backup_with_dir_device_path(self, mock_isdir,
mock_open,
mock_chown,
mock_backup_device,
mock_brick):
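        # Bare function object used as a cheap attribute holder for a fake
        # backup driver; only its .backup attribute (set just below) is used.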
backup_service = lambda: None
backup_service.backup = mock.Mock(
return_value=mock.sentinel.backup_update)
|
vol_id = self._create_volume_db_entry()
backup = self._create_backup_db_entry(volume_id=vol_id)
volume = objects.Volume.get_by_id(self.ctxt, vol_id)
# device_path is represented by a directory
device_path = '/fake/disk/path/'
attach_info = {'device': {'path': device_path}}
self.backup_mgr._attach_device = mock.Mock(
return_value=attach_info)
self.backup_mgr._detach_device = mock.Mock()
output = self.backup_mgr._run_backup(self.ctxt, backup, volume)
mock_chown.assert_not_called()
mock_open.assert_not_called()
backup_service.backup.assert_called_once_with(
backup, device_path)
self.assertEqual(mock.sentinel.backup_update, output)
@mock.patch('cinder.backup.manager.BackupManager._run_backup')
@ddt.data((fields.SnapshotStatus.BACKING_UP, 'available'),
(fields.SnapshotStatus.BACKING_UP, 'in-use'),
(fields.SnapshotStatus.AVAILABLE, 'available'),
(fields.SnapshotStatus.AVAILABLE, 'in-use'))
@ddt.unpack
def test_create_backup_with_snapshot(self, snapshot_status, volume_status,
mock_run_backup):
vol_id = self._create_volume_db_entry(status=volume_status)
snapshot = self._create_snapshot_db_entry(volume_id=vol_id,
status=snapshot_status)
backup = self._create_backup_db_entry(volume_id=vol_id,
snapshot_id=snapshot.id)
if snapshot_status == fields.SnapshotStatus.BACKING_UP:
self.backup_mgr.create_backup(self.ctxt, backup)
vol = objects.Volume.get_by_id(self.ctxt, vol_id)
snapshot = objects.Snapshot.get_by_id(self.ctxt, snapshot.id)
self.assertEqual(volume_status, vol.status)
self.assertEqual(fields.SnapshotStatus.AVAILABLE, snapshot.status)
else:
self.assertRaises(exception.InvalidSnapshot,
self.backup_mgr.create_backup, self.ctxt, backup)
@mock.patch('cinder.utils.brick_get_connector_properties')
@mock.patch('cinder.volume.rpcapi.VolumeAPI.get_backup_device')
@mock.patch('cinder.utils.temporary_chown')
@mock.patch('six.moves.builtins.open')
@mock.patch.object(os.path, 'isdir', return_value=False)
def test_create_backup_with_temp_snapshot(self, mock_isdir,
mock_open,
mock_temporary_chown,
mock_get_backup_device,
mock_get_conn):
"""Test backup in-use volume using temp snapshot."""
self.override_config('backup_use_same_host', True)
vol_size = 1
vol_id = self._create_volume_db_entry(size=vol_size,
previous_status='in-use')
backup = self._create_backup_db_entry(volume_id=vol_id)
snap = self._create_snapshot_db_entry(volume_id=vol_id)
vol = objects.Volume.get_by_id(self.ctxt, vol_id)
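        # Have get_backup_device report a temporary snapshot as the backup source.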
mock_get_backup_device.return_value = (
objects.BackupDeviceInfo.from_primitive({
'backup_device': snap, 'secure_enabled': False,
'is_snapshot': True, },
self.ctxt, expected_attrs=['metadata']))
attach_info = {
'device': {'path': '/dev/null'},
'conn': {'data': {}},
'connector': fake_connectors.FakeConnector(None)}
mock_terminate_connection_snapshot = self.mock_object(
volume_rpcapi.VolumeAPI,
'terminate_connection_snapshot')
mock_initialize_connection_snapshot = self.mock_object(
volume_rpcapi.VolumeAPI,
'initialize_connection_snapshot')
mock_connect_device = self.mock_object(
manager.BackupManager,
'_connect_device')
mock_connect_device.return_value = attach_info
properties = {}
mock_get_conn.return_value = properties
mock_open.return_value = open('/dev/null', 'rb')
self.backup_mgr.create_backup(self.ctxt, backup)
mock_temporary_chown.assert_called_once_with('/dev/null')
mock_initialize_connection_snapshot.assert_called_once_with(
self.ctxt, snap, properties)
mock_get_backup_device.assert_called_once_with(self.ctxt, backup, vol)
mock_get_conn.assert_called_once_with()
mock_terminate_connection_snapshot.assert_called_once_with(
self.ctxt, snap, properties, force=True)
vol = objects.Volume.get_by_id(self.ctxt, vol_id)
self.assertEqual('in-use', vol['status'])
self.assertEqual('backing-up', vol['previous_status'])
backup = objects.Backup.get_by_id(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup.status)
self.assertEqual(vol_size, backup.size)
@mock.patch.object(fake_driver.FakeLoggingVolumeDriver, 'create_snapshot')
def test_create_temp_snapshot(self, mock_create_snapshot):
volume_manager = importutils.import_object(CONF.volume_manager)
volume_manager.driver.set_initialized()
vol_size = 1
vol_id = self._create_volume_db_entry(size=vol_size,
previous_status='in-use')
vol = objects.Volume.get_by_id(self.ctxt, vol_id)
mock_create_snapshot.return_value = {'provider_id':
'fake_provider_id'}
temp_snap = volume_manager.driver._create_temp_snapshot(
self.ctxt, vol)
self.assertEqual('available', temp_snap['status'])
self.assertEqual('fake_provider_id', temp_snap['provider_id'])
@mock.patch.object(fake_driver.FakeLoggingVolumeDriver,
'create_cloned_volume')
def test_create_temp_cloned_volume(self, mock_create_cloned_volume):
volume_manager = importutils.import_object(CONF.volume_manager)
volume_manager.driver.set_initialized()
vol_size = 1
vol_id = self._create_volume_db_entry(size=vol_size,
previous_status='in-use')
vol = objects.Volume.get_by_id(self.ctxt, vol_id)
mock_create_cloned_volume.return_value = {'provider_id':
'fake_provider_id'}
temp_vol = volume_manager.driver._create_temp_cloned_volume(
self.ctxt, vol)
self.assertEqual('available', temp_vol['status'])
self.assertEqual('fake_provider_id', temp_vol['provider_id'])
@mock.patch.object(fake_driver.FakeLoggingVolumeDriver,
'create_volume_from_snapshot')
def test_create_temp_volume_from_snapshot(self, mock_create_vol_from_snap):
volume_manager = importutils.import_object(CONF.volume_manager)
volume_manager.driver.set_initialized()
vol_size = 1
vol_id = self._create_volume_db_entry(size=vol_size,
previous_status='in-use')
vol = objects.Volume.get_by_id(self.ctxt, vol_id)
snap = self._create_snapshot_db_entry(volume_id=vol_id)
mock_create_vol_from_snap.return_value = {'provider_id':
'fake_provider_id'}
temp_vol = volume_manager.driver._create_temp_volume_from_snapshot(
self.ctxt, vol, snap)
self.assertEqual('available', temp_vol['status'])
self.assertEqual('fake_provider_id', temp_vol['provider_id'])
@mock.patch('cinder.volume.utils.notify_about_backup_usage')
def test_create_backup_with_notify(self, notify):
"""Test normal backup creation with notifications."""
vol_size = 1
vol_id = self._create_volume_db_entry(size=vol_size)
backup = self._create_backup_db_entry(volume_id=vol_id)
self.mock_object(self.backup_mgr, '_run_backup')
self.backup_mgr.create_backup(self.ctxt, backup)
self.assertEqual(2, notify.call_count)
@mock.patch('cinder.volume.rpcapi.VolumeAPI.get_backup_device')
@mock.patch('cinder.volume.utils.clone_encryption_key')
@mock.patch('cinder.utils.brick_get_connector_properties')
def test_create_backup_encrypted_volume(self,
mock_connector_properties,
mock_clone_encryption_key,
mock_get_backup_device):
"""Test backup of encrypted volume.
Test whether the volume's encryption key ID is cloned and
saved in the backup.
"""
vol_id = self._create_volume_db_entry(encryption_key_id=fake.UUID1)
backup = self._create_backup_db_entry(volume_id=vol_id)
self.mock_object(self.backup_mgr, '_detach_device')
mock_attach_device = self.mock_object(self.backup_mgr,
'_attach_device')
mock_attach_device.return_value = {'device': {'path': '/dev/null'}}
mock_clone_encryption_key.return_value = fake.UUID2
self.backup_mgr.create_backup(self.ctxt, backup)
mock_clone_encryption_key.assert_called_once_with(self.ctxt,
mock.ANY,
fake.UUID1)
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fake.UUID2, backup.encryption_key_id)
@mock.patch('cinder.volume.rpcapi.VolumeAPI.get_backup_device')
@mock.patch('cinder.volume.utils.clone_encryption_key')
@mock.patch('cinder.utils.brick_get_connector_properties')
def test_create_backup_encrypted_volume_again(self,
mock_connector_properties,
mock_clone_encryption_key,
mock_get_backup_device):
"""Test backup of encrypted volume.
Test when the backup already has a clone of the volume's encryption
key ID.
"""
vol_id = self._create_volume_db_entry(encryption_key_id=fake.UUID1)
backup = self._create_backup_db_entry(volume_id=vol_id,
encryption_key_id=fake.UUID2)
self.mock_object(self.backup_mgr, '_detach_device')
mock_attach_device = self.mock_object(self.backup_mgr,
'_attach_device')
mock_attach_device.return_value = {'device': {'path': '/dev/null'}}
self.backup_mgr.create_backup(self.ctxt, backup)
mock_clone_encryption_key.assert_not_called()
def test_restore_backup_with_bad_volume_status(self):
"""Test error handling.
Test error handling when restoring a backup to a volume
with a bad status.
"""
vol_id = self._create_volume_db_entry(status='available', size=1)
backup = self._create_backup_db_entry(volume_id=vol_id)
self.assertRaises(exception.InvalidVolume,
self.backup_mgr.restore_backup,
self.ctxt,
backup,
vol_id)
backup = db.backup_get(self.ctxt, backup.id)
vol = db.volume_get(self.ctxt, vol_id)
self.assertEqual('error_restoring', vol['status'])
self.assertEqual(fields.BackupStatus.AVAILABLE, backup['status'])
def test_restore_backup_with_bad_backup_status(self):
"""Test error handling.
Test error handling when restoring a backup with a backup
with a bad status.
"""
vol_id = self._create_volume_db_entry(status='restoring-backup',
size=1)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.AVAILABLE, volume_id=vol_id)
self.assertRaises(exception.InvalidBackup,
self.backup_mgr.restore_backup,
self.ctxt,
backup,
vol_id)
vol = db.volume_get(self.ctxt, vol_id)
self.assertEqual('error', vol['status'])
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.ERROR, backup['status'])
def test_restore_backup_with_driver_error(self):
"""Test error handling when an error occurs during backup restore."""
vol_id = self._create_volume_db_entry(status='restoring-backup',
size=1)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.RESTORING, volume_id=vol_id)
mock_run_restore = self.mock_object(
self.backup_mgr,
'_run_restore')
mock_run_restore.side_effect = FakeBackupException('fake')
self.assertRaises(FakeBackupException,
self.backup_mgr.restore_backup,
self.ctxt,
backup,
vol_id)
vol = db.volume_get(self.ctxt, vol_id)
self.assertEqual('error_restoring', vol['status'])
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup['status'])
self.assertTrue(mock_run_restore.called)
def test_restore_backup_with_bad_service(self):
"""Test error handling.
Test error handling when attempting a restore of a backup
with a different service to that used to create the backup.
"""
vol_id = self._create_volume_db_entry(status='restoring-backup',
size=1)
service = 'cinder.tests.backup.bad_service'
backup = self._create_backup_db_entry(
status=fields.BackupStatus.RESTORING, volume_id=vol_id,
service=service)
self.assertRaises(exception.InvalidBackup,
self.backup_mgr.restore_backup,
self.ctxt,
backup,
vol_id)
vol = db.volume_get(self.ctxt, vol_id)
self.assertEqual('error', vol['status'])
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup['status'])
@mock.patch('cinder.utils.brick_get_connector_properties')
@mock.patch('cinder.utils.temporary_chown')
@mock.patch('six.moves.builtins.open')
@mock.patch.object(os.path, 'isdir', return_value=False)
def test_restore_backup(self, mock_isdir, mock_open,
mock_temporary_chown, mock_get_conn):
"""Test normal backup restoration."""
vol_size = 1
vol_id = self._create_volume_db_entry(status='restoring-backup',
size=vol_size)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.RESTORING, volume_id=vol_id)
properties = {}
mock_get_conn.return_value = properties
mock_open.return_value = open('/dev/null', 'wb')
mock_secure_enabled = (
self.volume_mocks['secure_file_operations_enabled'])
mock_secure_enabled.return_value = False
vol = objects.Volume.get_by_id(self.ctxt, vol_id)
attach_info = {'device': {'path': '/dev/null'}}
mock_detach_device = self.mock_object(self.backup_mgr,
'_detach_device')
mock_attach_device = self.mock_object(self.backup_mgr,
'_attach_device')
mock_attach_device.return_value = attach_info
self.backup_mgr.restore_backup(self.ctxt, backup, vol_id)
mock_temporary_chown.assert_called_once_with('/dev/null')
mock_get_conn.assert_called_once_with()
mock_secure_enabled.assert_called_once_with(self.ctxt, vol)
mock_attach_device.assert_called_once_with(self.ctxt, vol,
properties)
mock_detach_device.assert_called_once_with(self.ctxt, attach_info,
vol, properties, force=True)
vol = objects.Volume.get_by_id(self.ctxt, vol_id)
self.assertEqual('available', vol['status'])
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup['status'])
@mock.patch('cinder.volume.utils.notify_about_backup_usage')
def test_restore_backup_with_notify(self, notify):
"""Test normal backup restoration with notifications."""
vol_size = 1
vol_id = self._create_volume_db_entry(status='restoring-backup',
size=vol_size)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.RESTORING, volume_id=vol_id)
self.backup_mgr._run_restore = mock.Mock()
self.backup_mgr.restore_backup(self.ctxt, backup, vol_id)
self.assertEqual(2, notify.call_count)
@mock.patch('cinder.volume.utils.clone_encryption_key')
@mock.patch('cinder.volume.utils.delete_encryption_key')
@mock.patch(
'cinder.tests.unit.backup.fake_service.FakeBackupService.restore')
@mock.patch('cinder.utils.brick_get_connector_properties')
def test_restore_backup_encrypted_volume(self,
mock_connector_properties,
mock_backup_driver_restore,
mock_delete_encryption_key,
mock_clone_encryption_key):
"""Test restore of encrypted volume.
Test restoring a volume from its own backup. In this situation,
the volume's encryption key ID shouldn't change.
"""
vol_id = self._create_volume_db_entry(status='restoring-backup',
encryption_key_id=fake.UUID1)
backup = self._create_backup_db_entry(
volume_id=vol_id,
status=fields.BackupStatus.RESTORING,
encryption_key_id=fake.UUID2)
self.mock_object(self.backup_mgr, '_detach_device')
mock_attach_device = self.mock_object(self.backup_mgr,
'_attach_device')
mock_attach_device.return_value = {'device': {'path': '/dev/null'}}
self.backup_mgr.restore_backup(self.ctxt, backup, vol_id)
volume = db.volume_get(self.ctxt, vol_id)
self.assertEqual(fake.UUID1, volume.encryption_key_id)
mock_clone_encryption_key.assert_not_called()
mock_delete_encryption_key.assert_not_called()
@mock.patch('cinder.volume.utils.clone_encryption_key')
@mock.patch('cinder.volume.utils.delete_encryption_key')
@mock.patch(
'cinder.tests.unit.backup.fake_service.FakeBackupService.restore')
@mock.patch('cinder.utils.brick_get_connector_properties')
def test_restore_backup_new_encrypted_volume(self,
mock_connector_properties,
mock_backup_driver_restore,
mock_delete_encryption_key,
mock_clone_encryption_key):
"""Test restore of encrypted volume.
        Test handling of encryption key IDs when restoring to another
encrypted volume, i.e. a volume whose key ID is different from
the volume originally backed up.
- The volume's prior encryption key ID is deleted.
- The volume is assigned a fresh clone of the backup's encryption
key ID.
"""
vol_id = self._create_volume_db_entry(status='restoring-backup',
encryption_key_id=fake.UUID1)
backup = self._create_backup_db_entry(
volume_id=vol_id,
status=fields.BackupStatus.RESTORING,
encryption_key_id=fake.UUID2)
self.mock_object(self.backup_mgr, '_detach_device')
mock_attach_device = self.mock_object(self.backup_mgr,
'_attach_device')
mock_attach_device.return_value = {'device': {'path': '/dev/null'}}
mock_clone_encryption_key.return_value = fake.UUID3
# Mimic the driver's side effect where it updates the volume's
# metadata. For backups of encrypted volumes, this will essentially
# overwrite the volume's encryption key ID prior to the restore.
def restore_side_effect(backup, volume_id, volume_file):
db.volume_update(self.ctxt,
volume_id,
{'encryption_key_id': fake.UUID4})
mock_backup_driver_restore.side_effect = restore_side_effect
self.backup_mgr.restore_backup(self.ctxt, backup, vol_id)
# Volume's original encryption key ID should be deleted
mock_delete_encryption_key.assert_called_once_with(self.ctxt,
mock.ANY,
fake.UUID1)
# Backup's encryption key ID should have been cloned
mock_clone_encryption_key.assert_called_once_with(self.ctxt,
mock.ANY,
fake.UUID2)
# Volume should have the cloned backup key ID
volume = db.volume_get(self.ctxt, vol_id)
self.assertEqual(fake.UUID3, volume.encryption_key_id)
# Backup's key ID should not have changed
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fake.UUID2, backup.encryption_key_id)
@mock.patch('cinder.volume.utils.clone_encryption_key')
@mock.patch('cinder.volume.utils.delete_encryption_key')
@mock.patch(
'cinder.tests.unit.backup.fake_service.FakeBackupService.restore')
@mock.patch('cinder.utils.brick_get_connector_properties')
def test_restore_backup_glean_key_id(self,
mock_connector_properties,
mock_backup_driver_restore,
mock_delete_encryption_key,
mock_clone_encryption_key):
"""Test restore of encrypted volume.
Test restoring a backup that was created prior to when the encryption
key ID is saved in the backup DB. The backup encryption key ID is
gleaned from the restored volume.
"""
vol_id = self._create_volume_db_entry(status='restoring-backup',
encryption_key_id=fake.UUID1)
backup = self._create_backup_db_entry(
volume_id=vol_id,
status=fields.BackupStatus.RESTORING)
self.mock_object(self.backup_mgr, '_detach_device')
mock_attach_device = self.mock_object(self.backup_mgr,
'_attach_device')
mock_attach_device.return_value = {'device': {'path': '/dev/null'}}
mock_clone_encryption_key.return_value = fake.UUID3
# Mimic the driver's side effect where it updates the volume's
# metadata. For backups of encrypted volumes, this will essentially
# overwrite the volume's encryption key ID prior to the restore.
def restore_side_effect(backup, volume_id, volume_file):
db.volume_update(self.ctxt,
volume_id,
{'encryption_key_id': fake.UUID4})
mock_backup_driver_restore.side_effect = restore_side_effect
self.backup_mgr.restore_backup(self.ctxt, backup, vol_id)
# Volume's original encryption key ID should be deleted
mock_delete_encryption_key.assert_called_once_with(self.ctxt,
mock.ANY,
fake.UUID1)
# Backup's encryption key ID should have been cloned from
# the value restored from the metadata.
mock_clone_encryption_key.assert_called_once_with(self.ctxt,
mock.ANY,
fake.UUID4)
# Volume should have the cloned backup key ID
volume = db.volume_get(self.ctxt, vol_id)
self.assertEqual(fake.UUID3, volume.encryption_key_id)
# Backup's key ID should have been gleaned from value restored
# from the backup's metadata
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fake.UUID4, backup.encryption_key_id)
def test_delete_backup_with_bad_backup_status(self):
"""Test error handling.
Test error handling when deleting a backup with a backup
with a bad status.
"""
vol_id = self._create_volume_db_entry(size=1)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.AVAILABLE, volume_id=vol_id)
self.assertRaises(exception.InvalidBackup,
self.backup_mgr.delete_backup,
self.ctxt,
backup)
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.ERROR, backup['status'])
def test_delete_backup_with_error(self):
"""Test error handling when an error occurs during backup deletion."""
vol_id = self._create_volume_db_entry(size=1)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.DELETING,
display_name='fail_on_delete', volume_id=vol_id)
self.assertRaises(IOError,
self.backup_mgr.delete_backup,
self.ctxt,
backup)
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.ERROR, backup['status'])
def test_delete_backup_with_bad_service(self):
"""Test error handling.
Test error handling when attempting a delete of a backup
with a different service to that used to create the backup.
"""
vol_id = self._create_volume_db_entry(size=1)
service = 'cinder.tests.backup.bad_service'
backup = self._create_backup_db_entry(
status=fields.BackupStatus.DELETING, volume_id=vol_id,
service=service)
self.assertRaises(exception.InvalidBackup,
self.backup_mgr.delete_backup,
self.ctxt,
backup)
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.ERROR, backup['status'])
def test_delete_backup_with_no_service(self):
"""Test error handling.
Test error handling when attempting a delete of a backup
with no service defined for that backup, relates to bug #1162908
"""
vol_id = self._create_volume_db_entry(size=1)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.DELETING, volume_id=vol_id)
backup.service = None
backup.save()
self.backup_mgr.delete_backup(self.ctxt, backup)
@ddt.data('cinder.tests.unit.backup.fake_service.FakeBackupService',
'cinder.tests.unit.backup.fake_service')
def test_delete_backup(self, service):
"""Test normal backup deletion."""
vol_id = self._create_volume_db_entry(size=1)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.DELETING, volume_id=vol_id,
service=service)
self.backup_mgr.delete_backup(self.ctxt, backup)
self.assertRaises(exception.BackupNotFound,
db.backup_get,
self.ctxt,
backup.id)
ctxt_read_deleted = context.get_admin_context('yes')
backup = db.backup_get(ctxt_read_deleted, backup.id)
self.assertTrue(backup.deleted)
self.assertGreaterEqual(timeutils.utcnow(), backup.deleted_at)
self.assertEqual(fields.BackupStatus.DELETED, backup.status)
@mock.patch('cinder.volume.utils.delete_encryption_key')
def test_delete_backup_of_encrypted_volume(self,
mock_delete_encryption_key):
"""Test deletion of backup of encrypted volume"""
vol_id = self._create_volume_db_entry(
encryption_key_id=fake.UUID1)
backup = self._create_backup_db_entry(
volume_id=vol_id,
status=fields.BackupStatus.DELETING,
encryption_key_id=fake.UUID2)
self.backup_mgr.delete_backup(self.ctxt, backup)
mock_delete_encryption_key.assert_called_once_with(self.ctxt,
mock.ANY,
fake.UUID2)
ctxt_read_deleted = context.get_admin_context('yes')
backup = db.backup_get(ctxt_read_deleted, backup.id)
self.assertTrue(backup.deleted)
self.assertIsNone(backup.encryption_key_id)
@mock.patch('cinder.volume.utils.notify_about_backup_usage')
def test_delete_backup_with_notify(self, notify):
"""Test normal backup deletion with notifications."""
vol_id = self._create_volume_db_entry(size=1)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.DELETING, volume_id=vol_id)
self.backup_mgr.delete_backup(self.ctxt, backup)
self.assertEqual(2, notify.call_count)
def test_list_backup(self):
project_id = str(uuid.uuid4())
backups = db.backup_get_all_by_project(self.ctxt, project_id)
self.assertEqual(0, len(backups))
self._create_backup_db_entry()
b2 = self._create_backup_db_entry(project_id=project_id)
backups = db.backup_get_all_by_project(self.ctxt, project_id)
self.assertEqual(1, len(backups))
self.assertEqual(b2.id, backups[0].id)
def test_backup_get_all_by_project_with_deleted(self):
"""Test deleted backups.
        Test deleted backups don't show up in backup_get_all_by_project
        unless context.read_deleted is 'yes'.
"""
project_id = str(uuid.uuid4())
backups = db.backup_get_all_by_project(self.ctxt, project_id)
self.assertEqual(0, len(backups))
backup_keep = self._create_backup_db_entry(project_id=project_id)
backup = self._create_backup_db_entry(project_id=project_id)
db.backup_destroy(self.ctxt, backup.id)
backups = db.backup_get_all_by_project(self.ctxt, project_id)
self.assertEqual(1, len(backups))
self.assertEqual(backup_keep.id, backups[0].id)
ctxt_read_deleted = context.get_admin_context('yes')
backups = db.backup_get_all_by_project(ctxt_read_deleted, project_id)
self.assertEqual(2, len(backups))
def test_backup_get_all_by_host_with_deleted(self):
"""Test deleted backups.
        Test deleted backups don't show up in backup_get_all_by_host
        unless context.read_deleted is 'yes'.
"""
backups = db.backup_get_all_by_host(self.ctxt, 'testhost')
self.assertEqual(0, len(backups))
backup_keep = self._create_backup_db_entry()
backup = self._create_backup_db_entry()
db.backup_destroy(self.ctxt, backup.id)
backups = db.backup_get_all_by_host(self.ctxt, 'testhost')
self.assertEqual(1, len(backups))
self.assertEqual(backup_keep.id, backups[0].id)
ctxt_read_deleted = context.get_admin_context('yes')
backups = db.backup_get_all_by_host(ctxt_read_deleted, 'testhost')
self.assertEqual(2, len(backups))
def test_backup_manager_driver_name(self):
"""Test mapping between backup services and backup drivers."""
self.override_config('backup_driver', "cinder.backup.services.swift")
backup_mgr = \
importutils.import_object(CONF.backup_manager)
self.assertEqual('cinder.backup.drivers.swift',
backup_mgr.driver_name)
def test_export_record_with_bad_service(self):
"""Test error handling.
Test error handling when attempting an export of a backup
record with a different service to that used to create the backup.
"""
vol_id = self._create_volume_db_entry(size=1)
service = 'cinder.tests.backup.bad_service'
backup = self._create_backup_db_entry(
status=fields.BackupStatus.AVAILABLE, volume_id=vol_id,
service=service)
self.assertRaises(exception.InvalidBackup,
self.backup_mgr.export_record,
self.ctxt,
backup)
def test_export_record_with_bad_backup_status(self):
"""Test error handling.
Test error handling when exporting a backup record with a backup
with a bad status.
"""
vol_id = self._create_volume_db_entry(status='available',
size=1)
backup = self._create_backup_db_entry(status=fields.BackupStatus.ERROR,
volume_id=vol_id)
self.assertRaises(exception.InvalidBackup,
self.backup_mgr.export_record,
self.ctxt,
backup)
@ddt.data('cinder.tests.unit.backup.fake_service.FakeBackupService',
'cinder.tests.unit.backup.fake_service')
def test_export_record(self, service):
"""Test normal backup record export."""
vol_size = 1
vol_id = self._create_volume_db_entry(status='available',
size=vol_size)
backup = self._create_backup_db_entry(
status=fields.BackupStatus.AVAILABLE, volume_id=vol_id,
service=service)
export = self.backup_mgr.export_record(self.ctxt, backup)
self.assertEqual(service, export['backup_service'])
self.assertIn('backup_url', export)
def test_import_record_with_verify_not_implemented(self):
"""Test normal backup record import.
Test the case when import succeeds for the case that the
driver does not support verify.
"""
vol_size = 1
backup_id = uuid.uuid4()
export = self._create_exported_record_entry(vol_size=vol_size,
exported_id=backup_id)
imported_record = self._create_export_record_db_entry(
backup_id=backup_id)
backup_hosts = []
self.backup_mgr.import_record(self.ctxt,
imported_record,
export['backup_service'],
export['backup_url'],
backup_hosts)
backup = db.backup_get(self.ctxt, imported_record.id)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup['status'])
self.assertEqual(vol_size, backup['size'])
def test_import_record_with_wrong_id(self):
"""Test normal backup record import.
Test the case when import succeeds for the case that the
driver does not support verify.
"""
vol_size = 1
export = self._create_exported_record_entry(vol_size=vol_size)
imported_record = self._create_export_record_db_entry()
backup_hosts = []
self.assertRaises(exception.InvalidBackup,
self.backup_mgr.import_record,
self.ctxt,
imported_record,
export['backup_service'],
export['backup_url'],
backup_hosts)
def test_import_record_with_bad_service(self):
"""Test error handling.
Test error handling when attempting an import of a backup
record with a different service to that used to create the backup.
"""
export = self._create_exported_record_entry()
export['backup_service'] = 'cinder.tests.unit.backup.bad_service'
imported_record = self._create_export_record_db_entry()
# Test the case where the additional hosts list is empty
backup_hosts = []
self.assertRaises(exception.ServiceNotFound,
self.backup_mgr.import_record,
self.ctxt,
imported_record,
export['backup_service'],
export['backup_url'],
backup_hosts)
# Test that the import backup keeps calling other hosts to find a
# suitable host for the backup service
backup_hosts = ['fake1', 'fake2']
backup_hosts_expect = list(backup_hosts)
BackupAPI_import = 'cinder.backup.rpcapi.BackupAPI.import_record'
with mock.patch(BackupAPI_import) as _mock_backup_import:
self.backup_mgr.import_record(self.ctxt,
imported_record,
export['backup_service'],
export['backup_url'],
backup_hosts)
next_host = backup_hosts_expect.pop()
_mock_backup_import.assert_called_once_with(
self.ctxt,
next_host,
imported_record,
export['backup_service'],
export['backup_url'],
backup_hosts_expect)
def test_import_record_with_invalid_backup(self):
"""Test error handling.
Test error handling when attempting an import of a backup
record where the backup driver returns an exception.
"""
export = self._create_exported_record_entry()
backup_driver = self.backup_mgr.get_backup_driver(self.ctxt)
_mock_record_import_class = ('%s.%s.%s' %
(backup_driver.__module__,
backup_driver.__class__.__name__,
'import_record'))
imported_record = self._create_export_record_db_entry()
backup_hosts = []
with mock.patch(_mock_record_import_class) as _mock_record_import:
_mock_record_import.side_effect = FakeBackupException('fake')
self.assertRaises(exception.InvalidBackup,
self.backup_mgr.import_record,
self.ctxt,
imported_record,
export['backup_service'],
export['backup_url'],
backup_hosts)
self.assertTrue(_mock_record_import.called)
backup = db.backup_get(self.ctxt, imported_record.id)
self.assertEqual(fields.BackupStatus.ERROR, backup['status'])
def test_not_supported_driver_to_force_delete(self):
"""Test force delete check method for not supported drivers."""
self.override_config('backup_driver', 'cinder.backup.drivers.ceph')
self.backup_mgr = importutils.import_object(CONF.backup_manager)
result = self.backup_mgr.check_support_to_force_delete(self.ctxt)
self.assertFalse(result)
@mock.patch('cinder.backup.drivers.nfs.NFSBackupDriver.'
'_init_backup_repo_path', return_value=None)
@mock.patch('cinder.backup.drivers.nfs.NFSBackupDriver.'
'check_for_setup_error', return_value=None)
def test_check_support_to_force_delete(self, mock_check_configuration,
mock_init_backup_repo_path):
"""Test force delete check method for supported drivers."""
self.override_config('backup_driver', 'cinder.backup.drivers.nfs')
self.backup_mgr = importutils.import_object(CONF.backup_manager)
result = self.backup_mgr.check_support_to_force_delete(self.ctxt)
self.assertTrue(result)
def test_backup_has_dependent_backups(self):
"""Test backup has dependent backups.
        Test that the has_dependent_backups query on the backup object
        is correct.
"""
vol_size = 1
vol_id = self._create_volume_db_entry(size=vol_size)
backup = self._create_backup_db_entry(volume_id=vol_id)
self.assertFalse(backup.has_dependent_backups)
class BackupTestCaseWithVerify(BaseBackupTest):
"""Test Case for backups."""
def setUp(self):
self.override_config(
"backup_driver",
"cinder.tests.unit.backup.fake_service_with_verify")
super(BackupTestCaseWithVerify, self).setUp()
def test_import_record_with_verify(self):
"""Test normal backup record import.
Test the case when import succeeds for the case that the
driver implements verify.
"""
vol_size = 1
backup_id = uuid.uuid4()
export = self._create_exported_record_entry(
vol_size=vol_size, exported_id=backup_id)
imported_record = self._create_export_record_db_entry(
backup_id=backup_id)
backup_hosts = []
backup_driver = self.backup_mgr.get_backup_driver(self.ctxt)
_mock_backup_verify_class = ('%s.%s.%s' %
(backup_driver.__module__,
backup_driver.__class__.__name__,
'verify'))
def mock_verify(backup_id):
backup = db.backup_get(self.ctxt, backup_id)
self.assertEqual(fields.BackupStatus.CREATING, backup['status'])
with mock.patch(_mock_backup_verify_class) as mock_backup_verify:
mock_backup_verify.side_effect = mock_verify
self.backup_mgr.import_record(self.ctxt,
imported_record,
export['backup_service'],
export['backup_url'],
backup_hosts)
backup = db.backup_get(self.ctxt, imported_record.id)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup['status'])
self.assertEqual(vol_size, backup['size'])
def test_import_record_with_verify_invalid_backup(self):
"""Test error handling.
Test error handling when attempting an import of a backup
record where the backup driver returns an exception.
"""
vol_size = 1
backup_id = uuid.uuid4()
export = self._create_exported_record_entry(
vol_size=vol_size, exported_id=backup_id)
imported_record = self._create_export_record_db_entry(
backup_id=backup_id)
backup_hosts = []
backup_driver = self.backup_mgr.get_backup_driver(self.ctxt)
_mock_backup_verify_class = ('%s.%s.%s' %
(backup_driver.__module__,
backup_driver.__class__.__name__,
'verify'))
with mock.patch(_mock_backup_verify_class) as _mock_record_verify:
_mock_record_verify.side_effect = \
exception.InvalidBackup(reason='fake')
self.assertRaises(exception.InvalidBackup,
self.backup_mgr.import_record,
self.ctxt,
imported_record,
export['backup_service'],
export['backup_url'],
backup_hosts)
self.assertTrue(_mock_record_verify.called)
backup = db.backup_get(self.ctxt, imported_record.id)
self.assertEqual(fields.BackupStatus.ERROR, backup['status'])
@mock.patch.object(manager.BackupManager,
'_cleanup_temp_volumes_snapshots_for_one_backup')
def test_backup_reset_status_from_nonrestoring_to_available(
self, mock_clean_temp):
vol_id = self._create_volume_db_entry(status='available',
size=1)
backup = self._create_backup_db_entry(status=fields.BackupStatus.ERROR,
volume_id=vol_id)
with mock.patch.object(manager.BackupManager,
'_map_service_to_driver') as \
mock_map_service_to_driver:
            # It should work when the service name is a string
backup_driver = 'cinder.tests.unit.backup.fake_service_with_verify'
mock_map_service_to_driver.return_value = backup_driver
self.backup_mgr.reset_status(self.ctxt,
backup,
fields.BackupStatus.AVAILABLE)
mock_clean_temp.assert_called_once_with(self.ctxt, backup)
new_backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.AVAILABLE,
new_backup['status'])
mock_map_service_to_driver.return_value = backup_driver
self.backup_mgr.reset_status(self.ctxt,
backup,
fields.BackupStatus.ERROR)
mock_clean_temp.reset_mock()
self.backup_mgr.reset_status(self.ctxt,
backup,
fields.BackupStatus.AVAILABLE)
mock_clean_temp.assert_called_once_with(self.ctxt, backup)
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup['status'])
def test_backup_reset_status_to_available_invalid_backup(self):
volume = db.volume_create(self.ctxt, {'status': 'available',
'host': 'test',
'provider_location': '',
'size': 1})
backup = self._create_backup_db_entry(status=fields.BackupStatus.ERROR,
volume_id=volume['id'])
backup_driver = self.backup_mgr.get_backup_driver(self.ctxt)
_mock_backup_verify_class = ('%s.%s.%s' %
(backup_driver.__module__,
backup_driver.__class__.__name__,
'verify'))
with mock.patch(_mock_backup_verify_class) as \
_mock_record_verify:
_mock_record_verify.side_effect = \
exception.BackupVerifyUnsupportedDriver(reason='fake')
self.assertRaises(exception.BackupVerifyUnsupportedDriver,
self.backup_mgr.reset_status,
self.ctxt,
backup,
fields.BackupStatus.AVAILABLE)
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.ERROR, backup['status'])
@mock.patch.object(manager.BackupManager,
'_cleanup_temp_volumes_snapshots_for_one_backup')
def test_backup_reset_status_from_restoring_to_available(
self, mock_clean_temp):
volume = db.volume_create(self.ctxt,
{'status': 'available',
'host': 'test',
'provider_location': '',
'size': 1})
backup = self._create_backup_db_entry(
status=fields.BackupStatus.RESTORING,
volume_id=volume['id'])
self.backup_mgr.reset_status(self.ctxt, backup,
fields.BackupStatus.AVAILABLE)
mock_clean_temp.assert_called_once_with(self.ctxt, backup)
backup = db.backup_get(self.ctxt, backup.id)
self.assertEqual(fields.BackupStatus.AVAILABLE, backup['status'])
@mock.patch.object(manager.BackupManager,
'_cleanup_temp_volumes_snapshots_for_one_backup')
def test_backup_reset_status_to_error(self, mock_clean_temp):
volume = db.volume_create(self.ctxt,
{'status': 'available',
'host': 'test',
'provider_location': '',
'size': 1})
backup = self._create_backup_db_entry(
status=fields.BackupStatus.CREATING,
volume_id=volume['id'])
self.backup_mgr.reset_status(self.ctxt, backup,
fields.BackupStatus.ERROR)
mock_clean_temp.assert_called_once_with(self.ctxt, backup)
backup = db.backup_get(self.ctxt, backup['id'])
self.assertEqual(fields.BackupStatus.ERROR, backup['status'])
@ddt.ddt
class BackupAPITestCase(BaseBackupTest):
def setUp(self):
super(BackupAPITestCase, self).setUp()
self.api = api.API()
def test_get_all_wrong_all_tenants_value(self):
self.assertRaises(exception.InvalidParameterValue,
self.api.get_all, self.ctxt, {'all_tenants': 'bad'})
@mock.patch.object(objects, 'BackupList')
def test_get_all_no_all_tenants_value(self, mock_backuplist):
result = self.api.get_all(self.ctxt, {'key': 'value'})
self.assertFalse(mock_backuplist.get_all.called)
self.assertEqual(mock_backuplist.get_all_by_project.return_value,
result)
mock_backuplist.get_all_by_project.assert_called_once_with(
self.ctxt, self.ctxt.project_id, {'key': 'value'}, None, None,
None, None, None)
@mock.patch.object(objects, 'BackupList')
@ddt.data(False, 'false', '0', 0, 'no')
def test_get_all_false_value_all_tenants(
self, false_value, mock_backuplist):
result = self.api.get_all(self.ctxt, {'all_tenants': false_value,
'key': 'value'})
self.assertFalse(mock_backuplist.get_all.called)
self.assertEqual(mock_backuplist.get_all_by_project.return_value,
result)
mock_backuplist.get_all_by_project.assert_called_once_with(
self.ctxt, self.ctxt.project_id, {'key': 'value'}, None, None,
None, None, None)
@mock.patch.object(objects, 'BackupList')
@ddt.data(True, 'true', '1', 1, 'yes')
def test_get_all_true_value_all_tenants(
self, true_value, mock_backuplist):
result = self.api.get_all(self.ctxt, {'all_tenants': true_value,
'key': 'value'})
self.assertFalse(mock_backuplist.get_all_by_project.called)
self.assertEqual(mock_backuplist.get_all.return_value,
result)
mock_backuplist.get_all.assert_called_once_with(
self.ctxt, {'key': 'value'}, None, None, None, None, None)
@mock.patch.object(objects, 'BackupList')
def test_get_all_true_value_all_tenants_non_admin(self, mock_backuplist):
ctxt = context.RequestContext(uuid.uuid4(), uuid.uuid4())
result = self.api.get_all(ctxt, {'all_tenants': '1',
'key': 'value'})
self.assertFalse(mock_backuplist.get_all.called)
self.assertEqual(mock_backuplist.get_all_by_project.return_value,
result)
mock_backuplist.get_all_by_project.assert_called_once_with(
ctxt, ctxt.project_id, {'key': 'value'}, None, None, None, None,
None)
@mock.patch.object(api.API, '_get_available_backup_service_host',
return_value='fake_host')
@mock.patch.object(db, 'backup_create',
side_effect=db_exc.DBError())
def test_create_when_failed_to_create_backup_object(
self, mock_create,
mock_get_service):
# Create volume in admin context
volume_id = utils.create_volume(self.ctxt)['id']
# Will try to backup from a different context
new_context = copy.copy(self.ctxt)
new_context.user_id = uuid.uuid4()
new_context.project_id = uuid.uuid4()
        # The failure mode guarded against here is a "NotImplementedError:
        # Cannot load 'id' in the base class" being raised.
        # More specifically, in the try clause, if backup.create() fails
        # with a DB exception, backup.id is never assigned. However,
        # in the except clause, backup.destroy() is invoked for cleanup,
        # which internally tries to access backup.id.
self.assertRaises(db_exc.DBError, self.api.create,
context=new_context,
name="test_backup",
description="test backup description",
volume_id=volume_id,
container='volumebackups')
@mock.patch.object(api.API, '_get_available_backup_service_host',
return_value='fake_host')
@mock.patch.object(objects.Backup, '__init__',
side_effect=exception.InvalidInput(
reason='Failed to new'))
def test_create_when_failed_to_new_backup_object(self, mock_new,
mock_get_service):
volume_id = utils.create_volume(self.ctxt)['id']
        # The failure mode guarded against here is an "UnboundLocalError:
        # local variable 'backup' referenced before assignment" being raised.
        # More specifically, in the try clause, backup = objects.Backup(...)
        # raises an exception, so 'backup' is never assigned. But in the
        # except clause, 'backup' is referenced to invoke cleanup methods.
self.assertRaises(exception.InvalidInput, self.api.create,
context=self.ctxt,
name="test_backup",
description="test backup description",
volume_id=volume_id,
container='volumebackups')
@mock.patch('cinder.backup.rpcapi.BackupAPI.create_backup')
@mock.patch('cinder.backup.api.API._is_backup_service_enabled')
def test_create_backup_in_same_host(self, mock_is_enable,
mock_create):
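        # With backup_use_same_host enabled, the backup should be scheduled
        # on the volume's own host.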
self.override_config('backup_use_same_host', True)
mock_is_enable.return_value = True
self.ctxt.user_id = 'fake_user'
self.ctxt.project_id = 'fake_project'
volume_id = self._create_volume_db_entry(status='available',
host='testhost#lvm',
size=1)
backup = self.api.create(self.ctxt, None, None, volume_id, None)
self.assertEqual('testhost', backup.host)
@mock.patch.object(api.API, '_get_available_backup_service_host',
return_value='fake_host')
@mock.patch('cinder.backup.rpcapi.BackupAPI.create_backup')
def test_create_backup_from_snapshot_with_volume_in_use(
self, mock_create, mock_get_service):
self.ctxt.user_id = 'fake_user'
self.ctxt.project_id = 'fake_project'
volume_id = self._create_volume_db_entry(status='in-use')
snapshot = self._create_snapshot_db_entry(volume_id=volume_id)
backup = self.api.create(self.ctxt, None, None, volume_id, None,
snapshot_id=snapshot.id)
self.assertEqual(fields.BackupStatus.CREATING, backup.status)
volume = objects.Volume.get_by_id(self.ctxt, volume_id)
snapshot = objects.Snapshot.get_by_id(self.ctxt, snapshot.id)
self.assertEqual(fields.SnapshotStatus.BACKING_UP, snapshot.status)
self.assertEqual('in-use', volume.status)
@mock.patch.object(api.API, '_get_available_backup_service_host',
return_value='fake_host')
@mock.patch('cinder.backup.rpcapi.BackupAPI.create_backup')
@ddt.data(True, False)
def test_create_backup_resource_status(self, is_snapshot, mock_create,
mock_get_service):
self.ctxt.user_id = 'fake_user'
self.ctxt.project_id = 'fake_project'
volume_id = self._create_volume_db_entry(status='available')
snapshot = self._create_snapshot_db_entry(volume_id=volume_id)
if is_snapshot:
self.api.create(self.ctxt, None, None, volume_id, None,
snapshot_id=snapshot.id)
volume = objects.Volume.get_by_id(self.ctxt, volume_id)
snapshot = objects.Snapshot.get_by_id(self.ctxt, snapshot.id)
self.assertEqual('backing-up', snapshot.status)
self.assertEqual('available', volume.status)
else:
self.api.create(self.ctxt, None, None, volume_id, None)
volume = objects.Volume.get_by_id(self.ctxt, volume_id)
snapshot = objects.Snapshot.get_by_id(self.ctxt, snapshot.id)
self.assertEqual('available', snapshot.status)
self.assertEqual('backing-up', volume.status)
@mock.patch('cinder.backup.api.API._get_available_backup_service_host')
@mock.patch('cinder.backup.rpcapi.BackupAPI.restore_backup')
def test_restore_volume(self,
mock_rpcapi_restore,
mock_get_backup_host):
volume_id = self._create_volume_db_entry(status='available',
size=1)
backup = self._create_backup_db_entry(size=1,
status='available')
mock_get_backup_host.return_value = 'testhost'
self.api.restore(self.ctxt, backup.id, volume_id)
backup = objects.Backup.get_by_id(self.ctxt, backup.id)
self.assertEqual(volume_id, backup.restore_volume_id)

mod.rs

//! Compile-time graphics API types.
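// Re-export the backend-specific handle types for whichever graphics backend
// feature is enabled.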
#[cfg(all(feature = "d3d11", target_os = "windows"))]
pub use self::d3d11::{CommandBuffer, Device, Factory, Resources, Window};
#[cfg(all(feature = "metal", target_os = "macos"))]
pub use self::metal::{CommandBuffer, Device, Factory, Resources, Window};
#[cfg(feature = "opengl")]
pub use self::opengl::{CommandBuffer, Device, Factory, Resources, Window};
#[cfg(feature = "vulkan")]
pub use self::vulkan::{CommandBuffer, Device, Factory, Resources, Window};
#[cfg(all(feature = "d3d11", target_os = "windows"))]
mod d3d11;
#[cfg(all(feature = "metal", target_os = "macos"))]
mod metal;
#[cfg(feature = "opengl")]
mod opengl;
#[cfg(feature = "vulkan")]
mod vulkan;
// /// Handle to a typed GPU buffer.
// pub type Buffer<V> = gfx::handle::Buffer<Resources, V>;
/// Color buffer format.
pub type SurfaceFormat = gfx::format::R8_G8_B8_A8;
/// Color channel format.
pub type ChannelFormat = gfx::format::Unorm;
/// Combined color format (surface and channel) used for render targets.
pub type ColorFormat = (SurfaceFormat, ChannelFormat);
/// Depth buffer format.
#[cfg(feature = "metal")]
pub type DepthFormat = gfx::format::Depth32F;
/// Depth buffer format.
#[cfg(not(feature = "metal"))]
pub type DepthFormat = gfx::format::DepthStencil;
/// Depth-stencil view type.
pub type DepthStencilView = gfx::handle::DepthStencilView<Resources, DepthFormat>;
/// Command buffer encoder type.
///
/// Created by calling `CommandBuffer::into()`.
pub type Encoder = gfx::Encoder<Resources, CommandBuffer>;
/// Statically-typed pipeline state object (PSO).
pub type PipelineState<M> = gfx::PipelineState<Resources, M>;
/// Handle to a chunk of GPU memory.
///
/// This handle can represent a vertex buffer, index buffer, constant buffer,
/// or staging buffer.
pub type RawBuffer = gfx::handle::RawBuffer<Resources>;
/// Dynamically typed shader resource view.
pub type RawShaderResourceView = gfx::handle::RawShaderResourceView<Resources>;
/// Dynamically typed texture resource.
pub type RawTexture = gfx::handle::RawTexture<Resources>;
/// Render target view type.
pub type RenderTargetView = gfx::handle::RenderTargetView<Resources, ColorFormat>;
/// Texture sampler type.
pub type Sampler = gfx::handle::Sampler<Resources>;
/// Shader resource view type.
pub type ShaderResourceView<T> = gfx::handle::ShaderResourceView<Resources, T>;
/// Slice associated with a vertex buffer.
pub type Slice = gfx::Slice<Resources>;

progress.py

import inspect
from typing import Iterable, Optional
from tqdm import tqdm
from ..utils.translations import trans
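# Names of the keyword arguments accepted by tqdm.__init__, used to separate
# tqdm kwargs from ProgressBar widget kwargs.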
_tqdm_kwargs = {
p.name
for p in inspect.signature(tqdm.__init__).parameters.values()
if p.kind is not inspect.Parameter.VAR_KEYWORD and p.name != "self"
}
class progress(tqdm):
"""This class inherits from tqdm and provides an interface for
progress bars in the napari viewer. Progress bars can be created
directly by wrapping an iterable or by providing a total number
of expected updates.
See tqdm.tqdm API for valid args and kwargs:
https://tqdm.github.io/docs/tqdm/
    Any keyword arguments accepted by the :class:`ProgressBar` `QWidget`
    will also be passed through to the ``ProgressBar``.
Examples
--------
>>> def long_running(steps=10, delay=0.1):
... for i in progress(range(steps)):
... sleep(delay)
    It can also be used as a context manager:
>>> def long_running(steps=10, repeats=4, delay=0.1):
... with progress(range(steps)) as pbr:
... for i in pbr:
... sleep(delay)
or equivalently, using the `progrange` shorthand
... with progrange(steps) as pbr:
... for i in pbr:
... sleep(delay)
For manual updates:
>>> def manual_updates(total):
... pbr = progress(total=total)
... sleep(10)
... pbr.set_description("Step 1 Complete")
... pbr.update(1)
... # must call pbr.close() when using outside for loop
... # or context manager
... pbr.close()
"""
monitor_interval = 0 # set to 0 to disable the thread
def __init__(
self,
iterable: Optional[Iterable] = None,
desc: Optional[str] = None,
total: Optional[int] = None,
nest_under: Optional['progress'] = None,
*args,
**kwargs,
) -> None:
kwargs = kwargs.copy()
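        # Split off any kwargs tqdm does not recognize; they are forwarded to
        # the ProgressBar widget instead.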
pbar_kwargs = {k: kwargs.pop(k) for k in set(kwargs) - _tqdm_kwargs}
self._group_token = None
# get progress bar added to viewer
try:
from .._qt.dialogs.activity_dialog import get_pbar
pbar = get_pbar(self, nest_under=nest_under, **pbar_kwargs)
except ImportError:
pbar = None
if pbar is not None:
kwargs['gui'] = True
self._pbar = pbar
super().__init__(iterable, desc, total, *args, **kwargs)
if not self._pbar:
return
if self.total is not None:
self._pbar.setRange(self.n, self.total)
self._pbar._set_value(self.n)
else:
self._pbar.setRange(0, 0)
self.total = 0
if desc:
self.set_description(desc)
else:
self.set_description(trans._("progress"))
def display(self, msg: str = None, pos: int = None) -> None:
"""Update the display."""
if not self._pbar:
return super().display(msg=msg, pos=pos)
if self.total != 0:
etas = str(self).split('|')[-1]
try:
self._pbar._set_value(self.n)
self._pbar._set_eta(etas)
except AttributeError:
pass
def increment_with_overflow(self):
"""Update if not exceeding total, else set indeterminate range."""
if self.n == self.total:
self.total = 0
if self._pbar:
self._pbar.setRange(0, 0)
else:
self.update(1)
def set_description(self, desc):
"""Update progress bar description"""
super().set_description(desc, refresh=True)
if self._pbar:
self._pbar._set_description(self.desc)
def close(self):
"""Closes and deletes the progress bar widget"""
if self.disable:
return
if self._pbar:
self.close_pbar()
super().close()
def close_pbar(self):
if self.disable or not self._pbar:
return
from napari._qt.widgets.qt_progress_bar import (
ProgressBar,
ProgressBarGroup,
)
parent_widget = self._pbar.parent()
self._pbar.close()
self._pbar.deleteLater()
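        # If this bar was nested in a ProgressBarGroup, close the group once
        # none of its bars remain visible.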
if isinstance(parent_widget, ProgressBarGroup):
pbar_children = [
child
for child in parent_widget.children()
if isinstance(child, ProgressBar)
]
if not any(child.isVisible() for child in pbar_children):
parent_widget.close()
self._pbar = None
def progrange(*args, **kwargs):
"""Shorthand for `progress(range(*args), **kwargs)`.
Adds tqdm based progress bar to napari viewer, if it
exists, and returns the wrapped range object.
Returns
-------
progress
wrapped range object
"""
return progress(range(*args), **kwargs)

bases.py

# This file is part of Neotest.
# See http://www.neotest.io for more information.
# This program is published under the MIT license.

import multiprocessing
import threading

import neotest
class ProcessBase(multiprocessing.Process, neotest.logging.LogClientBase):
def __init__(self, name=None):
multiprocessing.Process.__init__(self, name=name)
# this creates self.log object
neotest.logging.LogClientBase.__init__(self, logger="process", queue=neotest.logging.getQueue())
class ThreadBase(threading.Thread, neotest.logging.LogClientBase):
def __init__(self, name=None):
threading.Thread.__init__(self, name=name)
# this creates self.log object
neotest.logging.LogClientBase.__init__(self, logger="thread", queue=neotest.logging.getQueue())

parser.rs

#![cfg_attr(rustfmt, rustfmt_skip)]
#![allow(unknown_lints)]
#![allow(ellipsis_inclusive_range_patterns)]
// Generated by rust-peg. Do not edit.
use self::RuleResult::{Failed, Matched};
use ast::*;
use astutil::*;
use env::{Env, Symbol};
use span::{Node, Span};
fn escape_default(s: &str) -> String {
s.chars().flat_map(|c| c.escape_default()).collect()
}
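// Return the char at byte offset `pos` and the byte offset just past it.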
fn char_range_at(s: &str, pos: usize) -> (char, usize) {
let c = &s[pos..].chars().next().unwrap();
let next_pos = pos + c.len_utf8();
(*c, next_pos)
}
#[derive(Clone)]
enum RuleResult<T> {
Matched(usize, T),
Failed,
}
#[derive(PartialEq, Eq, Debug, Clone)]
pub struct ParseError {
pub line: usize,
pub column: usize,
pub offset: usize,
pub expected: ::std::collections::HashSet<&'static str>,
}
pub type ParseResult<T> = Result<T, ParseError>;
impl ::std::fmt::Display for ParseError {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> {
try!(write!(fmt, "error at {}:{}: expected ", self.line, self.column));
if self.expected.len() == 0 {
try!(write!(fmt, "EOF"));
} else if self.expected.len() == 1 {
try!(write!(fmt, "`{}`", escape_default(self.expected.iter().next().unwrap())));
} else {
let mut iter = self.expected.iter();
try!(write!(fmt, "one of `{}`", escape_default(iter.next().unwrap())));
for elem in iter {
try!(write!(fmt, ", `{}`", escape_default(elem)));
}
}
Ok(())
}
}
impl ::std::error::Error for ParseError {
fn description(&self) -> &str {
"parse error"
}
}
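// Match the literal `m` at byte offset `pos`, returning the position after it
// on success.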
fn slice_eq(input: &str, state: &mut ParseState, pos: usize, m: &'static str) -> RuleResult<()> {
#![inline]
#![allow(dead_code)]
let l = m.len();
if input.len() >= pos + l && &input.as_bytes()[pos..pos + l] == m.as_bytes() {
Matched(pos + l, ())
} else {
state.mark_failure(pos, m)
}
}
fn slice_eq_case_insensitive(input: &str, state: &mut ParseState, pos: usize, m: &'static str) -> RuleResult<()> {
#![inline]
#![allow(dead_code)]
let mut used = 0usize;
let mut input_iter = input[pos..].chars().flat_map(|x| x.to_uppercase());
for m_char_upper in m.chars().flat_map(|x| x.to_uppercase()) {
used += m_char_upper.len_utf8();
let input_char_result = input_iter.next();
if input_char_result.is_none() || input_char_result.unwrap() != m_char_upper {
return state.mark_failure(pos, m);
}
}
Matched(pos + used, ())
}
fn any_char(input: &str, state: &mut ParseState, pos: usize) -> RuleResult<()> {
#![inline]
#![allow(dead_code)]
if input.len() > pos {
let (_, next) = char_range_at(input, pos);
Matched(next, ())
} else {
state.mark_failure(pos, "<character>")
}
}
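// Convert a byte offset in `input` to 1-based (line, column) coordinates.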
fn pos_to_line(input: &str, pos: usize) -> (usize, usize) {
let before = &input[..pos];
let line = before.as_bytes().iter().filter(|&&c| c == b'\n').count() + 1;
let col = before.chars().rev().take_while(|&c| c != '\n').count() + 1;
(line, col)
}
impl<'input> ParseState<'input> {
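    // Record a failure for error reporting: remember the furthest failure
    // position and the set of tokens expected there. Suppressed while
    // `suppress_fail` is non-zero (e.g. inside lookahead).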
fn mark_failure(&mut self, pos: usize, expected: &'static str) -> RuleResult<()> {
if self.suppress_fail == 0 {
if pos > self.max_err_pos {
self.max_err_pos = pos;
self.expected.clear();
}
if pos == self.max_err_pos {
self.expected.insert(expected);
}
}
Failed
}
}
struct ParseState<'input> {
max_err_pos: usize,
suppress_fail: usize,
expected: ::std::collections::HashSet<&'static str>,
_phantom: ::std::marker::PhantomData<&'input ()>,
postfix_expression0_cache: ::std::collections::HashMap<usize, RuleResult<Expression>>,
}
impl<'input> ParseState<'input> {
fn new() -> ParseState<'input> {
ParseState { max_err_pos: 0, suppress_fail: 0, expected: ::std::collections::HashSet::new(), _phantom: ::std::marker::PhantomData, postfix_expression0_cache: ::std::collections::HashMap::new() }
}
}
fn __parse__<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
__state.suppress_fail += 1;
let res = {
let mut __repeat_pos = __pos;
loop {
let __pos = __repeat_pos;
let __step_res = {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "\n");
match __seq_res {
Matched(__pos, _) => match __parse_directive(__input, __state, __pos, env) {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
},
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
' ' | '\t' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[ \t]"),
}
} else {
__state.mark_failure(__pos, "[ \t]")
}
}
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, ())
};
__state.suppress_fail -= 1;
res
}
}
fn __parse_directive<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "#");
match __seq_res {
Matched(__pos, _) => {
let mut __repeat_pos = __pos;
loop {
let __pos = __repeat_pos;
let __step_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'\n' => __state.mark_failure(__pos, "[^\n]"),
_ => Matched(__next, ()),
}
} else {
__state.mark_failure(__pos, "[^\n]")
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, ())
}
Failed => Failed,
}
}
}
fn __parse_identifier<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Identifier>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_identifier0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_identifier0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Identifier> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let str_start = __pos;
match {
let __seq_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z]")
};
match __seq_res {
Matched(__pos, _) => {
let mut __repeat_pos = __pos;
loop {
let __pos = __repeat_pos;
let __step_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, ())
}
Failed => Failed,
}
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
match {
if !env.reserved.contains(n) {
Ok(Identifier { name: n.into() })
} else {
Err("identifier")
}
} {
Ok(res) => Matched(__pos, res),
Err(expected) => {
__state.mark_failure(__pos, expected);
Failed
}
}
}
Failed => Failed,
}
}
}
fn __parse_ohx<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "0");
match __seq_res {
Matched(__pos, _) => {
if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'x' | 'X' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[xX]"),
}
} else {
__state.mark_failure(__pos, "[xX]")
}
}
Failed => Failed,
}
}
}
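// Single-character digit classes used by the numeric-literal rules below.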
fn __parse_dec<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[0-9]"),
}
} else {
__state.mark_failure(__pos, "[0-9]")
}
}
fn __parse_oct<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'0'...'7' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[0-7]"),
}
} else {
__state.mark_failure(__pos, "[0-7]")
}
}
fn __parse_hex<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'0'...'9' | 'a'...'f' | 'A'...'F' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[0-9a-fA-F]"),
}
} else {
__state.mark_failure(__pos, "[0-9a-fA-F]")
}
}
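// One character of lookahead decides the branch: [0-9.] starts a numeric constant, ['uUL] a character constant.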
fn __parse_constant<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Constant> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'0'...'9' | '.' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[0-9.]"),
}
} else {
__state.mark_failure(__pos, "[0-9.]")
};
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_numeric_constant(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, c) => Matched(__pos, { c }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'\'' | 'u' | 'U' | 'L' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "['uUL]"),
}
} else {
__state.mark_failure(__pos, "['uUL]")
};
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_character_constant(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, c) => Matched(__pos, { Constant::Character(c) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
fn __parse_numeric_constant<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Constant> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_float_constant(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, c) => Matched(__pos, { Constant::Float(c) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = __parse_integer_constant(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, c) => Matched(__pos, { Constant::Integer(c) }),
Failed => Failed,
}
}
}
}
}
fn __parse_integer_constant<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Integer> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_integer_number(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse_integer_suffix(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, suffix) => Matched(__pos, {
let (base, number) = n;
Integer { base: base, number: number.to_owned().into_boxed_str(), suffix: suffix }
}),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
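// Tries decimal ([1-9] followed by digits), then "0x" hexadecimal, then "0" octal, then a bare "0"; returns the base and the matched digit slice.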
fn __parse_integer_number<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<(IntegerBase, &'input str)> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let str_start = __pos;
match {
let __seq_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'1'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[1-9]"),
}
} else {
__state.mark_failure(__pos, "[1-9]")
};
match __seq_res {
Matched(__pos, _) => {
let mut __repeat_pos = __pos;
loop {
let __pos = __repeat_pos;
let __step_res = __parse_dec(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, ())
}
Failed => Failed,
}
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { (IntegerBase::Decimal, n) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = __parse_ohx(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let str_start = __pos;
match {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_hex(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { (IntegerBase::Hexadecimal, n) }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "0");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let str_start = __pos;
match {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_oct(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { (IntegerBase::Octal, n) }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let str_start = __pos;
match slice_eq(__input, __state, __pos, "0") {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { (IntegerBase::Decimal, n) }),
Failed => Failed,
}
}
}
}
}
}
}
}
}
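// Consumes any run of u/U/l/L (plus, behind the GNU guard, the imaginary markers i/I/j/J) and validates the combination via int_suffix.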
fn __parse_integer_suffix<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<IntegerSuffix> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
__state.suppress_fail += 1;
let res = __parse_integer_suffix_inner(__input, __state, __pos, env);
__state.suppress_fail -= 1;
res
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
__state.mark_failure(__pos, "integer suffix");
Failed
}
}
}
}
fn __parse_integer_suffix_inner<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<IntegerSuffix> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let str_start = __pos;
match {
let mut __repeat_pos = __pos;
loop {
let __pos = __repeat_pos;
let __step_res = {
let __choice_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'u' | 'U' | 'l' | 'L' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[uUlL]"),
}
} else {
__state.mark_failure(__pos, "[uUlL]")
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'i' | 'I' | 'j' | 'J' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[iIjJ]"),
}
} else {
__state.mark_failure(__pos, "[iIjJ]")
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, ())
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => match { int_suffix(s) } {
Ok(res) => Matched(__pos, res),
Err(expected) => {
__state.mark_failure(__pos, expected);
Failed
}
},
Failed => Failed,
}
}
}
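// A float literal is a float number (decimal or hexadecimal) followed by a float suffix.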
fn __parse_float_constant<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Float> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_float_number(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse_float_suffix(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, suffix) => Matched(__pos, {
let (base, number) = n;
Float { base: base, number: number.to_string().into_boxed_str(), suffix: suffix }
}),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_float_number<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<(FloatBase, &'input str)> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let str_start = __pos;
match __parse_float_decimal(__input, __state, __pos, env) {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { (FloatBase::Decimal, n) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = __parse_ohx(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let str_start = __pos;
match __parse_float_hexadecimal(__input, __state, __pos, env) {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { (FloatBase::Hexadecimal, n) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
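// Decimal float forms: "digits? . digits exp?", "digits . exp?", or "digits exp" (the exponent is required only when there is no dot).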
fn __parse_float_decimal<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
loop {
let __pos = __repeat_pos;
let __step_res = __parse_dec(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
}
                            Failed => {
                                break;
                            }
}
}
Matched(__repeat_pos, ())
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ".");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_dec(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
};
match __seq_res {
Matched(__pos, _) => match __parse_float_decimal_exp(__input, __state, __pos, env) {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
},
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_dec(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ".");
match __seq_res {
Matched(__pos, _) => match __parse_float_decimal_exp(__input, __state, __pos, env) {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
},
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_dec(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
};
match __seq_res {
Matched(__pos, _) => __parse_float_decimal_exp(__input, __state, __pos, env),
Failed => Failed,
}
}
}
}
}
}
}
fn __parse_float_decimal_exp<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
let __seq_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'e' | 'E' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[eE]"),
}
} else {
__state.mark_failure(__pos, "[eE]")
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'+' | '-' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[+-]"),
}
} else {
__state.mark_failure(__pos, "[+-]")
} {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
};
match __seq_res {
Matched(__pos, _) => {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_dec(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
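// Hexadecimal float forms mirror the decimal ones, but the binary exponent (p/P) is mandatory in every branch.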
fn __parse_float_hexadecimal<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
loop {
let __pos = __repeat_pos;
let __step_res = __parse_hex(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, ())
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ".");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_hex(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
};
match __seq_res {
Matched(__pos, _) => __parse_float_binary_exp(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_hex(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ".");
match __seq_res {
Matched(__pos, _) => __parse_float_binary_exp(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_hex(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
};
match __seq_res {
Matched(__pos, _) => __parse_float_binary_exp(__input, __state, __pos, env),
Failed => Failed,
}
}
}
}
}
}
}
fn __parse_float_binary_exp<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
let __seq_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'p' | 'P' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[pP]"),
}
} else {
__state.mark_failure(__pos, "[pP]")
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'+' | '-' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[+-]"),
}
} else {
__state.mark_failure(__pos, "[+-]")
} {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
};
match __seq_res {
Matched(__pos, _) => {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_dec(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
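// Float suffix: the GNU imaginary marker (i/I/j/J) may appear either before or after the format suffix.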
fn __parse_float_suffix<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<FloatSuffix> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
__state.suppress_fail += 1;
let res = __parse_float_suffix_inner(__input, __state, __pos, env);
__state.suppress_fail -= 1;
res
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
__state.mark_failure(__pos, "float literal suffix");
Failed
}
}
}
}
fn __parse_float_suffix_inner<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<FloatSuffix> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'i' | 'I' | 'j' | 'J' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[iIjJ]"),
}
} else {
__state.mark_failure(__pos, "[iIjJ]")
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_float_format(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, fmt) => Matched(__pos, { FloatSuffix { format: fmt, imaginary: true } }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = __parse_float_format(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, fmt) => {
let __seq_res = match {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'i' | 'I' | 'j' | 'J' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[iIjJ]"),
}
} else {
__state.mark_failure(__pos, "[iIjJ]")
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, imag) => Matched(__pos, { FloatSuffix { format: fmt, imaginary: imag.is_some() } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
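// TS 18661 suffixes map to TS18661Format; f/F to Float; l/L to LongDouble; anything else defaults to Double.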
fn __parse_float_format<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<FloatFormat> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_ts18661_float_suffix(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, f) => Matched(__pos, { FloatFormat::TS18661Format(f) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'f' | 'F' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[fF]"),
}
} else {
__state.mark_failure(__pos, "[fF]")
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { FloatFormat::Float }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'l' | 'L' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[lL]"),
}
} else {
__state.mark_failure(__pos, "[lL]")
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { FloatFormat::LongDouble }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => Matched(__pos, { FloatFormat::Double }),
}
}
}
}
}
}
}
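// An optional L/u/U prefix, then one or more characters between single quotes; the raw matched slice is returned as a String.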
fn __parse_character_constant<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<String> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let str_start = __pos;
match {
let __seq_res = match if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'L' | 'u' | 'U' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[LuU]"),
}
} else {
__state.mark_failure(__pos, "[LuU]")
} {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "'");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_character(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
};
match __seq_res {
Matched(__pos, _) => slice_eq(__input, __state, __pos, "'"),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, c) => Matched(__pos, { String::from(c) }),
Failed => Failed,
}
}
}
fn __parse_character<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
let __choice_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'\'' | '\\' | '\n' => __state.mark_failure(__pos, "[^'\\\n]"),
_ => Matched(__next, ()),
}
} else {
__state.mark_failure(__pos, "[^'\\\n]")
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => __parse_escape_sequence(__input, __state, __pos, env),
}
}
}
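// A backslash followed by a simple escape character, one to three octal digits, or "x" plus hex digits.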
fn __parse_escape_sequence<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "\\");
match __seq_res {
Matched(__pos, _) => {
let __choice_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'\'' | '"' | '?' | '\\' | 'a' | 'b' | 'c' | 'f' | 'n' | 'r' | 't' | 'v' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "['\"?\\abcfnrtv]"),
}
} else {
__state.mark_failure(__pos, "['\"?\\abcfnrtv]")
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
if __repeat_value.len() >= 3 {
break;
}
let __step_res = __parse_oct(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, "x");
match __seq_res {
Matched(__pos, _) => {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_hex(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
}
Failed => Failed,
}
}
}
}
}
}
Failed => Failed,
}
}
}
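// One or more adjacent string segments (C concatenates them), collected together with their overall span.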
fn __parse_string_literal<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Vec<String>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_string_literal0(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { s }),
Failed => Failed,
}
}
}
fn __parse_string_literal0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<String> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let str_start = __pos;
match {
let __seq_res = match __parse_encoding_prefix(__input, __state, __pos, env) {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "\"");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let mut __repeat_pos = __pos;
loop {
let __pos = __repeat_pos;
let __step_res = __parse_string_char(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, ())
};
match __seq_res {
Matched(__pos, _) => slice_eq(__input, __state, __pos, "\""),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { String::from(s) }),
Failed => Failed,
}
}
}
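// String encoding prefix: "u8" or a single u/U/L.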
fn __parse_encoding_prefix<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
let __choice_res = slice_eq(__input, __state, __pos, "u8");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'u' | 'U' | 'L' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[uUL]"),
}
} else {
__state.mark_failure(__pos, "[uUL]")
}
}
}
}
}
fn __parse_string_char<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
let __choice_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'"' | '\\' | '\n' => __state.mark_failure(__pos, "[^\"\\\n]"),
_ => Matched(__next, ()),
}
} else {
__state.mark_failure(__pos, "[^\"\\\n]")
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => __parse_escape_sequence(__input, __state, __pos, env),
}
}
}
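// Primary expressions: identifier, constant, string literal, parenthesized expression, _Generic selection, or (behind the GNU guard) a GNU extension.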
fn __parse_primary_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Box<Node<Expression>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_primary_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { Box::new(e) }),
Failed => Failed,
}
}
}
fn __parse_primary_expression0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, a) => Matched(__pos, { Expression::Identifier(Box::new(a)) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_constant(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, a) => Matched(__pos, { Expression::Constant(Box::new(a)) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = __parse_string_literal(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, a) => Matched(__pos, { Expression::StringLiteral(Box::new(a)) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, a) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { a }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_generic_selection(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, a) => Matched(__pos, { Expression::GenericSelection(Box::new(a)) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_gnu_primary_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
}
}
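// _Generic "(" assignment-expression "," generic-association ("," generic-association)* ")"; the keyword must not be followed by an identifier character.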
fn __parse_generic_selection<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<GenericSelection> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "_Generic");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_assignment_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_generic_association(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, a) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { GenericSelection { expression: e, associations: a } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
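// Either "type-name : assignment-expression" or "default : assignment-expression".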
fn __parse_generic_association<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<GenericAssociation> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_type_name(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ":");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_assignment_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, {
let span = Span::span(t.span.start, e.span.end);
GenericAssociation::Type(Node::new(GenericAssociationType { type_name: t, expression: e }, span))
}),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "default");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ":");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_assignment_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { GenericAssociation::Default(e) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
fn __parse_postfix_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Box<Node<Expression>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_postfix_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { Box::new(e) }),
Failed => Failed,
}
}
}
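// Memoized through postfix_expression0_cache: a head expression followed by any number of postfix operations, folded together by apply_ops.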
fn __parse_postfix_expression0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
if let Some(entry) = __state.postfix_expression0_cache.get(&__pos) {
return entry.clone();
}
let __rule_result = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_postfix_expression1(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_postfix_expressionT(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { apply_ops(t, e).node }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.postfix_expression0_cache.insert(__pos, __rule_result.clone());
__rule_result
}
fn __parse_postfix_expression1<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __choice_res = __parse_primary_expression0(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => __parse_compound_literal(__input, __state, __pos, env),
}
}
}
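// A single postfix operation: indexing, a call with comma-separated arguments, member access, or postfix ++/--.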
fn __parse_postfix_expressionT<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Operation> {
#![allow(non_snake_case, unused)]
{
let __choice_res = __parse_index_operator(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_assignment_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Operation::Call(e) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_member_operator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, o) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => Matched(__pos, { Operation::Member(o, i) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_postfix_operator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, o) => Matched(__pos, { Operation::Unary(o) }),
Failed => Failed,
}
}
}
}
}
}
}
}
}
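// "[" expression "]" recorded as a BinaryOperator::Index operation.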
fn __parse_index_operator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Operation> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_index_operator0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, i) => Matched(__pos, { Operation::Binary(Node::new(BinaryOperator::Index, i.span), i.node) }),
Failed => Failed,
}
}
}
fn __parse_index_operator0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Expression>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "[");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
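// "." is direct member access, "->" indirect.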
fn __parse_member_operator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<MemberOperator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, ".");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { MemberOperator::Direct }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, "->");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { MemberOperator::Indirect }),
Failed => Failed,
}
}
}
}
}
fn __parse_postfix_operator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<UnaryOperator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "++");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { UnaryOperator::PostIncrement }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, "--");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { UnaryOperator::PostDecrement }),
Failed => Failed,
}
}
}
}
}
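// C99 compound literal: "(" type-name ")" "{" initializer-list ","? "}".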
fn __parse_compound_literal<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_compound_literal_inner(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { Expression::CompoundLiteral(Box::new(n)) }),
Failed => Failed,
}
}
}
fn __parse_compound_literal_inner<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<CompoundLiteral> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_type_name(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "{");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_initializer(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, i) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match slice_eq(__input, __state, __pos, ",") {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "}");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { CompoundLiteral { type_name: t, initializer_list: i } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_unary_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Box<Node<Expression>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_unary_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { Box::new(e) }),
Failed => Failed,
}
}
}
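// Unary expressions: postfix expression, prefix-operator form, unary-cast form, sizeof / alignof expressions, or GNU __extension__ followed by another unary expression.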
fn __parse_unary_expression0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __choice_res = __parse_postfix_expression0(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = __parse_unary_prefix(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = __parse_unary_cast(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = __parse_sizeof_expression(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = __parse_alignof_expression(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "__extension__");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_unary_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
}
}
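// unary_prefix: spanned wrapper that lifts the inner rule into
// `Expression::UnaryOperator`.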
fn __parse_unary_prefix<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_unary_prefix_inner(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { Expression::UnaryOperator(Box::new(n)) }),
Failed => Failed,
}
}
}
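// unary_prefix_inner: a prefix operator, optional whitespace, then the
// operand parsed as a full unary expression.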
fn __parse_unary_prefix_inner<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<UnaryOperatorExpression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_prefix_operator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, op) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_unary_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { UnaryOperatorExpression { operator: op, operand: e } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
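// prefix_operator: `++`, `--`, or the `sizeof` keyword; the identifier-char
// lookahead prevents `sizeofoo` from being read as `sizeof oo`.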
fn __parse_prefix_operator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<UnaryOperator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "++");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { UnaryOperator::PreIncrement }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "--");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { UnaryOperator::PreDecrement }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "sizeof");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { UnaryOperator::SizeOf }),
Failed => Failed,
}
}
}
}
}
}
}
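// unary_cast: spanned `Expression::UnaryOperator` for a unary operator
// applied to a cast expression.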
fn __parse_unary_cast<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_unary_cast_inner(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { Expression::UnaryOperator(Box::new(n)) }),
Failed => Failed,
}
}
}
fn __parse_unary_cast_inner<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<UnaryOperatorExpression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_unary_operator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, op) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_cast_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { UnaryOperatorExpression { operator: op, operand: e } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
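// unary_operator: `&` (with a lookahead rejecting `&&`), `*`, `+`, `-`,
// `~`, `!`.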
fn __parse_unary_operator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<UnaryOperator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "&");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = slice_eq(__input, __state, __pos, "&");
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { UnaryOperator::Address }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "*");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { UnaryOperator::Indirection }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "+");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { UnaryOperator::Plus }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "-");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { UnaryOperator::Minus }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "~");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { UnaryOperator::Complement }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, "!");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { UnaryOperator::Negate }),
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
}
}
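// sizeof_expression: `sizeof ( type-name )`. The `sizeof expr` form is
// covered by prefix_operator; only the parenthesized type form is here.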
fn __parse_sizeof_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "sizeof");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_type_name(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Expression::SizeOf(Box::new(t)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
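// alignof_expression: `_Alignof ( type-name )`, also accepting the GNU
// spellings `__alignof` / `__alignof__` when gnu_guard holds.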
fn __parse_alignof_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = {
let __choice_res = slice_eq(__input, __state, __pos, "_Alignof");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = slice_eq(__input, __state, __pos, "__alignof");
match __seq_res {
Matched(__pos, _) => match slice_eq(__input, __state, __pos, "__") {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
},
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_type_name(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Expression::AlignOf(Box::new(t)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
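// cast_expression: spanned, boxed wrapper around `cast_expression0`.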
fn __parse_cast_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Box<Node<Expression>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_cast_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { Box::new(e) }),
Failed => Failed,
}
}
}
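// cast_expression0: `( type-name ) cast-expression`, falling back to a
// plain unary expression when the cast form does not parse.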
fn __parse_cast_expression0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_cast_expression_inner(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, c) => Matched(__pos, { Expression::Cast(Box::new(c)) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => __parse_unary_expression0(__input, __state, __pos, env),
}
}
}
fn __parse_cast_expression_inner<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<CastExpression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_type_name(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_cast_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { CastExpression { type_name: t, expression: e } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
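// binary_expression: boxed entry point into the precedence climber below.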
fn __parse_binary_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Box<Node<Expression>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_binary_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { Box::new(e) }),
Failed => Failed,
}
}
}
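// binary_expression0: precedence climbing over C's binary operators.
// `__infix_parse(min_prec)` parses one operand, then repeatedly consumes
// any operator at level >= min_prec and recurses at level+1, so every
// operator here is left-associative. Levels, lowest to highest:
//   0: ||   1: &&   2: |   3: ^   4: & (lookahead rejects &&)
//   5: == !=   6: < > <= >=   7: << >>   8: + -   9: * / %
// Mis-splits such as reading the first `<` of `<<` at level 6 are undone
// by backtracking when the right-hand operand then fails to parse.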
fn __parse_binary_expression0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Expression>> {
#![allow(non_snake_case, unused)]
{
fn __infix_parse<'input>(__min_prec: i32, __input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Expression>> {
if let Matched(__pos, mut __infix_result) = __parse_binary_operand(__input, __state, __pos, env) {
let mut __repeat_pos = __pos;
loop {
let __pos = __repeat_pos;
if 0i32 >= __min_prec {
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "||");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(1i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::LogicalOr, x, y) };
__repeat_pos = __pos;
continue;
}
}
}
if 1i32 >= __min_prec {
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "&&");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(2i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::LogicalAnd, x, y) };
__repeat_pos = __pos;
continue;
}
}
}
if 2i32 >= __min_prec {
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "|");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(3i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::BitwiseOr, x, y) };
__repeat_pos = __pos;
continue;
}
}
}
if 3i32 >= __min_prec {
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "^");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(4i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::BitwiseXor, x, y) };
__repeat_pos = __pos;
continue;
}
}
}
if 4i32 >= __min_prec {
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = {
let __seq_res = slice_eq(__input, __state, __pos, "&");
match __seq_res {
Matched(__pos, _) => {
__state.suppress_fail += 1;
let __assert_res = slice_eq(__input, __state, __pos, "&");
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(5i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::BitwiseAnd, x, y) };
__repeat_pos = __pos;
continue;
}
}
}
if 5i32 >= __min_prec {
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "==");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(6i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::Equals, x, y) };
__repeat_pos = __pos;
continue;
}
}
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "!=");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(6i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::NotEquals, x, y) };
__repeat_pos = __pos;
continue;
}
}
}
if 6i32 >= __min_prec {
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "<");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(7i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::Less, x, y) };
__repeat_pos = __pos;
continue;
}
}
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, ">");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(7i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::Greater, x, y) };
__repeat_pos = __pos;
continue;
}
}
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "<=");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(7i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::LessOrEqual, x, y) };
__repeat_pos = __pos;
continue;
}
}
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, ">=");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(7i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::GreaterOrEqual, x, y) };
__repeat_pos = __pos;
continue;
}
}
}
if 7i32 >= __min_prec {
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "<<");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(8i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::ShiftLeft, x, y) };
__repeat_pos = __pos;
continue;
}
}
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, ">>");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(8i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::ShiftRight, x, y) };
__repeat_pos = __pos;
continue;
}
}
}
if 8i32 >= __min_prec {
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "+");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(9i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::Plus, x, y) };
__repeat_pos = __pos;
continue;
}
}
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "-");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(9i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::Minus, x, y) };
__repeat_pos = __pos;
continue;
}
}
}
if 9i32 >= __min_prec {
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "*");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(10i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::Multiply, x, y) };
__repeat_pos = __pos;
continue;
}
}
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "/");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(10i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::Divide, x, y) };
__repeat_pos = __pos;
continue;
}
}
if let Matched(__pos, o) = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = slice_eq(__input, __state, __pos, "%");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { n }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
if let Matched(__pos, y) = __infix_parse(10i32, __input, __state, __pos, env) {
let x = __infix_result;
__infix_result = { infix(o, BinaryOperator::Modulo, x, y) };
__repeat_pos = __pos;
continue;
}
}
}
break;
}
Matched(__repeat_pos, __infix_result)
} else {
Failed
}
}
__infix_parse(0, __input, __state, __pos, env)
}
}
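// binary_operand: a cast expression with its source span recorded.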
fn __parse_binary_operand<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Expression>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_cast_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_conditional_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Box<Node<Expression>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_conditional_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { Box::new(e) }),
Failed => Failed,
}
}
}
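// conditional_expression0: a binary expression, optionally followed by the
// ternary tail; with a tail `(b, c)` it builds `Expression::Conditional`
// spanning from the condition's start to the else-branch's end, otherwise
// the binary expression's node is returned unchanged.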
fn __parse_conditional_expression0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_binary_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, a) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_conditional_expressionT(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, {
if let Some((b, c)) = t {
let span = Span::span(a.span.start, c.span.end);
Expression::Conditional(Box::new(Node::new(ConditionalExpression { condition: Box::new(a), then_expression: b, else_expression: c }, span)))
} else {
a.node
}
}),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
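// conditional_expressionT: the `? then-expression : conditional-expression`
// tail of the ternary operator.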
fn __parse_conditional_expressionT<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<(Box<Node<Expression>>, Box<Node<Expression>>)> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "?");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, a) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ":");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_conditional_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, b) => Matched(__pos, { (Box::new(a), Box::new(b)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_assignment_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Box<Node<Expression>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_assignment_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { Box::new(e) }),
Failed => Failed,
}
}
}
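// assignment_expression0: `unary-expression assignment-operator
// assignment-expression` (right-recursive, hence right-associative),
// falling back to a conditional expression; assignments are represented
// as `Expression::BinaryOperator` nodes.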
fn __parse_assignment_expression0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_assignment_expression_inner(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { Expression::BinaryOperator(Box::new(n)) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => __parse_conditional_expression0(__input, __state, __pos, env),
}
}
}
fn __parse_assignment_expression_inner<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<BinaryOperatorExpression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_unary_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, a) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_assignment_operator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, op) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_assignment_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, b) => Matched(__pos, { BinaryOperatorExpression { operator: op, lhs: a, rhs: b } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
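// assignment_operator: `=` plus the compound assignments
// `*= /= %= += -= <<= >>= &= ^= |=`, each mapped onto a BinaryOperator
// variant. Trying plain `=` first is safe: on input like `==` the stray
// `=` leaves an unparsable right-hand side, and the PEG backtracks.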
fn __parse_assignment_operator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<BinaryOperator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "=");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { BinaryOperator::Assign }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "*=");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { BinaryOperator::AssignMultiply }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "/=");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { BinaryOperator::AssignDivide }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "%=");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { BinaryOperator::AssignModulo }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "+=");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { BinaryOperator::AssignPlus }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "-=");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { BinaryOperator::AssignMinus }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "<<=");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { BinaryOperator::AssignShiftLeft }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, ">>=");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { BinaryOperator::AssignShiftRight }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "&=");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { BinaryOperator::AssignBitwiseAnd }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "^=");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { BinaryOperator::AssignBitwiseXor }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, "|=");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { BinaryOperator::AssignBitwiseOr }),
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
fn __parse_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Box<Node<Expression>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { Box::new(e) }),
Failed => Failed,
}
}
}
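// expression0: an assignment expression followed by zero or more
// `, assignment-expression` items; with at least one tail item the
// operands are collected into `Expression::Comma`, otherwise the single
// expression is returned as-is.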
fn __parse_expression0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_assignment_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_expressionT(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, {
if t.len() > 0 {
let mut t = t;
t.insert(0, e);
Expression::Comma(Box::new(t))
} else {
e.node
}
}),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_expressionT<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Expression>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_assignment_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
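// constant_expression: per the C grammar this is just a conditional
// expression; the parser itself does not enforce constancy.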
fn __parse_constant_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Box<Node<Expression>>> {
#![allow(non_snake_case, unused)]
__parse_conditional_expression(__input, __state, __pos, env)
}
fn __parse_constant_expression0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
__parse_conditional_expression0(__input, __state, __pos, env)
}
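// declaration: spanned wrapper around `declaration0`.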
fn __parse_declaration<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Declaration>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_declaration0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
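// declaration0: an optional GNU `__extension__` prefix (guarded by
// gnu_guard), the specifier/declarator list from declaration1, and the
// terminating `;`.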
fn __parse_declaration0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Declaration> {
#![allow(non_snake_case, unused)]
{
let __seq_res = match {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "__extension__");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declaration1(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Declaration { specifiers: d.0, declarators: d.1 } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_declaration1<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<(Vec<Node<DeclarationSpecifier>>, Vec<Node<InitDeclarator>>)> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_declaration_specifiers_unique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, h) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declaration2(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (concat(h, t.0), t.1) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
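// declaration2: disambiguates three specifier shapes after the leading
// unique specifiers: a `typedef` declaration; a "unique" type specifier
// (one that may occur only once, such as a struct or typedef name); or a
// "nonunique" type specifier (combinable keywords such as `unsigned long`).
// The nested typedef branches also accept `typedef` appearing after the
// type, as C permits (e.g. `int typedef t;`).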
fn __parse_declaration2<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<(Vec<Node<DeclarationSpecifier>>, Vec<Node<InitDeclarator>>)> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_declaration_typedef(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, h) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declaration_typedef_tail(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (concat(h, t.0), t.1) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = __parse_declaration_unique_type(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, h) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = __parse_declaration_specifiers_unique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, h) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __choice_res = {
let __seq_res = __parse_declaration_typedef(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, h) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = __parse_declaration_specifiers_unique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declaration_type_declarators(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { (s, d) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (concat(h, t.0), t.1) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = __parse_declaration_init_declarators(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { (Vec::new(), d) }),
Failed => Failed,
}
}
}
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (concat(h, t.0), t.1) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (concat(h, t.0), t.1) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = __parse_declaration_nonunique_type(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, h) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = __parse_declaration_specifiers_nonunique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, h) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __choice_res = {
let __seq_res = __parse_declaration_typedef(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, h) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = __parse_declaration_specifiers_nonunique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declaration_type_declarators(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { (s, d) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (concat(h, t.0), t.1) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = __parse_declaration_init_declarators(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { (Vec::new(), d) }),
Failed => Failed,
}
}
}
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (concat(h, t.0), t.1) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (concat(h, t.0), t.1) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
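// declaration_typedef_tail: the remainder of a declaration after a leading
// `typedef`, again split by unique vs nonunique type specifiers and ending
// in the declarators that introduce the new type names.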
fn __parse_declaration_typedef_tail<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<(Vec<Node<DeclarationSpecifier>>, Vec<Node<InitDeclarator>>)> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_declaration_unique_type(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, h) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = __parse_declaration_specifiers_unique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declaration_type_declarators(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { (s, d) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (concat(h, t.0), t.1) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = __parse_declaration_nonunique_type(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, h) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = __parse_declaration_specifiers_nonunique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declaration_type_declarators(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { (s, d) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (concat(h, t.0), t.1) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
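// declaration_unique_type (and its nonunique twin below): a single spanned
// type specifier wrapped into a one-element Vec so it concatenates with
// the surrounding specifier lists.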
fn __parse_declaration_unique_type<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<DeclarationSpecifier>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_declaration_specifier_unique_type0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { vec![n] }),
Failed => Failed,
}
}
}
fn __parse_declaration_nonunique_type<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<DeclarationSpecifier>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_declaration_specifier_nonunique_type0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { vec![n] }),
Failed => Failed,
}
}
}
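// `declaration_specifiers`: the leading non-type specifiers of a declaration
// followed by its type part (`declaration_specifiers_tail`); the two
// specifier lists are concatenated.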
fn __parse_declaration_specifiers<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<DeclarationSpecifier>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_declaration_specifiers_unique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declaration_specifiers_tail(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => Matched(__pos, { concat(s, t) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
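// `declaration_specifiers_tail`: the type part of a declaration. After a
// "unique" type specifier only non-type specifiers may follow; after a
// "nonunique" one (e.g. `int`, `long`), further nonunique type specifiers
// are allowed as well -- presumably encoding C's rule that `long long int`
// is valid while `void` or a typedef name may appear only once.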
fn __parse_declaration_specifiers_tail<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<DeclarationSpecifier>>> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_declaration_unique_type(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declaration_specifiers_unique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { concat(t, s) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = __parse_declaration_nonunique_type(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declaration_specifiers_nonunique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { concat(t, s) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
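// `declaration_specifiers_unique`: a possibly empty, whitespace-separated
// run of non-type declaration specifiers, each wrapped in a spanned `Node`.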
fn __parse_declaration_specifiers_unique<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<DeclarationSpecifier>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_declaration_specifier_nontype(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
}
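// `declaration_specifiers_nonunique`: like the unique variant, but each
// element may also be a "nonunique" type specifier such as `int` or `long`.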
fn __parse_declaration_specifiers_nonunique<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<DeclarationSpecifier>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = {
let __choice_res = __parse_declaration_specifier_nontype(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => __parse_declaration_specifier_nonunique_type0(__input, __state, __pos, env),
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
}
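// `declaration_specifier_nontype`: any declaration specifier other than a
// type specifier -- storage class, type qualifier, function specifier,
// alignment specifier, or, behind the GNU-extension guard, an attribute
// specifier.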
fn __parse_declaration_specifier_nontype<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<DeclarationSpecifier> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_storage_class_specifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { DeclarationSpecifier::StorageClass(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = __parse_type_qualifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { DeclarationSpecifier::TypeQualifier(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = __parse_function_specifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { DeclarationSpecifier::Function(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = __parse_alignment_specifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { DeclarationSpecifier::Alignment(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_attribute_specifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { DeclarationSpecifier::Extension(s) }),
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
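// `declaration_typedef`: the `typedef` storage class, spanned and returned
// as a one-element specifier list.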
fn __parse_declaration_typedef<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<DeclarationSpecifier>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_declaration_typedef0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { vec![s] }),
Failed => Failed,
}
}
}
fn __parse_declaration_typedef0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<DeclarationSpecifier> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_storage_class_typedef(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { DeclarationSpecifier::StorageClass(s) }),
Failed => Failed,
}
}
}
fn __parse_declaration_specifier_unique_type0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<DeclarationSpecifier> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_type_specifier_unique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { DeclarationSpecifier::TypeSpecifier(s) }),
Failed => Failed,
}
}
}
fn __parse_declaration_specifier_nonunique_type0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<DeclarationSpecifier> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_type_specifier_nonunique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { DeclarationSpecifier::TypeSpecifier(s) }),
Failed => Failed,
}
}
}
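// `declaration_init_declarators`: a possibly empty, comma-separated list of
// init-declarators, each wrapped in a spanned `Node`.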
fn __parse_declaration_init_declarators<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<InitDeclarator>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_init_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
}
fn __parse_declaration_type_declarators<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<InitDeclarator>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_type_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
}
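// `init_declarator`: a declarator, optionally followed (behind the GNU
// guard) by an asm label / attribute list, and optionally by an `=`
// initializer.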
fn __parse_init_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<InitDeclarator> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_init_declarator_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_init_declarator_gnu(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_init_declarator_init(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, i) => Matched(__pos, { InitDeclarator { declarator: with_ext(d, e), initializer: i } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
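// `init_declarator_declarator`: parses a declarator and registers the name
// it introduces with the environment as an ordinary identifier -- presumably
// the usual bookkeeping for C's typedef-name ambiguity.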
fn __parse_init_declarator_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Declarator>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, {
env.handle_declarator(&d, Symbol::Identifier);
d
}),
Failed => Failed,
}
}
}
fn __parse_init_declarator_init<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Initializer> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "=");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_initializer(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => Matched(__pos, { i }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
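// `init_declarator_gnu`: GNU extensions attached to a declarator -- an
// optional asm label followed by an attribute-specifier list, flattened
// into a single vector of `Extension` nodes.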
fn __parse_init_declarator_gnu<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<Extension>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = match __parse_asm_label(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_attribute_specifier_list(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, a) => Matched(__pos, { l.into_iter().chain(a).collect() }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
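// `type_declarator`: a declarator inside a typedef. The declared name is
// registered with the environment as a typedef name rather than an ordinary
// identifier, and no initializer is accepted.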
fn __parse_type_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<InitDeclarator> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_init_declarator_gnu(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, {
env.handle_declarator(&d, Symbol::Typename);
InitDeclarator { declarator: with_ext(d, e), initializer: None }
}),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_storage_class_specifier<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<StorageClassSpecifier>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_storage_class_specifier0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
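// `storage_class_specifier0`: one of `extern`, `static`, `_Thread_local`,
// `auto`, `register`. Each keyword is followed by a negative lookahead for
// [_a-zA-Z0-9], so that e.g. `externfoo` is not misread as `extern` plus an
// identifier.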
fn __parse_storage_class_specifier0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<StorageClassSpecifier> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "extern");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { StorageClassSpecifier::Extern }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "static");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { StorageClassSpecifier::Static }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "_Thread_local");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { StorageClassSpecifier::ThreadLocal }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "auto");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { StorageClassSpecifier::Auto }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "register");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { StorageClassSpecifier::Register }),
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
fn __parse_storage_class_typedef<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<StorageClassSpecifier>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_storage_class_typedef0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
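// `storage_class_typedef0`: the `typedef` keyword, with the same
// identifier-boundary lookahead as the other keyword rules.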
fn __parse_storage_class_typedef0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<StorageClassSpecifier> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "typedef");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { StorageClassSpecifier::Typedef }),
Failed => Failed,
}
}
}
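// `type_specifier_unique`: type specifiers that may appear at most once --
// `void`, `_Bool`, `_Atomic(type-name)`, struct/union and enum specifiers,
// and typedef names.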
fn __parse_type_specifier_unique<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<TypeSpecifier> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "void");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::Void }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "_Bool");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::Bool }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "_Atomic");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_type_name(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::Atomic(t) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_struct_or_union_specifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { TypeSpecifier::Struct(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_enum_specifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { TypeSpecifier::Enum(e) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = __parse_typedef_name(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => Matched(__pos, { TypeSpecifier::TypedefName(t) }),
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
}
}
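// `type_specifier_nonunique`: type specifiers that may be combined or
// repeated -- `char`, `short`, `int`, `long`, `float`, `double`, signed and
// unsigned (including the GNU `__signed`/`__signed__` spellings), `_Complex`
// (and GNU `__complex`/`__complex__`), TS 18661 float types, and, behind
// the GNU guard, `typeof` specifiers.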
fn __parse_type_specifier_nonunique<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<TypeSpecifier> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "char");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::Char }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "short");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::Short }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "int");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::Int }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "long");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::Long }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "float");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::Float }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "double");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::Double }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = {
let __choice_res = slice_eq(__input, __state, __pos, "signed");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = slice_eq(__input, __state, __pos, "__signed");
match __seq_res {
Matched(__pos, _) => match slice_eq(__input, __state, __pos, "__") {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
},
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::Signed }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "unsigned");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::Unsigned }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = {
let __choice_res = slice_eq(__input, __state, __pos, "_Complex");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = slice_eq(__input, __state, __pos, "__complex");
match __seq_res {
Matched(__pos, _) => match slice_eq(__input, __state, __pos, "__") {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
},
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::Complex }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = __parse_ts18661_float_type_specifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { TypeSpecifier::TS18661Float(t) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_typeof_specifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
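// `struct_or_union_specifier`: `struct`/`union` with an optional tag and an
// optional braced body, or a bare `struct`/`union` tag reference with no
// body (a reference to, or forward declaration of, the type).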
fn __parse_struct_or_union_specifier<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<StructType> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_struct_or_union(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_identifier(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, i) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_struct_or_union_body(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { StructType { kind: t, identifier: i, declarations: d } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_struct_or_union(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => Matched(__pos, { StructType { kind: t, identifier: Some(i), declarations: None } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
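// `struct_or_union_body`: a braced, non-empty list of struct declarations;
// or, behind the GNU guard, an empty `{}` body (presumably the GCC
// empty-struct extension); or nothing at all, yielding `None`.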
fn __parse_struct_or_union_body<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Option<Vec<Node<StructDeclaration>>>> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "{");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_struct_declaration(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "}");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Some(d) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = slice_eq(__input, __state, __pos, "{");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => slice_eq(__input, __state, __pos, "}"),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Some(Vec::new()) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => Matched(__pos, { None }),
}
}
}
}
}
fn __parse_struct_or_union<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<StructKind> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "struct");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { StructKind::Struct }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "union");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { StructKind::Union }),
Failed => Failed,
}
}
}
}
}
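// `struct_declaration`: a field declaration, a static assertion, or, behind
// the GNU guard, the same declaration prefixed with `__extension__`, which
// is consumed before the declaration is parsed recursively.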
fn __parse_struct_declaration<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<StructDeclaration> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_struct_field(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, f) => Matched(__pos, { StructDeclaration::Field(f) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = __parse_static_assert(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { StructDeclaration::StaticAssert(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "__extension__");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_struct_declaration(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { d }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
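// `struct_field`: a specifier-qualifier list followed by a possibly empty,
// comma-separated list of struct declarators and a terminating `;`.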
fn __parse_struct_field<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<StructField> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_specifier_qualifiers(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_struct_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { StructField { specifiers: s, declarators: d } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
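// `specifier_qualifiers`: the specifier-qualifier list of a struct field (or
// type name). As with `declaration_specifiers`, a single "unique" type
// specifier may be surrounded only by qualifiers, while a "nonunique" one
// may be mixed with further nonunique type specifiers and qualifiers.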
fn __parse_specifier_qualifiers<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<SpecifierQualifier>>> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_specifier_qualifier_qualifier0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, before) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_specifier_qualifier_unique_type0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, single) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_specifier_qualifier_qualifier0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, after) => Matched(__pos, {
let mut before = before;
before.push(single);
before.extend(after);
before
}),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_specifier_qualifier_qualifier0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, before) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_specifier_qualifier_nonunique_type0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, single) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __choice_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_specifier_qualifier_nonunique_type0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_specifier_qualifier_qualifier0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, after) => Matched(__pos, {
let mut before = before;
before.push(single);
before.extend(after);
before
}),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
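/// Wraps a "unique" type specifier (one that cannot be combined with other
/// type specifiers) as a `SpecifierQualifier::TypeSpecifier`.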
fn __parse_specifier_qualifier_unique_type0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<SpecifierQualifier> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_type_specifier_unique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { SpecifierQualifier::TypeSpecifier(s) }),
Failed => Failed,
}
}
}
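/// Wraps a "nonunique" type specifier (one that may be combined with others,
/// e.g. `long`) as a `SpecifierQualifier::TypeSpecifier`.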
fn __parse_specifier_qualifier_nonunique_type0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<SpecifierQualifier> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_type_specifier_nonunique(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { SpecifierQualifier::TypeSpecifier(s) }),
Failed => Failed,
}
}
}
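/// Wraps a type qualifier as a `SpecifierQualifier::TypeQualifier`.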
fn __parse_specifier_qualifier_qualifier0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<SpecifierQualifier> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_type_qualifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, q) => Matched(__pos, { SpecifierQualifier::TypeQualifier(q) }),
Failed => Failed,
}
}
}
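/// Parses a struct declarator: either a bit-field
/// (`declarator? ":" constant-expression`) or a plain declarator, each
/// optionally followed by GNU attribute specifiers.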
fn __parse_struct_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<StructDeclarator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = match __parse_declarator(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ":");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_constant_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = match {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_attribute_specifier_list(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, a) => Matched(__pos, { StructDeclarator { declarator: d.map(|d| with_ext(d, a)), bit_width: Some(e) } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = __parse_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => {
let __seq_res = match {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_attribute_specifier_list(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, a) => Matched(__pos, { StructDeclarator { declarator: Some(with_ext(d, a)), bit_width: None } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
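/// Parses an enum specifier: `enum identifier? { enumerator, ... }` with at
/// least one enumerator and an optional trailing comma, or a bare
/// `enum identifier` reference with no body. The lookahead after the `enum`
/// keyword rejects longer identifiers that merely start with "enum".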
fn __parse_enum_specifier<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<EnumType> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "enum");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_identifier(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, i) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "{");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_enumerator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match slice_eq(__input, __state, __pos, ",") {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "}");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { EnumType { identifier: i, enumerators: e } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "enum");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => Matched(__pos, { EnumType { identifier: Some(i), enumerators: Vec::new() } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
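/// Parses a single enumerator (`identifier ("=" constant-expression)?`) and
/// records its name in the environment's symbol table.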
fn __parse_enumerator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Enumerator> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_enumerator_constant(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, {
env.add_symbol(&i.node.name, Symbol::Identifier);
Enumerator { identifier: i, expression: e }
}),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
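/// Parses the explicit value of an enumerator: `"=" constant-expression`.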
fn __parse_enumerator_constant<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Box<Node<Expression>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "=");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_constant_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
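/// Span-wrapping helper around `__parse_type_qualifier0`.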
fn __parse_type_qualifier<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<TypeQualifier>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_type_qualifier0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
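/// Parses a type qualifier: `const`/`__const`, `restrict`/`__restrict(__)`,
/// `volatile`/`__volatile(__)`, the Clang nullability qualifiers `_Nonnull`,
/// `_Null_unspecified` and `_Nullable`, or `_Atomic` when not followed by
/// `(` (which would instead begin an atomic type specifier).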
fn __parse_type_qualifier0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<TypeQualifier> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = {
let __choice_res = slice_eq(__input, __state, __pos, "const");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "__const");
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeQualifier::Const }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = {
let __choice_res = slice_eq(__input, __state, __pos, "restrict");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = slice_eq(__input, __state, __pos, "__restrict");
match __seq_res {
Matched(__pos, _) => match slice_eq(__input, __state, __pos, "__") {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
},
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeQualifier::Restrict }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = {
let __choice_res = slice_eq(__input, __state, __pos, "volatile");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = slice_eq(__input, __state, __pos, "__volatile");
match __seq_res {
Matched(__pos, _) => match slice_eq(__input, __state, __pos, "__") {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
},
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeQualifier::Volatile }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_clang_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "_Nonnull");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeQualifier::Nonnull }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_clang_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "_Null_unspecified");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeQualifier::NullUnspecified }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_clang_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "_Nullable");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeQualifier::Nullable }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "_Atomic");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = slice_eq(__input, __state, __pos, "(");
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeQualifier::Atomic }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
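/// Span-wrapping helper around `__parse_function_specifier0`.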
fn __parse_function_specifier<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<FunctionSpecifier>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_function_specifier0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
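/// Parses a function specifier: `inline` (or GNU `__inline`/`__inline__`)
/// or `_Noreturn`.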
fn __parse_function_specifier0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<FunctionSpecifier> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = {
let __choice_res = slice_eq(__input, __state, __pos, "inline");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = slice_eq(__input, __state, __pos, "__inline");
match __seq_res {
Matched(__pos, _) => match slice_eq(__input, __state, __pos, "__") {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
},
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { FunctionSpecifier::Inline }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "_Noreturn");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { FunctionSpecifier::Noreturn }),
Failed => Failed,
}
}
}
}
}
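/// Span-wrapping helper around `__parse_alignment_specifier0`.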
fn __parse_alignment_specifier<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<AlignmentSpecifier>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_alignment_specifier0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
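/// Parses an alignment specifier: `_Alignas ( type-name )` or
/// `_Alignas ( constant-expression )`.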
fn __parse_alignment_specifier0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<AlignmentSpecifier> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "_Alignas");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_type_name(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { AlignmentSpecifier::Type(t) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "_Alignas");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_constant_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { AlignmentSpecifier::Constant(e) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
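/// Span-wrapping helper around `__parse_declarator0`.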
fn __parse_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Declarator>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_declarator0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
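/// Parses a declarator: optional GNU attributes, any number of pointer
/// prefixes, a direct declarator, and trailing derived declarators
/// (array/function suffixes). Pointer and suffix derivations are
/// concatenated into the resulting `Declarator`.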
fn __parse_declarator0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Declarator> {
#![allow(non_snake_case, unused)]
{
let __seq_res = match {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_attribute_specifier_list(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, attr) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_pointer(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, pointer) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_direct_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, kind) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_derived_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, derived) => Matched(__pos, { Declarator { kind: kind, derived: concat(pointer, derived), extensions: attr.unwrap_or_default() } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
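/// Parses a direct declarator: an identifier or a parenthesized declarator.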
fn __parse_direct_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<DeclaratorKind> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => Matched(__pos, { DeclaratorKind::Identifier(i) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { DeclaratorKind::Declarator(Box::new(d)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
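/// Parses a derived-declarator suffix: an array declarator `[ ... ]`, a
/// function prototype `( parameter-list )` (parsed inside a fresh scope,
/// which is left again whether or not the parameter list matched), or a
/// K&R-style identifier list `( a, b, ... )`.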
fn __parse_derived_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<DerivedDeclarator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "[");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_array_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, a) => Matched(__pos, { DerivedDeclarator::Array(a) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, {
env.enter_scope();
});
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_function_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, e) => {
match {
env.leave_scope();
e.ok_or("")
} {
Ok(res) => Matched(__pos, res),
Err(expected) => {
__state.mark_failure(__pos, expected);
Failed
}
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, f) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { DerivedDeclarator::Function(f) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_identifier(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, p) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { DerivedDeclarator::KRFunction(p) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
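/// Parses the body of an array declarator after the opening `[`: an empty
/// (unknown) size, a size expression, `static` plus a size expression (with
/// the qualifiers in either order), or `*` for a VLA of unspecified size.
/// The closing `]` is consumed here.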
fn __parse_array_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<ArrayDeclarator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_type_qualifier(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, q) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ArrayDeclarator { qualifiers: q, size: ArraySize::Unknown } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_type_qualifier(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, q) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_assignment_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ArrayDeclarator { qualifiers: q, size: ArraySize::VariableExpression(e) } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "static");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_type_qualifier(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, q) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_assignment_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ArrayDeclarator { qualifiers: q, size: ArraySize::StaticExpression(e) } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_type_qualifier(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, q) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "static");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_assignment_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ArrayDeclarator { qualifiers: q, size: ArraySize::StaticExpression(e) } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_type_qualifier(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, q) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "*");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ArrayDeclarator { qualifiers: q, size: ArraySize::VariableUnknown } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
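/// Parses a function prototype's parameter list: one or more comma-separated
/// parameter declarations, optionally followed by `, ...`.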
fn __parse_function_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<FunctionDeclarator> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_parameter_declaration(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, p) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_ellipsis(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { FunctionDeclarator { parameters: p, ellipsis: e } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
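/// Span-wrapping helper around `__parse_pointer0`.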
fn __parse_pointer<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<DerivedDeclarator>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_pointer0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
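/// Parses one pointer derivation: `*` followed by any number of pointer
/// qualifiers.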
fn __parse_pointer0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<DerivedDeclarator> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "*");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_pointer_qualifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, q) => Matched(__pos, { DerivedDeclarator::Pointer(q) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
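/// Parses a pointer qualifier: a type qualifier or a GNU attribute specifier.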
fn __parse_pointer_qualifier<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<PointerQualifier> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_type_qualifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, q) => Matched(__pos, { PointerQualifier::TypeQualifier(q) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_attribute_specifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { PointerQualifier::Extension(e) }),
Failed => Failed,
}
}
}
}
}
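/// Parses an optional trailing `, ...` in a parameter list; always succeeds,
/// returning `Ellipsis::None` when absent.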
fn __parse_ellipsis<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Ellipsis> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "...");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Ellipsis::Some }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => Matched(__pos, { Ellipsis::None }),
}
}
}
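/// Span-wrapping helper around `__parse_parameter_declaration0`.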
fn __parse_parameter_declaration<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<ParameterDeclaration>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_parameter_declaration0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
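// parameter_declaration0 = declaration_specifiers _ parameter_declarator _
//                          (&gnu_guard attribute_specifier_list)?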
fn __parse_parameter_declaration0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<ParameterDeclaration> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_declaration_specifiers(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_parameter_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_attribute_specifier_list(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, a) => Matched(__pos, { ParameterDeclaration { specifiers: s, declarator: d, extensions: a.unwrap_or_default() } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
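// parameter_declarator = declarator  (registered in `env` as an identifier symbol)
//                      / abstract_declarator
//                      / <empty>     -- hence the Option in the result type.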
fn __parse_parameter_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Option<Node<Declarator>>> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, {
env.handle_declarator(&d, Symbol::Identifier);
Some(d)
}),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = __parse_abstract_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { Some(d) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => Matched(__pos, { None }),
}
}
}
}
}
fn __parse_type_name<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<TypeName>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_type_name0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
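// type_name0 = specifier_qualifiers _ abstract_declarator?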
fn __parse_type_name0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<TypeName> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_specifier_qualifiers(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_abstract_declarator(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, d) => Matched(__pos, { TypeName { specifiers: s, declarator: d } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_abstract_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Declarator>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_abstract_declarator0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
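// abstract_declarator0 tries, in order:
//   pointer* _ direct_abstract_declarator _ derived_abstract_declarator*
//   pointer* derived_abstract_declarator+   (kind = DeclaratorKind::Abstract)
//   pointer+                                (kind = DeclaratorKind::Abstract)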
fn __parse_abstract_declarator0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Declarator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_pointer(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, p) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_direct_abstract_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, k) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_derived_abstract_declarator(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => Matched(__pos, { Declarator { kind: k, derived: concat(p, d), extensions: Vec::new() } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_pointer(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, p) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, k) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_derived_abstract_declarator(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => Matched(__pos, { Declarator { kind: Node::new(DeclaratorKind::Abstract, Span::span(k, k)), derived: concat(p, d), extensions: Vec::new() } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_pointer(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, p) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, k) => Matched(__pos, { Declarator { kind: Node::new(DeclaratorKind::Abstract, Span::span(k, k)), derived: p, extensions: Vec::new() } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
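// direct_abstract_declarator = "(" _ abstract_declarator _ ")"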
fn __parse_direct_abstract_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<DeclaratorKind> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_abstract_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { DeclaratorKind::Declarator(Box::new(d)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_derived_abstract_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<DerivedDeclarator>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_derived_abstract_declarator0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
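// derived_abstract_declarator0 = "[" _ abstract_array_declarator         -> Array
//                              / "(" _ abstract_function_declarator _ ")" -> Function
// (the closing "]" of the array form is consumed inside abstract_array_declarator)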
fn __parse_derived_abstract_declarator0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<DerivedDeclarator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "[");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_abstract_array_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, a) => Matched(__pos, { DerivedDeclarator::Array(a) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_abstract_function_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { DerivedDeclarator::Function(d) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
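// abstract_array_declarator -- the opening "[" was consumed by the caller.
// Alternatives, in order:
//   type_qualifier* _ "]"                                     -> ArraySize::Unknown
//   type_qualifier* _ assignment_expression _ "]"             -> ArraySize::VariableExpression
//   "static" _ type_qualifier* _ assignment_expression _ "]"  -> ArraySize::StaticExpression
//   type_qualifier+ _ "static" _ assignment_expression _ "]"  -> ArraySize::StaticExpression
//   "*" _ "]"                                                 -> ArraySize::VariableUnknown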
fn __parse_abstract_array_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<ArrayDeclarator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_type_qualifier(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, q) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ArrayDeclarator { qualifiers: q, size: ArraySize::Unknown } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_type_qualifier(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, q) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_assignment_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ArrayDeclarator { qualifiers: q, size: ArraySize::VariableExpression(e) } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "static");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
                                                        '_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_type_qualifier(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, q) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_assignment_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ArrayDeclarator { qualifiers: q, size: ArraySize::StaticExpression(e) } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_type_qualifier(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, q) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "static");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
                                                                                '_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_assignment_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ArrayDeclarator { qualifiers: q, size: ArraySize::StaticExpression(e) } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, "*");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ArrayDeclarator { qualifiers: Vec::new(), size: ArraySize::VariableUnknown } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
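// abstract_function_declarator = parameter_declaration ++ ("," _) _ ellipsis
//                              / <empty>   (no parameters, Ellipsis::None)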
fn __parse_abstract_function_declarator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<FunctionDeclarator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_parameter_declaration(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, p) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_ellipsis(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { FunctionDeclarator { parameters: p, ellipsis: e } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => Matched(__pos, { FunctionDeclarator { parameters: Vec::new(), ellipsis: Ellipsis::None } }),
}
}
}
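// Quiet wrapper: suppresses the inner rule's failure messages and reports a
// single "<typedef_name>" expectation instead.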
fn __parse_typedef_name<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Identifier>> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
__state.suppress_fail += 1;
let res = __parse_typedef_name0(__input, __state, __pos, env);
__state.suppress_fail -= 1;
res
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
__state.mark_failure(__pos, "<typedef_name>");
Failed
}
}
}
}
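// typedef_name0 = identifier, accepted only if env.is_typename() knows the
// name as a typedef -- the classic "lexer hack" check, done in the parser.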
fn __parse_typedef_name0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Identifier>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => {
match {
if env.is_typename(&i.node.name) {
Ok(i)
} else {
Err("<unused>")
}
} {
Ok(res) => Matched(__pos, res),
Err(expected) => {
__state.mark_failure(__pos, expected);
Failed
}
}
}
Failed => Failed,
}
}
}
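// initializer = assignment_expression                              -> Initializer::Expression
//             / "{" _ initializer_list_item ++ ("," _) _ ","? _ "}" -> Initializer::List
//             / &gnu_guard "{" _ "}"                               -> empty list (GNU extension)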
fn __parse_initializer<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Initializer> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_assignment_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { Initializer::Expression(e) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "{");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_initializer_list_item(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, i) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match slice_eq(__input, __state, __pos, ",") {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "}");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Initializer::List(i) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = slice_eq(__input, __state, __pos, "{");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => slice_eq(__input, __state, __pos, "}"),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Initializer::List(Vec::new()) }),
Failed => Failed,
}
}
}
}
}
}
}
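// initializer_list_item = designation? _ initializer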
fn __parse_initializer_list_item<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<InitializerListItem> {
#![allow(non_snake_case, unused)]
{
let __seq_res = match __parse_designation(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_initializer(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, i) => Matched(__pos, { InitializerListItem { designation: d.unwrap_or_default(), initializer: Box::new(i) } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
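// designation = designator+ _ "="
//             / &gnu_guard colon_designation   (GNU `field:` form)
//             / &gnu_guard array_designator    (GNU `[index]` without "=")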
fn __parse_designation<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<Designator>>> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_designator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "=");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { d }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_colon_designation(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => Matched(__pos, { vec![d] }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_array_designator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => Matched(__pos, { vec![d] }),
Failed => Failed,
}
}
}
}
}
}
}
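// colon_designation = identifier _ ":"   (obsolete GNU member designator)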
fn __parse_colon_designation<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Designator> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ":");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Designator::Member(i) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
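// designator = array_designator / "." _ identifier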
fn __parse_designator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Designator> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_array_designator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { d }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, ".");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => Matched(__pos, { Designator::Member(i) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
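// array_designator = "[" _ constant_expression0 _ (&gnu_guard range_designator_ext)? "]"
// A trailing "... expr" turns the result into Designator::Range, else Designator::Index.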
fn __parse_array_designator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Designator> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "[");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_constant_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, a) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_range_designator_ext(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, b) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, {
match b {
Some(b) => {
let span = Span::span(a.span.start, b.span.end);
Designator::Range(Node::new(RangeDesignator { from: a, to: b }, span))
}
None => Designator::Index(a),
}
}),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
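// range_designator_ext = "..." _ constant_expression0   (GNU `[lo ... hi]` ranges)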
fn __parse_range_designator_ext<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Expression>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "...");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_constant_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_static_assert<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<StaticAssert>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_static_assert0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
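// static_assert0 = (&gnu_guard "__extension__")? _ "_Static_assert" _
//                  "(" _ constant_expression _ "," _ string_literal _ ")" _ ";"
// Keyword matches are followed by a negative lookahead for [_a-zA-Z0-9] so
// that e.g. "_Static_assertx" is not accepted as the keyword.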
fn __parse_static_assert0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<StaticAssert> {
#![allow(non_snake_case, unused)]
{
let __seq_res = match {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "__extension__");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
                                                '_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "_Static_assert");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
                                                    '_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_constant_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_string_literal(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { StaticAssert { expression: e, message: s } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
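// Boxes the span-wrapped statement0 result.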
fn __parse_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Box<Node<Statement>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_statement0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { Box::new(e) }),
Failed => Failed,
}
}
}
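// statement0 alternatives, in order: labeled, compound, expression,
// selection (if/switch), iteration, jump, and (behind &gnu_guard) asm.
// Compound, selection, and iteration run between env.enter_scope() and
// env.leave_scope(); the optional-match plus ok_or("") pattern ensures
// leave_scope() runs even when the inner rule fails.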
fn __parse_statement0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Statement> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_labeled_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { Statement::Labeled(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = Matched(__pos, {
env.enter_scope();
});
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_compound_statement(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, e) => {
match {
env.leave_scope();
e.ok_or("")
} {
Ok(res) => Matched(__pos, res),
Err(expected) => {
__state.mark_failure(__pos, expected);
Failed
}
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = __parse_expression_statement(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = Matched(__pos, {
env.enter_scope();
});
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_selection_statement(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, e) => {
match {
env.leave_scope();
e.ok_or("")
} {
Ok(res) => Matched(__pos, res),
Err(expected) => {
__state.mark_failure(__pos, expected);
Failed
}
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = Matched(__pos, {
env.enter_scope();
});
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_iteration_statement(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, e) => {
match {
env.leave_scope();
e.ok_or("")
} {
Ok(res) => Matched(__pos, res),
Err(expected) => {
__state.mark_failure(__pos, expected);
Failed
}
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = __parse_jump_statement(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_asm_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
}
}
}
}
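// labeled_statement = label _ ":" _ statement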
fn __parse_labeled_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<LabeledStatement> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_label(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ":");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { LabeledStatement { label: l, statement: s } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
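// label = identifier / "case" _ constant_expression / "default"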
fn __parse_label<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Label> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => Matched(__pos, { Label::Identifier(i) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "case");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
                                                '_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_constant_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => Matched(__pos, { Label::Case(e) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "default");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
                                                    '_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Label::Default }),
Failed => Failed,
}
}
}
}
}
}
}
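// compound_statement = "{" _ (block_item ** _) _ "}"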
fn __parse_compound_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Statement> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "{");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_block_item(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, b) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "}");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Statement::Compound(b) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
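// block_item = declaration / static_assert / statement0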
fn __parse_block_item<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<BlockItem> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_declaration(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { BlockItem::Declaration(d) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = __parse_static_assert(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { BlockItem::StaticAssert(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_statement0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { BlockItem::Statement(s) }),
Failed => Failed,
}
}
}
}
}
}
}
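// expression_statement = expression? _ ";"   (a bare ";" is an empty statement)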
fn __parse_expression_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Statement> {
#![allow(non_snake_case, unused)]
{
let __seq_res = match __parse_expression(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Statement::Expression(e) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
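// selection_statement = if_statement / switch_statement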
fn __parse_selection_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Statement> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_if_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { Statement::If(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_switch_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { Statement::Switch(s) }),
Failed => Failed,
}
}
}
}
}
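// if_statement = "if" _ "(" _ expression _ ")" _ statement _ else_statement?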
fn __parse_if_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<IfStatement> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "if");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
                                    '_' | 'a'..='z' | 'A'..='Z' | '0'..='9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, a) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_else_statement(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, b) => Matched(__pos, { IfStatement { condition: e, then_statement: a, else_statement: b } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
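// else_statement = "else" _ statement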
fn __parse_else_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Box<Node<Statement>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "else");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { s }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
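// switch_statement <- "switch" !ident_char _ "(" _ expression _ ")" _ statement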
fn __parse_switch_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<SwitchStatement> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "switch");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { SwitchStatement { expression: e, statement: s } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
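// iteration_statement: ordered choice of while / do-while / for. Each match
// is wrapped in a Node carrying the Span from its start to its end position.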
fn __parse_iteration_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Statement> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_while_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { Statement::While(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_do_while_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { Statement::DoWhile(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_for_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { Statement::For(s) }),
Failed => Failed,
}
}
}
}
}
}
}
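// while_statement <- "while" !ident_char _ "(" _ expression _ ")" _ statement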
fn __parse_while_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<WhileStatement> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "while");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { WhileStatement { expression: e, statement: s } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
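// do_while_statement <- "do" !ident_char _ statement _
//     "while" !ident_char _ "(" _ expression _ ")" _ ";"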
fn __parse_do_while_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<DoWhileStatement> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "do");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "while");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { DoWhileStatement { statement: s, expression: e } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
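// for_statement <- "for" !ident_char _ "(" _ for_initializer _
//     expression? _ ";" _ expression? _ ")" _ statement
// The initializer rule consumes its own terminating ";" (or is a whole
// declaration), so only the condition's ";" appears explicitly here.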
fn __parse_for_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<ForStatement> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "for");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_for_initializer(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, a) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_expression(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, b) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_expression(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, c) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { ForStatement { initializer: a, condition: b, step: c, statement: s } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
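// for_initializer: ordered choice of `expression _ ";"`, a declaration, a
// static_assert, or a lone ";" (ForInitializer::Empty).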
fn __parse_for_initializer<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<ForInitializer> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ForInitializer::Expression(e) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = __parse_declaration(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { ForInitializer::Declaration(d) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = __parse_static_assert(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { ForInitializer::StaticAssert(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ForInitializer::Empty }),
Failed => Failed,
}
}
}
}
}
}
}
}
}
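// jump_statement: "goto" identifier ";" / "continue" ";" / "break" ";" /
// "return" expression? ";" — each keyword guarded by !ident_char.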
fn __parse_jump_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Statement> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "goto");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Statement::Goto(i) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "continue");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Statement::Continue }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "break");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Statement::Break }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "return");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_expression(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Statement::Return(e) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
}
}
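// translation_unit <- directive? _ (external_declaration ** _) _
// Each external declaration is wrapped in a spanned Node; the collected list
// becomes TranslationUnit(Vec<..>).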
fn __parse_translation_unit<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<TranslationUnit> {
#![allow(non_snake_case, unused)]
{
let __seq_res = match __parse_directive(__input, __state, __pos, env) {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_external_declaration(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TranslationUnit(d) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
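// external_declaration: declaration / static_assert / function definition.
// The function-definition branch enters a new scope in `env` before the
// attempt and leaves it afterwards; `e.ok_or("")` turns a non-match back
// into a parse failure once the scope has been restored.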
fn __parse_external_declaration<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<ExternalDeclaration> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = __parse_declaration(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, d) => Matched(__pos, { ExternalDeclaration::Declaration(d) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = __parse_static_assert(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { ExternalDeclaration::StaticAssert(s) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = Matched(__pos, {
env.enter_scope();
});
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_function_definition(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, e) => {
match {
env.leave_scope();
e.ok_or("")
} {
Ok(res) => Matched(__pos, res),
Err(expected) => {
__state.mark_failure(__pos, expected);
Failed
}
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => Matched(__pos, { ExternalDeclaration::FunctionDefinition(d) }),
Failed => Failed,
}
}
}
}
}
}
}
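// function_definition <- (&gnu_guard "__extension__" !ident_char)? _
//     declaration_specifiers _ declarator _ (declaration ** _) _ compound_statement
// The declaration* slot holds K&R-style parameter declarations between the
// declarator and the function body.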
fn __parse_function_definition<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<FunctionDefinition> {
#![allow(non_snake_case, unused)]
{
let __seq_res = match {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_gnu_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "__extension__");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declaration_specifiers(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, a) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_declarator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, b) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_declaration(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, c) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_compound_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => Matched(__pos, { FunctionDefinition { specifiers: a, declarator: b, declarations: c, statement: d } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
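// gnu_guard: semantic predicate that consumes no input; it succeeds with ()
// only when GNU extensions are enabled in `env`, otherwise it records the
// failure message "gnu extensions disabled".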
fn __parse_gnu_guard<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
match {
if env.extensions_gnu {
Ok(())
} else {
Err("gnu extensions disabled")
}
} {
Ok(res) => Matched(__pos, res),
Err(expected) => {
__state.mark_failure(__pos, expected);
Failed
}
}
}
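// attribute_specifier_list <- (attribute_specifier ** _), with the per-specifier
// vectors flattened into a single Vec<Node<Extension>>.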
fn __parse_attribute_specifier_list<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<Extension>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_attribute_specifier(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
                            Matched(__pos, a) => Matched(__pos, { a.into_iter().flatten().collect() }),
Failed => Failed,
}
}
}
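// attribute_specifier <- "__attribute__" !ident_char _ "((" _
//     (attribute ** (_ "," _)) _ "))"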
fn __parse_attribute_specifier<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<Extension>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "__attribute__");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "((");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_attribute(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, a) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "))");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { a }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
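// attribute: under the clang guard, an `availability(...)` attribute parsed
// into Extension::AvailabilityAttribute; otherwise a generic `name parameters?`
// pair parsed into Extension::Attribute, with missing parameters defaulted to
// an empty argument list.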
fn __parse_attribute<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Extension> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = __parse_clang_guard(__input, __state, __pos, env);
__state.suppress_fail -= 1;
match __assert_res {
Matched(_, __value) => Matched(__pos, __value),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_attr_availability(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, c) => Matched(__pos, { Extension::AvailabilityAttribute(c) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_attribute_name(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_attribute_parameters(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, p) => Matched(__pos, { Extension::Attribute(Attribute { name: n, arguments: p.unwrap_or_default() }) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
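// attribute_name <- $([_a-zA-Z][_a-zA-Z0-9]*), returned as an owned String.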
fn __parse_attribute_name<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<String> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let str_start = __pos;
match {
__state.suppress_fail += 1;
let res = {
let __seq_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z]")
};
match __seq_res {
Matched(__pos, _) => {
let mut __repeat_pos = __pos;
loop {
let __pos = __repeat_pos;
let __step_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, ())
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { String::from(n) }),
Failed => Failed,
}
}
}
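// attribute_parameters <- "(" _ (assignment_expression0 ** (_ "," _)) _ ")"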
fn __parse_attribute_parameters<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<Expression>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_assignment_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
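// attr_availability <- "availability" !ident_char _ "(" _ identifier _ "," _
//     (attr_availability_clause ++ (_ "," _)) _ ")"
// The clause list must be non-empty: the repeat loop fails unless at least
// one clause matched.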
fn __parse_attr_availability<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<AvailabilityAttribute> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "availability");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, p) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_attr_availability_clause(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, __repeat_value)
} else {
Failed
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, c) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { AvailabilityAttribute { platform: p, clauses: c } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
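// attr_availability_clause: ordered choice of
//   "introduced" "=" version / "deprecated" "=" version /
//   "obsoleted" "=" version / "unavailable" /
//   "message" "=" string_literal / "replacement" "=" string_literal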
fn __parse_attr_availability_clause<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<AvailabilityClause> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "introduced");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "=");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_attr_availability_version(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, v) => Matched(__pos, { AvailabilityClause::Introduced(v) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "deprecated");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "=");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_attr_availability_version(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, v) => Matched(__pos, { AvailabilityClause::Deprecated(v) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "obsoleted");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "=");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_attr_availability_version(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, v) => Matched(__pos, { AvailabilityClause::Obsoleted(v) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "unavailable");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { AvailabilityClause::Unavailable }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "message");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "=");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_string_literal(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { AvailabilityClause::Message(s) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "replacement");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "=");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_string_literal(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => Matched(__pos, { AvailabilityClause::Replacement(s) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
}
}
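// attr_availability_version <- $(dec+) ("." $(dec+))? ("." $(dec+))?
// mapped onto AvailabilityVersion { major, minor, subminor }.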
fn __parse_attr_availability_version<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<AvailabilityVersion> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let str_start = __pos;
match {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_dec(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, a) => {
let __seq_res = match {
let __seq_res = slice_eq(__input, __state, __pos, ".");
match __seq_res {
Matched(__pos, _) => {
let str_start = __pos;
match {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_dec(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, b) => {
let __seq_res = match {
let __seq_res = slice_eq(__input, __state, __pos, ".");
match __seq_res {
Matched(__pos, _) => {
let str_start = __pos;
match {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __step_res = __parse_dec(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
if __repeat_value.len() >= 1 {
Matched(__repeat_pos, ())
} else {
Failed
}
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, c) => Matched(__pos, { AvailabilityVersion { major: a.into(), minor: b.map(str::to_owned), subminor: c.map(str::to_owned) } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
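// asm_label: asm_label0 wrapped in a spanned Node.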
fn __parse_asm_label<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Node<Extension>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_asm_label0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
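// asm_label0 <- asm_label_keyword _ "(" _ string_literal _ ")"
// producing Extension::AsmLabel.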
fn __parse_asm_label0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Extension> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_asm_label_keyword(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_string_literal(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Extension::AsmLabel(s) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
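// asm_label_keyword: accepts `asm`, `__asm`, or `__asm__`, each followed by a
// non-identifier character; on failure the expected token is reported as "asm".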
fn __parse_asm_label_keyword<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
__state.suppress_fail += 1;
let res = {
let __choice_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "asm");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
__state.suppress_fail += 1;
let res = {
let __seq_res = {
let __seq_res = slice_eq(__input, __state, __pos, "__asm");
match __seq_res {
Matched(__pos, _) => match slice_eq(__input, __state, __pos, "__") {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
},
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
}
}
};
__state.suppress_fail -= 1;
res
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
__state.mark_failure(__pos, "asm");
Failed
}
}
}
}
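// asm_statement: asm_statement0 wrapped in a spanned Node and returned as
// Statement::Asm.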
fn __parse_asm_statement<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Statement> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_asm_statement0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => Matched(__pos, { Statement::Asm(s) }),
Failed => Failed,
}
}
}
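// Parses a GNU asm statement: the asm keyword, an optional qualifier (parsed as
// a type qualifier, e.g. `volatile`), and a parenthesized template string
// followed by up to three optional `:`-separated sections (outputs, inputs,
// clobbers). If any `:` section is present the result is
// AsmStatement::GnuExtended; otherwise it is AsmStatement::GnuBasic.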
fn __parse_asm_statement0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<AsmStatement> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = {
let __choice_res = slice_eq(__input, __state, __pos, "asm");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, "__asm");
match __seq_res {
Matched(__pos, _) => match slice_eq(__input, __state, __pos, "__") {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
},
Failed => Failed,
}
}
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match __parse_type_qualifier(__input, __state, __pos, env) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, q) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_string_literal(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, a) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match {
let __seq_res = slice_eq(__input, __state, __pos, ":");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_asm_operand_list(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match {
let __seq_res = slice_eq(__input, __state, __pos, ":");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_asm_operand_list(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match {
let __seq_res = slice_eq(__input, __state, __pos, ":");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = __parse_string_literal(__input, __state, __pos, env);
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match Matched(__pos, ()) {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (e, t.unwrap_or_default()) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (e, t.unwrap_or_default()) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, t) => Matched(__pos, { (e, t.unwrap_or_default()) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, o) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ";");
match __seq_res {
Matched(__pos, _) => Matched(__pos, {
if let Some((o, (i, (c, ())))) = o {
AsmStatement::GnuExtended(GnuExtendedAsmStatement { qualifier: q, template: a, outputs: o, inputs: i, clobbers: c })
} else {
AsmStatement::GnuBasic(a)
}
}),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_asm_operand_list<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Vec<Node<GnuAsmOperand>>> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => __parse__(__input, __state, __pos, env),
Failed => Failed,
}
}
Failed => Failed,
}
};
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_asm_operand(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
}
}
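// Parses a single extended-asm operand of the form
// `[symbolic_name] "constraint" (expression)`, where the bracketed symbolic
// name is optional.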
fn __parse_asm_operand<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<GnuAsmOperand> {
#![allow(non_snake_case, unused)]
{
let __seq_res = match {
let __seq_res = slice_eq(__input, __state, __pos, "[");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => Matched(__pos, { i }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, i) => {
let __seq_res = __parse_string_literal(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { GnuAsmOperand { symbolic_name: i, constraints: s, variable_name: e } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_gnu_primary_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __choice_res = __parse_statement_expression(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = __parse_offsetof_expression(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = __parse_va_arg_expression(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => __parse_keyword_expression(__input, __state, __pos, env),
}
}
}
}
}
}
}
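// GNU statement expression: `({ ... })`. A fresh scope is entered before the
// compound statement is parsed and left again afterwards; the `ok_or("")`
// turns a missing compound statement into a parse failure.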
fn __parse_statement_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, {
env.enter_scope();
});
match __seq_res {
Matched(__pos, _) => {
let __seq_res = match {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_compound_statement(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
} {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, e) => {
match {
env.leave_scope();
e.ok_or("")
} {
Ok(res) => Matched(__pos, res),
Err(expected) => {
__state.mark_failure(__pos, expected);
Failed
}
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, s) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { Expression::Statement(Box::new(s)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_va_arg_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_va_arg_expression_inner(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { Expression::VaArg(Box::new(n)) }),
Failed => Failed,
}
}
}
fn __parse_va_arg_expression_inner<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<VaArgExpression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "__builtin_va_arg");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_assignment_expression(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_type_name(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { VaArgExpression { va_list: e, type_name: t } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
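// Treats the GCC predefined identifiers `__func__`, `__FUNCTION__`, and
// `__PRETTY_FUNCTION__` as ordinary identifier expressions, capturing the
// matched slice of input as the identifier's name.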
fn __parse_keyword_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = {
let str_start = __pos;
match __parse_keyword_expression0(__input, __state, __pos, env) {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, k) => Matched(__pos, {
let ident = Identifier { name: k.node.to_string() };
Expression::Identifier(Box::new(Node::new(ident, k.span)))
}),
Failed => Failed,
}
}
}
fn __parse_keyword_expression0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "__func__");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "__FUNCTION__");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "__PRETTY_FUNCTION__");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
}
}
}
}
}
}
fn __parse_offsetof_expression<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<Expression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_offsetof_expression_inner(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { Expression::OffsetOf(Box::new(n)) }),
Failed => Failed,
}
}
}
fn __parse_offsetof_expression_inner<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<OffsetOfExpression> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = slice_eq(__input, __state, __pos, "__builtin_offsetof");
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_type_name(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ",");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_offsetof_designator(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { OffsetOfExpression { type_name: t, designator: d } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_offsetof_designator<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<OffsetDesignator> {
#![allow(non_snake_case, unused)]
{
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = {
let mut __repeat_pos = __pos;
let mut __repeat_value = vec![];
loop {
let __pos = __repeat_pos;
let __pos = if __repeat_value.len() > 0 {
let __sep_res = __parse__(__input, __state, __pos, env);
match __sep_res {
Matched(__newpos, _) => __newpos,
Failed => break,
}
} else {
__pos
};
let __step_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_offsetof_member(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __step_res {
Matched(__newpos, __value) => {
__repeat_pos = __newpos;
__repeat_value.push(__value);
}
Failed => {
break;
}
}
}
Matched(__repeat_pos, __repeat_value)
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { e }),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, d) => Matched(__pos, { OffsetDesignator { base: i, members: d } }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_offsetof_member<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<OffsetMember> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, ".");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => Matched(__pos, { OffsetMember::Member(i) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = slice_eq(__input, __state, __pos, "->");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_identifier(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, i) => Matched(__pos, { OffsetMember::IndirectMember(i) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, "[");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "]");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { OffsetMember::Index(e) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
fn __parse_typeof_specifier<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<TypeSpecifier> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
__state.suppress_fail += 1;
let res = {
let __seq_res = {
let __choice_res = slice_eq(__input, __state, __pos, "typeof");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = slice_eq(__input, __state, __pos, "__typeof");
match __seq_res {
Matched(__pos, _) => match slice_eq(__input, __state, __pos, "__") {
Matched(__newpos, _) => Matched(__newpos, ()),
Failed => Matched(__pos, ()),
},
Failed => Failed,
}
}
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = {
__state.suppress_fail += 1;
let __assert_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'_' | 'a'...'z' | 'A'...'Z' | '0'...'9' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[_a-zA-Z0-9]"),
}
} else {
__state.mark_failure(__pos, "[_a-zA-Z0-9]")
};
__state.suppress_fail -= 1;
match __assert_res {
Failed => Matched(__pos, ()),
Matched(..) => Failed,
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { e }),
Failed => Failed,
}
}
Failed => Failed,
}
};
__state.suppress_fail -= 1;
res
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, "(");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_typeof_specifier0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => {
let __seq_res = __parse__(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, _) => {
let __seq_res = slice_eq(__input, __state, __pos, ")");
match __seq_res {
Matched(__pos, _) => Matched(__pos, { TypeSpecifier::TypeOf(e) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
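// The argument of `typeof(...)` may be either an expression or a type name;
// the expression alternative is tried first, following PEG ordered choice.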
fn __parse_typeof_specifier0<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<TypeOf> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, l) => {
let __seq_res = __parse_expression0(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, e) => {
let __seq_res = Matched(__pos, __pos);
match __seq_res {
Matched(__pos, r) => Matched(__pos, { Node::new(e, Span::span(l, r)) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, e) => Matched(__pos, { TypeOf::Expression(e) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = __parse_type_name(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, t) => Matched(__pos, { TypeOf::Type(t) }),
Failed => Failed,
}
}
}
}
}
fn __parse_ts18661_float_type_specifier<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<TS18661FloatType> {
#![allow(non_snake_case, unused)]
{
let __choice_res = __parse_ts18661_binary_float(__input, __state, __pos, env);
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => __parse_ts18661_decimal_float(__input, __state, __pos, env),
}
}
}
fn __parse_ts18661_binary_float<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<TS18661FloatType> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "_Float");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_ts18661_binary_width(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, width) => {
let __seq_res = match slice_eq(__input, __state, __pos, "x") {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, extended) => Matched(__pos, { ts18661_float(true, width, extended.is_some()) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_ts18661_binary_width<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<usize> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let str_start = __pos;
match {
let __choice_res = slice_eq(__input, __state, __pos, "16");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = slice_eq(__input, __state, __pos, "32");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = slice_eq(__input, __state, __pos, "64");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => slice_eq(__input, __state, __pos, "128"),
}
}
}
}
}
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { n.parse().unwrap() }),
Failed => Failed,
}
}
}
fn __parse_ts18661_decimal_float<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<TS18661FloatType> {
#![allow(non_snake_case, unused)]
{
let __seq_res = slice_eq(__input, __state, __pos, "_Decimal");
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_ts18661_decimal_width(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, width) => {
let __seq_res = match slice_eq(__input, __state, __pos, "x") {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, extended) => Matched(__pos, { ts18661_float(false, width, extended.is_some()) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
fn __parse_ts18661_decimal_width<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<usize> {
#![allow(non_snake_case, unused)]
{
let __seq_res = {
let str_start = __pos;
match {
let __choice_res = slice_eq(__input, __state, __pos, "32");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = slice_eq(__input, __state, __pos, "64");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => slice_eq(__input, __state, __pos, "128"),
}
}
}
} {
Matched(__newpos, _) => Matched(__newpos, &__input[str_start..__newpos]),
Failed => Failed,
}
};
match __seq_res {
Matched(__pos, n) => Matched(__pos, { n.parse().unwrap() }),
Failed => Failed,
}
}
}
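// TS 18661-3 floating-point constant suffixes: `df`/`dd`/`dl` (and their
// uppercase forms) select the _Decimal32/64/128 types, while `f<width>` and
// `d<width>` with an optional trailing `x` select binary and decimal
// interchange/extended types of the given width.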
fn __parse_ts18661_float_suffix<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<TS18661FloatType> {
#![allow(non_snake_case, unused)]
{
let __choice_res = {
let __seq_res = {
let __choice_res = slice_eq(__input, __state, __pos, "df");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => slice_eq(__input, __state, __pos, "DF"),
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ts18661_float(false, 32, false) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __choice_res = slice_eq(__input, __state, __pos, "dd");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => slice_eq(__input, __state, __pos, "DD"),
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ts18661_float(false, 64, false) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = {
let __choice_res = slice_eq(__input, __state, __pos, "dl");
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => slice_eq(__input, __state, __pos, "DL"),
}
};
match __seq_res {
Matched(__pos, _) => Matched(__pos, { ts18661_float(false, 128, false) }),
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __choice_res = {
let __seq_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'f' | 'F' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[fF]"),
}
} else {
__state.mark_failure(__pos, "[fF]")
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_ts18661_binary_width(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, width) => {
let __seq_res = match slice_eq(__input, __state, __pos, "x") {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, extended) => Matched(__pos, { ts18661_float(true, width, extended.is_some()) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
};
match __choice_res {
Matched(__pos, __value) => Matched(__pos, __value),
Failed => {
let __seq_res = if __input.len() > __pos {
let (__ch, __next) = char_range_at(__input, __pos);
match __ch {
'd' | 'D' => Matched(__next, ()),
_ => __state.mark_failure(__pos, "[dD]"),
}
} else {
__state.mark_failure(__pos, "[dD]")
};
match __seq_res {
Matched(__pos, _) => {
let __seq_res = __parse_ts18661_decimal_width(__input, __state, __pos, env);
match __seq_res {
Matched(__pos, width) => {
let __seq_res = match slice_eq(__input, __state, __pos, "x") {
Matched(__newpos, __value) => Matched(__newpos, Some(__value)),
Failed => Matched(__pos, None),
};
match __seq_res {
Matched(__pos, extended) => Matched(__pos, { ts18661_float(false, width, extended.is_some()) }),
Failed => Failed,
}
}
Failed => Failed,
}
}
Failed => Failed,
}
}
}
}
}
}
}
}
}
}
}
fn __parse_clang_guard<'input>(__input: &'input str, __state: &mut ParseState<'input>, __pos: usize, env: &mut Env) -> RuleResult<()> {
#![allow(non_snake_case, unused)]
match {
if env.extensions_clang {
Ok(())
} else {
Err("clang extensions disabled")
}
} {
Ok(res) => Matched(__pos, res),
Err(expected) => {
__state.mark_failure(__pos, expected);
Failed
}
}
}
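// Public entry points. Each one runs its rule from the start of the input with
// a fresh ParseState and succeeds only if the entire input is consumed;
// otherwise it reports a ParseError at the furthest position reached.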
pub fn constant<'input>(__input: &'input str, env: &mut Env) -> ParseResult<Constant> {
#![allow(non_snake_case, unused)]
let mut __state = ParseState::new();
match __parse_constant(__input, &mut __state, 0, env) {
Matched(__pos, __value) => {
if __pos == __input.len() {
return Ok(__value);
}
}
_ => {}
}
let (__line, __col) = pos_to_line(__input, __state.max_err_pos);
Err(ParseError { line: __line, column: __col, offset: __state.max_err_pos, expected: __state.expected })
}
pub fn string_literal<'input>(__input: &'input str, env: &mut Env) -> ParseResult<Node<Vec<String>>> {
#![allow(non_snake_case, unused)]
let mut __state = ParseState::new();
match __parse_string_literal(__input, &mut __state, 0, env) {
Matched(__pos, __value) => {
if __pos == __input.len() {
return Ok(__value);
}
}
_ => {}
}
let (__line, __col) = pos_to_line(__input, __state.max_err_pos);
Err(ParseError { line: __line, column: __col, offset: __state.max_err_pos, expected: __state.expected })
}
pub fn expression<'input>(__input: &'input str, env: &mut Env) -> ParseResult<Box<Node<Expression>>> {
#![allow(non_snake_case, unused)]
let mut __state = ParseState::new();
match __parse_expression(__input, &mut __state, 0, env) {
Matched(__pos, __value) => {
if __pos == __input.len() {
return Ok(__value);
}
}
_ => {}
}
let (__line, __col) = pos_to_line(__input, __state.max_err_pos);
Err(ParseError { line: __line, column: __col, offset: __state.max_err_pos, expected: __state.expected })
}
pub fn declaration<'input>(__input: &'input str, env: &mut Env) -> ParseResult<Node<Declaration>> {
#![allow(non_snake_case, unused)]
let mut __state = ParseState::new();
match __parse_declaration(__input, &mut __state, 0, env) {
Matched(__pos, __value) => {
if __pos == __input.len() {
return Ok(__value);
}
}
_ => {}
}
let (__line, __col) = pos_to_line(__input, __state.max_err_pos);
Err(ParseError { line: __line, column: __col, offset: __state.max_err_pos, expected: __state.expected })
}
pub fn statement<'input>(__input: &'input str, env: &mut Env) -> ParseResult<Box<Node<Statement>>> {
#![allow(non_snake_case, unused)]
let mut __state = ParseState::new();
match __parse_statement(__input, &mut __state, 0, env) {
Matched(__pos, __value) => {
if __pos == __input.len() {
return Ok(__value);
}
}
_ => {}
}
let (__line, __col) = pos_to_line(__input, __state.max_err_pos);
Err(ParseError { line: __line, column: __col, offset: __state.max_err_pos, expected: __state.expected })
}
pub fn translation_unit<'input>(__input: &'input str, env: &mut Env) -> ParseResult<TranslationUnit> {
#![allow(non_snake_case, unused)]
let mut __state = ParseState::new();
match __parse_translation_unit(__input, &mut __state, 0, env) {
Matched(__pos, __value) => {
if __pos == __input.len() {
return Ok(__value);
}
}
_ => {}
}
let (__line, __col) = pos_to_line(__input, __state.max_err_pos);
Err(ParseError { line: __line, column: __col, offset: __state.max_err_pos, expected: __state.expected })
}
|
{
break;
}
|
score.py
|
import argparse
import json
import os
from blanc import BlancHelp, BlancTune
def main(args):
|
if __name__ == "__main__":
argp = argparse.ArgumentParser()
argp.add_argument("--input-file", required=True)
argp.add_argument("--type", required=True, choices=["help", "tune"])
argp.add_argument("--device", required=True, type=int)
argp.add_argument("--random-seed", required=True, type=int)
argp.add_argument("--kwargs", required=True)
argp.add_argument("--output-file", required=True)
args = argp.parse_args()
main(args)
|
kwargs = json.loads(args.kwargs)
device = "cpu" if args.device == -1 else "cuda"
if args.type == "tune":
blanc = BlancTune(device=device, random_seed=args.random_seed, **kwargs)
elif args.type == "help":
blanc = BlancHelp(device=device, **kwargs)
    else:
        raise ValueError(f"Unknown BLANC type: {args.type}")
documents = []
summaries_list = []
with open(args.input_file, "r") as f:
for line in f:
data = json.loads(line)
documents.append(data["document"])
summaries_list.append(data["summaries"])
scores_list = blanc.eval_summaries_for_docs(documents, summaries_list)
dirname = os.path.dirname(args.output_file)
if dirname:
os.makedirs(dirname, exist_ok=True)
with open(args.output_file, "w") as out:
out.write(json.dumps(scores_list))
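# Example invocation (a sketch; the file names below are hypothetical):
#   python score.py --input-file summaries.jsonl --type tune --device 0 \
#       --random-seed 42 --kwargs '{}' --output-file scores.json
# Each input line must be a JSON object with "document" and "summaries" keys;
# pass --device -1 to run on CPU.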
|
auth_test.go
|
package inmem
import (
"context"
"testing"
"github.com/influxdata/platform"
platformtesting "github.com/influxdata/platform/testing"
)
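// initAuthorizationService seeds an in-memory service with the fixture's
// users, organizations, and authorizations so the shared platformtesting
// suite can exercise it.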
func initAuthorizationService(f platformtesting.AuthorizationFields, t *testing.T) (platform.AuthorizationService, string, func()) {
s := NewService()
s.IDGenerator = f.IDGenerator
s.TokenGenerator = f.TokenGenerator
ctx := context.Background()
for _, u := range f.Users {
if err := s.PutUser(ctx, u); err != nil {
t.Fatalf("failed to populate users")
}
}
for _, o := range f.Orgs {
if err := s.PutOrganization(ctx, o); err != nil {
t.Fatalf("failed to populate organizations")
}
}
for _, u := range f.Authorizations {
if err := s.PutAuthorization(ctx, u); err != nil {
t.Fatalf("failed to populate authorizations")
}
}
|
func TestAuthorizationService(t *testing.T) {
platformtesting.AuthorizationService(initAuthorizationService, t)
}
|
return s, OpPrefix, func() {}
}
|
tl_check_change_phone_number_code_gen.go
|
// Code generated by gotdgen, DO NOT EDIT.
package tdapi
import (
"context"
"errors"
"fmt"
"sort"
"strings"
"go.uber.org/multierr"
"github.com/gotd/td/bin"
"github.com/gotd/td/tdjson"
"github.com/gotd/td/tdp"
"github.com/gotd/td/tgerr"
)
// No-op definition for keeping imports.
var (
_ = bin.Buffer{}
_ = context.Background()
_ = fmt.Stringer(nil)
_ = strings.Builder{}
_ = errors.Is
_ = multierr.AppendInto
_ = sort.Ints
_ = tdp.Format
_ = tgerr.Error{}
_ = tdjson.Encoder{}
)
// CheckChangePhoneNumberCodeRequest represents TL type `checkChangePhoneNumberCode#9976a263`.
type CheckChangePhoneNumberCodeRequest struct {
// Authentication code to check
Code string
}
// CheckChangePhoneNumberCodeRequestTypeID is TL type id of CheckChangePhoneNumberCodeRequest.
const CheckChangePhoneNumberCodeRequestTypeID = 0x9976a263
// Ensuring interfaces in compile-time for CheckChangePhoneNumberCodeRequest.
var (
_ bin.Encoder = &CheckChangePhoneNumberCodeRequest{}
_ bin.Decoder = &CheckChangePhoneNumberCodeRequest{}
_ bin.BareEncoder = &CheckChangePhoneNumberCodeRequest{}
_ bin.BareDecoder = &CheckChangePhoneNumberCodeRequest{}
)
func (c *CheckChangePhoneNumberCodeRequest) Zero() bool {
if c == nil {
return true
}
if !(c.Code == "") {
return false
}
return true
}
// String implements fmt.Stringer.
func (c *CheckChangePhoneNumberCodeRequest) String() string {
if c == nil {
return "CheckChangePhoneNumberCodeRequest(nil)"
}
type Alias CheckChangePhoneNumberCodeRequest
return fmt.Sprintf("CheckChangePhoneNumberCodeRequest%+v", Alias(*c))
}
// TypeID returns type id in TL schema.
//
// See https://core.telegram.org/mtproto/TL-tl#remarks.
func (*CheckChangePhoneNumberCodeRequest) TypeID() uint32 {
return CheckChangePhoneNumberCodeRequestTypeID
}
// TypeName returns name of type in TL schema.
func (*CheckChangePhoneNumberCodeRequest) TypeName() string {
return "checkChangePhoneNumberCode"
}
// TypeInfo returns info about TL type.
func (c *CheckChangePhoneNumberCodeRequest) TypeInfo() tdp.Type {
typ := tdp.Type{
Name: "checkChangePhoneNumberCode",
ID: CheckChangePhoneNumberCodeRequestTypeID,
}
if c == nil {
typ.Null = true
return typ
}
typ.Fields = []tdp.Field{
{
Name: "Code",
SchemaName: "code",
},
}
return typ
}
// Encode implements bin.Encoder.
func (c *CheckChangePhoneNumberCodeRequest) Encode(b *bin.Buffer) error {
if c == nil {
return fmt.Errorf("can't encode checkChangePhoneNumberCode#9976a263 as nil")
}
b.PutID(CheckChangePhoneNumberCodeRequestTypeID)
return c.EncodeBare(b)
}
// EncodeBare implements bin.BareEncoder.
func (c *CheckChangePhoneNumberCodeRequest) EncodeBare(b *bin.Buffer) error {
if c == nil {
return fmt.Errorf("can't encode checkChangePhoneNumberCode#9976a263 as nil")
}
b.PutString(c.Code)
return nil
}
// Decode implements bin.Decoder.
func (c *CheckChangePhoneNumberCodeRequest) Decode(b *bin.Buffer) error {
if c == nil {
return fmt.Errorf("can't decode checkChangePhoneNumberCode#9976a263 to nil")
}
if err := b.ConsumeID(CheckChangePhoneNumberCodeRequestTypeID); err != nil {
return fmt.Errorf("unable to decode checkChangePhoneNumberCode#9976a263: %w", err)
}
return c.DecodeBare(b)
}
// DecodeBare implements bin.BareDecoder.
func (c *CheckChangePhoneNumberCodeRequest) DecodeBare(b *bin.Buffer) error {
if c == nil {
return fmt.Errorf("can't decode checkChangePhoneNumberCode#9976a263 to nil")
}
{
value, err := b.String()
if err != nil {
return fmt.Errorf("unable to decode checkChangePhoneNumberCode#9976a263: field code: %w", err)
}
c.Code = value
}
return nil
}
|
func (c *CheckChangePhoneNumberCodeRequest) EncodeTDLibJSON(b tdjson.Encoder) error {
if c == nil {
return fmt.Errorf("can't encode checkChangePhoneNumberCode#9976a263 as nil")
}
b.ObjStart()
b.PutID("checkChangePhoneNumberCode")
b.Comma()
b.FieldStart("code")
b.PutString(c.Code)
b.Comma()
b.StripComma()
b.ObjEnd()
return nil
}
// DecodeTDLibJSON implements tdjson.TDLibDecoder.
func (c *CheckChangePhoneNumberCodeRequest) DecodeTDLibJSON(b tdjson.Decoder) error {
if c == nil {
return fmt.Errorf("can't decode checkChangePhoneNumberCode#9976a263 to nil")
}
return b.Obj(func(b tdjson.Decoder, key []byte) error {
switch string(key) {
case tdjson.TypeField:
if err := b.ConsumeID("checkChangePhoneNumberCode"); err != nil {
return fmt.Errorf("unable to decode checkChangePhoneNumberCode#9976a263: %w", err)
}
case "code":
value, err := b.String()
if err != nil {
return fmt.Errorf("unable to decode checkChangePhoneNumberCode#9976a263: field code: %w", err)
}
c.Code = value
default:
return b.Skip()
}
return nil
})
}
// GetCode returns value of Code field.
func (c *CheckChangePhoneNumberCodeRequest) GetCode() (value string) {
if c == nil {
return
}
return c.Code
}
// CheckChangePhoneNumberCode invokes method checkChangePhoneNumberCode#9976a263 returning error if any.
func (c *Client) CheckChangePhoneNumberCode(ctx context.Context, code string) error {
var ok Ok
request := &CheckChangePhoneNumberCodeRequest{
Code: code,
}
if err := c.rpc.Invoke(ctx, request, &ok); err != nil {
return err
}
return nil
}
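// Usage sketch (hypothetical; assumes an initialized Client and a live context):
//
//	if err := client.CheckChangePhoneNumberCode(ctx, "22222"); err != nil {
//		// handle the RPC error
//	}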
|
// EncodeTDLibJSON implements tdjson.TDLibEncoder.
|
index.tsx
|
/* VIEW COMPONENT: HOME
========================================================================== */
|
export default ViewHome;
|
const ViewHome = () => {
return <>{"Home"}</>;
};
|
result.rs
|
use crate::events::http_interaction::HttpInteraction;
use crate::learn_shape::TrailObservationsResult;
use crate::shapes::{JsonTrail, ShapeDiffResult};
use crate::state::endpoint::{PathComponentId, RequestId, ResponseId, ShapeId};
use serde::{Deserialize, Serialize};
use std::collections::hash_map::{DefaultHasher, HashMap};
use std::hash::{Hash, Hasher};
#[derive(Debug, Deserialize, Serialize, Hash)]
pub enum InteractionDiffResult {
UnmatchedRequestUrl(UnmatchedRequestUrl),
UnmatchedRequestBodyContentType(UnmatchedRequestBodyContentType),
UnmatchedRequestBodyShape(UnmatchedRequestBodyShape),
UnmatchedResponseBodyContentType(UnmatchedResponseBodyContentType),
UnmatchedResponseBodyShape(UnmatchedResponseBodyShape),
//
// Matches
// -------
#[serde(skip)]
MatchedRequestBodyContentType(MatchedRequestBodyContentType),
#[serde(skip)]
MatchedResponseBodyContentType(MatchedResponseBodyContentType),
}
impl InteractionDiffResult {
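  // Note: DefaultHasher's output is not specified to be stable across Rust
  // releases, so fingerprints should only be compared between values produced
  // by the same build.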
pub fn fingerprint(&self) -> String {
let mut hash_state = DefaultHasher::new();
Hash::hash(self, &mut hash_state);
format!("{:x}", hash_state.finish())
}
pub fn interaction_trail(&self) -> &InteractionTrail {
match self {
InteractionDiffResult::UnmatchedRequestUrl(diff) => &diff.interaction_trail,
InteractionDiffResult::UnmatchedRequestBodyContentType(diff) => &diff.interaction_trail,
InteractionDiffResult::UnmatchedRequestBodyShape(diff) => &diff.interaction_trail,
InteractionDiffResult::UnmatchedResponseBodyContentType(diff) => &diff.interaction_trail,
InteractionDiffResult::UnmatchedResponseBodyShape(diff) => &diff.interaction_trail,
InteractionDiffResult::MatchedRequestBodyContentType(diff) => &diff.interaction_trail,
InteractionDiffResult::MatchedResponseBodyContentType(diff) => &diff.interaction_trail,
}
}
pub fn requests_trail(&self) -> &RequestSpecTrail {
match self {
InteractionDiffResult::UnmatchedRequestUrl(diff) => &diff.requests_trail,
InteractionDiffResult::UnmatchedRequestBodyContentType(diff) => &diff.requests_trail,
InteractionDiffResult::UnmatchedRequestBodyShape(diff) => &diff.requests_trail,
InteractionDiffResult::UnmatchedResponseBodyContentType(diff) => &diff.requests_trail,
InteractionDiffResult::UnmatchedResponseBodyShape(diff) => &diff.requests_trail,
InteractionDiffResult::MatchedRequestBodyContentType(diff) => &diff.requests_trail,
InteractionDiffResult::MatchedResponseBodyContentType(diff) => &diff.requests_trail,
}
}
pub fn json_trail(&self) -> Option<&JsonTrail> {
let shape_diff_result = match self {
InteractionDiffResult::UnmatchedRequestBodyShape(diff) => Some(&diff.shape_diff_result),
InteractionDiffResult::UnmatchedResponseBodyShape(diff) => Some(&diff.shape_diff_result),
_ => None,
}?;
match shape_diff_result {
ShapeDiffResult::UnmatchedShape { json_trail, .. } => Some(json_trail),
ShapeDiffResult::UnspecifiedShape { json_trail, .. } => Some(json_trail),
}
}
}
////////////////////////////////////////////////////////////////////////////////
#[derive(Debug, Deserialize, Serialize, Hash)]
#[serde(rename_all = "camelCase")]
pub struct UnmatchedRequestUrl {
pub interaction_trail: InteractionTrail,
pub requests_trail: RequestSpecTrail,
}
impl UnmatchedRequestUrl {
pub fn new(interaction_trail: InteractionTrail, requests_trail: RequestSpecTrail) -> Self {
    UnmatchedRequestUrl {
      interaction_trail,
      requests_trail,
    }
}
}
#[derive(Debug, Deserialize, Serialize, Hash)]
#[serde(rename_all = "camelCase")]
pub struct UnmatchedRequestBodyContentType {
pub interaction_trail: InteractionTrail,
pub requests_trail: RequestSpecTrail,
}
impl UnmatchedRequestBodyContentType {
pub fn new(interaction_trail: InteractionTrail, requests_trail: RequestSpecTrail) -> Self {
    UnmatchedRequestBodyContentType {
      interaction_trail,
      requests_trail,
    }
}
}
#[derive(Clone, Debug, Serialize, Hash)]
pub struct MatchedRequestBodyContentType {
pub interaction_trail: InteractionTrail,
pub requests_trail: RequestSpecTrail,
pub root_shape_id: ShapeId,
}
impl MatchedRequestBodyContentType {
pub fn new(
interaction_trail: InteractionTrail,
requests_trail: RequestSpecTrail,
root_shape_id: ShapeId,
) -> Self {
    MatchedRequestBodyContentType {
      interaction_trail,
      requests_trail,
      root_shape_id,
    }
}
pub fn into_shape_diff(self, shape_diff_result: ShapeDiffResult) -> UnmatchedRequestBodyShape {
UnmatchedRequestBodyShape::new(
self.interaction_trail,
self.requests_trail,
shape_diff_result,
)
}
}
#[derive(Debug, Deserialize, Serialize, Hash)]
#[serde(rename_all = "camelCase")]
pub struct UnmatchedResponseBodyContentType {
pub interaction_trail: InteractionTrail,
pub requests_trail: RequestSpecTrail,
}
impl UnmatchedResponseBodyContentType {
pub fn new(interaction_trail: InteractionTrail, requests_trail: RequestSpecTrail) -> Self {
    UnmatchedResponseBodyContentType {
      interaction_trail,
      requests_trail,
    }
}
}
#[derive(Debug, Serialize, Clone, Hash)]
pub struct MatchedResponseBodyContentType {
pub interaction_trail: InteractionTrail,
pub requests_trail: RequestSpecTrail,
pub root_shape_id: ShapeId,
}
impl MatchedResponseBodyContentType {
pub fn new(
interaction_trail: InteractionTrail,
requests_trail: RequestSpecTrail,
root_shape_id: ShapeId,
) -> Self {
    MatchedResponseBodyContentType {
      interaction_trail,
      requests_trail,
      root_shape_id,
    }
}
pub fn into_shape_diff(self, shape_diff_result: ShapeDiffResult) -> UnmatchedResponseBodyShape {
UnmatchedResponseBodyShape::new(
self.interaction_trail,
self.requests_trail,
shape_diff_result,
)
}
}
#[derive(Debug, Deserialize, Serialize, Hash)]
#[serde(rename_all = "camelCase")]
pub struct UnmatchedRequestBodyShape {
pub interaction_trail: InteractionTrail,
pub requests_trail: RequestSpecTrail,
pub shape_diff_result: ShapeDiffResult,
}
impl UnmatchedRequestBodyShape {
pub fn new(
interaction_trail: InteractionTrail,
requests_trail: RequestSpecTrail,
shape_diff_result: ShapeDiffResult,
) -> Self {
    UnmatchedRequestBodyShape {
      interaction_trail,
      requests_trail,
      shape_diff_result,
    }
}
}
#[derive(Debug, Deserialize, Serialize, Hash)]
#[serde(rename_all = "camelCase")]
pub struct UnmatchedResponseBodyShape {
pub interaction_trail: InteractionTrail,
pub requests_trail: RequestSpecTrail,
pub shape_diff_result: ShapeDiffResult,
}
impl UnmatchedResponseBodyShape {
pub fn new(
interaction_trail: InteractionTrail,
requests_trail: RequestSpecTrail,
shape_diff_result: ShapeDiffResult,
) -> Self {
    UnmatchedResponseBodyShape {
      interaction_trail,
      requests_trail,
      shape_diff_result,
    }
}
}
////////////////////////////////////////////////////////////////////////////////
#[derive(Clone, Debug)]
pub struct BodyAnalysisResult {
pub body_location: BodyAnalysisLocation,
pub trail_observations: TrailObservationsResult,
}
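/// Where a body was observed: against an unmatched request or response (keyed
/// by path and method) or against a matched one (keyed by request/response id).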
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub enum BodyAnalysisLocation {
UnmatchedRequest {
path_id: PathComponentId,
method: String,
content_type: Option<String>,
},
UnmatchedResponse {
path_id: PathComponentId,
method: String,
content_type: Option<String>,
status_code: u16,
},
MatchedRequest {
request_id: RequestId,
content_type: Option<String>,
},
MatchedResponse {
response_id: ResponseId,
content_type: Option<String>,
status_code: u16,
},
}
impl BodyAnalysisLocation {
pub fn content_type(&self) -> Option<&String> {
match self {
BodyAnalysisLocation::UnmatchedRequest { content_type, .. } => content_type.as_ref(),
BodyAnalysisLocation::UnmatchedResponse { content_type, .. } => content_type.as_ref(),
BodyAnalysisLocation::MatchedRequest { content_type, .. } => content_type.as_ref(),
BodyAnalysisLocation::MatchedResponse { content_type, .. } => content_type.as_ref(),
}
}
}
impl From<UnmatchedRequestBodyContentType> for BodyAnalysisLocation {
fn from(diff: UnmatchedRequestBodyContentType) -> Self {
let interaction_trail = diff.interaction_trail;
Self::UnmatchedRequest {
path_id: diff
.requests_trail
.get_path_id()
.expect("UnmatchedRequestBodyContentType implies request to have a known path")
.clone(),
method: interaction_trail
.get_method()
.expect("UnmatchedRequestBodyContentType implies request to have a known method")
.clone(),
content_type: interaction_trail.get_request_content_type().cloned(),
}
}
}
impl From<UnmatchedResponseBodyContentType> for BodyAnalysisLocation {
fn from(diff: UnmatchedResponseBodyContentType) -> Self {
let interaction_trail = diff.interaction_trail;
Self::UnmatchedResponse {
path_id: diff
.requests_trail
.get_path_id()
.expect("UnmatchedResponseBodyContentType implies response to have a known path")
.clone(),
method: interaction_trail
.get_method()
.expect("UnmatchedResponseBodyContentType implies response to have a known method")
.clone(),
content_type: interaction_trail.get_response_content_type().cloned(),
status_code: interaction_trail
.get_response_status_code()
.expect("UnmatchedResponseBodyContentType implies response to have a status code"),
}
}
}
impl From<MatchedRequestBodyContentType> for BodyAnalysisLocation {
fn from(diff: MatchedRequestBodyContentType) -> Self {
let interaction_trail = diff.interaction_trail;
Self::MatchedRequest {
request_id: diff
.requests_trail
.get_request_id()
.expect("MatchedRequestBodyContentType implies request to have a known request id")
.clone(),
content_type: interaction_trail.get_request_content_type().cloned(),
}
}
}
impl From<MatchedResponseBodyContentType> for BodyAnalysisLocation {
fn from(diff: MatchedResponseBodyContentType) -> Self {
let interaction_trail = diff.interaction_trail;
Self::MatchedResponse {
response_id: diff
.requests_trail
.get_response_id()
.expect("MatchedResponseBodyContentType implies response to have a known response id")
.clone(),
content_type: interaction_trail.get_response_content_type().cloned(),
status_code: interaction_trail
.get_response_status_code()
.expect("MatchedResponseBodyContentType implies response to have a status code"),
}
}
}
////////////////////////////////////////////////////////////////////////////////
#[derive(Clone, Debug, Deserialize, Serialize, Hash)]
pub struct InteractionTrail {
pub path: Vec<InteractionTrailPathComponent>,
}
impl InteractionTrail {
pub fn new(path: Vec<InteractionTrailPathComponent>) -> Self {
InteractionTrail { path }
}
pub fn empty() -> Self {
InteractionTrail { path: vec![] }
}
pub fn with_url(&mut self, url: String) {
self
.path
.push(InteractionTrailPathComponent::Url { path: url })
}
pub fn with_method(&mut self, method: String) {
self
.path
.push(InteractionTrailPathComponent::Method { method })
}
pub fn with_request_body(&mut self, content_type: String) {
self
.path
.push(InteractionTrailPathComponent::RequestBody { content_type })
}
pub fn with_response_body(&mut self, content_type: String, status_code: u16) {
self.path.push(InteractionTrailPathComponent::ResponseBody {
content_type,
status_code,
})
}
pub fn get_method(&self) -> Option<&String> {
self.path.iter().find_map(|component| match component {
InteractionTrailPathComponent::Method { method } => Some(method),
_ => None,
})
}
pub fn get_request_content_type(&self) -> Option<&String> {
self.path.iter().find_map(|component| match component {
InteractionTrailPathComponent::RequestBody { content_type } => Some(content_type),
_ => None,
})
}
pub fn get_response_content_type(&self) -> Option<&String> {
self.path.iter().find_map(|component| match component {
InteractionTrailPathComponent::ResponseBody { content_type, .. } => Some(content_type),
_ => None,
})
}
pub fn get_response_status_code(&self) -> Option<u16> {
self.path.iter().find_map(|component| match component {
InteractionTrailPathComponent::ResponseBody { status_code, .. } => Some(*status_code),
InteractionTrailPathComponent::ResponseStatusCode { status_code } => Some(*status_code),
InteractionTrailPathComponent::Method { .. }
| InteractionTrailPathComponent::RequestBody { .. }
| InteractionTrailPathComponent::Url { .. } => None,
})
}
pub fn matches_interaction(&self, interaction: &HttpInteraction) -> bool {
#[derive(Default, Debug)]
struct InteractionIdentifiers<'a> {
path: Option<&'a String>,
method: Option<&'a String>,
request_content_type: Option<&'a String>,
response_content_type: Option<&'a String>,
response_status_code: Option<u16>,
}
impl<'a> From<&'a InteractionTrail> for InteractionIdentifiers<'a> {
fn from(trail: &'a InteractionTrail) -> Self {
trail.path.iter().fold(
InteractionIdentifiers::default(),
|mut identifiers, component| {
match component {
InteractionTrailPathComponent::Url { path } => {
identifiers.path.replace(path);
}
InteractionTrailPathComponent::Method { method } => {
identifiers.method.replace(method);
}
InteractionTrailPathComponent::RequestBody { content_type } => {
identifiers.request_content_type.replace(content_type);
}
InteractionTrailPathComponent::ResponseStatusCode { status_code } => {
identifiers.response_status_code.replace(*status_code);
}
InteractionTrailPathComponent::ResponseBody {
content_type,
status_code,
} => {
identifiers.response_status_code.replace(*status_code);
identifiers.response_content_type.replace(content_type);
}
};
identifiers
},
)
}
}
let identifiers = InteractionIdentifiers::from(self);
let conditions = [
matches!(identifiers.path, Some(path) if path == &interaction.request.path),
matches!(identifiers.method, Some(method) if method == &interaction.request.method),
matches!(identifiers.response_status_code, Some(status_code) if status_code == interaction.response.status_code),
identifiers.request_content_type == interaction.request.body.content_type.as_ref(),
identifiers.response_content_type == interaction.response.body.content_type.as_ref(),
];
// dbg!(&identifiers, &conditions);
conditions.iter().all(|c| *c)
}
}
////////////////////////////////////////////////////////////////////////////////
#[derive(Clone, Debug, Deserialize, Serialize, Hash)]
pub enum RequestSpecTrail {
SpecRoot(SpecRoot),
SpecPath(SpecPath),
SpecRequestRoot(SpecRequestRoot),
SpecRequestBody(SpecRequestBody),
SpecResponseRoot(SpecResponseRoot),
SpecResponseBody(SpecResponseBody),
}
impl RequestSpecTrail {
pub fn get_path_id(&self) -> Option<&String> {
match self {
RequestSpecTrail::SpecPath(spec_path) => Some(&spec_path.path_id),
_ => None,
}
}
pub fn get_request_id(&self) -> Option<&String> {
match self {
RequestSpecTrail::SpecRequestBody(spec_body) => Some(&spec_body.request_id),
RequestSpecTrail::SpecRequestRoot(spec_body) => Some(&spec_body.request_id),
_ => None,
}
}
pub fn
|
(&self) -> Option<&String> {
match self {
RequestSpecTrail::SpecResponseBody(spec_body) => Some(&spec_body.response_id),
RequestSpecTrail::SpecResponseRoot(spec_body) => Some(&spec_body.response_id),
_ => None,
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize, Hash)]
pub struct SpecRoot {}
#[derive(Clone, Debug, Deserialize, Serialize, Hash)]
#[serde(rename_all = "camelCase")]
pub struct SpecPath {
pub path_id: PathComponentId,
}
#[derive(Clone, Debug, Deserialize, Serialize, Hash)]
#[serde(rename_all = "camelCase")]
pub struct SpecRequestRoot {
pub request_id: RequestId,
}
#[derive(Clone, Debug, Deserialize, Serialize, Hash)]
#[serde(rename_all = "camelCase")]
pub struct SpecRequestBody {
pub request_id: RequestId,
}
#[derive(Clone, Debug, Deserialize, Serialize, Hash)]
#[serde(rename_all = "camelCase")]
pub struct SpecResponseRoot {
pub response_id: ResponseId,
}
#[derive(Clone, Debug, Deserialize, Serialize, Hash)]
#[serde(rename_all = "camelCase")]
pub struct SpecResponseBody {
pub response_id: ResponseId,
}
//@GOTCHA make sure these serialize to match the existing Scala code
#[derive(Clone, Debug, Deserialize, Serialize, Hash)]
pub enum InteractionTrailPathComponent {
Url {
path: String,
},
Method {
method: String,
},
#[serde(rename_all = "camelCase")]
RequestBody {
content_type: String,
},
#[serde(rename_all = "camelCase")]
ResponseStatusCode {
status_code: u16,
},
#[serde(rename_all = "camelCase")]
ResponseBody {
content_type: String,
status_code: u16,
},
}
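// Illustrative serde sketch (not part of the original source; assumes
// serde_json is available as a dev-dependency): with serde's default
// externally tagged enum representation and the per-variant camelCase renames
// above, a ResponseBody component should serialize with camelCase field names.
#[cfg(test)]
mod interaction_trail_component_serialization_sketch {
use super::*;
#[test]
fn response_body_component_serializes_with_camel_case_fields() {
let component = InteractionTrailPathComponent::ResponseBody {
content_type: String::from("text/html"),
status_code: 200,
};
let json = serde_json::to_string(&component).unwrap();
assert_eq!(
json,
r#"{"ResponseBody":{"contentType":"text/html","statusCode":200}}"#
);
}
}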
|
get_response_id
|
index.ts
|
export {default} from './ResourceActionsMenu';
| ||
response.go
|
package goat
import "strings"
// Response is the type for managing response parameters.
type Response struct {
// Status is the response status code
Status int
// Headers is the response headers
Headers []H
// Body is the response body
Body string
}
func (r *Response) isJson() bool {
for _, header := range r.Headers {
if header.Key == "Content-Type" && strings.HasPrefix(header.Value, contentTypeJson)
|
}
return false
}
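// exampleIsJson is an illustrative sketch (not part of the original file).
// Only the media-type prefix of the header value is compared, so a charset
// suffix still counts as JSON (assuming contentTypeJson is "application/json").
func exampleIsJson() bool {
r := Response{
Status: 200,
Headers: []H{{Key: "Content-Type", Value: "application/json; charset=utf-8"}},
Body: `{"ok":true}`,
}
return r.isJson()
}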
|
{
return true
}
|
example.go
|
package connect
import (
"errors"
"fmt"
"strings"
)
const (
white = 1 << iota
black
connectedWhite
connectedBlack
)
type colorFlags struct {
color int8
connected int8
}
var flagsBlack = colorFlags{
color: black,
connected: connectedBlack,
}
var flagsWhite = colorFlags{
color: white,
connected: connectedWhite,
}
type coord struct {
x int
y int
}
type board struct {
height int
width int
fields [][]int8
}
func newBoard(lines []string) (board, error) {
if len(lines) < 1 {
return board{}, errors.New("No lines given")
}
height := len(lines)
if len(lines[0]) < 1 {
return board{}, errors.New("First line is empty string")
}
width := len(lines[0])
// This trick for 2D arrays comes from Effective Go
fields := make([][]int8, height)
fieldsBacker := make([]int8, height*width)
for i := range fields {
fields[i], fieldsBacker = fieldsBacker[:width], fieldsBacker[width:]
}
for y, line := range lines {
for x, c := range line {
switch c {
case 'X':
fields[y][x] = black
case 'O':
fields[y][x] = white
}
// No need for default, zero value already means no stone
}
}
return board{
height: height,
width: width,
fields: fields,
}, nil
}
// Whether there is a stone of the given color at the given location.
//
// Returns both whether there is a stone of the correct color and
// whether the connected flag was set for it.
func (b board) at(c coord, cf colorFlags) (isCorrectColor, isConnected bool) {
f := b.fields[c.y][c.x]
return f&cf.color == cf.color,
f&cf.connected == cf.connected
}
func (b board) markConnected(c coord, cf colorFlags) {
b.fields[c.y][c.x] |= cf.connected
}
func (b board) validCoord(c coord) bool {
return c.x >= 0 && c.x < b.width && c.y >= 0 && c.y < b.height
}
func (b board) neighbors(c coord) []coord {
coords := make([]coord, 0, 6)
dirs := []coord{{1, 0}, {-1, 0}, {0, 1}, {0, -1}, {-1, 1}, {1, -1}}
for _, dir := range dirs {
nc := coord{x: c.x + dir.x, y: c.y + dir.y}
if b.validCoord(nc) {
coords = append(coords, nc)
}
}
return coords
}
func (b board) startCoords(cf colorFlags) []coord {
if cf.color == white {
coords := make([]coord, b.width)
for i := 0; i < b.width; i++ {
coords[i] = coord{x: i}
}
return coords
}
coords := make([]coord, b.height)
for i := 0; i < b.height; i++ {
coords[i] = coord{y: i}
}
return coords
}
func (b board) isTargetCoord(c coord, cf colorFlags) bool {
if cf.color == white {
return c.y == b.height-1
}
return c.x == b.width-1
}
func (b board) evaluate(c coord, cf colorFlags) bool {
stone, connected := b.at(c, cf)
if stone && !connected {
b.markConnected(c, cf)
if b.isTargetCoord(c, cf) {
return true
}
for _, nc := range b.neighbors(c) {
if b.evaluate(nc, cf) {
return true
}
}
}
return false
}
// Helper for debugging.
func (b board) dump() {
for y := 0; y < b.height; y++ {
spaces := strings.Repeat(" ", y)
chars := make([]string, b.width)
for x := 0; x < b.width; x++ {
switch {
case b.fields[y][x]&white == white:
if b.fields[y][x]&connectedWhite == connectedWhite {
chars[x] = "O"
} else {
chars[x] = "o"
}
case b.fields[y][x]&black == black:
if b.fields[y][x]&connectedBlack == connectedBlack {
chars[x] = "X"
} else {
chars[x] = "x"
}
default:
chars[x] = "."
}
}
fmt.Printf("%s%s\n", spaces, strings.Join(chars, " "))
}
}
// ResultOf evaluates the board and returns the winner: "X" for black or
// "O" for white. If there is no winner, ResultOf returns "".
func
|
(lines []string) (string, error) {
board, err := newBoard(lines)
if err != nil {
return "", err
}
for _, c := range board.startCoords(flagsBlack) {
if board.evaluate(c, flagsBlack) {
return "X", nil
}
}
for _, c := range board.startCoords(flagsWhite) {
if board.evaluate(c, flagsWhite) {
return "O", nil
}
}
return "", nil
}
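// exampleResultOf is an illustrative sketch (not part of the original file):
// black ("X") spans the board from the left edge to the right edge, so
// ResultOf reports "X".
func exampleResultOf() {
winner, err := ResultOf([]string{"XXX"})
fmt.Println(winner, err) // prints: X <nil>
}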
|
ResultOf
|
qrcode.js
|
/*
*copyright Ryan Day 2012
*
* Licensed under the MIT license:
* http://www.opensource.org/licenses/mit-license.php
*
* this is the main server side application file for node-qrcode.
* these exports use serverside canvas api methods for file IO and buffers
*
*/
var QRCodeLib = require(__dirname+'/lib/qrcode-draw')
, terminalRender = require(__dirname+'/lib/termialrender.js')
, Canvas = require('canvas')
, fs = require('fs');
var QRCodeDraw = QRCodeLib.QRCodeDraw,
QRCode = QRCodeLib.QRCode;
//EXPORTS
//
// breaking change to 0.1 this used to be an instance. now it returns the constructor.
//
exports.QRCodeDraw = QRCodeDraw;
//
// export error correct levels.
//
exports.errorCorrectLevels = QRCodeLib.QRErrorCorrectLevel;
//
// export original canvas to be used with draw method, esp. Canvas.Image
//
exports.canvas=Canvas;
/*
* provide an api to return the max characters allowed for given dimensions, and minimum error correction level
* the qr code library will always use the maximum error correction level for the given number of chars constrained by size
*/
exports.getMaxChars = function(minErrorCorrectionLevel,width,moduleScale){
//TODO THIS NEEDS TO WORK
console.log('this does not work yet. coming soon =)');
};
var parseOptions = function(options) {
var textKeys = {'minimum':"L",'medium':"M",'high':"Q",'max':"H"}
if(options.errorCorrectLevel) {
var ec = options.errorCorrectLevel;
if(textKeys[ec]){
options.errorCorrectLevel = textKeys[ec];
}
}
return options;
};
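/*
 * Illustrative sketch (not part of the original source): the friendly names
 * map to the library's single-letter levels, so
 * parseOptions({errorCorrectLevel: 'minimum'}) yields {errorCorrectLevel: 'L'},
 * while an already-valid value such as 'M' passes through unchanged.
 */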
// returns Canvas Object with qr code drawn on it
/*
* String text, optional Object options, Function callback
*/
var draw = exports.draw = function(text,options,cb){
var args = Array.prototype.slice.call(arguments);
cb = args.pop();
if(typeof cb != 'function') {
throw new TypeError('last argument must be a function');
}
text = args.shift();
options = args.shift()||{};
options=parseOptions(options);
//NOTE the width and height are determined from within the qr code lib and are not configurable from the outside yet
var drawInstance = new QRCodeDraw();
drawInstance.draw(new Canvas(200,200),text,options,function(error,canvas,qrWidth){
cb(error,canvas,qrWidth)
});
};
//returns data uri for drawn qrcode png
exports.toDataURL = exports.toDataURI = function(text,options,cb){
if(typeof options == 'function') {
cb = options;
options = {};
}
draw(text,options,function(error,canvas){
if(error) {
cb(error);
} else {
canvas.toDataURL(cb);
}
});
}
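/*
 * Illustrative usage sketch (not part of the original source; the module
 * name below is hypothetical):
 *
 *   require('qrcode').toDataURL('hello world', function (error, dataURI) {
 *     if (!error) console.log(dataURI); // "data:image/png;base64,..."
 *   });
 */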
// streams a PNG of the QR code to the file at WSpath; returns the write stream
exports.toPNGStream = function (text, WSpath, options,cb) {
if(typeof options == 'function'){
cb = options;
options = {};
}
var out = fs.createWriteStream(WSpath);
draw(text, options, function (error,canvas) {
if(error) {
cb(error,'');
return;
}
var stream = canvas.createPNGStream();
stream.pipe(out);
stream.on('end', function () {
cb(error,'');
});
});
return out;
}
//returns bytes written to file
exports.save = function(path,text,options,cb){
if(typeof options == 'function'){
cb = options;
options = {};
}
draw(text, options, function(error,canvas){
var fd,buf,fdAndBuf = function(){
fs.write(fd, buf, 0, buf.length, 0, function(fsErr, written){
fs.close(fd);
if(cb) cb(fsErr, written);
});
};
//run non dependent async calls at the same time ish
canvas.toBuffer(function(canvasErr, _buf){
if(canvasErr) return cb(canvasErr);
buf = _buf
if(fd) fdAndBuf();
});
|
if(fsErr) return cb(fsErr);
fd = _fd
if(buf) fdAndBuf();
});
});
};
//
//this returns an array of points that have either a 0 or 1 value representing 0 for light and 1 for dark
//these values include points in the white edge of the qrcode because that edge is actually part of the spec
//
exports.drawBitArray = function(text,options,cb){
if(typeof options == 'function'){
cb = options;
options = {};
}
options = parseOptions(options);
var drawInstance = new QRCodeDraw();
drawInstance.drawBitArray(text,options,function(error,bits,width){
cb(error,bits,width);
});
}
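/*
 * Illustrative usage sketch (not part of the original source; assumes the
 * returned bit array is square, width * width, in row-major order): render
 * the modules as ASCII, one row per line.
 *
 *   exports.drawBitArray('hello', function (error, bits, width) {
 *     if (error) return console.error(error);
 *     for (var y = 0; y < width; y++) {
 *       var row = '';
 *       for (var x = 0; x < width; x++) {
 *         row += bits[y * width + x] ? '##' : '  ';
 *       }
 *       console.log(row);
 *     }
 *   });
 */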
//
// draw qr in your terminal!
//
exports.drawText = function(text,options,cb){
if(typeof options == 'function'){
cb = options;
options = {};
}
var drawInstance = new QRCodeDraw();
drawInstance.drawBitArray(text,function(error,bits,width){
if (!error) {
var code = terminalRender.renderBits(bits,width);
cb(error,code);
} else {
cb(error,null);
}
});
}
|
fs.open(path, 'w', 0666, function(fsErr,_fd){
|
main.go
|
package main
import (
"log"
"github.com/danstis/go-zimi/internal/version"
)
// Main entry point for the app.
func
|
() {
log.Printf("Version %q", version.Version)
}
|
main
|
bot_test.go
|
package bot
import (
"errors"
"github.com/bwmarrin/discordgo"
"github.com/juan-medina/cecibot/prototype"
"testing"
)
type fakeCfg struct {
}
func (f fakeCfg) GetOwner() string {
return "12345"
}
func (f fakeCfg) GetToken() string {
return "12345"
}
var fakeError = errors.New("fake error")
type FakeDiscordClientSpy struct {
failOnOpen bool
failOnClose bool
failOnChannelMessageSend bool
failOnAddHandler bool
failure bool
lastError error
lastMethod string
lastMessage string
lastChannelTo string
}
func (f *FakeDiscordClientSpy) recordError(method string, err error) error {
f.failure = true
f.lastError = err
f.lastMethod = method
return err
}
func (f *FakeDiscordClientSpy) recordSuccess(method string) {
f.failure = false
f.lastError = nil
f.lastMethod = method
}
func (f *FakeDiscordClientSpy) Open() error {
if f.failOnOpen {
return f.recordError("Open()", fakeError)
}
f.recordSuccess("Open()")
return nil
}
func (f *FakeDiscordClientSpy) Close() error {
if f.failOnClose {
return f.recordError("Close()", fakeError)
}
f.recordSuccess("Close()")
return nil
}
func (f *FakeDiscordClientSpy) AddHandler(interface{}) func() {
if f.failOnAddHandler {
_ = f.recordError("AddHandler()", fakeError)
return nil
}
f.recordSuccess("AddHandler()")
return nil
}
func (f *FakeDiscordClientSpy) ChannelMessageSend(channelID string, content string) (*discordgo.Message, error) {
if f.failOnChannelMessageSend {
return nil, f.recordError("ChannelMessageSend()", fakeError)
}
f.recordSuccess("ChannelMessageSend()")
f.lastMessage = content
f.lastChannelTo = channelID
return nil, nil
}
func assertSpySuccess(t *testing.T, spy *FakeDiscordClientSpy, method string) bool
|
func assertSpyFailure(t *testing.T, spy *FakeDiscordClientSpy, method string, err error) bool {
t.Helper()
if method != spy.lastMethod {
t.Errorf("want spy last method to be %q, got %q", method, spy.lastMethod)
return false
}
if spy.failure != true {
t.Errorf("want spy failure but was sucess")
return false
}
if spy.lastError != err {
t.Errorf("want spy last error to be %q, got %q", err, spy.lastError)
return false
}
return true
}
type fakeProcessor struct {
failOnInit bool
}
func (f *fakeProcessor) IsOwner(userId string) bool {
return false
}
func (f *fakeProcessor) GetCommandHelp(key string) string {
return ""
}
func (f *fakeProcessor) GetHelp() string {
return ""
}
func (f *fakeProcessor) Init(bot prototype.Bot) error {
if f.failOnInit {
return fakeError
}
return nil
}
func (f fakeProcessor) End() {
}
func (f fakeProcessor) ProcessMessage(text string, author string) string {
return author + " told me : " + text
}
func TestNew(t *testing.T) {
cfg := fakeCfg{}
got, err := New(cfg)
if err != nil {
t.Errorf("want not error, got %v", err)
return
}
if got == nil {
t.Errorf("want new bot, got nil")
return
}
if got.GetConfig() != cfg {
t.Errorf("want config %v, got %v", cfg, got.GetConfig())
return
}
}
func Test_bot_connect(t *testing.T) {
cfg := fakeCfg{}
discord := &FakeDiscordClientSpy{}
prc := &fakeProcessor{}
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
t.Run("it should connect correctly", func(t *testing.T) {
err := b.connect()
if err != nil {
t.Errorf("want not error, got %v", err)
return
}
assertSpySuccess(t, discord, "Open()")
})
t.Run("it should fail on connect", func(t *testing.T) {
discord.failOnOpen = true
err := b.connect()
if err == nil {
t.Errorf("want error, got nil")
return
}
if err != fakeError {
t.Errorf("want fake error, got %v", err)
return
}
assertSpyFailure(t, discord, "Open()", fakeError)
})
t.Run("it should fail with failing processor", func(t *testing.T) {
prc.failOnInit = true
err := b.connect()
if err != fakeError {
t.Errorf("want fake error, got %v", err)
return
}
prc.failOnInit = false
})
t.Run("it should fail without client", func(t *testing.T) {
b.discord = nil
err := b.connect()
if err != errInvalidDiscordClient {
t.Errorf("want invalid discord client, got %v", err)
return
}
})
}
func Test_bot_disconnect(t *testing.T) {
cfg := fakeCfg{}
discord := &FakeDiscordClientSpy{}
prc := &fakeProcessor{}
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
t.Run("it should disconnect correctly", func(t *testing.T) {
b.disconnect()
assertSpySuccess(t, discord, "Close()")
})
t.Run("it should not fail on disconnect failure", func(t *testing.T) {
discord.failOnClose = true
b.disconnect()
assertSpyFailure(t, discord, "Close()", fakeError)
})
t.Run("it should not fail without client", func(t *testing.T) {
b.discord = nil
b.disconnect()
})
}
func Test_bot_sendMessage(t *testing.T) {
cfg := fakeCfg{}
discord := &FakeDiscordClientSpy{}
prc := &fakeProcessor{}
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
t.Run("it should send message correctly", func(t *testing.T) {
b.sendMessage("chanel", "text")
assertSpySuccess(t, discord, "ChannelMessageSend()")
})
t.Run("it should fail sending message"+
"e", func(t *testing.T) {
discord.failOnChannelMessageSend = true
b.sendMessage("chanel", "text")
assertSpyFailure(t, discord, "ChannelMessageSend()", fakeError)
})
}
func Test_bot_Run(t *testing.T) {
noop := func() {}
cfg := fakeCfg{}
prc := &fakeProcessor{}
t.Run("it should not fail", func(t *testing.T) {
discord := &FakeDiscordClientSpy{}
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
b.wait = noop
err := b.Run()
if err != nil {
t.Errorf("want not error, got %v", err)
return
}
})
t.Run("it should fail on failure on open", func(t *testing.T) {
discord := &FakeDiscordClientSpy{}
discord.failOnOpen = true
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
b.wait = noop
err := b.Run()
if err != fakeError {
t.Errorf("want fake error, got %v", err)
return
}
assertSpyFailure(t, discord, "Open()", fakeError)
})
t.Run("it should not fail on failure on close", func(t *testing.T) {
discord := &FakeDiscordClientSpy{}
discord.failOnClose = true
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
b.wait = noop
err := b.Run()
if err != nil {
t.Errorf("want not error, got %v", err)
return
}
assertSpyFailure(t, discord, "Close()", fakeError)
})
t.Run("it should not fail on failure on addHandler", func(t *testing.T) {
discord := &FakeDiscordClientSpy{}
discord.failOnAddHandler = true
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
b.wait = noop
err := b.Run()
if err != nil {
t.Errorf("want not error, got %v", err)
return
}
assertSpySuccess(t, discord, "Close()")
})
}
func Test_bot_isSelfMessage(t *testing.T) {
cfg := fakeCfg{}
prc := &fakeProcessor{}
discord := &FakeDiscordClientSpy{}
discord.failOnClose = true
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
u := &discordgo.User{ID: "123"}
t.Run("should get a self message", func(t *testing.T) {
m := &discordgo.MessageCreate{
Message: &discordgo.Message{Author: u},
}
got := b.isSelfMessage(m, u)
if got != true {
t.Errorf("is should be self message got %v", got)
}
})
t.Run("should not get a self message", func(t *testing.T) {
m := &discordgo.MessageCreate{
Message: &discordgo.Message{Author: &discordgo.User{ID: "456"}},
}
got := b.isSelfMessage(m, u)
if got == true {
t.Errorf("is should not be self message got %v", got)
}
})
}
func Test_bot_removeBotMention(t *testing.T) {
cfg := fakeCfg{}
prc := &fakeProcessor{}
discord := &FakeDiscordClientSpy{}
discord.failOnClose = true
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
botUser := &discordgo.User{ID: "123"}
t.Run("we should remove the mention", func(t *testing.T) {
m := &discordgo.MessageCreate{
Message: &discordgo.Message{
Author: &discordgo.User{ID: "456"},
Content: "<@123> this is a message",
},
}
got := b.removeBotMention(m, botUser)
want := "this is a message"
if got != want {
t.Errorf("got %q, want %q", got, want)
}
})
t.Run("we should not remove the mention", func(t *testing.T) {
m := &discordgo.MessageCreate{
Message: &discordgo.Message{
Author: &discordgo.User{ID: "456"},
Content: "<@456> this is a another",
},
}
got := b.removeBotMention(m, botUser)
want := "<@456> this is a another"
if got != want {
t.Errorf("got %q, want %q", got, want)
}
})
t.Run("there is not mention", func(t *testing.T) {
m := &discordgo.MessageCreate{
Message: &discordgo.Message{
Author: &discordgo.User{ID: "456"},
Content: "there is no mention",
},
}
got := b.removeBotMention(m, botUser)
want := "there is no mention"
if got != want {
t.Errorf("got %q, want %q", got, want)
}
})
}
func Test_bot_getMessageToBoot(t *testing.T) {
cfg := fakeCfg{}
prc := &fakeProcessor{}
discord := &FakeDiscordClientSpy{}
discord.failOnClose = true
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
botUser := &discordgo.User{ID: "123"}
t.Run("we should get the message in a mention", func(t *testing.T) {
m := &discordgo.MessageCreate{
Message: &discordgo.Message{
Author: &discordgo.User{ID: "456"},
Content: "<@123> this is a message",
Mentions: []*discordgo.User{botUser},
},
}
got := b.getMessageToBoot(m, botUser)
want := "this is a message"
if got != want {
t.Errorf("got %q, want %q", got, want)
}
})
t.Run("we should not get the message without mention", func(t *testing.T) {
m := &discordgo.MessageCreate{
Message: &discordgo.Message{
Author: &discordgo.User{ID: "456"},
Content: "this is a message",
Mentions: []*discordgo.User{},
},
}
got := b.getMessageToBoot(m, botUser)
want := ""
if got != want {
t.Errorf("got %q, want %q", got, want)
}
})
}
func Test_bot_replyToMessage(t *testing.T) {
cfg := fakeCfg{}
prc := &fakeProcessor{}
discord := &FakeDiscordClientSpy{}
discord.failOnClose = true
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
m := &discordgo.MessageCreate{
Message: &discordgo.Message{
ChannelID: "chanel1",
Author: &discordgo.User{ID: "456"},
Content: "this is a message",
Mentions: []*discordgo.User{},
},
}
b.replyToMessage(m, "hello world")
wantChannel := "chanel1"
gotChannel := discord.lastChannelTo
if wantChannel != gotChannel {
t.Errorf("want message reply to %q, got %q", wantChannel, gotChannel)
}
wantMessage := "<@456> hello world"
gotMessage := discord.lastMessage
if wantMessage != gotMessage {
t.Errorf("want message %q, got %q", wantMessage, gotMessage)
}
}
func Test_getResponseToMessage(t *testing.T) {
cfg := fakeCfg{}
discord := &FakeDiscordClientSpy{}
prc := &fakeProcessor{}
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
got := b.getResponseToMessage("hello", "user1")
want := "user1 told me : hello"
if got != want {
t.Errorf("want message %q, got %q", want, got)
}
}
func Test_bot_onChannelMessage(t *testing.T) {
cfg := fakeCfg{}
prc := &fakeProcessor{}
discord := &FakeDiscordClientSpy{}
discord.failOnClose = true
b := &bot{
cfg: cfg,
discord: discord,
prc: prc,
}
botUser := &discordgo.User{ID: "123"}
sta := discordgo.NewState()
sta.User = botUser
ses := &discordgo.Session{State: sta}
m := &discordgo.MessageCreate{
Message: &discordgo.Message{
ChannelID: "chanel1",
Author: &discordgo.User{ID: "456"},
Content: "<@123> this is a message",
Mentions: []*discordgo.User{botUser},
},
}
b.onChannelMessage(ses, m)
wantChannel := "chanel1"
gotChannel := discord.lastChannelTo
if wantChannel != gotChannel {
t.Errorf("want message reply to %q, got %q", wantChannel, gotChannel)
}
wantMessage := "<@456> 456 told me : this is a message"
gotMessage := discord.lastMessage
if wantMessage != gotMessage {
t.Errorf("want message %q, got %q", wantMessage, gotMessage)
}
}
|
{
t.Helper()
if method != spy.lastMethod {
t.Errorf("want spy last method to be %q, got %q", method, spy.lastMethod)
return false
}
if spy.failure != false {
t.Errorf("want spy sucess what was failure")
return false
}
return true
}
|
custom.py
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=no-self-use,too-many-lines
from __future__ import print_function
import json
import os
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse # pylint: disable=import-error
# the urlopen is imported for automation purpose
from six.moves.urllib.request import urlopen # noqa, pylint: disable=import-error,unused-import,ungrouped-imports
from knack.log import get_logger
from knack.util import CLIError
from azure.cli.command_modules.vm._validators import _get_resource_group_from_vault_name
from azure.cli.core.commands.validators import validate_file_or_dict
from azure.cli.core.commands import LongRunningOperation, DeploymentOutputLongRunningOperation
from azure.cli.core.commands.client_factory import get_mgmt_service_client, get_data_service_client
from azure.cli.core.profiles import ResourceType
from azure.cli.core.util import sdk_no_wait
from ._vm_utils import read_content_if_is_file
from ._vm_diagnostics_templates import get_default_diag_config
from ._actions import (load_images_from_aliases_doc, load_extension_images_thru_services,
load_images_thru_services, _get_latest_image_version)
from ._client_factory import (_compute_client_factory, cf_public_ip_addresses, cf_vm_image_term,
_dev_test_labs_client_factory)
logger = get_logger(__name__)
# Use the same name as the portal, so people can update from both CLI and portal
# (VM doesn't allow multiple handlers for the same extension)
_ACCESS_EXT_HANDLER_NAME = 'enablevmaccess'
_LINUX_ACCESS_EXT = 'VMAccessForLinux'
_WINDOWS_ACCESS_EXT = 'VMAccessAgent'
_LINUX_DIAG_EXT = 'LinuxDiagnostic'
_WINDOWS_DIAG_EXT = 'IaaSDiagnostics'
_LINUX_OMS_AGENT_EXT = 'OmsAgentForLinux'
_WINDOWS_OMS_AGENT_EXT = 'MicrosoftMonitoringAgent'
extension_mappings = {
_LINUX_ACCESS_EXT: {
'version': '1.5',
'publisher': 'Microsoft.OSTCExtensions'
},
_WINDOWS_ACCESS_EXT: {
'version': '2.4',
'publisher': 'Microsoft.Compute'
},
_LINUX_DIAG_EXT: {
'version': '3.0',
'publisher': 'Microsoft.Azure.Diagnostics'
},
_WINDOWS_DIAG_EXT: {
'version': '1.5',
'publisher': 'Microsoft.Azure.Diagnostics'
},
_LINUX_OMS_AGENT_EXT: {
'version': '1.0',
'publisher': 'Microsoft.EnterpriseCloud.Monitoring'
},
_WINDOWS_OMS_AGENT_EXT: {
'version': '1.0',
'publisher': 'Microsoft.EnterpriseCloud.Monitoring'
}
}
def _construct_identity_info(identity_scope, identity_role, implicit_identity, external_identities):
info = {}
if identity_scope:
info['scope'] = identity_scope
info['role'] = str(identity_role) # could be DefaultStr, so convert to string
info['userAssignedIdentities'] = external_identities or {}
info['systemAssignedIdentity'] = implicit_identity or ''
return info
# for injecting test seams to produce a predictable role assignment id for playback
def _gen_guid():
import uuid
return uuid.uuid4()
def _get_access_extension_upgrade_info(extensions, name):
version = extension_mappings[name]['version']
publisher = extension_mappings[name]['publisher']
auto_upgrade = None
if extensions:
extension = next((e for e in extensions if e.name == name), None)
from distutils.version import LooseVersion # pylint: disable=no-name-in-module,import-error
if extension and LooseVersion(extension.type_handler_version) < LooseVersion(version):
auto_upgrade = True
elif extension and LooseVersion(extension.type_handler_version) > LooseVersion(version):
version = extension.type_handler_version
return publisher, version, auto_upgrade
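# Illustrative sketch (hypothetical versions): with the mapping above pinning
# version '1.5', an installed extension at '1.4' yields auto_upgrade=True,
# while one already at '2.0' keeps its own newer version instead.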
def _get_extension_instance_name(instance_view, publisher, extension_type_name,
suggested_name=None):
extension_instance_name = suggested_name or extension_type_name
full_type_name = '.'.join([publisher, extension_type_name])
if instance_view.extensions:
ext = next((x for x in instance_view.extensions
if x.type and (x.type.lower() == full_type_name.lower())), None)
if ext:
extension_instance_name = ext.name
return extension_instance_name
def _get_storage_management_client(cli_ctx):
return get_mgmt_service_client(cli_ctx, ResourceType.MGMT_STORAGE)
def _get_disk_lun(data_disks):
# start from 0, search for unused int for lun
if not data_disks:
return 0
existing_luns = sorted([d.lun for d in data_disks])
for i, current in enumerate(existing_luns):
if current != i:
return i
return len(existing_luns)
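# Illustrative sketch: existing LUNs [0, 1, 3] yield 2 (the first gap), while
# a gapless [0, 1, 2] yields 3 (the next LUN past the end).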
def _get_private_config(cli_ctx, resource_group_name, storage_account):
storage_mgmt_client = _get_storage_management_client(cli_ctx)
# pylint: disable=no-member
keys = storage_mgmt_client.storage_accounts.list_keys(resource_group_name, storage_account).keys
private_config = {
'storageAccountName': storage_account,
'storageAccountKey': keys[0].value
}
return private_config
def _get_resource_group_location(cli_ctx, resource_group_name):
client = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES)
# pylint: disable=no-member
return client.resource_groups.get(resource_group_name).location
def _get_sku_object(cmd, sku):
if cmd.supported_api_version(min_api='2017-03-30'):
DiskSku = cmd.get_models('DiskSku')
return DiskSku(name=sku)
return sku
def _grant_access(cmd, resource_group_name, name, duration_in_seconds, is_disk, access_level):
AccessLevel = cmd.get_models('AccessLevel')
client = _compute_client_factory(cmd.cli_ctx)
op = client.disks if is_disk else client.snapshots
return op.grant_access(resource_group_name, name, access_level or AccessLevel.read, duration_in_seconds)
def _is_linux_os(vm):
os_type = vm.storage_profile.os_disk.os_type.value if vm.storage_profile.os_disk.os_type else None
if os_type:
return os_type.lower() == 'linux'
# the os_type could be None for VM scaleset, let us check out os configurations
if vm.os_profile.linux_configuration:
return bool(vm.os_profile.linux_configuration)
return False
def _merge_secrets(secrets):
"""
Merge a list of secrets. Each secret should be a dict fitting the following JSON structure:
[{ "sourceVault": { "id": "value" },
"vaultCertificates": [{ "certificateUrl": "value",
"certificateStore": "cert store name (only on windows)"}] }]
The array of secrets is merged on sourceVault.id.
:param secrets:
:return:
"""
merged = {}
vc_name = 'vaultCertificates'
for outer in secrets:
for secret in outer:
if secret['sourceVault']['id'] not in merged:
merged[secret['sourceVault']['id']] = []
merged[secret['sourceVault']['id']] = \
secret[vc_name] + merged[secret['sourceVault']['id']]
# transform the reduced map to vm format
formatted = [{'sourceVault': {'id': source_id},
'vaultCertificates': value}
for source_id, value in list(merged.items())]
return formatted
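# Illustrative sketch (hypothetical values): two lists sharing a sourceVault
# id are merged into one entry whose certificates are concatenated:
#   _merge_secrets([
#       [{'sourceVault': {'id': 'v1'}, 'vaultCertificates': [cert_a]}],
#       [{'sourceVault': {'id': 'v1'}, 'vaultCertificates': [cert_b]}],
#   ])
#   == [{'sourceVault': {'id': 'v1'}, 'vaultCertificates': [cert_b, cert_a]}]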
def _normalize_extension_version(cli_ctx, publisher, vm_extension_name, version, location):
def _trim_away_build_number(version):
# workaround a known issue: the version must only contain "major.minor", even though
# "extension image list" gives more detail
return '.'.join(version.split('.')[0:2])
if not version:
result = load_extension_images_thru_services(cli_ctx, publisher, vm_extension_name, None, location,
show_latest=True, partial_match=False)
if not result:
raise CLIError('Failed to find the latest version for the extension "{}"'.format(vm_extension_name))
# with 'show_latest' enabled, we will only get one result.
version = result[0]['version']
version = _trim_away_build_number(version)
return version
def _parse_rg_name(strid):
'''From an ID, extract the contained (resource group, name) tuple.'''
from msrestazure.tools import parse_resource_id
parts = parse_resource_id(strid)
return (parts['resource_group'], parts['name'])
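# Illustrative sketch (hypothetical ID):
#   _parse_rg_name('/subscriptions/0000/resourceGroups/rg1'
#                  '/providers/Microsoft.Compute/virtualMachines/vm1')
#   returns ('rg1', 'vm1')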
def _set_sku(cmd, instance, sku):
if cmd.supported_api_version(min_api='2017-03-30'):
instance.sku = cmd.get_models('DiskSku')(name=sku)
else:
instance.account_type = sku
def _show_missing_access_warning(resource_group, name, command):
warn = ("No access was given yet to the '{1}', because '--scope' was not provided. "
"You should setup by creating a role assignment, e.g. "
"'az role assignment create --assignee <principal-id> --role contributor -g {0}' "
"would let it access the current resource group. To get the pricipal id, run "
"'az {2} show -g {0} -n {1} --query \"identity.principalId\" -otsv'".format(resource_group, name, command))
logger.warning(warn)
def _parse_aux_subscriptions(resource_id):
from msrestazure.tools import is_valid_resource_id, parse_resource_id
if is_valid_resource_id(resource_id):
res = parse_resource_id(resource_id)
return [res['subscription']]
return None
# Hide extension information from output, as the info is not correct and unhelpful; also,
# commands using it intend to hide the extension concept from users.
class ExtensionUpdateLongRunningOperation(LongRunningOperation): # pylint: disable=too-few-public-methods
pass
# region Disks (Managed)
def create_managed_disk(cmd, resource_group_name, disk_name, location=None, # pylint: disable=too-many-locals, too-many-branches, too-many-statements
size_gb=None, sku='Premium_LRS', os_type=None,
source=None, for_upload=None, upload_size_bytes=None, # pylint: disable=unused-argument
# below are generated internally from 'source'
source_blob_uri=None, source_disk=None, source_snapshot=None,
source_storage_account_id=None, no_wait=False, tags=None, zone=None,
disk_iops_read_write=None, disk_mbps_read_write=None, hyper_v_generation=None,
encryption_type=None, disk_encryption_set=None, max_shares=None,
disk_iops_read_only=None, disk_mbps_read_only=None,
image_reference=None, image_reference_lun=None,
gallery_image_reference=None, gallery_image_reference_lun=None,
network_access_policy=None, disk_access=None):
from msrestazure.tools import resource_id, is_valid_resource_id
from azure.cli.core.commands.client_factory import get_subscription_id
Disk, CreationData, DiskCreateOption, Encryption = cmd.get_models(
'Disk', 'CreationData', 'DiskCreateOption', 'Encryption')
location = location or _get_resource_group_location(cmd.cli_ctx, resource_group_name)
if source_blob_uri:
option = DiskCreateOption.import_enum
elif source_disk or source_snapshot:
option = DiskCreateOption.copy
elif for_upload:
option = DiskCreateOption.upload
elif image_reference or gallery_image_reference:
option = DiskCreateOption.from_image
else:
option = DiskCreateOption.empty
if source_storage_account_id is None and source_blob_uri is not None:
subscription_id = get_subscription_id(cmd.cli_ctx)
storage_account_name = source_blob_uri.split('.')[0].split('/')[-1]
source_storage_account_id = resource_id(
subscription=subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Storage', type='storageAccounts', name=storage_account_name)
if upload_size_bytes is not None and for_upload is not True:
raise CLIError('usage error: --upload-size-bytes should be used together with --for-upload')
if image_reference is not None:
if not is_valid_resource_id(image_reference):
# URN or name
terms = image_reference.split(':')
if len(terms) == 4: # URN
disk_publisher, disk_offer, disk_sku, disk_version = terms[0], terms[1], terms[2], terms[3]
if disk_version.lower() == 'latest':
disk_version = _get_latest_image_version(cmd.cli_ctx, location, disk_publisher, disk_offer,
disk_sku)
client = _compute_client_factory(cmd.cli_ctx)
response = client.virtual_machine_images.get(location, disk_publisher, disk_offer, disk_sku,
disk_version)
image_reference = response.id
else: # error
raise CLIError('usage error: --image-reference should be ID or URN (publisher:offer:sku:version).')
# image_reference is an ID now
image_reference = {'id': image_reference}
if image_reference_lun is not None:
image_reference['lun'] = image_reference_lun
if gallery_image_reference is not None:
gallery_image_reference = {'id': gallery_image_reference}
if gallery_image_reference_lun is not None:
gallery_image_reference['lun'] = gallery_image_reference_lun
creation_data = CreationData(create_option=option, source_uri=source_blob_uri,
image_reference=image_reference, gallery_image_reference=gallery_image_reference,
source_resource_id=source_disk or source_snapshot,
storage_account_id=source_storage_account_id,
upload_size_bytes=upload_size_bytes)
if size_gb is None and upload_size_bytes is None and (option == DiskCreateOption.empty or for_upload):
raise CLIError('usage error: --size-gb or --upload-size-bytes required to create an empty disk')
if disk_encryption_set is not None and not is_valid_resource_id(disk_encryption_set):
disk_encryption_set = resource_id(
subscription=get_subscription_id(cmd.cli_ctx), resource_group=resource_group_name,
namespace='Microsoft.Compute', type='diskEncryptionSets', name=disk_encryption_set)
if disk_access is not None and not is_valid_resource_id(disk_access):
disk_access = resource_id(
subscription=get_subscription_id(cmd.cli_ctx), resource_group=resource_group_name,
namespace='Microsoft.Compute', type='diskAccesses', name=disk_access)
encryption = None
if disk_encryption_set:
encryption = Encryption(type=encryption_type, disk_encryption_set_id=disk_encryption_set)
disk = Disk(location=location, creation_data=creation_data, tags=(tags or {}),
sku=_get_sku_object(cmd, sku), disk_size_gb=size_gb, os_type=os_type, encryption=encryption)
if hyper_v_generation:
disk.hyper_vgeneration = hyper_v_generation
if zone:
disk.zones = zone
if disk_iops_read_write is not None:
disk.disk_iops_read_write = disk_iops_read_write
if disk_mbps_read_write is not None:
disk.disk_mbps_read_write = disk_mbps_read_write
if max_shares is not None:
disk.max_shares = max_shares
if disk_iops_read_only is not None:
disk.disk_iops_read_only = disk_iops_read_only
if disk_mbps_read_only is not None:
disk.disk_mbps_read_only = disk_mbps_read_only
if network_access_policy is not None:
disk.network_access_policy = network_access_policy
if disk_access is not None:
disk.disk_access_id = disk_access
client = _compute_client_factory(cmd.cli_ctx)
return sdk_no_wait(no_wait, client.disks.create_or_update, resource_group_name, disk_name, disk)
def grant_disk_access(cmd, resource_group_name, disk_name, duration_in_seconds, access_level=None):
return _grant_access(cmd, resource_group_name, disk_name, duration_in_seconds, is_disk=True,
access_level=access_level)
def list_managed_disks(cmd, resource_group_name=None):
client = _compute_client_factory(cmd.cli_ctx)
if resource_group_name:
return client.disks.list_by_resource_group(resource_group_name)
return client.disks.list()
def update_managed_disk(cmd, resource_group_name, instance, size_gb=None, sku=None, disk_iops_read_write=None,
disk_mbps_read_write=None, encryption_type=None, disk_encryption_set=None,
network_access_policy=None, disk_access=None):
|
# endregion
# region Images (Managed)
def create_image(cmd, resource_group_name, name, source, os_type=None, data_disk_sources=None, location=None, # pylint: disable=too-many-locals,unused-argument
# below are generated internally from 'source' and 'data_disk_sources'
source_virtual_machine=None, storage_sku=None, hyper_v_generation=None,
os_blob_uri=None, data_blob_uris=None,
os_snapshot=None, data_snapshots=None,
os_disk=None, os_disk_caching=None, data_disks=None, data_disk_caching=None,
tags=None, zone_resilient=None):
ImageOSDisk, ImageDataDisk, ImageStorageProfile, Image, SubResource, OperatingSystemStateTypes = cmd.get_models(
'ImageOSDisk', 'ImageDataDisk', 'ImageStorageProfile', 'Image', 'SubResource', 'OperatingSystemStateTypes')
if source_virtual_machine:
location = location or _get_resource_group_location(cmd.cli_ctx, resource_group_name)
image_storage_profile = None if zone_resilient is None else ImageStorageProfile(zone_resilient=zone_resilient)
image = Image(location=location, source_virtual_machine=SubResource(id=source_virtual_machine),
storage_profile=image_storage_profile, tags=(tags or {}))
else:
os_disk = ImageOSDisk(os_type=os_type,
os_state=OperatingSystemStateTypes.generalized,
caching=os_disk_caching,
snapshot=SubResource(id=os_snapshot) if os_snapshot else None,
managed_disk=SubResource(id=os_disk) if os_disk else None,
blob_uri=os_blob_uri,
storage_account_type=storage_sku)
all_data_disks = []
lun = 0
if data_blob_uris:
for d in data_blob_uris:
all_data_disks.append(ImageDataDisk(lun=lun, blob_uri=d, caching=data_disk_caching))
lun += 1
if data_snapshots:
for d in data_snapshots:
all_data_disks.append(ImageDataDisk(lun=lun, snapshot=SubResource(id=d), caching=data_disk_caching))
lun += 1
if data_disks:
for d in data_disks:
all_data_disks.append(ImageDataDisk(lun=lun, managed_disk=SubResource(id=d), caching=data_disk_caching))
lun += 1
image_storage_profile = ImageStorageProfile(os_disk=os_disk, data_disks=all_data_disks)
if zone_resilient is not None:
image_storage_profile.zone_resilient = zone_resilient
location = location or _get_resource_group_location(cmd.cli_ctx, resource_group_name)
# pylint: disable=no-member
image = Image(location=location, storage_profile=image_storage_profile, tags=(tags or {}))
if hyper_v_generation:
image.hyper_vgeneration = hyper_v_generation
client = _compute_client_factory(cmd.cli_ctx)
return client.images.create_or_update(resource_group_name, name, image)
def update_image(instance, tags=None):
if tags is not None:
instance.tags = tags
return instance
def list_images(cmd, resource_group_name=None):
client = _compute_client_factory(cmd.cli_ctx)
if resource_group_name:
return client.images.list_by_resource_group(resource_group_name)
return client.images.list()
# endregion
# region Snapshots
# pylint: disable=unused-argument,too-many-locals
def create_snapshot(cmd, resource_group_name, snapshot_name, location=None, size_gb=None, sku='Standard_LRS',
source=None, for_upload=None, incremental=None,
# below are generated internally from 'source'
source_blob_uri=None, source_disk=None, source_snapshot=None, source_storage_account_id=None,
hyper_v_generation=None, tags=None, no_wait=False, disk_encryption_set=None,
encryption_type=None, network_access_policy=None, disk_access=None):
from msrestazure.tools import resource_id, is_valid_resource_id
from azure.cli.core.commands.client_factory import get_subscription_id
Snapshot, CreationData, DiskCreateOption, Encryption = cmd.get_models(
'Snapshot', 'CreationData', 'DiskCreateOption', 'Encryption')
location = location or _get_resource_group_location(cmd.cli_ctx, resource_group_name)
if source_blob_uri:
option = DiskCreateOption.import_enum
elif source_disk or source_snapshot:
option = DiskCreateOption.copy
elif for_upload:
option = DiskCreateOption.upload
else:
option = DiskCreateOption.empty
creation_data = CreationData(create_option=option, source_uri=source_blob_uri,
image_reference=None,
source_resource_id=source_disk or source_snapshot,
storage_account_id=source_storage_account_id)
if size_gb is None and option == DiskCreateOption.empty:
raise CLIError('Please supply size for the snapshots')
if disk_encryption_set is not None and not is_valid_resource_id(disk_encryption_set):
disk_encryption_set = resource_id(
subscription=get_subscription_id(cmd.cli_ctx), resource_group=resource_group_name,
namespace='Microsoft.Compute', type='diskEncryptionSets', name=disk_encryption_set)
if disk_access is not None and not is_valid_resource_id(disk_access):
disk_access = resource_id(
subscription=get_subscription_id(cmd.cli_ctx), resource_group=resource_group_name,
namespace='Microsoft.Compute', type='diskAccesses', name=disk_access)
if disk_encryption_set is not None and encryption_type is None:
raise CLIError('usage error: Please specify --encryption-type.')
if encryption_type is not None:
encryption = Encryption(type=encryption_type, disk_encryption_set_id=disk_encryption_set)
else:
encryption = None
snapshot = Snapshot(location=location, creation_data=creation_data, tags=(tags or {}),
sku=_get_sku_object(cmd, sku), disk_size_gb=size_gb, incremental=incremental,
encryption=encryption)
if hyper_v_generation:
snapshot.hyper_vgeneration = hyper_v_generation
if network_access_policy is not None:
snapshot.network_access_policy = network_access_policy
if disk_access is not None:
snapshot.disk_access_id = disk_access
client = _compute_client_factory(cmd.cli_ctx)
return sdk_no_wait(no_wait, client.snapshots.create_or_update, resource_group_name, snapshot_name, snapshot)
def grant_snapshot_access(cmd, resource_group_name, snapshot_name, duration_in_seconds, access_level=None):
return _grant_access(cmd, resource_group_name, snapshot_name, duration_in_seconds, is_disk=False,
access_level=access_level)
def list_snapshots(cmd, resource_group_name=None):
client = _compute_client_factory(cmd.cli_ctx)
if resource_group_name:
return client.snapshots.list_by_resource_group(resource_group_name)
return client.snapshots.list()
def update_snapshot(cmd, resource_group_name, instance, sku=None, disk_encryption_set=None,
encryption_type=None, network_access_policy=None, disk_access=None):
from msrestazure.tools import resource_id, is_valid_resource_id
from azure.cli.core.commands.client_factory import get_subscription_id
if sku is not None:
_set_sku(cmd, instance, sku)
if disk_encryption_set is not None:
if instance.encryption.type != 'EncryptionAtRestWithCustomerKey' and \
encryption_type != 'EncryptionAtRestWithCustomerKey':
raise CLIError('usage error: Please set --encryption-type to EncryptionAtRestWithCustomerKey')
if not is_valid_resource_id(disk_encryption_set):
disk_encryption_set = resource_id(
subscription=get_subscription_id(cmd.cli_ctx), resource_group=resource_group_name,
namespace='Microsoft.Compute', type='diskEncryptionSets', name=disk_encryption_set)
instance.encryption.disk_encryption_set_id = disk_encryption_set
if encryption_type is not None:
instance.encryption.type = encryption_type
if network_access_policy is not None:
instance.network_access_policy = network_access_policy
if disk_access is not None and not is_valid_resource_id(disk_access):
disk_access = resource_id(
subscription=get_subscription_id(cmd.cli_ctx), resource_group=resource_group_name,
namespace='Microsoft.Compute', type='diskAccesses', name=disk_access)
instance.disk_access_id = disk_access
return instance
# endregion
# region VirtualMachines Identity
def show_vm_identity(cmd, resource_group_name, vm_name):
client = _compute_client_factory(cmd.cli_ctx)
return client.virtual_machines.get(resource_group_name, vm_name).identity
def show_vmss_identity(cmd, resource_group_name, vm_name):
client = _compute_client_factory(cmd.cli_ctx)
return client.virtual_machine_scale_sets.get(resource_group_name, vm_name).identity
def assign_vm_identity(cmd, resource_group_name, vm_name, assign_identity=None, identity_role='Contributor',
identity_role_id=None, identity_scope=None):
VirtualMachineIdentity, ResourceIdentityType, VirtualMachineUpdate = cmd.get_models('VirtualMachineIdentity',
'ResourceIdentityType',
'VirtualMachineUpdate')
VirtualMachineIdentityUserAssignedIdentitiesValue = cmd.get_models(
'VirtualMachineIdentityUserAssignedIdentitiesValue')
from azure.cli.core.commands.arm import assign_identity as assign_identity_helper
client = _compute_client_factory(cmd.cli_ctx)
_, _, external_identities, enable_local_identity = _build_identities_info(assign_identity)
def getter():
return client.virtual_machines.get(resource_group_name, vm_name)
def setter(vm, external_identities=external_identities):
if vm.identity and vm.identity.type == ResourceIdentityType.system_assigned_user_assigned:
identity_types = ResourceIdentityType.system_assigned_user_assigned
elif vm.identity and vm.identity.type == ResourceIdentityType.system_assigned and external_identities:
identity_types = ResourceIdentityType.system_assigned_user_assigned
elif vm.identity and vm.identity.type == ResourceIdentityType.user_assigned and enable_local_identity:
identity_types = ResourceIdentityType.system_assigned_user_assigned
elif external_identities and enable_local_identity:
identity_types = ResourceIdentityType.system_assigned_user_assigned
elif external_identities:
identity_types = ResourceIdentityType.user_assigned
else:
identity_types = ResourceIdentityType.system_assigned
vm.identity = VirtualMachineIdentity(type=identity_types)
if external_identities:
vm.identity.user_assigned_identities = {}
for identity in external_identities:
vm.identity.user_assigned_identities[identity] = VirtualMachineIdentityUserAssignedIdentitiesValue()
vm_patch = VirtualMachineUpdate()
vm_patch.identity = vm.identity
return patch_vm(cmd, resource_group_name, vm_name, vm_patch)
assign_identity_helper(cmd.cli_ctx, getter, setter, identity_role=identity_role_id, identity_scope=identity_scope)
vm = client.virtual_machines.get(resource_group_name, vm_name)
return _construct_identity_info(identity_scope, identity_role, vm.identity.principal_id,
vm.identity.user_assigned_identities)
# endregion
# region VirtualMachines
def capture_vm(cmd, resource_group_name, vm_name, vhd_name_prefix,
storage_container='vhds', overwrite=True):
VirtualMachineCaptureParameters = cmd.get_models('VirtualMachineCaptureParameters')
client = _compute_client_factory(cmd.cli_ctx)
parameter = VirtualMachineCaptureParameters(vhd_prefix=vhd_name_prefix,
destination_container_name=storage_container,
overwrite_vhds=overwrite)
poller = client.virtual_machines.capture(resource_group_name, vm_name, parameter)
result = LongRunningOperation(cmd.cli_ctx)(poller)
output = getattr(result, 'output', None) or result.resources[0]
print(json.dumps(output, indent=2)) # pylint: disable=no-member
# pylint: disable=too-many-locals, unused-argument, too-many-statements, too-many-branches
def create_vm(cmd, vm_name, resource_group_name, image=None, size='Standard_DS1_v2', location=None, tags=None,
no_wait=False, authentication_type=None, admin_password=None, computer_name=None,
admin_username=None, ssh_dest_key_path=None, ssh_key_value=None, generate_ssh_keys=False,
availability_set=None, nics=None, nsg=None, nsg_rule=None, accelerated_networking=None,
private_ip_address=None, public_ip_address=None, public_ip_address_allocation='dynamic',
public_ip_address_dns_name=None, public_ip_sku=None, os_disk_name=None, os_type=None,
storage_account=None, os_caching=None, data_caching=None, storage_container_name=None, storage_sku=None,
use_unmanaged_disk=False, attach_os_disk=None, os_disk_size_gb=None, attach_data_disks=None,
data_disk_sizes_gb=None, disk_info=None,
vnet_name=None, vnet_address_prefix='10.0.0.0/16', subnet=None, subnet_address_prefix='10.0.0.0/24',
storage_profile=None, os_publisher=None, os_offer=None, os_sku=None, os_version=None,
storage_account_type=None, vnet_type=None, nsg_type=None, public_ip_address_type=None, nic_type=None,
validate=False, custom_data=None, secrets=None, plan_name=None, plan_product=None, plan_publisher=None,
plan_promotion_code=None, license_type=None, assign_identity=None, identity_scope=None,
identity_role='Contributor', identity_role_id=None, application_security_groups=None, zone=None,
boot_diagnostics_storage=None, ultra_ssd_enabled=None, ephemeral_os_disk=None,
proximity_placement_group=None, dedicated_host=None, dedicated_host_group=None, aux_subscriptions=None,
priority=None, max_price=None, eviction_policy=None, enable_agent=None, workspace=None, vmss=None,
os_disk_encryption_set=None, data_disk_encryption_sets=None, specialized=None,
encryption_at_host=None, enable_auto_update=None, patch_mode=None):
from azure.cli.core.commands.client_factory import get_subscription_id
from azure.cli.core.util import random_string, hash_string
from azure.cli.core.commands.arm import ArmTemplateBuilder
from azure.cli.command_modules.vm._template_builder import (build_vm_resource,
build_storage_account_resource, build_nic_resource,
build_vnet_resource, build_nsg_resource,
build_public_ip_resource, StorageProfile,
build_msi_role_assignment,
build_vm_linux_log_analytics_workspace_agent,
build_vm_windows_log_analytics_workspace_agent)
from msrestazure.tools import resource_id, is_valid_resource_id, parse_resource_id
subscription_id = get_subscription_id(cmd.cli_ctx)
if os_disk_encryption_set is not None and not is_valid_resource_id(os_disk_encryption_set):
os_disk_encryption_set = resource_id(
subscription=subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Compute', type='diskEncryptionSets', name=os_disk_encryption_set)
if data_disk_encryption_sets is None:
data_disk_encryption_sets = []
for i, des in enumerate(data_disk_encryption_sets):
if des is not None and not is_valid_resource_id(des):
data_disk_encryption_sets[i] = resource_id(
subscription=subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Compute', type='diskEncryptionSets', name=des)
storage_sku = disk_info['os'].get('storageAccountType')
network_id_template = resource_id(
subscription=subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Network')
vm_id = resource_id(
subscription=subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Compute', type='virtualMachines', name=vm_name)
# determine final defaults and calculated values
tags = tags or {}
os_disk_name = os_disk_name or ('osdisk_{}'.format(hash_string(vm_id, length=10)) if use_unmanaged_disk else None)
storage_container_name = storage_container_name or 'vhds'
# Build up the ARM template
master_template = ArmTemplateBuilder()
vm_dependencies = []
if storage_account_type == 'new':
storage_account = storage_account or 'vhdstorage{}'.format(
hash_string(vm_id, length=14, force_lower=True))
vm_dependencies.append('Microsoft.Storage/storageAccounts/{}'.format(storage_account))
master_template.add_resource(build_storage_account_resource(cmd, storage_account, location,
tags, storage_sku))
nic_name = None
if nic_type == 'new':
nic_name = '{}VMNic'.format(vm_name)
vm_dependencies.append('Microsoft.Network/networkInterfaces/{}'.format(nic_name))
nic_dependencies = []
if vnet_type == 'new':
subnet = subnet or '{}Subnet'.format(vm_name)
vnet_exists = False
if vnet_name:
from azure.cli.command_modules.vm._vm_utils import check_existence
vnet_exists = \
check_existence(cmd.cli_ctx, vnet_name, resource_group_name, 'Microsoft.Network', 'virtualNetworks')
if vnet_exists:
from azure.cli.core.commands import cached_get, cached_put, upsert_to_collection
from azure.cli.command_modules.vm._validators import get_network_client
client = get_network_client(cmd.cli_ctx).virtual_networks
vnet = cached_get(cmd, client.get, resource_group_name, vnet_name)
Subnet = cmd.get_models('Subnet', resource_type=ResourceType.MGMT_NETWORK)
subnet_obj = Subnet(
name=subnet,
address_prefixes=[subnet_address_prefix],
address_prefix=subnet_address_prefix
)
upsert_to_collection(vnet, 'subnets', subnet_obj, 'name')
try:
cached_put(cmd, client.create_or_update, vnet, resource_group_name, vnet_name).result()
except Exception:
raise CLIError('Subnet({}) does not exist, but failed to create a new subnet with address '
'prefix {}. It may be caused by name or address prefix conflict. Please specify '
'an appropriate subnet name with --subnet or a valid address prefix value with '
'--subnet-address-prefix.'.format(subnet, subnet_address_prefix))
if not vnet_exists:
vnet_name = vnet_name or '{}VNET'.format(vm_name)
nic_dependencies.append('Microsoft.Network/virtualNetworks/{}'.format(vnet_name))
master_template.add_resource(build_vnet_resource(
cmd, vnet_name, location, tags, vnet_address_prefix, subnet, subnet_address_prefix))
if nsg_type == 'new':
if nsg_rule is None:
nsg_rule = 'RDP' if os_type.lower() == 'windows' else 'SSH'
nsg = nsg or '{}NSG'.format(vm_name)
nic_dependencies.append('Microsoft.Network/networkSecurityGroups/{}'.format(nsg))
master_template.add_resource(build_nsg_resource(cmd, nsg, location, tags, nsg_rule))
if public_ip_address_type == 'new':
public_ip_address = public_ip_address or '{}PublicIP'.format(vm_name)
nic_dependencies.append('Microsoft.Network/publicIpAddresses/{}'.format(
public_ip_address))
master_template.add_resource(build_public_ip_resource(cmd, public_ip_address, location, tags,
public_ip_address_allocation,
public_ip_address_dns_name,
public_ip_sku, zone))
subnet_id = subnet if is_valid_resource_id(subnet) else \
'{}/virtualNetworks/{}/subnets/{}'.format(network_id_template, vnet_name, subnet)
nsg_id = None
if nsg:
nsg_id = nsg if is_valid_resource_id(nsg) else \
'{}/networkSecurityGroups/{}'.format(network_id_template, nsg)
public_ip_address_id = None
if public_ip_address:
public_ip_address_id = public_ip_address if is_valid_resource_id(public_ip_address) \
else '{}/publicIPAddresses/{}'.format(network_id_template, public_ip_address)
nics = [
{'id': '{}/networkInterfaces/{}'.format(network_id_template, nic_name)}
]
nic_resource = build_nic_resource(
cmd, nic_name, location, tags, vm_name, subnet_id, private_ip_address, nsg_id,
public_ip_address_id, application_security_groups, accelerated_networking=accelerated_networking)
nic_resource['dependsOn'] = nic_dependencies
master_template.add_resource(nic_resource)
else:
# Using an existing NIC
invalid_parameters = [nsg, public_ip_address, subnet, vnet_name, application_security_groups]
if any(invalid_parameters):
raise CLIError('When specifying an existing NIC, do not specify NSG, '
'public IP, ASGs, VNet or subnet.')
if accelerated_networking is not None:
            logger.warning('When specifying an existing NIC, do not specify accelerated networking. '
                           '--accelerated-networking is ignored for now. '
                           'This will trigger an error instead of a warning in future releases.')
os_vhd_uri = None
if storage_profile in [StorageProfile.SACustomImage, StorageProfile.SAPirImage]:
storage_account_name = storage_account.rsplit('/', 1)
storage_account_name = storage_account_name[1] if \
len(storage_account_name) > 1 else storage_account_name[0]
os_vhd_uri = 'https://{}.blob.{}/{}/{}.vhd'.format(
storage_account_name, cmd.cli_ctx.cloud.suffixes.storage_endpoint, storage_container_name, os_disk_name)
elif storage_profile == StorageProfile.SASpecializedOSDisk:
os_vhd_uri = attach_os_disk
os_disk_name = attach_os_disk.rsplit('/', 1)[1][:-4]
if custom_data:
custom_data = read_content_if_is_file(custom_data)
if secrets:
secrets = _merge_secrets([validate_file_or_dict(secret) for secret in secrets])
vm_resource = build_vm_resource(
cmd=cmd, name=vm_name, location=location, tags=tags, size=size, storage_profile=storage_profile, nics=nics,
admin_username=admin_username, availability_set_id=availability_set, admin_password=admin_password,
ssh_key_values=ssh_key_value, ssh_key_path=ssh_dest_key_path, image_reference=image,
os_disk_name=os_disk_name, custom_image_os_type=os_type, authentication_type=authentication_type,
os_publisher=os_publisher, os_offer=os_offer, os_sku=os_sku, os_version=os_version, os_vhd_uri=os_vhd_uri,
attach_os_disk=attach_os_disk, os_disk_size_gb=os_disk_size_gb, custom_data=custom_data, secrets=secrets,
license_type=license_type, zone=zone, disk_info=disk_info,
boot_diagnostics_storage_uri=boot_diagnostics_storage, ultra_ssd_enabled=ultra_ssd_enabled,
proximity_placement_group=proximity_placement_group, computer_name=computer_name,
dedicated_host=dedicated_host, priority=priority, max_price=max_price, eviction_policy=eviction_policy,
enable_agent=enable_agent, vmss=vmss, os_disk_encryption_set=os_disk_encryption_set,
data_disk_encryption_sets=data_disk_encryption_sets, specialized=specialized,
encryption_at_host=encryption_at_host, dedicated_host_group=dedicated_host_group,
enable_auto_update=enable_auto_update, patch_mode=patch_mode)
vm_resource['dependsOn'] = vm_dependencies
if plan_name:
vm_resource['plan'] = {
'name': plan_name,
'publisher': plan_publisher,
'product': plan_product,
'promotionCode': plan_promotion_code
}
enable_local_identity = None
if assign_identity is not None:
vm_resource['identity'], _, _, enable_local_identity = _build_identities_info(assign_identity)
role_assignment_guid = None
if identity_scope:
role_assignment_guid = str(_gen_guid())
master_template.add_resource(build_msi_role_assignment(vm_name, vm_id, identity_role_id,
role_assignment_guid, identity_scope))
if workspace is not None:
workspace_id = _prepare_workspace(cmd, resource_group_name, workspace)
master_template.add_secure_parameter('workspaceId', workspace_id)
if os_type.lower() == 'linux':
vm_mmaExtension_resource = build_vm_linux_log_analytics_workspace_agent(cmd, vm_name, location)
master_template.add_resource(vm_mmaExtension_resource)
elif os_type.lower() == 'windows':
vm_mmaExtension_resource = build_vm_windows_log_analytics_workspace_agent(cmd, vm_name, location)
master_template.add_resource(vm_mmaExtension_resource)
else:
logger.warning("Unsupported OS type. Skip the connection step for log analytics workspace.")
master_template.add_resource(vm_resource)
if admin_password:
master_template.add_secure_parameter('adminPassword', admin_password)
template = master_template.build()
parameters = master_template.build_parameters()
# deploy ARM template
deployment_name = 'vm_deploy_' + random_string(32)
client = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
aux_subscriptions=aux_subscriptions).deployments
DeploymentProperties = cmd.get_models('DeploymentProperties', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES)
properties = DeploymentProperties(template=template, parameters=parameters, mode='incremental')
if validate:
from azure.cli.command_modules.vm._vm_utils import log_pprint_template
log_pprint_template(template)
log_pprint_template(parameters)
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
Deployment = cmd.get_models('Deployment', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES)
deployment = Deployment(properties=properties)
if validate:
validation_poller = client.validate(resource_group_name, deployment_name, deployment)
return LongRunningOperation(cmd.cli_ctx)(validation_poller)
# creates the VM deployment
if no_wait:
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, deployment_name, deployment)
LongRunningOperation(cmd.cli_ctx)(client.create_or_update(resource_group_name, deployment_name, deployment))
else:
if validate:
return client.validate(resource_group_name, deployment_name, properties)
# creates the VM deployment
if no_wait:
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, deployment_name, properties)
LongRunningOperation(cmd.cli_ctx)(client.create_or_update(resource_group_name, deployment_name, properties))
vm = get_vm_details(cmd, resource_group_name, vm_name)
if assign_identity is not None:
if enable_local_identity and not identity_scope:
_show_missing_access_warning(resource_group_name, vm_name, 'vm')
setattr(vm, 'identity', _construct_identity_info(identity_scope, identity_role, vm.identity.principal_id,
vm.identity.user_assigned_identities))
if workspace is not None:
workspace_name = parse_resource_id(workspace_id)['name']
_set_data_source_for_workspace(cmd, os_type, resource_group_name, workspace_name)
return vm
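# Illustrative sketch (hypothetical values): the template-based flow above is what backs an
# invocation such as:
#   az vm create -g MyResourceGroup -n MyVm --image UbuntuLTS --workspace MyWorkspace
# The ARM template is assembled in memory, deployed with mode='incremental', and the created VM
# is re-read via get_vm_details before being returned.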
def auto_shutdown_vm(cmd, resource_group_name, vm_name, off=None, email=None, webhook=None, time=None,
location=None):
from msrestazure.tools import resource_id
from azure.mgmt.devtestlabs.models import Schedule
from azure.cli.core.commands.client_factory import get_subscription_id
subscription_id = get_subscription_id(cmd.cli_ctx)
client = _dev_test_labs_client_factory(cmd.cli_ctx, subscription_id)
name = 'shutdown-computevm-' + vm_name
vm_id = resource_id(subscription=client.config.subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Compute', type='virtualMachines', name=vm_name)
if off:
if email is not None or webhook is not None or time is not None:
            # We don't want to disrupt users, so warn instead of raising an error.
            logger.warning('If --off is specified, other parameters will be ignored.')
return client.global_schedules.delete(resource_group_name, name)
if time is None:
raise CLIError('usage error: --time is a required parameter')
daily_recurrence = {'time': time}
notification_settings = None
if webhook:
notification_settings = {
'emailRecipient': email,
'webhookUrl': webhook,
'timeInMinutes': 30,
'status': 'Enabled'
}
schedule = Schedule(status='Enabled',
target_resource_id=vm_id,
daily_recurrence=daily_recurrence,
notification_settings=notification_settings,
time_zone_id='UTC',
task_type='ComputeVmShutdownTask',
location=location)
return client.global_schedules.create_or_update(resource_group_name, name, schedule)
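# Illustrative sketch (hypothetical values): schedule a daily shutdown, or remove it with --off:
#   az vm auto-shutdown -g MyResourceGroup -n MyVm --time 1730 \
#       --email admin@example.com --webhook https://example.com/hook
#   az vm auto-shutdown -g MyResourceGroup -n MyVm --off
# Note that notification settings are only attached to the schedule when --webhook is supplied.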
def get_instance_view(cmd, resource_group_name, vm_name):
return get_vm(cmd, resource_group_name, vm_name, 'instanceView')
def get_vm(cmd, resource_group_name, vm_name, expand=None):
client = _compute_client_factory(cmd.cli_ctx)
return client.virtual_machines.get(resource_group_name, vm_name, expand=expand)
def get_vm_details(cmd, resource_group_name, vm_name):
from msrestazure.tools import parse_resource_id
from azure.cli.command_modules.vm._vm_utils import get_target_network_api
result = get_instance_view(cmd, resource_group_name, vm_name)
network_client = get_mgmt_service_client(
cmd.cli_ctx, ResourceType.MGMT_NETWORK, api_version=get_target_network_api(cmd.cli_ctx))
public_ips = []
fqdns = []
private_ips = []
mac_addresses = []
# pylint: disable=line-too-long,no-member
for nic_ref in result.network_profile.network_interfaces:
nic_parts = parse_resource_id(nic_ref.id)
nic = network_client.network_interfaces.get(nic_parts['resource_group'], nic_parts['name'])
if nic.mac_address:
mac_addresses.append(nic.mac_address)
for ip_configuration in nic.ip_configurations:
if ip_configuration.private_ip_address:
private_ips.append(ip_configuration.private_ip_address)
if ip_configuration.public_ip_address:
res = parse_resource_id(ip_configuration.public_ip_address.id)
public_ip_info = network_client.public_ip_addresses.get(res['resource_group'],
res['name'])
if public_ip_info.ip_address:
public_ips.append(public_ip_info.ip_address)
if public_ip_info.dns_settings:
fqdns.append(public_ip_info.dns_settings.fqdn)
setattr(result, 'power_state',
','.join([s.display_status for s in result.instance_view.statuses if s.code.startswith('PowerState/')]))
setattr(result, 'public_ips', ','.join(public_ips))
setattr(result, 'fqdns', ','.join(fqdns))
setattr(result, 'private_ips', ','.join(private_ips))
setattr(result, 'mac_addresses', ','.join(mac_addresses))
    del result.instance_view  # drop the remaining instance_view details; callers only need the summary fields above
return result
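# For reference: get_vm_details augments the compute model with flattened, comma-joined string
# fields ('power_state', 'public_ips', 'fqdns', 'private_ips', 'mac_addresses') and drops the
# raw instance_view; show_vm and list_vm (with show_details) rely on exactly these fields.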
def list_skus(cmd, location=None, size=None, zone=None, show_all=None, resource_type=None):
from ._vm_utils import list_sku_info
result = list_sku_info(cmd.cli_ctx, location)
if not show_all:
result = [x for x in result if not [y for y in (x.restrictions or [])
if y.reason_code == 'NotAvailableForSubscription']]
if resource_type:
result = [x for x in result if x.resource_type.lower() == resource_type.lower()]
if size:
result = [x for x in result if x.resource_type == 'virtualMachines' and size.lower() in x.name.lower()]
if zone:
result = [x for x in result if x.location_info and x.location_info[0].zones]
return result
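# Illustrative filter semantics (hypothetical values): list_skus(cmd, location='westus2',
# size='ds2', zone=True) keeps only 'virtualMachines' SKUs whose name contains 'ds2' and that
# advertise zone support, after first dropping SKUs restricted with
# 'NotAvailableForSubscription' unless show_all is set.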
def list_vm(cmd, resource_group_name=None, show_details=False):
ccf = _compute_client_factory(cmd.cli_ctx)
vm_list = ccf.virtual_machines.list(resource_group_name=resource_group_name) \
if resource_group_name else ccf.virtual_machines.list_all()
if show_details:
return [get_vm_details(cmd, _parse_rg_name(v.id)[0], v.name) for v in vm_list]
return list(vm_list)
def list_vm_ip_addresses(cmd, resource_group_name=None, vm_name=None):
    # We start with the NICs, since they sit at the center of all the data we want to
    # collect for a VM (as long as we need no info on the VM other than what is
    # available in its ID, we avoid any calls to the compute RP).
    #
    # Since there is no guarantee that a NIC is in the same resource group as a given
    # virtual machine, we can't constrain the lookup to a single group...
network_client = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_NETWORK)
nics = network_client.network_interfaces.list_all()
public_ip_addresses = network_client.public_ip_addresses.list_all()
ip_address_lookup = {pip.id: pip for pip in list(public_ip_addresses)}
result = []
for nic in [n for n in list(nics) if n.virtual_machine]:
nic_resource_group, nic_vm_name = _parse_rg_name(nic.virtual_machine.id)
# If provided, make sure that resource group name and vm name match the NIC we are
# looking at before adding it to the result...
same_resource_group_name = (resource_group_name is None or
resource_group_name.lower() == nic_resource_group.lower())
same_vm_name = (vm_name is None or
vm_name.lower() == nic_vm_name.lower())
if same_resource_group_name and same_vm_name:
network_info = {
'privateIpAddresses': [],
'publicIpAddresses': []
}
for ip_configuration in nic.ip_configurations:
network_info['privateIpAddresses'].append(ip_configuration.private_ip_address)
if ip_configuration.public_ip_address and ip_configuration.public_ip_address.id in ip_address_lookup:
public_ip_address = ip_address_lookup[ip_configuration.public_ip_address.id]
public_ip_addr_info = {
'id': public_ip_address.id,
'name': public_ip_address.name,
'ipAddress': public_ip_address.ip_address,
'ipAllocationMethod': public_ip_address.public_ip_allocation_method
}
try:
public_ip_addr_info['zone'] = public_ip_address.zones[0]
except (AttributeError, IndexError, TypeError):
pass
network_info['publicIpAddresses'].append(public_ip_addr_info)
result.append({
'virtualMachine': {
'resourceGroup': nic_resource_group,
'name': nic_vm_name,
'network': network_info
}
})
return result
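# Each entry in the result list has this shape (values hypothetical):
#   {'virtualMachine': {'resourceGroup': 'MyRG', 'name': 'MyVm',
#                       'network': {'privateIpAddresses': ['10.0.0.4'],
#                                   'publicIpAddresses': [{'id': '...', 'name': 'MyVmPublicIP',
#                                                          'ipAddress': '40.0.0.1',
#                                                          'ipAllocationMethod': 'Dynamic'}]}}}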
def open_vm_port(cmd, resource_group_name, vm_name, port, priority=900, network_security_group_name=None,
apply_to_subnet=False):
from msrestazure.tools import parse_resource_id
network = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_NETWORK)
vm = get_vm(cmd, resource_group_name, vm_name)
location = vm.location
if not vm.network_profile:
raise CLIError("Network profile not found for VM '{}'".format(vm_name))
nic_ids = list(vm.network_profile.network_interfaces)
if len(nic_ids) > 1:
        raise CLIError('Multiple NICs are not supported for this command. Create rules on the NSG '
                       'directly.')
if not nic_ids:
raise CLIError("No NIC associated with VM '{}'".format(vm_name))
# get existing NSG or create a new one
created_nsg = False
nic = network.network_interfaces.get(resource_group_name, os.path.split(nic_ids[0].id)[1])
if not apply_to_subnet:
nsg = nic.network_security_group
else:
subnet_id = parse_resource_id(nic.ip_configurations[0].subnet.id)
subnet = network.subnets.get(resource_group_name, subnet_id['name'], subnet_id['child_name_1'])
nsg = subnet.network_security_group
if not nsg:
NetworkSecurityGroup = \
cmd.get_models('NetworkSecurityGroup', resource_type=ResourceType.MGMT_NETWORK)
nsg = LongRunningOperation(cmd.cli_ctx, 'Creating network security group')(
network.network_security_groups.create_or_update(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
parameters=NetworkSecurityGroup(location=location)
)
)
created_nsg = True
# update the NSG with the new rule to allow inbound traffic
SecurityRule = cmd.get_models('SecurityRule', resource_type=ResourceType.MGMT_NETWORK)
rule_name = 'open-port-all' if port == '*' else 'open-port-{}'.format(port)
rule = SecurityRule(protocol='*', access='allow', direction='inbound', name=rule_name,
source_port_range='*', destination_port_range=port, priority=priority,
source_address_prefix='*', destination_address_prefix='*')
nsg_name = nsg.name or os.path.split(nsg.id)[1]
LongRunningOperation(cmd.cli_ctx, 'Adding security rule')(
network.security_rules.create_or_update(
resource_group_name, nsg_name, rule_name, rule)
)
# update the NIC or subnet if a new NSG was created
if created_nsg and not apply_to_subnet:
nic.network_security_group = nsg
LongRunningOperation(cmd.cli_ctx, 'Updating NIC')(network.network_interfaces.create_or_update(
resource_group_name, nic.name, nic))
elif created_nsg and apply_to_subnet:
subnet.network_security_group = nsg
LongRunningOperation(cmd.cli_ctx, 'Updating subnet')(network.subnets.create_or_update(
resource_group_name=resource_group_name,
virtual_network_name=subnet_id['name'],
subnet_name=subnet_id['child_name_1'],
subnet_parameters=subnet
))
return network.network_security_groups.get(resource_group_name, nsg_name)
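# Illustrative sketch (hypothetical values): open a port on the NIC-level NSG, or apply the rule
# at the subnet level instead:
#   az vm open-port -g MyResourceGroup -n MyVm --port 8080 --priority 900
#   az vm open-port -g MyResourceGroup -n MyVm --port '*' --apply-to-subnet
# A rule named 'open-port-<port>' ('open-port-all' for '*') is created or updated on the NSG.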
def resize_vm(cmd, resource_group_name, vm_name, size, no_wait=False):
vm = get_vm(cmd, resource_group_name, vm_name)
if vm.hardware_profile.vm_size == size:
logger.warning("VM is already %s", size)
return None
vm.hardware_profile.vm_size = size # pylint: disable=no-member
return set_vm(cmd, vm, no_wait=no_wait)
def restart_vm(cmd, resource_group_name, vm_name, no_wait=False, force=False):
client = _compute_client_factory(cmd.cli_ctx)
if force:
return sdk_no_wait(no_wait, client.virtual_machines.redeploy, resource_group_name, vm_name)
return sdk_no_wait(no_wait, client.virtual_machines.restart, resource_group_name, vm_name)
def set_vm(cmd, instance, lro_operation=None, no_wait=False):
instance.resources = None # Issue: https://github.com/Azure/autorest/issues/934
client = _compute_client_factory(cmd.cli_ctx)
parsed_id = _parse_rg_name(instance.id)
poller = sdk_no_wait(no_wait, client.virtual_machines.create_or_update,
resource_group_name=parsed_id[0],
vm_name=parsed_id[1],
parameters=instance)
if lro_operation:
return lro_operation(poller)
return LongRunningOperation(cmd.cli_ctx)(poller)
def patch_vm(cmd, resource_group_name, vm_name, vm):
client = _compute_client_factory(cmd.cli_ctx)
poller = client.virtual_machines.update(resource_group_name, vm_name, vm)
return LongRunningOperation(cmd.cli_ctx)(poller)
def show_vm(cmd, resource_group_name, vm_name, show_details=False):
return get_vm_details(cmd, resource_group_name, vm_name) if show_details \
else get_vm(cmd, resource_group_name, vm_name)
def update_vm(cmd, resource_group_name, vm_name, os_disk=None, disk_caching=None,
write_accelerator=None, license_type=None, no_wait=False, ultra_ssd_enabled=None,
priority=None, max_price=None, proximity_placement_group=None, workspace=None, **kwargs):
from msrestazure.tools import parse_resource_id, resource_id, is_valid_resource_id
from ._vm_utils import update_write_accelerator_settings, update_disk_caching
vm = kwargs['parameters']
if os_disk is not None:
if is_valid_resource_id(os_disk):
disk_id, disk_name = os_disk, parse_resource_id(os_disk)['name']
else:
res = parse_resource_id(vm.id)
disk_id = resource_id(subscription=res['subscription'], resource_group=res['resource_group'],
namespace='Microsoft.Compute', type='disks', name=os_disk)
disk_name = os_disk
vm.storage_profile.os_disk.managed_disk.id = disk_id
vm.storage_profile.os_disk.name = disk_name
if write_accelerator is not None:
update_write_accelerator_settings(vm.storage_profile, write_accelerator)
if disk_caching is not None:
update_disk_caching(vm.storage_profile, disk_caching)
if license_type is not None:
vm.license_type = license_type
if ultra_ssd_enabled is not None:
if vm.additional_capabilities is None:
AdditionalCapabilities = cmd.get_models('AdditionalCapabilities')
vm.additional_capabilities = AdditionalCapabilities(ultra_ssd_enabled=ultra_ssd_enabled)
else:
vm.additional_capabilities.ultra_ssd_enabled = ultra_ssd_enabled
if priority is not None:
vm.priority = priority
if max_price is not None:
if vm.billing_profile is None:
BillingProfile = cmd.get_models('BillingProfile')
vm.billing_profile = BillingProfile(max_price=max_price)
else:
vm.billing_profile.max_price = max_price
if proximity_placement_group is not None:
vm.proximity_placement_group = {'id': proximity_placement_group}
if workspace is not None:
workspace_id = _prepare_workspace(cmd, resource_group_name, workspace)
workspace_name = parse_resource_id(workspace_id)['name']
_set_log_analytics_workspace_extension(cmd=cmd,
resource_group_name=resource_group_name,
vm=vm,
vm_name=vm_name,
workspace_name=workspace_name)
os_type = vm.storage_profile.os_disk.os_type.value if vm.storage_profile.os_disk.os_type else None
_set_data_source_for_workspace(cmd, os_type, resource_group_name, workspace_name)
aux_subscriptions = None
if vm and vm.storage_profile and vm.storage_profile.image_reference and vm.storage_profile.image_reference.id:
aux_subscriptions = _parse_aux_subscriptions(vm.storage_profile.image_reference.id)
client = _compute_client_factory(cmd.cli_ctx, aux_subscriptions=aux_subscriptions)
return sdk_no_wait(no_wait, client.virtual_machines.create_or_update, resource_group_name, vm_name, **kwargs)
# endregion
# region VirtualMachines AvailabilitySets
def _get_availset(cmd, resource_group_name, name):
return _compute_client_factory(cmd.cli_ctx).availability_sets.get(resource_group_name, name)
def _set_availset(cmd, resource_group_name, name, **kwargs):
return _compute_client_factory(cmd.cli_ctx).availability_sets.create_or_update(resource_group_name, name, **kwargs)
# pylint: disable=inconsistent-return-statements
def convert_av_set_to_managed_disk(cmd, resource_group_name, availability_set_name):
av_set = _get_availset(cmd, resource_group_name, availability_set_name)
if av_set.sku.name != 'Aligned':
av_set.sku.name = 'Aligned'
# let us double check whether the existing FD number is supported
skus = list_skus(cmd, av_set.location)
av_sku = next((s for s in skus if s.resource_type == 'availabilitySets' and s.name == 'Aligned'), None)
if av_sku and av_sku.capabilities:
max_fd = int(next((c.value for c in av_sku.capabilities if c.name == 'MaximumPlatformFaultDomainCount'),
'0'))
if max_fd and max_fd < av_set.platform_fault_domain_count:
logger.warning("The fault domain count will be adjusted from %s to %s so to stay within region's "
"limitation", av_set.platform_fault_domain_count, max_fd)
av_set.platform_fault_domain_count = max_fd
return _set_availset(cmd, resource_group_name=resource_group_name, name=availability_set_name,
parameters=av_set)
logger.warning('Availability set %s is already configured for managed disks.', availability_set_name)
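# Illustrative sketch (hypothetical values): convert an availability set to the 'Aligned' SKU so
# its VMs can use managed disks, clamping the fault domain count to the regional maximum:
#   az vm availability-set convert -g MyResourceGroup -n MyAvSet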
def create_av_set(cmd, availability_set_name, resource_group_name, platform_fault_domain_count=2,
platform_update_domain_count=None, location=None, proximity_placement_group=None, unmanaged=False,
no_wait=False, tags=None, validate=False):
from azure.cli.core.util import random_string
from azure.cli.core.commands.arm import ArmTemplateBuilder
from azure.cli.command_modules.vm._template_builder import build_av_set_resource
tags = tags or {}
# Build up the ARM template
master_template = ArmTemplateBuilder()
av_set_resource = build_av_set_resource(cmd, availability_set_name, location, tags,
platform_update_domain_count,
platform_fault_domain_count, unmanaged,
proximity_placement_group=proximity_placement_group)
master_template.add_resource(av_set_resource)
template = master_template.build()
# deploy ARM template
deployment_name = 'av_set_deploy_' + random_string(32)
client = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES).deployments
DeploymentProperties = cmd.get_models('DeploymentProperties', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES)
properties = DeploymentProperties(template=template, parameters={}, mode='incremental')
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
Deployment = cmd.get_models('Deployment', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES)
deployment = Deployment(properties=properties)
if validate:
validation_poller = client.validate(resource_group_name, deployment_name, deployment)
return LongRunningOperation(cmd.cli_ctx)(validation_poller)
if no_wait:
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, deployment_name, deployment)
LongRunningOperation(cmd.cli_ctx)(sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, deployment_name, deployment))
else:
if validate:
return client.validate(resource_group_name, deployment_name, properties)
if no_wait:
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, deployment_name, properties)
LongRunningOperation(cmd.cli_ctx)(sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, deployment_name, properties))
compute_client = _compute_client_factory(cmd.cli_ctx)
return compute_client.availability_sets.get(resource_group_name, availability_set_name)
def update_av_set(instance, resource_group_name, proximity_placement_group=None):
if proximity_placement_group is not None:
instance.proximity_placement_group = {'id': proximity_placement_group}
return instance
def list_av_sets(cmd, resource_group_name=None):
op_group = _compute_client_factory(cmd.cli_ctx).availability_sets
if resource_group_name:
return op_group.list(resource_group_name)
return op_group.list_by_subscription(expand='virtualMachines/$ref')
# endregion
# region VirtualMachines BootDiagnostics
def disable_boot_diagnostics(cmd, resource_group_name, vm_name):
vm = get_vm(cmd, resource_group_name, vm_name)
diag_profile = vm.diagnostics_profile
if not (diag_profile and diag_profile.boot_diagnostics and diag_profile.boot_diagnostics.enabled):
return
diag_profile.boot_diagnostics.enabled = False
diag_profile.boot_diagnostics.storage_uri = None
set_vm(cmd, vm, ExtensionUpdateLongRunningOperation(cmd.cli_ctx, 'disabling boot diagnostics', 'done'))
def enable_boot_diagnostics(cmd, resource_group_name, vm_name, storage):
from azure.cli.command_modules.vm._vm_utils import get_storage_blob_uri
vm = get_vm(cmd, resource_group_name, vm_name)
storage_uri = get_storage_blob_uri(cmd.cli_ctx, storage)
if (vm.diagnostics_profile and
vm.diagnostics_profile.boot_diagnostics and
vm.diagnostics_profile.boot_diagnostics.enabled and
vm.diagnostics_profile.boot_diagnostics.storage_uri and
vm.diagnostics_profile.boot_diagnostics.storage_uri.lower() == storage_uri.lower()):
return
DiagnosticsProfile, BootDiagnostics = cmd.get_models('DiagnosticsProfile', 'BootDiagnostics')
boot_diag = BootDiagnostics(enabled=True, storage_uri=storage_uri)
if vm.diagnostics_profile is None:
vm.diagnostics_profile = DiagnosticsProfile(boot_diagnostics=boot_diag)
else:
vm.diagnostics_profile.boot_diagnostics = boot_diag
set_vm(cmd, vm, ExtensionUpdateLongRunningOperation(cmd.cli_ctx, 'enabling boot diagnostics', 'done'))
class BootLogStreamWriter: # pylint: disable=too-few-public-methods
def __init__(self, out):
self.out = out
def write(self, str_or_bytes):
content = str_or_bytes
if isinstance(str_or_bytes, bytes):
content = str_or_bytes.decode('utf8')
try:
self.out.write(content)
except UnicodeEncodeError:
# e.g. 'charmap' codec can't encode characters in position 258829-258830: character maps to <undefined>
import unicodedata
ascii_content = unicodedata.normalize('NFKD', content).encode('ascii', 'ignore')
self.out.write(ascii_content.decode())
logger.warning("A few unicode characters have been ignored because the shell is not able to display. "
"To see the full log, use a shell with unicode capacity")
def get_boot_log(cmd, resource_group_name, vm_name):
import re
import sys
from azure.cli.core.profiles import get_sdk
BlockBlobService = get_sdk(cmd.cli_ctx, ResourceType.DATA_STORAGE, 'blob.blockblobservice#BlockBlobService')
client = _compute_client_factory(cmd.cli_ctx)
virtual_machine = client.virtual_machines.get(resource_group_name, vm_name, expand='instanceView')
# pylint: disable=no-member
if (not virtual_machine.instance_view.boot_diagnostics or
not virtual_machine.instance_view.boot_diagnostics.serial_console_log_blob_uri):
raise CLIError('Please enable boot diagnostics.')
blob_uri = virtual_machine.instance_view.boot_diagnostics.serial_console_log_blob_uri
# Find storage account for diagnostics
storage_mgmt_client = _get_storage_management_client(cmd.cli_ctx)
if not blob_uri:
raise CLIError('No console log available')
try:
storage_accounts = storage_mgmt_client.storage_accounts.list()
matching_storage_account = (a for a in list(storage_accounts)
if blob_uri.startswith(a.primary_endpoints.blob))
storage_account = next(matching_storage_account)
except StopIteration:
        raise CLIError('Failed to find storage account for console log file')
regex = r'/subscriptions/[^/]+/resourceGroups/(?P<rg>[^/]+)/.+'
match = re.search(regex, storage_account.id, re.I)
rg = match.group('rg')
# Get account key
keys = storage_mgmt_client.storage_accounts.list_keys(rg, storage_account.name)
    # Extract the container and blob name from the URL...
container, blob = urlparse(blob_uri).path.split('/')[-2:]
storage_client = get_data_service_client(
cmd.cli_ctx,
BlockBlobService,
storage_account.name,
keys.keys[0].value,
endpoint_suffix=cmd.cli_ctx.cloud.suffixes.storage_endpoint) # pylint: disable=no-member
    # Our stream writer is not seekable, so disable parallel download.
storage_client.get_blob_to_stream(container, blob, BootLogStreamWriter(sys.stdout), max_connections=1)
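# Illustrative sketch (hypothetical values): stream the serial console log to stdout, assuming
# boot diagnostics is enabled and its storage account is visible in the current subscription:
#   az vm boot-diagnostics get-boot-log -g MyResourceGroup -n MyVm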
# endregion
# region VirtualMachines Diagnostics
def set_diagnostics_extension(
cmd, resource_group_name, vm_name, settings, protected_settings=None, version=None,
no_auto_upgrade=False):
client = _compute_client_factory(cmd.cli_ctx)
vm = client.virtual_machines.get(resource_group_name, vm_name, 'instanceView')
# pylint: disable=no-member
is_linux_os = _is_linux_os(vm)
vm_extension_name = _LINUX_DIAG_EXT if is_linux_os else _WINDOWS_DIAG_EXT
if is_linux_os: # check incompatible version
exts = vm.instance_view.extensions or []
major_ver = extension_mappings[_LINUX_DIAG_EXT]['version'].split('.')[0]
if next((e for e in exts if e.name == vm_extension_name and
not e.type_handler_version.startswith(major_ver + '.')), None):
            logger.warning('There is an incompatible version of the diagnostics extension installed. '
                           'It will be updated to a new version.')
poller = client.virtual_machine_extensions.delete(resource_group_name, vm_name,
vm_extension_name)
LongRunningOperation(cmd.cli_ctx)(poller)
return set_extension(cmd, resource_group_name, vm_name, vm_extension_name,
extension_mappings[vm_extension_name]['publisher'],
version or extension_mappings[vm_extension_name]['version'],
settings,
protected_settings,
no_auto_upgrade)
def show_default_diagnostics_configuration(is_windows_os=False):
public_settings = get_default_diag_config(is_windows_os)
# pylint: disable=line-too-long
protected_settings_info = json.dumps({
'storageAccountName': "__STORAGE_ACCOUNT_NAME__",
        # LAD and WAD are not consistent on the SAS token format; call it out here
"storageAccountSasToken": "__SAS_TOKEN_{}__".format("WITH_LEADING_QUESTION_MARK" if is_windows_os else "WITHOUT_LEADING_QUESTION_MARK")
}, indent=2)
logger.warning('Protected settings with storage account info is required to work with the default configurations, e.g. \n%s', protected_settings_info)
return public_settings
# endregion
# region VirtualMachines Disks (Managed)
def attach_managed_data_disk(cmd, resource_group_name, vm_name, disk, new=False, sku=None,
size_gb=1023, lun=None, caching=None, enable_write_accelerator=False):
'''attach a managed disk'''
from msrestazure.tools import parse_resource_id
vm = get_vm(cmd, resource_group_name, vm_name)
DataDisk, ManagedDiskParameters, DiskCreateOption = cmd.get_models(
'DataDisk', 'ManagedDiskParameters', 'DiskCreateOptionTypes')
# pylint: disable=no-member
if lun is None:
lun = _get_disk_lun(vm.storage_profile.data_disks)
if new:
data_disk = DataDisk(lun=lun, create_option=DiskCreateOption.empty,
name=parse_resource_id(disk)['name'],
disk_size_gb=size_gb, caching=caching,
managed_disk=ManagedDiskParameters(storage_account_type=sku))
else:
params = ManagedDiskParameters(id=disk, storage_account_type=sku)
data_disk = DataDisk(lun=lun, create_option=DiskCreateOption.attach, managed_disk=params, caching=caching)
if enable_write_accelerator:
data_disk.write_accelerator_enabled = enable_write_accelerator
vm.storage_profile.data_disks.append(data_disk)
set_vm(cmd, vm)
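# Illustrative sketch (hypothetical values): attach an existing managed disk, or create a new
# empty one in place; when --lun is omitted the first free slot is auto-assigned:
#   az vm disk attach -g MyResourceGroup --vm-name MyVm --name MyDataDisk
#   az vm disk attach -g MyResourceGroup --vm-name MyVm --name MyNewDisk --new --size-gb 128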
def detach_data_disk(cmd, resource_group_name, vm_name, disk_name):
    # here we handle both unmanaged and managed disks
vm = get_vm(cmd, resource_group_name, vm_name)
# pylint: disable=no-member
leftovers = [d for d in vm.storage_profile.data_disks if d.name.lower() != disk_name.lower()]
if len(vm.storage_profile.data_disks) == len(leftovers):
raise CLIError("No disk with the name '{}' was found".format(disk_name))
vm.storage_profile.data_disks = leftovers
set_vm(cmd, vm)
# endregion
# region VirtualMachines Extensions
def list_extensions(cmd, resource_group_name, vm_name):
vm = get_vm(cmd, resource_group_name, vm_name)
extension_type = 'Microsoft.Compute/virtualMachines/extensions'
result = [r for r in (vm.resources or []) if r.type == extension_type]
return result
def set_extension(cmd, resource_group_name, vm_name, vm_extension_name, publisher, version=None, settings=None,
protected_settings=None, no_auto_upgrade=False, force_update=False, no_wait=False,
extension_instance_name=None):
vm = get_vm(cmd, resource_group_name, vm_name, 'instanceView')
client = _compute_client_factory(cmd.cli_ctx)
if not extension_instance_name:
extension_instance_name = vm_extension_name
VirtualMachineExtension = cmd.get_models('VirtualMachineExtension')
instance_name = _get_extension_instance_name(vm.instance_view, publisher, vm_extension_name,
suggested_name=extension_instance_name)
if instance_name != extension_instance_name:
msg = "A %s extension with name %s already exists. Updating it with your settings..."
logger.warning(msg, vm_extension_name, instance_name)
version = _normalize_extension_version(cmd.cli_ctx, publisher, vm_extension_name, version, vm.location)
ext = VirtualMachineExtension(location=vm.location,
publisher=publisher,
virtual_machine_extension_type=vm_extension_name,
protected_settings=protected_settings,
type_handler_version=version,
settings=settings,
auto_upgrade_minor_version=(not no_auto_upgrade))
if force_update:
ext.force_update_tag = str(_gen_guid())
return sdk_no_wait(no_wait, client.virtual_machine_extensions.create_or_update,
resource_group_name, vm_name, instance_name, ext)
# endregion
# region VirtualMachines Extension Images
def list_vm_extension_images(
cmd, image_location=None, publisher_name=None, name=None, version=None, latest=False):
return load_extension_images_thru_services(
cmd.cli_ctx, publisher_name, name, version, image_location, latest)
# endregion
# region VirtualMachines Identity
def _remove_identities(cmd, resource_group_name, name, identities, getter, setter):
from ._vm_utils import MSI_LOCAL_ID
ResourceIdentityType = cmd.get_models('ResourceIdentityType', operation_group='virtual_machines')
remove_system_assigned_identity = False
if MSI_LOCAL_ID in identities:
remove_system_assigned_identity = True
identities.remove(MSI_LOCAL_ID)
resource = getter(cmd, resource_group_name, name)
if resource.identity is None:
return None
emsis_to_remove = []
if identities:
existing_emsis = {x.lower() for x in list((resource.identity.user_assigned_identities or {}).keys())}
emsis_to_remove = {x.lower() for x in identities}
non_existing = emsis_to_remove.difference(existing_emsis)
if non_existing:
raise CLIError("'{}' are not associated with '{}'".format(','.join(non_existing), name))
if not list(existing_emsis - emsis_to_remove): # if all emsis are gone, we need to update the type
if resource.identity.type == ResourceIdentityType.user_assigned:
resource.identity.type = ResourceIdentityType.none
elif resource.identity.type == ResourceIdentityType.system_assigned_user_assigned:
resource.identity.type = ResourceIdentityType.system_assigned
resource.identity.user_assigned_identities = None
if remove_system_assigned_identity:
resource.identity.type = (ResourceIdentityType.none
if resource.identity.type == ResourceIdentityType.system_assigned
else ResourceIdentityType.user_assigned)
if emsis_to_remove:
if resource.identity.type not in [ResourceIdentityType.none, ResourceIdentityType.system_assigned]:
resource.identity.user_assigned_identities = {}
for identity in emsis_to_remove:
resource.identity.user_assigned_identities[identity] = None
result = LongRunningOperation(cmd.cli_ctx)(setter(resource_group_name, name, resource))
return result.identity
def remove_vm_identity(cmd, resource_group_name, vm_name, identities=None):
def setter(resource_group_name, vm_name, vm):
client = _compute_client_factory(cmd.cli_ctx)
VirtualMachineUpdate = cmd.get_models('VirtualMachineUpdate', operation_group='virtual_machines')
vm_update = VirtualMachineUpdate(identity=vm.identity)
return client.virtual_machines.update(resource_group_name, vm_name, vm_update)
if identities is None:
from ._vm_utils import MSI_LOCAL_ID
identities = [MSI_LOCAL_ID]
return _remove_identities(cmd, resource_group_name, vm_name, identities, get_vm, setter)
# endregion
# region VirtualMachines Images
def list_vm_images(cmd, image_location=None, publisher_name=None, offer=None, sku=None,
all=False): # pylint: disable=redefined-builtin
load_thru_services = all
if load_thru_services:
if not publisher_name and not offer and not sku:
logger.warning("You are retrieving all the images from server which could take more than a minute. "
"To shorten the wait, provide '--publisher', '--offer' or '--sku'. Partial name search "
"is supported.")
all_images = load_images_thru_services(cmd.cli_ctx, publisher_name, offer, sku, image_location)
else:
all_images = load_images_from_aliases_doc(cmd.cli_ctx, publisher_name, offer, sku)
logger.warning(
'You are viewing an offline list of images, use --all to retrieve an up-to-date list')
for i in all_images:
i['urn'] = ':'.join([i['publisher'], i['offer'], i['sku'], i['version']])
return all_images
def show_vm_image(cmd, urn=None, publisher=None, offer=None, sku=None, version=None, location=None):
from azure.cli.core.commands.parameters import get_one_of_subscription_locations
    usage_err = 'usage error: --publisher STRING --offer STRING --sku STRING --version STRING | --urn STRING'
location = location or get_one_of_subscription_locations(cmd.cli_ctx)
if urn:
if any([publisher, offer, sku, version]):
raise CLIError(usage_err)
publisher, offer, sku, version = urn.split(":")
if version.lower() == 'latest':
version = _get_latest_image_version(cmd.cli_ctx, location, publisher, offer, sku)
elif not publisher or not offer or not sku or not version:
raise CLIError(usage_err)
client = _compute_client_factory(cmd.cli_ctx)
return client.virtual_machine_images.get(location, publisher, offer, sku, version)
def accept_market_ordering_terms(cmd, urn=None, publisher=None, offer=None, plan=None):
from azure.mgmt.marketplaceordering import MarketplaceOrderingAgreements
    usage_err = 'usage error: --publisher STRING --offer STRING --plan STRING | --urn STRING'
if urn:
if any([publisher, offer, plan]):
raise CLIError(usage_err)
publisher, offer, _, _ = urn.split(':')
image = show_vm_image(cmd, urn)
if not image.plan:
logger.warning("Image '%s' has no terms to accept.", urn)
return
plan = image.plan.name
else:
if not publisher or not offer or not plan:
raise CLIError(usage_err)
market_place_client = get_mgmt_service_client(cmd.cli_ctx, MarketplaceOrderingAgreements)
term = market_place_client.marketplace_agreements.get(publisher, offer, plan)
term.accepted = True
return market_place_client.marketplace_agreements.create(publisher, offer, plan, term)
# endregion
def _terms_prepare(cmd, urn, publisher, offer, plan):
if urn:
if any([publisher, offer, plan]):
raise CLIError('usage error: If using --urn, do not use any of --plan, --offer, --publisher.')
terms = urn.split(':')
if len(terms) != 4:
raise CLIError('usage error: urn should be in the format of publisher:offer:sku:version.')
publisher, offer = terms[0], terms[1]
image = show_vm_image(cmd, urn)
if not image.plan:
raise CLIError("Image '%s' has no terms to accept." % urn)
plan = image.plan.name
else:
if not all([publisher, offer, plan]):
raise CLIError(
'usage error: If not using --urn, all of --plan, --offer and --publisher should be provided.')
return publisher, offer, plan
def _accept_cancel_terms(cmd, urn, publisher, offer, plan, accept):
publisher, offer, plan = _terms_prepare(cmd, urn, publisher, offer, plan)
op = cf_vm_image_term(cmd.cli_ctx, '')
terms = op.get(publisher, offer, plan)
terms.accepted = accept
return op.create(publisher, offer, plan, terms)
def accept_terms(cmd, urn=None, publisher=None, offer=None, plan=None):
"""
Accept Azure Marketplace image terms so that the image can be used to create VMs.
:param cmd:cmd
:param urn:URN, in the format of 'publisher:offer:sku:version'. If specified, other argument values can be omitted
:param publisher:Image publisher
:param offer:Image offer
:param plan:Image billing plan
:return:
"""
return _accept_cancel_terms(cmd, urn, publisher, offer, plan, True)
def cancel_terms(cmd, urn=None, publisher=None, offer=None, plan=None):
"""
Cancel Azure Marketplace image terms.
:param cmd:cmd
:param urn:URN, in the format of 'publisher:offer:sku:version'. If specified, other argument values can be omitted
:param publisher:Image publisher
:param offer:Image offer
:param plan:Image billing plan
:return:
"""
return _accept_cancel_terms(cmd, urn, publisher, offer, plan, False)
def get_terms(cmd, urn=None, publisher=None, offer=None, plan=None):
"""
Get the details of Azure Marketplace image terms.
:param cmd:cmd
:param urn:URN, in the format of 'publisher:offer:sku:version'. If specified, other argument values can be omitted
:param publisher:Image publisher
:param offer:Image offer
:param plan:Image billing plan
:return:
"""
publisher, offer, plan = _terms_prepare(cmd, urn, publisher, offer, plan)
op = cf_vm_image_term(cmd.cli_ctx, '')
terms = op.get(publisher, offer, plan)
return terms
# region VirtualMachines NetworkInterfaces (NICs)
def show_vm_nic(cmd, resource_group_name, vm_name, nic):
from msrestazure.tools import parse_resource_id
vm = get_vm(cmd, resource_group_name, vm_name)
found = next(
(n for n in vm.network_profile.network_interfaces if nic.lower() == n.id.lower()), None
# pylint: disable=no-member
)
if found:
network_client = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_NETWORK)
nic_name = parse_resource_id(found.id)['name']
return network_client.network_interfaces.get(resource_group_name, nic_name)
raise CLIError("NIC '{}' not found on VM '{}'".format(nic, vm_name))
def list_vm_nics(cmd, resource_group_name, vm_name):
vm = get_vm(cmd, resource_group_name, vm_name)
return vm.network_profile.network_interfaces # pylint: disable=no-member
def add_vm_nic(cmd, resource_group_name, vm_name, nics, primary_nic=None):
vm = get_vm(cmd, resource_group_name, vm_name)
new_nics = _build_nic_list(cmd, nics)
existing_nics = _get_existing_nics(vm)
return _update_vm_nics(cmd, vm, existing_nics + new_nics, primary_nic)
def remove_vm_nic(cmd, resource_group_name, vm_name, nics, primary_nic=None):
def to_delete(nic_id):
return [n for n in nics_to_delete if n.id.lower() == nic_id.lower()]
vm = get_vm(cmd, resource_group_name, vm_name)
nics_to_delete = _build_nic_list(cmd, nics)
existing_nics = _get_existing_nics(vm)
survived = [x for x in existing_nics if not to_delete(x.id)]
return _update_vm_nics(cmd, vm, survived, primary_nic)
def set_vm_nic(cmd, resource_group_name, vm_name, nics, primary_nic=None):
vm = get_vm(cmd, resource_group_name, vm_name)
nics = _build_nic_list(cmd, nics)
return _update_vm_nics(cmd, vm, nics, primary_nic)
def _build_nic_list(cmd, nic_ids):
NetworkInterfaceReference = cmd.get_models('NetworkInterfaceReference')
nic_list = []
if nic_ids:
# pylint: disable=no-member
network_client = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_NETWORK)
for nic_id in nic_ids:
rg, name = _parse_rg_name(nic_id)
nic = network_client.network_interfaces.get(rg, name)
nic_list.append(NetworkInterfaceReference(id=nic.id, primary=False))
return nic_list
def _get_existing_nics(vm):
network_profile = getattr(vm, 'network_profile', None)
nics = []
if network_profile is not None:
nics = network_profile.network_interfaces or []
return nics
def _update_vm_nics(cmd, vm, nics, primary_nic):
NetworkProfile = cmd.get_models('NetworkProfile')
if primary_nic:
try:
_, primary_nic_name = _parse_rg_name(primary_nic)
except IndexError:
primary_nic_name = primary_nic
matched = [n for n in nics if _parse_rg_name(n.id)[1].lower() == primary_nic_name.lower()]
if not matched:
            raise CLIError('Primary NIC {} was not found'.format(primary_nic))
if len(matched) > 1:
            raise CLIError('Duplicate NIC entries with name {}'.format(primary_nic))
for n in nics:
n.primary = False
matched[0].primary = True
elif nics:
if not [n for n in nics if n.primary]:
nics[0].primary = True
network_profile = getattr(vm, 'network_profile', None)
if network_profile is None:
vm.network_profile = NetworkProfile(network_interfaces=nics)
else:
network_profile.network_interfaces = nics
return set_vm(cmd, vm).network_profile.network_interfaces
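# Primary-NIC selection rule used above: if primary_nic is given it must match exactly one of the
# supplied NICs (by name or resource ID); otherwise, when none of the NICs is marked primary, the
# first one in the list is promoted to primary.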
# endregion
# region VirtualMachines RunCommand
def run_command_invoke(cmd, resource_group_name, vm_vmss_name, command_id, scripts=None, parameters=None, instance_id=None): # pylint: disable=line-too-long
RunCommandInput, RunCommandInputParameter = cmd.get_models('RunCommandInput', 'RunCommandInputParameter')
parameters = parameters or []
run_command_input_parameters = []
auto_arg_name_num = 0
for p in parameters:
if '=' in p:
n, v = p.split('=', 1)
else:
            # The RunCommand API requires named arguments, which doesn't fit bash scripts that use
            # positional arguments, so we generate names here just to keep the API happy.
            # Note: we don't handle mixed styles, but will consolidate by GA once the API is settled.
auto_arg_name_num += 1
n = 'arg{}'.format(auto_arg_name_num)
v = p
run_command_input_parameters.append(RunCommandInputParameter(name=n, value=v))
client = _compute_client_factory(cmd.cli_ctx)
# if instance_id, this is a vmss instance
if instance_id:
return client.virtual_machine_scale_set_vms.run_command(resource_group_name, vm_vmss_name, instance_id,
RunCommandInput(command_id=command_id, script=scripts,
parameters=run_command_input_parameters)) # pylint: disable=line-too-long
# otherwise this is a regular vm instance
return client.virtual_machines.run_command(resource_group_name, vm_vmss_name,
RunCommandInput(command_id=command_id, script=scripts,
parameters=run_command_input_parameters))
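# Illustrative sketch (hypothetical values): positional parameters are auto-named 'arg1',
# 'arg2', ... to satisfy the RunCommand API's requirement for named arguments:
#   az vm run-command invoke -g MyResourceGroup -n MyVm --command-id RunShellScript \
#       --scripts 'echo $1 $2' --parameters hello world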
def vm_run_command_invoke(cmd, resource_group_name, vm_name, command_id, scripts=None, parameters=None):
return run_command_invoke(cmd, resource_group_name, vm_name, command_id, scripts, parameters)
# endregion
# region VirtualMachines Secrets
def _get_vault_id_from_name(cli_ctx, client, vault_name):
group_name = _get_resource_group_from_vault_name(cli_ctx, vault_name)
if not group_name:
raise CLIError("unable to find vault '{}' in current subscription.".format(vault_name))
vault = client.get(group_name, vault_name)
return vault.id
def get_vm_format_secret(cmd, secrets, certificate_store=None, keyvault=None, resource_group_name=None):
from azure.keyvault import KeyVaultId
import re
client = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_KEYVAULT).vaults
grouped_secrets = {}
merged_secrets = []
for s in secrets:
merged_secrets += s.splitlines()
# group secrets by source vault
for secret in merged_secrets:
parsed = KeyVaultId.parse_secret_id(secret)
match = re.search('://(.+?)\\.', parsed.vault)
vault_name = match.group(1)
if vault_name not in grouped_secrets:
grouped_secrets[vault_name] = {
'vaultCertificates': [],
'id': keyvault or _get_vault_id_from_name(cmd.cli_ctx, client, vault_name)
}
vault_cert = {'certificateUrl': secret}
if certificate_store:
vault_cert['certificateStore'] = certificate_store
grouped_secrets[vault_name]['vaultCertificates'].append(vault_cert)
# transform the reduced map to vm format
formatted = [{'sourceVault': {'id': value['id']},
'vaultCertificates': value['vaultCertificates']}
for _, value in list(grouped_secrets.items())]
return formatted
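# The formatted output groups certificates by source vault (values hypothetical):
#   [{'sourceVault': {'id': '/subscriptions/.../vaults/MyVault'},
#     'vaultCertificates': [{'certificateUrl': 'https://myvault.vault.azure.net/secrets/cert/...',
#                            'certificateStore': 'My'}]}]
# 'certificateStore' is only present when a store was supplied.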
def add_vm_secret(cmd, resource_group_name, vm_name, keyvault, certificate, certificate_store=None):
from msrestazure.tools import parse_resource_id
from ._vm_utils import create_keyvault_data_plane_client, get_key_vault_base_url
VaultSecretGroup, SubResource, VaultCertificate = cmd.get_models(
'VaultSecretGroup', 'SubResource', 'VaultCertificate')
vm = get_vm(cmd, resource_group_name, vm_name)
    if '://' not in certificate:  # a cert name rather than a full URL?
keyvault_client = create_keyvault_data_plane_client(cmd.cli_ctx)
cert_info = keyvault_client.get_certificate(
get_key_vault_base_url(cmd.cli_ctx, parse_resource_id(keyvault)['name']), certificate, '')
certificate = cert_info.sid
if not _is_linux_os(vm):
certificate_store = certificate_store or 'My'
elif certificate_store:
raise CLIError('Usage error: --certificate-store is only applicable on Windows VM')
vault_cert = VaultCertificate(certificate_url=certificate, certificate_store=certificate_store)
vault_secret_group = next((x for x in vm.os_profile.secrets
if x.source_vault and x.source_vault.id.lower() == keyvault.lower()), None)
if vault_secret_group:
vault_secret_group.vault_certificates.append(vault_cert)
else:
vault_secret_group = VaultSecretGroup(source_vault=SubResource(id=keyvault), vault_certificates=[vault_cert])
vm.os_profile.secrets.append(vault_secret_group)
vm = set_vm(cmd, vm)
return vm.os_profile.secrets
def list_vm_secrets(cmd, resource_group_name, vm_name):
vm = get_vm(cmd, resource_group_name, vm_name)
if vm.os_profile:
return vm.os_profile.secrets
return []
def remove_vm_secret(cmd, resource_group_name, vm_name, keyvault, certificate=None):
vm = get_vm(cmd, resource_group_name, vm_name)
# support 2 kinds of filter:
# a. if only keyvault is supplied, we delete its whole vault group.
# b. if both keyvault and certificate are supplied, we only delete the specific cert entry.
to_keep = vm.os_profile.secrets
keyvault_matched = []
if keyvault:
keyvault = keyvault.lower()
keyvault_matched = [x for x in to_keep if x.source_vault and x.source_vault.id.lower() == keyvault]
if keyvault and not certificate:
to_keep = [x for x in to_keep if x not in keyvault_matched]
elif certificate:
temp = keyvault_matched if keyvault else to_keep
cert_url_pattern = certificate.lower()
if '://' not in cert_url_pattern: # just a cert name?
cert_url_pattern = '/' + cert_url_pattern + '/'
for x in temp:
x.vault_certificates = ([v for v in x.vault_certificates
if not(v.certificate_url and cert_url_pattern in v.certificate_url.lower())])
to_keep = [x for x in to_keep if x.vault_certificates] # purge all groups w/o any cert entries
vm.os_profile.secrets = to_keep
vm = set_vm(cmd, vm)
return vm.os_profile.secrets
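# Filter semantics, for reference: passing only keyvault removes the whole vault group; passing
# keyvault plus certificate removes just the matching cert entries, and any group left with no
# certificates is purged entirely.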
# endregion
# region VirtualMachines UnmanagedDisks
def attach_unmanaged_data_disk(cmd, resource_group_name, vm_name, new=False, vhd_uri=None, lun=None,
disk_name=None, size_gb=1023, caching=None):
DataDisk, DiskCreateOptionTypes, VirtualHardDisk = cmd.get_models(
'DataDisk', 'DiskCreateOptionTypes', 'VirtualHardDisk')
if not new and not disk_name:
raise CLIError('Please provide the name of the existing disk to attach')
create_option = DiskCreateOptionTypes.empty if new else DiskCreateOptionTypes.attach
vm = get_vm(cmd, resource_group_name, vm_name)
if disk_name is None:
import datetime
disk_name = vm_name + '-' + datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
# pylint: disable=no-member
if vhd_uri is None:
if not hasattr(vm.storage_profile.os_disk, 'vhd') or not vm.storage_profile.os_disk.vhd:
raise CLIError('Adding unmanaged disks to a VM with managed disks is not supported')
blob_uri = vm.storage_profile.os_disk.vhd.uri
vhd_uri = blob_uri[0:blob_uri.rindex('/') + 1] + disk_name + '.vhd'
if lun is None:
lun = _get_disk_lun(vm.storage_profile.data_disks)
disk = DataDisk(lun=lun, vhd=VirtualHardDisk(uri=vhd_uri), name=disk_name,
create_option=create_option,
caching=caching, disk_size_gb=size_gb if new else None)
if vm.storage_profile.data_disks is None:
vm.storage_profile.data_disks = []
vm.storage_profile.data_disks.append(disk)
return set_vm(cmd, vm)
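# When no VHD URI is supplied, the new VHD is placed next to the OS disk blob (values
# hypothetical): an OS disk at https://mystore.blob.core.windows.net/vhds/osdisk.vhd yields
# https://mystore.blob.core.windows.net/vhds/<disk_name>.vhd.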
def list_unmanaged_disks(cmd, resource_group_name, vm_name):
vm = get_vm(cmd, resource_group_name, vm_name)
return vm.storage_profile.data_disks # pylint: disable=no-member
# endregion
# region VirtualMachines Users
def _update_linux_access_extension(cmd, vm_instance, resource_group_name, protected_settings,
no_wait=False):
client = _compute_client_factory(cmd.cli_ctx)
VirtualMachineExtension = cmd.get_models('VirtualMachineExtension')
# pylint: disable=no-member
instance_name = _get_extension_instance_name(vm_instance.instance_view,
extension_mappings[_LINUX_ACCESS_EXT]['publisher'],
_LINUX_ACCESS_EXT,
_ACCESS_EXT_HANDLER_NAME)
publisher, version, auto_upgrade = _get_access_extension_upgrade_info(
vm_instance.resources, _LINUX_ACCESS_EXT)
ext = VirtualMachineExtension(location=vm_instance.location, # pylint: disable=no-member
publisher=publisher,
virtual_machine_extension_type=_LINUX_ACCESS_EXT,
protected_settings=protected_settings,
type_handler_version=version,
settings={},
auto_upgrade_minor_version=auto_upgrade)
return sdk_no_wait(no_wait, client.virtual_machine_extensions.create_or_update,
resource_group_name, vm_instance.name, instance_name, ext)
def _set_linux_user(cmd, vm_instance, resource_group_name, username,
password=None, ssh_key_value=None, no_wait=False):
protected_settings = {}
protected_settings['username'] = username
if password:
protected_settings['password'] = password
elif not ssh_key_value and not password: # default to ssh
ssh_key_value = os.path.join(os.path.expanduser('~'), '.ssh', 'id_rsa.pub')
if ssh_key_value:
protected_settings['ssh_key'] = read_content_if_is_file(ssh_key_value)
if no_wait:
return _update_linux_access_extension(cmd, vm_instance, resource_group_name,
protected_settings, no_wait)
poller = _update_linux_access_extension(cmd, vm_instance, resource_group_name,
protected_settings)
return ExtensionUpdateLongRunningOperation(cmd.cli_ctx, 'setting user', 'done')(poller)
def _reset_windows_admin(cmd, vm_instance, resource_group_name, username, password, no_wait=False):
    '''Update the admin password. Only the password can be changed; adding a new user is not supported.'''
client = _compute_client_factory(cmd.cli_ctx)
VirtualMachineExtension = cmd.get_models('VirtualMachineExtension')
publisher, version, auto_upgrade = _get_access_extension_upgrade_info(
vm_instance.resources, _WINDOWS_ACCESS_EXT)
# pylint: disable=no-member
instance_name = _get_extension_instance_name(vm_instance.instance_view,
publisher,
_WINDOWS_ACCESS_EXT,
_ACCESS_EXT_HANDLER_NAME)
ext = VirtualMachineExtension(location=vm_instance.location, # pylint: disable=no-member
publisher=publisher,
virtual_machine_extension_type=_WINDOWS_ACCESS_EXT,
protected_settings={'Password': password},
type_handler_version=version,
settings={'UserName': username},
auto_upgrade_minor_version=auto_upgrade)
if no_wait:
return sdk_no_wait(no_wait, client.virtual_machine_extensions.create_or_update,
resource_group_name, vm_instance.name, instance_name, ext)
poller = client.virtual_machine_extensions.create_or_update(resource_group_name,
vm_instance.name,
instance_name, ext)
return ExtensionUpdateLongRunningOperation(cmd.cli_ctx, 'resetting admin', 'done')(poller)
def set_user(cmd, resource_group_name, vm_name, username, password=None, ssh_key_value=None,
no_wait=False):
vm = get_vm(cmd, resource_group_name, vm_name, 'instanceView')
if _is_linux_os(vm):
return _set_linux_user(cmd, vm, resource_group_name, username, password, ssh_key_value, no_wait)
if ssh_key_value:
        raise CLIError('An SSH key is not applicable on a Windows VM')
return _reset_windows_admin(cmd, vm, resource_group_name, username, password, no_wait)
def delete_user(cmd, resource_group_name, vm_name, username, no_wait=False):
vm = get_vm(cmd, resource_group_name, vm_name, 'instanceView')
if not _is_linux_os(vm):
        raise CLIError('Deleting a user is not supported on a Windows VM')
if no_wait:
return _update_linux_access_extension(cmd, vm, resource_group_name,
{'remove_user': username}, no_wait)
poller = _update_linux_access_extension(cmd, vm, resource_group_name,
{'remove_user': username})
return ExtensionUpdateLongRunningOperation(cmd.cli_ctx, 'deleting user', 'done')(poller)
def reset_linux_ssh(cmd, resource_group_name, vm_name, no_wait=False):
vm = get_vm(cmd, resource_group_name, vm_name, 'instanceView')
if not _is_linux_os(vm):
        raise CLIError('Resetting SSH is not supported on a Windows VM')
if no_wait:
return _update_linux_access_extension(cmd, vm, resource_group_name,
{'reset_ssh': True}, no_wait)
poller = _update_linux_access_extension(cmd, vm, resource_group_name,
{'reset_ssh': True})
return ExtensionUpdateLongRunningOperation(cmd.cli_ctx, 'resetting SSH', 'done')(poller)
# endregion
# region VirtualMachineScaleSets
def assign_vmss_identity(cmd, resource_group_name, vmss_name, assign_identity=None, identity_role='Contributor',
identity_role_id=None, identity_scope=None):
VirtualMachineScaleSetIdentity, UpgradeMode, ResourceIdentityType, VirtualMachineScaleSetUpdate = cmd.get_models(
'VirtualMachineScaleSetIdentity', 'UpgradeMode', 'ResourceIdentityType', 'VirtualMachineScaleSetUpdate')
IdentityUserAssignedIdentitiesValue = cmd.get_models('VirtualMachineScaleSetIdentityUserAssignedIdentitiesValue')
from azure.cli.core.commands.arm import assign_identity as assign_identity_helper
client = _compute_client_factory(cmd.cli_ctx)
_, _, external_identities, enable_local_identity = _build_identities_info(assign_identity)
def getter():
return client.virtual_machine_scale_sets.get(resource_group_name, vmss_name)
def setter(vmss, external_identities=external_identities):
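        # Work out the resulting identity type: combining an existing system-assigned identity with
        # new user-assigned ones (or vice versa) yields SystemAssigned,UserAssigned; otherwise keep
        # the single requested kind, defaulting to SystemAssigned.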
if vmss.identity and vmss.identity.type == ResourceIdentityType.system_assigned_user_assigned:
identity_types = ResourceIdentityType.system_assigned_user_assigned
elif vmss.identity and vmss.identity.type == ResourceIdentityType.system_assigned and external_identities:
identity_types = ResourceIdentityType.system_assigned_user_assigned
elif vmss.identity and vmss.identity.type == ResourceIdentityType.user_assigned and enable_local_identity:
identity_types = ResourceIdentityType.system_assigned_user_assigned
elif external_identities and enable_local_identity:
identity_types = ResourceIdentityType.system_assigned_user_assigned
elif external_identities:
identity_types = ResourceIdentityType.user_assigned
else:
identity_types = ResourceIdentityType.system_assigned
vmss.identity = VirtualMachineScaleSetIdentity(type=identity_types)
if external_identities:
vmss.identity.user_assigned_identities = {}
for identity in external_identities:
vmss.identity.user_assigned_identities[identity] = IdentityUserAssignedIdentitiesValue()
vmss_patch = VirtualMachineScaleSetUpdate()
vmss_patch.identity = vmss.identity
poller = client.virtual_machine_scale_sets.update(resource_group_name, vmss_name, vmss_patch)
return LongRunningOperation(cmd.cli_ctx)(poller)
assign_identity_helper(cmd.cli_ctx, getter, setter, identity_role=identity_role_id, identity_scope=identity_scope)
vmss = client.virtual_machine_scale_sets.get(resource_group_name, vmss_name)
if vmss.upgrade_policy.mode == UpgradeMode.manual:
logger.warning("With manual upgrade mode, you will need to run 'az vmss update-instances -g %s -n %s "
"--instance-ids *' to propagate the change", resource_group_name, vmss_name)
return _construct_identity_info(identity_scope, identity_role, vmss.identity.principal_id,
vmss.identity.user_assigned_identities)
# pylint: disable=too-many-locals, too-many-statements
def create_vmss(cmd, vmss_name, resource_group_name, image=None,
disable_overprovision=False, instance_count=2,
location=None, tags=None, upgrade_policy_mode='manual', validate=False,
admin_username=None, admin_password=None, authentication_type=None,
vm_sku=None, no_wait=False,
ssh_dest_key_path=None, ssh_key_value=None, generate_ssh_keys=False,
load_balancer=None, load_balancer_sku=None, application_gateway=None,
app_gateway_subnet_address_prefix=None,
app_gateway_sku='Standard_Large', app_gateway_capacity=10,
backend_pool_name=None, nat_pool_name=None, backend_port=None, health_probe=None,
public_ip_address=None, public_ip_address_allocation=None,
public_ip_address_dns_name=None, accelerated_networking=None,
public_ip_per_vm=False, vm_domain_name=None, dns_servers=None, nsg=None,
os_caching=None, data_caching=None,
storage_container_name='vhds', storage_sku=None,
os_type=None, os_disk_name=None,
use_unmanaged_disk=False, data_disk_sizes_gb=None, disk_info=None,
vnet_name=None, vnet_address_prefix='10.0.0.0/16',
subnet=None, subnet_address_prefix=None,
os_offer=None, os_publisher=None, os_sku=None, os_version=None,
load_balancer_type=None, app_gateway_type=None, vnet_type=None,
public_ip_address_type=None, storage_profile=None,
single_placement_group=None, custom_data=None, secrets=None, platform_fault_domain_count=None,
plan_name=None, plan_product=None, plan_publisher=None, plan_promotion_code=None, license_type=None,
assign_identity=None, identity_scope=None, identity_role='Contributor',
identity_role_id=None, zones=None, priority=None, eviction_policy=None,
application_security_groups=None, ultra_ssd_enabled=None, ephemeral_os_disk=None,
proximity_placement_group=None, aux_subscriptions=None, terminate_notification_time=None,
max_price=None, computer_name_prefix=None, orchestration_mode='ScaleSetVM', scale_in_policy=None,
os_disk_encryption_set=None, data_disk_encryption_sets=None, data_disk_iops=None, data_disk_mbps=None,
automatic_repairs_grace_period=None, specialized=None, os_disk_size_gb=None, encryption_at_host=None,
host_group=None):
from azure.cli.core.commands.client_factory import get_subscription_id
from azure.cli.core.util import random_string, hash_string
from azure.cli.core.commands.arm import ArmTemplateBuilder
from azure.cli.command_modules.vm._template_builder import (StorageProfile, build_vmss_resource,
build_vnet_resource, build_public_ip_resource,
build_load_balancer_resource,
build_vmss_storage_account_pool_resource,
build_application_gateway_resource,
build_msi_role_assignment, build_nsg_resource)
# Build up the ARM template
master_template = ArmTemplateBuilder()
scale_set_vm_str = 'ScaleSetVM'
vm_str = 'VM'
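    # In 'ScaleSetVM' mode a full scale set (network, load balancer and VM profile) is built;
    # in 'VM' mode only a thin scale set shell with a fault domain count is created, for
    # standalone VMs to be placed into.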
if orchestration_mode.lower() == scale_set_vm_str.lower():
from msrestazure.tools import resource_id, is_valid_resource_id
storage_sku = disk_info['os'].get('storageAccountType')
subscription_id = get_subscription_id(cmd.cli_ctx)
if os_disk_encryption_set is not None and not is_valid_resource_id(os_disk_encryption_set):
os_disk_encryption_set = resource_id(
subscription=subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Compute', type='diskEncryptionSets', name=os_disk_encryption_set)
if data_disk_encryption_sets is None:
data_disk_encryption_sets = []
for i, des in enumerate(data_disk_encryption_sets):
if des is not None and not is_valid_resource_id(des):
data_disk_encryption_sets[i] = resource_id(
subscription=subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Compute', type='diskEncryptionSets', name=des)
network_id_template = resource_id(
subscription=subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Network')
vmss_id = resource_id(
subscription=subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Compute', type='virtualMachineScaleSets', name=vmss_name)
scrubbed_name = vmss_name.replace('-', '').lower()[:5]
naming_prefix = '{}{}'.format(scrubbed_name,
hash_string(vmss_id,
length=(9 - len(scrubbed_name)),
force_lower=True))
# determine final defaults and calculated values
tags = tags or {}
os_disk_name = os_disk_name or ('osdisk_{}'.format(hash_string(vmss_id, length=10))
if use_unmanaged_disk else None)
load_balancer = load_balancer or '{}LB'.format(vmss_name)
app_gateway = application_gateway or '{}AG'.format(vmss_name)
backend_pool_name = backend_pool_name or '{}BEPool'.format(load_balancer or application_gateway)
vmss_dependencies = []
# VNET will always be a dependency
if vnet_type == 'new':
vnet_name = vnet_name or '{}VNET'.format(vmss_name)
subnet = subnet or '{}Subnet'.format(vmss_name)
vmss_dependencies.append('Microsoft.Network/virtualNetworks/{}'.format(vnet_name))
vnet = build_vnet_resource(
cmd, vnet_name, location, tags, vnet_address_prefix, subnet, subnet_address_prefix)
if app_gateway_type:
vnet['properties']['subnets'].append({
'name': 'appGwSubnet',
'properties': {
'addressPrefix': app_gateway_subnet_address_prefix
}
})
master_template.add_resource(vnet)
subnet_id = subnet if is_valid_resource_id(subnet) else \
'{}/virtualNetworks/{}/subnets/{}'.format(network_id_template, vnet_name, subnet)
gateway_subnet_id = ('{}/virtualNetworks/{}/subnets/appGwSubnet'.format(network_id_template, vnet_name)
if app_gateway_type == 'new' else None)
# public IP is used by either load balancer/application gateway
public_ip_address_id = None
if public_ip_address:
public_ip_address_id = (public_ip_address if is_valid_resource_id(public_ip_address)
else '{}/publicIPAddresses/{}'.format(network_id_template,
public_ip_address))
def _get_public_ip_address_allocation(value, sku):
IPAllocationMethod = cmd.get_models('IPAllocationMethod', resource_type=ResourceType.MGMT_NETWORK)
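            # Standard SKU public IPs support only static allocation; otherwise default to dynamic.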
if not value:
value = IPAllocationMethod.static.value if (sku and sku.lower() == 'standard') \
else IPAllocationMethod.dynamic.value
return value
# Handle load balancer creation
if load_balancer_type == 'new':
vmss_dependencies.append('Microsoft.Network/loadBalancers/{}'.format(load_balancer))
lb_dependencies = []
if vnet_type == 'new':
lb_dependencies.append('Microsoft.Network/virtualNetworks/{}'.format(vnet_name))
if public_ip_address_type == 'new':
public_ip_address = public_ip_address or '{}PublicIP'.format(load_balancer)
lb_dependencies.append(
'Microsoft.Network/publicIpAddresses/{}'.format(public_ip_address))
master_template.add_resource(build_public_ip_resource(
cmd, public_ip_address, location, tags,
_get_public_ip_address_allocation(public_ip_address_allocation, load_balancer_sku),
public_ip_address_dns_name, load_balancer_sku, zones))
public_ip_address_id = '{}/publicIPAddresses/{}'.format(network_id_template,
public_ip_address)
# calculate default names if not provided
nat_pool_name = nat_pool_name or '{}NatPool'.format(load_balancer)
if not backend_port:
                backend_port = 3389 if os_type.lower() == 'windows' else 22
lb_resource = build_load_balancer_resource(
cmd, load_balancer, location, tags, backend_pool_name, nat_pool_name, backend_port,
'loadBalancerFrontEnd', public_ip_address_id, subnet_id, private_ip_address='',
private_ip_allocation='Dynamic', sku=load_balancer_sku, instance_count=instance_count,
disable_overprovision=disable_overprovision)
lb_resource['dependsOn'] = lb_dependencies
master_template.add_resource(lb_resource)
# Per https://docs.microsoft.com/azure/load-balancer/load-balancer-standard-overview#nsg
if load_balancer_sku and load_balancer_sku.lower() == 'standard' and nsg is None:
nsg_name = '{}NSG'.format(vmss_name)
master_template.add_resource(build_nsg_resource(
None, nsg_name, location, tags, 'rdp' if os_type.lower() == 'windows' else 'ssh'))
nsg = "[resourceId('Microsoft.Network/networkSecurityGroups', '{}')]".format(nsg_name)
vmss_dependencies.append('Microsoft.Network/networkSecurityGroups/{}'.format(nsg_name))
# Or handle application gateway creation
if app_gateway_type == 'new':
vmss_dependencies.append('Microsoft.Network/applicationGateways/{}'.format(app_gateway))
ag_dependencies = []
if vnet_type == 'new':
ag_dependencies.append('Microsoft.Network/virtualNetworks/{}'.format(vnet_name))
if public_ip_address_type == 'new':
public_ip_address = public_ip_address or '{}PublicIP'.format(app_gateway)
ag_dependencies.append(
'Microsoft.Network/publicIpAddresses/{}'.format(public_ip_address))
master_template.add_resource(build_public_ip_resource(
cmd, public_ip_address, location, tags,
_get_public_ip_address_allocation(public_ip_address_allocation, None), public_ip_address_dns_name,
None, zones))
public_ip_address_id = '{}/publicIPAddresses/{}'.format(network_id_template,
public_ip_address)
# calculate default names if not provided
backend_port = backend_port or 80
ag_resource = build_application_gateway_resource(
cmd, app_gateway, location, tags, backend_pool_name, backend_port, 'appGwFrontendIP',
public_ip_address_id, subnet_id, gateway_subnet_id, private_ip_address='',
private_ip_allocation='Dynamic', sku=app_gateway_sku, capacity=app_gateway_capacity)
ag_resource['dependsOn'] = ag_dependencies
master_template.add_variable(
'appGwID',
"[resourceId('Microsoft.Network/applicationGateways', '{}')]".format(app_gateway))
master_template.add_resource(ag_resource)
# create storage accounts if needed for unmanaged disk storage
if storage_profile == StorageProfile.SAPirImage:
master_template.add_resource(build_vmss_storage_account_pool_resource(
cmd, 'storageLoop', location, tags, storage_sku))
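        # VHDs are spread across five storage accounts to avoid per-account throughput limits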
master_template.add_variable('storageAccountNames', [
'{}{}'.format(naming_prefix, x) for x in range(5)
])
master_template.add_variable('vhdContainers', [
"[concat('https://', variables('storageAccountNames')[{}], '.blob.{}/{}')]".format(
x, cmd.cli_ctx.cloud.suffixes.storage_endpoint, storage_container_name) for x in range(5)
])
vmss_dependencies.append('storageLoop')
backend_address_pool_id = None
inbound_nat_pool_id = None
if load_balancer_type or app_gateway_type:
network_balancer = load_balancer if load_balancer_type else app_gateway
balancer_type = 'loadBalancers' if load_balancer_type else 'applicationGateways'
if is_valid_resource_id(network_balancer):
# backend address pool needed by load balancer or app gateway
backend_address_pool_id = '{}/backendAddressPools/{}'.format(network_balancer, backend_pool_name)
if nat_pool_name:
inbound_nat_pool_id = '{}/inboundNatPools/{}'.format(network_balancer, nat_pool_name)
else:
# backend address pool needed by load balancer or app gateway
backend_address_pool_id = '{}/{}/{}/backendAddressPools/{}'.format(
network_id_template, balancer_type, network_balancer, backend_pool_name)
if nat_pool_name:
inbound_nat_pool_id = '{}/{}/{}/inboundNatPools/{}'.format(
network_id_template, balancer_type, network_balancer, nat_pool_name)
if health_probe and not is_valid_resource_id(health_probe):
health_probe = '{}/loadBalancers/{}/probes/{}'.format(network_id_template, load_balancer, health_probe)
ip_config_name = '{}IPConfig'.format(naming_prefix)
nic_name = '{}Nic'.format(naming_prefix)
if custom_data:
custom_data = read_content_if_is_file(custom_data)
if secrets:
secrets = _merge_secrets([validate_file_or_dict(secret) for secret in secrets])
if computer_name_prefix is not None and isinstance(computer_name_prefix, str):
naming_prefix = computer_name_prefix
if os_version and os_version != 'latest':
logger.warning('You are deploying VMSS pinned to a specific image version from Azure Marketplace. '
'Consider using "latest" as the image version.')
vmss_resource = build_vmss_resource(
cmd=cmd, name=vmss_name, naming_prefix=naming_prefix, location=location, tags=tags,
overprovision=not disable_overprovision, upgrade_policy_mode=upgrade_policy_mode, vm_sku=vm_sku,
instance_count=instance_count, ip_config_name=ip_config_name, nic_name=nic_name, subnet_id=subnet_id,
public_ip_per_vm=public_ip_per_vm, vm_domain_name=vm_domain_name, dns_servers=dns_servers, nsg=nsg,
accelerated_networking=accelerated_networking, admin_username=admin_username,
authentication_type=authentication_type, storage_profile=storage_profile, os_disk_name=os_disk_name,
disk_info=disk_info, os_type=os_type, image=image, admin_password=admin_password,
ssh_key_values=ssh_key_value, ssh_key_path=ssh_dest_key_path, os_publisher=os_publisher, os_offer=os_offer,
os_sku=os_sku, os_version=os_version, backend_address_pool_id=backend_address_pool_id,
inbound_nat_pool_id=inbound_nat_pool_id, health_probe=health_probe,
single_placement_group=single_placement_group, platform_fault_domain_count=platform_fault_domain_count,
custom_data=custom_data, secrets=secrets, license_type=license_type, zones=zones, priority=priority,
eviction_policy=eviction_policy, application_security_groups=application_security_groups,
ultra_ssd_enabled=ultra_ssd_enabled, proximity_placement_group=proximity_placement_group,
terminate_notification_time=terminate_notification_time, max_price=max_price,
scale_in_policy=scale_in_policy, os_disk_encryption_set=os_disk_encryption_set,
data_disk_encryption_sets=data_disk_encryption_sets, data_disk_iops=data_disk_iops,
data_disk_mbps=data_disk_mbps, automatic_repairs_grace_period=automatic_repairs_grace_period,
specialized=specialized, os_disk_size_gb=os_disk_size_gb, encryption_at_host=encryption_at_host,
host_group=host_group)
vmss_resource['dependsOn'] = vmss_dependencies
if plan_name:
vmss_resource['plan'] = {
'name': plan_name,
'publisher': plan_publisher,
'product': plan_product,
'promotionCode': plan_promotion_code
}
enable_local_identity = None
if assign_identity is not None:
vmss_resource['identity'], _, _, enable_local_identity = _build_identities_info(
assign_identity)
if identity_scope:
role_assignment_guid = str(_gen_guid())
master_template.add_resource(build_msi_role_assignment(vmss_name, vmss_id, identity_role_id,
role_assignment_guid, identity_scope, False))
elif orchestration_mode.lower() == vm_str.lower():
if platform_fault_domain_count is None:
raise CLIError("usage error: --platform-fault-domain-count is required in VM mode")
vmss_resource = {
'type': 'Microsoft.Compute/virtualMachineScaleSets',
'name': vmss_name,
'location': location,
'tags': tags,
'apiVersion': cmd.get_api_version(ResourceType.MGMT_COMPUTE, operation_group='virtual_machine_scale_sets'),
'properties': {
'singlePlacementGroup': single_placement_group,
'provisioningState': 0,
'platformFaultDomainCount': platform_fault_domain_count
}
}
if zones is not None:
vmss_resource['zones'] = zones
if proximity_placement_group is not None:
vmss_resource['properties']['proximityPlacementGroup'] = {
'id': proximity_placement_group
}
else:
        raise CLIError('usage error: --orchestration-mode (ScaleSetVM | VM)')
master_template.add_resource(vmss_resource)
master_template.add_output('VMSS', vmss_name, 'Microsoft.Compute', 'virtualMachineScaleSets',
output_type='object')
if orchestration_mode.lower() == scale_set_vm_str.lower() and admin_password:
master_template.add_secure_parameter('adminPassword', admin_password)
template = master_template.build()
parameters = master_template.build_parameters()
# deploy ARM template
deployment_name = 'vmss_deploy_' + random_string(32)
client = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
aux_subscriptions=aux_subscriptions).deployments
DeploymentProperties = cmd.get_models('DeploymentProperties', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES)
properties = DeploymentProperties(template=template, parameters=parameters, mode='incremental')
if validate:
from azure.cli.command_modules.vm._vm_utils import log_pprint_template
log_pprint_template(template)
log_pprint_template(parameters)
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
Deployment = cmd.get_models('Deployment', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES)
deployment = Deployment(properties=properties)
if validate:
validation_poller = client.validate(resource_group_name, deployment_name, deployment)
return LongRunningOperation(cmd.cli_ctx)(validation_poller)
# creates the VMSS deployment
deployment_result = DeploymentOutputLongRunningOperation(cmd.cli_ctx)(
sdk_no_wait(no_wait, client.create_or_update, resource_group_name, deployment_name, deployment))
else:
if validate:
return client.validate(resource_group_name, deployment_name, properties)
# creates the VMSS deployment
deployment_result = DeploymentOutputLongRunningOperation(cmd.cli_ctx)(
sdk_no_wait(no_wait, client.create_or_update, resource_group_name, deployment_name, properties))
if orchestration_mode.lower() == scale_set_vm_str.lower() and assign_identity is not None:
vmss_info = get_vmss(cmd, resource_group_name, vmss_name)
if enable_local_identity and not identity_scope:
_show_missing_access_warning(resource_group_name, vmss_name, 'vmss')
deployment_result['vmss']['identity'] = _construct_identity_info(identity_scope, identity_role,
vmss_info.identity.principal_id,
vmss_info.identity.user_assigned_identities)
return deployment_result
def _build_identities_info(identities):
from ._vm_utils import MSI_LOCAL_ID
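    # An empty list or the MSI_LOCAL_ID sentinel requests the system-assigned identity;
    # every other entry is treated as a user-assigned identity resource id. Returns a tuple of
    # (identity info dict, identity type string, user-assigned ids, system identity enabled).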
identities = identities or []
identity_types = []
if not identities or MSI_LOCAL_ID in identities:
identity_types.append('SystemAssigned')
external_identities = [x for x in identities if x != MSI_LOCAL_ID]
if external_identities:
identity_types.append('UserAssigned')
identity_types = ','.join(identity_types)
info = {'type': identity_types}
if external_identities:
info['userAssignedIdentities'] = {e: {} for e in external_identities}
return (info, identity_types, external_identities, 'SystemAssigned' in identity_types)
def deallocate_vmss(cmd, resource_group_name, vm_scale_set_name, instance_ids=None, no_wait=False):
client = _compute_client_factory(cmd.cli_ctx)
if instance_ids and len(instance_ids) == 1:
return sdk_no_wait(no_wait, client.virtual_machine_scale_set_vms.deallocate,
resource_group_name, vm_scale_set_name, instance_ids[0])
return sdk_no_wait(no_wait, client.virtual_machine_scale_sets.deallocate,
resource_group_name, vm_scale_set_name, instance_ids=instance_ids)
def delete_vmss_instances(cmd, resource_group_name, vm_scale_set_name, instance_ids, no_wait=False):
client = _compute_client_factory(cmd.cli_ctx)
if len(instance_ids) == 1:
return sdk_no_wait(no_wait, client.virtual_machine_scale_set_vms.delete,
resource_group_name, vm_scale_set_name, instance_ids[0])
return sdk_no_wait(no_wait, client.virtual_machine_scale_sets.delete_instances,
resource_group_name, vm_scale_set_name, instance_ids)
def get_vmss(cmd, resource_group_name, name, instance_id=None):
client = _compute_client_factory(cmd.cli_ctx)
if instance_id is not None:
return client.virtual_machine_scale_set_vms.get(resource_group_name, name, instance_id)
return client.virtual_machine_scale_sets.get(resource_group_name, name)
def get_vmss_instance_view(cmd, resource_group_name, vm_scale_set_name, instance_id=None):
client = _compute_client_factory(cmd.cli_ctx)
if instance_id:
if instance_id == '*':
return [x.instance_view for x in (client.virtual_machine_scale_set_vms.list(
resource_group_name, vm_scale_set_name, select='instanceView', expand='instanceView'))]
return client.virtual_machine_scale_set_vms.get_instance_view(resource_group_name, vm_scale_set_name,
instance_id)
return client.virtual_machine_scale_sets.get_instance_view(resource_group_name, vm_scale_set_name)
def list_vmss(cmd, resource_group_name=None):
client = _compute_client_factory(cmd.cli_ctx)
if resource_group_name:
return client.virtual_machine_scale_sets.list(resource_group_name)
return client.virtual_machine_scale_sets.list_all()
def list_vmss_instance_connection_info(cmd, resource_group_name, vm_scale_set_name):
from msrestazure.tools import parse_resource_id
client = _compute_client_factory(cmd.cli_ctx)
vmss = client.virtual_machine_scale_sets.get(resource_group_name, vm_scale_set_name)
# find the load balancer
nic_configs = vmss.virtual_machine_profile.network_profile.network_interface_configurations
primary_nic_config = next((n for n in nic_configs if n.primary), None)
if primary_nic_config is None:
        raise CLIError('could not find a primary NIC, which is needed to look up the load balancer')
ip_configs = primary_nic_config.ip_configurations
ip_config = next((ip for ip in ip_configs if ip.load_balancer_inbound_nat_pools), None)
if not ip_config:
raise CLIError('No load balancer exists to retrieve public IP address')
res_id = ip_config.load_balancer_inbound_nat_pools[0].id
lb_info = parse_resource_id(res_id)
lb_name = lb_info['name']
lb_rg = lb_info['resource_group']
# get public ip
network_client = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_NETWORK)
lb = network_client.load_balancers.get(lb_rg, lb_name)
if getattr(lb.frontend_ip_configurations[0], 'public_ip_address', None):
res_id = lb.frontend_ip_configurations[0].public_ip_address.id
public_ip_info = parse_resource_id(res_id)
public_ip_name = public_ip_info['name']
public_ip_rg = public_ip_info['resource_group']
public_ip = network_client.public_ip_addresses.get(public_ip_rg, public_ip_name)
public_ip_address = public_ip.ip_address
        # map each inbound NAT rule to '<public_ip>:<frontend_port>' for its VM instance
instance_addresses = {}
for rule in lb.inbound_nat_rules:
instance_id = parse_resource_id(rule.backend_ip_configuration.id)['child_name_1']
instance_addresses['instance ' + instance_id] = '{}:{}'.format(public_ip_address,
rule.frontend_port)
return instance_addresses
    raise CLIError('The VM scale-set uses an internal load balancer, hence there is no connection information to show')
def list_vmss_instance_public_ips(cmd, resource_group_name, vm_scale_set_name):
result = cf_public_ip_addresses(cmd.cli_ctx).list_virtual_machine_scale_set_public_ip_addresses(
resource_group_name, vm_scale_set_name)
# filter away over-provisioned instances which are deleted after 'create/update' returns
return [r for r in result if r.ip_address]
def reimage_vmss(cmd, resource_group_name, vm_scale_set_name, instance_id=None, no_wait=False):
client = _compute_client_factory(cmd.cli_ctx)
if instance_id:
return sdk_no_wait(no_wait, client.virtual_machine_scale_set_vms.reimage,
resource_group_name, vm_scale_set_name, instance_id)
return sdk_no_wait(no_wait, client.virtual_machine_scale_sets.reimage, resource_group_name, vm_scale_set_name)
def restart_vmss(cmd, resource_group_name, vm_scale_set_name, instance_ids=None, no_wait=False):
client = _compute_client_factory(cmd.cli_ctx)
if instance_ids and len(instance_ids) == 1:
return sdk_no_wait(no_wait, client.virtual_machine_scale_set_vms.restart,
resource_group_name, vm_scale_set_name, instance_ids[0])
return sdk_no_wait(no_wait, client.virtual_machine_scale_sets.restart, resource_group_name, vm_scale_set_name,
instance_ids=instance_ids)
# pylint: disable=inconsistent-return-statements
def scale_vmss(cmd, resource_group_name, vm_scale_set_name, new_capacity, no_wait=False):
VirtualMachineScaleSet = cmd.get_models('VirtualMachineScaleSet')
client = _compute_client_factory(cmd.cli_ctx)
vmss = client.virtual_machine_scale_sets.get(resource_group_name, vm_scale_set_name)
# pylint: disable=no-member
if vmss.sku.capacity == new_capacity:
return
vmss.sku.capacity = new_capacity
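    # send a minimal model (location plus the updated sku); omitted properties are left unchanged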
vmss_new = VirtualMachineScaleSet(location=vmss.location, sku=vmss.sku)
return sdk_no_wait(no_wait, client.virtual_machine_scale_sets.create_or_update,
resource_group_name, vm_scale_set_name, vmss_new)
def start_vmss(cmd, resource_group_name, vm_scale_set_name, instance_ids=None, no_wait=False):
client = _compute_client_factory(cmd.cli_ctx)
if instance_ids and len(instance_ids) == 1:
return sdk_no_wait(no_wait, client.virtual_machine_scale_set_vms.start,
resource_group_name, vm_scale_set_name, instance_ids[0])
return sdk_no_wait(no_wait, client.virtual_machine_scale_sets.start,
resource_group_name, vm_scale_set_name, instance_ids=instance_ids)
def stop_vmss(cmd, resource_group_name, vm_scale_set_name, instance_ids=None, no_wait=False, skip_shutdown=False):
client = _compute_client_factory(cmd.cli_ctx)
if instance_ids and len(instance_ids) == 1:
return sdk_no_wait(no_wait, client.virtual_machine_scale_set_vms.power_off, resource_group_name,
vm_scale_set_name, instance_id=instance_ids[0], skip_shutdown=skip_shutdown)
return sdk_no_wait(no_wait, client.virtual_machine_scale_sets.power_off, resource_group_name, vm_scale_set_name,
instance_ids=instance_ids, skip_shutdown=skip_shutdown)
def update_vmss_instances(cmd, resource_group_name, vm_scale_set_name, instance_ids, no_wait=False):
client = _compute_client_factory(cmd.cli_ctx)
return sdk_no_wait(no_wait, client.virtual_machine_scale_sets.update_instances,
resource_group_name, vm_scale_set_name, instance_ids)
def update_vmss(cmd, resource_group_name, name, license_type=None, no_wait=False, instance_id=None,
protect_from_scale_in=None, protect_from_scale_set_actions=None,
enable_terminate_notification=None, terminate_notification_time=None, ultra_ssd_enabled=None,
scale_in_policy=None, priority=None, max_price=None, proximity_placement_group=None,
enable_automatic_repairs=None, automatic_repairs_grace_period=None, **kwargs):
vmss = kwargs['parameters']
aux_subscriptions = None
# pylint: disable=too-many-boolean-expressions
if vmss and hasattr(vmss, 'virtual_machine_profile') and vmss.virtual_machine_profile and \
vmss.virtual_machine_profile.storage_profile and \
vmss.virtual_machine_profile.storage_profile.image_reference and \
vmss.virtual_machine_profile.storage_profile.image_reference.id:
aux_subscriptions = _parse_aux_subscriptions(vmss.virtual_machine_profile.storage_profile.image_reference.id)
client = _compute_client_factory(cmd.cli_ctx, aux_subscriptions=aux_subscriptions)
VMProtectionPolicy = cmd.get_models('VirtualMachineScaleSetVMProtectionPolicy')
# handle vmss instance update
if instance_id is not None:
if license_type is not None:
vmss.license_type = license_type
if not vmss.protection_policy:
vmss.protection_policy = VMProtectionPolicy()
if protect_from_scale_in is not None:
vmss.protection_policy.protect_from_scale_in = protect_from_scale_in
if protect_from_scale_set_actions is not None:
vmss.protection_policy.protect_from_scale_set_actions = protect_from_scale_set_actions
return sdk_no_wait(no_wait, client.virtual_machine_scale_set_vms.update,
resource_group_name, name, instance_id, **kwargs)
# else handle vmss update
if license_type is not None:
vmss.virtual_machine_profile.license_type = license_type
if enable_terminate_notification is not None or terminate_notification_time is not None:
if vmss.virtual_machine_profile.scheduled_events_profile is None:
ScheduledEventsProfile = cmd.get_models('ScheduledEventsProfile')
vmss.virtual_machine_profile.scheduled_events_profile = ScheduledEventsProfile()
TerminateNotificationProfile = cmd.get_models('TerminateNotificationProfile')
vmss.virtual_machine_profile.scheduled_events_profile.terminate_notification_profile =\
TerminateNotificationProfile(not_before_timeout=terminate_notification_time,
enable=enable_terminate_notification)
if enable_automatic_repairs is not None or automatic_repairs_grace_period is not None:
AutomaticRepairsPolicy = cmd.get_models('AutomaticRepairsPolicy')
        vmss.automatic_repairs_policy = \
            AutomaticRepairsPolicy(enabled=True, grace_period=automatic_repairs_grace_period)
if ultra_ssd_enabled is not None:
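        # from API version 2019-03-01 the capability sits on the scale set itself; older API
        # versions expose it on the virtual machine profile instead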
if cmd.supported_api_version(min_api='2019-03-01', operation_group='virtual_machine_scale_sets'):
if vmss.additional_capabilities is None:
AdditionalCapabilities = cmd.get_models('AdditionalCapabilities')
vmss.additional_capabilities = AdditionalCapabilities(ultra_ssd_enabled=ultra_ssd_enabled)
else:
vmss.additional_capabilities.ultra_ssd_enabled = ultra_ssd_enabled
else:
if vmss.virtual_machine_profile.additional_capabilities is None:
AdditionalCapabilities = cmd.get_models('AdditionalCapabilities')
vmss.virtual_machine_profile.additional_capabilities = AdditionalCapabilities(
ultra_ssd_enabled=ultra_ssd_enabled)
else:
vmss.virtual_machine_profile.additional_capabilities.ultra_ssd_enabled = ultra_ssd_enabled
if scale_in_policy is not None:
ScaleInPolicy = cmd.get_models('ScaleInPolicy')
vmss.scale_in_policy = ScaleInPolicy(rules=scale_in_policy)
if priority is not None:
vmss.virtual_machine_profile.priority = priority
if max_price is not None:
if vmss.virtual_machine_profile.billing_profile is None:
BillingProfile = cmd.get_models('BillingProfile')
vmss.virtual_machine_profile.billing_profile = BillingProfile(max_price=max_price)
else:
vmss.virtual_machine_profile.billing_profile.max_price = max_price
if proximity_placement_group is not None:
vmss.proximity_placement_group = {'id': proximity_placement_group}
return sdk_no_wait(no_wait, client.virtual_machine_scale_sets.create_or_update,
resource_group_name, name, **kwargs)
# endregion
# region VirtualMachineScaleSets Diagnostics
def set_vmss_diagnostics_extension(
cmd, resource_group_name, vmss_name, settings, protected_settings=None, version=None,
no_auto_upgrade=False):
client = _compute_client_factory(cmd.cli_ctx)
vmss = client.virtual_machine_scale_sets.get(resource_group_name, vmss_name)
# pylint: disable=no-member
is_linux_os = _is_linux_os(vmss.virtual_machine_profile)
vm_extension_name = _LINUX_DIAG_EXT if is_linux_os else _WINDOWS_DIAG_EXT
if is_linux_os and vmss.virtual_machine_profile.extension_profile: # check incompatibles
exts = vmss.virtual_machine_profile.extension_profile.extensions or []
major_ver = extension_mappings[_LINUX_DIAG_EXT]['version'].split('.')[0]
        # For VMSS, we don't do auto-removal like we do for VMs because there is no reliable API
        # to wait for the removal to complete before we can install the newer one
if next((e for e in exts if e.name == _LINUX_DIAG_EXT and
not e.type_handler_version.startswith(major_ver + '.')), None):
delete_cmd = 'az vmss extension delete -g {} --vmss-name {} -n {}'.format(
resource_group_name, vmss_name, vm_extension_name)
raise CLIError("There is an incompatible version of diagnostics extension installed. "
"Please remove it by running '{}', and retry. 'az vmss update-instances'"
" might be needed if with manual upgrade policy".format(delete_cmd))
poller = set_vmss_extension(cmd, resource_group_name, vmss_name, vm_extension_name,
extension_mappings[vm_extension_name]['publisher'],
version or extension_mappings[vm_extension_name]['version'],
settings,
protected_settings,
no_auto_upgrade)
result = LongRunningOperation(cmd.cli_ctx)(poller)
UpgradeMode = cmd.get_models('UpgradeMode')
if vmss.upgrade_policy.mode == UpgradeMode.manual:
poller2 = update_vmss_instances(cmd, resource_group_name, vmss_name, ['*'])
LongRunningOperation(cmd.cli_ctx)(poller2)
return result
# endregion
# region VirtualMachineScaleSets Disks (Managed)
def attach_managed_data_disk_to_vmss(cmd, resource_group_name, vmss_name, size_gb=None, instance_id=None, lun=None,
caching=None, disk=None, sku=None):
def _init_data_disk(storage_profile, lun, existing_disk=None):
data_disks = storage_profile.data_disks or []
if lun is None:
lun = _get_disk_lun(data_disks)
if existing_disk is None:
data_disk = DataDisk(lun=lun, create_option=DiskCreateOptionTypes.empty, disk_size_gb=size_gb,
caching=caching, managed_disk=ManagedDiskParameters(storage_account_type=sku))
else:
data_disk = DataDisk(lun=lun, create_option=DiskCreateOptionTypes.attach, caching=caching,
managed_disk=ManagedDiskParameters(id=existing_disk, storage_account_type=sku))
data_disks.append(data_disk)
storage_profile.data_disks = data_disks
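    # The scale-set profile uses the VirtualMachineScaleSetDataDisk model (new empty disks only);
    # attaching an existing managed disk is a per-instance operation using the plain DataDisk model.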
DiskCreateOptionTypes, ManagedDiskParameters = cmd.get_models(
'DiskCreateOptionTypes', 'ManagedDiskParameters')
if disk is None:
DataDisk = cmd.get_models('VirtualMachineScaleSetDataDisk')
else:
DataDisk = cmd.get_models('DataDisk')
client = _compute_client_factory(cmd.cli_ctx)
if instance_id is None:
vmss = client.virtual_machine_scale_sets.get(resource_group_name, vmss_name)
# pylint: disable=no-member
_init_data_disk(vmss.virtual_machine_profile.storage_profile, lun)
return client.virtual_machine_scale_sets.create_or_update(resource_group_name, vmss_name, vmss)
vmss_vm = client.virtual_machine_scale_set_vms.get(resource_group_name, vmss_name, instance_id)
_init_data_disk(vmss_vm.storage_profile, lun, disk)
return client.virtual_machine_scale_set_vms.update(resource_group_name, vmss_name, instance_id, vmss_vm)
def detach_disk_from_vmss(cmd, resource_group_name, vmss_name, lun, instance_id=None):
client = _compute_client_factory(cmd.cli_ctx)
if instance_id is None:
vmss = client.virtual_machine_scale_sets.get(resource_group_name, vmss_name)
# pylint: disable=no-member
data_disks = vmss.virtual_machine_profile.storage_profile.data_disks
else:
vmss_vm = client.virtual_machine_scale_set_vms.get(resource_group_name, vmss_name, instance_id)
data_disks = vmss_vm.storage_profile.data_disks
if not data_disks:
raise CLIError("Data disk doesn't exist")
leftovers = [d for d in data_disks if d.lun != lun]
if len(data_disks) == len(leftovers):
raise CLIError("Could not find the data disk with lun '{}'".format(lun))
if instance_id is None:
vmss.virtual_machine_profile.storage_profile.data_disks = leftovers
return client.virtual_machine_scale_sets.create_or_update(resource_group_name, vmss_name, vmss)
vmss_vm.storage_profile.data_disks = leftovers
return client.virtual_machine_scale_set_vms.update(resource_group_name, vmss_name, instance_id, vmss_vm)
# endregion
# region VirtualMachineScaleSets Extensions
def delete_vmss_extension(cmd, resource_group_name, vmss_name, extension_name):
client = _compute_client_factory(cmd.cli_ctx)
vmss = client.virtual_machine_scale_sets.get(resource_group_name, vmss_name)
# pylint: disable=no-member
if not vmss.virtual_machine_profile.extension_profile:
raise CLIError('Scale set has no extensions to delete')
keep_list = [e for e in vmss.virtual_machine_profile.extension_profile.extensions
if e.name != extension_name]
if len(keep_list) == len(vmss.virtual_machine_profile.extension_profile.extensions):
raise CLIError('Extension {} not found'.format(extension_name))
vmss.virtual_machine_profile.extension_profile.extensions = keep_list
return client.virtual_machine_scale_sets.create_or_update(resource_group_name, vmss_name, vmss)
# pylint: disable=inconsistent-return-statements
def get_vmss_extension(cmd, resource_group_name, vmss_name, extension_name):
client = _compute_client_factory(cmd.cli_ctx)
vmss = client.virtual_machine_scale_sets.get(resource_group_name, vmss_name)
# pylint: disable=no-member
if not vmss.virtual_machine_profile.extension_profile:
return
return next((e for e in vmss.virtual_machine_profile.extension_profile.extensions
if e.name == extension_name), None)
def list_vmss_extensions(cmd, resource_group_name, vmss_name):
client = _compute_client_factory(cmd.cli_ctx)
vmss = client.virtual_machine_scale_sets.get(resource_group_name, vmss_name)
# pylint: disable=no-member
if vmss.virtual_machine_profile and vmss.virtual_machine_profile.extension_profile:
return vmss.virtual_machine_profile.extension_profile.extensions
return None
def set_vmss_extension(cmd, resource_group_name, vmss_name, extension_name, publisher, version=None,
settings=None, protected_settings=None, no_auto_upgrade=False, force_update=False,
no_wait=False, extension_instance_name=None, provision_after_extensions=None):
if not extension_instance_name:
extension_instance_name = extension_name
client = _compute_client_factory(cmd.cli_ctx)
vmss = client.virtual_machine_scale_sets.get(resource_group_name, vmss_name)
VirtualMachineScaleSetExtension, VirtualMachineScaleSetExtensionProfile = cmd.get_models(
'VirtualMachineScaleSetExtension', 'VirtualMachineScaleSetExtensionProfile')
# pylint: disable=no-member
version = _normalize_extension_version(cmd.cli_ctx, publisher, extension_name, version, vmss.location)
extension_profile = vmss.virtual_machine_profile.extension_profile
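    # drop any existing extension with the same type and publisher so the new definition replaces it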
if extension_profile:
extensions = extension_profile.extensions
if extensions:
extension_profile.extensions = [x for x in extensions if
x.type1.lower() != extension_name.lower() or x.publisher.lower() != publisher.lower()] # pylint: disable=line-too-long
ext = VirtualMachineScaleSetExtension(name=extension_instance_name,
publisher=publisher,
type1=extension_name,
protected_settings=protected_settings,
type_handler_version=version,
settings=settings,
auto_upgrade_minor_version=(not no_auto_upgrade),
provision_after_extensions=provision_after_extensions)
if force_update:
ext.force_update_tag = str(_gen_guid())
if not vmss.virtual_machine_profile.extension_profile:
vmss.virtual_machine_profile.extension_profile = VirtualMachineScaleSetExtensionProfile(extensions=[])
vmss.virtual_machine_profile.extension_profile.extensions.append(ext)
return sdk_no_wait(no_wait, client.virtual_machine_scale_sets.create_or_update,
resource_group_name, vmss_name, vmss)
def set_orchestration_service_state(cmd, resource_group_name, vm_scale_set_name, service_name, action, no_wait=False):
    # Currently service_name has only one available value, "AutomaticRepairs". The SDK does not
    # accept service_name; instead, it assigns "AutomaticRepairs" in its own logic. As more service
    # names may be supported later, we define service_name as a required parameter here to avoid
    # introducing a breaking change in the future.
client = _compute_client_factory(cmd.cli_ctx)
return sdk_no_wait(no_wait, client.virtual_machine_scale_sets.set_orchestration_service_state,
resource_group_name, vm_scale_set_name, action)
# endregion
# region VirtualMachineScaleSets RunCommand
def vmss_run_command_invoke(cmd, resource_group_name, vmss_name, command_id, instance_id, scripts=None, parameters=None): # pylint: disable=line-too-long
return run_command_invoke(cmd, resource_group_name, vmss_name, command_id, scripts, parameters, instance_id)
# endregion
# region VirtualMachineScaleSets Identity
def remove_vmss_identity(cmd, resource_group_name, vmss_name, identities=None):
client = _compute_client_factory(cmd.cli_ctx)
def _get_vmss(_, resource_group_name, vmss_name):
return client.virtual_machine_scale_sets.get(resource_group_name, vmss_name)
def _set_vmss(resource_group_name, name, vmss_instance):
VirtualMachineScaleSetUpdate = cmd.get_models('VirtualMachineScaleSetUpdate',
operation_group='virtual_machine_scale_sets')
vmss_update = VirtualMachineScaleSetUpdate(identity=vmss_instance.identity)
return client.virtual_machine_scale_sets.update(resource_group_name, vmss_name, vmss_update)
if identities is None:
from ._vm_utils import MSI_LOCAL_ID
identities = [MSI_LOCAL_ID]
return _remove_identities(cmd, resource_group_name, vmss_name, identities,
_get_vmss,
_set_vmss)
# endregion
# region image galleries
def list_image_galleries(cmd, resource_group_name=None):
client = _compute_client_factory(cmd.cli_ctx)
if resource_group_name:
return client.galleries.list_by_resource_group(resource_group_name)
return client.galleries.list()
def create_image_gallery(cmd, resource_group_name, gallery_name, description=None,
location=None, no_wait=False, tags=None):
client = _compute_client_factory(cmd.cli_ctx)
Gallery = cmd.get_models('Gallery')
location = location or _get_resource_group_location(cmd.cli_ctx, resource_group_name)
gallery = Gallery(description=description, location=location, tags=(tags or {}))
return sdk_no_wait(no_wait, client.galleries.create_or_update, resource_group_name, gallery_name, gallery)
def create_gallery_image(cmd, resource_group_name, gallery_name, gallery_image_name, os_type, publisher, offer, sku,
os_state='Generalized', end_of_life_date=None, privacy_statement_uri=None,
release_note_uri=None, eula=None, description=None, location=None,
minimum_cpu_core=None, maximum_cpu_core=None, minimum_memory=None, maximum_memory=None,
disallowed_disk_types=None, plan_name=None, plan_publisher=None, plan_product=None, tags=None,
hyper_v_generation='V1'):
# pylint: disable=line-too-long
GalleryImage, GalleryImageIdentifier, RecommendedMachineConfiguration, ResourceRange, Disallowed, ImagePurchasePlan = cmd.get_models(
'GalleryImage', 'GalleryImageIdentifier', 'RecommendedMachineConfiguration', 'ResourceRange', 'Disallowed', 'ImagePurchasePlan')
client = _compute_client_factory(cmd.cli_ctx)
location = location or _get_resource_group_location(cmd.cli_ctx, resource_group_name)
end_of_life_date = fix_gallery_image_date_info(end_of_life_date)
recommendation = None
if any([minimum_cpu_core, maximum_cpu_core, minimum_memory, maximum_memory]):
cpu_recommendation, memory_recommendation = None, None
if any([minimum_cpu_core, maximum_cpu_core]):
cpu_recommendation = ResourceRange(min=minimum_cpu_core, max=maximum_cpu_core)
if any([minimum_memory, maximum_memory]):
memory_recommendation = ResourceRange(min=minimum_memory, max=maximum_memory)
recommendation = RecommendedMachineConfiguration(v_cp_us=cpu_recommendation, memory=memory_recommendation)
purchase_plan = None
if any([plan_name, plan_publisher, plan_product]):
purchase_plan = ImagePurchasePlan(name=plan_name, publisher=plan_publisher, product=plan_product)
image = GalleryImage(identifier=GalleryImageIdentifier(publisher=publisher, offer=offer, sku=sku),
os_type=os_type, os_state=os_state, end_of_life_date=end_of_life_date,
recommended=recommendation, disallowed=Disallowed(disk_types=disallowed_disk_types),
purchase_plan=purchase_plan, location=location, eula=eula, tags=(tags or {}),
hyper_vgeneration=hyper_v_generation)
return client.gallery_images.create_or_update(resource_group_name, gallery_name, gallery_image_name, image)
def create_image_version(cmd, resource_group_name, gallery_name, gallery_image_name, gallery_image_version,
location=None, target_regions=None, storage_account_type=None,
end_of_life_date=None, exclude_from_latest=None, replica_count=None, tags=None,
os_snapshot=None, data_snapshots=None, managed_image=None, data_snapshot_luns=None,
target_region_encryption=None):
from msrestazure.tools import resource_id, is_valid_resource_id
ImageVersionPublishingProfile, GalleryArtifactSource, ManagedArtifact, ImageVersion, TargetRegion = cmd.get_models(
'GalleryImageVersionPublishingProfile', 'GalleryArtifactSource', 'ManagedArtifact', 'GalleryImageVersion',
'TargetRegion')
aux_subscriptions = None
if managed_image:
aux_subscriptions = _parse_aux_subscriptions(managed_image)
client = _compute_client_factory(cmd.cli_ctx, aux_subscriptions=aux_subscriptions)
location = location or _get_resource_group_location(cmd.cli_ctx, resource_group_name)
end_of_life_date = fix_gallery_image_date_info(end_of_life_date)
if managed_image and not is_valid_resource_id(managed_image):
managed_image = resource_id(subscription=client.config.subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Compute', type='images', name=managed_image)
if os_snapshot and not is_valid_resource_id(os_snapshot):
os_snapshot = resource_id(subscription=client.config.subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Compute', type='snapshots', name=os_snapshot)
if data_snapshots:
for i, s in enumerate(data_snapshots):
if not is_valid_resource_id(data_snapshots[i]):
data_snapshots[i] = resource_id(
subscription=client.config.subscription_id, resource_group=resource_group_name,
namespace='Microsoft.Compute', type='snapshots', name=s)
source = GalleryArtifactSource(managed_image=ManagedArtifact(id=managed_image))
profile = ImageVersionPublishingProfile(exclude_from_latest=exclude_from_latest, end_of_life_date=end_of_life_date,
target_regions=target_regions or [TargetRegion(name=location)],
source=source, replica_count=replica_count,
storage_account_type=storage_account_type)
if cmd.supported_api_version(min_api='2019-07-01', operation_group='gallery_image_versions'):
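        # Newer API versions (2019-07-01+) describe the image source through the storage profile
        # (managed image and/or OS/data snapshots) instead of the publishing profile's source.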
if managed_image is None and os_snapshot is None:
raise CLIError('usage error: Please provide --managed-image or --os-snapshot')
GalleryImageVersionStorageProfile = cmd.get_models('GalleryImageVersionStorageProfile')
GalleryArtifactVersionSource = cmd.get_models('GalleryArtifactVersionSource')
GalleryOSDiskImage = cmd.get_models('GalleryOSDiskImage')
GalleryDataDiskImage = cmd.get_models('GalleryDataDiskImage')
source = os_disk_image = data_disk_images = None
if managed_image is not None:
source = GalleryArtifactVersionSource(id=managed_image)
if os_snapshot is not None:
os_disk_image = GalleryOSDiskImage(source=GalleryArtifactVersionSource(id=os_snapshot))
if data_snapshot_luns and not data_snapshots:
raise CLIError('usage error: --data-snapshot-luns must be used together with --data-snapshots')
if data_snapshots:
if data_snapshot_luns and len(data_snapshots) != len(data_snapshot_luns):
raise CLIError('usage error: Length of --data-snapshots and --data-snapshot-luns should be equal.')
if not data_snapshot_luns:
                data_snapshot_luns = list(range(len(data_snapshots)))
data_disk_images = []
for i, s in enumerate(data_snapshots):
data_disk_images.append(GalleryDataDiskImage(source=GalleryArtifactVersionSource(id=s),
lun=data_snapshot_luns[i]))
storage_profile = GalleryImageVersionStorageProfile(source=source, os_disk_image=os_disk_image,
data_disk_images=data_disk_images)
image_version = ImageVersion(publishing_profile=profile, location=location, tags=(tags or {}),
storage_profile=storage_profile)
else:
if managed_image is None:
raise CLIError('usage error: Please provide --managed-image')
image_version = ImageVersion(publishing_profile=profile, location=location, tags=(tags or {}))
return client.gallery_image_versions.create_or_update(resource_group_name=resource_group_name,
gallery_name=gallery_name,
gallery_image_name=gallery_image_name,
gallery_image_version_name=gallery_image_version,
gallery_image_version=image_version)
def fix_gallery_image_date_info(date_info):
    # If only a date is provided, append a time so the setting can be accepted by the service end
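    # e.g. '2030-01-01' becomes '2030-01-01T12:59:59Z'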
if date_info and 't' not in date_info.lower():
date_info += 'T12:59:59Z'
return date_info
def update_image_version(cmd, resource_group_name, gallery_name, gallery_image_name, gallery_image_version_name,
target_regions=None, replica_count=None, no_wait=False, **kwargs):
image_version = kwargs['gallery_image_version']
if target_regions:
image_version.publishing_profile.target_regions = target_regions
if replica_count:
image_version.publishing_profile.replica_count = replica_count
if image_version.storage_profile.source is not None:
image_version.storage_profile.os_disk_image = image_version.storage_profile.data_disk_images = None
aux_subscriptions = None
if image_version.storage_profile and image_version.storage_profile.source and \
image_version.storage_profile.source.id:
aux_subscriptions = _parse_aux_subscriptions(image_version.storage_profile.source.id)
client = _compute_client_factory(cmd.cli_ctx, aux_subscriptions=aux_subscriptions)
return sdk_no_wait(no_wait, client.gallery_image_versions.create_or_update, resource_group_name, gallery_name,
gallery_image_name, gallery_image_version_name, **kwargs)
# endregion
# region proximity placement groups
def create_proximity_placement_group(cmd, client, proximity_placement_group_name, resource_group_name,
ppg_type=None, location=None, tags=None):
from knack.arguments import CaseInsensitiveList
location = location or _get_resource_group_location(cmd.cli_ctx, resource_group_name)
ProximityPlacementGroup, PPGType = cmd.get_models('ProximityPlacementGroup', 'ProximityPlacementGroupType')
choices = CaseInsensitiveList([x.value for x in PPGType])
if ppg_type and ppg_type not in choices:
logger.info("Valid choices: %s", str(choices))
raise CLIError("Usage error: invalid value for --type/-t")
ppg_params = ProximityPlacementGroup(name=proximity_placement_group_name, proximity_placement_group_type=ppg_type,
location=location, tags=(tags or {}))
return client.create_or_update(resource_group_name=resource_group_name,
proximity_placement_group_name=proximity_placement_group_name, parameters=ppg_params)
def list_proximity_placement_groups(client, resource_group_name=None):
if resource_group_name:
return client.list_by_resource_group(resource_group_name=resource_group_name)
return client.list_by_subscription()
# endregion
# region dedicated host
def create_dedicated_host_group(cmd, client, host_group_name, resource_group_name, platform_fault_domain_count=None,
automatic_placement=None, location=None, zones=None, tags=None):
DedicatedHostGroup = cmd.get_models('DedicatedHostGroup')
location = location or _get_resource_group_location(cmd.cli_ctx, resource_group_name)
host_group_params = DedicatedHostGroup(location=location, platform_fault_domain_count=platform_fault_domain_count,
support_automatic_placement=automatic_placement, zones=zones, tags=tags)
return client.create_or_update(resource_group_name, host_group_name, parameters=host_group_params)
def list_dedicated_host_groups(cmd, client, resource_group_name=None):
if resource_group_name:
return client.list_by_resource_group(resource_group_name)
return client.list_by_subscription()
def get_dedicated_host_group_instance_view(client, host_group_name, resource_group_name):
return client.get(resource_group_name, host_group_name, expand="instanceView")
def create_dedicated_host(cmd, client, host_group_name, host_name, resource_group_name, sku, platform_fault_domain=None,
auto_replace_on_failure=None, license_type=None, location=None, tags=None):
DedicatedHostType = cmd.get_models('DedicatedHost')
SkuType = cmd.get_models('Sku')
location = location or _get_resource_group_location(cmd.cli_ctx, resource_group_name)
sku = SkuType(name=sku)
host_params = DedicatedHostType(location=location, platform_fault_domain=platform_fault_domain,
auto_replace_on_failure=auto_replace_on_failure, license_type=license_type,
sku=sku, tags=tags)
return client.create_or_update(resource_group_name, host_group_name, host_name, parameters=host_params)
def get_dedicated_host_instance_view(client, host_group_name, host_name, resource_group_name):
return client.get(resource_group_name, host_group_name, host_name, expand="instanceView")
# endregion
# region VMMonitor
def _get_log_analytics_client(cmd):
from ._client_factory import cf_log_analytics
from azure.cli.core.commands.client_factory import get_subscription_id
subscription_id = get_subscription_id(cmd.cli_ctx)
return cf_log_analytics(cmd.cli_ctx, subscription_id)
def _prepare_workspace(cmd, resource_group_name, workspace):
from msrestazure.tools import is_valid_resource_id
from msrestazure.azure_exceptions import CloudError
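    # Accept either a full workspace resource id or a bare workspace name; for a name, look the
    # workspace up in the resource group and create a default one (PerGB2018, 30-day retention)
    # if it does not exist yet.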
workspace_id = None
if not is_valid_resource_id(workspace):
workspace_name = workspace
log_client = _get_log_analytics_client(cmd)
workspace_result = None
try:
workspace_result = log_client.workspaces.get(resource_group_name, workspace_name)
except CloudError:
from azure.mgmt.loganalytics.models import Workspace, WorkspaceSku, WorkspaceSkuNameEnum
sku = WorkspaceSku(name=WorkspaceSkuNameEnum.per_gb2018.value)
retention_time = 30 # default value
location = _get_resource_group_location(cmd.cli_ctx, resource_group_name)
workspace_instance = Workspace(location=location,
sku=sku,
retention_in_days=retention_time)
workspace_result = LongRunningOperation(cmd.cli_ctx)(log_client.workspaces.create_or_update(
resource_group_name,
workspace_name,
workspace_instance))
workspace_id = workspace_result.id
else:
workspace_id = workspace
return workspace_id
def _set_data_source_for_workspace(cmd, os_type, resource_group_name, workspace_name):
from ._client_factory import cf_log_analytics_data_sources
from azure.cli.core.commands.client_factory import get_subscription_id
from azure.mgmt.loganalytics.models import DataSource
from msrestazure.azure_exceptions import CloudError
subscription_id = get_subscription_id(cmd.cli_ctx)
data_sources_client = cf_log_analytics_data_sources(cmd.cli_ctx, subscription_id)
data_source_name_template = "DataSource_{}_{}"
default_data_sources = None
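    # pick the pre-defined default data-source settings that match the OS type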
if os_type.lower() == 'linux':
from ._workspace_data_source_settings import default_linux_data_sources
default_data_sources = default_linux_data_sources
elif os_type.lower() == 'windows':
from ._workspace_data_source_settings import default_windows_data_sources
default_data_sources = default_windows_data_sources
if default_data_sources is not None:
for data_source_kind, data_source_settings in default_data_sources.items():
for data_source_setting in data_source_settings:
data_source = DataSource(kind=data_source_kind,
properties=data_source_setting)
data_source_name = data_source_name_template.format(data_source_kind, _gen_guid())
try:
data_sources_client.create_or_update(resource_group_name,
workspace_name,
data_source_name,
data_source)
except CloudError as ex:
logger.warning("Failed to set data source due to %s. "
"Skip this step and need manual work later.", ex.message)
else:
logger.warning("Unsupported OS type. Skip the default settings for log analytics workspace.")
def execute_query_for_vm(cmd, client, resource_group_name, vm_name, analytics_query, timespan=None):
"""Executes a query against the Log Analytics workspace linked with a vm."""
from azure.loganalytics.models import QueryBody
vm = get_vm(cmd, resource_group_name, vm_name)
workspace = None
extension_resources = vm.resources or []
for resource in extension_resources:
if resource.name == "MicrosoftMonitoringAgent" or resource.name == "OmsAgentForLinux":
workspace = resource.settings.get('workspaceId', None)
if workspace is None:
        raise CLIError('Cannot find the corresponding log analytics workspace. '
                       'Please check the status of the log analytics workspace.')
return client.query(workspace, QueryBody(query=analytics_query, timespan=timespan))
def _set_log_analytics_workspace_extension(cmd, resource_group_name, vm, vm_name, workspace_name):
is_linux_os = _is_linux_os(vm)
vm_extension_name = _LINUX_OMS_AGENT_EXT if is_linux_os else _WINDOWS_OMS_AGENT_EXT
log_client = _get_log_analytics_client(cmd)
customer_id = log_client.workspaces.get(resource_group_name, workspace_name).customer_id
settings = {
'workspaceId': customer_id,
'stopOnMultipleConnections': 'true'
}
primary_shared_key = log_client.shared_keys.get_shared_keys(resource_group_name, workspace_name).primary_shared_key
protected_settings = {
'workspaceKey': primary_shared_key,
}
return set_extension(cmd, resource_group_name, vm_name, vm_extension_name,
extension_mappings[vm_extension_name]['publisher'],
extension_mappings[vm_extension_name]['version'],
settings,
protected_settings)
# endregion
# disk encryption set
def create_disk_encryption_set(cmd, client, resource_group_name, disk_encryption_set_name,
key_url, source_vault, encryption_type=None, location=None, tags=None, no_wait=False):
from msrestazure.tools import resource_id, is_valid_resource_id
DiskEncryptionSet, EncryptionSetIdentity, KeyVaultAndKeyReference, SourceVault = cmd.get_models(
'DiskEncryptionSet', 'EncryptionSetIdentity', 'KeyVaultAndKeyReference', 'SourceVault')
encryption_set_identity = EncryptionSetIdentity(type='SystemAssigned')
if not is_valid_resource_id(source_vault):
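        # A bare vault name was given: expand it into a full Key Vault resource ID.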
source_vault = resource_id(subscription=client.config.subscription_id, resource_group=resource_group_name,
namespace='Microsoft.KeyVault', type='vaults', name=source_vault)
source_vault = SourceVault(id=source_vault)
keyVault_and_key_reference = KeyVaultAndKeyReference(source_vault=source_vault, key_url=key_url)
disk_encryption_set = DiskEncryptionSet(location=location, tags=tags, identity=encryption_set_identity,
active_key=keyVault_and_key_reference, encryption_type=encryption_type)
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, disk_encryption_set_name,
disk_encryption_set)
def list_disk_encryption_sets(cmd, client, resource_group_name=None):
if resource_group_name:
return client.list_by_resource_group(resource_group_name)
return client.list()
def update_disk_encryption_set(instance, client, resource_group_name, key_url=None, source_vault=None):
from msrestazure.tools import resource_id, is_valid_resource_id
if not is_valid_resource_id(source_vault):
source_vault = resource_id(subscription=client.config.subscription_id, resource_group=resource_group_name,
namespace='Microsoft.KeyVault', type='vaults', name=source_vault)
if key_url:
instance.active_key.key_url = key_url
if source_vault:
instance.active_key.source_vault.id = source_vault
return instance
# endregion
# region Disk Access
def create_disk_access(cmd, client, resource_group_name, disk_access_name, location=None, tags=None, no_wait=False):
return sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, disk_access_name,
location=location, tags=tags)
def list_disk_accesses(cmd, client, resource_group_name=None):
if resource_group_name:
return client.list_by_resource_group(resource_group_name)
return client.list()
def set_disk_access(cmd, client, parameters, resource_group_name, disk_access_name, tags=None, no_wait=False):
location = _get_resource_group_location(cmd.cli_ctx, resource_group_name)
return sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, disk_access_name,
location=location, tags=tags)
# endregion
|
from msrestazure.tools import resource_id, is_valid_resource_id
from azure.cli.core.commands.client_factory import get_subscription_id
if size_gb is not None:
instance.disk_size_gb = size_gb
if sku is not None:
_set_sku(cmd, instance, sku)
if disk_iops_read_write is not None:
instance.disk_iops_read_write = disk_iops_read_write
if disk_mbps_read_write is not None:
instance.disk_mbps_read_write = disk_mbps_read_write
if disk_encryption_set is not None:
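        # A disk encryption set requires customer-managed-key encryption on the disk.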
if instance.encryption.type != 'EncryptionAtRestWithCustomerKey' and \
encryption_type != 'EncryptionAtRestWithCustomerKey':
raise CLIError('usage error: Please set --encryption-type to EncryptionAtRestWithCustomerKey')
if not is_valid_resource_id(disk_encryption_set):
disk_encryption_set = resource_id(
subscription=get_subscription_id(cmd.cli_ctx), resource_group=resource_group_name,
namespace='Microsoft.Compute', type='diskEncryptionSets', name=disk_encryption_set)
instance.encryption.disk_encryption_set_id = disk_encryption_set
if encryption_type is not None:
instance.encryption.type = encryption_type
if network_access_policy is not None:
instance.network_access_policy = network_access_policy
if disk_access is not None and not is_valid_resource_id(disk_access):
disk_access = resource_id(
subscription=get_subscription_id(cmd.cli_ctx), resource_group=resource_group_name,
namespace='Microsoft.Compute', type='diskAccesses', name=disk_access)
instance.disk_access_id = disk_access
return instance
|
day_05.rs
|
use crate::intcode::intcode::Intcode;
pub fn part_1(program: Vec<isize>) -> String {
let mut intcode = Intcode::new();
intcode.load_program(program);
intcode.set_input(1); // Air Conditioners - System ID 1
intcode.run();
format!("Part 1: {}", intcode.read_output(0)).to_string()
}
pub fn part_2(program: Vec<isize>) -> String
|
{
let mut intcode = Intcode::new();
intcode.load_program(program);
intcode.set_input(5); // Thermal Radiators - System ID 5
intcode.run();
format!("Part 2: {}", intcode.read_output(0)).to_string()
}
|
|
beacon.py
|
# -*- coding: utf-8 -*-
'''
Management of the Salt beacons
==============================
.. versionadded:: 2015.8.0
.. code-block:: yaml
ps:
beacon.present:
- save: True
- enable: False
- services:
salt-master: running
apache2: stopped
sh:
beacon.present: []
load:
beacon.present:
- averages:
1m:
- 0.0
- 2.0
5m:
- 0.0
- 1.5
15m:
- 0.1
- 1.0
'''
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt libs
from salt.ext import six
import logging
log = logging.getLogger(__name__)
def present(name,
save=False,
**kwargs):
'''
Ensure beacon is configured with the included beacon data.
name
        The name of the beacon to ensure is configured.
save
        True/False, if True the beacons.conf file will be updated too. Default is False.
'''
ret = {'name': name,
'result': True,
'changes': {},
'comment': []}
current_beacons = __salt__['beacons.list'](return_yaml=False, **kwargs)
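    # Normalize the requested settings into the same list-of-single-key-dicts
    # layout that beacons.list returns, so the two can be compared directly.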
beacon_data = [{k: v} for k, v in six.iteritems(kwargs)]
if name in current_beacons:
if beacon_data == current_beacons[name]:
            ret['comment'].append('Beacon {0} is in the correct state'.format(name))
else:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.modify'](name, beacon_data, **kwargs)
ret['comment'].append(result['comment'])
ret['changes'] = result['changes']
else:
result = __salt__['beacons.modify'](name, beacon_data, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
if 'changes' in result:
ret['comment'].append('Modifying {0} in beacons'.format(name))
ret['changes'] = result['changes']
else:
ret['comment'].append(result['comment'])
else:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.add'](name, beacon_data, **kwargs)
ret['comment'].append(result['comment'])
else:
result = __salt__['beacons.add'](name, beacon_data, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
ret['comment'].append('Adding {0} to beacons'.format(name))
if save:
if __opts__.get('test'):
ret['comment'].append('Beacon {0} would be saved'.format(name))
else:
result = __salt__['beacons.save']()
ret['comment'].append('Beacon {0} saved'.format(name))
ret['comment'] = '\n'.join(ret['comment'])
return ret
def absent(name,
save=False,
**kwargs):
'''
Ensure beacon is absent.
name
        The name of the beacon to ensure is absent.
save
        True/False, if True the beacons.conf file will be updated too. Default is False.
'''
### NOTE: The keyword arguments in **kwargs are ignored in this state, but
### cannot be removed from the function definition, otherwise the use
### of unsupported arguments will result in a traceback.
ret = {'name': name,
'result': True,
'changes': {},
'comment': []}
current_beacons = __salt__['beacons.list'](return_yaml=False, **kwargs)
if name in current_beacons:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.delete'](name, **kwargs)
ret['comment'].append(result['comment'])
else:
result = __salt__['beacons.delete'](name, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
ret['comment'].append('Removed {0} from beacons'.format(name))
else:
ret['comment'].append('{0} not configured in beacons'.format(name))
if save:
if __opts__.get('test'):
ret['comment'].append('Beacon {0} would be saved'.format(name))
else:
result = __salt__['beacons.save']()
ret['comment'].append('Beacon {0} saved'.format(name))
ret['comment'] = '\n'.join(ret['comment'])
return ret
|
Enable a beacon.
name
The name of the beacon to enable.
'''
### NOTE: The keyword arguments in **kwargs are ignored in this state, but
### cannot be removed from the function definition, otherwise the use
### of unsupported arguments will result in a traceback.
ret = {'name': name,
'result': True,
'changes': {},
'comment': []}
current_beacons = __salt__['beacons.list'](return_yaml=False, **kwargs)
if name in current_beacons:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.enable_beacon'](name, **kwargs)
ret['comment'].append(result['comment'])
else:
result = __salt__['beacons.enable_beacon'](name, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
                ret['comment'].append('Enabled beacon {0}.'.format(name))
else:
ret['comment'].append('{0} not a configured beacon'.format(name))
ret['comment'] = '\n'.join(ret['comment'])
return ret
def disabled(name, **kwargs):
'''
Disable a beacon.
name
The name of the beacon to disable.
'''
### NOTE: The keyword arguments in **kwargs are ignored in this state, but
### cannot be removed from the function definition, otherwise the use
### of unsupported arguments will result in a traceback.
ret = {'name': name,
'result': True,
'changes': {},
'comment': []}
current_beacons = __salt__['beacons.list'](return_yaml=False, **kwargs)
if name in current_beacons:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.disable_beacon'](name, **kwargs)
ret['comment'].append(result['comment'])
else:
result = __salt__['beacons.disable_beacon'](name, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
ret['comment'].append('Disabled beacon {0}.'.format(name))
else:
        ret['comment'].append('Beacon {0} is not configured.'.format(name))
ret['comment'] = '\n'.join(ret['comment'])
return ret
|
def enabled(name, **kwargs):
'''
|
utils.rs
|
use std::io::{Read, Write};
use serde::de::DeserializeOwned;
use std::process::Command;
pub fn get<T: DeserializeOwned>(url: &str) -> serde_json::Result<T> {
let client = reqwest::blocking::Client::builder().user_agent("cargo-release-action").build().expect("Couldn't create client.");
let mut res = client.execute(client.get(url).build().expect("Couldn't create request.")).expect("Couldn't get response");
let mut body = String::new();
res.read_to_string(&mut body).expect("Couldn't read body to string.");
serde_json::from_str(&body)
}
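/// Run a command, mirror its stdout/stderr to ours, and return the combined
/// output on success, or an error message that includes that output on failure.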
fn execute_with_output(command: &str, args: &[&str]) -> Result<String, String> {
println!("Executing: {} {:?}", command, args);
let output = Command::new(command)
.args(args.iter())
.output()
.map_err(|_| "Couldn't get Output.".to_string())?;
std::io::stdout().write_all(&output.stdout).unwrap();
std::io::stderr().write_all(&output.stderr).unwrap();
let stderr = String::from_utf8_lossy(&output.stderr);
let stdout = String::from_utf8_lossy(&output.stdout);
let stdio = format!("stderr: {}\nstdout: {}", stderr, stdout);
if output.status.success() {
Ok(stdio)
} else {
Err(format!("{} {:?}: execution failed.\n{}", command, args, stdio))
}
}
fn execute(command: &str, args: &[&str]) -> Result<(), String> {
println!("Executing: {} {:?}", command, args);
let status = Command::new(command)
.args(args.iter())
.status()
.expect("Couldn't get Output.");
if status.success() {
Ok(())
} else {
Err(format!("{} {:?}: execution failed", command, args))
}
}
pub fn publish(release: &str, cargo_token: &str) -> Result<(), String>
|
pub fn check_publish() -> Result<(), String> {
let output = execute_with_output("cargo", &["publish", "--dry-run"])?;
let warning_count = output
.lines()
.filter(|line| {
line.find("warning").is_some()
}).count();
    // `cargo publish --dry-run` always prints "warning: aborting upload due to dry run",
    // so only treat it as a failure when there is more than that one warning.
if warning_count > 1 {
Err("Can't publish crate.".to_string())
} else {
Ok(())
}
}
|
{
execute("git", &["config", "--local", "user.email", "41898282+github-actions[bot]@users.noreply.github.com"])?;
execute("git", &["config", "--local", "user.name", "github-actions[bot]"])?;
execute("cargo", &["login", &cargo_token])?;
execute("cargo", &["install", "cargo-release"])?;
execute("cargo", &["release", release, "--no-confirm", "--skip-publish"])?;
execute("cargo", &["release", "--no-confirm", "--skip-push"])
}
|
stream_test.go
|
package luigi // import "go.cryptoscope.co/luigi"
import (
"context"
"testing"
"time"
)
func TestChanSource(t *testing.T)
|
func TestChanSink(t *testing.T) {
type testcase struct {
values []interface{}
}
test := func(tc testcase) {
ch := make(chan interface{})
echoCh := make(chan interface{})
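        // A goroutine echoes every value poured into the sink back out on
        // echoCh so the test can check it arrived unchanged.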
var err error
cs := &chanSink{ch: ch, nonBlocking: false, closeErr: &err}
for _, v := range tc.values {
go func() {
echoCh <- (<-ch)
}()
err := cs.Pour(context.TODO(), v)
if err != nil {
t.Errorf("expected nil error but got %s", err)
break
}
v_ := <-echoCh
if v != v_ {
t.Errorf("expected value %#v, but got %#v", v, v_)
}
}
go func() {
_, closed := <-ch
if !closed {
echoCh <- nil
} else {
close(echoCh)
}
}()
cs.Close()
_, closed := <-echoCh
if !closed {
t.Error("expected closed channel, but read was successful")
}
}
cases := []testcase{
{[]interface{}{1, 2, 3}},
{[]interface{}{}},
{[]interface{}{nil, 0, ""}},
}
for _, tc := range cases {
test(tc)
}
}
func TestPipe(t *testing.T) {
type testcase struct {
values []interface{}
doClose bool
}
test := func(tc testcase) {
src, sink := NewPipe()
errCh := make(chan error)
for _, v := range tc.values {
go func(v_ interface{}) {
errCh <- sink.Pour(context.TODO(), v_)
}(v)
v_, err := src.Next(context.TODO())
if v != v_ {
t.Errorf("expected value %#v, but got %#v", v, v_)
}
if err != nil {
t.Errorf("expected nil error but got %s", err)
}
err = <-errCh
if err != nil {
t.Errorf("expected nil error but got %s", err)
}
}
if tc.doClose {
err := sink.Close()
if err != nil {
t.Errorf("sink close: expected nil error, got %v", err)
}
_, err = src.Next(context.TODO())
if !IsEOS(err) {
t.Errorf("expected end-of-stream error but got %s", err)
}
} else {
ctx, cancel := context.WithTimeout(
context.Background(), 5*time.Millisecond)
defer cancel()
_, err := src.Next(ctx)
if err != context.DeadlineExceeded {
t.Errorf("expected deadline exceeded error, got %v", err)
}
}
}
cases := []testcase{
{[]interface{}{1, 2, 3}, true},
{[]interface{}{}, true},
{[]interface{}{nil, 0, ""}, false},
}
for _, tc := range cases {
test(tc)
}
}
|
{
type testcase struct {
values []interface{}
doClose bool
}
test := func(tc testcase) {
ch := make(chan interface{})
var err error
cs := &chanSource{ch: ch, nonBlocking: false, closeErr: &err}
for _, v := range tc.values {
go func(v_ interface{}) {
ch <- v_
}(v)
v_, err := cs.Next(context.TODO())
if v != v_ {
t.Errorf("expected value %#v, but got %#v", v, v_)
}
if err != nil {
t.Errorf("expected nil error but got %s", err)
}
}
if tc.doClose {
close(ch)
_, err := cs.Next(context.TODO())
if !IsEOS(err) {
t.Errorf("expected end-of-stream error but got %s", err)
}
} else {
ctx, cancel := context.WithTimeout(
context.Background(), 5*time.Millisecond)
defer cancel()
_, err := cs.Next(ctx)
if err != context.DeadlineExceeded {
t.Errorf("expected deadline exceeded error, got %v", err)
}
}
}
cases := []testcase{
{[]interface{}{1, 2, 3}, true},
{[]interface{}{}, true},
{[]interface{}{nil, 0, ""}, false},
}
for _, tc := range cases {
test(tc)
}
}
|
devextreme-angular-ui-scroll-view.d.ts
|
/**
* Generated bundle index. Do not edit.
*/
export * from './index';
|
//# sourceMappingURL=devextreme-angular-ui-scroll-view.d.ts.map
|
|
apps.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class
|
(AppConfig):
name = 'test_app'
verbose_name = 'Test App'
|
TestAppConfig
|
gatsby-ssr.js
|
import PageLayout from './src/components/page-layout';
import React from 'react';
export const onRenderBody = (
{setPostBodyComponents, setHeadComponents},
{ffWidgetId}
) => {
if (ffWidgetId) {
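    // Inject the FreddyFeedback widget loader when a widget ID is configured.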
setHeadComponents([
<script
key="feedback"
dangerouslySetInnerHTML={{
__html: `
var ffWidgetId = '${ffWidgetId}';
var ffWidgetScript = document.createElement("script");
ffWidgetScript.type = "text/javascript";
ffWidgetScript.src = 'https://freddyfeedback.com/widget/freddyfeedback.js';
document.head.appendChild(ffWidgetScript);
`
}}
/>,
<script key="utm" src="https://www.apollographql.com/utm-grabber.js" />
]);
}
setPostBodyComponents([
React.createElement('script', {
key: 'docsearch',
|
src:
'https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.js'
})
]);
};
export const wrapPageElement = (
{element, props}, // eslint-disable-line react/prop-types
pluginOptions
) => (
<PageLayout {...props} pluginOptions={pluginOptions}>
{element}
</PageLayout>
);
| |
avatar.js
|
module.exports = {
name: 'avatar',
aliases: ['icon', 'pfp'],
description: 'Get profile picture of a user',
args: true,
usage: '<user>',
execute(message, args) {
if (!message.mentions.users.size) {
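      // No users mentioned: fall back to the message author's own avatar.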
return message.channel.send(
`Your avatar: ${message.author.displayAvatarURL}`
);
}
const avatarList = message.mentions.users.map(user => {
|
return `${user.toString()}'s avatar: ${user.displayAvatarURL}`;
});
// send the entire array of strings as a message
// by default, discord.js will `.join()` the array with `\n`
message.channel.send(avatarList);
}
};
| |
documentation_generator.py
|
from pathlib import Path
from pprint import pprint
import keyword
import builtins
import textwrap
from ursina import color, lerp, application
def indentation(line):
return len(line) - len(line.lstrip())
def get_module_attributes(str):
attrs = list()
for l in str.split('\n'):
if len(l) == 0:
continue
if l.startswith(tuple(keyword.kwlist) + tuple(dir(builtins)) + (' ', '#', '\'', '\"', '_')):
continue
attrs.append(l)
return attrs
def get_classes(str):
classes = dict()
for c in str.split('\nclass ')[1:]:
class_name = c.split(':', 1)[0]
if class_name.startswith(('\'', '"')):
continue
# print(class_name)
classes[class_name] = c.split(':', 1)[1]
return classes
def get_class_attributes(str):
attributes = list()
lines = str.split('\n')
start = 0
end = len(lines)
    found_init = False
    for i, line in enumerate(lines):
        if line == '''if __name__ == '__main__':''':
            break
        if line.strip().startswith('def __init__'):
if found_init:
break
start = i
for j in range(i+1, len(lines)):
if (indentation(lines[j]) == indentation(line)
and not lines[j].strip().startswith('def late_init')
):
end = j
found_init = True
break
init_section = lines[start:end]
# print('init_section:', start, end, init_section)
for i, line in enumerate(init_section):
if line.strip().startswith('self.') and ' = ' in line and line.startswith(' '*8) and not line.startswith(' '*9):
stripped_line = line.split('self.', 1)[1]
if '.' in stripped_line.split(' ')[0] or stripped_line.startswith('_'):
continue
key = stripped_line.split(' = ')[0]
value = stripped_line.split(' = ')[1]
            if i < len(init_section)-1 and indentation(init_section[i+1]) > indentation(line):
# value = 'multiline'
start = i
end = i
indent = indentation(line)
for j in range(i+1, len(init_section)):
if indentation(init_section[j]) <= indent:
end = j
break
for l in init_section[start+1:end]:
value += '\n' + l[4:]
attributes.append(key + ' = ' + value)
    if '@property' in str:
for i, line in enumerate(lines):
if line.strip().startswith('@property'):
name = lines[i+1].split('def ')[1].split('(')[0]
# include comments for properties
if '#' in lines[i+1]:
name += ((20-len(name)) * ' ') + '<gray>#' + lines[i+1].split('#',1)[1] + '</gray>'
if not name in [e.split(' = ')[0] for e in attributes]:
attributes.append(name)
return attributes
def get_functions(str, is_class=False):
    functions = list()
    lines = str.split('\n')
ignore_functions_for_property_generation = 'generate_properties(' in str
for i, line in enumerate(lines):
if line == '''if __name__ == '__main__':''' or 'docignore' in line:
break
if line.strip().startswith('def '):
if not is_class and line.split('(')[1].startswith('self'):
continue
name = line.split('def ')[1].split('(')[0]
if name.startswith('_') or lines[i-1].strip().startswith('@'):
continue
if ignore_functions_for_property_generation:
if name.startswith('get_') or name.startswith('set_'):
continue
params = line.replace('(self, ', '(')
params = params.replace('(self)', '()')
params = params.split('(', 1)[1].rsplit(')', 1)[0]
comment = ''
if '#' in line:
comment = '#' + line.split('#')[1]
functions.append((name, params, comment))
return functions
def clear_tags(str):
for tag in ('purple', 'olive', 'yellow', 'blue'):
str = str.replace(f'<{tag}>', '')
str = str.replace(f'</{tag}>', '')
return str
def get_example(str, name=None): # use name to highlight the relevant class
key = '''if __name__ == '__main__':'''
lines = list()
example_started = False
for l in str.split('\n'):
if example_started:
lines.append(l)
if l == key:
example_started = True
example = '\n'.join(lines)
example = textwrap.dedent(example)
example = example.split('# test\n')[0]
ignore = ('app = Ursina()', 'app.run()', 'from ursina import *')
if 'class Ursina' in str: # don't ignore in main.py
ignore = ()
lines = [e for e in example.split('\n') if not e in ignore and not e.strip().startswith('#')]
import re
styled_lines = list()
for line in lines:
line = line.replace('def ', '<purple>def</purple> ')
line = line.replace('from ', '<purple>from</purple> ')
line = line.replace('import ', '<purple>import</purple> ')
line = line.replace('for ', '<purple>for</purple> ')
line = line.replace('elif ', '<purple>elif</purple> ')
line = line.replace('if ', '<purple>if</purple> ')
line = line.replace(' not ', ' <purple>not</purple> ')
line = line.replace('else:', '<purple>else</purple>:')
line = line.replace('Entity', '<olive>Entity</olive>')
for e in ('print', 'range', 'hasattr', 'getattr', 'setattr'):
line = line.replace(f'{e}(' , f'<blue>{e}</blue>(')
# colorize ursina specific params
for e in ('enabled', 'parent', 'world_parent', 'model', 'highlight_color', 'color',
'texture_scale', 'texture', 'visible',
'position', 'z', 'y', 'z',
'rotation', 'rotation_x', 'rotation_y', 'rotation_z',
'scale', 'scale_x', 'scale_y', 'scale_z',
'origin', 'origin_x', 'origin_y', 'origin_z',
'text', 'on_click', 'icon', 'collider', 'shader', 'curve', 'ignore',
'vertices', 'triangles', 'uvs', 'normals', 'colors', 'mode', 'thickness'
):
line = line.replace(f'{e}=' , f'<olive>{e}</olive>=')
# colorize numbers
for i in range(10):
line = line.replace(f'{i}', f'<yellow>{i}</yellow>')
# destyle Vec2 and Vec3
line = line.replace(f'<yellow>3</yellow>(', '3(')
line = line.replace(f'<yellow>2</yellow>(', '2(')
# highlight class name
if name:
if '(' in name:
name = name.split('(')[0]
line = line.replace(f'{name}(', f'<purple><b>{name}</b></purple>(')
line = line.replace(f'={name}(', f'=<purple><b>{name}</b></purple>(')
if ' #' in line:
# remove colored words inside strings
line = clear_tags(line)
line = line.replace(' #', ' <gray>#')
line += '</gray>'
styled_lines.append(line)
lines = styled_lines
example = '\n'.join(lines)
    # find triple quoted strings
if example.count("'''") % 2 == 0 and example.count("'''") > 1:
parts = example.strip().split("'''")
parts = [e for e in parts if e]
is_quote = example.strip().startswith("'''")
for i in range(not is_quote, len(parts), 2):
parts[i] = clear_tags(parts[i])
parts[i] = "<green>'''" + parts[i] + "'''</green>"
example = ''.join(parts)
# find single quoted words
styled_lines = []
for line in example.split('\n'):
quotes = re.findall('\'(.*?)\'', line)
quotes = ['\'' + q + '\'' for q in quotes]
for q in quotes:
line = line.replace(q, '<green>' + clear_tags(q) + '</green>')
styled_lines.append(line)
example = '\n'.join(styled_lines)
return example.strip()
def is_singleton(str):
    for l in str.split('\n'):
        # if l.startswith('sys.modules['):
        if l.startswith('instance = '):
            return True
    return False
path = application.package_folder
most_used_info = dict()
module_info = dict()
class_info = dict()
# ignore files that are not committed
ignored_files = list()
for f in ignored_files:
print('ignoring:', f)
ignored_files.append(path / 'gamepad.py')
for f in path.glob('*.py'):
if f in ignored_files:
continue
if f.name.startswith('_') or f.name == 'build.py':
module_info['build'] = (
f,
'python -m ursina.build',
{},
'',
'''open cmd at your project folder and run 'python -m ursina.build' to package your app for windows.'''
)
continue
with open(f, encoding='utf8') as t:
code = t.read()
        code = code.replace('<', '&lt;').replace('>', '&gt;')
if not is_singleton(code):
name = f.stem
attrs, funcs = list(), list()
attrs = get_module_attributes(code)
funcs = get_functions(code)
example = get_example(code, name)
if attrs or funcs:
module_info[name] = (f, '', attrs, funcs, example)
# continue
classes = get_classes(code)
for class_name, class_definition in classes.items():
if 'Enum' in class_name:
class_definition = class_definition.split('def ')[0]
attrs = [l.strip() for l in class_definition.split('\n') if ' = ' in l]
class_info[class_name] = (f, '', attrs, '', '')
continue
if 'def __init__' in class_definition:
# init line
params = '__init__('+ class_definition.split('def __init__(')[1].split('\n')[0][:-1]
attrs = get_class_attributes(class_definition)
methods = get_functions(class_definition, is_class=True)
example = get_example(code, class_name)
class_info[class_name] = (f, params, attrs, methods, example)
# singletons
else:
module_name = f.name.split('.')[0]
classes = get_classes(code)
for class_name, class_definition in classes.items():
# print(module_name)
attrs, methods = list(), list()
attrs = get_class_attributes(class_definition)
methods = get_functions(class_definition, is_class=True)
example = get_example(code, class_name)
module_info[module_name] = (f, '', attrs, methods, example)
prefab_info = dict()
for f in path.glob('prefabs/*.py'):
if f.name.startswith('_') or f in ignored_files:
continue
with open(f, encoding='utf8') as t:
code = t.read()
        code = code.replace('<', '&lt;').replace('>', '&gt;')
classes = get_classes(code)
for class_name, class_definition in classes.items():
if 'def __init__' in class_definition:
params = '__init__('+ class_definition.split('def __init__(')[1].split('\n')[0][:-1]
attrs = get_class_attributes(class_definition)
methods = get_functions(class_definition, is_class=True)
example = get_example(code, class_name)
prefab_info[class_name] = (f, params, attrs, methods, example)
script_info = dict()
for f in path.glob('scripts/*.py'):
if f.name.startswith('_') or f in ignored_files:
continue
# if f.is_file() and f.name.endswith(('.py', )):
with open(f, encoding='utf8') as t:
code = t.read()
if not 'class ' in code:
name = f.name.split('.')[0]
attrs, funcs = list(), list()
attrs = get_module_attributes(code)
funcs = get_functions(code)
example = get_example(code)
if attrs or funcs:
script_info[name] = (f, '', attrs, funcs, example)
classes = get_classes(code)
for class_name, class_definition in classes.items():
if 'def __init__' in class_definition:
params = '__init__('+ class_definition.split('def __init__(')[1].split('\n')[0][:-1]
attrs = get_class_attributes(class_definition)
methods = get_functions(class_definition, is_class=True)
example = get_example(code, class_name)
script_info[class_name] = (f, params, attrs, methods, example)
asset_info = dict()
model_names = [f'\'{f.stem}\'' for f in path.glob('models_compressed/*.ursinamesh')]
asset_info['models'] = ('', '', model_names, '', '''e = Entity(model='quad')''')
texture_names = [f'\'{f.stem}\'' for f in path.glob('textures/*.*')]
asset_info['textures'] = ('', '', texture_names, '', '''e = Entity(model='cube', texture='brick')''')
shaders = [f'{f.stem}' for f in path.glob('shaders/*.*')] + [f'{f.stem}' for f in path.glob('shaders/screenspace_shaders/*.*')]
shaders = [e for e in shaders if not e.startswith('_')]
asset_info['shaders'] = ('', '', shaders, '', '''from ursina.shaders import normals_shader\ne = Entity(shader=normals_shader)''')
for f in path.glob('models/procedural/*.py'):
if f.name.startswith('_') or f in ignored_files:
continue
with open(f, encoding='utf8') as t:
code = t.read()
classes = get_classes(code)
for class_name, class_definition in classes.items():
if 'def __init__' in class_definition:
params = '__init__('+ class_definition.split('def __init__(')[1].split('\n')[0][:-1]
attrs = get_class_attributes(class_definition)
methods = get_functions(class_definition, is_class=True)
example = get_example(code, class_name)
asset_info[class_name] = (f, params, attrs, methods, example)
most_used_info = dict()
for name in ('Entity(NodePath)', 'Text(Entity)', 'Button(Entity)', 'mouse', 'raycaster',):
for d in (module_info, class_info, prefab_info):
if name in d:
most_used_info[name] = d[name]
del d[name]
def html_color(color):
return f'hsl({color.h}, {int(color.s*100)}%, {int(color.v*100)}%)'
|
background_color = lerp(base_color, base_color.invert(), 0)
else:
base_color = color.color(60, 1, .01)
background_color = lerp(base_color, base_color.invert(), .125)
text_color = lerp(background_color, background_color.invert(), .9)
example_color = lerp(background_color, text_color, .1)
scrollbar_color = html_color(lerp(background_color, text_color, .1))
link_color = html_color(color.gray)
init_color = html_color(base_color.invert())
style = f'''
<style>
html {{
scrollbar-face-color: {html_color(text_color)};
scrollbar-base-color: {html_color(text_color)};
scrollbar-3dlight-color: {html_color(text_color)}4;
scrollbar-highlight-color: {html_color(text_color)};
scrollbar-track-color: {html_color(background_color)};
scrollbar-arrow-color: {html_color(background_color)};
scrollbar-shadow-color: {html_color(text_color)};
scrollbar-darkshadow-color: {html_color(text_color)};
}}
::-webkit-scrollbar {{ width: 8px; height: 3px;}}
::-webkit-scrollbar {{ width: 8px; height: 3px;}}
::-webkit-scrollbar-button {{ background-color: {scrollbar_color}; }}
::-webkit-scrollbar-track {{ background-color: {html_color(background_color)};}}
::-webkit-scrollbar-track-piece {{ background-color: {html_color(background_color)};}}
::-webkit-scrollbar-thumb {{ height: 50px; background-color: {scrollbar_color}; border-radius: 3px;}}
::-webkit-scrollbar-corner {{ background-color: {html_color(background_color)};}}
::-webkit-resizer {{ background-color: {html_color(background_color)};}}
body {{
margin: auto;
background-color: {html_color(background_color)};
color: {html_color(text_color)};
font-family: monospace;
position: absolute;
top:0;
left: 24em;
font-size: 1.375em;
font-weight: lighter;
max-width: 100%;
overflow-x: hidden;
white-space: pre-wrap;
}}
a {{
color: {link_color};
}}
purple {{color: hsl(289.0, 50%, 50%);}}
gray {{color: gray;}}
olive {{color: olive;}}
yellow {{color: darkgoldenrod;}}
green {{color: seagreen;}}
blue {{color: hsl(210, 50%, 50%);}}
.example {{
padding-left: 1em;
background-color: {html_color(example_color)};
}}
.params {{
color:{init_color};
font-weight:bold;
}}
</style>
'''
# return style
html = '<title> ursina cheat sheet</title>'
html += '''
<b>Ursina cheat sheet</b>
This document lists most modules and classes in ursina. Each section is structured as follows:
ClassName(BaseClass)
module location
parameters
How to instantiate the class, i.e. Button(text='', **kwargs).
'**kwargs' in this case means you can give it optional keyword arguments.
For example, Button('Start', scale=.25, color=color.blue, position=(-.1,.25)) also includes
information on how big the button should be, its color and its position.
attributes
Names of values we can get/set, sometimes followed by its starting value and a short explanation.
For example, 'scale', 'color' and 'position' are
attributes we gave the Button above. These are members of Entity, which the Button class
inherits from, so the Button class can also access these.
methods/functions
These end with (), which means they are functions that can be called.
Also listed are their parameters and default arguments.
For example, Entity has a method called 'look_at()'. You need to give it a
'target' (an Entity or position) to look at and optionally say
which axis will be facing the target.
example
You can search the document with Ctrl+F for instant search results.
'''
sidebar = '''
<div class="sidebar" style="
left: 0px;
position: fixed;
top: 0px;
padding-top:40px;
padding-left:20px;
bottom: 0;
overflow-y: scroll;
width: 15em;
z-index: 1;
">
<a href="cheat_sheet.html">light</a> <a href="cheat_sheet_dark.html">dark</a>
'''
for i, class_dictionary in enumerate((most_used_info, module_info, class_info, prefab_info, script_info, asset_info)):
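        # Each entry maps a name to (source file, init params, attributes, methods, example).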
for name, attrs_and_functions in class_dictionary.items():
print('generating docs for', name)
location, params, attrs, funcs, example = attrs_and_functions
params = params.replace('__init__', name.split('(')[0])
params = params.replace('(self, ', '(')
params = params.replace('(self)', '()')
name = name.replace('ShowBase', '')
name = name.replace('NodePath', '')
for parent_class in ('Entity', 'Button', 'Draggable', 'Text', 'Collider', 'Mesh', 'Prismatoid'):
name = name.replace(f'({parent_class})', f'(<a style="color: gray;" href="#{parent_class}">{parent_class}</a>)')
base_name = name
if '(' in base_name:
base_name = base_name.split('(')[0]
base_name = base_name.split(')')[0]
name = name.replace('(', '<gray>(')
name = name.replace(')', ')</gray>')
v = lerp(text_color.v, background_color.v, .2)
# v = .5
col = color.color(50-(i*30), .9, v)
col = html_color(col)
sidebar += f'''<a style="color:{col};" href="#{base_name}">{base_name}</a>\n'''
html += '\n'
html += f'''<div id="{base_name}"><div id="{base_name}" style="color:{col}; font-size:1.75em; font-weight:normal;">{name}</div>'''
html += '<div style="position:relative; padding:0em 0em 2em 1em; margin:0;">'
# location
location = str(location)
if 'ursina' in location:
location = location.split('ursina')[-1]
github_link = 'https://github.com/pokepetter/ursina/tree/master/ursina' + location.replace('\\', '/')
location = location.replace('\\', '.')[:-3]
html += f'''<a href="{github_link}"><gray>ursina{location}</gray></a><br><br>'''
if params:
params = f'<params class="params">{params}</params>\n'
html += params + '\n'
for e in attrs:
if ' = ' in e:
e = f'''{e.split(' = ')[0]}<gray> = {e.split(' = ')[1]}</gray> '''
html += f'''{e}\n'''
html += '\n'
for e in funcs:
e = f'{e[0]}(<gray>{e[1]}</gray>) <gray>{e[2]}</gray>'
html += e + '\n'
if example:
html += '\n<div class="example">' + example +'\n</div>'
html += '\n</div></div>'
html = html.replace('<gray></gray>', '')
sidebar += '\n'
sidebar += '</div>'
html += '</div>'
html = sidebar + style + '<div id="content">' + html + '</div>' + '</body>'
with open(file_name, 'w', encoding='utf-8') as f:
f.write(html)
make_html('light', 'cheat_sheet.html')
make_html('dark', 'cheat_sheet_dark.html')
|
def make_html(style, file_name):
if style == 'light':
base_color = color.color(60, 0, .99)
|
navigation-item.component.ts
|
import { Component, EventEmitter, Input, OnInit, Output } from '@angular/core';
import { SidenavItem } from '../../sidenav/sidenav-item/sidenav-item.interface';
@Component({
selector: 'fury-navigation-item',
templateUrl: './navigation-item.component.html',
styleUrls: ['./navigation-item.component.scss']
})
export class NavigationItemComponent implements OnInit {
// tslint:disable-next-line:no-input-rename
@Input('item') item: SidenavItem;
// tslint:disable-next-line:no-input-rename
@Input('currentlyOpen') currentlyOpen: SidenavItem[] = [];
@Output() handleClick = new EventEmitter<SidenavItem>();
constructor() { }
|
}
|
ngOnInit() {
}
|
run.py
|
################################################################################
from subprocess import Popen, PIPE, STDOUT
from threading import Thread
import bz2, json, click
from newsroom import jsonl
from . import readiter
from tqdm import tqdm
################################################################################
def _writer(process, dataset_file, keys):
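    # Stream the requested keys of each article to the container as
    # newline-delimited JSON, then close stdin to signal end of input.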
for article in dataset_file:
subset = {k: article[k] for k in keys if k in article}
encoded = json.dumps(subset).encode("utf-8")
process.stdin.write(encoded + b"\n")
process.stdin.close()
################################################################################
articles_file = click.Path(
exists = True,
dir_okay = False,
readable = True,
resolve_path = True,
)
summaries_file = click.Path(
exists = False,
dir_okay = False,
writable = True,
resolve_path = True,
)
################################################################################
@click.command()
@click.option(
"--system",
type = str,
required = True,
help = "Name of docker image."
)
@click.option(
"--dataset",
type = articles_file,
required = True,
help = "Input path to full dataset."
)
@click.option(
"--summaries",
type = summaries_file,
required = True,
help = "Output path for system generated summaries."
)
@click.option(
"--keys",
type = str,
default = "text",
help = "List of dataset keys to pass to system. [default = text]"
)
################################################################################
def main(system, dataset, summaries, keys):
|
################################################################################
|
print("Starting", system, "Docker image.")
process = Popen(
[
"docker", "run", "--rm",
"-a", "stdin", "-a", "stdout",
"-i", system
],
stdin = PIPE,
stdout = PIPE,
)
dataset_file = jsonl.open(dataset, gzip = True)
# Check the size of the dataset.
# As a sanity check and for the progress bar.
print("Loading articles... ", end = "", flush = True)
dataset_length = len(dataset_file)
print("found", dataset_length, "articles.\n")
# Start new thread to feed summaries into container.
Thread(
target = _writer,
args = (process, dataset_file, keys.split(","))
).start()
# Start progress bar.
progress = tqdm(
readiter(process.stdout),
total = dataset_length,
desc = "Running " + system,
)
# Prepare to decode summaries.
is_json = True
with jsonl.open(summaries, gzip = True) as summaries_file:
summaries_file.delete()
with progress as output:
for line in output:
summaries_file.appendline({"system": line})
print("\nRun complete. Next, evaluate with newsroom-score.")
|
_one_hot.py
|
import torch
import torch.nn.functional
from e3nn.o3 import Irreps
from e3nn.util.jit import compile_mode
from nequip.data import AtomicDataDict
from .._graph_mixin import GraphModuleMixin
@compile_mode("script")
class OneHotAtomEncoding(GraphModuleMixin, torch.nn.Module):
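    """One-hot encode atom types into node attributes (and, optionally, node features).

    ``num_types`` is the number of distinct atom types; when ``set_features`` is
    True, the encoding is also stored as the node features.
    """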
num_types: int
set_features: bool
# TODO: use torch.unique?
# TODO: type annotation
# Docstrings
def __init__(
self,
num_types: int,
set_features: bool = True,
irreps_in=None,
):
super().__init__()
self.num_types = num_types
self.set_features = set_features
# Output irreps are num_types even (invariant) scalars
irreps_out = {AtomicDataDict.NODE_ATTRS_KEY: Irreps([(self.num_types, (0, 1))])}
if self.set_features:
irreps_out[AtomicDataDict.NODE_FEATURES_KEY] = irreps_out[
AtomicDataDict.NODE_ATTRS_KEY
]
self._init_irreps(irreps_in=irreps_in, irreps_out=irreps_out)
|
def forward(self, data: AtomicDataDict.Type):
type_numbers = data[AtomicDataDict.ATOM_TYPE_KEY].squeeze(-1)
one_hot = torch.nn.functional.one_hot(
type_numbers, num_classes=self.num_types
).to(device=type_numbers.device, dtype=data[AtomicDataDict.POSITIONS_KEY].dtype)
data[AtomicDataDict.NODE_ATTRS_KEY] = one_hot
if self.set_features:
data[AtomicDataDict.NODE_FEATURES_KEY] = one_hot
return data
| |
injector.js
|
'use strict';
/**
* @ngdoc function
* @module ng
* @name angular.injector
* @kind function
*
* @description
* Creates an injector object that can be used for retrieving services as well as for
* dependency injection (see {@link guide/di dependency injection}).
*
* @param {Array.<string|Function>} modules A list of module functions or their aliases. See
* {@link angular.module}. The `ng` module must be explicitly added.
* @param {boolean=} [strictDi=false] Whether the injector should be in strict mode, which
* disallows argument name annotation inference.
* @returns {injector} Injector object. See {@link auto.$injector $injector}.
*
* @example
* Typical usage
* ```js
* // create an injector
* var $injector = angular.injector(['ng']);
*
* // use the injector to kick off your application
* // use the type inference to auto inject arguments, or use implicit injection
* $injector.invoke(function($rootScope, $compile, $document) {
* $compile($document)($rootScope);
* $rootScope.$digest();
* });
* ```
*
* Sometimes you want to get access to the injector of a currently running AngularJS app
* from outside AngularJS. Perhaps, you want to inject and compile some markup after the
* application has been bootstrapped. You can do this using the extra `injector()` added
* to JQuery/jqLite elements. See {@link angular.element}.
*
* *This is fairly rare but could be the case if a third party library is injecting the
* markup.*
*
* In the following example a new block of HTML containing a `ng-controller`
* directive is added to the end of the document body by JQuery. We then compile and link
* it into the current AngularJS scope.
*
* ```js
* var $div = $('<div ng-controller="MyCtrl">{{content.label}}</div>');
* $(document.body).append($div);
*
* angular.element(document).injector().invoke(function($compile) {
* var scope = angular.element($div).scope();
* $compile($div)(scope);
* });
* ```
*/
/**
* @ngdoc module
* @name auto
* @installation
* @description
*
* Implicit module which gets automatically added to each {@link auto.$injector $injector}.
*/
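// Regular expressions used to read a function's argument names out of its
// source text: ARROW_ARG matches the unparenthesized parameter of an arrow
// function, FN_ARGS captures the parenthesized argument list, FN_ARG trims
// whitespace and a matched leading/trailing underscore from each name, and
// STRIP_COMMENTS removes // and /* */ comments before parsing.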
var ARROW_ARG = /^([^(]+?)=>/;
var FN_ARGS = /^[^(]*\(\s*([^)]*)\)/m;
var FN_ARG_SPLIT = /,/;
var FN_ARG = /^\s*(_?)(\S+?)\1\s*$/;
var STRIP_COMMENTS = /((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg;
var $injectorMinErr = minErr('$injector');
function stringifyFn(fn) {
return Function.prototype.toString.call(fn);
}
function extractArgs(fn) {
var fnText = stringifyFn(fn).replace(STRIP_COMMENTS, ''),
args = fnText.match(ARROW_ARG) || fnText.match(FN_ARGS);
return args;
}
function anonFn(fn) {
// For anonymous functions, showing at the very least the function signature can help in
// debugging.
var args = extractArgs(fn);
if (args) {
return 'function(' + (args[1] || '').replace(/[\s\r\n]+/, ' ') + ')';
}
return 'fn';
}
function annotate(fn, strictDi, name) {
var $inject,
argDecl,
last;
if (typeof fn === 'function') {
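    // Prefer an explicit fn.$inject array; otherwise, unless strict mode is on,
    // parse the argument names out of the function's source text.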
if (!($inject = fn.$inject)) {
$inject = [];
if (fn.length) {
if (strictDi) {
if (!isString(name) || !name) {
name = fn.name || anonFn(fn);
}
throw $injectorMinErr('strictdi',
'{0} is not using explicit annotation and cannot be invoked in strict mode', name);
}
argDecl = extractArgs(fn);
forEach(argDecl[1].split(FN_ARG_SPLIT), function(arg) {
arg.replace(FN_ARG, function(all, underscore, name) {
$inject.push(name);
});
});
}
fn.$inject = $inject;
}
} else if (isArray(fn)) {
last = fn.length - 1;
assertArgFn(fn[last], 'fn');
$inject = fn.slice(0, last);
} else {
assertArgFn(fn, 'fn', true);
}
return $inject;
}
///////////////////////////////////////
/**
* @ngdoc service
* @name $injector
*
* @description
*
* `$injector` is used to retrieve object instances as defined by
* {@link auto.$provide provider}, instantiate types, invoke methods,
* and load modules.
*
* The following always holds true:
*
* ```js
* var $injector = angular.injector();
* expect($injector.get('$injector')).toBe($injector);
* expect($injector.invoke(function($injector) {
* return $injector;
* })).toBe($injector);
* ```
*
* # Injection Function Annotation
*
* JavaScript does not have annotations, and annotations are needed for dependency injection. The
* following are all valid ways of annotating function with injection arguments and are equivalent.
*
* ```js
* // inferred (only works if code not minified/obfuscated)
* $injector.invoke(function(serviceA){});
*
* // annotated
* function explicit(serviceA) {};
* explicit.$inject = ['serviceA'];
* $injector.invoke(explicit);
*
* // inline
* $injector.invoke(['serviceA', function(serviceA){}]);
* ```
*
* ## Inference
*
* In JavaScript calling `toString()` on a function returns the function definition. The definition
* can then be parsed and the function arguments can be extracted. This method of discovering
* annotations is disallowed when the injector is in strict mode.
 * *NOTE:* This does not work with minification and obfuscation tools, since these tools change the
* argument names.
*
* ## `$inject` Annotation
* By adding an `$inject` property onto a function the injection parameters can be specified.
*
* ## Inline
* As an array of injection names, where the last item in the array is the function to call.
*/
/**
* @ngdoc method
* @name $injector#get
*
* @description
* Return an instance of the service.
*
* @param {string} name The name of the instance to retrieve.
* @param {string=} caller An optional string to provide the origin of the function call for error messages.
* @return {*} The instance.
*/
/**
* @ngdoc method
* @name $injector#invoke
*
* @description
* Invoke the method and supply the method arguments from the `$injector`.
*
* @param {Function|Array.<string|Function>} fn The injectable function to invoke. Function parameters are
* injected according to the {@link guide/di $inject Annotation} rules.
* @param {Object=} self The `this` for the invoked method.
* @param {Object=} locals Optional object. If preset then any argument names are read from this
* object first, before the `$injector` is consulted.
* @returns {*} the value returned by the invoked `fn` function.
*/
/**
* @ngdoc method
* @name $injector#has
*
* @description
* Allows the user to query if the particular service exists.
*
* @param {string} name Name of the service to query.
* @returns {boolean} `true` if injector has given service.
*/
/**
* @ngdoc method
* @name $injector#instantiate
* @description
* Create a new instance of JS type. The method takes a constructor function, invokes the new
* operator, and supplies all of the arguments to the constructor function as specified by the
* constructor annotation.
*
* @param {Function} Type Annotated constructor function.
* @param {Object=} locals Optional object. If preset then any argument names are read from this
* object first, before the `$injector` is consulted.
* @returns {Object} new instance of `Type`.
*/
/**
* @ngdoc method
* @name $injector#annotate
*
* @description
* Returns an array of service names which the function is requesting for injection. This API is
* used by the injector to determine which services need to be injected into the function when the
* function is invoked. There are three ways in which the function can be annotated with the needed
* dependencies.
*
* # Argument names
*
* The simplest form is to extract the dependencies from the arguments of the function. This is done
* by converting the function into a string using `toString()` method and extracting the argument
* names.
* ```js
* // Given
* function MyController($scope, $route) {
* // ...
* }
*
* // Then
* expect(injector.annotate(MyController)).toEqual(['$scope', '$route']);
* ```
*
* You can disallow this method by using strict injection mode.
*
* This method does not work with code minification / obfuscation. For this reason the following
* annotation strategies are supported.
*
* # The `$inject` property
*
* If a function has an `$inject` property and its value is an array of strings, then the strings
* represent names of services to be injected into the function.
* ```js
* // Given
* var MyController = function(obfuscatedScope, obfuscatedRoute) {
* // ...
* }
* // Define function dependencies
* MyController['$inject'] = ['$scope', '$route'];
*
* // Then
* expect(injector.annotate(MyController)).toEqual(['$scope', '$route']);
* ```
*
* # The array notation
*
* It is often desirable to inline Injected functions and that's when setting the `$inject` property
* is very inconvenient. In these situations using the array notation to specify the dependencies in
* a way that survives minification is a better choice:
*
* ```js
* // We wish to write this (not minification / obfuscation safe)
* injector.invoke(function($compile, $rootScope) {
* // ...
* });
*
 * // We are forced to break inlining
* var tmpFn = function(obfuscatedCompile, obfuscatedRootScope) {
* // ...
* };
* tmpFn.$inject = ['$compile', '$rootScope'];
* injector.invoke(tmpFn);
*
* // To better support inline function the inline annotation is supported
* injector.invoke(['$compile', '$rootScope', function(obfCompile, obfRootScope) {
* // ...
* }]);
*
* // Therefore
* expect(injector.annotate(
* ['$compile', '$rootScope', function(obfus_$compile, obfus_$rootScope) {}])
* ).toEqual(['$compile', '$rootScope']);
* ```
*
* @param {Function|Array.<string|Function>} fn Function for which dependent service names need to
* be retrieved as described above.
*
* @param {boolean=} [strictDi=false] Disallow argument name annotation inference.
*
* @returns {Array.<string>} The names of the services which the function requires.
*/
/**
* @ngdoc service
* @name $provide
*
* @description
*
* The {@link auto.$provide $provide} service has a number of methods for registering components
* with the {@link auto.$injector $injector}. Many of these functions are also exposed on
* {@link angular.Module}.
*
* An AngularJS **service** is a singleton object created by a **service factory**. These **service
* factories** are functions which, in turn, are created by a **service provider**.
* The **service providers** are constructor functions. When instantiated they must contain a
* property called `$get`, which holds the **service factory** function.
*
* When you request a service, the {@link auto.$injector $injector} is responsible for finding the
* correct **service provider**, instantiating it and then calling its `$get` **service factory**
* function to get the instance of the **service**.
*
* Often services have no configuration options and there is no need to add methods to the service
* provider. The provider will be no more than a constructor function with a `$get` property. For
* these cases the {@link auto.$provide $provide} service has additional helper methods to register
* services without specifying a provider.
*
* * {@link auto.$provide#provider provider(name, provider)} - registers a **service provider** with the
* {@link auto.$injector $injector}
* * {@link auto.$provide#constant constant(name, obj)} - registers a value/object that can be accessed by
* providers and services.
* * {@link auto.$provide#value value(name, obj)} - registers a value/object that can only be accessed by
* services, not providers.
* * {@link auto.$provide#factory factory(name, fn)} - registers a service **factory function**
* that will be wrapped in a **service provider** object, whose `$get` property will contain the
* given factory function.
* * {@link auto.$provide#service service(name, Fn)} - registers a **constructor function**
* that will be wrapped in a **service provider** object, whose `$get` property will instantiate
* a new object using the given constructor function.
* * {@link auto.$provide#decorator decorator(name, decorFn)} - registers a **decorator function** that
* will be able to modify or replace the implementation of another service.
*
* See the individual methods for more information and examples.
*/
/**
* @ngdoc method
* @name $provide#provider
* @description
*
* Register a **provider function** with the {@link auto.$injector $injector}. Provider functions
* are constructor functions, whose instances are responsible for "providing" a factory for a
* service.
*
* Service provider names start with the name of the service they provide followed by `Provider`.
* For example, the {@link ng.$log $log} service has a provider called
* {@link ng.$logProvider $logProvider}.
*
* Service provider objects can have additional methods which allow configuration of the provider
* and its service. Importantly, you can configure what kind of service is created by the `$get`
* method, or how that service will act. For example, the {@link ng.$logProvider $logProvider} has a
* method {@link ng.$logProvider#debugEnabled debugEnabled}
* which lets you specify whether the {@link ng.$log $log} service will log debug messages to the
* console or not.
*
* @param {string} name The name of the instance. NOTE: the provider will be available under `name +
'Provider'` key.
* @param {(Object|function())} provider If the provider is:
*
* - `Object`: then it should have a `$get` method. The `$get` method will be invoked using
* {@link auto.$injector#invoke $injector.invoke()} when an instance needs to be created.
* - `Constructor`: a new instance of the provider will be created using
* {@link auto.$injector#instantiate $injector.instantiate()}, then treated as `object`.
*
* @returns {Object} registered provider instance
* @example
*
* The following example shows how to create a simple event tracking service and register it using
* {@link auto.$provide#provider $provide.provider()}.
*
* ```js
* // Define the eventTracker provider
* function EventTrackerProvider() {
* var trackingUrl = '/track';
*
 *   // A provider method for configuring where the tracked events should be saved
* this.setTrackingUrl = function(url) {
* trackingUrl = url;
* };
*
* // The service factory function
* this.$get = ['$http', function($http) {
* var trackedEvents = {};
* return {
* // Call this to track an event
* event: function(event) {
* var count = trackedEvents[event] || 0;
* count += 1;
* trackedEvents[event] = count;
* return count;
* },
* // Call this to save the tracked events to the trackingUrl
* save: function() {
* $http.post(trackingUrl, trackedEvents);
* }
* };
* }];
* }
*
* describe('eventTracker', function() {
* var postSpy;
*
* beforeEach(module(function($provide) {
* // Register the eventTracker provider
* $provide.provider('eventTracker', EventTrackerProvider);
* }));
*
* beforeEach(module(function(eventTrackerProvider) {
* // Configure eventTracker provider
* eventTrackerProvider.setTrackingUrl('/custom-track');
* }));
*
* it('tracks events', inject(function(eventTracker) {
* expect(eventTracker.event('login')).toEqual(1);
* expect(eventTracker.event('login')).toEqual(2);
* }));
*
* it('saves to the tracking url', inject(function(eventTracker, $http) {
* postSpy = spyOn($http, 'post');
* eventTracker.event('login');
* eventTracker.save();
* expect(postSpy).toHaveBeenCalled();
* expect(postSpy.mostRecentCall.args[0]).not.toEqual('/track');
* expect(postSpy.mostRecentCall.args[0]).toEqual('/custom-track');
* expect(postSpy.mostRecentCall.args[1]).toEqual({ 'login': 1 });
* }));
* });
* ```
*/
/**
* @ngdoc method
* @name $provide#factory
* @description
*
* Register a **service factory**, which will be called to return the service instance.
* This is short for registering a service where its provider consists of only a `$get` property,
* which is the given service factory function.
* You should use {@link auto.$provide#factory $provide.factory(getFn)} if you do not need to
* configure your service in a provider.
*
* @param {string} name The name of the instance.
* @param {Function|Array.<string|Function>} $getFn The injectable $getFn for the instance creation.
* Internally this is a short hand for `$provide.provider(name, {$get: $getFn})`.
* @returns {Object} registered provider instance
*
* @example
* Here is an example of registering a service
* ```js
* $provide.factory('ping', ['$http', function($http) {
* return function ping() {
* return $http.send('/ping');
* };
* }]);
* ```
* You would then inject and use this service like this:
* ```js
* someModule.controller('Ctrl', ['ping', function(ping) {
* ping();
* }]);
* ```
*/
/**
* @ngdoc method
* @name $provide#service
* @description
*
* Register a **service constructor**, which will be invoked with `new` to create the service
* instance.
* This is short for registering a service where its provider's `$get` property is a factory
* function that returns an instance instantiated by the injector from the service constructor
* function.
*
* Internally it looks a bit like this:
*
* ```
* {
* $get: function() {
* return $injector.instantiate(constructor);
* }
* }
* ```
*
*
* You should use {@link auto.$provide#service $provide.service(class)} if you define your service
* as a type/class.
*
* @param {string} name The name of the instance.
* @param {Function|Array.<string|Function>} constructor An injectable class (constructor function)
* that will be instantiated.
* @returns {Object} registered provider instance
*
* @example
* Here is an example of registering a service using
* {@link auto.$provide#service $provide.service(class)}.
* ```js
* var Ping = function($http) {
* this.$http = $http;
* };
*
* Ping.$inject = ['$http'];
*
* Ping.prototype.send = function() {
* return this.$http.get('/ping');
* };
* $provide.service('ping', Ping);
* ```
* You would then inject and use this service like this:
* ```js
* someModule.controller('Ctrl', ['ping', function(ping) {
* ping.send();
* }]);
* ```
*/
/**
* @ngdoc method
* @name $provide#value
* @description
*
* Register a **value service** with the {@link auto.$injector $injector}, such as a string, a
* number, an array, an object or a function. This is short for registering a service where its
* provider's `$get` property is a factory function that takes no arguments and returns the **value
* service**. That also means it is not possible to inject other services into a value service.
*
* Value services are similar to constant services, except that they cannot be injected into a
* module configuration function (see {@link angular.Module#config}) but they can be overridden by
* an AngularJS {@link auto.$provide#decorator decorator}.
*
* @param {string} name The name of the instance.
* @param {*} value The value.
* @returns {Object} registered provider instance
*
* @example
* Here are some examples of creating value services.
* ```js
* $provide.value('ADMIN_USER', 'admin');
*
* $provide.value('RoleLookup', { admin: 0, writer: 1, reader: 2 });
*
* $provide.value('halfOf', function(value) {
* return value / 2;
* });
* ```
*/
/**
* @ngdoc method
* @name $provide#constant
* @description
*
* Register a **constant service** with the {@link auto.$injector $injector}, such as a string,
* a number, an array, an object or a function. Like the {@link auto.$provide#value value}, it is not
* possible to inject other services into a constant.
*
* But unlike {@link auto.$provide#value value}, a constant can be
* injected into a module configuration function (see {@link angular.Module#config}) and it cannot
* be overridden by an AngularJS {@link auto.$provide#decorator decorator}.
*
* @param {string} name The name of the constant.
* @param {*} value The constant value.
* @returns {Object} registered instance
*
* @example
 * Here are some examples of creating constants:
* ```js
* $provide.constant('SHARD_HEIGHT', 306);
*
* $provide.constant('MY_COLOURS', ['red', 'blue', 'grey']);
*
* $provide.constant('double', function(value) {
* return value * 2;
* });
* ```
*/
/**
* @ngdoc method
* @name $provide#decorator
* @description
*
* Register a **decorator function** with the {@link auto.$injector $injector}. A decorator function
* intercepts the creation of a service, allowing it to override or modify the behavior of the
* service. The return value of the decorator function may be the original service, or a new service
* that replaces (or wraps and delegates to) the original service.
*
* You can find out more about using decorators in the {@link guide/decorators} guide.
*
* @param {string} name The name of the service to decorate.
* @param {Function|Array.<string|Function>} decorator This function will be invoked when the service needs to be
* provided and should return the decorated service instance. The function is called using
* the {@link auto.$injector#invoke injector.invoke} method and is therefore fully injectable.
* Local injection arguments:
*
* * `$delegate` - The original service instance, which can be replaced, monkey patched, configured,
* decorated or delegated to.
*
* @example
* Here we decorate the {@link ng.$log $log} service to convert warnings to errors by intercepting
 * calls to {@link ng.$log#warn $log.warn()}.
* ```js
* $provide.decorator('$log', ['$delegate', function($delegate) {
* $delegate.warn = $delegate.error;
* return $delegate;
* }]);
* ```
*/
function createInjector(modulesToLoad, strictDi) {
strictDi = (strictDi === true);
var INSTANTIATING = {},
providerSuffix = 'Provider',
path = [],
loadedModules = new NgMap(),
providerCache = {
$provide: {
provider: supportObject(provider),
factory: supportObject(factory),
service: supportObject(service),
value: supportObject(value),
constant: supportObject(constant),
decorator: decorator
}
},
providerInjector = (providerCache.$injector =
createInternalInjector(providerCache, function(serviceName, caller) {
if (angular.isString(caller)) {
path.push(caller);
}
throw $injectorMinErr('unpr', 'Unknown provider: {0}', path.join(' <- '));
})),
instanceCache = {},
protoInstanceInjector =
createInternalInjector(instanceCache, function(serviceName, caller) {
var provider = providerInjector.get(serviceName + providerSuffix, caller);
return instanceInjector.invoke(
provider.$get, provider, undefined, serviceName);
}),
instanceInjector = protoInstanceInjector;
providerCache['$injector' + providerSuffix] = { $get: valueFn(protoInstanceInjector) };
var runBlocks = loadModules(modulesToLoad);
instanceInjector = protoInstanceInjector.get('$injector');
instanceInjector.strictDi = strictDi;
forEach(runBlocks, function(fn) { if (fn) instanceInjector.invoke(fn); });
return instanceInjector;
////////////////////////////////////
// $provider
////////////////////////////////////
function supportObject(delegate) {
return function(key, value) {
if (isObject(key)) {
forEach(key, reverseParams(delegate));
} else {
return delegate(key, value);
}
};
}
function provider(name, provider_) {
assertNotHasOwnProperty(name, 'service');
if (isFunction(provider_) || isArray(provider_)) {
provider_ = providerInjector.instantiate(provider_);
}
if (!provider_.$get) {
throw $injectorMinErr('pget', 'Provider \'{0}\' must define $get factory method.', name);
}
return (providerCache[name + providerSuffix] = provider_);
}
function enforceReturnValue(name, factory) {
return /** @this */ function enforcedReturnValue() {
var result = instanceInjector.invoke(factory, this);
if (isUndefined(result)) {
throw $injectorMinErr('undef', 'Provider \'{0}\' must return a value from $get factory method.', name);
}
return result;
};
}
function factory(name, factoryFn, enforce) {
return provider(name, {
$get: enforce !== false ? enforceReturnValue(name, factoryFn) : factoryFn
});
}
function service(name, constructor) {
return factory(name, ['$injector', function($injector) {
return $injector.instantiate(constructor);
}]);
}
function value(name, val) { return factory(name, valueFn(val), false); }
function constant(name, value) {
assertNotHasOwnProperty(name, 'constant');
providerCache[name] = value;
instanceCache[name] = value;
}
function decorator(serviceName, decorFn) {
var origProvider = providerInjector.get(serviceName + providerSuffix),
orig$get = origProvider.$get;
origProvider.$get = function() {
var origInstance = instanceInjector.invoke(orig$get, origProvider);
return instanceInjector.invoke(decorFn, null, {$delegate: origInstance});
};
}
////////////////////////////////////
// Module Loading
////////////////////////////////////
function loadModules(modulesToLoad) {
assertArg(isUndefined(modulesToLoad) || isArray(modulesToLoad), 'modulesToLoad', 'not an array');
var runBlocks = [], moduleFn;
forEach(modulesToLoad, function(module) {
if (loadedModules.get(module)) return;
loadedModules.set(module, true);
function
|
(queue) {
var i, ii;
for (i = 0, ii = queue.length; i < ii; i++) {
var invokeArgs = queue[i],
provider = providerInjector.get(invokeArgs[0]);
provider[invokeArgs[1]].apply(provider, invokeArgs[2]);
}
}
try {
if (isString(module)) {
moduleFn = angularModule(module);
runBlocks = runBlocks.concat(loadModules(moduleFn.requires)).concat(moduleFn._runBlocks);
runInvokeQueue(moduleFn._invokeQueue);
runInvokeQueue(moduleFn._configBlocks);
} else if (isFunction(module)) {
runBlocks.push(providerInjector.invoke(module));
} else if (isArray(module)) {
runBlocks.push(providerInjector.invoke(module));
} else {
assertArgFn(module, 'module');
}
} catch (e) {
if (isArray(module)) {
module = module[module.length - 1];
}
if (e.message && e.stack && e.stack.indexOf(e.message) === -1) {
// Safari & FF's stack traces don't contain error.message content
// unlike those of Chrome and IE
// So if stack doesn't contain message, we create a new string that contains both.
// Since error.stack is read-only in Safari, I'm overriding e and not e.stack here.
// eslint-disable-next-line no-ex-assign
e = e.message + '\n' + e.stack;
}
throw $injectorMinErr('modulerr', 'Failed to instantiate module {0} due to:\n{1}',
module, e.stack || e.message || e);
}
});
return runBlocks;
}
////////////////////////////////////
// internal Injector
////////////////////////////////////
function createInternalInjector(cache, factory) {
function getService(serviceName, caller) {
if (cache.hasOwnProperty(serviceName)) {
if (cache[serviceName] === INSTANTIATING) {
throw $injectorMinErr('cdep', 'Circular dependency found: {0}',
serviceName + ' <- ' + path.join(' <- '));
}
return cache[serviceName];
} else {
try {
path.unshift(serviceName);
cache[serviceName] = INSTANTIATING;
cache[serviceName] = factory(serviceName, caller);
return cache[serviceName];
} catch (err) {
if (cache[serviceName] === INSTANTIATING) {
delete cache[serviceName];
}
throw err;
} finally {
path.shift();
}
}
}
function injectionArgs(fn, locals, serviceName) {
var args = [],
$inject = createInjector.$$annotate(fn, strictDi, serviceName);
for (var i = 0, length = $inject.length; i < length; i++) {
var key = $inject[i];
if (typeof key !== 'string') {
throw $injectorMinErr('itkn',
'Incorrect injection token! Expected service name as string, got {0}', key);
}
args.push(locals && locals.hasOwnProperty(key) ? locals[key] :
getService(key, serviceName));
}
return args;
}
function isClass(func) {
// Support: IE 9-11 only
// IE 9-11 do not support classes and IE9 leaks with the code below.
if (msie || typeof func !== 'function') {
return false;
}
var result = func.$$ngIsClass;
if (!isBoolean(result)) {
// Support: Edge 12-13 only
// See: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/6156135/
result = func.$$ngIsClass = /^(?:class\b|constructor\()/.test(stringifyFn(func));
}
return result;
}
function invoke(fn, self, locals, serviceName) {
if (typeof locals === 'string') {
serviceName = locals;
locals = null;
}
var args = injectionArgs(fn, locals, serviceName);
if (isArray(fn)) {
fn = fn[fn.length - 1];
}
if (!isClass(fn)) {
// http://jsperf.com/angularjs-invoke-apply-vs-switch
// #5388
return fn.apply(self, args);
} else {
args.unshift(null);
return new (Function.prototype.bind.apply(fn, args))();
}
}
function instantiate(Type, locals, serviceName) {
// Check if Type is annotated and use just the given function at n-1 as parameter
// e.g. someModule.factory('greeter', ['$window', function(renamed$window) {}]);
var ctor = (isArray(Type) ? Type[Type.length - 1] : Type);
var args = injectionArgs(Type, locals, serviceName);
// Empty object at position 0 is ignored for invocation with `new`, but required.
args.unshift(null);
return new (Function.prototype.bind.apply(ctor, args))();
}
return {
invoke: invoke,
instantiate: instantiate,
get: getService,
annotate: createInjector.$$annotate,
has: function(name) {
return providerCache.hasOwnProperty(name + providerSuffix) || cache.hasOwnProperty(name);
}
};
}
}
createInjector.$$annotate = annotate;
|
runInvokeQueue
|
serial.go
|
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package serial defines the guru's schema for structured data
// serialization using JSON, XML, etc.
package serial
// All 'pos' strings are of the form "file:line:col".
// TODO(adonovan): improve performance by sharing filename strings.
// TODO(adonovan): improve precision by providing the start/end
// interval when available.
//
// TODO(adonovan): consider richer encodings of types, functions,
// methods, etc.
// A Peers is the result of a 'peers' query.
// If Allocs is empty, the selected channel can't point to anything.
type Peers struct {
Pos string `json:"pos"` // location of the selected channel op (<-)
Type string `json:"type"` // type of the selected channel
Allocs []string `json:"allocs,omitempty"` // locations of aliased make(chan) ops
Sends []string `json:"sends,omitempty"` // locations of aliased ch<-x ops
Receives []string `json:"receives,omitempty"` // locations of aliased <-ch ops
Closes []string `json:"closes,omitempty"` // locations of aliased close(ch) ops
}
// A Referrers is the result of a 'referrers' query.
type Referrers struct {
ObjPos string `json:"objpos,omitempty"` // location of the definition
Desc string `json:"desc"` // description of the denoted object
Refs []string `json:"refs,omitempty"` // locations of all references
}
|
// A Definition is the result of a 'definition' query.
type Definition struct {
ObjPos string `json:"objpos,omitempty"` // location of the definition
Desc string `json:"desc"` // description of the denoted object
}
type CalleesItem struct {
Name string `json:"name"` // full name of called function
Pos string `json:"pos"` // location of called function
}
// A Callees is the result of a 'callees' query.
//
// Callees is nonempty unless the call was a dynamic call on a
// provably nil func or interface value.
type Callees struct {
Pos string `json:"pos"` // location of selected call site
Desc string `json:"desc"` // description of call site
Callees []*CalleesItem `json:"callees,omitempty"` // set of possible call targets
}
// A Caller is one element of the slice returned by a 'callers' query.
// (Callstack also contains a similar slice.)
//
// The root of the callgraph has an unspecified "Caller" string.
type Caller struct {
Pos string `json:"pos,omitempty"` // location of the calling function
Desc string `json:"desc"` // description of call site
Caller string `json:"caller"` // full name of calling function
}
// A CallStack is the result of a 'callstack' query.
// It indicates an arbitrary path from the root of the callgraph to
// the query function.
//
// If the Callers slice is empty, the function was unreachable in this
// analysis scope.
type CallStack struct {
Pos string `json:"pos"` // location of the selected function
Target string `json:"target"` // the selected function
Callers []Caller `json:"callers"` // enclosing calls, innermost first.
}
// A FreeVar is one element of the slice returned by a 'freevars'
// query. Each one identifies an expression referencing a local
// identifier defined outside the selected region.
type FreeVar struct {
Pos string `json:"pos"` // location of the identifier's definition
Kind string `json:"kind"` // one of {var,func,type,const,label}
Ref string `json:"ref"` // referring expression (e.g. "x" or "x.y.z")
Type string `json:"type"` // type of the expression
}
// An Implements contains the result of an 'implements' query.
// It describes the queried type, the set of named non-empty interface
// types to which it is assignable, and the set of named/*named types
// (concrete or non-empty interface) which may be assigned to it.
//
type Implements struct {
T ImplementsType `json:"type,omitempty"` // the queried type
AssignableTo []ImplementsType `json:"to,omitempty"` // types assignable to T
AssignableFrom []ImplementsType `json:"from,omitempty"` // interface types assignable from T
AssignableFromPtr []ImplementsType `json:"fromptr,omitempty"` // interface types assignable only from *T
// The following fields are set only if the query was a method.
// Assignable{To,From,FromPtr}Method[i] is the corresponding
// method of type Assignable{To,From,FromPtr}[i], or blank
// {"",""} if that type lacks the method.
Method *DescribeMethod `json:"method,omitempty"` // the queried method
AssignableToMethod []DescribeMethod `json:"to_method,omitempty"`
AssignableFromMethod []DescribeMethod `json:"from_method,omitempty"`
AssignableFromPtrMethod []DescribeMethod `json:"fromptr_method,omitempty"`
}
// An ImplementsType describes a single type as part of an 'implements' query.
type ImplementsType struct {
Name string `json:"name"` // full name of the type
Pos string `json:"pos"` // location of its definition
Kind string `json:"kind"` // "basic", "array", etc
}
// A SyntaxNode is one element of a stack of enclosing syntax nodes in
// a "what" query.
type SyntaxNode struct {
Description string `json:"desc"` // description of syntax tree
Start int `json:"start"` // start byte offset, 0-based
End int `json:"end"` // end byte offset
}
// A What is the result of the "what" query, which quickly identifies
// the selection, parsing only a single file. It is intended for use
// in low-latency GUIs.
type What struct {
Enclosing []SyntaxNode `json:"enclosing"` // enclosing nodes of syntax tree
Modes []string `json:"modes"` // query modes enabled for this selection.
SrcDir string `json:"srcdir,omitempty"` // $GOROOT src directory containing queried package
ImportPath string `json:"importpath,omitempty"` // import path of queried package
Object string `json:"object,omitempty"` // name of identified object, if any
SameIDs []string `json:"sameids,omitempty"` // locations of references to same object
}
// A PointsToLabel describes a pointer analysis label.
//
// A "label" is an object that may be pointed to by a pointer, map,
// channel, 'func', slice or interface. Labels include:
// - functions
// - globals
// - arrays created by literals (e.g. []byte("foo")) and conversions ([]byte(s))
// - stack- and heap-allocated variables (including composite literals)
// - arrays allocated by append()
// - channels, maps and arrays created by make()
// - and their subelements, e.g. "alloc.y[*].z"
//
type PointsToLabel struct {
Pos string `json:"pos"` // location of syntax that allocated the object
Desc string `json:"desc"` // description of the label
}
// A PointsTo is one element of the result of a 'pointsto' query on an
// expression. It describes a single pointer: its type and the set of
// "labels" it points to.
//
// If the pointer is of interface type, it will have one PTS entry
// describing each concrete type that it may contain. For each
// concrete type that is a pointer, the PTS entry describes the labels
// it may point to. The same is true for reflect.Values, except the
// dynamic types needn't be concrete.
//
type PointsTo struct {
Type string `json:"type"` // (concrete) type of the pointer
NamePos string `json:"namepos,omitempty"` // location of type defn, if Named
Labels []PointsToLabel `json:"labels,omitempty"` // pointed-to objects
}
// A DescribeValue is the additional result of a 'describe' query
// if the selection indicates a value or expression.
type DescribeValue struct {
Type string `json:"type"` // type of the expression
Value string `json:"value,omitempty"` // value of the expression, if constant
ObjPos string `json:"objpos,omitempty"` // location of the definition, if an Ident
}
type DescribeMethod struct {
Name string `json:"name"` // method name, as defined by types.Selection.String()
Pos string `json:"pos"` // location of the method's definition
}
// A DescribeType is the additional result of a 'describe' query
// if the selection indicates a type.
type DescribeType struct {
Type string `json:"type"` // the string form of the type
NamePos string `json:"namepos,omitempty"` // location of definition of type, if named
NameDef string `json:"namedef,omitempty"` // underlying definition of type, if named
Methods []DescribeMethod `json:"methods,omitempty"` // methods of the type
}
type DescribeMember struct {
Name string `json:"name"` // name of member
Type string `json:"type,omitempty"` // type of member (underlying, if 'type')
Value string `json:"value,omitempty"` // value of member (if 'const')
Pos string `json:"pos"` // location of definition of member
Kind string `json:"kind"` // one of {var,const,func,type}
Methods []DescribeMethod `json:"methods,omitempty"` // methods (if member is a type)
}
// A DescribePackage is the additional result of a 'describe' if
// the selection indicates a package.
type DescribePackage struct {
Path string `json:"path"` // import path of the package
Members []*DescribeMember `json:"members,omitempty"` // accessible members of the package
}
// A Describe is the result of a 'describe' query.
// It may contain an element describing the selected semantic entity
// in detail.
type Describe struct {
Desc string `json:"desc"` // description of the selected syntax node
Pos string `json:"pos"` // location of the selected syntax node
Detail string `json:"detail,omitempty"` // one of {package, type, value}, or "".
// At most one of the following fields is populated:
// the one specified by 'detail'.
Package *DescribePackage `json:"package,omitempty"`
Type *DescribeType `json:"type,omitempty"`
Value *DescribeValue `json:"value,omitempty"`
}
// A WhichErrs is the result of a 'whicherrs' query.
// It contains the position of the queried error and the possible globals,
// constants, and types it may point to.
type WhichErrs struct {
ErrPos string `json:"errpos,omitempty"` // location of queried error
Globals []string `json:"globals,omitempty"` // locations of globals
Constants []string `json:"constants,omitempty"` // locations of constants
Types []WhichErrsType `json:"types,omitempty"` // Types
}
type WhichErrsType struct {
Type string `json:"type,omitempty"`
Position string `json:"position,omitempty"`
}
// A Result is the common result of any guru query.
// It contains a query-specific result element.
//
// TODO(adonovan): perhaps include other info such as: analysis scope,
// raw query position, stack of ast nodes, query package, etc.
type Result struct {
Mode string `json:"mode"` // mode of the query
// Exactly one of the following fields is populated:
// the one specified by 'mode'.
Callees *Callees `json:"callees,omitempty"`
Callers []Caller `json:"callers,omitempty"`
Callstack *CallStack `json:"callstack,omitempty"`
Definition *Definition `json:"definition,omitempty"`
Describe *Describe `json:"describe,omitempty"`
Freevars []*FreeVar `json:"freevars,omitempty"`
Implements *Implements `json:"implements,omitempty"`
Peers *Peers `json:"peers,omitempty"`
PointsTo []PointsTo `json:"pointsto,omitempty"`
Referrers *Referrers `json:"referrers,omitempty"`
What *What `json:"what,omitempty"`
WhichErrs *WhichErrs `json:"whicherrs,omitempty"`
}
| |
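The structs above double as the JSON wire format of guru's -json output. As a hedged illustration (not part of this Go package), a client in another language could decode a 'referrers' result using the json tags declared above; the raw input handling below is an assumption.

# Hedged sketch: decode a guru -json 'referrers' result using the json tags
# of serial.Result and serial.Referrers above; raw input is an assumption.
import json

def print_referrers(raw):
    result = json.loads(raw)              # shape of serial.Result
    if result.get("mode") != "referrers":
        raise ValueError("not a referrers result")
    refs = result.get("referrers") or {}  # shape of serial.Referrers
    print(refs.get("desc", ""))
    for pos in refs.get("refs", []):      # each pos is "file:line:col"
        print(" ", pos)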
blog.controller.ts
|
import { Controller, Get, Inject } from '@nestjs/common';
import { ClientProxy, MessagePattern } from '@nestjs/microservices';
import { Observable } from 'rxjs';
import { MATH_SERVICE } from './blog.constants';
@Controller('blog')
export class
|
{
constructor(@Inject(MATH_SERVICE) private readonly client: ClientProxy) {}
@Get()
execute(): Observable<number> {
const pattern = { cmd: 'sum' };
const data = [1, 2, 3, 4, 5];
return this.client.send<number>(pattern, data);
}
@MessagePattern({ cmd: 'sum' })
accumulate(data: number[]): number {
return (data || []).reduce((a, b) => a + b);
}
}
|
BlogController
|
rpc.rs
|
//! A collection of node-specific RPC methods.
//! Substrate provides the `sc-rpc` crate, which defines the core RPC layer
//! used by Substrate nodes. This file extends those RPC definitions with
//! capabilities that are specific to this project's runtime configuration.
#![warn(missing_docs)]
use std::sync::Arc;
use hydra_dx_runtime::{opaque::Block, AccountId, AssetId, Balance, BlockNumber, Hash, Index};
use sc_consensus_babe::Epoch;
use sc_consensus_babe_rpc::BabeRpcHandler;
use sc_finality_grandpa::FinalityProofProvider;
use sc_finality_grandpa_rpc::GrandpaRpcHandler;
pub use sc_rpc_api::DenyUnsafe;
use sp_api::ProvideRuntimeApi;
use sp_block_builder::BlockBuilder;
use sp_blockchain::{Error as BlockChainError, HeaderBackend, HeaderMetadata};
use sp_consensus::SelectChain;
use sp_consensus_babe::BabeApi;
use sp_keystore::SyncCryptoStorePtr;
use sp_transaction_pool::TransactionPool;
/// Full client dependencies.
pub struct FullDeps<C, P, SC, B> {
/// The client instance to use.
pub client: Arc<C>,
/// Transaction pool instance.
pub pool: Arc<P>,
/// The SelectChain Strategy
pub select_chain: SC,
/// Whether to deny unsafe calls
pub deny_unsafe: DenyUnsafe,
/// BABE specific dependencies.
pub babe: BabeDeps,
/// GRANDPA specific dependencies.
pub grandpa: GrandpaDeps<B>,
}
/// Extra dependencies for BABE.
pub struct BabeDeps {
/// BABE protocol config.
pub babe_config: sc_consensus_babe::Config,
/// BABE pending epoch changes.
pub shared_epoch_changes: sc_consensus_epochs::SharedEpochChanges<Block, Epoch>,
/// The keystore that manages the keys of the node.
pub keystore: SyncCryptoStorePtr,
}
/// Extra dependencies for GRANDPA
pub struct GrandpaDeps<B> {
/// Voting round info.
pub shared_voter_state: sc_finality_grandpa::SharedVoterState,
/// Authority set info.
pub shared_authority_set: sc_finality_grandpa::SharedAuthoritySet<Hash, BlockNumber>,
/// Receives notifications about justification events from Grandpa.
pub justification_stream: sc_finality_grandpa::GrandpaJustificationStream<Block>,
/// Executor to drive the subscription manager in the Grandpa RPC handler.
pub subscription_executor: sc_rpc::SubscriptionTaskExecutor,
/// Finality proof provider.
pub finality_provider: Arc<FinalityProofProvider<B, Block>>,
}
/// Instantiate all full RPC extensions.
pub fn create_full<C, P, SC, B>(deps: FullDeps<C, P, SC, B>) -> jsonrpc_core::IoHandler<sc_rpc::Metadata>
where
C: ProvideRuntimeApi<Block>,
C: HeaderBackend<Block> + HeaderMetadata<Block, Error = BlockChainError>,
C: Send + Sync + 'static,
C::Api: substrate_frame_rpc_system::AccountNonceApi<Block, AccountId, Index>,
C::Api: pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi<Block, Balance>,
C::Api: BabeApi<Block>,
C::Api: BlockBuilder<Block>,
C::Api: pallet_xyk_rpc::XYKRuntimeApi<Block, AccountId, AssetId, Balance>,
P: TransactionPool + Sync + Send + 'static,
SC: SelectChain<Block> + 'static,
B: sc_client_api::Backend<Block> + Send + Sync + 'static,
B::State: sc_client_api::StateBackend<sp_runtime::traits::HashFor<Block>>,
{
use pallet_transaction_payment_rpc::{TransactionPayment, TransactionPaymentApi};
use pallet_xyk_rpc::{XYKApi, XYK};
use substrate_frame_rpc_system::{FullSystem, SystemApi};
let mut io = jsonrpc_core::IoHandler::default();
let FullDeps {
client,
pool,
select_chain,
deny_unsafe,
babe,
grandpa,
} = deps;
let BabeDeps {
keystore,
babe_config,
shared_epoch_changes,
} = babe;
let GrandpaDeps {
shared_voter_state,
shared_authority_set,
justification_stream,
subscription_executor,
finality_provider,
} = grandpa;
io.extend_with(SystemApi::to_delegate(FullSystem::new(
client.clone(),
pool,
deny_unsafe,
)));
io.extend_with(TransactionPaymentApi::to_delegate(TransactionPayment::new(
client.clone(),
)));
// Extend this RPC with a custom API by using the following syntax.
// `YourRpcStruct` should have a reference to a client, which is needed
// to call into the runtime.
// `io.extend_with(YourRpcTrait::to_delegate(YourRpcStruct::new(ReferenceToClient, ...)));`
io.extend_with(XYKApi::to_delegate(XYK::new(client.clone())));
io.extend_with(sc_consensus_babe_rpc::BabeApi::to_delegate(BabeRpcHandler::new(
client,
|
babe_config,
select_chain,
deny_unsafe,
)));
io.extend_with(sc_finality_grandpa_rpc::GrandpaApi::to_delegate(
GrandpaRpcHandler::new(
shared_authority_set,
shared_voter_state,
justification_stream,
subscription_executor,
finality_provider,
),
));
io
}
|
shared_epoch_changes,
keystore,
|
parallel_command.py
|
# coding: utf-8
#
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file
# except in compliance with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# the specific language governing permissions and limitations under the License.
#
import pprint
import re # noqa: F401
import six
import typing
from enum import Enum
from ask_sdk_model.interfaces.alexa.presentation.apl.command import Command
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional, Union, Any
from datetime import datetime
from ask_sdk_model.interfaces.alexa.presentation.apl.command import Command as Command_bc5ff832
class ParallelCommand(Command):
"""
    Execute a series of commands in parallel. The parallel command starts executing all child commands simultaneously. The parallel command is considered finished when all of its child commands have finished. When the parallel command is terminated early, all currently executing commands are terminated.
:param delay: The delay in milliseconds before this command starts executing; must be non-negative. Defaults to 0.
:type delay: (optional) int
:param description: A user-provided description of this command.
:type description: (optional) str
:param when: If false, the execution of the command is skipped. Defaults to true.
:type when: (optional) bool
    :param commands: An unordered array of commands to execute in parallel. Once all commands have finished executing, the parallel command finishes. Note that the delay of the parallel command and the delay of each command are additive.
:type commands: (optional) list[ask_sdk_model.interfaces.alexa.presentation.apl.command.Command]
"""
deserialized_types = {
'object_type': 'str',
'delay': 'int',
'description': 'str',
'when': 'bool',
'commands': 'list[ask_sdk_model.interfaces.alexa.presentation.apl.command.Command]'
} # type: Dict
attribute_map = {
'object_type': 'type',
'delay': 'delay',
'description': 'description',
'when': 'when',
'commands': 'commands'
} # type: Dict
supports_multiple_types = False
def __init__(self, delay=None, description=None, when=None, commands=None):
# type: (Union[int, str, None], Optional[str], Optional[bool], Optional[List[Command_bc5ff832]]) -> None
"""Execute a series of commands in parallel. The parallel command starts executing all child command simultaneously. The parallel command is considered finished when all of its child commands have finished. When the parallel command is terminated early, all currently executing commands are terminated.
:param delay: The delay in milliseconds before this command starts executing; must be non-negative. Defaults to 0.
:type delay: (optional) int
:param description: A user-provided description of this command.
:type description: (optional) str
:param when: If false, the execution of the command is skipped. Defaults to true.
:type when: (optional) bool
        :param commands: An unordered array of commands to execute in parallel. Once all commands have finished executing, the parallel command finishes. Note that the delay of the parallel command and the delay of each command are additive.
:type commands: (optional) list[ask_sdk_model.interfaces.alexa.presentation.apl.command.Command]
"""
self.__discriminator_value = "Parallel" # type: str
self.object_type = self.__discriminator_value
super(ParallelCommand, self).__init__(object_type=self.__discriminator_value, delay=delay, description=description, when=when)
self.commands = commands
def to_dict(self):
# type: () -> Dict[str, object]
"""Returns the model properties as a dict"""
result = {} # type: Dict
for attr, _ in six.iteritems(self.deserialized_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else
x.value if isinstance(x, Enum) else x,
value
))
elif isinstance(value, Enum):
result[attr] = value.value
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else
(item[0], item[1].value)
if isinstance(item[1], Enum) else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
# type: () -> str
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
# type: () -> str
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
# type: (object) -> bool
"""Returns true if both objects are equal"""
if not isinstance(other, ParallelCommand):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
# type: (object) -> bool
|
"""Returns true if both objects are not equal"""
return not self == other
|
|
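As a hedged usage sketch for the ParallelCommand model above (the import path is inferred from the module references in this file and is an assumption, as is the empty command list):

# Hedged usage sketch for ParallelCommand; the import path and arguments
# are illustrative assumptions.
from ask_sdk_model.interfaces.alexa.presentation.apl.parallel_command import (
    ParallelCommand
)

cmd = ParallelCommand(delay=0, description="run children together", commands=[])
print(cmd.object_type)  # "Parallel", set from the discriminator value
print(cmd.to_dict())    # keys follow the attribute names in deserialized_types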
mod.rs
|
use std::collections::HashMap;
#[derive(Debug)]
#[derive(PartialEq)]
#[derive(Clone)]
#[derive(Hash)]
#[derive(Eq)]
pub enum GwBasicToken {
EndTok,
ForTok,
NextTok,
DataTok,
InputTok,
DimTok,
ReadTok,
LetTok,
GotoTok,
RunTok,
IfTok,
RestoreTok,
GosubTok,
ReturnTok,
RemTok,
StopTok,
PrintTok,
ClearTok,
ListTok,
NewTok,
OnTok,
WaitTok,
DefTok,
PokeTok,
ContTok,
OutTok,
LprintTok,
LlistTok,
WidthTok,
ElseTok,
TronTok,
TroffTok,
SwapTok,
EraseTok,
EditTok,
ErrorTok,
ResumeTok,
DeleteTok,
AutoTok,
RenumTok,
DefstrTok,
DefintTok,
DefsngTok,
DefdblTok,
LineTok,
WhileTok,
WendTok,
CallTok,
WriteTok,
OptionTok,
RandomizeTok,
OpenTok,
CloseTok,
LoadTok,
MergeTok,
SaveTok,
ColorTok,
ClsTok,
MotorTok,
|
PsetTok,
PresetTok,
ScreenTok,
KeyTok,
LocateTok,
ToTok,
ThenTok,
TabTok,
StepTok,
UsrTok,
FnTok,
SpcTok,
NotTok,
ErlTok,
ErrTok,
StringDTok,
UsingTok,
InstrTok,
SingleQuoteTok,
VarptrTok,
CsrlinTok,
PointTok,
OffTok,
InkeyDTok,
GtTok,
EqlTok,
LtTok,
PlusTok,
MinusTok,
TimesTok,
DivTok,
PowOperatorTok,
AndTok,
OrTok,
XorTok,
EqvTok,
ImpTok,
ModTok,
Div2Tok,
CviTok,
CvsTok,
CvdTok,
MkiDTok,
MksDTok,
MkdDTok,
ExterrTok,
FilesTok,
FieldTok,
SystemTok,
NameTok,
LsetTok,
RsetTok,
KillTok,
PutTok,
GetTok,
ResetTok,
CommonTok,
ChainTok,
DateDTok,
TimeDTok,
PaintTok,
ComTok,
CircleTok,
DrawTok,
PlayTok,
TimerTok,
ErdevTok,
IoctlTok,
ChdirTok,
MkdirTok,
RmdirTok,
ShellTok,
EnvironTok,
ViewTok,
WindowTok,
PmapTok,
PaletteTok,
LcopyTok,
CallsTok,
NoiseTok,
PcopyTok,
TermTok,
LockTok,
UnlockTok,
LeftDTok,
RightDTok,
MidDTok,
SgnTok,
IntTok,
AbsTok,
SqrTok,
RndTok,
SinTok,
LogTok,
ExpTok,
CosTok,
TanTok,
AtnTok,
FreTok,
InpTok,
PosTok,
LenTok,
StrDTok,
ValTok,
AscTok,
ChrDTok,
PeekTok,
SpaceDTok,
OctDTok,
HexDTok,
LposTok,
CintTok,
CsngTok,
CdblTok,
FixTok,
PenTok,
StickTok,
StrigTok,
EofTok,
LocTok,
LofTok,
// new tokens
ColonSeparatorTok,
CommaSeparatorTok,
SemiColonSeparatorTok,
LparTok,
RparTok,
Untokenized(u8)
}
pub struct GwTokenInfo {
token_text : HashMap<String, GwBasicToken>,
token_vs_text : HashMap<GwBasicToken, String>
}
impl GwTokenInfo {
pub fn create() -> GwTokenInfo {
let mut dict = HashMap::new();
let mut dict2 = HashMap::new();
GwTokenInfo::add_token("GOTO", GwBasicToken::GotoTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("END", GwBasicToken::EndTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("PRINT", GwBasicToken::PrintTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("INPUT", GwBasicToken::InpTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("IF", GwBasicToken::IfTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("THEN", GwBasicToken::ThenTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("KEY", GwBasicToken::KeyTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("OFF", GwBasicToken::OffTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("ON", GwBasicToken::OnTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("CLS", GwBasicToken::ClsTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("COLOR", GwBasicToken::ColorTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("LIST", GwBasicToken::ListTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("RUN", GwBasicToken::RunTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("SYSTEM", GwBasicToken::SystemTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("LOAD", GwBasicToken::LoadTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("DEFDBL", GwBasicToken::DefdblTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("REM", GwBasicToken::RemTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("USING", GwBasicToken::UsingTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("*", GwBasicToken::TimesTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("/", GwBasicToken::DivTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("-", GwBasicToken::MinusTok, &mut dict, &mut dict2);
GwTokenInfo::add_token("+", GwBasicToken::PlusTok, &mut dict, &mut dict2);
GwTokenInfo {
token_text: dict,
token_vs_text: dict2
}
}
fn add_token(tok_text : &str,
token : GwBasicToken,
txt_vs_token : &mut HashMap<String, GwBasicToken>,
token_vs_txt : &mut HashMap<GwBasicToken, String>) {
let str_key = String::from(tok_text);
token_vs_txt.insert(token.clone(), str_key);
        // Controversial! Couldn't figure out how to reuse the
        // `String` instance created above without adding a lifetime annotation
        // to this struct, which would make using this struct very difficult.
txt_vs_token.insert(String::from(tok_text), token);
}
pub fn get_token(&self, tok_text : &String) -> Option<&GwBasicToken> {
self.token_text.get(tok_text)
}
}
|
BsaveTok,
BloadTok,
SoundTok,
BeepTok,
|
debug_printer.go
|
package terminal
import (
"time"
. "code.cloudfoundry.org/cli/cf/i18n"
"code.cloudfoundry.org/cli/cf/trace"
)
type DebugPrinter struct {
Logger trace.Printer
}
func (p DebugPrinter) Print(title, dump string) {
p.Logger.Printf("\n%s [%s]\n%s\n", HeaderColor(T(title)), time.Now().Format(time.RFC3339), trace.Sanitize(dump))
|
}
|
|
test_linux_consumption.py
|
from unittest import TestCase, skip
import os
import sys
from requests import Request
from azure_functions_worker.testutils_lc import (
LinuxConsumptionWebHostController
)
@skip('Flaky test and needs stabilization')
class TestLinuxConsumption(TestCase):
"""Test worker behaviors on specific scenarios.
SCM_RUN_FROM_PACKAGE: built function apps are acquired from
-> "Simple Batch" Subscription
-> "AzureFunctionsPythonWorkerCILinuxDevOps" Resource Group
-> "pythonworker<python_major><python_minor>sa" Storage Account
-> "python-worker-lc-apps" Blob Container
For a list of scenario names:
https://pythonworker39sa.blob.core.windows.net/python-worker-lc-apps?restype=container&comp=list
"""
@classmethod
def setUpClass(cls):
cls._py_version = f'{sys.version_info.major}.{sys.version_info.minor}'
cls._py_shortform = f'{sys.version_info.major}{sys.version_info.minor}'
cls._storage = os.getenv('AzureWebJobsStorage')
if cls._storage is None:
raise RuntimeError('Environment variable AzureWebJobsStorage is '
'required before running Linux Consumption test')
def test_placeholder_mode_root_returns_ok(self):
"""In any circumstances, a placeholder container should returns 200
even when it is not specialized.
"""
with LinuxConsumptionWebHostController("3", self._py_version) as ctrl:
req = Request('GET', ctrl.url)
resp = ctrl.send_request(req)
self.assertTrue(resp.ok)
def test_http_no_auth(self):
"""An HttpTrigger function app with 'azure-functions' library
should return 200.
"""
with LinuxConsumptionWebHostController("3", self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
"SCM_RUN_FROM_PACKAGE": self._get_blob_url("HttpNoAuth")
})
req = Request('GET', f'{ctrl.url}/api/HttpTrigger')
resp = ctrl.send_request(req)
self.assertEqual(resp.status_code, 200)
|
azure-functions
azure-eventhub
azure-storage-blob
numpy
cryptography
pyodbc
requests
should return 200 after importing all libraries.
"""
with LinuxConsumptionWebHostController("3", self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
"SCM_RUN_FROM_PACKAGE": self._get_blob_url("CommonLibraries")
})
req = Request('GET', f'{ctrl.url}/api/HttpTrigger')
resp = ctrl.send_request(req)
content = resp.json()
self.assertIn('azure.functions', content)
self.assertIn('azure.storage.blob', content)
self.assertIn('numpy', content)
self.assertIn('cryptography', content)
self.assertIn('pyodbc', content)
self.assertIn('requests', content)
self.assertEqual(resp.status_code, 200)
def test_new_protobuf(self):
"""A function app with the following requirements.txt:
azure-functions==1.7.0
protobuf==3.15.8
grpcio==1.33.2
should return 200 after importing all libraries.
"""
with LinuxConsumptionWebHostController("3", self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
"SCM_RUN_FROM_PACKAGE": self._get_blob_url("NewProtobuf")
})
req = Request('GET', f'{ctrl.url}/api/HttpTrigger')
resp = ctrl.send_request(req)
content = resp.json()
# Worker always picks up the SDK version bundled with the image
            # Package versions are inconsistent due to an isolation bug
self.assertIn('azure.functions', content)
self.assertIn('google.protobuf', content)
self.assertIn('grpc', content)
self.assertEqual(resp.status_code, 200)
def test_old_protobuf(self):
"""A function app with the following requirements.txt:
azure-functions==1.5.0
protobuf==3.8.0
grpcio==1.27.1
should return 200 after importing all libraries.
"""
with LinuxConsumptionWebHostController("3", self._py_version) as ctrl:
ctrl.assign_container(env={
"AzureWebJobsStorage": self._storage,
"SCM_RUN_FROM_PACKAGE": self._get_blob_url("NewProtobuf")
})
req = Request('GET', f'{ctrl.url}/api/HttpTrigger')
resp = ctrl.send_request(req)
content = resp.json()
# Worker always picks up the SDK version bundled with the image
            # Package versions are inconsistent due to an isolation bug
self.assertIn('azure.functions', content)
self.assertIn('google.protobuf', content)
self.assertIn('grpc', content)
self.assertEqual(resp.status_code, 200)
def _get_blob_url(self, scenario_name: str) -> str:
return (
f'https://pythonworker{self._py_shortform}sa.blob.core.windows.net/'
f'python-worker-lc-apps/{scenario_name}{self._py_shortform}.zip'
)
|
def test_common_libraries(self):
"""A function app with the following requirements.txt:
|
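The blob naming convention encoded in _get_blob_url above can be reproduced standalone; the following is a minimal sketch, with the short form and scenario name as illustrative inputs.

# Minimal sketch of the blob URL convention from _get_blob_url above;
# the inputs are illustrative assumptions.
def blob_url(py_shortform, scenario_name):
    return (
        f'https://pythonworker{py_shortform}sa.blob.core.windows.net/'
        f'python-worker-lc-apps/{scenario_name}{py_shortform}.zip'
    )

assert blob_url('39', 'HttpNoAuth').endswith('HttpNoAuth39.zip')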
cycle_gan_model.py
|
import torch
from torch import Tensor, nn
from torch.nn.functional import mse_loss
from simulation.utils.machine_learning.data.image_pool import ImagePool
from simulation.utils.machine_learning.models.helper import set_requires_grad
from .base_model import BaseModel
from .cycle_gan_stats import CycleGANStats
class CycleGANModel(BaseModel):
"""This class implements the CycleGAN model, for learning image-to-image translation
without paired data.
CycleGAN paper: https://arxiv.org/pdf/1703.10593.pdf
"""
def __init__(
self,
netg_a_to_b: nn.Module,
netg_b_to_a: nn.Module,
netd_a: nn.Module = None,
netd_b: nn.Module = None,
is_train: bool = True,
cycle_noise_stddev: int = 0,
pool_size: int = 50,
beta1: float = 0.5,
lr: float = 0.0002,
lr_policy: str = "linear",
lambda_idt_a: int = 10,
lambda_idt_b: int = 10,
lambda_cycle: float = 0.5,
optimizer_type: str = "adam",
is_l1: bool = False,
):
"""Initialize the CycleGAN class.
Args:
is_train: enable or disable training mode
cycle_noise_stddev: Standard deviation of noise added to the cycle input.
Mean is 0.
pool_size: the size of image buffer that stores previously generated images
beta1: momentum term of adam
lr: initial learning rate for adam
            lr_policy: learning rate policy [linear | step | plateau | cosine]
            lambda_idt_a: weight for the identity loss of domain A
            lambda_idt_b: weight for the identity loss of domain B
            lambda_cycle: weight for the cycle-consistency loss
optimizer_type: Name of the optimizer that will be used
"""
super().__init__(
netg_a_to_b,
netg_b_to_a,
netd_a,
netd_b,
is_train,
lambda_cycle,
lambda_idt_a,
lambda_idt_b,
is_l1,
optimizer_type,
lr_policy,
beta1,
lr,
cycle_noise_stddev,
)
if is_train:
self.fake_a_pool = ImagePool(
pool_size
) # create image buffer to store previously generated images
self.fake_b_pool = ImagePool(
pool_size
) # create image buffer to store previously generated images
# define loss functions
def gan_loss(prediction: torch.Tensor, is_real: bool):
target = torch.tensor(
1.0 if is_real else 0.0, device=prediction.device
).expand_as(prediction)
return mse_loss(prediction, target)
self.criterionGAN = gan_loss
def
|
(
self, netd: nn.Module, real: torch.Tensor, fake: torch.Tensor
) -> Tensor:
"""Calculate GAN loss for the discriminator.
We also call loss_d.backward() to calculate the gradients.
Return:
Discriminator loss.
Args:
netd (nn.Module): the discriminator network
real (torch.Tensor): the real image
fake (torch.Tensor): the fake image
"""
# Real
pred_real = netd(real)
loss_d_real = self.criterionGAN(pred_real, True)
# Fake
pred_fake = netd(fake.detach())
loss_d_fake = self.criterionGAN(pred_fake, False)
# Combined loss and calculate gradients
loss_d = (loss_d_real + loss_d_fake) * 0.5
loss_d.backward()
return loss_d
def backward_d_a(self, real_a, fake_a) -> float:
"""Calculate GAN loss for discriminator D_B."""
fake_a = self.fake_a_pool.query(fake_a)
loss_d_a = self.backward_d_basic(self.networks.d_a, real_a, fake_a).item()
return loss_d_a
def backward_d_b(self, real_b, fake_b) -> float:
"""Calculate GAN loss for discriminator D_b."""
fake_b = self.fake_b_pool.query(fake_b)
loss_d_b = self.backward_d_basic(self.networks.d_b, real_b, fake_b).item()
return loss_d_b
def do_iteration(self, batch_a: torch.Tensor, batch_b: torch.Tensor):
"""Calculate losses, gradients, and update network weights; called in every training
iteration."""
real_a = batch_a
real_b = batch_b
# forward
fake_a, fake_b, rec_a, rec_b = self.forward(
real_a, real_b
) # compute fake images and reconstruction images.
# G_A and G_B
set_requires_grad(
[self.networks.d_a, self.networks.d_b], False
) # Ds require no gradients when optimizing Gs
self.optimizer_g.zero_grad() # set G_A and G_B's gradients to zero
# Identity loss
idt_a = self.networks.g_b_to_a(real_a)
idt_b = self.networks.g_a_to_b(real_b)
loss_idt_a = self.criterionIdt(idt_a, real_a) * self.lambda_idt_a
loss_idt_b = self.criterionIdt(idt_b, real_b) * self.lambda_idt_b
# GAN loss
loss_g_a_to_b = self.criterionGAN(self.networks.d_b(fake_b), True)
loss_g_b_to_a = self.criterionGAN(self.networks.d_a(fake_a), True)
# Forward cycle loss
loss_cycle_a = self.criterionCycle(rec_a, real_a) * self.lambda_cycle
# Backward cycle loss
loss_cycle_b = self.criterionCycle(rec_b, real_b) * self.lambda_cycle
# combined loss and calculate gradients
loss_g = (
loss_g_a_to_b
+ loss_g_b_to_a
+ loss_cycle_a
+ loss_cycle_b
+ loss_idt_a
+ loss_idt_b
)
loss_g.backward()
self.optimizer_g.step() # update G_A and G_B's weights
# D_A and D_B
set_requires_grad([self.networks.d_a, self.networks.d_b], True)
self.optimizer_d.zero_grad() # set D_A and D_B's gradients to zero
loss_d_a = self.backward_d_a(real_a, fake_a) # calculate gradients for D_A
loss_d_b = self.backward_d_b(real_b, fake_b) # calculate gradients for D_B
self.optimizer_d.step() # update D_A and D_B's weights
return CycleGANStats(
real_a,
real_b,
fake_a,
fake_b,
rec_a,
rec_b,
idt_a,
idt_b,
loss_g_a_to_b.item(),
loss_g_b_to_a.item(),
loss_idt_a.item(),
loss_idt_b.item(),
loss_cycle_a.item(),
loss_cycle_b.item(),
loss_d_a,
loss_d_b,
)
|
backward_d_basic
|
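backward_d_basic above computes the least-squares GAN discriminator objective; a minimal standalone sketch (assuming plain PyTorch, with no ImagePool, optimizer wiring, or backward() call) looks like this:

# Minimal sketch of the LSGAN discriminator loss from backward_d_basic above;
# netd, real, and fake are assumed placeholders.
import torch
from torch.nn.functional import mse_loss

def d_loss(netd, real, fake):
    pred_real = netd(real)
    pred_fake = netd(fake.detach())   # block gradients into the generator
    loss_real = mse_loss(pred_real, torch.ones_like(pred_real))
    loss_fake = mse_loss(pred_fake, torch.zeros_like(pred_fake))
    return (loss_real + loss_fake) * 0.5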
main.rs
|
use clap::{crate_authors, crate_description, crate_version, App, AppSettings, Arg, SubCommand};
use std::collections::HashMap;
mod formatter;
mod iter;
mod linter;
fn
|
() {
let matches = App::new("cniutil")
.about(crate_description!())
.version(crate_version!())
.setting(AppSettings::GlobalVersion)
.author(crate_authors!(", "))
.setting(AppSettings::SubcommandRequiredElseHelp)
.setting(AppSettings::UnifiedHelpMessage)
// these arguments are available for all subcommands
.arg(
Arg::with_name("extension")
.help("Enable CNI extensions.")
.long("with")
.possible_values(&["ini", "more-keys"])
.case_insensitive(true)
.multiple(true)
.require_delimiter(true)
.require_equals(true)
.global(true)
)
.arg(
Arg::with_name("removed-extension")
.help("Disable CNI extensions.")
.long("without")
.possible_values(&["ini", "more-keys"])
.case_insensitive(true)
.multiple(true)
.require_delimiter(true)
.require_equals(true)
.global(true)
)
.subcommand(
SubCommand::with_name("lint")
.setting(AppSettings::UnifiedHelpMessage)
.about("comments on validity and style of CNI files")
.arg(
Arg::with_name("FILES")
.help("The input files to read. '-' will result in stdin being read.")
.multiple(true)
.default_value("-")
)
)
.subcommand(
SubCommand::with_name("format")
.setting(AppSettings::UnifiedHelpMessage)
.visible_alias("fmt")
.about("Reads in CNI files and shows the combined result in the specified format.")
.arg(
Arg::with_name("cni")
.help("The output format should be CNI. Equivalent to --format=\"KEY = `VALUE`\". [default]")
.overrides_with_all(&["csv", "null", "format"])
.long("cni")
)
.arg(
Arg::with_name("threshold")
.help("Can only be used with --cni. Specifies the threshold of how many entries have to be in a section to make use of a section header. 0 means no section headers will be used. [default: 10]")
.long("section-threshold")
.short("n")
.validator(|arg| arg.parse::<usize>().map(|_| ()).map_err(|e| e.to_string()))
.requires("cni")
)
.arg(
Arg::with_name("csv")
.help("The output format should be comma separated values. Equivalent to --format=\"KEY,VALUE\"")
.overrides_with_all(&["cni", "null", "format"])
.long("csv")
.short("c")
)
.arg(
Arg::with_name("null")
.help("Records are terminated by a null character instead of a line feed to better accomodate values containing line feeds.")
.overrides_with_all(&["cni", "csv", "format"])
.long("null")
.short("0")
)
.arg(
Arg::with_name("format")
.help("Sets a custom format. KEY and VALUE are placeholders and may not occur more than once.")
.overrides_with_all(&["cni", "csv", "null"])
.long("format")
.takes_value(true)
)
.arg(
Arg::with_name("FILES")
.help("The input files to read. '-' will result in stdin being read.")
.multiple(true)
.default_value("-")
)
)
.get_matches();
// get enabled CNI extensions
let opts = {
let mut extensions = if matches.is_present("extension") {
matches
.values_of("extension")
.unwrap()
.zip(matches.indices_of("extension").unwrap())
// removes duplicates
.collect::<HashMap<_, _>>()
} else {
HashMap::new()
};
let removed_extensions = if matches.is_present("removed-extension") {
matches
.values_of("removed-extension")
.unwrap()
.zip(matches.indices_of("removed-extension").unwrap())
// removes duplicates
.collect::<HashMap<_, _>>()
} else {
HashMap::new()
};
for (removed, i) in removed_extensions {
if matches!(extensions.get(removed), Some(&j) if j<i) {
extensions.remove(removed);
}
}
cni_format::Opts {
ini: extensions.contains_key("ini"),
more_keys: extensions.contains_key("more-keys"),
}
};
match matches.subcommand() {
("lint", Some(matches)) => {
let files = matches.values_of("FILES").unwrap();
if files.len() == 1 {
// don't show the filename if there is only one file
linter::lint(&opts, matches.value_of("FILES").unwrap());
} else {
for file in files {
println!("{}", file);
linter::lint(&opts, file);
}
}
}
("format", Some(matches)) => {
use formatter::Format;
let format = if matches.is_present("csv") {
Format::Custom(Some("".into()), Some(",\"".into()), "\"\n".into())
} else if matches.is_present("null") {
Format::Custom(Some("".into()), Some("=".into()), "\0".into())
} else if matches.is_present("format") {
let format = format!("{}\n", matches.value_of("format").unwrap());
let key_pos = format.find("KEY");
let val_pos = format.find("VALUE");
Format::Custom(
key_pos.map(|i| format[..i].into()),
val_pos.map(|i| format[key_pos.unwrap_or(0)..i].into()),
format[val_pos.or(key_pos).unwrap_or(0)..].into(),
)
} else {
// must be the default CNI formatting
// the unwrap is okay because of the validator in clap
let section_threshold = matches
.value_of("threshold")
.unwrap_or("10")
.parse()
.unwrap();
Format::Cni(section_threshold)
};
formatter::format(matches.values_of("FILES").unwrap(), format, opts);
}
_ => unreachable!("unknown subcommand"),
}
}
|
main
|
test_exception.py
|
import unittest
from clayful.exception import ClayfulException
class
|
(unittest.TestCase):
def test_clayful_error_constructor(self):
error = ClayfulException(
'Brand',
'get',
400,
{},
'g-no-model',
'my message',
{}
)
self.assertEqual(error.is_clayful, True)
self.assertEqual(error.model, 'Brand')
self.assertEqual(error.method, 'get')
self.assertEqual(error.status, 400)
self.assertEqual(error.headers, {})
self.assertEqual(error.code, 'g-no-model')
self.assertEqual(error.message, 'my message')
self.assertEqual(error.validation, {})
def test_throw_clayful_error(self):
try:
raise ClayfulException(
'Brand',
'get',
400,
{},
'g-no-model',
'my message',
{}
)
except ClayfulException as e:
self.assertEqual(e.is_clayful, True)
|
ClayfulExceptionTest
|
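Beyond the unit tests above, application code would typically catch ClayfulException and inspect the fields set by its constructor; a hedged sketch follows, where the fetch callable is an illustrative placeholder.

# Hedged sketch of application-side handling for ClayfulException; the
# fetch callable is an illustrative placeholder, not part of this test file.
from clayful.exception import ClayfulException

def safe_get(fetch):
    try:
        return fetch()
    except ClayfulException as e:
        # fields populated by the constructor exercised in the tests above
        return {'error': e.code, 'status': e.status, 'message': e.message}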
in.go
|
package asdfasdf
func
Factorial(x int) int {
if x <=
1
|
return x * Factorial(x - 1,
)
}
|
{
return x
}
|
main.rs
|
use sljit_rs::{definition::*, *};
fn main() {
let comp = SlJITCompiler::new();
comp.set_context(
0,
sljit_arg1(SLJIT_ARG_TYPE_SW) | sljit_arg2(SLJIT_ARG_TYPE_SW),
2,
2,
|
comp.emit_op0(SLJIT_ENDBR);
comp.emit_fast_enter(SLJIT_R0, 0);
comp.emit_op2(SLJIT_ADD, SLJIT_R0, 0, SLJIT_S0, 0, SLJIT_S1, 0);
comp.emit_op_src(SLJIT_FAST_RETURN, SLJIT_R0, 0);
unsafe {
let code = comp.generate_code();
let func: extern "C" fn(i64, i64) -> i64;
func = std::mem::transmute(code);
println!("{}", func(2, 3));
}
}
|
0,
0,
0,
);
|
postgres.go
|
// Copyright 2013 Beego Authors
// Copyright 2014 The Macaron Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
package session
import (
"database/sql"
"fmt"
"log"
"sync"
"time"
_ "github.com/lib/pq"
"go.wandrs.dev/session"
)
// PostgresStore represents a postgres session store implementation.
type PostgresStore struct {
c *sql.DB
sid string
lock sync.RWMutex
data map[interface{}]interface{}
}
// NewPostgresStore creates and returns a postgres session store.
func NewPostgresStore(c *sql.DB, sid string, kv map[interface{}]interface{}) *PostgresStore {
return &PostgresStore{
c: c,
sid: sid,
data: kv,
}
}
// Set sets value to given key in session.
func (s *PostgresStore) Set(key, value interface{}) error {
s.lock.Lock()
defer s.lock.Unlock()
s.data[key] = value
return nil
}
// Get gets value by given key in session.
func (s *PostgresStore) Get(key interface{}) interface{} {
s.lock.RLock()
defer s.lock.RUnlock()
return s.data[key]
}
// Delete deletes a key from session.
func (s *PostgresStore) Delete(key interface{}) error {
s.lock.Lock()
defer s.lock.Unlock()
delete(s.data, key)
return nil
}
// ID returns current session ID.
func (s *PostgresStore) ID() string {
return s.sid
}
// Release saves postgres session values to the database.
// It must be called to persist values to the database.
func (s *PostgresStore) Release() error {
// Skip encoding if the data is empty
if len(s.data) == 0 {
return nil
}
data, err := session.EncodeGob(s.data)
if err != nil {
return err
}
_, err = s.c.Exec("UPDATE session SET data=$1, expiry=$2 WHERE key=$3",
data, time.Now().Unix(), s.sid)
return err
}
// Flush deletes all session data.
func (s *PostgresStore) Flush() error {
s.lock.Lock()
defer s.lock.Unlock()
s.data = make(map[interface{}]interface{})
return nil
}
// PostgresProvider represents a postgres session provider implementation.
type PostgresProvider struct {
c *sql.DB
maxlifetime int64
}
// Init initializes postgres session provider.
// connStr: user=a password=b host=localhost port=5432 dbname=c sslmode=disable
func (p *PostgresProvider) Init(maxlifetime int64, connStr string) (err error) {
p.maxlifetime = maxlifetime
p.c, err = sql.Open("postgres", connStr)
if err != nil {
return err
}
return p.c.Ping()
}
// Read returns raw session store by session ID.
func (p *PostgresProvider) Read(sid string) (session.RawStore, error) {
now := time.Now().Unix()
var data []byte
expiry := now
err := p.c.QueryRow("SELECT data, expiry FROM session WHERE key=$1", sid).Scan(&data, &expiry)
if err == sql.ErrNoRows {
_, err = p.c.Exec("INSERT INTO session(key,data,expiry) VALUES($1,$2,$3)",
sid, "", now)
}
if err != nil {
return nil, err
}
var kv map[interface{}]interface{}
if len(data) == 0 || expiry+p.maxlifetime <= now {
kv = make(map[interface{}]interface{})
} else {
kv, err = session.DecodeGob(data)
if err != nil {
return nil, err
}
}
return NewPostgresStore(p.c, sid, kv), nil
}
// Exist returns true if session with given ID exists.
func (p *PostgresProvider) Exist(sid string) bool {
var data []byte
err := p.c.QueryRow("SELECT data FROM session WHERE key=$1", sid).Scan(&data)
if err != nil && err != sql.ErrNoRows {
panic("session/postgres: error checking existence: " + err.Error())
}
return err != sql.ErrNoRows
}
// Destroy deletes a session by session ID.
func (p *PostgresProvider) Destroy(sid string) error {
_, err := p.c.Exec("DELETE FROM session WHERE key=$1", sid)
return err
}
// Regenerate regenerates a session store from old session ID to new one.
func (p *PostgresProvider) Regenerate(oldsid, sid string) (_ session.RawStore, err error) {
if p.Exist(sid) {
return nil, fmt.Errorf("new sid '%s' already exists", sid)
}
if !p.Exist(oldsid) {
if _, err = p.c.Exec("INSERT INTO session(key,data,expiry) VALUES($1,$2,$3)",
oldsid, "", time.Now().Unix()); err != nil {
return nil, err
}
}
if _, err = p.c.Exec("UPDATE session SET key=$1 WHERE key=$2", sid, oldsid); err != nil {
return nil, err
}
return p.Read(sid)
}
// Count counts and returns number of sessions.
func (p *PostgresProvider) Count() (total int) {
	if err := p.c.QueryRow("SELECT COUNT(*) AS NUM FROM session").Scan(&total); err != nil {
		panic("session/postgres: error counting records: " + err.Error())
	}
return total
}
// GC calls GC to clean expired sessions.
func (p *PostgresProvider) GC() {
if _, err := p.c.Exec("DELETE FROM session WHERE EXTRACT(EPOCH FROM NOW()) - expiry > $1", p.maxlifetime); err != nil {
log.Printf("session/postgres: error garbage collecting: %v", err)
}
}
func init() {
session.Register("postgres", &PostgresProvider{})
}
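// A minimal usage sketch, assuming a session table like the one below already
// exists (the schema is inferred from the queries in this file; the lifetime,
// DSN, and session ID values are placeholders, not values from this package):
//
//	CREATE TABLE session (
//	    key    CHAR(16) NOT NULL PRIMARY KEY,
//	    data   BYTEA,
//	    expiry BIGINT NOT NULL
//	);
func examplePostgresProviderUsage() {
	p := new(PostgresProvider)
	if err := p.Init(3600, "user=a password=b host=localhost port=5432 dbname=c sslmode=disable"); err != nil {
		log.Fatal(err)
	}
	store, err := p.Read("0123456789abcdef") // inserts a fresh row if the sid is new
	if err != nil {
		log.Fatal(err)
	}
	if err := store.Set("uid", 42); err != nil { // held in memory until Release
		log.Fatal(err)
	}
	if err := store.Release(); err != nil { // gob-encodes the data and updates the row
		log.Fatal(err)
	}
}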
|
|
setup.py
|
# -*- coding: utf-8 -*-
"""
AMPLPY
------
AMPL API is an interface that allows developers to access the features of the
AMPL interpreter from within a programming language. All model generation and
solver interaction is handled directly by AMPL, which leads to great stability
and speed; the library just acts as an intermediary, and the added overhead
(in terms of memory and CPU usage) depends mostly on how much data is read
back from AMPL, the size of the model as such is irrelevant. Functions for
directly assigning data to AMPL parameters and sets are provided, which can
be used instead of the normal AMPL data reading procedures. AMPL API has been
written with usability in mind, and it is easy to access its functionalities
from C++, Java, C#, MATLAB, R and Python.
The AMPL API can function as an add-on to any existing AMPL installation. If
you do not yet have an AMPL installation on the computer where you will be
working with the API, see our
`demo page <http://ampl.com/try-ampl/download-a-free-demo/>`_
or
`trial page <http://ampl.com/try-ampl/request-a-full-trial/>`_
to download a working version that can be installed quickly.
Documentation:
``````````````
* http://amplpy.readthedocs.io
* http://ampl.com/api/nightly/python/
Repositories:
`````````````
* GitHub Repository: https://github.com/ampl/amplpy
* PyPI Repository: https://pypi.python.org/pypi/amplpy
"""
from setuptools import setup, Extension
import platform
import os
OSTYPE = platform.system()
x64 = platform.architecture()[0] == '64bit'
def ls_dir(base_dir):
    """List files recursively."""
    return [
        os.path.join(dirpath.replace(base_dir, '', 1), f)
        for (dirpath, dirnames, files) in os.walk(base_dir)
        for f in files
    ]
def make_relative_rpath(path):
if OSTYPE == 'Darwin':
return '-Wl,-rpath,@loader_path/' + path
elif OSTYPE == 'Linux':
return '-Wl,-rpath,$ORIGIN/' + path
else:
return ''
def compile_args():
if OSTYPE == 'Windows':
        return ['/TP', '/EHsc']
elif OSTYPE == 'Linux':
return ['-std=c++11']
else:
return []
libdir = 'lib64' if x64 else 'lib32'
setup(
name='amplpy',
version='0.6.7',
description='Python API for AMPL',
long_description=__doc__,
license='BSD-3',
platforms='any',
author='Filipe Brandão',
author_email='[email protected]',
url='http://ampl.com/',
download_url='https://github.com/ampl/amplpy',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: C++',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
],
packages=['amplpy'],
ext_modules=[Extension(
'_amplpython',
libraries=['ampl'],
library_dirs=[os.path.join('amplpy', 'amplpython', libdir)],
include_dirs=[os.path.join('amplpy', 'amplpython', 'include')],
extra_compile_args=compile_args(),
extra_link_args=[
make_relative_rpath(os.path.join('amplpy', 'amplpython', libdir))
],
sources=[
os.path.join('amplpy', 'amplpython', 'amplpythonPYTHON_wrap.cxx')
],
)],
package_data={'': ls_dir('amplpy/')},
install_requires=['future >= 0.15.0']
)
|
"""List files recursively."""
return [
os.path.join(dirpath.replace(base_dir, '', 1), f)
for (dirpath, dirnames, files) in os.walk(base_dir)
for f in files
]
|
food_obj.py
|
"""
snake/food_obj.py
author: Stephen Radley
date: 2018/07/05
package: snake
version: 0.0.1
"""
from random import randint
from snake.location_obj import Location
from snake.functions import find_valid_locs
"""
Food ...
"""
class Food:
"""
__init__ ...
"""
def __init__(self, dim_x, dim_y, snake):
valid_locs = find_valid_locs(dim_x, dim_y, snake)
loc = valid_locs[randint(0, len(valid_locs)-1)]
self.loc = Location(loc[0], loc[1])
|
|
mock_client.go
|
// Code generated by MockGen. DO NOT EDIT.
// Source: github.com/Charliekenney23/linodectl/internal/linode (interfaces: Client)
// Package mock is a generated GoMock package.
package mock
import (
context "context"
reflect "reflect"
gomock "github.com/golang/mock/gomock"
linodego "github.com/linode/linodego"
)
// MockClient is a mock of Client interface.
type MockClient struct {
ctrl *gomock.Controller
recorder *MockClientMockRecorder
}
// MockClientMockRecorder is the mock recorder for MockClient.
type MockClientMockRecorder struct {
mock *MockClient
}
// NewMockClient creates a new mock instance.
func NewMockClient(ctrl *gomock.Controller) *MockClient {
mock := &MockClient{ctrl: ctrl}
mock.recorder = &MockClientMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockClient) EXPECT() *MockClientMockRecorder {
return m.recorder
}
// CreateInstance mocks base method.
func (m *MockClient) CreateInstance(arg0 context.Context, arg1 linodego.InstanceCreateOptions) (*linodego.Instance, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "CreateInstance", arg0, arg1)
ret0, _ := ret[0].(*linodego.Instance)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// CreateInstance indicates an expected call of CreateInstance.
func (mr *MockClientMockRecorder) CreateInstance(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateInstance", reflect.TypeOf((*MockClient)(nil).CreateInstance), arg0, arg1)
}
// CreateLKECluster mocks base method.
func (m *MockClient) CreateLKECluster(arg0 context.Context, arg1 linodego.LKEClusterCreateOptions) (*linodego.LKECluster, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "CreateLKECluster", arg0, arg1)
ret0, _ := ret[0].(*linodego.LKECluster)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// CreateLKECluster indicates an expected call of CreateLKECluster.
func (mr *MockClientMockRecorder) CreateLKECluster(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateLKECluster", reflect.TypeOf((*MockClient)(nil).CreateLKECluster), arg0, arg1)
}
// CreateObjectStorageBucket mocks base method.
func (m *MockClient) CreateObjectStorageBucket(arg0 context.Context, arg1 linodego.ObjectStorageBucketCreateOptions) (*linodego.ObjectStorageBucket, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "CreateObjectStorageBucket", arg0, arg1)
ret0, _ := ret[0].(*linodego.ObjectStorageBucket)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// CreateObjectStorageBucket indicates an expected call of CreateObjectStorageBucket.
func (mr *MockClientMockRecorder) CreateObjectStorageBucket(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateObjectStorageBucket", reflect.TypeOf((*MockClient)(nil).CreateObjectStorageBucket), arg0, arg1)
}
// CreateObjectStorageKey mocks base method.
func (m *MockClient) CreateObjectStorageKey(arg0 context.Context, arg1 linodego.ObjectStorageKeyCreateOptions) (*linodego.ObjectStorageKey, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "CreateObjectStorageKey", arg0, arg1)
ret0, _ := ret[0].(*linodego.ObjectStorageKey)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// CreateObjectStorageKey indicates an expected call of CreateObjectStorageKey.
func (mr *MockClientMockRecorder) CreateObjectStorageKey(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateObjectStorageKey", reflect.TypeOf((*MockClient)(nil).CreateObjectStorageKey), arg0, arg1)
}
// DeleteInstance mocks base method.
func (m *MockClient) DeleteInstance(arg0 context.Context, arg1 int) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "DeleteInstance", arg0, arg1)
ret0, _ := ret[0].(error)
return ret0
}
// DeleteInstance indicates an expected call of DeleteInstance.
func (mr *MockClientMockRecorder) DeleteInstance(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteInstance", reflect.TypeOf((*MockClient)(nil).DeleteInstance), arg0, arg1)
}
// DeleteLKECluster mocks base method.
func (m *MockClient) DeleteLKECluster(arg0 context.Context, arg1 int) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "DeleteLKECluster", arg0, arg1)
ret0, _ := ret[0].(error)
return ret0
}
// DeleteLKECluster indicates an expected call of DeleteLKECluster.
func (mr *MockClientMockRecorder) DeleteLKECluster(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteLKECluster", reflect.TypeOf((*MockClient)(nil).DeleteLKECluster), arg0, arg1)
}
// DeleteObjectStorageBucket mocks base method.
func (m *MockClient) DeleteObjectStorageBucket(arg0 context.Context, arg1, arg2 string) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "DeleteObjectStorageBucket", arg0, arg1, arg2)
ret0, _ := ret[0].(error)
return ret0
}
// DeleteObjectStorageBucket indicates an expected call of DeleteObjectStorageBucket.
func (mr *MockClientMockRecorder) DeleteObjectStorageBucket(arg0, arg1, arg2 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteObjectStorageBucket", reflect.TypeOf((*MockClient)(nil).DeleteObjectStorageBucket), arg0, arg1, arg2)
}
// DeleteObjectStorageKey mocks base method.
func (m *MockClient) DeleteObjectStorageKey(arg0 context.Context, arg1 int) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "DeleteObjectStorageKey", arg0, arg1)
ret0, _ := ret[0].(error)
return ret0
}
// DeleteObjectStorageKey indicates an expected call of DeleteObjectStorageKey.
func (mr *MockClientMockRecorder) DeleteObjectStorageKey(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteObjectStorageKey", reflect.TypeOf((*MockClient)(nil).DeleteObjectStorageKey), arg0, arg1)
}
// DeleteStackscript mocks base method.
func (m *MockClient) DeleteStackscript(arg0 context.Context, arg1 int) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "DeleteStackscript", arg0, arg1)
ret0, _ := ret[0].(error)
return ret0
}
// DeleteStackscript indicates an expected call of DeleteStackscript.
func (mr *MockClientMockRecorder) DeleteStackscript(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteStackscript", reflect.TypeOf((*MockClient)(nil).DeleteStackscript), arg0, arg1)
}
// GetInstance mocks base method.
func (m *MockClient) GetInstance(arg0 context.Context, arg1 int) (*linodego.Instance, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetInstance", arg0, arg1)
ret0, _ := ret[0].(*linodego.Instance)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetInstance indicates an expected call of GetInstance.
func (mr *MockClientMockRecorder) GetInstance(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetInstance", reflect.TypeOf((*MockClient)(nil).GetInstance), arg0, arg1)
}
// GetLKEClusterKubeconfig mocks base method.
func (m *MockClient) GetLKEClusterKubeconfig(arg0 context.Context, arg1 int) (*linodego.LKEClusterKubeconfig, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetLKEClusterKubeconfig", arg0, arg1)
ret0, _ := ret[0].(*linodego.LKEClusterKubeconfig)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetLKEClusterKubeconfig indicates an expected call of GetLKEClusterKubeconfig.
func (mr *MockClientMockRecorder) GetLKEClusterKubeconfig(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLKEClusterKubeconfig", reflect.TypeOf((*MockClient)(nil).GetLKEClusterKubeconfig), arg0, arg1)
}
// GetProfile mocks base method.
func (m *MockClient) GetProfile(arg0 context.Context) (*linodego.Profile, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetProfile", arg0)
ret0, _ := ret[0].(*linodego.Profile)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetProfile indicates an expected call of GetProfile.
func (mr *MockClientMockRecorder) GetProfile(arg0 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProfile", reflect.TypeOf((*MockClient)(nil).GetProfile), arg0)
}
// ListInstances mocks base method.
func (m *MockClient) ListInstances(arg0 context.Context, arg1 *linodego.ListOptions) ([]linodego.Instance, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListInstances", arg0, arg1)
ret0, _ := ret[0].([]linodego.Instance)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// ListInstances indicates an expected call of ListInstances.
func (mr *MockClientMockRecorder) ListInstances(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListInstances", reflect.TypeOf((*MockClient)(nil).ListInstances), arg0, arg1)
}
// ListLKEClusterPools mocks base method.
func (m *MockClient) ListLKEClusterPools(arg0 context.Context, arg1 int, arg2 *linodego.ListOptions) ([]linodego.LKEClusterPool, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListLKEClusterPools", arg0, arg1, arg2)
ret0, _ := ret[0].([]linodego.LKEClusterPool)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// ListLKEClusterPools indicates an expected call of ListLKEClusterPools.
func (mr *MockClientMockRecorder) ListLKEClusterPools(arg0, arg1, arg2 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLKEClusterPools", reflect.TypeOf((*MockClient)(nil).ListLKEClusterPools), arg0, arg1, arg2)
}
// ListLKEClusters mocks base method.
func (m *MockClient) ListLKEClusters(arg0 context.Context, arg1 *linodego.ListOptions) ([]linodego.LKECluster, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListLKEClusters", arg0, arg1)
ret0, _ := ret[0].([]linodego.LKECluster)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// ListLKEClusters indicates an expected call of ListLKEClusters.
func (mr *MockClientMockRecorder) ListLKEClusters(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListLKEClusters", reflect.TypeOf((*MockClient)(nil).ListLKEClusters), arg0, arg1)
}
// ListObjectStorageBuckets mocks base method.
func (m *MockClient) ListObjectStorageBuckets(arg0 context.Context, arg1 *linodego.ListOptions) ([]linodego.ObjectStorageBucket, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListObjectStorageBuckets", arg0, arg1)
ret0, _ := ret[0].([]linodego.ObjectStorageBucket)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// ListObjectStorageBuckets indicates an expected call of ListObjectStorageBuckets.
func (mr *MockClientMockRecorder) ListObjectStorageBuckets(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListObjectStorageBuckets", reflect.TypeOf((*MockClient)(nil).ListObjectStorageBuckets), arg0, arg1)
}
// ListStackscripts mocks base method.
func (m *MockClient) ListStackscripts(arg0 context.Context, arg1 *linodego.ListOptions) ([]linodego.Stackscript, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "ListStackscripts", arg0, arg1)
ret0, _ := ret[0].([]linodego.Stackscript)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// ListStackscripts indicates an expected call of ListStackscripts.
func (mr *MockClientMockRecorder) ListStackscripts(arg0, arg1 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListStackscripts", reflect.TypeOf((*MockClient)(nil).ListStackscripts), arg0, arg1)
}
// UpdateInstance mocks base method.
func (m *MockClient) UpdateInstance(arg0 context.Context, arg1 int, arg2 linodego.InstanceUpdateOptions) (*linodego.Instance, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "UpdateInstance", arg0, arg1, arg2)
ret0, _ := ret[0].(*linodego.Instance)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// UpdateInstance indicates an expected call of UpdateInstance.
func (mr *MockClientMockRecorder) UpdateInstance(arg0, arg1, arg2 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateInstance", reflect.TypeOf((*MockClient)(nil).UpdateInstance), arg0, arg1, arg2)
}
// UpdateLKECluster mocks base method.
func (m *MockClient) UpdateLKECluster(arg0 context.Context, arg1 int, arg2 linodego.LKEClusterUpdateOptions) (*linodego.LKECluster, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "UpdateLKECluster", arg0, arg1, arg2)
ret0, _ := ret[0].(*linodego.LKECluster)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// UpdateLKECluster indicates an expected call of UpdateLKECluster.
func (mr *MockClientMockRecorder) UpdateLKECluster(arg0, arg1, arg2 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateLKECluster", reflect.TypeOf((*MockClient)(nil).UpdateLKECluster), arg0, arg1, arg2)
}
// UpdateLKEClusterPool mocks base method.
func (m *MockClient) UpdateLKEClusterPool(arg0 context.Context, arg1, arg2 int, arg3 linodego.LKEClusterPoolUpdateOptions) (*linodego.LKEClusterPool, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "UpdateLKEClusterPool", arg0, arg1, arg2, arg3)
ret0, _ := ret[0].(*linodego.LKEClusterPool)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// UpdateLKEClusterPool indicates an expected call of UpdateLKEClusterPool.
func (mr *MockClientMockRecorder) UpdateLKEClusterPool(arg0, arg1, arg2, arg3 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateLKEClusterPool", reflect.TypeOf((*MockClient)(nil).UpdateLKEClusterPool), arg0, arg1, arg2, arg3)
}
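// Minimal usage sketch (a hypothetical test, not generated code): the
// "testing" package would additionally need importing, and the instance
// ID 123 is a placeholder.
func TestDeleteInstanceSketch(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	client := NewMockClient(ctrl)
	// Expect exactly one DeleteInstance call with any context and ID 123.
	client.EXPECT().DeleteInstance(gomock.Any(), 123).Return(nil)

	if err := client.DeleteInstance(context.Background(), 123); err != nil {
		t.Fatal(err)
	}
}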
|
|
get_set.rs
|
#![cfg(not(feature = "no_object"))]
use rhai::{Engine, EvalAltResult, ImmutableString, INT};
|
#[test]
fn test_get_set() -> Result<(), Box<EvalAltResult>> {
#[derive(Clone)]
struct TestStruct {
x: INT,
y: INT,
array: Vec<INT>,
}
impl TestStruct {
fn get_x(&mut self) -> INT {
self.x
}
fn set_x(&mut self, new_x: INT) {
self.x = new_x;
}
fn get_y(&mut self) -> INT {
self.y
}
fn new() -> Self {
Self {
x: 1,
y: 0,
array: vec![1, 2, 3, 4, 5],
}
}
}
let mut engine = Engine::new();
engine.register_type::<TestStruct>();
engine.register_get_set("x", TestStruct::get_x, TestStruct::set_x);
engine.register_get("y", TestStruct::get_y);
engine.register_fn("add", |value: &mut INT| *value += 41);
engine.register_fn("new_ts", TestStruct::new);
assert_eq!(engine.eval::<INT>("let a = new_ts(); a.x = 500; a.x")?, 500);
assert_eq!(engine.eval::<INT>("let a = new_ts(); a.x.add(); a.x")?, 42);
assert_eq!(engine.eval::<INT>("let a = new_ts(); a.y.add(); a.y")?, 0);
#[cfg(not(feature = "no_index"))]
{
engine.register_indexer_get_set(
|value: &mut TestStruct, index: ImmutableString| value.array[index.len()],
|value: &mut TestStruct, index: ImmutableString, new_val: INT| {
value.array[index.len()] = new_val
},
);
assert_eq!(engine.eval::<INT>(r#"let a = new_ts(); a["abc"]"#)?, 4);
assert_eq!(
engine.eval::<INT>(r#"let a = new_ts(); a["abc"] = 42; a["abc"]"#)?,
42
);
}
Ok(())
}
#[test]
fn test_get_set_chain() -> Result<(), Box<EvalAltResult>> {
#[derive(Clone)]
struct TestChild {
x: INT,
}
impl TestChild {
fn get_x(&mut self) -> INT {
self.x
}
fn set_x(&mut self, new_x: INT) {
self.x = new_x;
}
fn new() -> TestChild {
TestChild { x: 1 }
}
}
#[derive(Clone)]
struct TestParent {
child: TestChild,
}
impl TestParent {
fn get_child(&mut self) -> TestChild {
self.child.clone()
}
fn set_child(&mut self, new_child: TestChild) {
self.child = new_child;
}
fn new() -> TestParent {
TestParent {
child: TestChild::new(),
}
}
}
let mut engine = Engine::new();
engine.register_type::<TestChild>();
engine.register_type_with_name::<TestParent>("TestParent");
engine.register_get_set("x", TestChild::get_x, TestChild::set_x);
engine.register_get_set("child", TestParent::get_child, TestParent::set_child);
engine.register_fn("new_tp", TestParent::new);
assert_eq!(
engine.eval::<INT>("let a = new_tp(); a.child.x = 500; a.child.x")?,
500
);
assert_eq!(
engine.eval::<String>("let a = new_tp(); type_of(a)")?,
"TestParent"
);
Ok(())
}
| |
AddCard.js
|
import React, {Component} from 'react'
import { View, Text, TextInput, TouchableOpacity, StyleSheet, KeyboardAvoidingView, Dimensions} from 'react-native'
import { addNewCard } from '../actions'
import { addCardToDeck } from '../utils/api'
import { connect } from 'react-redux'
import { gray, light, dark } from '../utils/colors'
import Constants from 'expo-constants'
import { ScrollView } from 'react-native-gesture-handler'
export class AddCard extends Component {
static navigationOptions = {
title: 'Add Card'
}
state = {
question: '',
answer: ''
}
handleAddQuestion = (input) => {
this.setState({
question: input
})
}
handleAddAnswer = (input) => {
this.setState({
answer: input
})
}
onPressButton = () => {
const { question, answer } = this.state
const deckId = this.props.navigation.state.params.deck
this.props.createNewCard(deckId, {question, answer})
addCardToDeck(deckId, {question, answer})
this.setState({
question: '',
answer: ''
})
//return to deck.
this.props.navigation.navigate('DeckDetails', deckId)
}
render() {
return (
<ScrollView>
<View style={styles.container}>
<Text style={styles.heading}>Add a new card to your mobile FlashCards!</Text>
<Text style={{textAlign: 'center', fontSize: 18, marginBottom: 10, color: dark, marginTop: 20}}>Add the question</Text>
<KeyboardAvoidingView behavior='padding'>
<TextInput
onChangeText={this.handleAddQuestion}
placeholder={'your question'}
style={styles.input}>
</TextInput>
{
              this.state.question === '' &&
<Text>The card must have a question</Text>
}
</KeyboardAvoidingView>
<Text style={{textAlign: 'center', fontSize: 18, marginBottom: 10, color: dark}}>Add the answer, should be a true or false answer.</Text>
<KeyboardAvoidingView behavior='padding'>
<TextInput
onChangeText={this.handleAddAnswer}
placeholder={'is true or false'}
style={styles.input}>
</TextInput>
{
              this.state.answer === '' &&
              <Text>The card must have an answer</Text>
}
</KeyboardAvoidingView>
          <TouchableOpacity style={styles.primaryBtn} onPress={this.onPressButton} disabled={this.state.question === '' || this.state.answer === ''}>
<Text style={styles.btnText}>Add Card</Text>
</TouchableOpacity>
</View>
</ScrollView>
)
}
}
const { width } = Dimensions.get('window')
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: 'flex-start',
alignItems: 'center',
paddingBottom: 20,
paddingTop: Constants.statusBarHeight,
paddingLeft: 10,
paddingRight: 10,
backgroundColor: light
},
heading: {
fontSize: 24,
textAlign: 'center',
marginBottom:10,
},
primaryBtn: {
backgroundColor: dark,
padding: 20,
paddingLeft: 30,
paddingRight: 30,
borderRadius: 7,
width: width - 40,
marginTop: 10,
marginBottom: 10
},
secondaryBtn: {
backgroundColor: light,
borderColor: dark,
borderWidth: 3,
},
btnText: {
color: light,
textAlign: 'center',
fontSize: 21
},
secondaryText: {
color: dark,
textAlign: 'center',
fontSize: 21
},
input: {
padding: 20,
marginTop: 10,
marginBottom: 25,
fontSize: 18,
borderWidth: 2,
borderColor: gray,
borderRadius: 8,
width: width - 40,
textAlign: 'center'
}
})
const mapDispatchToProps = dispatch => ({
  createNewCard: (deckId, card) =>
    dispatch(addNewCard(deckId, card))
})
export default connect(null, mapDispatchToProps)(AddCard)
|
|