file_name
stringlengths
3
137
prefix
stringlengths
0
918k
suffix
stringlengths
0
962k
middle
stringlengths
0
812k
main.py
from flask import render_template, request from flask_script import Manager, Server from app import app from model import Content, Summary, Article import app.static.summ as summarizationModel import os, json, logging @app.route('/', endpoint='ACCESS') @app.route('/index.html', endpoint='ACCESSFILE') def index(): try: all_pairs = Article.objects.all() return render_template('index.html', history=all_pairs) except Exception as e: logging.error(e) raise e @app.route('/run_decode', methods=['POST']) def run_decode():
logging.debug('decode your input by our pretrained model') try: source = request.get_json()['source'] # GET request with String from frontend directly logging.debug('input: {}'.format(source)) # GET String-type context from the backend try: logging.debug('using the pretrained model.') sentNums, summary = summarizationModel.decode.run_(source) except Exception as e: logging.error(e) else: logging.debug('The number of sentences is {}'.format(sentNums)) logging.debug('The abstract is that {}'.format(summary)) results = {'sent_no': sentNums, 'final': summary} try: article = Content(text=source) abstract = Summary(text=summary) pair = Article(article=article.id, abstract=abstract.id) article.save() abstract.save() pair.save() except Exception as e: logging.error(e) return json.dumps(results) except: message = {'message' : 'Fail to catch the data from client.'} return json.dumps(message) manager = Manager(app) manager.add_command('runserver', Server( use_debugger = True, use_reloader = True, host = os.getenv('IP', '0.0.0.0'), port = int(os.getenv('PORT', 5001)) )) if __name__ == "__main__": manager.run()
ci_tags.py
import requests import sys class Check(): def __init__(self, owner, repo, number, access_token): self.owner = owner self.repo = repo self.number = number self.access_token = access_token def
(self): url = 'https://gitee.com/api/v5/repos/{}/{}/pulls/{}/labels?access_token={}'.format(self.owner, self.repo, self.number, self.access_token) data = "[\"ci_processing\"]" r = requests.post(url, data) if r.status_code != 201: print('ERROR! Unexpected failure, status_code: {}'.format(r.status_code)) sys.exit(1) def add_successful_tag(self): url = 'https://gitee.com/api/v5/repos/{0}/{1}/pulls/{2}/labels?access_token={3}'.format(self.owner, self.repo, self.number, self.access_token) data = "[\"ci_successful\"]" r = requests.post(url, data) if r.status_code != 201: print('ERROR! Unexpected failure, status_code: {}'.format(r.status_code)) sys.exit(1) def add_failed_tag(self): url = 'https://gitee.com/api/v5/repos/{0}/{1}/pulls/{2}/labels?access_token={3}'.format(self.owner, self.repo, self.number, self.access_token) data = "[\"ci_failed\"]" r = requests.post(url, data) if r.status_code != 201: print('ERROR! Unexpected failure, status_code: {}'.format(r.status_code)) sys.exit(1) def remove_processing_tag(self): url = 'https://gitee.com/api/v5/repos/{0}/{1}/pulls/{2}/labels/ci_processing/?access_token={3}'.format( self.owner, self.repo, self.number, self.access_token) r = requests.delete(url) if r.status_code == 400: print('ERROR! Can not remove `ci_processing` label in a closed Pull Request.') sys.exit(1) elif r.status_code == 404: pass else: if r.status_code != 204: print('ERROR! Unexpected failure, status_code: {}'.format(r.status_code)) sys.exit(1) def remove_successful_tag(self): url = 'https://gitee.com/api/v5/repos/{0}/{1}/pulls/{2}/labels/ci_successful/?access_token={3}'.format( self.owner, self.repo, self.number, self.access_token) r = requests.delete(url) if r.status_code == 400: print('ERROR! Can not remove `ci_successful` label in a closed Pull Request.') sys.exit(1) elif r.status_code == 404: pass else: if r.status_code != 204: print('ERROR! 
Unexpected failure, status_code: {}'.format(r.status_code)) sys.exit(1) def remove_failed_tag(self): url = 'https://gitee.com/api/v5/repos/{0}/{1}/pulls/{2}/labels/ci_failed/?access_token={3}'.format( self.owner, self.repo, self.number, self.access_token) r = requests.delete(url) if r.status_code == 400: print('ERROR! Can not remove `ci_failed` label in a closed Pull Request.') sys.exit(1) elif r.status_code == 404: pass else: if r.status_code != 204: print('ERROR! Unexpected failure, status_code: {}'.format(r.status_code)) sys.exit(1) def remove_conflict_tag(self): url = 'https://gitee.com/api/v5/repos/{0}/{1}/pulls/{2}/labels/conflicted/?access_token={3}'.format( self.owner, self.repo, self.number, self.access_token) r = requests.delete(url) if r.status_code == 400: print('ERROR! Can not remove `conflicted` label in a closed Pull Request.') sys.exit(1) elif r.status_code == 404: pass else: if r.status_code != 204: print('ERROR! Unexpected failure, status_code: {}'.format(r.status_code)) sys.exit(1) if __name__ == '__main__': if len(sys.argv) != 6: print('owner, repo, number, access_token and action is required, please check!') sys.exit(1) owner = sys.argv[1] repo = sys.argv[2] number = sys.argv[3] access_token = sys.argv[4] action = sys.argv[5] c = Check(owner, repo, number, access_token) if action not in ['ATP', 'ATS', 'ATF']: print('Invalid action') sys.exit(1) if action == 'ATP': c.remove_successful_tag() c.remove_failed_tag() c.add_processing_tag() if action == 'ATS': c.remove_processing_tag() c.remove_failed_tag() c.remove_conflict_tag() c.add_successful_tag() if action == 'ATF': c.remove_processing_tag() c.remove_successful_tag() c.add_failed_tag()
add_processing_tag
conf_test.go
package log import ( "io" "os" "reflect" "testing" "github.com/zalgonoise/zlog/log/event" "github.com/zalgonoise/zlog/log/format/protobuf" "github.com/zalgonoise/zlog/store/db" ) func TestMultiConf(t *testing.T) { module := "LoggerConfig" funcname := "MultiConf()" _ = module _ = funcname type test struct { name string conf LoggerConfig want *LoggerBuilder } tests := []test{ { name: "default MultiConf()", conf: MultiConf(), want: &LoggerBuilder{ Out: os.Stderr, Prefix: "log", Sub: "", Fmt: TextColorLevelFirst, SkipExit: false, LevelFilter: 0, }, }, { name: "MultiConf() w/ SkipExit, JSON format, and StdOut config", conf: MultiConf(SkipExit, WithFormat(FormatJSON), StdOut), want: &LoggerBuilder{ Out: os.Stderr, Prefix: "", Sub: "", Fmt: FormatJSON, SkipExit: true, LevelFilter: 0, }, }, { name: "MultiConf() w/ SkipExit, Level filter, and custom prefix", conf: MultiConf(SkipExit, FilterInfo, WithPrefix("test")), want: &LoggerBuilder{ Out: nil, Prefix: "test", Sub: "", Fmt: nil, SkipExit: true, LevelFilter: 2, }, }, } var init = func(test test) *LoggerBuilder { builder := &LoggerBuilder{} MultiConf(test.conf).Apply(builder) return builder } var verify = func(idx int, test test) { builder := init(test) if !reflect.DeepEqual(*builder, *test.want) { t.Errorf( "#%v -- FAILED -- [%s] [%s] -- output mismatch error: wanted %v ; got %v -- action: %s", idx, module, funcname, *test.want, *builder, test.name, ) return } t.Logf( "#%v -- PASSED -- [%s] [%s] -- action: %s", idx, module, funcname, test.name, ) } for idx, test := range tests { verify(idx, test) } } func TestNilLogger(t *testing.T) { module := "LoggerConfig" funcname := "NilLogger()" type test struct { name string input []LoggerConfig wants Logger } var tests = []test{ { name: "test nil logger config routine", input: []LoggerConfig{ NilLogger(), }, wants: &nilLogger{}, }, } var init = func(test test) Logger { return New(test.input...) 
} var verify = func(idx int, test test) { input := init(test) if !reflect.DeepEqual(*input.(*nilLogger), *test.wants.(*nilLogger)) { t.Errorf( "#%v -- FAILED -- [%s] [%s] output mismatch error: wanted %v ; got %v -- action: %s", idx, module, funcname, *test.wants.(*nilLogger), *input.(*nilLogger), test.wants, ) } t.Logf( "#%v -- PASSED -- [%s] [%s] -- action: %s", idx, module, funcname, test.wants, ) } for idx, test := range tests { verify(idx, test) } } func FuzzPrefix(f *testing.F) { module := "LoggerConfig" funcname := "WithPrefix()" f.Add("test-prefix") f.Fuzz(func(t *testing.T, a string) { e := WithPrefix(a) builder := &LoggerBuilder{} e.Apply(builder) if builder.Prefix != a { t.Errorf( "FAILED -- [%s] [%s] fuzzed prefix mismatch: wanted %s ; got %s", module, funcname, a, builder.Prefix, ) } }) } func
(f *testing.F) { module := "LoggerConfig" funcname := "WithSub()" f.Add("test-sub") f.Fuzz(func(t *testing.T, a string) { e := WithSub(a) builder := &LoggerBuilder{} e.Apply(builder) if builder.Sub != a { t.Errorf( "FAILED -- [%s] [%s] fuzzed sub-prefix mismatch: wanted %s ; got %s", module, funcname, a, builder.Sub, ) } }) } func TestOut(t *testing.T) { module := "LoggerConfig" funcname := "WithOut()" _ = module _ = funcname type test struct { name string outs []io.Writer wants *LCOut } var tests = []test{ { name: "test defaults", outs: []io.Writer{}, wants: &LCOut{out: os.Stderr}, }, { name: "test single writer", outs: []io.Writer{os.Stdout}, wants: &LCOut{out: os.Stdout}, }, { name: "test multi writers", outs: []io.Writer{os.Stdout, os.Stderr}, wants: &LCOut{out: io.MultiWriter(os.Stdout, os.Stderr)}, }, } var init = func(test test) LoggerConfig { return WithOut(test.outs...) } var verify = func(idx int, test test) { conf := init(test) if !reflect.DeepEqual(conf.(*LCOut).out, test.wants.out) { t.Errorf( "#%v -- FAILED -- [%s] [%s] output mismatch error: wanted %v ; got %v -- action: %s", idx, module, funcname, *test.wants, *conf.(*LCOut), test.name, ) } t.Logf( "#%v -- PASSED -- [%s] [%s] -- action: %s", idx, module, funcname, test.name, ) } for idx, test := range tests { verify(idx, test) } } func TestSkipExit(t *testing.T) { module := "LoggerConfig" funcname := "SkipExit" _ = module _ = funcname type test struct { name string conf LoggerConfig want bool } tests := []test{ { name: "SkipExit config", conf: SkipExit, want: true, }, { name: "default config", conf: MultiConf(), want: false, }, } var init = func(test test) *LoggerBuilder { builder := &LoggerBuilder{} test.conf.Apply(builder) return builder } var verify = func(idx int, test test) { builder := init(test) if builder.SkipExit != test.want { t.Errorf( "#%v -- FAILED -- [%s] [%s] -- output mismatch error: wanted %v ; got %v -- action: %s", idx, module, funcname, test.want, builder.SkipExit, test.name, ) } 
t.Logf( "#%v -- PASSED -- [%s] [%s] -- action: %s", idx, module, funcname, test.name, ) } for idx, test := range tests { verify(idx, test) } } func TestFilter(t *testing.T) { module := "LoggerConfig" funcname := "WithFilter()" _ = module _ = funcname type test struct { name string conf LoggerConfig want event.Level } var tests = []test{ { name: "with level by number", conf: WithFilter(3), want: event.Level_warn, }, { name: "with level by reference", conf: WithFilter(event.Level_warn), want: event.Level_warn, }, } var init = func(test test) *LoggerBuilder { builder := &LoggerBuilder{} test.conf.Apply(builder) return builder } var verify = func(idx int, test test) { builder := init(test) if builder.LevelFilter != test.want.Int() { t.Errorf( "#%v -- FAILED -- [%s] [%s] output mismatch error: wanted %s ; got %s -- action: %s", idx, module, funcname, test.want.String(), event.Level_name[builder.LevelFilter], test.name, ) } t.Logf( "#%v -- PASSED -- [%s] [%s] -- action: %s", idx, module, funcname, test.name, ) } for idx, test := range tests { verify(idx, test) } } func TestWithDatabase(t *testing.T) { module := "LoggerConfig" funcname := "WithDatabase()" type test struct { name string w []io.WriteCloser wants LoggerConfig } var testWCs = []*testWC{{}, {}} var tests = []test{ { name: "empty slice", w: []io.WriteCloser{}, wants: nil, }, { name: "nil input", w: nil, wants: nil, }, { name: "one WriteCloser", w: []io.WriteCloser{testWCs[0]}, wants: &LCDatabase{ Out: testWCs[0], Fmt: &protobuf.FmtPB{}, }, }, { name: "multiple WriteClosers", w: []io.WriteCloser{testWCs[0], testWCs[0]}, wants: &LCDatabase{ Out: db.MultiWriteCloser(testWCs[0], testWCs[1]), Fmt: &protobuf.FmtPB{}, }, }, } var verify = func(idx int, test test) { var conf LoggerConfig if test.w == nil { conf = WithDatabase(nil) } else { conf = WithDatabase(test.w...) 
} if !reflect.DeepEqual(conf, test.wants) { t.Errorf( "#%v -- FAILED -- [%s] [%s] output mismatch error: wanted %v ; got %v -- action: %s", idx, module, funcname, test.wants, conf, test.name, ) return } } for idx, test := range tests { verify(idx, test) } }
FuzzSub
kmp.py
# while j < n and i < m: # if i == -1 or t[j] == p[i]: # j, i = j+1, i+1 # else: # i = pnext[i] def matching_KMP(t, p, pnext): j, i = 0, 0 n, m = len(t), len(p) while j < n and i < m: if i == -1 or t[j] == p[i]: j, i = j+1, i+1 else: i = pnext[i] if i == m: return j-i return -1 def gen_pnext(p): i, k, m = 0, -1, len(p) pnext = [-1] * m while i < m-1: if k == -1 or p[i] == p[k]: i, k = i+1, k+1 pnext[i] = k else: k = pnext[k] return pnext #改进版 def genPnext(p): i, k, m = 0 , -1, len(p) pnext = [-1]*m while i < m-1: if k == -1 or p[i] == p[k]:
else: pnext[i] = k else: k = pnext[k] return pnext
i, k = i+1, k+1 if p[i] == p[k]: pnext[i] = pnext[k]
main.rs
use std::io::{stdout, Write}; use aoc::Result; use aoc_2019_day_21::*; fn
() -> Result<()> { writeln!(stdout(), "Advent of Code {}-{:02}", YEAR, DAY)?; let input = aoc::input_from_stdin()?; let answer = part_one(&input)?; writeln!(stdout(), "--> part one:")?; writeln!(stdout(), "{}", answer)?; let answer = part_two(&input)?; writeln!(stdout(), "--> part two:")?; writeln!(stdout(), "{}", answer)?; Ok(()) }
main
dnsuptools.py
#!/usr/bin/env python3 # -*- encoding: UTF8 -*- from dnsuptools.dnsupdate import defaultDictList, MatchUpperLabels, DNSUpdate from dnsuptools.tlsarecgen import tlsaRecordsFromCertFile, tlsaFromFile from dnsuptools.dkimrecgen import dkimFromFile from simpleloggerplus import simpleloggerplus as log import re import pycurl from io import BytesIO import socket import dns.resolver def dkimKeySplit(dkimDict): if type(dkimDict) is list: return [dkimKeySplit(e) for e in dkimDict] keyL = dkimDict['keyname'].split('_') dkimDict['keybasename'] = keyL[0] if 1 < len(keyL): dkimDict['keynbr'] = keyL[1] return dkimDict def parseNSentry(record): return {'ns': record['content']} def parseDKIMentry(record): key = record['name'] keyList = key.split('.') val = record['content'].replace(' ', '') valList = val.split(';') valDict = {e.split('=')[0]: e.split('=')[1] for e in valList if '=' in e} dkim = {'name': '.'.join(keyList[2:]), 'keyname': keyList[0], 'dkimlabel': keyList[1]} dkim.update(valDict) dkimKeySplit(dkim) return dkim def formatDKIMentry(name, dkimDict): if type(dkimDict) is list: return [formatDKIMentry(name, e) for e in dkimDict] dkim = {'keyname': 'key1', 'v': 'DKIM1', 'k': 'rsa'} dkim.update(dkimDict) return {'name': '{x[keyname]}._domainkey.{name}'.format(x=dkim, name=str(name)), 'type': 'TXT', 'content': 'v={x[v]}; k={x[k]}; p={x[p]}'.format(x=dkim)} def parseTLSAentry(record): key = record['name'] keyList = key.split('.') log.debug(keyList) val = record['content'] valList = val.split(' ') tlsa = {'name': '.'.join(keyList[2:]), 'port': keyList[0], 'proto': keyList[1], 'usage': valList[0], 'selector': valList[1], 'matchingtype': valList[2], 'tlsa': valList[3]} #tlsa = {'port': keyList[0], 'proto': keyList[1], 'usage': valList[0], 'selector': valList[1], 'matchingtype': valList[2], 'tlsa': valList[3]} if '_' == tlsa['port'][0]: tlsa['port'] = tlsa['port'][1:] if '_' == tlsa['proto'][0]: tlsa['proto'] = tlsa['proto'][1:] tlsa['tlsa'] = tlsa['tlsa'].replace('\n','') return 
tlsa def formatTLSAentry(name, tlsaDict): if type(tlsaDict) is list: return [formatTLSAentry(name, e) for e in tlsaDict] tlsa = {'port': '*', 'proto': 'tcp'} tlsa.update(tlsaDict) if '*' != tlsa['port']: tlsa['port'] = '_{}'.format(tlsa['port']) tlsa['tlsa'] = tlsa['tlsa'].replace(b'\n',b'') return {'name': '{x[port]}._{x[proto]}.{name}'.format(x=tlsa, name=str(name)), 'type': 'TLSA', 'content': '{x[usage]} {x[selector]} {x[matchingtype]} {x[tlsa]}'.format(x=tlsa)} def parseSRVentry(record): key = record['name'] keyList = key.split('.') val = record['content'] valList = val.split(' ') srv = {'name': '.'.join(keyList[2:]), 'service': keyList[0][1:], 'proto': keyList[1][1:], 'weight': valList[0], 'port': valList[1], 'server': valList[2], 'prio': record['prio']} return srv def formatSRVentry(name, srvDict): if type(srvDict) is list: return [formatSRVentry(name, e) for e in srvDict] srv = srvDict for k in ['service', 'proto', 'prio', 'weight', 'port', 'server']: if k not in srv: log.warn('Missing member \"{}\" in SRV entry!'.format(k)) return {} return {'name': '_{x[service]}._{x[proto]}.{name}'.format(x=srv, name=str(name)), 'type': 'SRV', 'prio': srv['prio'], 'content': '{x[weight]} {x[port]} {x[server]}'.format(x=srv)} def isSubDict(subDict, contentDict): for k, v in subDict.items(): if k not in contentDict: return False if str(v) != str(contentDict[k]): return False return True def parseSPFentries(entryList): entryDict = {} for e in entryList: if e[0] in '+-~?': entryDict[e[1:]] = e[0] else: entryDict[e] = '+' return entryDict def formatSPFentries(entryDict): allVal = [] if 'all' in entryDict: allVal = [str(entryDict['all'])+'all'] del entryDict['all'] entryList = ['{v}{k}'.format(v=v,k=k) for k, v in entryDict.items()] entryList.extend(allVal) return entryList def qryDNS(nsName, qryName, recType, ns=None): resolver = dns.resolver.Resolver() if ns is not None: if type(ns) is not list: ns = [ns] if 0 < len(ns): resolver.nameservers = ns 
resolver.nameservers=[socket.gethostbyname(nsName)] return [rdata for rdata in resolver.query(qryName, recType)] def parseDMARC(dmarcStr): return {e.split('=')[0].replace(' ',''): e.split('=')[1].replace(' ','') for e in dmarcStr.split(';')} def formatDMARC(dmarcDict): v = 'v={v}'.format(v=dmarcDict['v']) del dmarcDict['v'] return ';'.join([v] + ['{k}={v}'.format(k=k, v=v) for k, v in dmarcDict.items()]) def sanIPv4(x): return re.sub('[^0-9.]', '', x) def sanIPv6(x): return re.sub('[^0-9:a-fA-F]', '', x) def curlGet(url): buff = BytesIO() c = pycurl.Curl() c.setopt(pycurl.CONNECTTIMEOUT, 4) c.setopt(c.URL, str(url)) c.setopt(c.WRITEDATA, buff) c.perform() c.close() return buff.getvalue().decode() def getIPv4(a = 'auto'): if 'auto' != a: return a try: ipv4Str = curlGet('ipv4.icanhazip.com') except Exception as e: return None return sanIPv4(ipv4Str) def getIPv6(aaaa = 'auto'): if 'auto' != aaaa: return aaaa try: ipv6Str = curlGet('ipv6.icanhazip.com') log.debug(ipv6Str) except Exception as e: return None return sanIPv6(ipv6Str) def genSPF(spf, behavior = '?all', v = 'spf1'): if type(spf) is str: spf = [spf] if type(spf) is set: spf = list(spf) if v is not None: spf = ['v=' + v] + spf if behavior is not None: spf += [behavior] return ' '.join(spf) def genCAA(caaDict): if type(caaDict) is dict: caaDict = [caaDict] caaList = [] for e in caaDict: caa = {'flag': 0, 'tag': 'issue'} caa.update(e) caaStr = '{x[flag]} {x[tag]} "{x[url]}"'.format(x=caa) caaList.append(caaStr) return caaList def parseCAA(caaRR): caaStr = caaRR['content'] log.debug(caaStr) caa = {} caa['flag'], caa['tag'], caa['url'] = caaStr.split(' ') caa['url'] = caa['url'][1:-1] caa = {str(k): str(v) for k, v in caa.items()} log.debug(caa) return caa def encDNSemail(x): xSpl = x.split('@') log.debug(xSpl) if 1 == len(xSpl): return x elif 1 < len(xSpl): return xSpl[0].replace('.', '\\.') + '.' + xSpl[1] + '.' 
else: raise(TypeError('No valid email address')) def decDNSemail(x): if 2 == len(x.split('@')): return x elif 2 < len(x.split('@')): raise(TypeError('No valid email address')) else: xSpl = x.split('\\.') y = '.'.join(xSpl[:-1]) + '.' + '@'.join(xSpl[-1].split('.', 1)) if '.' == y[0]: y = y[1:] if '.' == y[-1]: return y[:-1] else: return y def makeIP4(a): if a is None: a = 'auto' if type(a) is not list: a = [a] a = [getIPv4(e) for e in a] a = [e for e in a if e is not None] return a def makeIP6(aaaa): if aaaa is None: aaaa = 'auto' if type(aaaa) is not list: aaaa = [aaaa] log.debug(aaaa) aaaa = [getIPv6(e) for e in aaaa] aaaa = [e for e in aaaa if e is not None] log.debug(aaaa) return aaaa def soaUpdate(curSOAdict, updSOAdict): soa = dict(curSOAdict) soa.update(updSOAdict) soa['serial'] += 1 soa['hostmaster'] = encDNSemail(soa['hostmaster']) soaTXT = '{soa[primns]} {soa[hostmaster]} {soa[serial]} {soa[refresh]} {soa[retry]} {soa[expire]} {soa[ncttl]}'.format(soa = soa) return {'content': soaTXT, 'id': soa['id']} def soaQRYs2dict(soaNSqry, soaAPIqry): soa = soaNSqry return {'primns': soa.mname.to_text(), 'hostmaster': decDNSemail(soa.rname.to_text()), 'serial': soa.serial, 'refresh': soa.refresh, 'retry': soa.retry, 'expire': soa.expire, 'ncttl': soa.minimum, 'id': soaAPIqry['id']} def recordFilter(entry, records, parser=None): result = [] for rr in records: rr = dict(rr) if parser is not None: rr.update(parser(rr)) if not isSubDict(entry, rr): continue result.append(rr) return result class DNSUpTools(DNSUpdate): def __init__(self): DNSUpdate.__init__(self) def qrySOA(self, name): soaAPI = self.qry({'name': name, 'type': 'SOA'})[0] soaList = soaAPI['content'].split(' ') ns = [e['content'] for e in self.qryNS(name)[0]] soaNS = qryDNS(soaList[0], name, 'SOA', ns)[0] # extended query for last 4 values - WARNING internal nameserver update takes time, consecutive updates may result in inconsistencies return soaQRYs2dict(soaNS, soaAPI) def setSOAentry(self, name, 
updSOAdict): soa = self.qrySOA(name) soaRR = soaUpdate(soa, updSOAdict) self.updOrAddDictList({'name': name, 'type': 'SOA'}, soaRR) def addA(self, name, a = 'auto'): a = makeIP4(a) self.addList({'name': name, 'type': 'A'}, a) def delA(self, name, aDelete = '*', aPreserve = []): aPreserve = makeIP4(aPreserve) self.delList({'name': name, 'type': 'A'}, aDelete, aPreserve) def setA(self, name, a = 'auto'): self.addA(name, a) self.delA(name, '*', a) def addAAAA(self, name, aaaa): aaaa = makeIP6(aaaa) self.addList({'name': name, 'type': 'AAAA'}, aaaa) def delAAAA(self, name, aaaaDelete = '*', aaaaPreserve = []): aaaaPreserve = makeIP6(aaaaPreserve) self.delList({'name': name, 'type': 'AAAA'}, aaaaDelete, aaaaPreserve) def setAAAA(self, name, aaaa = 'auto'): self.addAAAA(name, aaaa) self.delAAAA(name, '*', aaaa) def addMX(self, name, mx): self.addDictList({'name': name, 'type': 'MX', 'prio': 10}, mx) def delMX(self, name, mxDelete = [{}], mxPreserve = []): self.delDictList({'name': name, 'type': 'MX'}, mxDelete, mxPreserve) def setMX(self, name, mx): self.addMX(name, mx) self.delMX(name, [{}], mx) def addCNAME(self, name, cname): self.addList({'name': name, 'type': 'CNAME'}, cname) def delCNAME(self, name, cnameDelete = '*', cnamePreserve = []): self.delList({'name': name, 'type': 'CNAME'}, cnameDelete, cnamePreserve) def setCNAME(self, name, cname): self.addCNAME(name, cname) self.delCNAME(name, '*', cname) def addTXT(self, name, txt): self.addList({'name': name, 'type': 'TXT'}, txt) def delTXT(self, name, txtDelete = '*', txtPreserve = []): self.delList({'name': name, 'type': 'TXT'}, txtDelete, txtPreserve) def setTXT(self, name, txt): self.addTXT(name, txt) self.delTXT(name, '*', txt) def addNS(self, name, ns): self.addList({'name': name, 'type': 'NS'}, ns) def delNS(self, name, nsDelete = '*', nsPreserve = []): self.delList({'name': name, 'type': 'NS'}, nsDelete, nsPreserve) def qryNS(self, name): return self.qryRR(name, 'NS') def setNS(self, name, ns): 
self.addNS(name, ns) self.delNS(name, '*', ns) def addTLSA(self, name, tlsaDict): tlsaDictList = tlsaFromFile(tlsaDict) tlsaRRdictList = formatTLSAentry(name, tlsaDictList) self.addDictList({}, tlsaRRdictList) def delTLSA(self, name, tlsaDelete={}, tlsaPreserve = []): if type(tlsaDelete) is dict: tlsaDelete = [tlsaDelete] if type(tlsaPreserve) is dict: tlsaPreserve = [tlsaPreserve] tlsaFromFile(tlsaDelete) tlsaFromFile(tlsaPreserve) for i, e in enumerate(tlsaDelete): if 'filename' in e: del tlsaDelete[i]['filename'] if 'op' in e: del tlsaDelete[i]['op'] for i, e in enumerate(tlsaPreserve): if 'filename' in e: del tlsaPreserve[i]['filename'] if 'op' in e: del tlsaPreserve[i]['op'] deleteRv = self.qryTLSA(name, tlsaDelete) preserveRv = self.qryTLSA(name, tlsaPreserve) return self.deleteRv(deleteRv, preserveRv) def setTLSA(self, name, tlsaDict): self.addTLSA(name, tlsaDict) self.delTLSA(name, {}, tlsaDict) def addTLSAfromCert(self, name, certFilenames, tlsaTypes = [[3,0,1], [3,0,2], [3,1,1], [3,1,2], [2,0,1], [2,0,2], [2,1,1], [2,1,2]]): if 'auto' == str(tlsaTypes): tlsaTypes = [[3,0,1], [3,0,2], [3,1,1], [3,1,2], [2,0,1], [2,0,2], [2,1,1], [2,1,2]] log.debug('name = %s' % name) log.debug('certFilenames = %s' % certFilenames) self.addTLSA(name, tlsaRecordsFromCertFile(certFilenames, tlsaTypes)) def delTLSApreserveFromCert(self, name, tlsaDelete = {}, certFilenamesPreserve = []): self.delTLSA(name, tlsaDelete, tlsaRecordsFromCertFile(certFilenamesPreserve)) def setTLSAfromCert(self, name, certFilenames, tlsaTypes = [[3,0,1], [3,0,2], [3,1,1], [3,1,2], [2,0,1], [2,0,2], [2,1,1], [2,1,2]]): if 'auto' == str(tlsaTypes): tlsaTypes = [[3,0,1], [3,0,2], [3,1,1], [3,1,2], [2,0,1], [2,0,2], [2,1,1], [2,1,2]] self.setTLSA(name, tlsaRecordsFromCertFile(certFilenames, tlsaTypes)) def setSPFentry(self, name, spfADD, spfDEL = {}): if 0 == len(spfADD) and 0 == len(spfDEL): return rrQ = self.qrySPF(name) if 0 == len(rrQ): self.setSPF(name, 
formatSPFentries(parseSPFentries(set(spfADD)))) return spfQ = rrQ[0]['content'].split(' ') spfID = rrQ[0]['id'] spfSqry = set(spfQ[1:]) spfSdel = set(spfDEL) if '*' in spfSdel: spfSqry = {} spfS = {e for e in spfSqry if e not in spfSdel} spfD = parseSPFentries(spfS) spfD.update(parseSPFentries(set(spfADD))) spfL = formatSPFentries(spfD) self.setSPF(name, spfL, spfID, spfQ[0][2:]) def qrySPF(self, name): rv = self.qry({'name': str(name), 'type': 'TXT'}) return [rr for rr in rv if 'v=spf1' in rr['content'].split(' ')] def delSPF(self, name): spf = self.qrySPF(name) self.setSPF(name, [], spf['id']) # only one SPF record allowed def setSPF(self, name, spf, rrID = None, v = 'spf1'): if 0 == len(spf): if rrID is None: return self.delete({'recordId': rrID}) return spf = ' '.join(formatSPFentries(parseSPFentries(spf))) txt = genSPF(spf, None, v) updR = {'content': txt} if rrID is not None: updR['id'] = rrID self.updOrAddDictList({'name': str(name), 'type': 'TXT'}, updR) def delDMARC(self, name): self.delTXT('_dmarc.'+str(name)) # only one DMARC record allowed def setDMARC(self, name, dmarcDict): log.debug(dmarcDict) if {} == dmarcDict: self.delDMARC(name) return dmarc = {'v': 'DMARC1', 'p': 'none'} dmarc.update(dmarcDict) dmarc = {k: v for k, v in dmarc.items() if '' != v} dmarcStr = formatDMARC(dmarc) self.update({'name': '_dmarc.'+str(name), 'type': 'TXT'}, {'content': dmarcStr}) def qryDMARC(self, name): dmarcRv = self.qry({'name': '_dmarc.'+str(name), 'type': 'TXT'}) dmarcQ = [parseDMARC(rr['content']) for rr in dmarcRv] return dmarcQ def setDMARCentry(self, name, dmarcDict): q = self.qryDMARC(name) dmarc = {} for e in q: dmarc.update(e) if '' in dmarcDict: dmarc = dict(dmarcDict) del dmarc[''] else: dmarc.update(dmarcDict) self.setDMARC(name, dmarc) def delADSP(self, name, adspDelete = '*', adspPreserve = []): if '*' == adspDelete: self.delTXT('_adsp._domainkey.' + str(name), '*', adspPreserve) else: self.delTXT('_adsp._domainkey.' 
+ str(name), 'dkim=' + str(adspDelete), adspPreserve) # only one ADSP record allowed def setADSP(self, name, adsp): if '' == adsp: self.delADSP(name) return self.update({'name': '_adsp._domainkey.' + str(name), 'type': 'TXT'}, {'content': 'dkim=' + str(adsp)}) def setACME(self, name, challenge=''): if '' == challenge: self.delACME(name) return self.update({'name': '_acme-challenge.' + str(name), 'type': 'TXT'}, {'content': str(challenge)}) def delACME(self, name): self.delTXT('_acme-challenge.' + str(name), '*') def addCAA(self, name, caaDict):
def setCAA(self, name, caaDict): self.addCAA(name, caaDict) self.delCAA(name, [{}], caaDict) def qryCAA(self, name, caaDict = {}): if type(caaDict) is dict: caaDict = [caaDict] for e in caaDict: e['name'] = str(name) return self.qryRR(str(name), 'CAA', parseCAA, caaDict, []) def delCAA(self, name, caaDelete = [{}], caaPreserve = []): deleteRv = self.qryCAA(name, caaDelete) preserveRv = self.qryCAA(name, caaPreserve) return self.deleteRv(deleteRv, preserveRv) def addSRV(self, name, srvDict): log.debug(srvDict) srvDictList = defaultDictList({'prio': 10, 'weight' : 0}, srvDict) srvRRdictList = formatSRVentry(name, srvDictList) self.addDictList({}, srvRRdictList) def qryRR(self, name, rrType, parser=None, rrDict = {}, qryFilters=[MatchUpperLabels]): rrRv = self.qryWild({'name': name, 'type': rrType}, qryFilters) if type(rrDict) is dict: rrDict = [rrDict] for i, e in enumerate(rrDict): rrDict[i]['name'] = name return [recordFilter(e, rrRv, parser) for e in rrDict] def qryTLSA(self, name, tlsaDict = {}): return self.qryRR(name, 'TLSA', parseTLSAentry, tlsaDict) def qrySRV(self, name, srvDict = {}): return self.qryRR(name, 'SRV', parseSRVentry, srvDict) def delSRV(self, name, srvDelete, srvPreserve = []): deleteRv = self.qrySRV(name, srvDelete) preserveRv = self.qrySRV(name, srvPreserve) return self.deleteRv(deleteRv, preserveRv) def setSRV(self, name, srvDict): self.addSRV(name, srvDict) self.delSRV(name, {}, srvDict) def addDKIM(self, name, dkimDict): dkimDict = dkimFromFile(dkimDict) dkimRRdictList = formatDKIMentry(name, dkimDict) self.addDictList({}, dkimRRdictList) def addDKIMfromFile(self, name, filenames): if type(filenames) is str: filenames = [filenames] dkimDictList = [{'filename': e} for e in filenames] self.addDKIM(name, dkimDictList) def qryDKIM(self, name, dkimDict): rv = self.qryRR(name, 'TXT', parseDKIMentry, dkimDict) rv = [f for e in rv for f in e if f['keyname'] != '_adsp'] return rv def delDKIM(self, name, dkimDelete = {}, dkimPreserve = []): if 
type(dkimDelete) is dict: dkimDelete = [dkimDelete] if type(dkimPreserve) is dict: dkimPreserve = [dkimPreserve] dkimFromFile(dkimDelete) dkimFromFile(dkimPreserve) for i, e in enumerate(dkimDelete): if 'filename' in e: del dkimDelete[i]['filename'] for i, e in enumerate(dkimPreserve): if 'filename' in e: del dkimPreserve[i]['filename'] deleteRv = self.qryDKIM(name, dkimDelete) preserveRv = self.qryDKIM(name, dkimPreserve) return self.deleteRv(deleteRv, preserveRv) def delDKIMpreserveFromFile(self, name, filenames): if type(filenames) is str: filenames = [filenames] dkimPreserveList = [{'filename': e} for e in filenames] self.delDKIM(name, {}, dkimPreserveList) def setDKIM(self, name, dkimDict): self.addDKIM(name, dkimDict) self.delDKIM(name, {}, dkimDict) def setDKIMfromFile(self, name, filenames): self.addDKIMfromFile(name, filenames) self.delDKIMpreserveFromFile(name, filenames)
try: self.addList({'name': str(name), 'type': 'CAA'}, genCAA(caaDict)) except KeyError as e: log.warn('Not adding CAA record!')
tree_to_cc_transformations_test.py
# Copyright 2018, The TensorFlow Federated Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import tensorflow as tf from tensorflow_federated.python.common_libs import test from tensorflow_federated.python.core.api import computation_types from tensorflow_federated.python.core.impl import tree_to_cc_transformations from tensorflow_federated.python.core.impl.compiler import building_block_factory from tensorflow_federated.python.core.impl.compiler import building_blocks from tensorflow_federated.python.core.impl.compiler import tensorflow_computation_factory from tensorflow_federated.python.core.impl.compiler import transformation_utils from tensorflow_federated.python.core.impl.compiler import tree_transformations from tensorflow_federated.python.core.impl.context_stack import set_default_context from tensorflow_federated.python.core.impl.executors import execution_context from tensorflow_federated.python.core.impl.executors import executor_stacks from tensorflow_federated.python.core.impl.wrappers import computation_wrapper_instances def _create_compiled_computation(py_fn, parameter_type): proto, type_signature = tensorflow_computation_factory.create_computation_for_py_fn( py_fn, parameter_type) return building_blocks.CompiledComputation( proto, type_signature=type_signature) def parse_tff_to_tf(comp): comp, _ = tree_transformations.insert_called_tf_identity_at_leaves(comp) parser_callable = tree_to_cc_transformations.TFParser() comp, _ = 
tree_transformations.replace_called_lambda_with_block(comp) comp, _ = tree_transformations.inline_block_locals(comp) comp, _ = tree_transformations.replace_selection_from_tuple_with_element(comp) new_comp, transformed = transformation_utils.transform_postorder(
class ParseTFFToTFTest(test.TestCase):
  """Tests that `parse_tff_to_tf` reduces small ASTs to one TF block.

  Each test builds a lambda-wrapped building-block AST, parses it, and
  checks that the result is a single `CompiledComputation` that is
  executionally equivalent to the original lambda.
  """

  def test_raises_on_none(self):
    with self.assertRaises(TypeError):
      parse_tff_to_tf(None)

  def test_does_not_transform_standalone_intrinsic(self):
    type_signature = computation_types.TensorType(tf.int32)
    standalone_intrinsic = building_blocks.Intrinsic('dummy', type_signature)
    non_transformed, _ = parse_tff_to_tf(standalone_intrinsic)
    self.assertEqual(standalone_intrinsic.compact_representation(),
                     non_transformed.compact_representation())

  def test_replaces_lambda_to_selection_from_called_graph_with_tf_of_same_type(
      self):
    identity_tf_block_type = computation_types.StructType(
        [tf.int32, tf.float32])
    identity_tf_block = building_block_factory.create_compiled_identity(
        identity_tf_block_type)
    tuple_ref = building_blocks.Reference('x', [tf.int32, tf.float32])
    called_tf_block = building_blocks.Call(identity_tf_block, tuple_ref)
    selection_from_call = building_blocks.Selection(called_tf_block, index=1)
    lambda_wrapper = building_blocks.Lambda('x', [tf.int32, tf.float32],
                                            selection_from_call)

    parsed, modified = parse_tff_to_tf(lambda_wrapper)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)

    self.assertIsInstance(parsed, building_blocks.CompiledComputation)
    self.assertTrue(modified)
    # TODO(b/157172423): change to assertEqual when Py container is preserved.
    parsed.type_signature.check_equivalent_to(lambda_wrapper.type_signature)
    self.assertEqual(exec_lambda([0, 1.]), exec_tf([0, 1.]))

  def test_replaces_lambda_to_called_graph_with_tf_of_same_type(self):
    identity_tf_block_type = computation_types.TensorType(tf.int32)
    identity_tf_block = building_block_factory.create_compiled_identity(
        identity_tf_block_type)
    int_ref = building_blocks.Reference('x', tf.int32)
    called_tf_block = building_blocks.Call(identity_tf_block, int_ref)
    lambda_wrapper = building_blocks.Lambda('x', tf.int32, called_tf_block)

    parsed, modified = parse_tff_to_tf(lambda_wrapper)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)

    self.assertIsInstance(parsed, building_blocks.CompiledComputation)
    self.assertTrue(modified)
    # TODO(b/157172423): change to assertEqual when Py container is preserved.
    parsed.type_signature.check_equivalent_to(lambda_wrapper.type_signature)
    self.assertEqual(exec_lambda(2), exec_tf(2))

  def test_replaces_lambda_to_called_graph_on_selection_from_arg_with_tf_of_same_type(
      self):
    identity_tf_block_type = computation_types.TensorType(tf.int32)
    identity_tf_block = building_block_factory.create_compiled_identity(
        identity_tf_block_type)
    tuple_ref = building_blocks.Reference('x', [tf.int32, tf.float32])
    selected_int = building_blocks.Selection(tuple_ref, index=0)
    called_tf_block = building_blocks.Call(identity_tf_block, selected_int)
    lambda_wrapper = building_blocks.Lambda('x', [tf.int32, tf.float32],
                                            called_tf_block)

    parsed, modified = parse_tff_to_tf(lambda_wrapper)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)

    self.assertIsInstance(parsed, building_blocks.CompiledComputation)
    self.assertTrue(modified)
    # TODO(b/157172423): change to assertEqual when Py container is preserved.
    parsed.type_signature.check_equivalent_to(lambda_wrapper.type_signature)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)
    self.assertEqual(exec_lambda([3, 4.]), exec_tf([3, 4.]))

  def test_replaces_lambda_to_called_graph_on_selection_from_arg_with_tf_of_same_type_with_names(
      self):
    identity_tf_block_type = computation_types.TensorType(tf.int32)
    identity_tf_block = building_block_factory.create_compiled_identity(
        identity_tf_block_type)
    tuple_ref = building_blocks.Reference('x', [('a', tf.int32),
                                                ('b', tf.float32)])
    selected_int = building_blocks.Selection(tuple_ref, index=0)
    called_tf_block = building_blocks.Call(identity_tf_block, selected_int)
    lambda_wrapper = building_blocks.Lambda('x', [('a', tf.int32),
                                                  ('b', tf.float32)],
                                            called_tf_block)

    parsed, modified = parse_tff_to_tf(lambda_wrapper)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)

    self.assertIsInstance(parsed, building_blocks.CompiledComputation)
    self.assertTrue(modified)
    self.assertEqual(parsed.type_signature, lambda_wrapper.type_signature)
    self.assertEqual(exec_lambda({'a': 5, 'b': 6.}), exec_tf({'a': 5, 'b': 6.}))

  def test_replaces_lambda_to_called_graph_on_tuple_of_selections_from_arg_with_tf_of_same_type(
      self):
    identity_tf_block_type = computation_types.StructType([tf.int32, tf.bool])
    identity_tf_block = building_block_factory.create_compiled_identity(
        identity_tf_block_type)
    tuple_ref = building_blocks.Reference('x', [tf.int32, tf.float32, tf.bool])
    selected_int = building_blocks.Selection(tuple_ref, index=0)
    selected_bool = building_blocks.Selection(tuple_ref, index=2)
    created_tuple = building_blocks.Struct([selected_int, selected_bool])
    called_tf_block = building_blocks.Call(identity_tf_block, created_tuple)
    lambda_wrapper = building_blocks.Lambda('x',
                                            [tf.int32, tf.float32, tf.bool],
                                            called_tf_block)

    parsed, modified = parse_tff_to_tf(lambda_wrapper)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)

    self.assertIsInstance(parsed, building_blocks.CompiledComputation)
    self.assertTrue(modified)
    # TODO(b/157172423): change to assertEqual when Py container is preserved.
    parsed.type_signature.check_equivalent_to(lambda_wrapper.type_signature)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)
    self.assertEqual(exec_lambda([7, 8., True]), exec_tf([7, 8., True]))

  def test_replaces_lambda_to_called_graph_on_tuple_of_selections_from_arg_with_tf_of_same_type_with_names(
      self):
    identity_tf_block_type = computation_types.StructType([tf.int32, tf.bool])
    identity_tf_block = building_block_factory.create_compiled_identity(
        identity_tf_block_type)
    tuple_ref = building_blocks.Reference('x', [('a', tf.int32),
                                                ('b', tf.float32),
                                                ('c', tf.bool)])
    selected_int = building_blocks.Selection(tuple_ref, index=0)
    selected_bool = building_blocks.Selection(tuple_ref, index=2)
    created_tuple = building_blocks.Struct([selected_int, selected_bool])
    called_tf_block = building_blocks.Call(identity_tf_block, created_tuple)
    lambda_wrapper = building_blocks.Lambda('x', [('a', tf.int32),
                                                  ('b', tf.float32),
                                                  ('c', tf.bool)],
                                            called_tf_block)

    parsed, modified = parse_tff_to_tf(lambda_wrapper)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)

    self.assertIsInstance(parsed, building_blocks.CompiledComputation)
    self.assertTrue(modified)
    self.assertEqual(parsed.type_signature, lambda_wrapper.type_signature)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)
    self.assertEqual(
        exec_lambda({
            'a': 9,
            'b': 10.,
            'c': False
        }), exec_tf({
            'a': 9,
            'b': 10.,
            'c': False
        }))

  def test_replaces_lambda_to_unnamed_tuple_of_called_graphs_with_tf_of_same_type(
      self):
    int_tensor_type = computation_types.TensorType(tf.int32)
    int_identity_tf_block = building_block_factory.create_compiled_identity(
        int_tensor_type)
    float_tensor_type = computation_types.TensorType(tf.float32)
    float_identity_tf_block = building_block_factory.create_compiled_identity(
        float_tensor_type)
    tuple_ref = building_blocks.Reference('x', [tf.int32, tf.float32])
    selected_int = building_blocks.Selection(tuple_ref, index=0)
    selected_float = building_blocks.Selection(tuple_ref, index=1)
    called_int_tf_block = building_blocks.Call(int_identity_tf_block,
                                               selected_int)
    called_float_tf_block = building_blocks.Call(float_identity_tf_block,
                                                 selected_float)
    tuple_of_called_graphs = building_blocks.Struct(
        [called_int_tf_block, called_float_tf_block])
    lambda_wrapper = building_blocks.Lambda('x', [tf.int32, tf.float32],
                                            tuple_of_called_graphs)

    parsed, modified = parse_tff_to_tf(lambda_wrapper)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)

    self.assertIsInstance(parsed, building_blocks.CompiledComputation)
    self.assertTrue(modified)
    # TODO(b/157172423): change to assertEqual when Py container is preserved.
    parsed.type_signature.check_equivalent_to(lambda_wrapper.type_signature)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)
    self.assertEqual(exec_lambda([11, 12.]), exec_tf([11, 12.]))

  def test_replaces_lambda_to_named_tuple_of_called_graphs_with_tf_of_same_type(
      self):
    int_tensor_type = computation_types.TensorType(tf.int32)
    int_identity_tf_block = building_block_factory.create_compiled_identity(
        int_tensor_type)
    float_tensor_type = computation_types.TensorType(tf.float32)
    float_identity_tf_block = building_block_factory.create_compiled_identity(
        float_tensor_type)
    tuple_ref = building_blocks.Reference('x', [tf.int32, tf.float32])
    selected_int = building_blocks.Selection(tuple_ref, index=0)
    selected_float = building_blocks.Selection(tuple_ref, index=1)
    called_int_tf_block = building_blocks.Call(int_identity_tf_block,
                                               selected_int)
    called_float_tf_block = building_blocks.Call(float_identity_tf_block,
                                                 selected_float)
    tuple_of_called_graphs = building_blocks.Struct([('a', called_int_tf_block),
                                                     ('b',
                                                      called_float_tf_block)])
    lambda_wrapper = building_blocks.Lambda('x', [tf.int32, tf.float32],
                                            tuple_of_called_graphs)

    parsed, modified = parse_tff_to_tf(lambda_wrapper)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)

    self.assertIsInstance(parsed, building_blocks.CompiledComputation)
    self.assertTrue(modified)
    # TODO(b/157172423): change to assertEqual when Py container is preserved.
    parsed.type_signature.check_equivalent_to(lambda_wrapper.type_signature)
    self.assertEqual(exec_lambda([13, 14.]), exec_tf([13, 14.]))

  def test_replaces_lambda_to_called_composition_of_tf_blocks_with_tf_of_same_type_named_param(
      self):
    selection_type = computation_types.StructType([('a', tf.int32),
                                                   ('b', tf.float32)])
    selection_tf_block = _create_compiled_computation(lambda x: x[0],
                                                      selection_type)
    add_one_int_type = computation_types.TensorType(tf.int32)
    add_one_int_tf_block = _create_compiled_computation(lambda x: x + 1,
                                                        add_one_int_type)
    int_ref = building_blocks.Reference('x', [('a', tf.int32),
                                              ('b', tf.float32)])
    called_selection = building_blocks.Call(selection_tf_block, int_ref)
    one_added = building_blocks.Call(add_one_int_tf_block, called_selection)
    lambda_wrapper = building_blocks.Lambda('x', [('a', tf.int32),
                                                  ('b', tf.float32)],
                                            one_added)

    parsed, modified = parse_tff_to_tf(lambda_wrapper)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)

    self.assertIsInstance(parsed, building_blocks.CompiledComputation)
    self.assertTrue(modified)
    # TODO(b/157172423): change to assertEqual when Py container is preserved.
    parsed.type_signature.check_equivalent_to(lambda_wrapper.type_signature)
    self.assertEqual(
        exec_lambda({
            'a': 15,
            'b': 16.
        }), exec_tf({
            'a': 15,
            'b': 16.
        }))

  def test_replaces_lambda_to_called_tf_block_with_replicated_lambda_arg_with_tf_block_of_same_type(
      self):
    sum_and_add_one_type = computation_types.StructType([tf.int32, tf.int32])
    sum_and_add_one = _create_compiled_computation(lambda x: x[0] + x[1] + 1,
                                                   sum_and_add_one_type)
    int_ref = building_blocks.Reference('x', tf.int32)
    tuple_of_ints = building_blocks.Struct((int_ref, int_ref))
    summed = building_blocks.Call(sum_and_add_one, tuple_of_ints)
    lambda_wrapper = building_blocks.Lambda('x', tf.int32, summed)

    parsed, modified = parse_tff_to_tf(lambda_wrapper)
    exec_lambda = computation_wrapper_instances.building_block_to_computation(
        lambda_wrapper)
    exec_tf = computation_wrapper_instances.building_block_to_computation(
        parsed)

    self.assertIsInstance(parsed, building_blocks.CompiledComputation)
    self.assertTrue(modified)
    # TODO(b/157172423): change to assertEqual when Py container is preserved.
    parsed.type_signature.check_equivalent_to(lambda_wrapper.type_signature)
    self.assertEqual(exec_lambda(17), exec_tf(17))


if __name__ == '__main__':
  # Run the tests against a local executor stack installed as the default
  # execution context.
  factory = executor_stacks.local_executor_factory()
  context = execution_context.ExecutionContext(executor_fn=factory)
  set_default_context.set_default_context(context)
  test.main()
comp, parser_callable) return new_comp, transformed
file.go
package serializer import ( "fmt" "io/ioutil" "github.com/golang/protobuf/proto" ) // WriteProtocolbufToJSONFile writes protocol buffer message to JSON file func WriteProtocolbufToJSONFile(message proto.Message, filename string) error { data, err := ProtobufToJSON(message) if err != nil { return fmt.Errorf("cannot marshal proto message to JSON: %w", err) } err = ioutil.WriteFile(filename, []byte(data), 0644)
} // WriteProtobufToBinaryFile writes protocol buffer message to binary file func WriteProtobufToBinaryFile(message proto.Message, filename string) error { data, err := proto.Marshal(message) if err != nil { return fmt.Errorf("cannot marshal proto message to binary: %w", err) } err = ioutil.WriteFile(filename, data, 0644) if err != nil { return fmt.Errorf("cannot write binary data to file: %w", err) } return nil } // ReadProtobufFromBinaryFile reads protocol buffer message from binary file func ReadProtobufFromBinaryFile(filename string, message proto.Message) error { data, err := ioutil.ReadFile(filename) if err != nil { return fmt.Errorf("cannot read binary data from file: %w", err) } err = proto.Unmarshal(data, message) if err != nil { return fmt.Errorf("cannot unmarshal binary to proto message: %w", err) } return nil }
if err != nil { return fmt.Errorf("cannot write JSON data to file: %w", err) } return nil
cc_info.py
from dataclasses import dataclass from typing import List, Optional, Tuple
from ethgreen.types.blockchain_format.program import Program from ethgreen.types.blockchain_format.sized_bytes import bytes32 from ethgreen.util.streamable import Streamable, streamable @dataclass(frozen=True) @streamable class CCInfo(Streamable): my_genesis_checker: Optional[Program] # this is the program lineage_proofs: List[Tuple[bytes32, Optional[Program]]] # {coin.name(): lineage_proof}
emptydisk.go
package emptydisk import ( "os" "os/exec" "path" "strconv" "kubevirt.io/kubevirt/pkg/api/v1" ) var EmptyDiskBaseDir = "/var/run/libvirt/empty-disks/" func CreateTemporaryDisks(vmi *v1.VirtualMachineInstance) error { for _, volume := range vmi.Spec.Volumes { if volume.EmptyDisk != nil
} return nil } func FilePathForVolumeName(volumeName string) string { return path.Join(EmptyDiskBaseDir, volumeName+".qcow2") }
{ // qemu-img takes the size in bytes or in Kibibytes/Mebibytes/...; lets take bytes size := strconv.FormatInt(volume.EmptyDisk.Capacity.ToDec().ScaledValue(0), 10) file := FilePathForVolumeName(volume.Name) if err := os.MkdirAll(EmptyDiskBaseDir, 0777); err != nil { return err } if _, err := os.Stat(file); os.IsNotExist(err) { if err := exec.Command("qemu-img", "create", "-f", "qcow2", file, size).Run(); err != nil { return err } } else if err != nil { return err } }
client.go
// // Copyright (c) 2019-2022 Red Hat, Inc. // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package client import ( "fmt" "os" dw "github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2" "k8s.io/apimachinery/pkg/runtime" utilruntime "k8s.io/apimachinery/pkg/util/runtime" clientgoscheme "k8s.io/client-go/kubernetes/scheme" "log" "os/exec" "strconv" "time" "k8s.io/client-go/kubernetes" "k8s.io/client-go/tools/clientcmd" crclient "sigs.k8s.io/controller-runtime/pkg/client" ) var ( scheme = runtime.NewScheme() ) func init() { utilruntime.Must(clientgoscheme.AddToScheme(scheme)) utilruntime.Must(dw.AddToScheme(scheme)) } type K8sClient struct { kubeClient *kubernetes.Clientset crClient crclient.Client kubeCfgFile string // generate when client is created and store config there } // NewK8sClientWithKubeConfig creates kubernetes client wrapper with the specified kubeconfig file func NewK8sClientWithKubeConfig(kubeconfigFile string) (*K8sClient, error)
// NewK8sClientWithKubeConfig creates kubernetes client wrapper with the token func NewK8sClientWithToken(baseKubeConfig, token string) (*K8sClient, error) { cfgBump := fmt.Sprintf("/tmp/dev.%s.kubeconfig", generateUniqPrefixForFile()) err := copyFile(baseKubeConfig, cfgBump) if err != nil { return nil, err } cmd := exec.Command("bash", "-c", fmt.Sprintf( "KUBECONFIG=%s"+ " oc login --token %s --insecure-skip-tls-verify=true", cfgBump, token)) outBytes, err := cmd.CombinedOutput() output := string(outBytes) cfg, err := clientcmd.BuildConfigFromFlags("", cfgBump) if err != nil { log.Printf("Failed to login with oc: %s", output) return nil, err } client, err := kubernetes.NewForConfig(cfg) if err != nil { return nil, err } crClient, err := crclient.New(cfg, crclient.Options{ Scheme: scheme, }) if err != nil { return nil, err } return &K8sClient{ kubeClient: client, crClient: crClient, kubeCfgFile: cfgBump, }, nil } // Kube returns the clientset for Kubernetes upstream. func (c *K8sClient) Kube() kubernetes.Interface { return c.kubeClient } //read a source file and copy to the selected path func copyFile(sourceFile string, destinationFile string) error { input, err := os.ReadFile(sourceFile) if err != nil { return err } err = os.WriteFile(destinationFile, input, 0644) if err != nil { return err } return nil } //generateUniqPrefixForFile generates unique prefix by using current time in milliseconds and get last 5 numbers func generateUniqPrefixForFile() string { //get the uniq time in seconds as string prefix := strconv.FormatInt(time.Now().UnixNano(), 10) //cut the string to last 5 uniq numbers prefix = prefix[14:] return prefix }
{ cfg, err := clientcmd.BuildConfigFromFlags("", kubeconfigFile) if err != nil { return nil, err } cfgBump := fmt.Sprintf("/tmp/admin.%s.kubeconfig", generateUniqPrefixForFile()) err = copyFile(kubeconfigFile, cfgBump) if err != nil { return nil, err } client, err := kubernetes.NewForConfig(cfg) if err != nil { return nil, err } crClient, err := crclient.New(cfg, crclient.Options{}) if err != nil { return nil, err } return &K8sClient{ kubeClient: client, crClient: crClient, kubeCfgFile: cfgBump, }, nil }
abs.rs
use nu_protocol::ast::Call; use nu_protocol::engine::{Command, EngineState, Stack}; use nu_protocol::{Category, Example, PipelineData, ShellError, Signature, Span, Value}; #[derive(Clone)] pub struct SubCommand; impl Command for SubCommand { fn name(&self) -> &str { "math abs" } fn signature(&self) -> Signature { Signature::build("math abs").category(Category::Math) } fn usage(&self) -> &str { "Returns absolute values of a list of numbers" } fn run( &self, engine_state: &EngineState, _stack: &mut Stack, call: &Call, input: PipelineData, ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> { let head = call.head; input.map( move |value| abs_helper(value, head), engine_state.ctrlc.clone(), ) } fn examples(&self) -> Vec<Example> { vec![Example { description: "Get absolute of each value in a list of numbers", example: "[-50 -100.0 25] | math abs", result: Some(Value::List { vals: vec![ Value::test_int(50), Value::Float {
}, Value::test_int(25), ], span: Span::test_data(), }), }] } } fn abs_helper(val: Value, head: Span) -> Value { match val { Value::Int { val, span } => Value::int(val.abs(), span), Value::Float { val, span } => Value::Float { val: val.abs(), span, }, Value::Duration { val, span } => Value::Duration { val: val.abs(), span, }, other => Value::Error { error: ShellError::UnsupportedInput( format!( "Only numerical values are supported, input type: {:?}", other.get_type() ), head, ), }, } } #[cfg(test)] mod test { use super::*; #[test] fn test_examples() { use crate::test_examples; test_examples(SubCommand {}) } }
val: 100.0, span: Span::test_data(),
mod.rs
use std::collections::hash_map::RandomState; use std::collections::HashMap; use std::sync::Arc; use futures::future::join_all; use resiter::GetOks; use rusoto_core::{Region, RusotoError}; use rusoto_credential::StaticProvider; use rusoto_ecs::{Cluster, DescribeClustersError, ListClustersError}; use rusoto_ecs::DescribeClustersRequest; use rusoto_ecs::DescribeClustersResponse; use rusoto_ecs::DescribeServicesRequest; use rusoto_ecs::Ecs; use rusoto_ecs::EcsClient; use rusoto_ecs::ListClustersRequest; use rusoto_ecs::ListClustersResponse; use rusoto_ecs::ListServicesRequest; use rusoto_ecs::ListServicesResponse; use rusoto_ecs::ListTasksRequest; use rusoto_ecs::ListTasksResponse; use rusoto_ecs::Service; use warp::reject; use warp::Rejection; use crate::aws::client; use crate::aws::client::HttpClient; use crate::aws::credentials::{build_credential, Credentials}; use crate::aws::dto::AwsRequest; use crate::aws::ecs::dto::{ClusterResponse, ResponseWrapper, ServiceResponse}; use crate::aws::manager::Config; use crate::error::ErrorWrapper; use crate::extract_rejection; use anyhow::{anyhow, Error}; mod dto; pub async fn get_ecs_filter(request: AwsRequest) -> Result<impl warp::Reply, Rejection> { let config = extract_rejection!(Config::load())?; let client = Arc::new(extract_rejection!(client::new_client())?); let creds = extract_rejection!(build_credential(&request.role_arn, &config, &client).await)?; let ecs_client = build_ecs_client(client.clone(), creds); let query = extract_rejection!(query_ecs(ecs_client).await)?; let result = extract_rejection!(map_to_response(query.0, query.1, query.2))?; Ok(warp::reply::json(&result)) } async fn query_ecs(client: EcsClient) -> Result<(DescribeClustersResponse, HashMap<String, Vec<Service>, RandomState>, HashMap<String, ListTasksResponse, RandomState>), Error> { let client = Arc::new(client); let list_clusters = get_clusters(&client.clone()).await?; let cluster_arns = list_clusters.cluster_arns.clone(); let cluster_arns_cloned = 
list_clusters.cluster_arns.clone(); let client_cloned = client.clone(); let clusters_described = tokio::task::spawn(async move { describe_clusters(&client_cloned, &cluster_arns).await }); let client_cloned = client.clone(); let services = tokio::task::spawn(async move { get_services(&client_cloned, &cluster_arns_cloned).await // Now have services mapped to cluster ids }); let clusters_described = clusters_described.await??; // Can take name, pending and running in here let services = services.await??; // Now have services mapped to cluster ids let services_described = describe_services(&client, services.clone()).await; let tasks = get_tasks(&client, services_described.clone()).await?; Ok((clusters_described, services_described, tasks)) } pub fn build_ecs_client(client: Arc<HttpClient>, creds: Credentials) -> EcsClient { let cred_provider = StaticProvider::new( creds.aws_access_key, creds.aws_secret_key, Some(creds.aws_sts_token),
} fn map_to_response( clusters_described: DescribeClustersResponse, services_described: HashMap<String, Vec<Service>>, tasks: HashMap<String, ListTasksResponse>, ) -> Result<ResponseWrapper, Error> { let cluster_map: HashMap<String, Cluster> = build_cluster_map(clusters_described)?; let mut clusters: Vec<ClusterResponse> = cluster_map .keys() .filter(|key| !key.contains("default")) .map(|cluster_id| { if let Some(cluster) = cluster_map.get(cluster_id) { Ok((cluster_id, cluster)) } else { Err(anyhow!("Failed to read cluster arn")) } }) .oks() .map(|(cluster_id, cluster)| { if let Ok(services) = iterate_services_described(&services_described, &tasks, cluster_id) { Ok((cluster, services)) } else { Err(anyhow!("Failed to iterate services")) } }) .oks() .map(|(cluster, services)| { ClusterResponse { active_services_count: cluster.active_services_count, cluster_arn: cluster.clone().cluster_arn, cluster_name: cluster.clone().cluster_name, pending_tasks_count: cluster.pending_tasks_count, running_tasks_count: cluster.running_tasks_count, services, } }) .collect(); clusters.sort_by(|a, b| a.cluster_name.cmp(&b.cluster_name)); let response = ResponseWrapper { clusters }; Ok(response) } fn build_cluster_map(clusters_described: DescribeClustersResponse) -> Result<HashMap<String, Cluster>, Error> { Ok(clusters_described.clusters.ok_or_else(|| anyhow!("Failed to read clusters"))? .into_iter() .map(|cluster| { if let Some(value) = cluster.clone().cluster_arn { Ok((value, cluster)) } else { Err(anyhow!("Failed to read cluster arn")) } }) .oks() .collect()) } fn iterate_services_described( services_described: &HashMap<String, Vec<Service>>, tasks: &HashMap<String, ListTasksResponse>, cluster_id: &str, ) -> Result<Vec<ServiceResponse>, Error> { let mut services: Vec<ServiceResponse> = services_described.get(cluster_id) .ok_or_else(|| anyhow!(format!("Failed to receive cluster {} from services {:?}", cluster_id, services_described)))? 
.iter() .map(|service| { let service = service.clone(); if let Some(service_arn) = service.service_arn.clone() { Ok((service_arn, service)) } else { Err(anyhow!("Failed to read service arn")) } }) .oks() .map(|(service_arn, service)| { ServiceResponse { created_at: service.created_at, created_by: service.created_by, desired_count: service.desired_count, health_check_grace_period_seconds: service.health_check_grace_period_seconds, pending_count: service.pending_count, running_count: service.running_count, service_arn: service.service_arn, service_name: service.service_name, task_definition: service.task_definition, tasks: tasks.get(&service_arn).unwrap().task_arns.as_ref().unwrap().clone(), } }).collect(); services.sort_by(|a, b| a.service_name.cmp(&b.service_name)); Ok(services) } pub async fn _iterate_clients(clients: &[EcsClient]) -> Result<Vec<ListClustersResponse>, Error> { let clusters = clients .iter() .map(|client| async move { get_clusters(&client).await }); let joined_results = join_all(clusters).await .into_iter() .oks() .collect(); Ok(joined_results) } pub async fn get_clusters(client: &EcsClient) -> Result<ListClustersResponse, RusotoError<ListClustersError>> { client.list_clusters(ListClustersRequest { max_results: None, next_token: None, }).await } pub async fn describe_clusters(client: &EcsClient, clusters: &Option<Vec<String>>) -> Result<DescribeClustersResponse, RusotoError<DescribeClustersError>> { let owned_clusters = clusters.to_owned(); let request = DescribeClustersRequest { clusters: owned_clusters, include: None, }; Ok(client.describe_clusters(request).await?) 
} pub async fn get_services(client: &EcsClient, clusters: &Option<Vec<String>>) -> Result<HashMap<String, ListServicesResponse>, Error> { let owned = clusters.to_owned().ok_or_else(|| anyhow!("Failed to take ownership of clusters"))?; let services = owned .iter() .map(|cluster| async move { let cluster = cluster.to_owned(); (cluster.clone(), client.list_services(ListServicesRequest { cluster: Some(cluster), launch_type: None, max_results: None, next_token: None, scheduling_strategy: None, }).await.unwrap()) }); Ok(join_all(services).await .into_iter() .collect()) } pub async fn get_tasks(client: &EcsClient, clusters: HashMap<String, Vec<Service>>) -> Result<HashMap<String, ListTasksResponse>, Error> { let tasks = clusters.values().flatten() .map(|service| async move { let cluster = service.clone().cluster_arn.unwrap(); (service.service_arn.clone().unwrap(), client.list_tasks(ListTasksRequest { cluster: Some(cluster), container_instance: None, desired_status: None, family: None, launch_type: None, max_results: None, next_token: None, service_name: service.clone().service_name, started_by: None, }).await.unwrap()) }); Ok(join_all(tasks).await .into_iter() .collect()) } pub async fn describe_services(client: &EcsClient, services: HashMap<String, ListServicesResponse>) -> HashMap<String, Vec<Service>> { let clusters_service_arns = services.iter() .filter(|(_, list_services)| !list_services.service_arns.as_ref().unwrap().is_empty()) .map(|(cluster, list_services)| { let cluster = cluster.to_owned(); if let Some(service_arns) = list_services.clone().service_arns { Ok((cluster, service_arns)) } else { Err(anyhow!("Could not unwrap service arns")) } }); let clusters_services = clusters_service_arns .oks() .map(|(cluster, service_arns)| async move { let cluster = cluster.to_owned(); let services = service_arns; let result = client.describe_services(DescribeServicesRequest { cluster: Some(cluster.clone()), include: None, services, }).await; if let Ok(services) = result { 
Ok((cluster.clone(), services)) } else { Err(anyhow!("Failed to describe services")) } }); join_all(clusters_services).await .into_iter() .oks() .map(|(cluster_id, response)| { if let Some(services) = response.services { Ok((cluster_id, services)) } else { Err(anyhow!("Failed to unwrap services from response")) } }) .oks() .collect() } #[cfg(test)] mod tests { #[test] fn test_get_clusters() {} #[test] fn test_get_clusters_fail() {} #[test] fn test_describe_clusters() {} #[test] fn test_describe_clusters_fail() {} #[test] fn test_iterate_clients() {} #[test] fn test_iterate_clients_fail() {} #[test] fn test_get_services() {} #[test] fn test_get_services_fail() {} #[test] fn test_describe_services() {} #[test] fn test_describe_services_fail() {} #[test] fn test_get_tasks() {} #[test] fn test_get_tasks_fail() {} #[test] fn test_iterate_services_described() {} #[test] fn test_iterate_services_described_fail() {} #[test] fn test_build_cluster_map() {} #[test] fn test_map_to_responses() {} #[test] fn test_map_to_responses_fail() {} #[test] fn test_build_ecs_client() {} #[test] fn test_build_ecs_client_fail() {} #[test] fn test_query_ecs() {} #[test] fn test_query_ecs_fail() {} }
None, ); EcsClient::new_with(client, cred_provider, Region::EuWest1) //TODO update region
zdd.py
#!/usr/bin/env python3 import argparse import csv import json import logging import math import socket import subprocess import sys import time import traceback from datetime import datetime from collections import namedtuple import requests import six.moves.urllib as urllib from common import (get_marathon_auth_params, set_logging_args, set_marathon_auth_args, setup_logging) from utils import get_task_ip_and_ports from zdd_exceptions import ( AppCreateException, AppDeleteException, AppScaleException, InvalidArgException, MarathonEndpointException, MarathonLbEndpointException, MissingFieldException) logger = logging.getLogger('zdd') def query_yes_no(question, default="yes"): # Thanks stackoverflow: # https://stackoverflow.com/questions/3041986/python-command-line-yes-no-input """Ask a yes/no question via input() and return their answer. "question" is a string that is presented to the user. "default" is the presumed answer if the user just hits <Enter>. It must be "yes" (the default), "no" or None (meaning an answer is required of the user). The "answer" return value is True for "yes" or False for "no". 
""" valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} if default is None: prompt = " [y/n] " elif default == "yes": prompt = " [Y/n] " elif default == "no": prompt = " [y/N] " else: raise ValueError("invalid default answer: '%s'" % default) while True: sys.stdout.write(question + prompt) choice = input().lower() if default is not None and choice == '': return valid[default] elif choice in valid: return valid[choice] else: sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") def marathon_get_request(args, path): url = args.marathon + path try: response = requests.get(url, auth=get_marathon_auth_params(args)) response.raise_for_status() except requests.exceptions.RequestException: raise MarathonEndpointException( "Error while querying marathon", url, traceback.format_exc()) return response def list_marathon_apps(args): response = marathon_get_request(args, "/v2/apps") return response.json()['apps'] def fetch_marathon_app(args, app_id): response = marathon_get_request(args, "/v2/apps" + app_id) return response.json()['app'] def _get_alias_records(hostname): """Return all IPv4 A records for a given hostname """ return socket.gethostbyname_ex(hostname)[2] def _unparse_url_alias(url, addr): """Reassemble a url object into a string but with a new address """ return urllib.parse.urlunparse((url[0], addr + ":" + str(url.port), url[2], url[3], url[4], url[5])) def get_marathon_lb_urls(args): """Return a list of urls for all Aliases of the marathon_lb url passed in as an argument """ url = urllib.parse.urlparse(args.marathon_lb) addrs = _get_alias_records(url.hostname) return [_unparse_url_alias(url, addr) for addr in addrs] def fetch_haproxy_pids(haproxy_url): try: response = requests.get(haproxy_url + "/_haproxy_getpids") response.raise_for_status() except requests.exceptions.RequestException: logger.exception("Caught exception when retrieving HAProxy" " pids from " + haproxy_url) raise return response.text.split() def 
check_haproxy_reloading(haproxy_url): """Return False if haproxy has only one pid, it is not reloading. Return True if we catch an exception while making a request to haproxy or if more than one pid is returned """ try: pids = fetch_haproxy_pids(haproxy_url) except requests.exceptions.RequestException: # Assume reloading on any error, this should be caught with a timeout return True if len(pids) > 1: logger.info("Waiting for {} pids on {}".format(len(pids), haproxy_url)) return True return False def any_marathon_lb_reloading(marathon_lb_urls): return any([check_haproxy_reloading(url) for url in marathon_lb_urls]) def fetch_haproxy_stats(haproxy_url): try: response = requests.get(haproxy_url + "/haproxy?stats;csv") response.raise_for_status() except requests.exceptions.RequestException: logger.exception("Caught exception when retrieving HAProxy" " stats from " + haproxy_url) raise return response.text def fetch_combined_haproxy_stats(marathon_lb_urls): raw = ''.join([fetch_haproxy_stats(url) for url in marathon_lb_urls]) return parse_haproxy_stats(raw) def parse_haproxy_stats(csv_data): rows = csv_data.splitlines() headings = rows.pop(0).lstrip('# ').rstrip(',\n').split(',') csv_reader = csv.reader(rows, delimiter=',', quotechar="'") Row = namedtuple('Row', headings) return [Row(*row[0:-1]) for row in csv_reader if row[0][0] != '#'] def get_deployment_label(app): return get_deployment_group(app) + "_" + app['labels']['HAPROXY_0_PORT'] def _if_app_listener(app, listener): return (listener.pxname == get_deployment_label(app) and listener.svname not in ['BACKEND', 'FRONTEND']) def fetch_app_listeners(app, marathon_lb_urls): haproxy_stats = fetch_combined_haproxy_stats(marathon_lb_urls) return [l for l in haproxy_stats if _if_app_listener(app, l)] def waiting_for_listeners(new_app, old_app, listeners, haproxy_count): listener_count = (len(listeners) / haproxy_count) return listener_count != new_app['instances'] + old_app['instances'] def get_deployment_target(app): if 
'HAPROXY_DEPLOYMENT_TARGET_INSTANCES' in app['labels']: return int(app['labels']['HAPROXY_DEPLOYMENT_TARGET_INSTANCES']) else: return app['instances'] def get_new_instance_count(app): if 'HAPROXY_DEPLOYMENT_NEW_INSTANCES' in app['labels']: return int(app['labels']['HAPROXY_DEPLOYMENT_NEW_INSTANCES']) else: return 0 def waiting_for_up_listeners(app, listeners, haproxy_count): up_listeners = [l for l in listeners if l.status == 'UP'] up_listener_count = (len(up_listeners) / haproxy_count) return up_listener_count < get_deployment_target(app) def select_draining_listeners(listeners): return [l for l in listeners if l.status == 'MAINT'] def select_drained_listeners(listeners): draining_listeners = select_draining_listeners(listeners) return [l for l in draining_listeners if not _has_pending_requests(l)] def get_svnames_from_task(app, task): prefix = task['host'].replace('.', '_') task_ip, task_port = get_task_ip_and_ports(app, task) if task['host'] == task_ip: for port in task['ports']: yield('{}_{}'.format(prefix, port)) else: for port in task['ports']: yield('{}_{}_{}'.format(prefix, task_ip.replace('.', '_'), port)) def get_svnames_from_tasks(app, tasks): svnames = [] for task in tasks: svnames += get_svnames_from_task(app, task) return svnames def _has_pending_requests(listener): return int(listener.qcur or 0) > 0 or int(listener.scur or 0) > 0 def is_hybrid_deployment(args, app): if (get_new_instance_count(app) != 0 and not args.complete_cur and not args.complete_prev): return True else: return False def find_drained_task_ids(app, listeners, haproxy_count): """Return app tasks which have all haproxy listeners down and draining of any pending sessions or connections """ tasks = zip(get_svnames_from_tasks(app, app['tasks']), app['tasks']) drained_listeners = select_drained_listeners(listeners) drained_task_ids = [] for svname, task in tasks: task_listeners = [l for l in drained_listeners if l.svname == svname] if len(task_listeners) == haproxy_count: 
drained_task_ids.append(task['id']) return drained_task_ids def find_draining_task_ids(app, listeners, haproxy_count): """Return app tasks which have all haproxy listeners draining """ tasks = zip(get_svnames_from_tasks(app, app['tasks']), app['tasks']) draining_listeners = select_draining_listeners(listeners) draining_task_ids = [] for svname, task in tasks: task_listeners = [l for l in draining_listeners if l.svname == svname] if len(task_listeners) == haproxy_count: draining_task_ids.append(task['id']) return draining_task_ids def max_wait_not_exceeded(max_wait, timestamp): return time.time() - timestamp < max_wait def find_tasks_to_kill(args, new_app, old_app, timestamp): marathon_lb_urls = get_marathon_lb_urls(args) haproxy_count = len(marathon_lb_urls) try: listeners = fetch_app_listeners(new_app, marathon_lb_urls) except requests.exceptions.RequestException: raise MarathonLbEndpointException( "Error while querying Marathon-LB", marathon_lb_urls, traceback.format_exc()) while max_wait_not_exceeded(args.max_wait, timestamp): time.sleep(args.step_delay) logger.info("Existing app running {} instances, " "new app running {} instances" .format(old_app['instances'], new_app['instances'])) if any_marathon_lb_reloading(marathon_lb_urls): continue try: listeners = fetch_app_listeners(new_app, marathon_lb_urls)
# Restart loop if we hit an exception while loading listeners, # this may be normal behaviour continue logger.info("Found {} app listeners across {} HAProxy instances" .format(len(listeners), haproxy_count)) if waiting_for_listeners(new_app, old_app, listeners, haproxy_count): continue if waiting_for_up_listeners(new_app, listeners, haproxy_count): continue if waiting_for_drained_listeners(listeners): continue return find_drained_task_ids(old_app, listeners, haproxy_count) logger.info('Timed out waiting for tasks to fully drain, find any draining' ' tasks and continue with deployment...') return find_draining_task_ids(old_app, listeners, haproxy_count) def deployment_in_progress(app): return len(app['deployments']) > 0 def execute_pre_kill_hook(args, old_app, tasks_to_kill, new_app): if args.pre_kill_hook is not None: logger.info("Calling pre-kill hook '{}'".format(args.pre_kill_hook)) subprocess.check_call([args.pre_kill_hook, json.dumps(old_app), json.dumps(tasks_to_kill), json.dumps(new_app)]) def swap_zdd_apps(args, new_app, old_app): func_args = (args, new_app, old_app) while True: res = _swap_zdd_apps(func_args[0], func_args[1], func_args[2]) if isinstance(res, bool): return res func_args = res def _swap_zdd_apps(args, new_app, old_app): old_app = fetch_marathon_app(args, old_app['id']) new_app = fetch_marathon_app(args, new_app['id']) if deployment_in_progress(new_app): time.sleep(args.step_delay) return args, new_app, old_app tasks_to_kill = find_tasks_to_kill(args, new_app, old_app, time.time()) if ready_to_delete_old_app(args, new_app, old_app, tasks_to_kill): return safe_delete_app(args, old_app, new_app) if len(tasks_to_kill) > 0: execute_pre_kill_hook(args, old_app, tasks_to_kill, new_app) logger.info("There are {} draining listeners, " "about to kill the following tasks:\n - {}" .format(len(tasks_to_kill), "\n - ".join(tasks_to_kill))) if args.force or query_yes_no("Continue?"): logger.info("Scaling down old app by {} instances" 
.format(len(tasks_to_kill))) kill_marathon_tasks(args, tasks_to_kill) else: return False if is_hybrid_deployment(args, new_app): if new_app['instances'] < get_new_instance_count(new_app): scale_new_app_instances(args, new_app, old_app) else: if new_app['instances'] < get_deployment_target(new_app): scale_new_app_instances(args, new_app, old_app) return (args, new_app, old_app) def ready_to_delete_old_app(args, new_app, old_app, draining_task_ids): new_instances = get_new_instance_count(new_app) if is_hybrid_deployment(args, new_app): return (int(new_app['instances']) == new_instances and int(old_app['instances']) == ( get_deployment_target(old_app) - new_instances)) else: return (int(new_app['instances']) == get_deployment_target(new_app) and len(draining_task_ids) == int(old_app['instances'])) def waiting_for_drained_listeners(listeners): return len(select_drained_listeners(listeners)) < 1 def scale_new_app_instances(args, new_app, old_app): """Scale the app by 50% of its existing instances until we meet or surpass instances deployed for old_app. 
At which point go right to the new_app deployment target """ instances = (math.floor(new_app['instances'] + (new_app['instances'] + 1) / 2)) if is_hybrid_deployment(args, new_app): if instances > get_new_instance_count(new_app): instances = get_new_instance_count(new_app) else: if instances >= old_app['instances']: instances = get_deployment_target(new_app) logger.info("Scaling new app up to {} instances".format(instances)) return scale_marathon_app_instances(args, new_app, instances) def safe_delete_app(args, app, new_app): if is_hybrid_deployment(args, new_app): logger.info("Not deleting old app, as its hybrid configuration") return True else: logger.info("About to delete old app {}".format(app['id'])) if args.force or query_yes_no("Continue?"): delete_marathon_app(args, app) return True else: return False def delete_marathon_app(args, app): url = args.marathon + '/v2/apps' + app['id'] try: response = requests.delete(url, auth=get_marathon_auth_params(args)) response.raise_for_status() except requests.exceptions.RequestException: raise AppDeleteException( "Error while deleting the app", url, traceback.format_exc()) return response def kill_marathon_tasks(args, ids): data = json.dumps({'ids': ids}) url = args.marathon + "/v2/tasks/delete?scale=true" headers = {'Content-Type': 'application/json'} try: response = requests.post(url, headers=headers, data=data, auth=get_marathon_auth_params(args)) response.raise_for_status() except requests.exceptions.RequestException: # This is App Scale Down, so raising AppScale Exception raise AppScaleException( "Error while scaling the app", url, data, traceback.format_exc()) return response def scale_marathon_app_instances(args, app, instances): url = args.marathon + "/v2/apps" + app['id'] data = json.dumps({'instances': instances}) headers = {'Content-Type': 'application/json'} try: response = requests.put(url, headers=headers, data=data, auth=get_marathon_auth_params(args)) response.raise_for_status() except 
requests.exceptions.RequestException: # This is App Scale Up, so raising AppScale Exception raise AppScaleException( "Error while scaling the app", url, data, traceback.format_exc()) return response def deploy_marathon_app(args, app): url = args.marathon + "/v2/apps" data = json.dumps(app) headers = {'Content-Type': 'application/json'} try: response = requests.post(url, headers=headers, data=data, auth=get_marathon_auth_params(args)) response.raise_for_status() except requests.exceptions.RequestException: raise AppCreateException( "Error while creating the app", url, data, traceback.format_exc()) return response def get_service_port(app): try: return \ int(app['container']['docker']['portMappings'][0]['servicePort']) except KeyError: try: return \ int(app['portDefinitions'][0]['port']) except KeyError: return int(app['ports'][0]) def set_service_port(app, servicePort): try: app['container']['docker']['portMappings'][0]['servicePort'] = \ int(servicePort) except KeyError: app['ports'][0] = int(servicePort) return app def validate_app(app): if app['id'] is None: raise MissingFieldException("App doesn't contain a valid App ID", 'id') if 'labels' not in app: raise MissingFieldException("No labels found. 
Please define the" " HAPROXY_DEPLOYMENT_GROUP label", 'label') if 'HAPROXY_DEPLOYMENT_GROUP' not in app['labels']: raise MissingFieldException("Please define the " "HAPROXY_DEPLOYMENT_GROUP label", 'HAPROXY_DEPLOYMENT_GROUP') if 'HAPROXY_DEPLOYMENT_ALT_PORT' not in app['labels']: raise MissingFieldException("Please define the " "HAPROXY_DEPLOYMENT_ALT_PORT label", 'HAPROXY_DEPLOYMENT_ALT_PORT') def set_app_ids(app, colour): app['labels']['HAPROXY_APP_ID'] = app['id'] app['id'] = app['id'] + '-' + colour if app['id'][0] != '/': app['id'] = '/' + app['id'] return app def set_service_ports(app, servicePort): app['labels']['HAPROXY_0_PORT'] = str(get_service_port(app)) try: app['container']['docker']['portMappings'][0]['servicePort'] = \ int(servicePort) return app except KeyError: app['ports'][0] = int(servicePort) return app def select_next_port(app): alt_port = int(app['labels']['HAPROXY_DEPLOYMENT_ALT_PORT']) if int(app['ports'][0]) == alt_port: return int(app['labels']['HAPROXY_0_PORT']) else: return alt_port def select_next_colour(app): if app['labels'].get('HAPROXY_DEPLOYMENT_COLOUR') == 'blue': return 'green' else: return 'blue' def sort_deploys(apps): return sorted(apps, key=lambda a: a.get('labels', {}) .get('HAPROXY_DEPLOYMENT_STARTED_AT', '0')) def select_last_deploy(apps): return sort_deploys(apps).pop() def select_last_two_deploys(apps): return sort_deploys(apps)[:-3:-1] def get_deployment_group(app): return app.get('labels', {}).get('HAPROXY_DEPLOYMENT_GROUP') def fetch_previous_deploys(args, app): apps = list_marathon_apps(args) app_deployment_group = get_deployment_group(app) return [a for a in apps if get_deployment_group(a) == app_deployment_group] def prepare_deploy(args, previous_deploys, app): """ Return a blue or a green version of `app` based on preexisting deploys """ if len(previous_deploys) > 0: last_deploy = select_last_deploy(previous_deploys) next_colour = select_next_colour(last_deploy) next_port = select_next_port(last_deploy) 
deployment_target_instances = last_deploy['instances'] if args.new_instances > deployment_target_instances: args.new_instances = deployment_target_instances if args.new_instances and args.new_instances > 0: if args.initial_instances > args.new_instances: app['instances'] = args.new_instances else: app['instances'] = args.initial_instances else: if args.initial_instances > deployment_target_instances: app['instances'] = deployment_target_instances else: app['instances'] = args.initial_instances app['labels']['HAPROXY_DEPLOYMENT_NEW_INSTANCES'] = str( args.new_instances) else: next_colour = 'blue' next_port = get_service_port(app) deployment_target_instances = app['instances'] app['labels']['HAPROXY_DEPLOYMENT_NEW_INSTANCES'] = "0" app = set_app_ids(app, next_colour) app = set_service_ports(app, next_port) app['labels']['HAPROXY_DEPLOYMENT_TARGET_INSTANCES'] = \ str(deployment_target_instances) app['labels']['HAPROXY_DEPLOYMENT_COLOUR'] = next_colour app['labels']['HAPROXY_DEPLOYMENT_STARTED_AT'] = datetime.now().isoformat() return app def load_app_json(args): with open(args.json) as content_file: return json.load(content_file) def safe_resume_deploy(args, previous_deploys): if args.complete_cur: logger.info("Converting all instances to current config") new_app, old_app = select_last_two_deploys(previous_deploys) logger.info("Current config color is %s" % new_app[ 'labels']['HAPROXY_DEPLOYMENT_COLOUR']) logger.info("Considering %s color as existing app" % old_app['labels']['HAPROXY_DEPLOYMENT_COLOUR'] + " and %s color as new app" % new_app['labels']['HAPROXY_DEPLOYMENT_COLOUR']) return swap_zdd_apps(args, new_app, old_app) elif args.complete_prev: logger.info("Converting all instances to previous config") old_app, new_app = select_last_two_deploys(previous_deploys) logger.info("Previous config color is %s" % new_app[ 'labels']['HAPROXY_DEPLOYMENT_COLOUR']) logger.info("Considering %s color as existing app" % old_app['labels']['HAPROXY_DEPLOYMENT_COLOUR'] + " and %s 
color as new app" % new_app['labels']['HAPROXY_DEPLOYMENT_COLOUR']) return swap_zdd_apps(args, new_app, old_app) elif args.resume: logger.info("Found previous deployment, resuming") new_app, old_app = select_last_two_deploys(previous_deploys) return swap_zdd_apps(args, new_app, old_app) else: raise Exception("There appears to be an" " existing deployment in progress") def do_zdd(args, out=sys.stdout): app = load_app_json(args) validate_app(app) previous_deploys = fetch_previous_deploys(args, app) if len(previous_deploys) > 1: # There is a stuck deploy or hybrid deploy return safe_resume_deploy(args, previous_deploys) if args.complete_cur or args.complete_prev: raise InvalidArgException("Cannot use --complete-cur, --complete-prev" " flags when config is not hybrid") new_app = prepare_deploy(args, previous_deploys, app) logger.info('Final app definition:') out.write(json.dumps(new_app, sort_keys=True, indent=2)) out.write("\n") if args.dry_run: return True if args.force or query_yes_no("Continue with deployment?"): deploy_marathon_app(args, new_app) if len(previous_deploys) == 0: # This was the first deploy, nothing to swap return True else: # This is a standard blue/green deploy, swap new app with old old_app = select_last_deploy(previous_deploys) return swap_zdd_apps(args, new_app, old_app) def get_arg_parser(): parser = argparse.ArgumentParser( description="Zero-downtime deployment orchestrator for marathon-lb", formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument("--longhelp", help="Print out configuration details", action="store_true" ) parser.add_argument("--marathon", "-m", help="[required] Marathon endpoint, eg. -m " + "http://marathon1:8080" ) parser.add_argument("--marathon-lb", "-l", help="[required] Marathon-lb stats endpoint, eg. 
-l " + "http://marathon-lb.marathon.mesos:9090" ) parser.add_argument("--json", "-j", help="[required] App JSON" ) parser.add_argument("--dry-run", "-d", help="Perform a dry run", action="store_true" ) parser.add_argument("--force", "-f", help="Perform deployment un-prompted", action="store_true" ) parser.add_argument("--step-delay", "-s", help="Delay (in seconds) between each successive" " deployment step", type=int, default=5 ) parser.add_argument("--initial-instances", "-i", help="Initial number of app instances to launch." " If this number is greater than total number of" " existing instances, then this will be overridden" " by the latter number", type=int, default=1 ) parser.add_argument("--resume", "-r", help="Resume from a previous deployment", action="store_true" ) parser.add_argument("--max-wait", "-w", help="Maximum amount of time (in seconds) to wait" " for HAProxy to drain connections", type=int, default=300 ) parser.add_argument("--new-instances", "-n", help="Number of new instances to replace the existing" " instances. This is for having instances of both blue" " and green at the same time", type=int, default=0) parser.add_argument("--complete-cur", "-c", help="Change hybrid app entirely to" " current (new) app's instances", action="store_true") parser.add_argument("--complete-prev", "-p", help="Change hybrid app entirely to" " previous (old) app's instances", action="store_true") parser.add_argument("--pre-kill-hook", help="A path to an executable (such as a script) " "which will be called before killing any tasks marked " "for draining at each step. The script will be called " "with 3 arguments (in JSON): the old app definition, " "the list of tasks which will be killed, " "and the new app definition. An exit " "code of 0 indicates the deploy may continue. " "If the hook returns a non-zero exit code, the deploy " "will stop, and an operator must intervene." 
) parser = set_logging_args(parser) parser = set_marathon_auth_args(parser) return parser def set_request_retries(): s = requests.Session() a = requests.adapters.HTTPAdapter(max_retries=3) s.mount('http://', a) def process_arguments(): # Process arguments arg_parser = get_arg_parser() args = arg_parser.parse_args() if args.longhelp: print(__doc__) sys.exit() # otherwise make sure that a Marathon URL was specified else: if args.marathon is None: arg_parser.error('argument --marathon/-m is required') if args.marathon_lb is None: arg_parser.error('argument --marathon-lb/-l is required') if args.json is None: arg_parser.error('argument --json/-j is required') return args if __name__ == '__main__': args = process_arguments() set_request_retries() setup_logging(logger, args.syslog_socket, args.log_format, args.log_level) try: if do_zdd(args): sys.exit(0) else: sys.exit(1) except Exception as e: if hasattr(e, 'zdd_exit_status'): if hasattr(e, 'error'): logger.exception(str(e.error)) else: logger.exception(traceback.print_exc()) sys.exit(e.zdd_exit_status) else: # For Unknown Exceptions logger.exception(traceback.print_exc()) sys.exit(2)
except requests.exceptions.RequestException:
bs-card-overlay.css.js
import { css } from 'lit-element'; export const BsCardOverlayCss = css` :host { position: absolute; top: 0;
} `;
right: 0; bottom: 0; left: 0; padding: 1.25rem;
inputmask.js
/* * Input Mask Core * http://github.com/RobinHerbots/jquery.inputmask * Copyright (c) 2010 - Robin Herbots * Licensed under the MIT license (http://www.opensource.org/licenses/mit-license.php) * Version: 0.0.0-dev */ (function (factory) { if (typeof define === "function" && define.amd) { define(["./dependencyLibs/inputmask.dependencyLib", "./global/window"], factory); } else if (typeof exports === "object") { module.exports = factory(require("./dependencyLibs/inputmask.dependencyLib"), require("./global/window")); } else { window.Inputmask = factory(window.dependencyLib || jQuery, window); } } (function ($, window, undefined) { var document = window.document, ua = navigator.userAgent, ie = (ua.indexOf('MSIE ') > 0) || (ua.indexOf('Trident/') > 0), mobile = isInputEventSupported("touchstart"), //not entirely correct but will currently do iemobile = /iemobile/i.test(ua), iphone = /iphone/i.test(ua) && !iemobile; function Inputmask(alias, options, internal) { //allow instanciating without new if (!(this instanceof Inputmask)) { return new Inputmask(alias, options, internal); } this.el = undefined; this.events = {}; this.maskset = undefined; this.refreshValue = false; //indicate a refresh from the inputvalue is needed (form.reset) if (internal !== true) { //init options if ($.isPlainObject(alias)) { options = alias; } else { options = options || {}; if (alias) options.alias = alias; } this.opts = $.extend(true, {}, this.defaults, options); this.noMasksCache = options && options.definitions !== undefined; this.userOptions = options || {}; //user passed options this.isRTL = this.opts.numericInput; resolveAlias(this.opts.alias, options, this.opts); } } Inputmask.prototype = { dataAttribute: "data-inputmask", //data attribute prefix used for attribute binding //options default defaults: { placeholder: "_", optionalmarker: ["[", "]"], quantifiermarker: ["{", "}"], groupmarker: ["(", ")"], alternatormarker: "|", escapeChar: "\\", mask: null, //needs tobe null instead of 
undefined as the extend method does not consider props with the undefined value regex: null, //regular expression as a mask oncomplete: $.noop, //executes when the mask is complete onincomplete: $.noop, //executes when the mask is incomplete and focus is lost oncleared: $.noop, //executes when the mask is cleared repeat: 0, //repetitions of the mask: * ~ forever, otherwise specify an integer greedy: false, //true: allocated buffer for the mask and repetitions - false: allocate only if needed autoUnmask: false, //automatically unmask when retrieving the value with $.fn.val or value if the browser supports __lookupGetter__ or getOwnPropertyDescriptor removeMaskOnSubmit: false, //remove the mask before submitting the form. clearMaskOnLostFocus: true, insertMode: true, //insert the input or overwrite the input clearIncomplete: false, //clear the incomplete input on blur alias: null, onKeyDown: $.noop, //callback to implement autocomplete on certain keys for example. args => event, buffer, caretPos, opts onBeforeMask: null, //executes before masking the initial value to allow preprocessing of the initial value. args => initialValue, opts => return processedValue onBeforePaste: function (pastedValue, opts) { return $.isFunction(opts.onBeforeMask) ? opts.onBeforeMask.call(this, pastedValue, opts) : pastedValue; }, //executes before masking the pasted value to allow preprocessing of the pasted value. args => pastedValue, opts => return processedValue onBeforeWrite: null, //executes before writing to the masked element. args => event, opts onUnMask: null, //executes after unmasking to allow postprocessing of the unmaskedvalue. args => maskedValue, unmaskedValue, opts showMaskOnFocus: true, //show the mask-placeholder when the input has focus showMaskOnHover: true, //show the mask-placeholder when hovering the empty input onKeyValidation: $.noop, //executes on every key-press with the result of isValid. 
Params: key, result, opts skipOptionalPartCharacter: " ", //a character which can be used to skip an optional part of a mask numericInput: false, //numericInput input direction style (input shifts to the left while holding the caret position) rightAlign: false, //align to the right undoOnEscape: true, //pressing escape reverts the value to the value before focus //numeric basic properties radixPoint: "", //".", // | "," _radixDance: false, //dance around the radixPoint groupSeparator: "", //",", // | "." //numeric basic properties keepStatic: null, //try to keep the mask static while typing. Decisions to alter the mask will be posponed if possible - null see auto selection for multi masks positionCaretOnTab: true, //when enabled the caret position is set after the latest valid position on TAB tabThrough: false, //allows for tabbing through the different parts of the masked field supportsInputType: ["text", "tel", "password", "search"], //list with the supported input types //specify keyCodes which should not be considered in the keypress event, otherwise the preventDefault will stop their default behavior especially in FF ignorables: [8, 9, 13, 19, 27, 33, 34, 35, 36, 37, 38, 39, 40, 45, 46, 93, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 0, 229], isComplete: null, //override for isComplete - args => buffer, opts - return true || false preValidation: null, //hook to preValidate the input. Usefull for validating regardless the definition. args => buffer, pos, char, isSelection, opts => return true/false/command object postValidation: null, //hook to postValidate the result from isValid. Usefull for validating the entry as a whole. 
args => buffer, pos, currentResult, opts => return true/false/json staticDefinitionSymbol: undefined, //specify a definitionSymbol for static content, used to make matches for alternators jitMasking: false, //just in time masking ~ only mask while typing, can n (number), true or false nullable: true, //return nothing instead of the buffertemplate when the user hasn't entered anything. inputEventOnly: false, //dev option - testing inputfallback behavior noValuePatching: false, //disable value property patching positionCaretOnClick: "lvp", //none, lvp (based on the last valid position (default), radixFocus (position caret to radixpoint on initial click), select (select the whole input), ignore (ignore the click and continue the mask) casing: null, //mask-level casing. Options: null, "upper", "lower" or "title" or callback args => elem, test, pos, validPositions return charValue inputmode: "verbatim", //specify the inputmode - already in place for when browsers will support it colorMask: false, //enable css styleable mask disablePredictiveText: false, //disable Predictive Text on mobile devices importDataAttributes: true, //import data-inputmask attributes shiftPositions: true //shift position of the mask entries on entry and deletion. }, definitions: { "9": { //\uFF11-\uFF19 #1606 validator: "[0-9\uFF11-\uFF19]", definitionSymbol: "*" }, "a": { //\u0410-\u044F\u0401\u0451\u00C0-\u00FF\u00B5 #76 validator: "[A-Za-z\u0410-\u044F\u0401\u0451\u00C0-\u00FF\u00B5]", definitionSymbol: "*" }, "*": { validator: "[0-9\uFF11-\uFF19A-Za-z\u0410-\u044F\u0401\u0451\u00C0-\u00FF\u00B5]" } }, aliases: {}, //aliases definitions masksCache: {}, mask: function (elems) { var that = this; function importAttributeOptions(npt, opts, userOptions, dataAttribute) { if (opts.importDataAttributes === true) { var attrOptions = npt.getAttribute(dataAttribute), option, dataoptions, optionData, p; function importOption(option, optionData) { optionData = optionData !== undefined ? 
optionData : npt.getAttribute(dataAttribute + "-" + option); if (optionData !== null) { if (typeof optionData === "string") { if (option.indexOf("on") === 0) optionData = window[optionData]; //get function definition else if (optionData === "false") optionData = false; else if (optionData === "true") optionData = true; } userOptions[option] = optionData; } } if (attrOptions && attrOptions !== "") { attrOptions = attrOptions.replace(/'/g, '"'); dataoptions = JSON.parse("{" + attrOptions + "}"); } //resolve aliases if (dataoptions) { //pickup alias from dataAttribute optionData = undefined; for (p in dataoptions) { if (p.toLowerCase() === "alias") { optionData = dataoptions[p]; break; } } } importOption("alias", optionData); //pickup alias from dataAttribute-alias if (userOptions.alias) { resolveAlias(userOptions.alias, userOptions, opts); } for (option in opts) { if (dataoptions) { optionData = undefined; for (p in dataoptions) { if (p.toLowerCase() === option.toLowerCase()) { optionData = dataoptions[p]; break; } } } importOption(option, optionData); } } $.extend(true, opts, userOptions); //handle dir=rtl if (npt.dir === "rtl" || opts.rightAlign) { npt.style.textAlign = "right"; } if (npt.dir === "rtl" || opts.numericInput) { npt.dir = "ltr"; npt.removeAttribute("dir"); opts.isRTL = true; } return Object.keys(userOptions).length; } if (typeof elems === "string") { elems = document.getElementById(elems) || document.querySelectorAll(elems); } elems = elems.nodeName ? 
[elems] : elems; $.each(elems, function (ndx, el) { var scopedOpts = $.extend(true, {}, that.opts); if (importAttributeOptions(el, scopedOpts, $.extend(true, {}, that.userOptions), that.dataAttribute)) { var maskset = generateMaskSet(scopedOpts, that.noMasksCache); if (maskset !== undefined) { if (el.inputmask !== undefined) { el.inputmask.opts.autoUnmask = true; //force autounmasking when remasking el.inputmask.remove(); } //store inputmask instance on the input with element reference el.inputmask = new Inputmask(undefined, undefined, true); el.inputmask.opts = scopedOpts; el.inputmask.noMasksCache = that.noMasksCache; el.inputmask.userOptions = $.extend(true, {}, that.userOptions); el.inputmask.isRTL = scopedOpts.isRTL || scopedOpts.numericInput; el.inputmask.el = el; el.inputmask.maskset = maskset; $.data(el, "_inputmask_opts", scopedOpts); maskScope.call(el.inputmask, { "action": "mask" }); } } }); return elems && elems[0] ? (elems[0].inputmask || this) : this; }, option: function (options, noremask) { //set extra options || retrieve value of a current option if (typeof options === "string") { return this.opts[options]; } else if (typeof options === "object") { $.extend(this.userOptions, options); //user passed options //remask if (this.el && noremask !== true) { this.mask(this.el); } return this; } }, unmaskedvalue: function (value) { this.maskset = this.maskset || generateMaskSet(this.opts, this.noMasksCache); return maskScope.call(this, { "action": "unmaskedvalue", "value": value }); }, remove: function () { return maskScope.call(this, { "action": "remove" }); }, getemptymask: function () { //return the default (empty) mask value, usefull for setting the default value in validation this.maskset = this.maskset || generateMaskSet(this.opts, this.noMasksCache); return maskScope.call(this, { "action": "getemptymask" }); }, hasMaskedValue: function () { //check wheter the returned value is masked or not; currently only works reliable when using jquery.val fn to 
retrieve the value return !this.opts.autoUnmask; }, isComplete: function () { this.maskset = this.maskset || generateMaskSet(this.opts, this.noMasksCache); return maskScope.call(this, { "action": "isComplete" }); }, getmetadata: function () { //return mask metadata if exists this.maskset = this.maskset || generateMaskSet(this.opts, this.noMasksCache); return maskScope.call(this, { "action": "getmetadata" }); }, isValid: function (value) { this.maskset = this.maskset || generateMaskSet(this.opts, this.noMasksCache); return maskScope.call(this, { "action": "isValid", "value": value }); }, format: function (value, metadata) { this.maskset = this.maskset || generateMaskSet(this.opts, this.noMasksCache); return maskScope.call(this, { "action": "format", "value": value, "metadata": metadata //true/false getmetadata }); }, setValue: function (value) { if (this.el) { $(this.el).trigger("setvalue", [value]); } }, analyseMask: function (mask, regexMask, opts) { var tokenizer = /(?:[?*+]|\{[0-9\+\*]+(?:,[0-9\+\*]*)?(?:\|[0-9\+\*]*)?\})|[^.?*+^${[]()|\\]+|./g, //Thx to https://github.com/slevithan/regex-colorizer for the regexTokenizer regex regexTokenizer = /\[\^?]?(?:[^\\\]]+|\\[\S\s]?)*]?|\\(?:0(?:[0-3][0-7]{0,2}|[4-7][0-7]?)?|[1-9][0-9]*|x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|c[A-Za-z]|[\S\s]?)|\((?:\?[:=!]?)?|(?:[?*+]|\{[0-9]+(?:,[0-9]*)?\})\??|[^.?*+^${[()|\\]+|./g, escaped = false, currentToken = new MaskToken(), match, m, openenings = [], maskTokens = [], openingToken, currentOpeningToken, alternator, lastMatch, groupToken; function MaskToken(isGroup, isOptional, isQuantifier, isAlternator) { this.matches = []; this.openGroup = isGroup || false; this.alternatorGroup = false; this.isGroup = isGroup || false; this.isOptional = isOptional || false; this.isQuantifier = isQuantifier || false; this.isAlternator = isAlternator || false; this.quantifier = { min: 1, max: 1 }; } //test definition => {fn: RegExp/function, optionality: bool, newBlockMarker: bool, casing: 
null/upper/lower, def: definitionSymbol, placeholder: placeholder, mask: real maskDefinition} function insertTestDefinition(mtoken, element, position) { position = position !== undefined ? position : mtoken.matches.length; var prevMatch = mtoken.matches[position - 1]; if (regexMask) { if (element.indexOf("[") === 0 || (escaped && /\\d|\\s|\\w]/i.test(element)) || element === ".") { mtoken.matches.splice(position++, 0, { fn: new RegExp(element, opts.casing ? "i" : ""), optionality: false, newBlockMarker: prevMatch === undefined ? "master" : prevMatch.def !== element, casing: null, def: element, placeholder: undefined, nativeDef: element }); } else { if (escaped) element = element[element.length - 1]; $.each(element.split(""), function (ndx, lmnt) { prevMatch = mtoken.matches[position - 1]; mtoken.matches.splice(position++, 0, { fn: null, optionality: false, newBlockMarker: prevMatch === undefined ? "master" : (prevMatch.def !== lmnt && prevMatch.fn !== null), casing: null, def: opts.staticDefinitionSymbol || lmnt, placeholder: opts.staticDefinitionSymbol !== undefined ? lmnt : undefined, nativeDef: (escaped ? "'" : "") + lmnt }); }); } escaped = false; } else { var maskdef = (opts.definitions ? opts.definitions[element] : undefined) || Inputmask.prototype.definitions[element]; if (maskdef && !escaped) { mtoken.matches.splice(position++, 0, { fn: maskdef.validator ? typeof maskdef.validator == "string" ? new RegExp(maskdef.validator, opts.casing ? "i" : "") : new function () { this.test = maskdef.validator; } : new RegExp("."), optionality: false, newBlockMarker: prevMatch === undefined ? "master" : prevMatch.def !== (maskdef.definitionSymbol || element), casing: maskdef.casing, def: maskdef.definitionSymbol || element, placeholder: maskdef.placeholder, nativeDef: element }); } else { mtoken.matches.splice(position++, 0, { fn: null, optionality: false, newBlockMarker: prevMatch === undefined ? 
"master" : (prevMatch.def !== element && prevMatch.fn !== null), casing: null, def: opts.staticDefinitionSymbol || element, placeholder: opts.staticDefinitionSymbol !== undefined ? element : undefined, nativeDef: (escaped ? "'" : "") + element }); escaped = false; } } } function verifyGroupMarker(maskToken) { if (maskToken && maskToken.matches) { $.each(maskToken.matches, function (ndx, token) { var nextToken = maskToken.matches[ndx + 1]; if ((nextToken === undefined || (nextToken.matches === undefined || nextToken.isQuantifier === false)) && token && token.isGroup) { //this is not a group but a normal mask => convert token.isGroup = false; if (!regexMask) { insertTestDefinition(token, opts.groupmarker[0], 0); if (token.openGroup !== true) { insertTestDefinition(token, opts.groupmarker[1]); } } } verifyGroupMarker(token); }); } } function defaultCase() { if (openenings.length > 0) { currentOpeningToken = openenings[openenings.length - 1]; insertTestDefinition(currentOpeningToken, m); if (currentOpeningToken.isAlternator) { //handle alternator a | b case alternator = openenings.pop(); for (var mndx = 0; mndx < alternator.matches.length; mndx++) { if (alternator.matches[mndx].isGroup) alternator.matches[mndx].isGroup = false; //don't mark alternate groups as group } if (openenings.length > 0) { currentOpeningToken = openenings[openenings.length - 1]; currentOpeningToken.matches.push(alternator); } else { currentToken.matches.push(alternator); } } } else { insertTestDefinition(currentToken, m); } } function reverseTokens(maskToken) { function reverseStatic(st) { if (st === opts.optionalmarker[0]) st = opts.optionalmarker[1]; else if (st === opts.optionalmarker[1]) st = opts.optionalmarker[0]; else if (st === opts.groupmarker[0]) st = opts.groupmarker[1]; else if (st === opts.groupmarker[1]) st = opts.groupmarker[0]; return st; } maskToken.matches = maskToken.matches.reverse(); for (var match in maskToken.matches) { if (maskToken.matches.hasOwnProperty(match)) { var 
intMatch = parseInt(match); if (maskToken.matches[match].isQuantifier && maskToken.matches[intMatch + 1] && maskToken.matches[intMatch + 1].isGroup) { //reposition quantifier var qt = maskToken.matches[match]; maskToken.matches.splice(match, 1); maskToken.matches.splice(intMatch + 1, 0, qt); } if (maskToken.matches[match].matches !== undefined) { maskToken.matches[match] = reverseTokens(maskToken.matches[match]); } else { maskToken.matches[match] = reverseStatic(maskToken.matches[match]); } } } return maskToken; } function groupify(matches) { var groupToken = new MaskToken(true); groupToken.openGroup = false; groupToken.matches = matches; return groupToken; } if (regexMask) { opts.optionalmarker[0] = undefined; opts.optionalmarker[1] = undefined; } while (match = regexMask ? regexTokenizer.exec(mask) : tokenizer.exec(mask)) { m = match[0]; if (regexMask) { switch (m.charAt(0)) { //Quantifier case "?": m = "{0,1}"; break; case "+": case "*": m = "{" + m + "}"; break; } } if (escaped) { defaultCase(); continue; } switch (m.charAt(0)) { case "(?=": //lookahead break; case "(?!": //negative lookahead break; case "(?<=": //lookbehind break; case "(?<!": //negative lookbehind break; case opts.escapeChar: escaped = true; if (regexMask) { defaultCase(); } break; case opts.optionalmarker[1]: // optional closing case opts.groupmarker[1]: // Group closing openingToken = openenings.pop(); openingToken.openGroup = false; //mark group as complete if (openingToken !== undefined) { if (openenings.length > 0) { currentOpeningToken = openenings[openenings.length - 1]; currentOpeningToken.matches.push(openingToken); if (currentOpeningToken.isAlternator) { //handle alternator (a) | (b) case alternator = openenings.pop(); for (var mndx = 0; mndx < alternator.matches.length; mndx++) { alternator.matches[mndx].isGroup = false; //don't mark alternate groups as group alternator.matches[mndx].alternatorGroup = false; } if (openenings.length > 0) { currentOpeningToken = 
openenings[openenings.length - 1]; currentOpeningToken.matches.push(alternator); } else { currentToken.matches.push(alternator); } } } else { currentToken.matches.push(openingToken); } } else defaultCase(); break; case opts.optionalmarker[0]: // optional opening openenings.push(new MaskToken(false, true)); break; case opts.groupmarker[0]: // Group opening openenings.push(new MaskToken(true)); break; case opts.quantifiermarker[0]: //Quantifier var quantifier = new MaskToken(false, false, true); m = m.replace(/[{}]/g, ""); var mqj = m.split("|"), mq = mqj[0].split(","), mq0 = isNaN(mq[0]) ? mq[0] : parseInt(mq[0]), mq1 = mq.length === 1 ? mq0 : (isNaN(mq[1]) ? mq[1] : parseInt(mq[1])); if (mq0 === "*" || mq0 === "+") { mq0 = mq1 === "*" ? 0 : 1; } quantifier.quantifier = { min: mq0, max: mq1, jit: mqj[1] }; var matches = openenings.length > 0 ? openenings[openenings.length - 1].matches : currentToken.matches; match = matches.pop(); if (match.isAlternator) { //handle quantifier in an alternation [0-9]{2}|[0-9]{3} matches.push(match); //push back alternator matches = match.matches; //remap target matches var groupToken = new MaskToken(true); var tmpMatch = matches.pop(); matches.push(groupToken); //push the group matches = groupToken.matches; match = tmpMatch; } if (!match.isGroup) { // if (regexMask && match.fn === null) { //why is this needed??? // if (match.def === ".") match.fn = new RegExp(match.def, opts.casing ? 
"i" : ""); // } match = groupify([match]); } matches.push(match); matches.push(quantifier); break; case opts.alternatormarker: function groupQuantifier(matches) { var lastMatch = matches.pop(); if (lastMatch.isQuantifier) { lastMatch = groupify([matches.pop(), lastMatch]); } return lastMatch; } if (openenings.length > 0) { currentOpeningToken = openenings[openenings.length - 1]; var subToken = currentOpeningToken.matches[currentOpeningToken.matches.length - 1]; if (currentOpeningToken.openGroup && //regexp alt syntax (subToken.matches === undefined || (subToken.isGroup === false && subToken.isAlternator === false))) { //alternations within group lastMatch = openenings.pop(); } else { lastMatch = groupQuantifier(currentOpeningToken.matches); } } else { lastMatch = groupQuantifier(currentToken.matches); } if (lastMatch.isAlternator) { openenings.push(lastMatch); } else { if (lastMatch.alternatorGroup) { alternator = openenings.pop(); lastMatch.alternatorGroup = false; } else { alternator = new MaskToken(false, false, false, true); } alternator.matches.push(lastMatch); openenings.push(alternator); if (lastMatch.openGroup) { //regexp alt syntax lastMatch.openGroup = false; var alternatorGroup = new MaskToken(true); alternatorGroup.alternatorGroup = true; openenings.push(alternatorGroup); } } break; default: defaultCase(); } } while (openenings.length > 0) { openingToken = openenings.pop(); currentToken.matches.push(openingToken); } if (currentToken.matches.length > 0) { verifyGroupMarker(currentToken); maskTokens.push(currentToken); } if (opts.numericInput || opts.isRTL) { reverseTokens(maskTokens[0]); } // console.log(JSON.stringify(maskTokens)); return maskTokens; } }; //apply defaults, definitions, aliases Inputmask.extendDefaults = function (options) { $.extend(true, Inputmask.prototype.defaults, options); }; Inputmask.extendDefinitions = function (definition) { $.extend(true, Inputmask.prototype.definitions, definition); }; Inputmask.extendAliases = function 
(alias) {
    $.extend(true, Inputmask.prototype.aliases, alias);
};
//static fn on inputmask
// Static one-shot helpers: each builds a throwaway Inputmask from `options`
// and delegates to the corresponding instance method.
Inputmask.format = function (value, options, metadata) {
    return Inputmask(options).format(value, metadata);
};
Inputmask.unmask = function (value, options) {
    return Inputmask(options).unmaskedvalue(value);
};
Inputmask.isValid = function (value, options) {
    return Inputmask(options).isValid(value);
};
// Remove the inputmask from the element(s) identified by id, selector,
// element or element list.
Inputmask.remove = function (elems) {
    if (typeof elems === "string") {
        elems = document.getElementById(elems) || document.querySelectorAll(elems);
    }
    elems = elems.nodeName ? [elems] : elems;
    $.each(elems, function (ndx, el) {
        if (el.inputmask) el.inputmask.remove();
    });
};
// Set a value on the element(s), routing through the mask when one is attached.
Inputmask.setValue = function (elems, value) {
    if (typeof elems === "string") {
        elems = document.getElementById(elems) || document.querySelectorAll(elems);
    }
    elems = elems.nodeName ? [elems] : elems;
    $.each(elems, function (ndx, el) {
        if (el.inputmask) el.inputmask.setValue(value);
        else $(el).trigger("setvalue", [value]);
    });
};
// Escape regex metacharacters in a literal string so it can be embedded in a RegExp.
Inputmask.escapeRegex = function (str) {
    var specials = ["/", ".", "*", "+", "?", "|", "(", ")", "[", "]", "{", "}", "\\", "$", "^"];
    return str.replace(new RegExp("(\\" + specials.join("|\\") + ")", "gim"), "\\$1");
};
// Keyboard key codes used throughout the masking scope.
Inputmask.keyCode = {
    BACKSPACE: 8,
    BACKSPACE_SAFARI: 127,
    DELETE: 46,
    DOWN: 40,
    END: 35,
    ENTER: 13,
    ESCAPE: 27,
    HOME: 36,
    INSERT: 45,
    LEFT: 37,
    PAGE_DOWN: 34,
    PAGE_UP: 33,
    RIGHT: 39,
    SPACE: 32,
    TAB: 9,
    UP: 38,
    X: 88,
    CONTROL: 17
};
Inputmask.dependencyLib = $;
// Resolve a named alias into `opts`: merges the alias definition (recursively,
// since an alias may itself point at another alias), then re-applies the
// caller's explicit options on top. Returns true when the alias was found;
// otherwise falls back to treating the string as a literal mask.
function resolveAlias(aliasStr, options, opts) {
    var aliasDefinition = Inputmask.prototype.aliases[aliasStr];
    if (aliasDefinition) {
        if (aliasDefinition.alias) resolveAlias(aliasDefinition.alias, undefined, opts); //alias is another alias
        $.extend(true, opts, aliasDefinition); //merge alias definition in the options
        $.extend(true, opts, options); //reapply extra given options
        return true;
    } else //alias not found - try as mask
        if (opts.mask === null) {
            opts.mask = aliasStr;
        }
    return false;
}
function
generateMaskSet(opts, nocache) {
    // Build (or fetch from the prototype-level cache) a maskset definition
    // for the given mask string; `metadata` travels along for getmetadata().
    function generateMask(mask, metadata, opts) {
        var regexMask = false;
        if (mask === null || mask === "") {
            // No mask given: fall back to opts.regex, or to a match-anything mask.
            regexMask = opts.regex !== null;
            if (regexMask) {
                mask = opts.regex;
                // Strip surrounding ^...$ anchors — positions are matched incrementally.
                mask = mask.replace(/^(\^)(.*)(\$)$/, "$2");
            } else {
                regexMask = true;
                mask = ".*";
            }
        }
        if (mask.length === 1 && opts.greedy === false && opts.repeat !== 0) {
            opts.placeholder = "";
        } //hide placeholder with single non-greedy mask
        if (opts.repeat > 0 || opts.repeat === "*" || opts.repeat === "+") {
            // Rewrite the `repeat` option as an explicit group + quantifier,
            // e.g. mask "9" repeat 3 => "(9){3,3}" in marker syntax.
            var repeatStart = opts.repeat === "*" ? 0 : (opts.repeat === "+" ? 1 : opts.repeat);
            mask = opts.groupmarker[0] + mask + opts.groupmarker[1] + opts.quantifiermarker[0] + repeatStart + "," + opts.repeat + opts.quantifiermarker[1];
        }
        // console.log(mask);
        var masksetDefinition,
            // Cache key: regex masks by the regex source; numericInput masks are
            // stored reversed since the tokens get reversed too.
            maskdefKey = regexMask ? "regex_" + opts.regex : (opts.numericInput ? mask.split("").reverse().join("") : mask);
        if (Inputmask.prototype.masksCache[maskdefKey] === undefined || nocache === true) {
            masksetDefinition = {
                "mask": mask,
                "maskToken": Inputmask.prototype.analyseMask(mask, regexMask, opts),
                "validPositions": {},
                "_buffer": undefined,
                "buffer": undefined,
                "tests": {},
                "excludes": {}, //excluded alternations
                "metadata": metadata,
                maskLength: undefined
            };
            if (nocache !== true) {
                Inputmask.prototype.masksCache[maskdefKey] = masksetDefinition;
                // Hand out a deep clone so per-instance state never mutates the cache.
                masksetDefinition = $.extend(true, {}, Inputmask.prototype.masksCache[maskdefKey]);
            }
        } else masksetDefinition = $.extend(true, {}, Inputmask.prototype.masksCache[maskdefKey]);
        return masksetDefinition;
    }
    var ms;
    if ($.isFunction(opts.mask)) { //allow mask to be a preprocessing fn - should return a valid mask
        opts.mask = opts.mask(opts);
    }
    if ($.isArray(opts.mask)) {
        if (opts.mask.length > 1) {
            if (opts.keepStatic === null) { //enable by default when passing multiple masks when the option is not explicitly specified
                opts.keepStatic = "auto";
                for (var i = 0; i < opts.mask.length; i++) {
                    if (opts.mask[i].charAt(0) !== opts.mask[0].charAt(0)) {
opts.keepStatic = true; break; } } } var altMask = opts.groupmarker[0]; $.each(opts.isRTL ? opts.mask.reverse() : opts.mask, function (ndx, msk) { if (altMask.length > 1) { altMask += opts.groupmarker[1] + opts.alternatormarker + opts.groupmarker[0]; } if (msk.mask !== undefined && !$.isFunction(msk.mask)) { altMask += msk.mask; } else { altMask += msk; } }); altMask += opts.groupmarker[1]; // console.log(altMask); return generateMask(altMask, opts.mask, opts); } else opts.mask = opts.mask.pop(); } if (opts.mask && opts.mask.mask !== undefined && !$.isFunction(opts.mask.mask)) { ms = generateMask(opts.mask.mask, opts.mask, opts); } else { ms = generateMask(opts.mask, opts.mask, opts); } return ms; }; function isInputEventSupported(eventName) { var el = document.createElement("input"), evName = "on" + eventName, isSupported = (evName in el); if (!isSupported) { el.setAttribute(evName, "return;"); isSupported = typeof el[evName] === "function"; } el = null; return isSupported; } //masking scope //actionObj definition see below function maskScope(actionObj, maskset, opts) { maskset = maskset || this.maskset; opts = opts || this.opts; var inputmask = this, el = this.el, isRTL = this.isRTL, undoValue, $el, skipKeyPressEvent = false, //Safari 5.1.x - modal dialog fires keypress twice workaround skipInputEvent = false, //skip when triggered from within inputmask ignorable = false, maxLength, mouseEnter = false, colorMask, originalPlaceholder; //maskset helperfunctions function getMaskTemplate(baseOnInput, minimalPos, includeMode, noJit, clearOptionalTail) { //includeMode true => input, undefined => placeholder, false => mask var greedy = opts.greedy; if (clearOptionalTail) opts.greedy = false; minimalPos = minimalPos || 0; var maskTemplate = [], ndxIntlzr, pos = 0, test, testPos, lvp = getLastValidPosition(); do { if (baseOnInput === true && getMaskSet().validPositions[pos]) { testPos = (clearOptionalTail && getMaskSet().validPositions[pos].match.optionality === true && 
getMaskSet().validPositions[pos + 1] === undefined && (getMaskSet().validPositions[pos].generatedInput === true || (getMaskSet().validPositions[pos].input == opts.skipOptionalPartCharacter && pos > 0))) ? determineTestTemplate(pos, getTests(pos, ndxIntlzr, pos - 1)) : getMaskSet().validPositions[pos]; test = testPos.match; ndxIntlzr = testPos.locator.slice(); maskTemplate.push(includeMode === true ? testPos.input : includeMode === false ? test.nativeDef : getPlaceholder(pos, test)); } else { testPos = getTestTemplate(pos, ndxIntlzr, pos - 1); test = testPos.match; ndxIntlzr = testPos.locator.slice(); var jitMasking = noJit === true ? false : (opts.jitMasking !== false ? opts.jitMasking : test.jit); if (jitMasking === false || jitMasking === undefined /*|| pos < lvp*/ || (typeof jitMasking === "number" && isFinite(jitMasking) && jitMasking > pos)) { maskTemplate.push(includeMode === false ? test.nativeDef : getPlaceholder(pos, test)); } } if (opts.keepStatic === "auto") { if (test.newBlockMarker && test.fn !== null) { opts.keepStatic = pos - 1; } } pos++; } while ((maxLength === undefined || pos < maxLength) && (test.fn !== null || test.def !== "") || minimalPos > pos); if (maskTemplate[maskTemplate.length - 1] === "") { maskTemplate.pop(); //drop the last one which is empty } if (includeMode !== false || //do not alter the masklength when just retrieving the maskdefinition getMaskSet().maskLength === undefined) //just make sure the maskLength gets initialized in all cases (needed for isValid) getMaskSet().maskLength = pos - 1; opts.greedy = greedy; return maskTemplate; } function getMaskSet() { return maskset; } function resetMaskSet(soft) { var maskset = getMaskSet(); maskset.buffer = undefined; if (soft !== true) { // maskset._buffer = undefined; maskset.validPositions = {}; maskset.p = 0; } } function getLastValidPosition(closestTo, strict, validPositions) { var before = -1, after = -1, valids = validPositions || getMaskSet().validPositions; //for use in valhook 
~ context switch if (closestTo === undefined) closestTo = -1; for (var posNdx in valids) { var psNdx = parseInt(posNdx); if (valids[psNdx] && (strict || valids[psNdx].generatedInput !== true)) { if (psNdx <= closestTo) before = psNdx; if (psNdx >= closestTo) after = psNdx; } } return (before === -1 || before == closestTo) ? after : after == -1 ? before : (closestTo - before) < (after - closestTo) ? before : after; } function getDecisionTaker(tst) { var decisionTaker = tst.locator[tst.alternation]; if (typeof decisionTaker == "string" && decisionTaker.length > 0) { //no decision taken ~ take first one as decider decisionTaker = decisionTaker.split(",")[0]; } return decisionTaker !== undefined ? decisionTaker.toString() : ""; } function getLocator(tst, align) { //need to align the locators to be correct var locator = (tst.alternation != undefined ? tst.mloc[getDecisionTaker(tst)] : tst.locator).join(""); if (locator !== "") while (locator.length < align) locator += "0"; return locator; } function determineTestTemplate(pos, tests) { pos = pos > 0 ? pos - 1 : 0; var altTest = getTest(pos), targetLocator = getLocator(altTest), tstLocator, closest, bestMatch; for (var ndx = 0; ndx < tests.length; ndx++) { //find best matching var tst = tests[ndx]; tstLocator = getLocator(tst, targetLocator.length); var distance = Math.abs(tstLocator - targetLocator); if (closest === undefined || (tstLocator !== "" && distance < closest) || (bestMatch && bestMatch.match.optionality && bestMatch.match.newBlockMarker === "master" && (!tst.match.optionality || !tst.match.newBlockMarker)) || (bestMatch && bestMatch.match.optionalQuantifier && !tst.match.optionalQuantifier)) { closest = distance; bestMatch = tst; } } return bestMatch; } function getTestTemplate(pos, ndxIntlzr, tstPs) { return getMaskSet().validPositions[pos] || determineTestTemplate(pos, getTests(pos, ndxIntlzr ? 
ndxIntlzr.slice() : ndxIntlzr, tstPs)); } function getTest(pos, tests) { if (getMaskSet().validPositions[pos]) { return getMaskSet().validPositions[pos]; } return (tests || getTests(pos))[0]; } function positionCanMatchDefinition(pos, def) { var valid = false, tests = getTests(pos); for (var tndx = 0; tndx < tests.length; tndx++) { if (tests[tndx].match && tests[tndx].match.def === def) { valid = true; break; } } return valid; } function getTests(pos, ndxIntlzr, tstPs) { var maskTokens = getMaskSet().maskToken, testPos = ndxIntlzr ? tstPs : 0, ndxInitializer = ndxIntlzr ? ndxIntlzr.slice() : [0], matches = [], insertStop = false, latestMatch, cacheDependency = ndxIntlzr ? ndxIntlzr.join("") : "", offset = 0; function resolveTestFromToken(maskToken, ndxInitializer, loopNdx, quantifierRecurse) { //ndxInitializer contains a set of indexes to speedup searches in the mtokens function handleMatch(match, loopNdx, quantifierRecurse) { function isFirstMatch(latestMatch, tokenGroup) { var firstMatch = $.inArray(latestMatch, tokenGroup.matches) === 0; if (!firstMatch) { $.each(tokenGroup.matches, function (ndx, match) { if (match.isQuantifier === true) firstMatch = isFirstMatch(latestMatch, tokenGroup.matches[ndx - 1]); else if (match.hasOwnProperty("matches")) firstMatch = isFirstMatch(latestMatch, match); if (firstMatch) return false; }); } return firstMatch; } function resolveNdxInitializer(pos, alternateNdx, targetAlternation) { var bestMatch, indexPos; if (getMaskSet().tests[pos] || getMaskSet().validPositions[pos]) { $.each(getMaskSet().tests[pos] || [getMaskSet().validPositions[pos]], function (ndx, lmnt) { if (lmnt.mloc[alternateNdx]) { bestMatch = lmnt; return false; //break } var alternation = targetAlternation !== undefined ? targetAlternation : lmnt.alternation, ndxPos = lmnt.locator[alternation] !== undefined ? 
lmnt.locator[alternation].toString().indexOf(alternateNdx) : -1; if ((indexPos === undefined || ndxPos < indexPos) && ndxPos !== -1) { bestMatch = lmnt; indexPos = ndxPos; } }); } if (bestMatch) { var bestMatchAltIndex = bestMatch.locator[bestMatch.alternation]; var locator = bestMatch.mloc[alternateNdx] || bestMatch.mloc[bestMatchAltIndex] || bestMatch.locator; return locator.slice((targetAlternation !== undefined ? targetAlternation : bestMatch.alternation) + 1); } else { return targetAlternation !== undefined ? resolveNdxInitializer(pos, alternateNdx) : undefined; } } function isSubsetOf(source, target) { function expand(pattern) { var expanded = [], start, end; for (var i = 0, l = pattern.length; i < l; i++) { if (pattern.charAt(i) === "-") { end = pattern.charCodeAt(i + 1); while (++start < end) expanded.push(String.fromCharCode(start)); } else { start = pattern.charCodeAt(i); expanded.push(pattern.charAt(i)); } } return expanded.join(""); } if (opts.regex && source.match.fn !== null && target.match.fn !== null) { //is regex a subset return expand(target.match.def.replace(/[\[\]]/g, "")).indexOf(expand(source.match.def.replace(/[\[\]]/g, ""))) !== -1; } return source.match.def === target.match.nativeDef; } function
(source, target) { var sloc = source.locator.slice(source.alternation).join(""), tloc = target.locator.slice(target.alternation).join(""), canMatch = sloc == tloc, canMatch = canMatch && source.match.fn === null && target.match.fn !== null ? target.match.fn.test(source.match.def, getMaskSet(), pos, false, opts, false) : false; return canMatch; } //mergelocators for retrieving the correct locator match when merging function setMergeLocators(targetMatch, altMatch) { if (altMatch === undefined || (targetMatch.alternation === altMatch.alternation && targetMatch.locator[targetMatch.alternation].toString().indexOf(altMatch.locator[altMatch.alternation]) === -1)) { targetMatch.mloc = targetMatch.mloc || {}; var locNdx = targetMatch.locator[targetMatch.alternation]; if (locNdx === undefined) targetMatch.alternation = undefined; else { if (typeof locNdx === "string") locNdx = locNdx.split(",")[0]; if (targetMatch.mloc[locNdx] === undefined) targetMatch.mloc[locNdx] = targetMatch.locator.slice(); if (altMatch !== undefined) { for (var ndx in altMatch.mloc) { if (typeof ndx === "string") ndx = ndx.split(",")[0]; if (targetMatch.mloc[ndx] === undefined) targetMatch.mloc[ndx] = altMatch.mloc[ndx]; } targetMatch.locator[targetMatch.alternation] = Object.keys(targetMatch.mloc).join(","); } return true; } } return false; } if (testPos > 500 && quantifierRecurse !== undefined) { throw "Inputmask: There is probably an error in your mask definition or in the code. Create an issue on github with an example of the mask you are using. 
" + getMaskSet().mask; } if (testPos === pos && match.matches === undefined) { matches.push({ "match": match, "locator": loopNdx.reverse(), "cd": cacheDependency, "mloc": {} }); return true; } else if (match.matches !== undefined) { if (match.isGroup && quantifierRecurse !== match) { //when a group pass along to the quantifier match = handleMatch(maskToken.matches[$.inArray(match, maskToken.matches) + 1], loopNdx, quantifierRecurse); if (match) return true; } else if (match.isOptional) { var optionalToken = match; match = resolveTestFromToken(match, ndxInitializer, loopNdx, quantifierRecurse); if (match) { //mark optionality in matches $.each(matches, function (ndx, mtch) { mtch.match.optionality = true; }); latestMatch = matches[matches.length - 1].match; if (quantifierRecurse === undefined && isFirstMatch(latestMatch, optionalToken)) { //prevent loop see #698 insertStop = true; //insert a stop testPos = pos; //match the position after the group } else return true; } } else if (match.isAlternator) { var alternateToken = match, malternateMatches = [], maltMatches, currentMatches = matches.slice(), loopNdxCnt = loopNdx.length; var altIndex = ndxInitializer.length > 0 ? 
ndxInitializer.shift() : -1; if (altIndex === -1 || typeof altIndex === "string") { var currentPos = testPos, ndxInitializerClone = ndxInitializer.slice(), altIndexArr = [], amndx; if (typeof altIndex == "string") { altIndexArr = altIndex.split(","); } else { for (amndx = 0; amndx < alternateToken.matches.length; amndx++) { altIndexArr.push(amndx.toString()); } } if (getMaskSet().excludes[pos]) { var altIndexArrClone = altIndexArr.slice(); for (var i = 0, el = getMaskSet().excludes[pos].length; i < el; i++) { altIndexArr.splice(altIndexArr.indexOf(getMaskSet().excludes[pos][i].toString()), 1); } if (altIndexArr.length === 0) { //fully alternated => reset getMaskSet().excludes[pos] = undefined; altIndexArr = altIndexArrClone; } } if (opts.keepStatic === true || (isFinite(parseInt(opts.keepStatic)) && currentPos >= opts.keepStatic)) altIndexArr = altIndexArr.slice(0, 1); var unMatchedAlternation = false; for (var ndx = 0; ndx < altIndexArr.length; ndx++) { amndx = parseInt(altIndexArr[ndx]); matches = []; //set the correct ndxInitializer ndxInitializer = typeof altIndex === "string" ? 
resolveNdxInitializer(testPos, amndx, loopNdxCnt) || ndxInitializerClone.slice() : ndxInitializerClone.slice(); if (alternateToken.matches[amndx] && handleMatch(alternateToken.matches[amndx], [amndx].concat(loopNdx), quantifierRecurse)) match = true; else if (ndx === 0) { unMatchedAlternation = true; } maltMatches = matches.slice(); testPos = currentPos; matches = []; //fuzzy merge matches for (var ndx1 = 0; ndx1 < maltMatches.length; ndx1++) { var altMatch = maltMatches[ndx1], dropMatch = false; altMatch.match.jit = altMatch.match.jit || unMatchedAlternation; //mark jit when there are unmatched alternations ex: mask: "(a|aa)" altMatch.alternation = altMatch.alternation || loopNdxCnt; setMergeLocators(altMatch); for (var ndx2 = 0; ndx2 < malternateMatches.length; ndx2++) { var altMatch2 = malternateMatches[ndx2]; if (typeof altIndex !== "string" || (altMatch.alternation !== undefined && $.inArray(altMatch.locator[altMatch.alternation].toString(), altIndexArr) !== -1)) { if (altMatch.match.nativeDef === altMatch2.match.nativeDef) { dropMatch = true; setMergeLocators(altMatch2, altMatch); break; } else if (isSubsetOf(altMatch, altMatch2)) { if (setMergeLocators(altMatch, altMatch2)) { dropMatch = true; malternateMatches.splice(malternateMatches.indexOf(altMatch2), 0, altMatch); } break; } else if (isSubsetOf(altMatch2, altMatch)) { setMergeLocators(altMatch2, altMatch); break; } else if (staticCanMatchDefinition(altMatch, altMatch2)) { if (setMergeLocators(altMatch, altMatch2)) { //insert match above general match dropMatch = true; malternateMatches.splice(malternateMatches.indexOf(altMatch2), 0, altMatch); } break; } } } if (!dropMatch) { malternateMatches.push(altMatch); } } } matches = currentMatches.concat(malternateMatches); testPos = pos; insertStop = matches.length > 0; //insert a stopelemnt when there is an alternate - needed for non-greedy option match = malternateMatches.length > 0; //set correct match state //cloneback ndxInitializer = 
ndxInitializerClone.slice(); } else match = handleMatch(alternateToken.matches[altIndex] || maskToken.matches[altIndex], [altIndex].concat(loopNdx), quantifierRecurse); if (match) return true; } else if (match.isQuantifier && quantifierRecurse !== maskToken.matches[$.inArray(match, maskToken.matches) - 1]) { var qt = match; for (var qndx = (ndxInitializer.length > 0) ? ndxInitializer.shift() : 0; (qndx < (isNaN(qt.quantifier.max) ? qndx + 1 : qt.quantifier.max)) && testPos <= pos; qndx++) { var tokenGroup = maskToken.matches[$.inArray(qt, maskToken.matches) - 1]; match = handleMatch(tokenGroup, [qndx].concat(loopNdx), tokenGroup); //set the tokenGroup as quantifierRecurse marker if (match) { //get latest match latestMatch = matches[matches.length - 1].match; //mark optionality //TODO FIX RECURSIVE QUANTIFIERS latestMatch.optionalQuantifier = qndx > (qt.quantifier.min - 1); // console.log(pos + " " + qt.quantifier.min + " " + latestMatch.optionalQuantifier); latestMatch.jit = (qndx || 1) * tokenGroup.matches.indexOf(latestMatch) >= qt.quantifier.jit; if (latestMatch.optionalQuantifier && isFirstMatch(latestMatch, tokenGroup)) { insertStop = true; testPos = pos; //match the position after the group break; //stop quantifierloop && search for next possible match } if (latestMatch.jit && !latestMatch.optionalQuantifier) { offset = tokenGroup.matches.indexOf(latestMatch); // console.log(qndx + " - " + offset); testPos = pos; //match the position after the group insertStop = true; break; //stop quantifierloop && search for next possible match } return true; } } } else { match = resolveTestFromToken(match, ndxInitializer, loopNdx, quantifierRecurse); if (match) return true; } } else { testPos++; } } //the offset is set in the quantifierloop when git masking is used for (var tndx = (ndxInitializer.length > 0 ? 
ndxInitializer.shift() : 0); tndx < maskToken.matches.length; tndx = tndx + 1 + offset) { offset = 0; //reset offset if (maskToken.matches[tndx].isQuantifier !== true) { var match = handleMatch(maskToken.matches[tndx], [tndx].concat(loopNdx), quantifierRecurse); if (match && testPos === pos) { return match; } else if (testPos > pos) { break; } } } } function mergeLocators(pos, tests) { var locator = []; if (!$.isArray(tests)) tests = [tests]; if (tests.length > 0) { if (tests[0].alternation === undefined) { locator = determineTestTemplate(pos, tests.slice()).locator.slice(); if (locator.length === 0) locator = tests[0].locator.slice(); } else { $.each(tests, function (ndx, tst) { if (tst.def !== "") { if (locator.length === 0) locator = tst.locator.slice(); else { for (var i = 0; i < locator.length; i++) { if (tst.locator[i] && locator[i].toString().indexOf(tst.locator[i]) === -1) { locator[i] += "," + tst.locator[i]; } } } } }); } } return locator; } if (pos > -1) { if (ndxIntlzr === undefined) { //determine index initializer var previousPos = pos - 1, test; while ((test = getMaskSet().validPositions[previousPos] || getMaskSet().tests[previousPos]) === undefined && previousPos > -1) { previousPos--; } if (test !== undefined && previousPos > -1) { ndxInitializer = mergeLocators(previousPos, test); cacheDependency = ndxInitializer.join(""); testPos = previousPos; } } if (getMaskSet().tests[pos] && getMaskSet().tests[pos][0].cd === cacheDependency) { //cacheDependency is set on all tests, just check on the first //console.log("cache hit " + pos + " - " + ndxIntlzr); return getMaskSet().tests[pos]; } for (var mtndx = ndxInitializer.shift(); mtndx < maskTokens.length; mtndx++) { var match = resolveTestFromToken(maskTokens[mtndx], ndxInitializer, [mtndx]); if ((match && testPos === pos) || testPos > pos) { break; } } } if (matches.length === 0 || insertStop) { matches.push({ match: { fn: null, optionality: false, casing: null, def: "", placeholder: "" }, locator: [], 
            mloc: {},
            cd: cacheDependency
        });
    }
    // Only "full" test runs (ndxIntlzr supplied) may overwrite an existing
    // cache entry; partial results would poison it.
    if (ndxIntlzr !== undefined && getMaskSet().tests[pos]) { //prioritize full tests for caching
        return $.extend(true, [], matches);
    }
    getMaskSet().tests[pos] = $.extend(true, [], matches); //set a clone to prevent overwriting some props
    // console.log(pos + " - " + JSON.stringify(matches));
    return getMaskSet().tests[pos];
}

// Lazily build and return the template buffer: the mask as rendered with no
// user input.  Also seeds the working buffer the first time through.
function getBufferTemplate() {
    if (getMaskSet()._buffer === undefined) { //generate template
        getMaskSet()._buffer = getMaskTemplate(false, 1);
        if (getMaskSet().buffer === undefined) getMaskSet().buffer = getMaskSet()._buffer.slice();
    }
    return getMaskSet()._buffer;
}

// Return the current display buffer, regenerating it from the valid
// positions when it is missing or when noCache === true forces a rebuild.
function getBuffer(noCache) {
    if (getMaskSet().buffer === undefined || noCache === true) {
        getMaskSet().buffer = getMaskTemplate(true, getLastValidPosition(), true);
    }
    return getMaskSet().buffer;
}

// Replay buffer[start..end) through validation so the mask state matches the
// given buffer.  start === true means "reset everything and replay the whole
// buffer".  Characters equal to opts.skipOptionalPartCharacter are skipped;
// a validation result may advance the write position via its caret.
function refreshFromBuffer(start, end, buffer) {
    var i, p;
    if (start === true) {
        resetMaskSet();
        start = 0;
        end = buffer.length;
    } else {
        for (i = start; i < end; i++) {
            delete getMaskSet().validPositions[i];
        }
    }
    p = start;
    for (i = start; i < end; i++) {
        resetMaskSet(true); //prevents clobber from the buffer
        if (buffer[i] !== opts.skipOptionalPartCharacter) {
            var valResult = isValid(p, buffer[i], true, true);
            if (valResult !== false) {
                resetMaskSet(true);
                p = valResult.caret !== undefined ?
                valResult.caret : valResult.pos + 1;
            }
        }
    }
}

// Apply the configured character casing to elem.  opts.casing takes
// precedence over the per-test casing.  "title" upper-cases a character at
// position 0 or after a space, lower-cases otherwise; a function opts.casing
// is invoked with (elem, test, pos, validPositions) for custom behaviour.
function casing(elem, test, pos) {
    switch (opts.casing || test.casing) {
        case "upper":
            elem = elem.toUpperCase();
            break;
        case "lower":
            elem = elem.toLowerCase();
            break;
        case "title":
            var posBefore = getMaskSet().validPositions[pos - 1];
            if (pos === 0 || posBefore && posBefore.input === String.fromCharCode(Inputmask.keyCode.SPACE)) {
                elem = elem.toUpperCase();
            } else {
                elem = elem.toLowerCase();
            }
            break;
        default:
            if ($.isFunction(opts.casing)) {
                var args = Array.prototype.slice.call(arguments);
                args.push(getMaskSet().validPositions);
                elem = opts.casing.apply(this, args);
            }
    }
    return elem;
}

// Report whether any locator in altArr1 also occurs in altArr2 (only
// altArr2's first entry is considered unless opts.greedy).  Locators named in
// the comma-separated "na" (no-alternate) string are removed from altArr1
// before the comparison.  NOTE(review): altArr1 is mutated by the splice.
function checkAlternationMatch(altArr1, altArr2, na) {
    var altArrC = opts.greedy ? altArr2 : altArr2.slice(0, 1),
        isMatch = false,
        naArr = na !== undefined ? na.split(",") : [],
        naNdx;
    //remove no alternate indexes from alternation array
    for (var i = 0; i < naArr.length; i++) {
        if ((naNdx = altArr1.indexOf(naArr[i])) !== -1) {
            altArr1.splice(naNdx, 1);
        }
    }
    for (var alndx = 0; alndx < altArr1.length; alndx++) {
        if ($.inArray(altArr1[alndx], altArrC) !== -1) {
            isMatch = true;
            break;
        }
    }
    return isMatch;
}

// Retry validation of character c at pos against the untried branches of the
// most recent alternation.  Snapshots validPositions, replays the collected
// inputs for each alternative (already-rejected branches are tracked in
// getMaskSet().excludes) and restores the snapshot when a branch fails.
// pos === true generalizes without excluding the current decision; rAltPos
// bounds the search for the deciding alternation position.
function alternate(pos, c, strict, fromSetValid, rAltPos) { //pos == true => generalize
    var validPsClone = $.extend(true, {}, getMaskSet().validPositions),
        lastAlt,
        alternation,
        isValidRslt = false,
        altPos, prevAltPos, i, validPos, decisionPos,
        lAltPos = rAltPos !== undefined ?
rAltPos : getLastValidPosition(); if (lAltPos === -1 && rAltPos === undefined) { //do not recurse when already paste the beginning lastAlt = 0; prevAltPos = getTest(lastAlt); alternation = prevAltPos.alternation; } else { //find last modified alternation for (; lAltPos >= 0; lAltPos--) { altPos = getMaskSet().validPositions[lAltPos]; if (altPos && altPos.alternation !== undefined) { if (prevAltPos && prevAltPos.locator[altPos.alternation] !== altPos.locator[altPos.alternation]) { break; } lastAlt = lAltPos; alternation = getMaskSet().validPositions[lastAlt].alternation; prevAltPos = altPos; } } } if (alternation !== undefined) { decisionPos = parseInt(lastAlt); getMaskSet().excludes[decisionPos] = getMaskSet().excludes[decisionPos] || []; if (pos !== true) { //generalize getMaskSet().excludes[decisionPos].push(getDecisionTaker(prevAltPos)); } var validInputsClone = [], staticInputsBeforePos = 0; for (i = decisionPos; i < getLastValidPosition(undefined, true) + 1; i++) { validPos = getMaskSet().validPositions[i]; if (validPos && validPos.generatedInput !== true /*&& /[0-9a-bA-Z]/.test(validPos.input)*/) { validInputsClone.push(validPos.input); } else if (i < pos) staticInputsBeforePos++; delete getMaskSet().validPositions[i]; } while (getMaskSet().excludes[decisionPos] && getMaskSet().excludes[decisionPos].length < 10) { var posOffset = staticInputsBeforePos * -1, //negate validInputs = validInputsClone.slice(); getMaskSet().tests[decisionPos] = undefined; //clear decisionPos resetMaskSet(true); //clear getbuffer isValidRslt = true; while (validInputs.length > 0) { var input = validInputs.shift(); // if (input !== opts.skipOptionalPartCharacter) { if (!(isValidRslt = isValid(getLastValidPosition(undefined, true) + 1, input, false, fromSetValid, true))) { break; } // } } if (isValidRslt && c !== undefined) { var targetLvp = getLastValidPosition(pos) + 1; for (i = decisionPos; i < getLastValidPosition() + 1; i++) { validPos = getMaskSet().validPositions[i]; if 
((validPos === undefined || validPos.match.fn == null) && i < (pos + posOffset)) { posOffset++; } } pos = pos + posOffset; isValidRslt = isValid(pos > targetLvp ? targetLvp : pos, c, strict, fromSetValid, true); } if (!isValidRslt) { resetMaskSet(); prevAltPos = getTest(decisionPos); //get the current decisionPos to exclude ~ needs to be before restoring the initial validation //reset & revert getMaskSet().validPositions = $.extend(true, {}, validPsClone); if (getMaskSet().excludes[decisionPos]) { var decisionTaker = getDecisionTaker(prevAltPos); if (getMaskSet().excludes[decisionPos].indexOf(decisionTaker) !== -1) { isValidRslt = alternate(pos, c, strict, fromSetValid, decisionPos - 1); break; } getMaskSet().excludes[decisionPos].push(decisionTaker); for (i = decisionPos; i < getLastValidPosition(undefined, true) + 1; i++) delete getMaskSet().validPositions[i]; } else { //latest alternation isValidRslt = alternate(pos, c, strict, fromSetValid, decisionPos - 1); break; } } else break; } } //reset alternation excludes getMaskSet().excludes[decisionPos] = undefined; return isValidRslt; } function isValid(pos, c, strict, fromSetValid, fromAlternate, validateOnly) { //strict true ~ no correction or autofill function isSelection(posObj) { return isRTL ? (posObj.begin - posObj.end) > 1 || ((posObj.begin - posObj.end) === 1) : (posObj.end - posObj.begin) > 1 || ((posObj.end - posObj.begin) === 1); } strict = strict === true; //always set a value to strict to prevent possible strange behavior in the extensions var maskPos = pos; if (pos.begin !== undefined) { //position was a position object - used to handle a delete by typing over a selection maskPos = isRTL ? pos.end : pos.begin; } function _isValid(position, c, strict) { var rslt = false; $.each(getTests(position), function (ndx, tst) { var test = tst.match; //make sure the buffer is set and correct getBuffer(true); //return is false or a json object => { pos: ??, c: ??} or true rslt = test.fn != null ? 
test.fn.test(c, getMaskSet(), position, strict, opts, isSelection(pos)) : (c === test.def || c === opts.skipOptionalPartCharacter) && test.def !== "" ? //non mask { c: getPlaceholder(position, test, true) || test.def, pos: position } : false; if (rslt !== false) { var elem = rslt.c !== undefined ? rslt.c : c, validatedPos = position; elem = (elem === opts.skipOptionalPartCharacter && test.fn === null) ? (getPlaceholder(position, test, true) || test.def) : elem; if (rslt.remove !== undefined) { //remove position(s) if (!$.isArray(rslt.remove)) rslt.remove = [rslt.remove]; $.each(rslt.remove.sort(function (a, b) { return b - a; }), function (ndx, lmnt) { revalidateMask({begin: lmnt, end: lmnt + 1}); }); } if (rslt.insert !== undefined) { //insert position(s) if (!$.isArray(rslt.insert)) rslt.insert = [rslt.insert]; $.each(rslt.insert.sort(function (a, b) { return a - b; }), function (ndx, lmnt) { isValid(lmnt.pos, lmnt.c, true, fromSetValid); }); } if (rslt !== true && rslt.pos !== undefined && rslt.pos !== position) { //their is a position offset validatedPos = rslt.pos; } if (rslt !== true && rslt.pos === undefined && rslt.c === undefined) { return false; //breakout if nothing to insert } if (!revalidateMask(pos, $.extend({}, tst, { "input": casing(elem, test, validatedPos) }), fromSetValid, validatedPos)) { rslt = false; } return false; //break from $.each } }); return rslt; } var result = true, positionsClone = $.extend(true, {}, getMaskSet().validPositions); //clone the currentPositions if ($.isFunction(opts.preValidation) && !strict && fromSetValid !== true && validateOnly !== true) { result = opts.preValidation(getBuffer(), maskPos, c, isSelection(pos), opts, getMaskSet()); } if (result === true) { trackbackPositions(undefined, maskPos, true); if (maxLength === undefined || maskPos < maxLength) { result = _isValid(maskPos, c, strict); if ((!strict || fromSetValid === true) && result === false && validateOnly !== true) { var currentPosValid = 
getMaskSet().validPositions[maskPos]; if (currentPosValid && currentPosValid.match.fn === null && (currentPosValid.match.def === c || c === opts.skipOptionalPartCharacter)) { result = { "caret": seekNext(maskPos) }; } else if ((opts.insertMode || getMaskSet().validPositions[seekNext(maskPos)] === undefined) && !isMask(maskPos, true)) { //does the input match on a further position? for (var nPos = maskPos + 1, snPos = seekNext(maskPos); nPos <= snPos; nPos++) { // if (!isMask(nPos, true)) { // continue; // } result = _isValid(nPos, c, strict); if (result !== false) { result = trackbackPositions(maskPos, result.pos !== undefined ? result.pos : nPos) || result; maskPos = nPos; break; } } } } } if (result === false && opts.keepStatic !== false && (opts.regex == null || isComplete(getBuffer())) && !strict && fromAlternate !== true) { //try fuzzy alternator logic result = alternate(maskPos, c, strict, fromSetValid); } if (result === true) { result = { "pos": maskPos }; } } if ($.isFunction(opts.postValidation) && result !== false && !strict && fromSetValid !== true && validateOnly !== true) { var postResult = opts.postValidation(getBuffer(true), pos.begin !== undefined ? (isRTL ? pos.end : pos.begin) : pos, result, opts); if (postResult !== undefined) { if (postResult.refreshFromBuffer && postResult.buffer) { var refresh = postResult.refreshFromBuffer; refreshFromBuffer(refresh === true ? refresh : refresh.start, refresh.end, postResult.buffer); } result = postResult === true ? 
result : postResult; } } if (result && result.pos === undefined) { result.pos = maskPos; } if (result === false || validateOnly === true) { resetMaskSet(true); getMaskSet().validPositions = $.extend(true, {}, positionsClone); //revert validation changes } return result; } //fill in best positions according the current input function trackbackPositions(originalPos, newPos, fillOnly) { var result; if (originalPos === undefined) { //find previous valid for (originalPos = newPos - 1; originalPos > 0; originalPos--) { if (getMaskSet().validPositions[originalPos]) break; } } for (var ps = originalPos; ps < newPos; ps++) { if (getMaskSet().validPositions[ps] === undefined && !isMask(ps, true)) { var vp = ps == 0 ? getTest(ps) : getMaskSet().validPositions[ps - 1]; if (vp) { var tests = getTests(ps).slice() if (tests[tests.length - 1].match.def === "") tests.pop(); var bestMatch = determineTestTemplate(ps, tests); bestMatch = $.extend({}, bestMatch, { "input": getPlaceholder(ps, bestMatch.match, true) || bestMatch.match.def }); bestMatch.generatedInput = true; revalidateMask(ps, bestMatch, true); if (fillOnly !== true) { //revalidate the new position to update the locator value var cvpInput = getMaskSet().validPositions[newPos].input; getMaskSet().validPositions[newPos] = undefined; result = isValid(newPos, cvpInput, true, true); } } } } return result; } function revalidateMask(pos, validTest, fromSetValid, validatedPos) { function IsEnclosedStatic(pos, valids, selection) { var posMatch = valids[pos]; if (posMatch !== undefined && ((posMatch.match.fn === null && posMatch.match.optionality !== true) || posMatch.input === opts.radixPoint)) { var prevMatch = selection.begin <= pos - 1 ? valids[pos - 1] && valids[pos - 1].match.fn === null && valids[pos - 1] : valids[pos - 1], nextMatch = selection.end > pos + 1 ? 
valids[pos + 1] && valids[pos + 1].match.fn === null && valids[pos + 1] : valids[pos + 1]; return prevMatch && nextMatch; } return false; } var begin = pos.begin !== undefined ? pos.begin : pos, end = pos.end !== undefined ? pos.end : pos; if (pos.begin > pos.end) { begin = pos.end; end = pos.begin; } validatedPos = validatedPos !== undefined ? validatedPos : begin; if (begin !== end || (opts.insertMode && getMaskSet().validPositions[validatedPos] !== undefined && fromSetValid === undefined)) { //reposition & revalidate others var positionsClone = $.extend(true, {}, getMaskSet().validPositions), lvp = getLastValidPosition(undefined, true), i; getMaskSet().p = begin; //needed for alternated position after overtype selection for (i = lvp; i >= begin; i--) { if (getMaskSet().validPositions[i] && getMaskSet().validPositions[i].match.nativeDef === "+") { //REMOVE ME AFTER REFACTORING OF NUMERIC ALIAS opts.isNegative = false; } delete getMaskSet().validPositions[i]; } var valid = true, j = validatedPos, vps = getMaskSet().validPositions, needsValidation = false, posMatch = j, i = j; if (validTest) { getMaskSet().validPositions[validatedPos] = $.extend(true, {}, validTest); posMatch++; j++; if (begin < end) i++; //if selection and entry move start by one } for (; i <= lvp; i++) { var t = positionsClone[i]; if (t !== undefined && (i >= end || (i >= begin && t.generatedInput !== true && IsEnclosedStatic(i, positionsClone, { begin: begin, end: end })))) { while (getTest(posMatch).match.def !== "") { //loop needed to match further positions if (needsValidation === false && positionsClone[posMatch] && positionsClone[posMatch].match.nativeDef === t.match.nativeDef) { //obvious match getMaskSet().validPositions[posMatch] = $.extend(true, {}, positionsClone[posMatch]); getMaskSet().validPositions[posMatch].input = t.input; trackbackPositions(undefined, posMatch, true); j = posMatch + 1; valid = true; } else if (positionCanMatchDefinition(posMatch, t.match.def)) { //validated 
match var result = isValid(posMatch, t.input, true, true); valid = result !== false; j = (result.caret || result.insert) ? getLastValidPosition() : posMatch + 1; needsValidation = true; } else { valid = t.generatedInput === true || (t.input === opts.radixPoint && opts.numericInput === true); } if (valid) break; if (!valid && posMatch > end && isMask(posMatch, true) && (t.match.fn !== null || posMatch > getMaskSet().maskLength)) { break; } posMatch++; } if (getTest(posMatch).match.def == "") valid = false; //restore position posMatch = j; } if (!valid) break; } if (!valid) { getMaskSet().validPositions = $.extend(true, {}, positionsClone); resetMaskSet(true); return false; } } else if (validTest) { getMaskSet().validPositions[validatedPos] = $.extend(true, {}, validTest); } resetMaskSet(true); return true; } function isMask(pos, strict) { var test = getTestTemplate(pos).match; if (test.def === "") test = getTest(pos).match; if (test.fn != null) { return test.fn; } if (strict !== true && pos > -1) { var tests = getTests(pos); return tests.length > 1 + (tests[tests.length - 1].match.def === "" ? 1 : 0); } return false; } function seekNext(pos, newBlock) { var position = pos + 1; while (getTest(position).match.def !== "" && ((newBlock === true && (getTest(position).match.newBlockMarker !== true || !isMask(position))) || (newBlock !== true && !isMask(position)))) { position++; } return position; } function seekPrevious(pos, newBlock) { var position = pos, tests; if (position <= 0) return 0; while (--position > 0 && ((newBlock === true && getTest(position).match.newBlockMarker !== true) || (newBlock !== true && !isMask(position) && (tests = getTests(position), tests.length < 2 || (tests.length === 2 && tests[1].match.def === ""))))) { } return position; } function getBufferElement(position) { return getMaskSet().validPositions[position] === undefined ? 
            getPlaceholder(position) : getMaskSet().validPositions[position].input;
    }

    // Write the buffer to the input element and position the caret.
    // opts.onBeforeWrite may first mutate the buffer and/or caret; when
    // triggerEvents is true an "input" event is fired, followed
    // asynchronously by "cleared" (value equals the empty template) or
    // "complete" (mask fully filled).
    function writeBuffer(input, buffer, caretPos, event, triggerEvents) {
        if (event && $.isFunction(opts.onBeforeWrite)) {
            // buffer = buffer.slice(); //prevent uncontrolled manipulation of the internal buffer
            var result = opts.onBeforeWrite.call(inputmask, event, buffer, caretPos, opts);
            if (result) {
                if (result.refreshFromBuffer) {
                    var refresh = result.refreshFromBuffer;
                    refreshFromBuffer(refresh === true ? refresh : refresh.start, refresh.end, result.buffer || buffer);
                    buffer = getBuffer(true);
                }
                //only alter the caret when one was intended (caretPos !== undefined)
                if (caretPos !== undefined) caretPos = result.caret !== undefined ? result.caret : caretPos;
            }
        }
        if (input !== undefined) {
            input.inputmask._valueSet(buffer.join(""));
            if (caretPos !== undefined && (event === undefined || event.type !== "blur")) {
                caret(input, caretPos);
            } else renderColorMask(input, caretPos, buffer.length === 0);
            if (triggerEvents === true) {
                var $input = $(input), nptVal = input.inputmask._valueGet();
                skipInputEvent = true;
                $input.trigger("input");
                setTimeout(function () { //timeout needed for IE
                    if (nptVal === getBufferTemplate().join("")) {
                        $input.trigger("cleared");
                    } else if (isComplete(buffer) === true) {
                        $input.trigger("complete");
                    }
                }, 0);
            }
        }
    }

    // Resolve the placeholder character shown at pos.  An explicit
    // test.placeholder (possibly a function of opts) wins; for static
    // (fn === null) positions the literal def is used unless several static
    // alternatives compete, in which case opts.placeholder is shown instead.
    function getPlaceholder(pos, test, returnPL) {
        test = test || getTest(pos).match;
        if (test.placeholder !== undefined || returnPL === true) {
            return $.isFunction(test.placeholder) ? test.placeholder(opts) : test.placeholder;
        } else if (test.fn === null) {
            if (pos > -1 && getMaskSet().validPositions[pos] === undefined) {
                var tests = getTests(pos),
                    staticAlternations = [],
                    prevTest;
                if (tests.length > 1 + (tests[tests.length - 1].match.def === "" ?
1 : 0)) { for (var i = 0; i < tests.length; i++) { if (tests[i].match.optionality !== true && tests[i].match.optionalQuantifier !== true && (tests[i].match.fn === null || (prevTest === undefined || tests[i].match.fn.test(prevTest.match.def, getMaskSet(), pos, true, opts) !== false))) { staticAlternations.push(tests[i]); if (tests[i].match.fn === null) prevTest = tests[i]; if (staticAlternations.length > 1) { if (/[0-9a-bA-Z]/.test(staticAlternations[0].match.def)) { return opts.placeholder.charAt(pos % opts.placeholder.length); } } } } } } return test.def; } return opts.placeholder.charAt(pos % opts.placeholder.length); } function HandleNativePlaceholder(npt, value) { if (ie && npt.inputmask._valueGet() !== value) { var buffer = getBuffer().slice(), nptValue = npt.inputmask._valueGet(); if (nptValue !== value) { if (getLastValidPosition() === -1 && nptValue === getBufferTemplate().join("")) { buffer = []; } else { //clearout optional tail of the mask clearOptionalTail(buffer); } writeBuffer(npt, buffer); } } else if (npt.placeholder !== value) { npt.placeholder = value; if (npt.placeholder === "") npt.removeAttribute("placeholder"); } } var EventRuler = { on: function (input, eventName, eventHandler) { var ev = function (e) { var that = this; // console.log(e.type); if (that.inputmask === undefined && this.nodeName !== "FORM") { //happens when cloning an object with jquery.clone var imOpts = $.data(that, "_inputmask_opts"); if (imOpts) (new Inputmask(imOpts)).mask(that); else EventRuler.off(that); } else if (e.type !== "setvalue" && this.nodeName !== "FORM" && (that.disabled || (that.readOnly && !(e.type === "keydown" && (e.ctrlKey && e.keyCode === 67) || (opts.tabThrough === false && e.keyCode === Inputmask.keyCode.TAB))))) { e.preventDefault(); } else { switch (e.type) { case "input": if (skipInputEvent === true) { skipInputEvent = false; return e.preventDefault(); } if (mobile) { var args = arguments; setTimeout(function () { //needed for caret selection when 
entering a char on Android 8 - #1818 eventHandler.apply(that, args); caret(that, that.inputmask.caretPos, undefined, true); }, 0); return false; } break; case "keydown": //Safari 5.1.x - modal dialog fires keypress twice workaround skipKeyPressEvent = false; skipInputEvent = false; break; case "keypress": if (skipKeyPressEvent === true) { return e.preventDefault(); } skipKeyPressEvent = true; break; case "click": if (iemobile || iphone) { var args = arguments; setTimeout(function () { eventHandler.apply(that, args); }, 0); return false; } break; } var returnVal = eventHandler.apply(that, arguments); if (returnVal === false) { e.preventDefault(); e.stopPropagation(); } return returnVal; } }; //keep instance of the event input.inputmask.events[eventName] = input.inputmask.events[eventName] || []; input.inputmask.events[eventName].push(ev); if ($.inArray(eventName, ["submit", "reset"]) !== -1) { if (input.form !== null) $(input.form).on(eventName, ev); } else { $(input).on(eventName, ev); } }, off: function (input, event) { if (input.inputmask && input.inputmask.events) { var events; if (event) { events = []; events[event] = input.inputmask.events[event]; } else { events = input.inputmask.events; } $.each(events, function (eventName, evArr) { while (evArr.length > 0) { var ev = evArr.pop(); if ($.inArray(eventName, ["submit", "reset"]) !== -1) { if (input.form !== null) $(input.form).off(eventName, ev); } else { $(input).off(eventName, ev); } } delete input.inputmask.events[eventName]; }); } } }; var EventHandlers = { keydownEvent: function (e) { var input = this, $input = $(input), k = e.keyCode, pos = caret(input); //backspace, delete, and escape get special treatment if (k === Inputmask.keyCode.BACKSPACE || k === Inputmask.keyCode.DELETE || (iphone && k === Inputmask.keyCode.BACKSPACE_SAFARI) || (e.ctrlKey && k === Inputmask.keyCode.X && !isInputEventSupported("cut"))) { //backspace/delete e.preventDefault(); //stop default action but allow propagation 
handleRemove(input, k, pos); writeBuffer(input, getBuffer(true), getMaskSet().p, e, input.inputmask._valueGet() !== getBuffer().join("")); } else if (k === Inputmask.keyCode.END || k === Inputmask.keyCode.PAGE_DOWN) { //when END or PAGE_DOWN pressed set position at lastmatch e.preventDefault(); var caretPos = seekNext(getLastValidPosition()); caret(input, e.shiftKey ? pos.begin : caretPos, caretPos, true); } else if ((k === Inputmask.keyCode.HOME && !e.shiftKey) || k === Inputmask.keyCode.PAGE_UP) { //Home or page_up e.preventDefault(); caret(input, 0, e.shiftKey ? pos.begin : 0, true); } else if (((opts.undoOnEscape && k === Inputmask.keyCode.ESCAPE) || (k === 90 && e.ctrlKey)) && e.altKey !== true) { //escape && undo && #762 checkVal(input, true, false, undoValue.split("")); $input.trigger("click"); } else if (k === Inputmask.keyCode.INSERT && !(e.shiftKey || e.ctrlKey)) { //insert opts.insertMode = !opts.insertMode; input.setAttribute("im-insert", opts.insertMode); } else if (opts.tabThrough === true && k === Inputmask.keyCode.TAB) { if (e.shiftKey === true) { if (getTest(pos.begin).match.fn === null) { pos.begin = seekNext(pos.begin); } pos.end = seekPrevious(pos.begin, true); pos.begin = seekPrevious(pos.end, true); } else { pos.begin = seekNext(pos.begin, true); pos.end = seekNext(pos.begin, true); if (pos.end < getMaskSet().maskLength) pos.end--; } if (pos.begin < getMaskSet().maskLength) { e.preventDefault(); caret(input, pos.begin, pos.end); } } opts.onKeyDown.call(this, e, getBuffer(), caret(input).begin, opts); ignorable = $.inArray(k, opts.ignorables) !== -1; }, keypressEvent: function (e, checkval, writeOut, strict, ndx) { var input = this, $input = $(input), k = e.which || e.charCode || e.keyCode; if (checkval !== true && (!(e.ctrlKey && e.altKey) && (e.ctrlKey || e.metaKey || ignorable))) { if (k === Inputmask.keyCode.ENTER && undoValue !== getBuffer().join("")) { undoValue = getBuffer().join(""); // e.preventDefault(); setTimeout(function () { 
$input.trigger("change"); }, 0); } return true; } else { if (k) { //special treat the decimal separator if (k === 46 && e.shiftKey === false && opts.radixPoint !== "") k = opts.radixPoint.charCodeAt(0); var pos = checkval ? { begin: ndx, end: ndx } : caret(input), forwardPosition, c = String.fromCharCode(k), offset = 0; if (opts._radixDance && opts.numericInput) { var caretPos = getBuffer().indexOf(opts.radixPoint.charAt(0)) + 1; if (pos.begin <= caretPos) { if (k === opts.radixPoint.charCodeAt(0)) offset = 1; pos.begin -= 1; pos.end -= 1; } } getMaskSet().writeOutBuffer = true; var valResult = isValid(pos, c, strict); if (valResult !== false) { resetMaskSet(true); forwardPosition = valResult.caret !== undefined ? valResult.caret : seekNext(valResult.pos.begin ? valResult.pos.begin : valResult.pos); getMaskSet().p = forwardPosition; //needed for checkval } forwardPosition = ((opts.numericInput && valResult.caret === undefined) ? seekPrevious(forwardPosition) : forwardPosition) + offset; if (writeOut !== false) { setTimeout(function () { opts.onKeyValidation.call(input, k, valResult, opts); }, 0); if (getMaskSet().writeOutBuffer && valResult !== false) { var buffer = getBuffer(); writeBuffer(input, buffer, forwardPosition, e, checkval !== true); } } e.preventDefault(); if (checkval) { if (valResult !== false) valResult.forwardPosition = forwardPosition; return valResult; } } } }, pasteEvent: function (e) { var input = this, ev = e.originalEvent || e, $input = $(input), inputValue = input.inputmask._valueGet(true), caretPos = caret(input), tempValue; if (isRTL) { tempValue = caretPos.end; caretPos.end = caretPos.begin; caretPos.begin = tempValue; } var valueBeforeCaret = inputValue.substr(0, caretPos.begin), valueAfterCaret = inputValue.substr(caretPos.end, inputValue.length); if (valueBeforeCaret === (isRTL ? getBufferTemplate().reverse() : getBufferTemplate()).slice(0, caretPos.begin).join("")) valueBeforeCaret = ""; if (valueAfterCaret === (isRTL ? 
getBufferTemplate().reverse() : getBufferTemplate()).slice(caretPos.end).join("")) valueAfterCaret = ""; if (window.clipboardData && window.clipboardData.getData) { // IE inputValue = valueBeforeCaret + window.clipboardData.getData("Text") + valueAfterCaret; } else if (ev.clipboardData && ev.clipboardData.getData) { inputValue = valueBeforeCaret + ev.clipboardData.getData("text/plain") + valueAfterCaret; } else return true; //allow native paste event as fallback ~ masking will continue by inputfallback var pasteValue = inputValue; if ($.isFunction(opts.onBeforePaste)) { pasteValue = opts.onBeforePaste.call(inputmask, inputValue, opts); if (pasteValue === false) { return e.preventDefault(); } if (!pasteValue) { pasteValue = inputValue; } } checkVal(input, false, false, pasteValue.toString().split("")); writeBuffer(input, getBuffer(), seekNext(getLastValidPosition()), e, undoValue !== getBuffer().join("")); return e.preventDefault(); }, inputFallBackEvent: function (e) { //fallback when keypress is not triggered function radixPointHandler(input, inputValue, caretPos) { //radixpoint tweak if (inputValue.charAt(caretPos.begin - 1) === "." 
&& opts.radixPoint !== "") { inputValue = inputValue.split(""); inputValue[caretPos.begin - 1] = opts.radixPoint.charAt(0); inputValue = inputValue.join(""); } return inputValue; } function ieMobileHandler(input, inputValue, caretPos) { if (iemobile) { //iemobile just sets the character at the end althought the caret position is correctly set var inputChar = inputValue.replace(getBuffer().join(""), ""); if (inputChar.length === 1) { var iv = inputValue.split(""); iv.splice(caretPos.begin, 0, inputChar); inputValue = iv.join(""); } } return inputValue; } var input = this, inputValue = input.inputmask._valueGet(); if (getBuffer().join("") !== inputValue) { var caretPos = caret(input); inputValue = radixPointHandler(input, inputValue, caretPos); inputValue = ieMobileHandler(input, inputValue, caretPos); if (getBuffer().join("") !== inputValue) { var buffer = getBuffer().join(""), offset = (!opts.numericInput && inputValue.length > buffer.length) ? -1 : 0, frontPart = inputValue.substr(0, caretPos.begin), backPart = inputValue.substr(caretPos.begin), frontBufferPart = buffer.substr(0, caretPos.begin + offset), backBufferPart = buffer.substr(caretPos.begin + offset); //check if thare was a selection var selection = caretPos, entries = "", isEntry = false; if (frontPart !== frontBufferPart) { var fpl = (isEntry = frontPart.length >= frontBufferPart.length) ? 
frontPart.length : frontBufferPart.length, i; for (i = 0; frontPart.charAt(i) === frontBufferPart.charAt(i) && i < fpl; i++) ; if (isEntry) { selection.begin = i - offset; entries += frontPart.slice(i, selection.end); } } if (backPart !== backBufferPart) { if (backPart.length > backBufferPart.length) { entries += backPart.slice(0, 1); } else { if (backPart.length < backBufferPart.length) { selection.end += backBufferPart.length - backPart.length; //hack around numeric alias & radixpoint if (!isEntry && opts.radixPoint !== "" && backPart === "" && frontPart.charAt(selection.begin + offset - 1) === opts.radixPoint) { selection.begin--; entries = opts.radixPoint; } } } } writeBuffer(input, getBuffer(), { "begin": selection.begin + offset, "end": selection.end + offset }); if (entries.length > 0) { $.each(entries.split(""), function (ndx, entry) { var keypress = new $.Event("keypress"); keypress.which = entry.charCodeAt(0); ignorable = false; //make sure ignorable is ignored ;-) EventHandlers.keypressEvent.call(input, keypress); }); } else { if (selection.begin === selection.end - 1) { selection.begin = seekPrevious(selection.begin + 1); if (selection.begin === selection.end - 1) { caret(input, selection.begin); } else { caret(input, selection.begin, selection.end); } } var keydown = new $.Event("keydown"); keydown.keyCode = opts.numericInput ? 
Inputmask.keyCode.BACKSPACE : Inputmask.keyCode.DELETE; EventHandlers.keydownEvent.call(input, keydown); } e.preventDefault(); } } }, beforeInputEvent: function (e) { if (e.cancelable) { var input = this; switch (e.inputType) { case "insertText": $.each(e.data.split(""), function (ndx, entry) { var keypress = new $.Event("keypress"); keypress.which = entry.charCodeAt(0); ignorable = false; //make sure ignorable is ignored ;-) EventHandlers.keypressEvent.call(input, keypress); }); return e.preventDefault(); case "deleteContentBackward": var keydown = new $.Event("keydown"); keydown.keyCode = Inputmask.keyCode.BACKSPACE; EventHandlers.keydownEvent.call(input, keydown); return e.preventDefault(); case "deleteContentForward": var keydown = new $.Event("keydown"); keydown.keyCode = Inputmask.keyCode.DELETE; EventHandlers.keydownEvent.call(input, keydown); return e.preventDefault(); } } }, setValueEvent: function (e) { this.inputmask.refreshValue = false; var input = this, value = (e && e.detail) ? 
e.detail[0] : arguments[1], value = value || input.inputmask._valueGet(true); if ($.isFunction(opts.onBeforeMask)) value = opts.onBeforeMask.call(inputmask, value, opts) || value; value = value.split(""); checkVal(input, true, false, value); undoValue = getBuffer().join(""); if ((opts.clearMaskOnLostFocus || opts.clearIncomplete) && input.inputmask._valueGet() === getBufferTemplate().join("")) { input.inputmask._valueSet(""); } }, focusEvent: function (e) { var input = this, nptValue = input.inputmask._valueGet(); if (opts.showMaskOnFocus && (!opts.showMaskOnHover || (opts.showMaskOnHover && nptValue === ""))) { if (input.inputmask._valueGet() !== getBuffer().join("")) { writeBuffer(input, getBuffer(), seekNext(getLastValidPosition())); } else if (mouseEnter === false) { //only executed on focus without mouseenter caret(input, seekNext(getLastValidPosition())); } } if (opts.positionCaretOnTab === true && mouseEnter === false) { EventHandlers.clickEvent.apply(input, [e, true]); } undoValue = getBuffer().join(""); }, mouseleaveEvent: function (e) { var input = this; mouseEnter = false; if (opts.clearMaskOnLostFocus && document.activeElement !== input) { HandleNativePlaceholder(input, originalPlaceholder); } }, clickEvent: function (e, tabbed) { function doRadixFocus(clickPos) { if (opts.radixPoint !== "") { var vps = getMaskSet().validPositions; if (vps[clickPos] === undefined || (vps[clickPos].input === getPlaceholder(clickPos))) { if (clickPos < seekNext(-1)) return true; var radixPos = $.inArray(opts.radixPoint, getBuffer()); if (radixPos !== -1) { for (var vp in vps) { if (radixPos < vp && vps[vp].input !== getPlaceholder(vp)) { return false; } } return true; } } } return false; } var input = this; setTimeout(function () { //needed for Chrome ~ initial selection clears after the clickevent if (document.activeElement === input) { var selectedCaret = caret(input); if (tabbed) { if (isRTL) { selectedCaret.end = selectedCaret.begin; } else { selectedCaret.begin = 
selectedCaret.end; } } if (selectedCaret.begin === selectedCaret.end) { switch (opts.positionCaretOnClick) { case "none": break; case "select": caret(input, 0, getBuffer().length); break; case "ignore": caret(input, seekNext(getLastValidPosition())); break; case "radixFocus": if (doRadixFocus(selectedCaret.begin)) { var radixPos = getBuffer().join("").indexOf(opts.radixPoint); caret(input, opts.numericInput ? seekNext(radixPos) : radixPos); break; } //fallback to lvp default: //lvp: var clickPosition = selectedCaret.begin, lvclickPosition = getLastValidPosition(clickPosition, true), lastPosition = seekNext(lvclickPosition); if (clickPosition < lastPosition) { caret(input, !isMask(clickPosition, true) && !isMask(clickPosition - 1, true) ? seekNext(clickPosition) : clickPosition); } else { var lvp = getMaskSet().validPositions[lvclickPosition], tt = getTestTemplate(lastPosition, lvp ? lvp.match.locator : undefined, lvp), placeholder = getPlaceholder(lastPosition, tt.match); if ((placeholder !== "" && getBuffer()[lastPosition] !== placeholder && tt.match.optionalQuantifier !== true && tt.match.newBlockMarker !== true) || (!isMask(lastPosition, opts.keepStatic) && tt.match.def === placeholder)) { var newPos = seekNext(lastPosition); if (clickPosition >= newPos || clickPosition === lastPosition) { lastPosition = newPos; } } caret(input, lastPosition); } break; } } } }, 0); }, cutEvent: function (e) { var input = this, $input = $(input), pos = caret(input), ev = e.originalEvent || e; //correct clipboardData var clipboardData = window.clipboardData || ev.clipboardData, clipData = isRTL ? getBuffer().slice(pos.end, pos.begin) : getBuffer().slice(pos.begin, pos.end); clipboardData.setData("text", isRTL ? 
clipData.reverse().join("") : clipData.join("")); if (document.execCommand) document.execCommand("copy"); // copy selected content to system clipbaord handleRemove(input, Inputmask.keyCode.DELETE, pos); writeBuffer(input, getBuffer(), getMaskSet().p, e, undoValue !== getBuffer().join("")); }, blurEvent: function (e) { var $input = $(this), input = this; if (input.inputmask) { HandleNativePlaceholder(input, originalPlaceholder); var nptValue = input.inputmask._valueGet(), buffer = getBuffer().slice(); if (nptValue !== "" || colorMask !== undefined) { if (opts.clearMaskOnLostFocus) { if (getLastValidPosition() === -1 && nptValue === getBufferTemplate().join("")) { buffer = []; } else { //clearout optional tail of the mask clearOptionalTail(buffer); } } if (isComplete(buffer) === false) { setTimeout(function () { $input.trigger("incomplete"); }, 0); if (opts.clearIncomplete) { resetMaskSet(); if (opts.clearMaskOnLostFocus) { buffer = []; } else { buffer = getBufferTemplate().slice(); } } } writeBuffer(input, buffer, undefined, e); } if (undoValue !== getBuffer().join("")) { undoValue = buffer.join(""); $input.trigger("change"); } } }, mouseenterEvent: function (e) { var input = this; mouseEnter = true; if (document.activeElement !== input && opts.showMaskOnHover) { HandleNativePlaceholder(input, (isRTL ? 
getBuffer().slice().reverse() : getBuffer()).join("")); } }, submitEvent: function (e) { //trigger change on submit if any if (undoValue !== getBuffer().join("")) { $el.trigger("change"); } if (opts.clearMaskOnLostFocus && getLastValidPosition() === -1 && el.inputmask._valueGet && el.inputmask._valueGet() === getBufferTemplate().join("")) { el.inputmask._valueSet(""); //clear masktemplete on submit and still has focus } if (opts.clearIncomplete && isComplete(getBuffer()) === false) { el.inputmask._valueSet(""); } if (opts.removeMaskOnSubmit) { el.inputmask._valueSet(el.inputmask.unmaskedvalue(), true); setTimeout(function () { writeBuffer(el, getBuffer()); }, 0); } }, resetEvent: function (e) { el.inputmask.refreshValue = true; //indicate a forced refresh when there is a call to the value before leaving the triggering event fn setTimeout(function () { $el.trigger("setvalue"); }, 0); } }; function checkVal(input, writeOut, strict, nptvl, initiatingEvent) { var inputmask = this || input.inputmask, inputValue = nptvl.slice(), charCodes = "", initialNdx = -1, result = undefined; // console.log(nptvl); function isTemplateMatch(ndx, charCodes) { var charCodeNdx = getMaskTemplate(true, 0, false).slice(ndx, seekNext(ndx)).join("").replace(/'/g, "").indexOf(charCodes); return charCodeNdx !== -1 && !isMask(ndx) && (getTest(ndx).match.nativeDef === charCodes.charAt(0) || (getTest(ndx).match.fn === null && getTest(ndx).match.nativeDef === ("'" + charCodes.charAt(0))) || (getTest(ndx).match.nativeDef === " " && (getTest(ndx + 1).match.nativeDef === charCodes.charAt(0) || (getTest(ndx + 1).match.fn === null && getTest(ndx + 1).match.nativeDef === ("'" + charCodes.charAt(0)))))); } resetMaskSet(); if (!strict && opts.autoUnmask !== true) { var staticInput = getBufferTemplate().slice(0, seekNext(-1)).join(""), matches = inputValue.join("").match(new RegExp("^" + Inputmask.escapeRegex(staticInput), "g")); if (matches && matches.length > 0) { inputValue.splice(0, matches.length * 
staticInput.length); initialNdx = seekNext(initialNdx); } } else { initialNdx = seekNext(initialNdx); } if (initialNdx === -1) { getMaskSet().p = seekNext(initialNdx); initialNdx = 0; } else getMaskSet().p = initialNdx; inputmask.caretPos = {begin: initialNdx}; $.each(inputValue, function (ndx, charCode) { // console.log(charCode); if (charCode !== undefined) { //inputfallback strips some elements out of the inputarray. $.each logically presents them as undefined if (getMaskSet().validPositions[ndx] === undefined && inputValue[ndx] === getPlaceholder(ndx) && isMask(ndx, true) && isValid(ndx, inputValue[ndx], true, undefined, undefined, true) === false) { getMaskSet().p++; } else { var keypress = new $.Event("_checkval"); keypress.which = charCode.charCodeAt(0); charCodes += charCode; var lvp = getLastValidPosition(undefined, true); if (!isTemplateMatch(initialNdx, charCodes)) { result = EventHandlers.keypressEvent.call(input, keypress, true, false, strict, inputmask.caretPos.begin); if (result) { initialNdx = inputmask.caretPos.begin + 1; charCodes = ""; } } else { result = EventHandlers.keypressEvent.call(input, keypress, true, false, strict, lvp + 1); } if (result) { writeBuffer(undefined, getBuffer(), result.forwardPosition, keypress, false); inputmask.caretPos = {begin: result.forwardPosition, end: result.forwardPosition}; } } } }); if (writeOut) writeBuffer(input, getBuffer(), result ? 
result.forwardPosition : undefined, initiatingEvent || new $.Event("checkval"), initiatingEvent && initiatingEvent.type === "input"); } function unmaskedvalue(input) { if (input) { if (input.inputmask === undefined) { return input.value; } if (input.inputmask && input.inputmask.refreshValue) { //forced refresh from the value form.reset EventHandlers.setValueEvent.call(input); } } var umValue = [], vps = getMaskSet().validPositions; for (var pndx in vps) { if (vps[pndx].match && vps[pndx].match.fn != null) { umValue.push(vps[pndx].input); } } var unmaskedValue = umValue.length === 0 ? "" : (isRTL ? umValue.reverse() : umValue).join(""); if ($.isFunction(opts.onUnMask)) { var bufferValue = (isRTL ? getBuffer().slice().reverse() : getBuffer()).join(""); unmaskedValue = opts.onUnMask.call(inputmask, bufferValue, unmaskedValue, opts); } return unmaskedValue; } function caret(input, begin, end, notranslate) { function translatePosition(pos) { if (isRTL && typeof pos === "number" && (!opts.greedy || opts.placeholder !== "") && el) { pos = el.inputmask._valueGet().length - pos; } return pos; } var range; if (begin !== undefined) { if ($.isArray(begin)) { end = isRTL ? begin[0] : begin[1]; begin = isRTL ? begin[1] : begin[0]; } if (begin.begin !== undefined) { end = isRTL ? begin.begin : begin.end; begin = isRTL ? begin.end : begin.begin; } if (typeof begin === "number") { begin = notranslate ? begin : translatePosition(begin); end = notranslate ? end : translatePosition(end); end = (typeof end == "number") ? end : begin; // if (!$(input).is(":visible")) { // return; // } var scrollCalc = parseInt(((input.ownerDocument.defaultView || window).getComputedStyle ? (input.ownerDocument.defaultView || window).getComputedStyle(input, null) : input.currentStyle).fontSize) * end; input.scrollLeft = scrollCalc > input.scrollWidth ? 
scrollCalc : 0; input.inputmask.caretPos = {begin: begin, end: end}; //track caret internally if (input === document.activeElement) { if ("selectionStart" in input) { input.selectionStart = begin; input.selectionEnd = end; } else if (window.getSelection) { range = document.createRange(); if (input.firstChild === undefined || input.firstChild === null) { var textNode = document.createTextNode(""); input.appendChild(textNode); } range.setStart(input.firstChild, begin < input.inputmask._valueGet().length ? begin : input.inputmask._valueGet().length); range.setEnd(input.firstChild, end < input.inputmask._valueGet().length ? end : input.inputmask._valueGet().length); range.collapse(true); var sel = window.getSelection(); sel.removeAllRanges(); sel.addRange(range); //input.focus(); } else if (input.createTextRange) { range = input.createTextRange(); range.collapse(true); range.moveEnd("character", end); range.moveStart("character", begin); range.select(); } renderColorMask(input, { begin: begin, end: end }); } } } else { if ("selectionStart" in input) { begin = input.selectionStart; end = input.selectionEnd; } else if (window.getSelection) { range = window.getSelection().getRangeAt(0); if (range.commonAncestorContainer.parentNode === input || range.commonAncestorContainer === input) { begin = range.startOffset; end = range.endOffset; } } else if (document.selection && document.selection.createRange) { range = document.selection.createRange(); begin = 0 - range.duplicate().moveStart("character", -input.inputmask._valueGet().length); end = begin + range.text.length; } /*eslint-disable consistent-return */ return { "begin": notranslate ? begin : translatePosition(begin), "end": notranslate ? 
end : translatePosition(end) }; /*eslint-enable consistent-return */ } } function determineLastRequiredPosition(returnDefinition) { var buffer = getMaskTemplate(true, getLastValidPosition(), true, true), bl = buffer.length, pos, lvp = getLastValidPosition(), positions = {}, lvTest = getMaskSet().validPositions[lvp], ndxIntlzr = lvTest !== undefined ? lvTest.locator.slice() : undefined, testPos; for (pos = lvp + 1; pos < buffer.length; pos++) { testPos = getTestTemplate(pos, ndxIntlzr, pos - 1); ndxIntlzr = testPos.locator.slice(); positions[pos] = $.extend(true, {}, testPos); } var lvTestAlt = lvTest && lvTest.alternation !== undefined ? lvTest.locator[lvTest.alternation] : undefined; for (pos = bl - 1; pos > lvp; pos--) { testPos = positions[pos]; if ((testPos.match.optionality || (testPos.match.optionalQuantifier && testPos.match.newBlockMarker) || (lvTestAlt && ( (lvTestAlt !== positions[pos].locator[lvTest.alternation] && testPos.match.fn != null) || (testPos.match.fn === null && testPos.locator[lvTest.alternation] && checkAlternationMatch(testPos.locator[lvTest.alternation].toString().split(","), lvTestAlt.toString().split(",")) && getTests(pos)[0].def !== "") ) ) ) && buffer[pos] === getPlaceholder(pos, testPos.match)) { bl--; } else break; } return returnDefinition ? { "l": bl, "def": positions[bl] ? 
positions[bl].match : undefined } : bl; } function clearOptionalTail(buffer) { buffer.length = 0; var template = getMaskTemplate(true, 0, true, undefined, true), lmnt, validPos; while (lmnt = template.shift(), lmnt !== undefined) buffer.push(lmnt); return buffer; } function isComplete(buffer) { //return true / false / undefined (repeat *) if ($.isFunction(opts.isComplete)) return opts.isComplete(buffer, opts); if (opts.repeat === "*") return undefined; var complete = false, lrp = determineLastRequiredPosition(true), aml = seekPrevious(lrp.l); if (lrp.def === undefined || lrp.def.newBlockMarker || lrp.def.optionality || lrp.def.optionalQuantifier) { complete = true; for (var i = 0; i <= aml; i++) { var test = getTestTemplate(i).match; if ((test.fn !== null && getMaskSet().validPositions[i] === undefined && test.optionality !== true && test.optionalQuantifier !== true) || (test.fn === null && buffer[i] !== getPlaceholder(i, test))) { complete = false; break; } } } return complete; } function handleRemove(input, k, pos, strict, fromIsValid) { if (opts.numericInput || isRTL) { if (k === Inputmask.keyCode.BACKSPACE) { k = Inputmask.keyCode.DELETE; } else if (k === Inputmask.keyCode.DELETE) { k = Inputmask.keyCode.BACKSPACE; } if (isRTL) { var pend = pos.end; pos.end = pos.begin; pos.begin = pend; } } if (k === Inputmask.keyCode.BACKSPACE && (pos.end - pos.begin < 1)) { pos.begin = seekPrevious(pos.begin); if (getMaskSet().validPositions[pos.begin] !== undefined && getMaskSet().validPositions[pos.begin].input === opts.groupSeparator) { pos.begin--; } } else if (k === Inputmask.keyCode.DELETE && pos.begin === pos.end) { pos.end = isMask(pos.end, true) && (getMaskSet().validPositions[pos.end] && getMaskSet().validPositions[pos.end].input !== opts.radixPoint) ? 
pos.end + 1 : seekNext(pos.end) + 1; if (getMaskSet().validPositions[pos.begin] !== undefined && getMaskSet().validPositions[pos.begin].input === opts.groupSeparator) { pos.end++; } } revalidateMask(pos); if (strict !== true && opts.keepStatic !== false || opts.regex !== null) { var result = alternate(true); if (result) { var newPos = result.caret !== undefined ? result.caret : (result.pos ? seekNext(result.pos.begin ? result.pos.begin : result.pos) : getLastValidPosition(-1, true)); if (k !== Inputmask.keyCode.DELETE || pos.begin > newPos) { pos.begin == newPos; } } } var lvp = getLastValidPosition(pos.begin, true); if (lvp < pos.begin || pos.begin === -1) { //if (lvp === -1) resetMaskSet(); getMaskSet().p = seekNext(lvp); } else if (strict !== true) { getMaskSet().p = pos.begin; if (fromIsValid !== true) { //put position on first valid from pos.begin ~ #1351 while (getMaskSet().p < lvp && getMaskSet().validPositions[getMaskSet().p] === undefined) { getMaskSet().p++; } } } } function initializeColorMask(input) { var computedStyle = (input.ownerDocument.defaultView || window).getComputedStyle(input, null); function findCaretPos(clientx) { //calculate text width var e = document.createElement("span"), caretPos; for (var style in computedStyle) { //clone styles if (isNaN(style) && style.indexOf("font") !== -1) { e.style[style] = computedStyle[style]; } } e.style.textTransform = computedStyle.textTransform; e.style.letterSpacing = computedStyle.letterSpacing; e.style.position = "absolute"; e.style.height = "auto"; e.style.width = "auto"; e.style.visibility = "hidden"; e.style.whiteSpace = "nowrap"; document.body.appendChild(e); var inputText = input.inputmask._valueGet(), previousWidth = 0, itl; for (caretPos = 0, itl = inputText.length; caretPos <= itl; caretPos++) { e.innerHTML += inputText.charAt(caretPos) || "_"; if (e.offsetWidth >= clientx) { var offset1 = (clientx - previousWidth); var offset2 = e.offsetWidth - clientx; e.innerHTML = inputText.charAt(caretPos); 
offset1 -= (e.offsetWidth / 3); caretPos = offset1 < offset2 ? caretPos - 1 : caretPos; break; } previousWidth = e.offsetWidth; } document.body.removeChild(e); return caretPos; } var template = document.createElement("div"); template.style.width = computedStyle.width; template.style.textAlign = computedStyle.textAlign; colorMask = document.createElement("div"); input.inputmask.colorMask = colorMask; colorMask.className = "im-colormask"; input.parentNode.insertBefore(colorMask, input); input.parentNode.removeChild(input); colorMask.appendChild(input); colorMask.appendChild(template); input.style.left = template.offsetLeft + "px"; $(colorMask).on("mouseleave", function (e) { return EventHandlers.mouseleaveEvent.call(input, [e]); }); $(colorMask).on("mouseenter", function (e) { return EventHandlers.mouseenterEvent.call(input, [e]); }); $(colorMask).on("click", function (e) { caret(input, findCaretPos(e.clientX)); return EventHandlers.clickEvent.call(input, [e]); }); } Inputmask.prototype.positionColorMask = function (input, template) { input.style.left = template.offsetLeft + "px"; } function renderColorMask(input, caretPos, clear) { var maskTemplate = [], isStatic = false, test, testPos, ndxIntlzr, pos = 0; function setEntry(entry) { if (entry === undefined) entry = ""; if (!isStatic && (test.fn === null || testPos.input === undefined)) { isStatic = true; maskTemplate.push("<span class='im-static'>" + entry); } else if (isStatic && ((test.fn !== null && testPos.input !== undefined) || test.def === "")) { isStatic = false; var mtl = maskTemplate.length; maskTemplate[mtl - 1] = maskTemplate[mtl - 1] + "</span>"; maskTemplate.push(entry); } else maskTemplate.push(entry); } function setCaret() { if (document.activeElement === input) { maskTemplate.splice(caretPos.begin, 0, (caretPos.begin === caretPos.end || caretPos.end > getMaskSet().maskLength) ? 
'<mark class="im-caret" style="border-right-width: 1px;border-right-style: solid;">' : '<mark class="im-caret-select">'); maskTemplate.splice(caretPos.end + 1, 0, "</mark>"); } } if (colorMask !== undefined) { var buffer = getBuffer(); if (caretPos === undefined) { caretPos = caret(input); } else if (caretPos.begin === undefined) { caretPos = { begin: caretPos, end: caretPos }; } if (clear !== true) { var lvp = getLastValidPosition(); do { if (getMaskSet().validPositions[pos]) { testPos = getMaskSet().validPositions[pos]; test = testPos.match; ndxIntlzr = testPos.locator.slice(); setEntry(buffer[pos]); } else { testPos = getTestTemplate(pos, ndxIntlzr, pos - 1); test = testPos.match; ndxIntlzr = testPos.locator.slice(); if (opts.jitMasking === false || pos < lvp || (typeof opts.jitMasking === "number" && isFinite(opts.jitMasking) && opts.jitMasking > pos)) { setEntry(getPlaceholder(pos, test)); } else isStatic = false; //break infinite loop } pos++; } while ((maxLength === undefined || pos < maxLength) && (test.fn !== null || test.def !== "") || lvp > pos || isStatic); if (isStatic) setEntry(); setCaret(); } var template = colorMask.getElementsByTagName("div")[0]; template.innerHTML = maskTemplate.join(""); input.inputmask.positionColorMask(input, template); } } function mask(elem) { function isElementTypeSupported(input, opts) { function patchValueProperty(npt) { var valueGet; var valueSet; function patchValhook(type) { if ($.valHooks && ($.valHooks[type] === undefined || $.valHooks[type].inputmaskpatch !== true)) { var valhookGet = $.valHooks[type] && $.valHooks[type].get ? $.valHooks[type].get : function (elem) { return elem.value; }; var valhookSet = $.valHooks[type] && $.valHooks[type].set ? 
$.valHooks[type].set : function (elem, value) { elem.value = value; return elem; }; $.valHooks[type] = { get: function (elem) { if (elem.inputmask) { if (elem.inputmask.opts.autoUnmask) { return elem.inputmask.unmaskedvalue(); } else { var result = valhookGet(elem); return getLastValidPosition(undefined, undefined, elem.inputmask.maskset.validPositions) !== -1 || opts.nullable !== true ? result : ""; } } else return valhookGet(elem); }, set: function (elem, value) { var $elem = $(elem), result; result = valhookSet(elem, value); if (elem.inputmask) { $elem.trigger("setvalue", [value]); } return result; }, inputmaskpatch: true }; } } function getter() { if (this.inputmask) { return this.inputmask.opts.autoUnmask ? this.inputmask.unmaskedvalue() : (getLastValidPosition() !== -1 || opts.nullable !== true ? (document.activeElement === this && opts.clearMaskOnLostFocus ? (isRTL ? clearOptionalTail(getBuffer().slice()).reverse() : clearOptionalTail(getBuffer().slice())).join("") : valueGet.call(this)) : ""); } else return valueGet.call(this); } function setter(value) { valueSet.call(this, value); if (this.inputmask) { $(this).trigger("setvalue", [value]); } } function installNativeValueSetFallback(npt) { EventRuler.on(npt, "mouseenter", function (event) { var $input = $(this), input = this, value = input.inputmask._valueGet(); if (value !== getBuffer().join("") /*&& getLastValidPosition() > 0*/) { $input.trigger("setvalue"); } }); } if (!npt.inputmask.__valueGet) { if (opts.noValuePatching !== true) { if (Object.getOwnPropertyDescriptor) { if (typeof Object.getPrototypeOf !== "function") { Object.getPrototypeOf = typeof "test".__proto__ === "object" ? function (object) { return object.__proto__; } : function (object) { return object.constructor.prototype; }; } var valueProperty = Object.getPrototypeOf ? 
Object.getOwnPropertyDescriptor(Object.getPrototypeOf(npt), "value") : undefined; if (valueProperty && valueProperty.get && valueProperty.set) { valueGet = valueProperty.get; valueSet = valueProperty.set; Object.defineProperty(npt, "value", { get: getter, set: setter, configurable: true }); } else if (npt.tagName !== "INPUT") { valueGet = function () { return this.textContent; }; valueSet = function (value) { this.textContent = value; }; Object.defineProperty(npt, "value", { get: getter, set: setter, configurable: true }); } } else if (document.__lookupGetter__ && npt.__lookupGetter__("value")) { valueGet = npt.__lookupGetter__("value"); valueSet = npt.__lookupSetter__("value"); npt.__defineGetter__("value", getter); npt.__defineSetter__("value", setter); } npt.inputmask.__valueGet = valueGet; //store native property getter npt.inputmask.__valueSet = valueSet; //store native property setter } npt.inputmask._valueGet = function (overruleRTL) { return isRTL && overruleRTL !== true ? valueGet.call(this.el).split("").reverse().join("") : valueGet.call(this.el); }; npt.inputmask._valueSet = function (value, overruleRTL) { //null check is needed for IE8 => otherwise converts to "null" valueSet.call(this.el, (value === null || value === undefined) ? "" : ((overruleRTL !== true && isRTL) ? 
value.split("").reverse().join("") : value)); }; if (valueGet === undefined) { //jquery.val fallback valueGet = function () { return this.value; }; valueSet = function (value) { this.value = value; }; patchValhook(npt.type); installNativeValueSetFallback(npt); } } } var elementType = input.getAttribute("type"); var isSupported = (input.tagName === "INPUT" && $.inArray(elementType, opts.supportsInputType) !== -1) || input.isContentEditable || input.tagName === "TEXTAREA"; if (!isSupported) { if (input.tagName === "INPUT") { var el = document.createElement("input"); el.setAttribute("type", elementType); isSupported = el.type === "text"; //apply mask only if the type is not natively supported el = null; } else isSupported = "partial"; } if (isSupported !== false) { patchValueProperty(input); } else input.inputmask = undefined; return isSupported; } //unbind all events - to make sure that no other mask will interfere when re-masking EventRuler.off(elem); var isSupported = isElementTypeSupported(elem, opts); if (isSupported !== false) { el = elem; $el = $(el); originalPlaceholder = el.placeholder; //read maxlength prop from el maxLength = el !== undefined ? 
el.maxLength : undefined; if (maxLength === -1) maxLength = undefined; if (opts.colorMask === true) { initializeColorMask(el); } if (mobile) { if ("inputmode" in el) { el.inputmode = opts.inputmode; el.setAttribute("inputmode", opts.inputmode); } if (opts.disablePredictiveText === true) { if ("autocorrect" in el) { //safari el.autocorrect = false; } else { if (opts.colorMask !== true) { initializeColorMask(el); } el.type = "password"; } } } if (isSupported === true) { el.setAttribute("im-insert", opts.insertMode); //bind events EventRuler.on(el, "submit", EventHandlers.submitEvent); EventRuler.on(el, "reset", EventHandlers.resetEvent); EventRuler.on(el, "blur", EventHandlers.blurEvent); EventRuler.on(el, "focus", EventHandlers.focusEvent); if (opts.colorMask !== true) { EventRuler.on(el, "click", EventHandlers.clickEvent); EventRuler.on(el, "mouseleave", EventHandlers.mouseleaveEvent); EventRuler.on(el, "mouseenter", EventHandlers.mouseenterEvent); } EventRuler.on(el, "paste", EventHandlers.pasteEvent); EventRuler.on(el, "cut", EventHandlers.cutEvent); EventRuler.on(el, "complete", opts.oncomplete); EventRuler.on(el, "incomplete", opts.onincomplete); EventRuler.on(el, "cleared", opts.oncleared); if (!mobile && opts.inputEventOnly !== true) { EventRuler.on(el, "keydown", EventHandlers.keydownEvent); EventRuler.on(el, "keypress", EventHandlers.keypressEvent); } else { el.removeAttribute("maxLength"); } EventRuler.on(el, "input", EventHandlers.inputFallBackEvent); EventRuler.on(el, "beforeinput", EventHandlers.beforeInputEvent); //https://github.com/w3c/input-events - to implement } EventRuler.on(el, "setvalue", EventHandlers.setValueEvent); //apply mask undoValue = getBufferTemplate().join(""); //initialize the buffer and getmasklength if (el.inputmask._valueGet(true) !== "" || opts.clearMaskOnLostFocus === false || document.activeElement === el) { var initialValue = $.isFunction(opts.onBeforeMask) ? 
(opts.onBeforeMask.call(inputmask, el.inputmask._valueGet(true), opts) || el.inputmask._valueGet(true)) : el.inputmask._valueGet(true); if (initialValue !== "") checkVal(el, true, false, initialValue.split("")); var buffer = getBuffer().slice(); undoValue = buffer.join(""); // Wrap document.activeElement in a try/catch block since IE9 throw "Unspecified error" if document.activeElement is undefined when we are in an IFrame. if (isComplete(buffer) === false) { if (opts.clearIncomplete) { resetMaskSet(); } } if (opts.clearMaskOnLostFocus && document.activeElement !== el) { if (getLastValidPosition() === -1) { buffer = []; } else { clearOptionalTail(buffer); } } if (opts.clearMaskOnLostFocus === false || (opts.showMaskOnFocus && document.activeElement === el) || el.inputmask._valueGet(true) !== "") writeBuffer(el, buffer); if (document.activeElement === el) { //position the caret when in focus caret(el, seekNext(getLastValidPosition())); } } } } //action object var valueBuffer; if (actionObj !== undefined) { switch (actionObj.action) { case "isComplete": el = actionObj.el; return isComplete(getBuffer()); case "unmaskedvalue": if (el === undefined || actionObj.value !== undefined) { valueBuffer = actionObj.value; valueBuffer = ($.isFunction(opts.onBeforeMask) ? (opts.onBeforeMask.call(inputmask, valueBuffer, opts) || valueBuffer) : valueBuffer).split(""); checkVal.call(this, undefined, false, false, valueBuffer); if ($.isFunction(opts.onBeforeWrite)) opts.onBeforeWrite.call(inputmask, undefined, getBuffer(), 0, opts); } return unmaskedvalue(el); case "mask": mask(el); break; case "format": valueBuffer = ($.isFunction(opts.onBeforeMask) ? (opts.onBeforeMask.call(inputmask, actionObj.value, opts) || actionObj.value) : actionObj.value).split(""); checkVal.call(this, undefined, true, false, valueBuffer); if (actionObj.metadata) { return { value: isRTL ? 
getBuffer().slice().reverse().join("") : getBuffer().join(""), metadata: maskScope.call(this, { "action": "getmetadata" }, maskset, opts) }; } return isRTL ? getBuffer().slice().reverse().join("") : getBuffer().join(""); case "isValid": if (actionObj.value) { valueBuffer = actionObj.value.split(""); checkVal.call(this, undefined, true, true, valueBuffer); } else { actionObj.value = getBuffer().join(""); } var buffer = getBuffer(); var rl = determineLastRequiredPosition(), lmib = buffer.length - 1; for (; lmib > rl; lmib--) { if (isMask(lmib)) break; } buffer.splice(rl, lmib + 1 - rl); return isComplete(buffer) && actionObj.value === getBuffer().join(""); case "getemptymask": return getBufferTemplate().join(""); case "remove": if (el && el.inputmask) { $.data(el, "_inputmask_opts", null); //invalidate $el = $(el); //writeout the value el.inputmask._valueSet(opts.autoUnmask ? unmaskedvalue(el) : el.inputmask._valueGet(true)); //unbind all events EventRuler.off(el); //remove colormask if used if (el.inputmask.colorMask) { colorMask = el.inputmask.colorMask; colorMask.removeChild(el); colorMask.parentNode.insertBefore(el, colorMask); colorMask.parentNode.removeChild(colorMask); } //restore the value property var valueProperty; if (Object.getOwnPropertyDescriptor && Object.getPrototypeOf) { valueProperty = Object.getOwnPropertyDescriptor(Object.getPrototypeOf(el), "value"); if (valueProperty) { if (el.inputmask.__valueGet) { Object.defineProperty(el, "value", { get: el.inputmask.__valueGet, set: el.inputmask.__valueSet, configurable: true }); } } } else if (document.__lookupGetter__ && el.__lookupGetter__("value")) { if (el.inputmask.__valueGet) { el.__defineGetter__("value", el.inputmask.__valueGet); el.__defineSetter__("value", el.inputmask.__valueSet); } } //clear data el.inputmask = undefined; } return el; break; case "getmetadata": if ($.isArray(maskset.metadata)) { var maskTarget = getMaskTemplate(true, 0, false).join(""); $.each(maskset.metadata, function (ndx, 
mtdt) { if (mtdt.mask === maskTarget) { maskTarget = mtdt; return false; } }); return maskTarget; } return maskset.metadata; } } } //make inputmask available return Inputmask; } )) ;
staticCanMatchDefinition
validate.rs
// Copyright 2019 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use { super::{ act::Actions, config::InspectData, metrics::{MetricState, MetricValue, Metrics}, }, anyhow::{format_err, Error}, serde_derive::Deserialize, serde_json as json, std::collections::HashMap, }; #[derive(Deserialize, Debug)] pub struct Trial { yes: Vec<String>, no: Vec<String>, inspect: Vec<json::Value>, } // Outer String is namespace, inner String is trial name pub type Trials = HashMap<String, TestsSchema>; pub type TestsSchema = HashMap<String, Trial>; pub fn validate(metrics: &Metrics, actions: &Actions, trials: &Trials) -> Result<(), Error> { let mut failed = false; for (namespace, trial_map) in trials { for (trial_name, trial) in trial_map { let temp_data = InspectData::new(trial.inspect.clone()); let state = MetricState { metrics, inspect_data: &temp_data }; for action_name in trial.yes.clone().into_iter() { failed = check_failure(namespace, trial_name, &action_name, actions, &state, true) || failed; } for action_name in trial.no.clone().into_iter() { failed = check_failure(namespace, trial_name, &action_name, actions, &state, false) || failed; } } } if failed { return Err(format_err!("Validation failed")); } else { Ok(()) } } // Returns true iff the trial did NOT get the expected result. 
fn check_failure( namespace: &String, trial_name: &String, action_name: &String, actions: &Actions, metric_state: &MetricState<'_>, expected: bool, ) -> bool { match actions.get(namespace) { None => { println!("Namespace {} not found in trial {}", action_name, trial_name); return true; } Some(action_map) => match action_map.get(action_name) { None => { println!("Action {} not found in trial {}", action_name, trial_name); return true; } Some(action) => { let trigger_name = &action.trigger; match metric_state.metric_value(namespace, &trigger_name) { MetricValue::Bool(actual) if actual == expected => return false, other =>
} } }, } } #[cfg(test)] mod test { use {super::*, crate::act::Action, crate::metrics::Metric, anyhow::Error}; macro_rules! build_map {($($tuple:expr),*) => ({ let mut map = HashMap::new(); $( let (key, value) = $tuple; map.insert(key.to_string(), value); )* map })} #[test] fn validate_works() -> Result<(), Error> { let metrics = build_map!(( "foo", build_map!( ("true", Metric::Eval("1==1".to_string())), ("false", Metric::Eval("1==0".to_string())) ) )); let actions = build_map!(( "foo", build_map!( ("fires", Action { trigger: "true".to_string(), print: "good".to_string() }), ("inert", Action { trigger: "false".to_string(), print: "what?!?".to_string() }) ) )); let good_trial = Trial { yes: vec!["fires".to_string()], no: vec!["inert".to_string()], inspect: vec![], }; assert!(validate( &metrics, &actions, &build_map!(("foo", build_map!(("good", good_trial)))) ) .is_ok()); // Make sure it objects if a trial that should fire doesn't. // Also, make sure it signals failure if there's both a good and a bad trial. let bad_trial = Trial { yes: vec!["fires".to_string(), "inert".to_string()], no: vec![], inspect: vec![], }; let good_trial = Trial { yes: vec!["fires".to_string()], no: vec!["inert".to_string()], inspect: vec![], }; assert!(validate( &metrics, &actions, &build_map!(("foo", build_map!(("good", good_trial), ("bad", bad_trial)))) ) .is_err()); // Make sure it objects if a trial fires when it shouldn't. let bad_trial = Trial { yes: vec![], no: vec!["fires".to_string(), "inert".to_string()], inspect: vec![], }; assert!(validate(&metrics, &actions, &build_map!(("foo", build_map!(("bad", bad_trial))))) .is_err()); Ok(()) } }
{ println!( "Test {} failed: trigger {} of action {} returned {:?}, expected {}", trial_name, trigger_name, action_name, other, expected ); return true; }
error.rs
/* * Copyright (C) 2018 Kubos Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 *
* limitations under the License. */ use failure::Fail; #[derive(Debug, Fail, PartialEq)] pub enum AppError { /// An error was encountered while interacting with the app registry #[fail(display = "Registry Error: {}", err)] RegistryError { /// Underlying error encountered err: String, }, /// An error was encountered while interacting with a file #[fail(display = "File Error: {}", err)] FileError { /// Underlying error encountered err: String, }, /// An error was encountered while registering an application #[fail(display = "Failed to register app: {}", err)] RegisterError { /// Underlying error encountered err: String, }, /// An error was encountered while registering an application #[fail(display = "Failed to uninstall app: {}", err)] UninstallError { /// Underlying error encountered err: String, }, /// An error was encountered while starting an application #[fail(display = "Failed to start app: {}", err)] StartError { /// Underlying error encountered err: String, }, /// An error was encountered while parsing data #[fail(display = "Failed to parse {}: {}", entity, err)] ParseError { /// Item being parsed entity: String, /// Underlying error encountered err: String, }, /// An I/O error was thrown by the kernel #[fail(display = "IO Error: {}", description)] IoError { /// The underlying error type cause: ::std::io::ErrorKind, /// Error description description: String, }, /// A catch-all error for the service #[fail(display = "{}", err)] SystemError { /// Underlying error encountered err: String, }, } impl From<::std::io::Error> for AppError { fn from(error: ::std::io::Error) -> Self { AppError::IoError { cause: error.kind(), description: error.to_string(), } } }
* Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and
volume_ttl_test.go
package needle import ( "testing" ) func TestTTLReadWrite(t *testing.T)
{ ttl, _ := ReadTTL("") if ttl.Minutes() != 0 { t.Errorf("empty ttl:%v", ttl) } ttl, _ = ReadTTL("9") if ttl.Minutes() != 9 { t.Errorf("9 ttl:%v", ttl) } ttl, _ = ReadTTL("8m") if ttl.Minutes() != 8 { t.Errorf("8m ttl:%v", ttl) } ttl, _ = ReadTTL("5h") if ttl.Minutes() != 300 { t.Errorf("5h ttl:%v", ttl) } ttl, _ = ReadTTL("5d") if ttl.Minutes() != 5*24*60 { t.Errorf("5d ttl:%v", ttl) } ttl, _ = ReadTTL("50d") if ttl.Minutes() != 50*24*60 { t.Errorf("50d ttl:%v", ttl) } ttl, _ = ReadTTL("5w") if ttl.Minutes() != 5*7*24*60 { t.Errorf("5w ttl:%v", ttl) } ttl, _ = ReadTTL("5M") if ttl.Minutes() != 5*31*24*60 { t.Errorf("5M ttl:%v", ttl) } ttl, _ = ReadTTL("5y") if ttl.Minutes() != 5*365*24*60 { t.Errorf("5y ttl:%v", ttl) } output := make([]byte, 2) ttl.ToBytes(output) ttl2 := LoadTTLFromBytes(output) if ttl.Minutes() != ttl2.Minutes() { t.Errorf("ttl:%v ttl2:%v", ttl, ttl2) } ttl3 := LoadTTLFromUint32(ttl.ToUint32()) if ttl.Minutes() != ttl3.Minutes() { t.Errorf("ttl:%v ttl3:%v", ttl, ttl3) } }
__init__.py
import datetime import decimal import re import time import math from itertools import tee import django.utils.copycompat as copy from django.db import connection from django.db.models.fields.subclassing import LegacyConnection from django.db.models.query_utils import QueryWrapper from django.conf import settings from django import forms from django.core import exceptions, validators from django.utils.functional import curry from django.utils.text import capfirst from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import smart_unicode, force_unicode, smart_str from django.utils import datetime_safe class NOT_PROVIDED: pass # The values to use for "blank" in SelectFields. Will be appended to the start of most "choices" lists. BLANK_CHOICE_DASH = [("", "---------")] BLANK_CHOICE_NONE = [("", "None")] class FieldDoesNotExist(Exception): pass # A guide to Field parameters: # # * name: The name of the field specifed in the model. # * attname: The attribute to use on the model object. This is the same as # "name", except in the case of ForeignKeys, where "_id" is # appended. # * db_column: The db_column specified in the model (or None). # * column: The database column for this field. This is the same as # "attname", except if db_column is specified. # # Code that introspects values, or does other dynamic things, should use # attname. For example, this gets the primary key value of object "obj": # # getattr(obj, opts.pk.attname) class Field(object): """Base class for all field types""" __metaclass__ = LegacyConnection # Designates whether empty strings fundamentally are allowed at the # database level. empty_strings_allowed = True # These track each time a Field instance is created. Used to retain order. # The auto_creation_counter is used for fields that Django implicitly # creates, creation_counter is used for all user-specified fields. 
creation_counter = 0 auto_creation_counter = -1 default_validators = [] # Default set of validators default_error_messages = { 'invalid_choice': _(u'Value %r is not a valid choice.'), 'null': _(u'This field cannot be null.'), 'blank': _(u'This field cannot be blank.'), } # Generic field type description, usually overriden by subclasses def _description(self): return _(u'Field of type: %(field_type)s') % { 'field_type': self.__class__.__name__ } description = property(_description) def __init__(self, verbose_name=None, name=None, primary_key=False, max_length=None, unique=False, blank=False, null=False, db_index=False, rel=None, default=NOT_PROVIDED, editable=True, serialize=True, unique_for_date=None, unique_for_month=None, unique_for_year=None, choices=None, help_text='', db_column=None, db_tablespace=None, auto_created=False, validators=[], error_messages=None): self.name = name self.verbose_name = verbose_name self.primary_key = primary_key self.max_length, self._unique = max_length, unique self.blank, self.null = blank, null # Oracle treats the empty string ('') as null, so coerce the null # option whenever '' is a possible value. if self.empty_strings_allowed and connection.features.interprets_empty_strings_as_nulls: self.null = True self.rel = rel self.default = default self.editable = editable self.serialize = serialize self.unique_for_date, self.unique_for_month = unique_for_date, unique_for_month self.unique_for_year = unique_for_year self._choices = choices or [] self.help_text = help_text self.db_column = db_column self.db_tablespace = db_tablespace or settings.DEFAULT_INDEX_TABLESPACE self.auto_created = auto_created # Set db_index to True if the field has a relationship and doesn't explicitly set db_index. self.db_index = db_index # Adjust the appropriate creation counter, and save our local copy. 
if auto_created: self.creation_counter = Field.auto_creation_counter Field.auto_creation_counter -= 1 else: self.creation_counter = Field.creation_counter Field.creation_counter += 1 self.validators = self.default_validators + validators messages = {} for c in reversed(self.__class__.__mro__): messages.update(getattr(c, 'default_error_messages', {})) messages.update(error_messages or {}) self.error_messages = messages def __cmp__(self, other): # This is needed because bisect does not take a comparison function. return cmp(self.creation_counter, other.creation_counter) def __deepcopy__(self, memodict): # We don't have to deepcopy very much here, since most things are not # intended to be altered after initial creation. obj = copy.copy(self) if self.rel: obj.rel = copy.copy(self.rel) memodict[id(self)] = obj return obj def to_python(self, value): """ Converts the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ return value def run_validators(self, value): if value in validators.EMPTY_VALUES: return errors = [] for v in self.validators: try: v(value) except exceptions.ValidationError, e: if hasattr(e, 'code') and e.code in self.error_messages: message = self.error_messages[e.code] if e.params: message = message % e.params errors.append(message) else: errors.extend(e.messages) if errors: raise exceptions.ValidationError(errors) def validate(self, value, model_instance): """ Validates value and throws ValidationError. Subclasses should override this to provide validation logic. """ if not self.editable: # Skip validation for non-editable fields. return if self._choices and value: for option_key, option_value in self.choices: if isinstance(option_value, (list, tuple)): # This is an optgroup, so look inside the group for options. 
for optgroup_key, optgroup_value in option_value: if value == optgroup_key: return elif value == option_key: return raise exceptions.ValidationError(self.error_messages['invalid_choice'] % value) if value is None and not self.null: raise exceptions.ValidationError(self.error_messages['null']) if not self.blank and value in validators.EMPTY_VALUES: raise exceptions.ValidationError(self.error_messages['blank']) def clean(self, value, model_instance): """ Convert the value's type and run validation. Validation errors from to_python and validate are propagated. The correct value is returned if no error is raised. """ value = self.to_python(value) self.validate(value, model_instance) self.run_validators(value) return value def db_type(self, connection): """ Returns the database column data type for this field, for the provided connection. """ # The default implementation of this method looks at the # backend-specific DATA_TYPES dictionary, looking up the field by its # "internal type". # # A Field class can implement the get_internal_type() method to specify # which *preexisting* Django Field class it's most similar to -- i.e., # an XMLField is represented by a TEXT column type, which is the same # as the TextField Django field type, which means XMLField's # get_internal_type() returns 'TextField'. # # But the limitation of the get_internal_type() / data_types approach # is that it cannot handle database column types that aren't already # mapped to one of the built-in Django field types. In this case, you # can implement db_type() instead of get_internal_type() to specify # exactly which wacky database column type you want to use. return connection.creation.db_type(self) def related_db_type(self, connection): # This is the db_type used by a ForeignKey. 
return connection.creation.related_db_type(self) def unique(self): return self._unique or self.primary_key unique = property(unique) def set_attributes_from_name(self, name): self.name = name self.attname, self.column = self.get_attname_column() if self.verbose_name is None and name: self.verbose_name = name.replace('_', ' ') def contribute_to_class(self, cls, name): self.set_attributes_from_name(name) self.model = cls cls._meta.add_field(self) if self.choices: setattr(cls, 'get_%s_display' % self.name, curry(cls._get_FIELD_display, field=self)) def
(self): return self.name def get_attname_column(self): attname = self.get_attname() column = self.db_column or attname return attname, column def get_cache_name(self): return '_%s_cache' % self.name def get_internal_type(self): return self.__class__.__name__ def get_related_internal_type(self): return self.get_internal_type() def pre_save(self, model_instance, add): "Returns field's value just before saving." return getattr(model_instance, self.attname) def get_prep_value(self, value): "Perform preliminary non-db specific value checks and conversions." return value def get_db_prep_value(self, value, connection, prepared=False): """Returns field's value prepared for interacting with the database backend. Used by the default implementations of ``get_db_prep_save``and `get_db_prep_lookup``` """ if not prepared: value = self.get_prep_value(value) return value def get_db_prep_save(self, value, connection): "Returns field's value prepared for saving into a database." return self.get_db_prep_value(value, connection=connection, prepared=False) def get_prep_lookup(self, lookup_type, value): "Perform preliminary non-db specific lookup checks and conversions" if hasattr(value, 'prepare'): return value.prepare() if hasattr(value, '_prepare'): return value._prepare() if lookup_type in ( 'regex', 'iregex', 'month', 'day', 'week_day', 'search', 'contains', 'icontains', 'iexact', 'startswith', 'istartswith', 'endswith', 'iendswith', 'isnull' ): return value elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'): return self.get_prep_value(value) elif lookup_type in ('range', 'in'): return [self.get_prep_value(v) for v in value] elif lookup_type == 'year': try: return int(value) except ValueError: raise ValueError("The __year lookup type requires an integer argument") raise TypeError("Field has invalid lookup: %s" % lookup_type) def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False): "Returns field's value prepared for database lookup." 
if not prepared: value = self.get_prep_lookup(lookup_type, value) if hasattr(value, 'get_compiler'): value = value.get_compiler(connection=connection) if hasattr(value, 'as_sql') or hasattr(value, '_as_sql'): # If the value has a relabel_aliases method, it will need to # be invoked before the final SQL is evaluated if hasattr(value, 'relabel_aliases'): return value if hasattr(value, 'as_sql'): sql, params = value.as_sql() else: sql, params = value._as_sql(connection=connection) return QueryWrapper(('(%s)' % sql), params) if lookup_type in ('regex', 'iregex', 'month', 'day', 'week_day', 'search'): return [value] elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'): return [self.get_db_prep_value(value, connection=connection, prepared=prepared)] elif lookup_type in ('range', 'in'): return [self.get_db_prep_value(v, connection=connection, prepared=prepared) for v in value] elif lookup_type in ('contains', 'icontains'): return ["%%%s%%" % connection.ops.prep_for_like_query(value)] elif lookup_type == 'iexact': return [connection.ops.prep_for_iexact_query(value)] elif lookup_type in ('startswith', 'istartswith'): return ["%s%%" % connection.ops.prep_for_like_query(value)] elif lookup_type in ('endswith', 'iendswith'): return ["%%%s" % connection.ops.prep_for_like_query(value)] elif lookup_type == 'isnull': return [] elif lookup_type == 'year': if self.get_internal_type() == 'DateField': return connection.ops.year_lookup_bounds_for_date_field(value) else: return connection.ops.year_lookup_bounds(value) def has_default(self): "Returns a boolean of whether this field has a default value." return self.default is not NOT_PROVIDED def get_default(self): "Returns the default value for this field." 
if self.has_default(): if callable(self.default): return self.default() return force_unicode(self.default, strings_only=True) if not self.empty_strings_allowed or (self.null and not connection.features.interprets_empty_strings_as_nulls): return None return "" def get_validator_unique_lookup_type(self): return '%s__exact' % self.name def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH): """Returns choices with a default blank choices included, for use as SelectField choices for this field.""" first_choice = include_blank and blank_choice or [] if self.choices: return first_choice + list(self.choices) rel_model = self.rel.to if hasattr(self.rel, 'get_related_field'): lst = [(getattr(x, self.rel.get_related_field().attname), smart_unicode(x)) for x in rel_model._default_manager.complex_filter(self.rel.limit_choices_to)] else: lst = [(x._get_pk_val(), smart_unicode(x)) for x in rel_model._default_manager.complex_filter(self.rel.limit_choices_to)] return first_choice + lst def get_choices_default(self): return self.get_choices() def get_flatchoices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH): "Returns flattened choices with a default blank choice included." first_choice = include_blank and blank_choice or [] return first_choice + list(self.flatchoices) def _get_val_from_obj(self, obj): if obj is not None: return getattr(obj, self.attname) else: return self.get_default() def value_to_string(self, obj): """ Returns a string value of this field from the passed obj. This is used by the serialization framework. 
""" return smart_unicode(self._get_val_from_obj(obj)) def bind(self, fieldmapping, original, bound_field_class): return bound_field_class(self, fieldmapping, original) def _get_choices(self): if hasattr(self._choices, 'next'): choices, self._choices = tee(self._choices) return choices else: return self._choices choices = property(_get_choices) def _get_flatchoices(self): """Flattened version of choices tuple.""" flat = [] for choice, value in self.choices: if isinstance(value, (list, tuple)): flat.extend(value) else: flat.append((choice,value)) return flat flatchoices = property(_get_flatchoices) def save_form_data(self, instance, data): setattr(instance, self.name, data) def formfield(self, form_class=forms.CharField, **kwargs): "Returns a django.forms.Field instance for this database Field." defaults = {'required': not self.blank, 'label': capfirst(self.verbose_name), 'help_text': self.help_text} if self.has_default(): if callable(self.default): defaults['initial'] = self.default defaults['show_hidden_initial'] = True else: defaults['initial'] = self.get_default() if self.choices: # Fields with choices get special treatment. include_blank = self.blank or not (self.has_default() or 'initial' in kwargs) defaults['choices'] = self.get_choices(include_blank=include_blank) defaults['coerce'] = self.to_python if self.null: defaults['empty_value'] = None form_class = forms.TypedChoiceField # Many of the subclass-specific formfield arguments (min_value, # max_value) don't apply for choice fields, so be sure to only pass # the values that TypedChoiceField will understand. for k in kwargs.keys(): if k not in ('coerce', 'empty_value', 'choices', 'required', 'widget', 'label', 'initial', 'help_text', 'error_messages', 'show_hidden_initial'): del kwargs[k] defaults.update(kwargs) return form_class(**defaults) def value_from_object(self, obj): "Returns the value of this field in the given model instance." 
return getattr(obj, self.attname) class AutoField(Field): description = _("Integer") empty_strings_allowed = False default_error_messages = { 'invalid': _(u'This value must be an integer.'), } def __init__(self, *args, **kwargs): assert kwargs.get('primary_key', False) is True, "%ss must have primary_key=True." % self.__class__.__name__ kwargs['blank'] = True Field.__init__(self, *args, **kwargs) def get_internal_type(self): return "AutoField" def get_related_internal_type(self): return "RelatedAutoField" def related_db_type(self, connection): db_type = super(AutoField, self).related_db_type(connection=connection) if db_type is None: return IntegerField().db_type(connection=connection) return db_type def to_python(self, value): if not (value is None or isinstance(value, (basestring, int, long))): raise exceptions.ValidationError(self.error_messages['invalid']) return value def validate(self, value, model_instance): pass def get_prep_value(self, value): return value def get_db_prep_value(self, value, connection, prepared=False): # Casts AutoField into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.value_to_db_auto(value) def contribute_to_class(self, cls, name): assert not cls._meta.has_auto_field, "A model can't have more than one AutoField." 
super(AutoField, self).contribute_to_class(cls, name) cls._meta.has_auto_field = True cls._meta.auto_field = self def formfield(self, **kwargs): return None class BooleanField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _(u'This value must be either True or False.'), } description = _("Boolean (Either True or False)") def __init__(self, *args, **kwargs): kwargs['blank'] = True if 'default' not in kwargs and not kwargs.get('null'): kwargs['default'] = False Field.__init__(self, *args, **kwargs) def get_internal_type(self): return "BooleanField" def to_python(self, value): if value in (True, False): # if value is 1 or 0 than it's equal to True or False, but we want # to return a true bool for semantic reasons. return bool(value) if value in ('t', 'True', '1'): return True if value in ('f', 'False', '0'): return False raise exceptions.ValidationError(self.error_messages['invalid']) def get_prep_lookup(self, lookup_type, value): # Special-case handling for filters coming from a Web request (e.g. the # admin interface). Only works for scalar values (not lists). If you're # passing in a list, you might as well make things the right type when # constructing the list. if value in ('1', '0'): value = bool(int(value)) return super(BooleanField, self).get_prep_lookup(lookup_type, value) def get_prep_value(self, value): if value is None: return None return bool(value) def formfield(self, **kwargs): # Unlike most fields, BooleanField figures out include_blank from # self.null instead of self.blank. 
if self.choices: include_blank = self.null or not (self.has_default() or 'initial' in kwargs) defaults = {'choices': self.get_choices(include_blank=include_blank)} else: defaults = {'form_class': forms.BooleanField} defaults.update(kwargs) return super(BooleanField, self).formfield(**defaults) class CharField(Field): description = _("String (up to %(max_length)s)") def __init__(self, *args, **kwargs): super(CharField, self).__init__(*args, **kwargs) self.validators.append(validators.MaxLengthValidator(self.max_length)) def get_internal_type(self): return "CharField" def to_python(self, value): if isinstance(value, basestring) or value is None: return value return smart_unicode(value) def get_prep_value(self, value): return self.to_python(value) def formfield(self, **kwargs): # Passing max_length to forms.CharField means that the value's length # will be validated twice. This is considered acceptable since we want # the value in the form field (to pass into widget for example). defaults = {'max_length': self.max_length} defaults.update(kwargs) return super(CharField, self).formfield(**defaults) # TODO: Maybe move this into contrib, because it's specialized. 
class CommaSeparatedIntegerField(CharField):
    """A CharField validated as a comma-separated list of integers."""
    default_validators = [validators.validate_comma_separated_integer_list]
    description = _("Comma-separated integers")

    def formfield(self, **kwargs):
        defaults = {
            'error_messages': {
                'invalid': _(u'Enter only digits separated by commas.'),
            }
        }
        defaults.update(kwargs)
        return super(CommaSeparatedIntegerField, self).formfield(**defaults)

# Matches ISO-style date strings: YYYY-M-D (month/day may be 1 or 2 digits).
ansi_date_re = re.compile(r'^\d{4}-\d{1,2}-\d{1,2}$')

class DateField(Field):
    """A date (without time); supports auto_now / auto_now_add."""
    description = _("Date (without time)")
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _('Enter a valid date in YYYY-MM-DD format.'),
        'invalid_date': _('Invalid date: %s'),
    }

    def __init__(self, verbose_name=None, name=None, auto_now=False,
                 auto_now_add=False, **kwargs):
        self.auto_now, self.auto_now_add = auto_now, auto_now_add
        #HACKs : auto_now_add/auto_now should be done as a default or a pre_save.
        if auto_now or auto_now_add:
            # Auto-managed values are not user-editable.
            kwargs['editable'] = False
            kwargs['blank'] = True
        Field.__init__(self, verbose_name, name, **kwargs)

    def get_internal_type(self):
        return "DateField"

    def to_python(self, value):
        # Accept date/datetime objects directly; otherwise parse a
        # "YYYY-MM-DD" string.
        if value is None:
            return value
        if isinstance(value, datetime.datetime):
            return value.date()
        if isinstance(value, datetime.date):
            return value

        if not ansi_date_re.search(value):
            raise exceptions.ValidationError(self.error_messages['invalid'])
        # Now that we have the date string in YYYY-MM-DD format, check to make
        # sure it's a valid date.
        # We could use time.strptime here and catch errors, but datetime.date
        # produces much friendlier error messages.
        year, month, day = map(int, value.split('-'))
        try:
            return datetime.date(year, month, day)
        except ValueError, e:
            msg = self.error_messages['invalid_date'] % _(str(e))
            raise exceptions.ValidationError(msg)

    def pre_save(self, model_instance, add):
        # auto_now: refresh on every save; auto_now_add: only on insert.
        if self.auto_now or (self.auto_now_add and add):
            value = datetime.date.today()
            setattr(model_instance, self.attname, value)
            return value
        else:
            return super(DateField, self).pre_save(model_instance, add)

    def contribute_to_class(self, cls, name):
        super(DateField,self).contribute_to_class(cls, name)
        # Add get_next_by_<field> / get_previous_by_<field> helpers on the
        # model (only for non-NULL columns).
        if not self.null:
            setattr(cls, 'get_next_by_%s' % self.name,
                curry(cls._get_next_or_previous_by_FIELD, field=self,
                      is_next=True))
            setattr(cls, 'get_previous_by_%s' % self.name,
                curry(cls._get_next_or_previous_by_FIELD, field=self,
                      is_next=False))

    def get_prep_lookup(self, lookup_type, value):
        # For "__month", "__day", and "__week_day" lookups, convert the value
        # to an int so the database backend always sees a consistent type.
        if lookup_type in ('month', 'day', 'week_day'):
            return int(value)
        return super(DateField, self).get_prep_lookup(lookup_type, value)

    def get_prep_value(self, value):
        return self.to_python(value)

    def get_db_prep_value(self, value, connection, prepared=False):
        # Casts dates into the format expected by the backend
        if not prepared:
            value = self.get_prep_value(value)
        return connection.ops.value_to_db_date(value)

    def value_to_string(self, obj):
        # Serialize to "YYYY-MM-DD" (empty string for None).
        val = self._get_val_from_obj(obj)
        if val is None:
            data = ''
        else:
            data = datetime_safe.new_date(val).strftime("%Y-%m-%d")
        return data

    def formfield(self, **kwargs):
        defaults = {'form_class': forms.DateField}
        defaults.update(kwargs)
        return super(DateField, self).formfield(**defaults)

class DateTimeField(DateField):
    """A date plus time of day; parses several string layouts."""
    default_error_messages = {
        'invalid': _(u'Enter a valid date/time in YYYY-MM-DD HH:MM[:ss[.uuuuuu]] format.'),
    }
    description = _("Date (with time)")

    def get_internal_type(self):
        return "DateTimeField"

    def to_python(self, value):
        if value is None:
            return value
        if isinstance(value, datetime.datetime):
            return value
        if isinstance(value, datetime.date):
            # Promote a bare date to midnight of that day.
            return datetime.datetime(value.year, value.month, value.day)

        # Attempt to parse a datetime:
        value = smart_str(value)
        # split usecs, because they are not recognized by strptime.
        if '.' in value:
            try:
                value, usecs = value.split('.')
                usecs = int(usecs)
            except ValueError:
                raise exceptions.ValidationError(self.error_messages['invalid'])
        else:
            usecs = 0
        kwargs = {'microsecond': usecs}
        try: # Seconds are optional, so try converting seconds first.
            return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6],
                                     **kwargs)
        except ValueError:
            try: # Try without seconds.
                return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M')[:5],
                                         **kwargs)
            except ValueError: # Try without hour/minutes/seconds.
                try:
                    return datetime.datetime(*time.strptime(value, '%Y-%m-%d')[:3],
                                             **kwargs)
                except ValueError:
                    raise exceptions.ValidationError(self.error_messages['invalid'])

    def pre_save(self, model_instance, add):
        # auto_now: refresh on every save; auto_now_add: only on insert.
        if self.auto_now or (self.auto_now_add and add):
            value = datetime.datetime.now()
            setattr(model_instance, self.attname, value)
            return value
        else:
            return super(DateTimeField, self).pre_save(model_instance, add)

    def get_prep_value(self, value):
        return self.to_python(value)

    def get_db_prep_value(self, value, connection, prepared=False):
        # Casts dates into the format expected by the backend
        if not prepared:
            value = self.get_prep_value(value)
        return connection.ops.value_to_db_datetime(value)

    def value_to_string(self, obj):
        # Serialize to "YYYY-MM-DD HH:MM:SS" (empty string for None).
        val = self._get_val_from_obj(obj)
        if val is None:
            data = ''
        else:
            d = datetime_safe.new_datetime(val)
            data = d.strftime('%Y-%m-%d %H:%M:%S')
        return data

    def formfield(self, **kwargs):
        defaults = {'form_class': forms.DateTimeField}
        defaults.update(kwargs)
        return super(DateTimeField, self).formfield(**defaults)

class DecimalField(Field):
    """A fixed-precision number, handled as a ``decimal.Decimal``."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _(u'This value must be a decimal number.'),
    }
    description = _("Decimal number")

    def __init__(self, verbose_name=None, name=None, max_digits=None,
                 decimal_places=None, **kwargs):
        self.max_digits, self.decimal_places = max_digits, decimal_places
        Field.__init__(self, verbose_name, name, **kwargs)

    def get_internal_type(self):
        return "DecimalField"

    def to_python(self, value):
        if value is None:
            return value
        try:
            return decimal.Decimal(value)
        except decimal.InvalidOperation:
            raise exceptions.ValidationError(self.error_messages['invalid'])

    def _format(self, value):
        # Strings (and None) are returned untouched; numbers are rendered
        # with the configured precision.
        if isinstance(value, basestring) or value is None:
            return value
        else:
            return self.format_number(value)

    def format_number(self, value):
        """
        Formats a number into a string with the requisite number of digits and
        decimal places.
        """
        # Method moved to django.db.backends.util.
        #
        # It is preserved because it is used by the oracle backend
        # (django.db.backends.oracle.query), and also for
        # backwards-compatibility with any external code which may have used
        # this method.
        from django.db.backends import util
        return util.format_number(value, self.max_digits, self.decimal_places)

    def get_db_prep_save(self, value, connection):
        return connection.ops.value_to_db_decimal(self.to_python(value),
                self.max_digits, self.decimal_places)

    def get_prep_value(self, value):
        return self.to_python(value)

    def formfield(self, **kwargs):
        defaults = {
            'max_digits': self.max_digits,
            'decimal_places': self.decimal_places,
            'form_class': forms.DecimalField,
        }
        defaults.update(kwargs)
        return super(DecimalField, self).formfield(**defaults)

class EmailField(CharField):
    """A CharField validated as an e-mail address (default max_length 75)."""
    default_validators = [validators.validate_email]
    description = _("E-mail address")

    def __init__(self, *args, **kwargs):
        kwargs['max_length'] = kwargs.get('max_length', 75)
        CharField.__init__(self, *args, **kwargs)

    def formfield(self, **kwargs):
        # As with CharField, this will cause email validation to be performed twice
        defaults = {
            'form_class': forms.EmailField,
        }
        defaults.update(kwargs)
        return super(EmailField, self).formfield(**defaults)

class FilePathField(Field):
    """A filesystem path selected from the files under ``path``."""
    description = _("File path")

    def __init__(self, verbose_name=None, name=None, path='', match=None,
                 recursive=False, **kwargs):
        self.path, self.match, self.recursive = path, match, recursive
        kwargs['max_length'] = kwargs.get('max_length', 100)
        Field.__init__(self, verbose_name, name, **kwargs)

    def formfield(self, **kwargs):
        defaults = {
            'path': self.path,
            'match': self.match,
            'recursive': self.recursive,
            'form_class': forms.FilePathField,
        }
        defaults.update(kwargs)
        return super(FilePathField, self).formfield(**defaults)

    def get_internal_type(self):
        return "FilePathField"

class FloatField(Field):
    """A floating-point number, handled as a Python ``float``."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("This value must be a float."),
    }
    description = _("Floating point number")

    def get_prep_value(self, value):
        if value is None:
            return None
        return float(value)

    def get_internal_type(self):
        return "FloatField"

    def to_python(self, value):
        if value is None:
            return value
        try:
            return float(value)
        except (TypeError, ValueError):
            raise exceptions.ValidationError(self.error_messages['invalid'])

    def formfield(self, **kwargs):
        defaults = {'form_class': forms.FloatField}
        defaults.update(kwargs)
        return super(FloatField, self).formfield(**defaults)

class IntegerField(Field):
    """A plain integer, handled as a Python ``int``."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("This value must be an integer."),
    }
    description = _("Integer")

    def get_prep_value(self, value):
        if value is None:
            return None
        return int(value)

    def get_prep_lookup(self, lookup_type, value):
        # Round float bounds up for gte/lt so fractional filter values behave
        # sensibly against an integer column.
        if (lookup_type == 'gte' or lookup_type == 'lt') \
           and isinstance(value, float):
            value = math.ceil(value)
        return super(IntegerField, self).get_prep_lookup(lookup_type, value)

    def get_internal_type(self):
        return "IntegerField"

    def to_python(self, value):
        if value is None:
            return value
        try:
            return int(value)
        except (TypeError, ValueError):
            raise exceptions.ValidationError(self.error_messages['invalid'])

    def formfield(self, **kwargs):
        # NOTE: this statement is continued in the next source chunk.
        defaults = {'form_class':
                    forms.IntegerField}
        defaults.update(kwargs)
        return super(IntegerField, self).formfield(**defaults)

class BigIntegerField(IntegerField):
    """A 64-bit integer."""
    empty_strings_allowed = False
    description = _("Big (8 byte) integer")
    MAX_BIGINT = 9223372036854775807  # 2**63 - 1

    def get_internal_type(self):
        return "BigIntegerField"

    def formfield(self, **kwargs):
        # Constrain form validation to the signed 64-bit range.
        defaults = {'min_value': -BigIntegerField.MAX_BIGINT - 1,
                    'max_value': BigIntegerField.MAX_BIGINT}
        defaults.update(kwargs)
        return super(BigIntegerField, self).formfield(**defaults)

class IPAddressField(Field):
    """An IPv4 address stored as a string (max dotted-quad length is 15)."""
    empty_strings_allowed = False
    description = _("IP address")

    def __init__(self, *args, **kwargs):
        kwargs['max_length'] = 15
        Field.__init__(self, *args, **kwargs)

    def get_internal_type(self):
        return "IPAddressField"

    def formfield(self, **kwargs):
        defaults = {'form_class': forms.IPAddressField}
        defaults.update(kwargs)
        return super(IPAddressField, self).formfield(**defaults)

class NullBooleanField(Field):
    """Like BooleanField, but also allows and stores ``None``."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("This value must be either None, True or False."),
    }
    description = _("Boolean (Either True, False or None)")

    def __init__(self, *args, **kwargs):
        # This field is always nullable and blank-able by construction.
        kwargs['null'] = True
        kwargs['blank'] = True
        Field.__init__(self, *args, **kwargs)

    def get_internal_type(self):
        return "NullBooleanField"

    def to_python(self, value):
        # Accept None/bools, 1/0, and common string spellings (incl. 'None').
        if value is None:
            return None
        if value in (True, False):
            return bool(value)
        if value in ('None',):
            return None
        if value in ('t', 'True', '1'):
            return True
        if value in ('f', 'False', '0'):
            return False
        raise exceptions.ValidationError(self.error_messages['invalid'])

    def get_prep_lookup(self, lookup_type, value):
        # Special-case handling for filters coming from a Web request (e.g. the
        # admin interface). Only works for scalar values (not lists). If you're
        # passing in a list, you might as well make things the right type when
        # constructing the list.
        if value in ('1', '0'):
            value = bool(int(value))
        return super(NullBooleanField, self).get_prep_lookup(lookup_type, value)

    def get_prep_value(self, value):
        if value is None:
            return None
        return bool(value)

    def formfield(self, **kwargs):
        defaults = {
            'form_class': forms.NullBooleanField,
            'required': not self.blank,
            'label': capfirst(self.verbose_name),
            'help_text': self.help_text}
        defaults.update(kwargs)
        return super(NullBooleanField, self).formfield(**defaults)

class PositiveIntegerField(IntegerField):
    """An integer constrained at the form level to be >= 0."""
    description = _("Integer")

    def related_db_type(self, connection):
        # Fall back to a plain integer column when the backend does not make
        # related fields match the referenced column's type.
        if not connection.features.related_fields_match_type:
            return IntegerField().related_db_type(connection=connection)
        return super(PositiveIntegerField, self).related_db_type(
            connection=connection)

    def get_internal_type(self):
        return "PositiveIntegerField"

    def formfield(self, **kwargs):
        defaults = {'min_value': 0}
        defaults.update(kwargs)
        return super(PositiveIntegerField, self).formfield(**defaults)

class PositiveSmallIntegerField(IntegerField):
    """A small integer constrained at the form level to be >= 0."""
    description = _("Integer")

    def related_db_type(self, connection):
        # Same fallback rationale as PositiveIntegerField.related_db_type.
        if not connection.features.related_fields_match_type:
            return IntegerField().related_db_type(connection=connection)
        return super(PositiveSmallIntegerField, self).related_db_type(
            connection=connection)

    def get_internal_type(self):
        return "PositiveSmallIntegerField"

    def formfield(self, **kwargs):
        defaults = {'min_value': 0}
        defaults.update(kwargs)
        return super(PositiveSmallIntegerField, self).formfield(**defaults)

class SlugField(CharField):
    """A short label string; indexed by default, default max_length 50."""
    description = _("String (up to %(max_length)s)")

    def __init__(self, *args, **kwargs):
        kwargs['max_length'] = kwargs.get('max_length', 50)
        # Set db_index=True unless it's been set manually.
        if 'db_index' not in kwargs:
            kwargs['db_index'] = True
        super(SlugField, self).__init__(*args, **kwargs)

    def get_internal_type(self):
        return "SlugField"

    def formfield(self, **kwargs):
        defaults = {'form_class': forms.SlugField}
        defaults.update(kwargs)
        return super(SlugField, self).formfield(**defaults)

class SmallIntegerField(IntegerField):
    """A small integer (the backend chooses the column width)."""
    description = _("Integer")

    def get_internal_type(self):
        return "SmallIntegerField"

class TextField(Field):
    """Arbitrary-length text."""
    description = _("Text")

    def get_internal_type(self):
        return "TextField"

    def get_prep_value(self, value):
        if isinstance(value, basestring) or value is None:
            return value
        return smart_unicode(value)

    def formfield(self, **kwargs):
        defaults = {'widget': forms.Textarea}
        defaults.update(kwargs)
        return super(TextField, self).formfield(**defaults)

class TimeField(Field):
    """A time of day; supports auto_now / auto_now_add."""
    description = _("Time")
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _('Enter a valid time in HH:MM[:ss[.uuuuuu]] format.'),
    }

    def __init__(self, verbose_name=None, name=None, auto_now=False,
                 auto_now_add=False, **kwargs):
        self.auto_now, self.auto_now_add = auto_now, auto_now_add
        if auto_now or auto_now_add:
            # Auto-managed values are not user-editable.
            kwargs['editable'] = False
        Field.__init__(self, verbose_name, name, **kwargs)

    def get_internal_type(self):
        return "TimeField"

    def to_python(self, value):
        if value is None:
            return None
        if isinstance(value, datetime.time):
            return value
        if isinstance(value, datetime.datetime):
            # Not usually a good idea to pass in a datetime here (it loses
            # information), but this can be a side-effect of interacting with a
            # database backend (e.g. Oracle), so we'll be accommodating.
            return value.time()

        # Attempt to parse a datetime:
        value = smart_str(value)
        # split usecs, because they are not recognized by strptime.
        if '.' in value:
            try:
                value, usecs = value.split('.')
                usecs = int(usecs)
            except ValueError:
                raise exceptions.ValidationError(self.error_messages['invalid'])
        else:
            usecs = 0
        kwargs = {'microsecond': usecs}
        try: # Seconds are optional, so try converting seconds first.
            return datetime.time(*time.strptime(value, '%H:%M:%S')[3:6],
                                 **kwargs)
        except ValueError:
            try: # Try without seconds.
                return datetime.time(*time.strptime(value, '%H:%M')[3:5],
                                     **kwargs)
            except ValueError:
                raise exceptions.ValidationError(self.error_messages['invalid'])

    def pre_save(self, model_instance, add):
        # auto_now: refresh on every save; auto_now_add: only on insert.
        if self.auto_now or (self.auto_now_add and add):
            value = datetime.datetime.now().time()
            setattr(model_instance, self.attname, value)
            return value
        else:
            return super(TimeField, self).pre_save(model_instance, add)

    def get_prep_value(self, value):
        return self.to_python(value)

    def get_db_prep_value(self, value, connection, prepared=False):
        # Casts times into the format expected by the backend
        if not prepared:
            value = self.get_prep_value(value)
        return connection.ops.value_to_db_time(value)

    def value_to_string(self, obj):
        # Serialize to "HH:MM:SS" (empty string for None).
        val = self._get_val_from_obj(obj)
        if val is None:
            data = ''
        else:
            data = val.strftime("%H:%M:%S")
        return data

    def formfield(self, **kwargs):
        defaults = {'form_class': forms.TimeField}
        defaults.update(kwargs)
        return super(TimeField, self).formfield(**defaults)

class URLField(CharField):
    """A URL string (default max_length 200, validated by URLValidator)."""
    description = _("URL")

    def __init__(self, verbose_name=None, name=None, verify_exists=True,
                 **kwargs):
        kwargs['max_length'] = kwargs.get('max_length', 200)
        CharField.__init__(self, verbose_name, name, **kwargs)
        self.validators.append(
            validators.URLValidator(verify_exists=verify_exists))

    def formfield(self, **kwargs):
        # As with CharField, this will cause URL validation to be performed twice
        defaults = {
            'form_class': forms.URLField,
        }
        defaults.update(kwargs)
        return super(URLField, self).formfield(**defaults)

class XMLField(TextField):
    """A TextField intended to hold XML; keeps an optional schema path."""
    description = _("XML text")

    def __init__(self, verbose_name=None, name=None, schema_path=None,
                 **kwargs):
        self.schema_path = schema_path
        Field.__init__(self, verbose_name, name, **kwargs)
get_attname
Main.js
(()=>{"use strict";var t={};t.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(t){if("object"==typeof window)return window}}();const e=t=>"[object Object]"===Object.prototype.toString.call(t),a=(t,e)=>{const a=document.createElement(t);if(e&&"object"==typeof e)for(const t in e)"html"===t?a.innerHTML=e[t]:a.setAttribute(t,e[t]);return a},s=t=>{t instanceof NodeList?t.forEach((t=>s(t))):t.innerHTML=""},i=(t,e,s)=>a("li",{class:t,html:`<a href="#" data-page="${e}">${s}</a>`}),n=(t,e)=>{let a,s;1===e?(a=0,s=t.length):-1===e&&(a=t.length-1,s=-1);for(let i=!0;i;){i=!1;for(let n=a;n!=s;n+=e)if(t[n+e]&&t[n].value>t[n+e].value){const a=t[n],s=t[n+e],r=a;t[n]=s,t[n+e]=r,i=!0}}return t};class r{constructor(t,e){return this.dt=t,this.rows=e,this}build(t){const e=a("tr");let s=this.dt.headings;return s.length||(s=t.map((()=>""))),s.forEach(((s,i)=>{const n=a("td");t[i]&&t[i].length||(t[i]=""),n.innerHTML=t[i],n.data=t[i],e.appendChild(n)})),e}render(t){return t}add(t){if(Array.isArray(t)){const e=this.dt;Array.isArray(t[0])?t.forEach((t=>{e.data.push(this.build(t))})):e.data.push(this.build(t)),e.data.length&&(e.hasRows=!0),this.update(),e.columns().rebuild()}}remove(t){const e=this.dt;Array.isArray(t)?(t.sort(((t,e)=>e-t)),t.forEach((t=>{e.data.splice(t,1)}))):"all"==t?e.data=[]:e.data.splice(t,1),e.data.length||(e.hasRows=!1),this.update(),e.columns().rebuild()}update(){this.dt.data.forEach(((t,e)=>{t.dataIndex=e}))}}class l{constructor(t){return this.dt=t,this}swap(t){if(t.length&&2===t.length){const e=[];this.dt.headings.forEach(((t,a)=>{e.push(a)}));const a=t[0],s=t[1],i=e[s];e[s]=e[a],e[a]=i,this.order(e)}}order(t){let e,a,s,i,n,r,l;const 
h=[[],[],[],[]],o=this.dt;t.forEach(((t,s)=>{n=o.headings[t],r="false"!==n.getAttribute("data-sortable"),e=n.cloneNode(!0),e.originalCellIndex=s,e.sortable=r,h[0].push(e),o.hiddenColumns.includes(t)||(a=n.cloneNode(!0),a.originalCellIndex=s,a.sortable=r,h[1].push(a))})),o.data.forEach(((e,a)=>{s=e.cloneNode(!1),i=e.cloneNode(!1),s.dataIndex=i.dataIndex=a,null!==e.searchIndex&&void 0!==e.searchIndex&&(s.searchIndex=i.searchIndex=e.searchIndex),t.forEach((t=>{l=e.cells[t].cloneNode(!0),l.data=e.cells[t].data,s.appendChild(l),o.hiddenColumns.includes(t)||(l=e.cells[t].cloneNode(!0),l.data=e.cells[t].data,i.appendChild(l))})),h[2].push(s),h[3].push(i)})),o.headings=h[0],o.activeHeadings=h[1],o.data=h[2],o.activeRows=h[3],o.update()}hide(t){if(t.length){const e=this.dt;t.forEach((t=>{e.hiddenColumns.includes(t)||e.hiddenColumns.push(t)})),this.rebuild()}}show(t){if(t.length){let e;const a=this.dt;t.forEach((t=>{e=a.hiddenColumns.indexOf(t),e>-1&&a.hiddenColumns.splice(e,1)})),this.rebuild()}}visible(t){let e;const a=this.dt;return t=t||a.headings.map((t=>t.originalCellIndex)),isNaN(t)?Array.isArray(t)&&(e=[],t.forEach((t=>{e.push(!a.hiddenColumns.includes(t))}))):e=!a.hiddenColumns.includes(t),e}add(t){let e;const a=document.createElement("th");if(!this.dt.headings.length)return this.dt.insert({headings:[t.heading],data:t.data.map((t=>[t]))}),void 
this.rebuild();this.dt.hiddenHeader?a.innerHTML="":t.heading.nodeName?a.appendChild(t.heading):a.innerHTML=t.heading,this.dt.headings.push(a),this.dt.data.forEach(((a,s)=>{t.data[s]&&(e=document.createElement("td"),t.data[s].nodeName?e.appendChild(t.data[s]):e.innerHTML=t.data[s],e.data=e.innerHTML,t.render&&(e.innerHTML=t.render.call(this,e.data,e,a)),a.appendChild(e))})),t.type&&a.setAttribute("data-type",t.type),t.format&&a.setAttribute("data-format",t.format),t.hasOwnProperty("sortable")&&(a.sortable=t.sortable,a.setAttribute("data-sortable",!0===t.sortable?"true":"false")),this.rebuild(),this.dt.renderHeader()}remove(t){Array.isArray(t)?(t.sort(((t,e)=>e-t)),t.forEach((t=>this.remove(t)))):(this.dt.headings.splice(t,1),this.dt.data.forEach((e=>{e.removeChild(e.cells[t])}))),this.rebuild()}filter(t,e,a,s){const i=this.dt;if(i.filterState||(i.filterState={originalData:i.data}),!i.filterState[t]){const e=[...s,()=>!0];i.filterState[t]=function(){let t=0;return()=>e[t++%e.length]}()}const n=i.filterState[t](),r=Array.from(i.filterState.originalData).filter((e=>{const a=e.cells[t],s=a.hasAttribute("data-content")?a.getAttribute("data-content"):a.innerText;return"function"==typeof n?n(s):s===n}));i.data=r,this.rebuild(),i.update(),a||i.emit("datatable.sort",t,e)}sort(t,e,a){const s=this.dt;if(s.hasHeadings&&(t<0||t>s.headings.length))return!1;const i=s.options.filters&&s.options.filters[s.headings[t].textContent];if(i&&0!==i.length)return void this.filter(t,e,a,i);s.sorting=!0,a||s.emit("datatable.sorting",t,e);let r=s.data;const l=[],h=[];let o=0,d=0;const c=s.headings[t],u=[];if("date"===c.getAttribute("data-type")){let t=!1;c.hasAttribute("data-format")&&(t=c.getAttribute("data-format")),u.push(Promise.resolve().then((function(){return f})).then((({parseDate:e})=>a=>e(a,t))))}Promise.all(u).then((i=>{const u=i[0];let p,f;Array.from(r).forEach((e=>{const a=e.cells[t],s=a.hasAttribute("data-content")?a.getAttribute("data-content"):a.innerText;let 
i;i=u?u(s):"string"==typeof s?s.replace(/(\$|,|\s|%)/g,""):s,parseFloat(i)==i?h[d++]={value:Number(i),row:e}:l[o++]={value:"string"==typeof s?s.toLowerCase():s,row:e}})),e||(e=c.classList.contains("asc")?"desc":"asc"),"desc"==e?(p=n(l,-1),f=n(h,-1),c.classList.remove("asc"),c.classList.add("desc")):(p=n(h,1),f=n(l,1),c.classList.remove("desc"),c.classList.add("asc")),s.lastTh&&c!=s.lastTh&&(s.lastTh.classList.remove("desc"),s.lastTh.classList.remove("asc")),s.lastTh=c,r=p.concat(f),s.data=[];const g=[];r.forEach(((t,e)=>{s.data.push(t.row),null!==t.row.searchIndex&&void 0!==t.row.searchIndex&&g.push(e)})),s.searchData=g,this.rebuild(),s.update(),a||s.emit("datatable.sort",t,e)}))}rebuild(){let t,e,a,s;const i=this.dt,n=[];i.activeRows=[],i.activeHeadings=[],i.headings.forEach(((t,e)=>{t.originalCellIndex=e,t.sortable="false"!==t.getAttribute("data-sortable"),i.hiddenColumns.includes(e)||i.activeHeadings.push(t)})),i.data.forEach(((r,l)=>{t=r.cloneNode(!1),e=r.cloneNode(!1),t.dataIndex=e.dataIndex=l,null!==r.searchIndex&&void 0!==r.searchIndex&&(t.searchIndex=e.searchIndex=r.searchIndex),Array.from(r.cells).forEach((n=>{a=n.cloneNode(!0),a.data=n.data,t.appendChild(a),i.hiddenColumns.includes(a.cellIndex)||(s=a.cloneNode(!0),s.data=a.data,e.appendChild(s))})),n.push(t),i.activeRows.push(e)})),i.data=n,i.update()}}const h=function(t){let e=!1,s=!1;if((t=t||this.options.data).headings){e=a("thead");const s=a("tr");t.headings.forEach((t=>{const e=a("th",{html:t});s.appendChild(e)})),e.appendChild(s)}t.data&&t.data.length&&(s=a("tbody"),t.data.forEach((e=>{if(t.headings&&t.headings.length!==e.length)throw new Error("The number of rows do not match the number of headings.");const i=a("tr");e.forEach((t=>{const 
e=a("td",{html:t});i.appendChild(e)})),s.appendChild(i)}))),e&&(null!==this.table.tHead&&this.table.removeChild(this.table.tHead),this.table.appendChild(e)),s&&(this.table.tBodies.length&&this.table.removeChild(this.table.tBodies[0]),this.table.appendChild(s))},o={sortable:!0,searchable:!0,paging:!0,perPage:10,perPageSelect:[5,10,15,20,25],nextPrev:!0,firstLast:!1,prevText:"&lsaquo;",nextText:"&rsaquo;",firstText:"&laquo;",lastText:"&raquo;",ellipsisText:"&hellip;",ascText:"▴",descText:"▾",truncatePager:!0,pagerDelta:2,scrollY:"",fixedColumns:!0,fixedHeight:!1,header:!0,hiddenHeader:!1,footer:!1,labels:{placeholder:"Search...",perPage:"{select} entries per page",noRows:"No entries found",info:"Showing {start} to {end} of {rows} entries"},layout:{top:"{select}{search}",bottom:"{info}{pager}"}};class d{constructor(t,e={}){if(this.initialized=!1,this.options={...o,...e,layout:{...o.layout,...e.layout},labels:{...o.labels,...e.labels}},"string"==typeof t&&(t=document.querySelector(t)),this.initialLayout=t.innerHTML,this.initialSortable=this.options.sortable,this.options.header||(this.options.sortable=!1),null===t.tHead&&(!this.options.data||this.options.data&&!this.options.data.headings)&&(this.options.sortable=!1),t.tBodies.length&&!t.tBodies[0].rows.length&&this.options.data&&!this.options.data.data)throw new Error("You seem to be using the data option, but you've not defined any rows.");this.table=t,this.init()}static extend(t,e){"function"==typeof e?d.prototype[t]=e:d[t]=e}init(t){if(this.initialized||this.table.classList.contains("dataTable-table"))return!1;Object.assign(this.options,t||{}),this.currentPage=1,this.onFirstPage=!0,this.hiddenColumns=[],this.columnRenderers=[],this.selectedColumns=[],this.render(),setTimeout((()=>{this.emit("datatable.init"),this.initialized=!0,this.options.plugins&&Object.entries(this.options.plugins).forEach((([t,e])=>{this[t]&&"function"==typeof 
this[t]&&(this[t]=this[t](e,{createElement:a}),e.enabled&&this[t].init&&"function"==typeof this[t].init&&this[t].init())}))}),10)}render(t){if(t){switch(t){case"page":this.renderPage();break;case"pager":this.renderPager();break;case"header":this.renderHeader()}return!1}const e=this.options;let s="";if(e.data&&h.call(this),this.body=this.table.tBodies[0],this.head=this.table.tHead,this.foot=this.table.tFoot,this.body||(this.body=a("tbody"),this.table.appendChild(this.body)),this.hasRows=this.body.rows.length>0,!this.head){const t=a("thead"),s=a("tr");this.hasRows&&(Array.from(this.body.rows[0].cells).forEach((()=>{s.appendChild(a("th"))})),t.appendChild(s)),this.head=t,this.table.insertBefore(this.head,this.body),this.hiddenHeader=e.hiddenHeader}if(this.headings=[],this.hasHeadings=this.head.rows.length>0,this.hasHeadings&&(this.header=this.head.rows[0],this.headings=[].slice.call(this.header.cells)),e.header||this.head&&this.table.removeChild(this.table.tHead),e.footer?this.head&&!this.foot&&(this.foot=a("tfoot",{html:this.head.innerHTML}),this.table.appendChild(this.foot)):this.foot&&this.table.removeChild(this.table.tFoot),this.wrapper=a("div",{class:"dataTable-wrapper dataTable-loading"}),s+="<div class='dataTable-top'>",s+=e.layout.top,s+="</div>",e.scrollY.length?s+=`<div class='dataTable-container' style='height: ${e.scrollY}; overflow-Y: auto;'></div>`:s+="<div class='dataTable-container'></div>",s+="<div class='dataTable-bottom'>",s+=e.layout.bottom,s+="</div>",s=s.replace("{info}",e.paging?"<div class='dataTable-info'></div>":""),e.paging&&e.perPageSelect){let t="<div class='dataTable-dropdown'><label>";t+=e.labels.perPage,t+="</label></div>";const i=a("select",{class:"dataTable-selector"});e.perPageSelect.forEach((t=>{const a=t===e.perPage,s=new Option(t,t,a,a);i.add(s)})),t=t.replace("{select}",i.outerHTML),s=s.replace("{select}",t)}else s=s.replace("{select}","");if(e.searchable){const t=`<div class='dataTable-search'><input class='dataTable-input' 
placeholder='${e.labels.placeholder}' type='text'></div>`;s=s.replace("{search}",t)}else s=s.replace("{search}","");this.hasHeadings&&this.render("header"),this.table.classList.add("dataTable-table");const i=a("nav",{class:"dataTable-pagination"}),n=a("ul",{class:"dataTable-pagination-list"});i.appendChild(n),s=s.replace(/\{pager\}/g,i.outerHTML),this.wrapper.innerHTML=s,this.container=this.wrapper.querySelector(".dataTable-container"),this.pagers=this.wrapper.querySelectorAll(".dataTable-pagination-list"),this.label=this.wrapper.querySelector(".dataTable-info"),this.table.parentNode.replaceChild(this.wrapper,this.table),this.container.appendChild(this.table),this.rect=this.table.getBoundingClientRect(),this.data=Array.from(this.body.rows),this.activeRows=this.data.slice(),this.activeHeadings=this.headings.slice(),this.update(),this.setColumns(),this.fixHeight(),this.fixColumns(),e.header||this.wrapper.classList.add("no-header"),e.footer||this.wrapper.classList.add("no-footer"),e.sortable&&this.wrapper.classList.add("sortable"),e.searchable&&this.wrapper.classList.add("searchable"),e.fixedHeight&&this.wrapper.classList.add("fixed-height"),e.fixedColumns&&this.wrapper.classList.add("fixed-columns"),this.bindEvents()}renderPage(){if(this.hasHeadings&&(s(this.header),this.activeHeadings.forEach((t=>this.header.appendChild(t)))),this.hasRows&&this.totalPages){this.currentPage>this.totalPages&&(this.currentPage=1);const t=this.currentPage-1,e=document.createDocumentFragment();this.pages[t].forEach((t=>e.appendChild(this.rows().render(t)))),this.clear(e),this.onFirstPage=1===this.currentPage,this.onLastPage=this.currentPage===this.lastPage}else this.setMessage(this.options.labels.noRows);let t,e=0,a=0,i=0;if(this.totalPages&&(e=this.currentPage-1,a=e*this.options.perPage,i=a+this.pages[e].length,a+=1,t=this.searching?this.searchData.length:this.data.length),this.label&&this.options.labels.info.length){const 
e=this.options.labels.info.replace("{start}",a).replace("{end}",i).replace("{page}",this.currentPage).replace("{pages}",this.totalPages).replace("{rows}",t);this.label.innerHTML=t?e:""}1==this.currentPage&&this.fixHeight()}renderPager(){if(s(this.pagers),this.totalPages>1){const t="pager",e=document.createDocumentFragment(),s=this.onFirstPage?1:this.currentPage-1,n=this.onLastPage?this.totalPages:this.currentPage+1;this.options.firstLast&&e.appendChild(i(t,1,this.options.firstText)),this.options.nextPrev&&e.appendChild(i(t,s,this.options.prevText));let r=this.links;this.options.truncatePager&&(r=((t,e,s,i,n)=>{let r;const l=2*(i=i||2);let h=e-i,o=e+i;const d=[],c=[];e<4-i+l?o=3+l:e>s-(3-i+l)&&(h=s-(2+l));for(let e=1;e<=s;e++)if(1==e||e==s||e>=h&&e<=o){const a=t[e-1];a.classList.remove("active"),d.push(a)}return d.forEach((e=>{const s=e.children[0].getAttribute("data-page");if(r){const e=r.children[0].getAttribute("data-page");if(s-e==2)c.push(t[e]);else if(s-e!=1){const t=a("li",{class:"ellipsis",html:`<a href="#">${n}</a>`});c.push(t)}}c.push(e),r=e})),c})(this.links,this.currentPage,this.pages.length,this.options.pagerDelta,this.options.ellipsisText)),this.links[this.currentPage-1].classList.add("active"),r.forEach((t=>{t.classList.remove("active"),e.appendChild(t)})),this.links[this.currentPage-1].classList.add("active"),this.options.nextPrev&&e.appendChild(i(t,n,this.options.nextText)),this.options.firstLast&&e.appendChild(i(t,this.totalPages,this.options.lastText)),this.pagers.forEach((t=>{t.appendChild(e.cloneNode(!0))}))}}renderHeader(){this.labels=[],this.headings&&this.headings.length&&this.headings.forEach(((t,e)=>{if(this.labels[e]=t.textContent,t.firstElementChild&&t.firstElementChild.classList.contains("dataTable-sorter")&&(t.innerHTML=t.firstElementChild.innerHTML),t.sortable="false"!==t.getAttribute("data-sortable"),t.originalCellIndex=e,this.options.sortable&&t.sortable){const 
e=a("a",{href:"#",class:"dataTable-sorter",html:t.innerHTML});t.innerHTML="",t.setAttribute("data-sortable",""),t.appendChild(e)}})),this.fixColumns()}bindEvents(){const t=this.options;if(t.perPageSelect){const e=this.wrapper.querySelector(".dataTable-selector");e&&e.addEventListener("change",(()=>{t.perPage=parseInt(e.value,10),this.update(),this.fixHeight(),this.emit("datatable.perpage",t.perPage)}),!1)}t.searchable&&(this.input=this.wrapper.querySelector(".dataTable-input"),this.input&&this.input.addEventListener("keyup",(()=>this.search(this.input.value)),!1)),this.wrapper.addEventListener("click",(e=>{const a=e.target.closest("a");a&&"a"===a.nodeName.toLowerCase()&&(a.hasAttribute("data-page")?(this.page(a.getAttribute("data-page")),e.preventDefault()):t.sortable&&a.classList.contains("dataTable-sorter")&&"false"!=a.parentNode.getAttribute("data-sortable")&&(this.columns().sort(this.headings.indexOf(a.parentNode)),e.preventDefault()))}),!1),window.addEventListener("resize",(()=>{this.rect=this.container.getBoundingClientRect(),this.fixColumns()}))}setColumns(t){t||this.data.forEach((t=>{Array.from(t.cells).forEach((t=>{t.data=t.innerHTML}))})),this.options.columns&&this.headings.length&&this.options.columns.forEach((t=>{Array.isArray(t.select)||(t.select=[t.select]),t.hasOwnProperty("render")&&"function"==typeof t.render&&(this.selectedColumns=this.selectedColumns.concat(t.select),this.columnRenderers.push({columns:t.select,renderer:t.render})),t.select.forEach((e=>{const 
a=this.headings[e];t.type&&a.setAttribute("data-type",t.type),t.format&&a.setAttribute("data-format",t.format),t.hasOwnProperty("sortable")&&a.setAttribute("data-sortable",t.sortable),t.hasOwnProperty("hidden")&&!1!==t.hidden&&this.columns().hide([e]),t.hasOwnProperty("sort")&&1===t.select.length&&this.columns().sort(t.select[0],t.sort,!0)}))})),this.hasRows&&(this.data.forEach(((t,e)=>{t.dataIndex=e,Array.from(t.cells).forEach((t=>{t.data=t.innerHTML}))})),this.selectedColumns.length&&this.data.forEach((t=>{Array.from(t.cells).forEach(((e,a)=>{this.selectedColumns.includes(a)&&this.columnRenderers.forEach((s=>{s.columns.includes(a)&&(e.innerHTML=s.renderer.call(this,e.data,e,t))}))}))})),this.columns().rebuild()),this.render("header")}destroy(){this.table.innerHTML=this.initialLayout,this.table.classList.remove("dataTable-table"),this.wrapper.parentNode.replaceChild(this.table,this.wrapper),this.initialized=!1}update(){this.wrapper.classList.remove("dataTable-empty"),this.paginate(this),this.render("page"),this.links=[];let t=this.pages.length;for(;t--;){const e=t+1;this.links[t]=i(0===t?"active":"",e,e)}this.sorting=!1,this.render("pager"),this.rows().update(),this.emit("datatable.update")}paginate(){const t=this.options.perPage;let e=this.activeRows;return this.searching&&(e=[],this.searchData.forEach((t=>e.push(this.activeRows[t])))),this.options.paging?this.pages=e.map(((a,s)=>s%t==0?e.slice(s,s+t):null)).filter((t=>t)):this.pages=[e],this.totalPages=this.lastPage=this.pages.length,this.totalPages}fixColumns(){if((this.options.scrollY.length||this.options.fixedColumns)&&this.activeHeadings&&this.activeHeadings.length){let t,e=!1;if(this.columnWidths=[],this.table.tHead){if(this.options.scrollY.length&&(e=a("thead"),e.appendChild(a("tr")),e.style.height="0px",this.headerTable&&(this.table.tHead=this.headerTable.tHead)),this.activeHeadings.forEach((t=>{t.style.width=""})),this.activeHeadings.forEach(((t,s)=>{const 
i=t.offsetWidth,n=i/this.rect.width*100;if(t.style.width=`${n}%`,this.columnWidths[s]=i,this.options.scrollY.length){const t=a("th");e.firstElementChild.appendChild(t),t.style.width=`${n}%`,t.style.paddingTop="0",t.style.paddingBottom="0",t.style.border="0"}})),this.options.scrollY.length){const t=this.table.parentElement;if(!this.headerTable){this.headerTable=a("table",{class:"dataTable-table"});const e=a("div",{class:"dataTable-headercontainer"});e.appendChild(this.headerTable),t.parentElement.insertBefore(e,t)}const s=this.table.tHead;this.table.replaceChild(e,s),this.headerTable.tHead=s,this.headerTable.parentElement.style.paddingRight=`${this.headerTable.clientWidth-this.table.clientWidth+parseInt(this.headerTable.parentElement.style.paddingRight||"0",10)}px`,t.scrollHeight>t.clientHeight&&(t.style.overflowY="scroll")}}else{t=[],e=a("thead");const s=a("tr");Array.from(this.table.tBodies[0].rows[0].cells).forEach((()=>{const e=a("th");s.appendChild(e),t.push(e)})),e.appendChild(s),this.table.insertBefore(e,this.body);const i=[];t.forEach(((t,e)=>{const a=t.offsetWidth,s=a/this.rect.width*100;i.push(s),this.columnWidths[e]=a})),this.data.forEach((t=>{Array.from(t.cells).forEach(((t,e)=>{this.columns(t.cellIndex).visible()&&(t.style.width=`${i[e]}%`)}))})),this.table.removeChild(e)}}}fixHeight(){this.options.fixedHeight&&(this.container.style.height=null,this.rect=this.container.getBoundingClientRect(),this.container.style.height=`${this.rect.height}px`)}search(t){return!!this.hasRows&&(t=t.toLowerCase(),this.currentPage=1,this.searching=!0,this.searchData=[],t.length?(this.clear(),this.data.forEach(((e,a)=>{const s=this.searchData.includes(e);t.split(" ").reduce(((t,a)=>{let s=!1,i=null,n=null;for(let t=0;t<e.cells.length;t++)if(i=e.cells[t],n=i.hasAttribute("data-content")?i.getAttribute("data-content"):i.textContent,n.toLowerCase().includes(a)&&this.columns(i.cellIndex).visible()){s=!0;break}return 
t&&s}),!0)&&!s?(e.searchIndex=a,this.searchData.push(a)):e.searchIndex=null})),this.wrapper.classList.add("search-results"),this.searchData.length?this.update():(this.wrapper.classList.remove("search-results"),this.setMessage(this.options.labels.noRows)),void this.emit("datatable.search",t,this.searchData)):(this.searching=!1,this.update(),this.emit("datatable.search",t,this.searchData),this.wrapper.classList.remove("search-results"),!1))}page(t){return t!=this.currentPage&&(isNaN(t)||(this.currentPage=parseInt(t,10)),!(t>this.pages.length||t<0)&&(this.render("page"),this.render("pager"),void this.emit("datatable.page",t)))}sortColumn(t,e){this.columns().sort(t,e)}insert(t){let s=[];if(e(t)){if(t.headings&&!this.hasHeadings&&!this.hasRows){const e=a("tr");t.headings.forEach((t=>{const s=a("th",{html:t});e.appendChild(s)})),this.head.appendChild(e),this.header=e,this.headings=[].slice.call(e.cells),this.hasHeadings=!0,this.options.sortable=this.initialSortable,this.render("header"),this.activeHeadings=this.headings.slice()}t.data&&Array.isArray(t.data)&&(s=t.data)}else Array.isArray(t)&&t.forEach((t=>{const e=[];Object.entries(t).forEach((([t,a])=>{const s=this.labels.indexOf(t);s>-1&&(e[s]=a)})),s.push(e)}));s.length&&(this.rows().add(s),this.hasRows=!0),this.update(),this.setColumns(),this.fixColumns()}refresh(){this.options.searchable&&(this.input.value="",this.searching=!1),this.currentPage=1,this.onFirstPage=!0,this.update(),this.emit("datatable.refresh")}clear(t){this.body&&s(this.body);let e=this.body;this.body||(e=this.table),t&&("string"==typeof t&&(document.createDocumentFragment().innerHTML=t),e.appendChild(t))}export(t){if(!this.hasHeadings&&!this.hasRows)return!1;const a=this.activeHeadings;let s=[];const i=[];let n,r,l,h;if(!e(t))return!1;const 
o={download:!0,skipColumn:[],lineDelimiter:"\n",columnDelimiter:",",tableName:"myTable",replacer:null,space:4,...t};if(o.type){if("txt"!==o.type&&"csv"!==o.type||(s[0]=this.header),o.selection)if(isNaN(o.selection)){if(Array.isArray(o.selection))for(n=0;n<o.selection.length;n++)s=s.concat(this.pages[o.selection[n]-1])}else s=s.concat(this.pages[o.selection-1]);else s=s.concat(this.activeRows);if(s.length){if("txt"===o.type||"csv"===o.type){for(l="",n=0;n<s.length;n++){for(r=0;r<s[n].cells.length;r++)if(!o.skipColumn.includes(a[r].originalCellIndex)&&this.columns(a[r].originalCellIndex).visible()){let t=s[n].cells[r].textContent;t=t.trim(),t=t.replace(/\s{2,}/g," "),t=t.replace(/\n/g," "),t=t.replace(/"/g,'""'),t=t.replace(/#/g,"%23"),t.includes(",")&&(t=`"${t}"`),l+=t+o.columnDelimiter}l=l.trim().substring(0,l.length-1),l+=o.lineDelimiter}l=l.trim().substring(0,l.length-1),o.download&&(l=`data:text/csv;charset=utf-8,${l}`)}else if("sql"===o.type){for(l=`INSERT INTO \`${o.tableName}\` (`,n=0;n<a.length;n++)!o.skipColumn.includes(a[n].originalCellIndex)&&this.columns(a[n].originalCellIndex).visible()&&(l+=`\`${a[n].textContent}\`,`);for(l=l.trim().substring(0,l.length-1),l+=") VALUES ",n=0;n<s.length;n++){for(l+="(",r=0;r<s[n].cells.length;r++)!o.skipColumn.includes(a[r].originalCellIndex)&&this.columns(a[r].originalCellIndex).visible()&&(l+=`"${s[n].cells[r].textContent}",`);l=l.trim().substring(0,l.length-1),l+="),"}l=l.trim().substring(0,l.length-1),l+=";",o.download&&(l=`data:application/sql;charset=utf-8,${l}`)}else if("json"===o.type){for(r=0;r<s.length;r++)for(i[r]=i[r]||{},n=0;n<a.length;n++)!o.skipColumn.includes(a[n].originalCellIndex)&&this.columns(a[n].originalCellIndex).visible()&&(i[r][a[n].textContent]=s[r].cells[n].textContent);l=JSON.stringify(i,o.replacer,o.space),o.download&&(l=`data:application/json;charset=utf-8,${l}`)}return 
o.download&&(o.filename=o.filename||"datatable_export",o.filename+=`.${o.type}`,l=encodeURI(l),h=document.createElement("a"),h.href=l,h.download=o.filename,document.body.appendChild(h),h.click(),document.body.removeChild(h)),l}}return!1}import(t){let a=!1;if(!e(t))return!1;const s={lineDelimiter:"\n",columnDelimiter:",",...t};if(s.data.length||e(s.data)){if("csv"===s.type){a={data:[]};const t=s.data.split(s.lineDelimiter);t.length&&(s.headings&&(a.headings=t[0].split(s.columnDelimiter),t.shift()),t.forEach(((t,e)=>{a.data[e]=[];const i=t.split(s.columnDelimiter);i.length&&i.forEach((t=>{a.data[e].push(t)}))})))}else if("json"===s.type){const t=(t=>{let a=!1;try{a=JSON.parse(t)}catch(t){return!1}return!(null===a||!Array.isArray(a)&&!e(a))&&a})(s.data);t&&(a={headings:[],data:[]},t.forEach(((t,e)=>{a.data[e]=[],Object.entries(t).forEach((([t,s])=>{a.headings.includes(t)||a.headings.push(t),a.data[e].push(s)}))})))}e(s.data)&&(a=s.data),a&&this.insert(a)}return!1}print(){const t=this.activeHeadings,e=this.activeRows,s=a("table"),i=a("thead"),n=a("tbody"),r=a("tr");t.forEach((t=>{r.appendChild(a("th",{html:t.textContent}))})),i.appendChild(r),e.forEach((t=>{const e=a("tr");Array.from(t.cells).forEach((t=>{e.appendChild(a("td",{html:t.textContent}))})),n.appendChild(e)})),s.appendChild(i),s.appendChild(n);const l=window.open();l.document.body.appendChild(s),l.print()}setMessage(t){let e=1;this.hasRows?e=this.data[0].cells.length:this.activeHeadings.length&&(e=this.activeHeadings.length),this.wrapper.classList.add("dataTable-empty"),this.label&&(this.label.innerHTML=""),this.totalPages=0,this.render("pager"),this.clear(a("tr",{html:`<td class="dataTables-empty" colspan="${e}">${t}</td>`}))}columns(t){return new l(this,t)}rows(t){return new r(this,t)}on(t,e){this.events=this.events||{},this.events[t]=this.events[t]||[],this.events[t].push(e)}off(t,e){this.events=this.events||{},t in 
this.events!=0&&this.events[t].splice(this.events[t].indexOf(e),1)}emit(t){if(this.events=this.events||{},t in this.events!=0)for(let e=0;e<this.events[t].length;e++)this.events[t][e].apply(this,Array.prototype.slice.call(arguments,1))}}function c(t,e){return t(e={exports:{}},e.exports),e.exports}"undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:void 0!==t.g?t.g:"undefined"!=typeof self&&self;var u=c((function(t,e){t.exports=function(){var t="millisecond",e="second",a="minute",s="hour",i="day",n="week",r="month",l="quarter",h="year",o="date",d=/^(\d{4})[-/]?(\d{1,2})?[-/]?(\d{0,2})[^0-9]*(\d{1,2})?:?(\d{1,2})?:?(\d{1,2})?[.:]?(\d+)?$/,c=/\[([^\]]+)]|Y{1,4}|M{1,4}|D{1,2}|d{1,4}|H{1,2}|h{1,2}|a|A|m{1,2}|s{1,2}|Z{1,2}|SSS/g,u={name:"en",weekdays:"Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday".split("_"),months:"January_February_March_April_May_June_July_August_September_October_November_December".split("_")},p=function(t,e,a){var s=String(t);return!s||s.length>=e?t:""+Array(e+1-s.length).join(a)+t},f={s:p,z:function(t){var e=-t.utcOffset(),a=Math.abs(e),s=Math.floor(a/60),i=a%60;return(e<=0?"+":"-")+p(s,2,"0")+":"+p(i,2,"0")},m:function t(e,a){if(e.date()<a.date())return-t(a,e);var s=12*(a.year()-e.year())+(a.month()-e.month()),i=e.clone().add(s,r),n=a-i<0,l=e.clone().add(s+(n?-1:1),r);return+(-(s+(a-i)/(n?i-l:l-i))||0)},a:function(t){return t<0?Math.ceil(t)||0:Math.floor(t)},p:function(d){return{M:r,y:h,w:n,d:i,D:o,h:s,m:a,s:e,ms:t,Q:l}[d]||String(d||"").toLowerCase().replace(/s$/,"")},u:function(t){return void 0===t}},g="en",b={};b[g]=u;var m=function(t){return t instanceof w},y=function(t,e,a){var s;if(!t)return g;if("string"==typeof t)b[t]&&(s=t),e&&(b[t]=e,s=t);else{var i=t.name;b[i]=t,s=i}return!a&&s&&(g=s),s||!a&&g},v=function(t,e){if(m(t))return t.clone();var a="object"==typeof e?e:{};return a.date=t,a.args=arguments,new w(a)},T=f;T.l=y,T.i=m,T.w=function(t,e){return v(t,{locale:e.$L,utc:e.$u,x:e.$x,$offset:e.$offset})};var 
w=function(){function u(t)
this.$L=y(t.locale,null,!0),this.parse(t)}var p=u.prototype;return p.parse=function(t){this.$d=function(t){var e=t.date,a=t.utc;if(null===e)return new Date(NaN);if(T.u(e))return new Date;if(e instanceof Date)return new Date(e);if("string"==typeof e&&!/Z$/i.test(e)){var s=e.match(d);if(s){var i=s[2]-1||0,n=(s[7]||"0").substring(0,3);return a?new Date(Date.UTC(s[1],i,s[3]||1,s[4]||0,s[5]||0,s[6]||0,n)):new Date(s[1],i,s[3]||1,s[4]||0,s[5]||0,s[6]||0,n)}}return new Date(e)}(t),this.$x=t.x||{},this.init()},p.init=function(){var t=this.$d;this.$y=t.getFullYear(),this.$M=t.getMonth(),this.$D=t.getDate(),this.$W=t.getDay(),this.$H=t.getHours(),this.$m=t.getMinutes(),this.$s=t.getSeconds(),this.$ms=t.getMilliseconds()},p.$utils=function(){return T},p.isValid=function(){return!("Invalid Date"===this.$d.toString())},p.isSame=function(t,e){var a=v(t);return this.startOf(e)<=a&&a<=this.endOf(e)},p.isAfter=function(t,e){return v(t)<this.startOf(e)},p.isBefore=function(t,e){return this.endOf(e)<v(t)},p.$g=function(t,e,a){return T.u(t)?this[e]:this.set(a,t)},p.unix=function(){return Math.floor(this.valueOf()/1e3)},p.valueOf=function(){return this.$d.getTime()},p.startOf=function(t,l){var d=this,c=!!T.u(l)||l,u=T.p(t),p=function(t,e){var a=T.w(d.$u?Date.UTC(d.$y,e,t):new Date(d.$y,e,t),d);return c?a:a.endOf(i)},f=function(t,e){return T.w(d.toDate()[t].apply(d.toDate("s"),(c?[0,0,0,0]:[23,59,59,999]).slice(e)),d)},g=this.$W,b=this.$M,m=this.$D,y="set"+(this.$u?"UTC":"");switch(u){case h:return c?p(1,0):p(31,11);case r:return c?p(1,b):p(0,b+1);case n:var v=this.$locale().weekStart||0,w=(g<v?g+7:g)-v;return p(c?m-w:m+(6-w),b);case i:case o:return f(y+"Hours",0);case s:return f(y+"Minutes",1);case a:return f(y+"Seconds",2);case e:return f(y+"Milliseconds",3);default:return this.clone()}},p.endOf=function(t){return this.startOf(t,!1)},p.$set=function(n,l){var 
d,c=T.p(n),u="set"+(this.$u?"UTC":""),p=(d={},d[i]=u+"Date",d[o]=u+"Date",d[r]=u+"Month",d[h]=u+"FullYear",d[s]=u+"Hours",d[a]=u+"Minutes",d[e]=u+"Seconds",d[t]=u+"Milliseconds",d)[c],f=c===i?this.$D+(l-this.$W):l;if(c===r||c===h){var g=this.clone().set(o,1);g.$d[p](f),g.init(),this.$d=g.set(o,Math.min(this.$D,g.daysInMonth())).$d}else p&&this.$d[p](f);return this.init(),this},p.set=function(t,e){return this.clone().$set(t,e)},p.get=function(t){return this[T.p(t)]()},p.add=function(t,l){var o,d=this;t=Number(t);var c=T.p(l),u=function(e){var a=v(d);return T.w(a.date(a.date()+Math.round(e*t)),d)};if(c===r)return this.set(r,this.$M+t);if(c===h)return this.set(h,this.$y+t);if(c===i)return u(1);if(c===n)return u(7);var p=(o={},o[a]=6e4,o[s]=36e5,o[e]=1e3,o)[c]||1,f=this.$d.getTime()+t*p;return T.w(f,this)},p.subtract=function(t,e){return this.add(-1*t,e)},p.format=function(t){var e=this;if(!this.isValid())return"Invalid Date";var a=t||"YYYY-MM-DDTHH:mm:ssZ",s=T.z(this),i=this.$locale(),n=this.$H,r=this.$m,l=this.$M,h=i.weekdays,o=i.months,d=function(t,s,i,n){return t&&(t[s]||t(e,a))||i[s].substr(0,n)},u=function(t){return T.s(n%12||12,t,"0")},p=i.meridiem||function(t,e,a){var s=t<12?"AM":"PM";return a?s.toLowerCase():s},f={YY:String(this.$y).slice(-2),YYYY:this.$y,M:l+1,MM:T.s(l+1,2,"0"),MMM:d(i.monthsShort,l,o,3),MMMM:d(o,l),D:this.$D,DD:T.s(this.$D,2,"0"),d:String(this.$W),dd:d(i.weekdaysMin,this.$W,h,2),ddd:d(i.weekdaysShort,this.$W,h,3),dddd:h[this.$W],H:String(n),HH:T.s(n,2,"0"),h:u(1),hh:u(2),a:p(n,r,!0),A:p(n,r,!1),m:String(r),mm:T.s(r,2,"0"),s:String(this.$s),ss:T.s(this.$s,2,"0"),SSS:T.s(this.$ms,3,"0"),Z:s};return a.replace(c,(function(t,e){return e||f[t]||s.replace(":","")}))},p.utcOffset=function(){return 15*-Math.round(this.$d.getTimezoneOffset()/15)},p.diff=function(t,o,d){var c,u=T.p(o),p=v(t),f=6e4*(p.utcOffset()-this.utcOffset()),g=this-p,b=T.m(this,p);return 
b=(c={},c[h]=b/12,c[r]=b,c[l]=b/3,c[n]=(g-f)/6048e5,c[i]=(g-f)/864e5,c[s]=g/36e5,c[a]=g/6e4,c[e]=g/1e3,c)[u]||g,d?b:T.a(b)},p.daysInMonth=function(){return this.endOf(r).$D},p.$locale=function(){return b[this.$L]},p.locale=function(t,e){if(!t)return this.$L;var a=this.clone(),s=y(t,e,!0);return s&&(a.$L=s),a},p.clone=function(){return T.w(this.$d,this)},p.toDate=function(){return new Date(this.valueOf())},p.toJSON=function(){return this.isValid()?this.toISOString():null},p.toISOString=function(){return this.$d.toISOString()},p.toString=function(){return this.$d.toUTCString()},u}(),C=w.prototype;return v.prototype=C,[["$ms",t],["$s",e],["$m",a],["$H",s],["$W",i],["$M",r],["$y",h],["$D",o]].forEach((function(t){C[t[1]]=function(e){return this.$g(e,t[0],t[1])}})),v.extend=function(t,e){return t.$i||(t(e,w,v),t.$i=!0),v},v.locale=y,v.isDayjs=m,v.unix=function(t){return v(1e3*t)},v.en=b[g],v.Ls=b,v.p={},v}()})),p=c((function(t,e){var a,s,i,n,r,l,h,o,d,c,u,p,f;t.exports=(a={LTS:"h:mm:ss A",LT:"h:mm A",L:"MM/DD/YYYY",LL:"MMMM D, YYYY",LLL:"MMMM D, YYYY h:mm A",LLLL:"dddd, MMMM D, YYYY h:mm A"},s=function(t,e){return t.replace(/(\[[^\]]+])|(LTS?|l{1,4}|L{1,4})/g,(function(t,s,i){var n=i&&i.toUpperCase();return s||e[i]||a[i]||e[n].replace(/(\[[^\]]+])|(MMMM|MM|DD|dddd)/g,(function(t,e,a){return e||a.slice(1)}))}))},i=/(\[[^[]*\])|([-:/.()\s]+)|(A|a|YYYY|YY?|MM?M?M?|Do|DD?|hh?|HH?|mm?|ss?|S{1,3}|z|ZZ?)/g,h={},d=[/[+-]\d\d:?(\d\d)?/,function(t){(this.zone||(this.zone={})).offset=function(t){if(!t)return 0;var e=t.match(/([+-]|\d\d)/g),a=60*e[1]+(+e[2]||0);return 0===a?0:"+"===e[0]?-a:a}(t)}],c=function(t){var e=h[t];return e&&(e.indexOf?e:e.s.concat(e.f))},u=function(t,e){var a,s=h.meridiem;if(s){for(var i=1;i<=24;i+=1)if(t.indexOf(s(i,0,e))>-1){a=i>12;break}}else a=t===(e?"pm":"PM");return 
a},p={A:[l=/\d*[^\s\d-:/()]+/,function(t){this.afternoon=u(t,!1)}],a:[l,function(t){this.afternoon=u(t,!0)}],S:[/\d/,function(t){this.milliseconds=100*+t}],SS:[n=/\d\d/,function(t){this.milliseconds=10*+t}],SSS:[/\d{3}/,function(t){this.milliseconds=+t}],s:[r=/\d\d?/,(o=function(t){return function(e){this[t]=+e}})("seconds")],ss:[r,o("seconds")],m:[r,o("minutes")],mm:[r,o("minutes")],H:[r,o("hours")],h:[r,o("hours")],HH:[r,o("hours")],hh:[r,o("hours")],D:[r,o("day")],DD:[n,o("day")],Do:[l,function(t){var e=h.ordinal,a=t.match(/\d+/);if(this.day=a[0],e)for(var s=1;s<=31;s+=1)e(s).replace(/\[|\]/g,"")===t&&(this.day=s)}],M:[r,o("month")],MM:[n,o("month")],MMM:[l,function(t){var e=c("months"),a=(c("monthsShort")||e.map((function(t){return t.substr(0,3)}))).indexOf(t)+1;if(a<1)throw new Error;this.month=a%12||a}],MMMM:[l,function(t){var e=c("months").indexOf(t)+1;if(e<1)throw new Error;this.month=e%12||e}],Y:[/[+-]?\d+/,o("year")],YY:[n,function(t){t=+t,this.year=t+(t>68?1900:2e3)}],YYYY:[/\d{4}/,o("year")],Z:d,ZZ:d},f=function(t,e,a){try{var n=function(t){for(var e=(t=s(t,h&&h.formats)).match(i),a=e.length,n=0;n<a;n+=1){var r=e[n],l=p[r],o=l&&l[0],d=l&&l[1];e[n]=d?{regex:o,parser:d}:r.replace(/^\[|\]$/g,"")}return function(t){for(var s={},i=0,n=0;i<a;i+=1){var r=e[i];if("string"==typeof r)n+=r.length;else{var l=r.regex,h=r.parser,o=t.substr(n),d=l.exec(o)[0];h.call(s,d),t=t.replace(d,"")}}return function(t){var e=t.afternoon;if(void 0!==e){var a=t.hours;e?a<12&&(t.hours+=12):12===a&&(t.hours=0),delete t.afternoon}}(s),s}}(e)(t),r=n.year,l=n.month,o=n.day,d=n.hours,c=n.minutes,u=n.seconds,f=n.milliseconds,g=n.zone,b=new Date,m=o||(r||l?1:b.getDate()),y=r||b.getFullYear(),v=0;r&&!l||(v=l>0?l-1:b.getMonth());var T=d||0,w=c||0,C=u||0,M=f||0;return g?new Date(Date.UTC(y,v,m,T,w,C,M+60*g.offset*1e3)):a?new Date(Date.UTC(y,v,m,T,w,C,M)):new Date(y,v,m,T,w,C,M)}catch(t){return new Date("")}},function(t,e,a){a.p.customParseFormat=!0;var 
s=e.prototype,i=s.parse;s.parse=function(t){var e=t.date,s=t.utc,n=t.args;this.$u=s;var r=n[1];if("string"==typeof r){var l=!0===n[2],o=!0===n[3],d=l||o,c=n[2];o&&(c=n[2]),h=this.$locale(),!l&&c&&(h=a.Ls[c]),this.$d=f(e,r,s),this.init(),c&&!0!==c&&(this.$L=this.locale(c).$L),d&&e!==this.format(r)&&(this.$d=new Date("")),h={}}else if(r instanceof Array)for(var u=r.length,p=1;p<=u;p+=1){n[1]=r[p-1];var g=a.apply(this,n);if(g.isValid()){this.$d=g.$d,this.$L=g.$L,this.init();break}p===u&&(this.$d=new Date(""))}else i.call(this,t)}})}));u.extend(p);var f=Object.freeze({__proto__:null,parseDate:(t,e)=>{let a=!1;if(e)switch(e){case"ISO_8601":a=t;break;case"RFC_2822":a=u(t,"ddd, MM MMM YYYY HH:mm:ss ZZ").format("YYYYMMDD");break;case"MYSQL":a=u(t,"YYYY-MM-DD hh:mm:ss").format("YYYYMMDD");break;case"UNIX":a=u(t).unix();break;default:a=u(t,e).format("YYYYMMDD")}return a}});class g{static NewElement(t="div",e={},a=[]){let s=document.createElement(t);return this.SetStyle(s,e,a),s}static SetStyle(t,e={},a=[]){return Object.keys(e).forEach((a=>{t.style.setProperty(a,e[a])})),Array.isArray(a)&&a.forEach((e=>{t.classList.add(e)})),t.style}}class b{static ToUpperFirstLetter([t,...e]){return[t.toUpperCase(),...e].join("")}}const m="WP_Plugin_UserTable",y={"pointer-events":"none"},v={position:"absolute",width:"100%",height:"auto",display:"flex","justify-content":"center"},T={margin:"auto",width:"50vw",height:"50vh",animation:"LoadingIn 1.6s ease forwards"},w={animation:"LoadingIn 1.6s ease forwards"},C={animation:"FadingIn 1.6s ease forwards"},M={animation:"FadingOut 1.6s ease forwards"},D={position:"relative"},S={Container:{transition:"width 2s",width:"0%"},Action:{Show:{width:"50%"},Hide:{width:"0%"}}},L={Container:{width:"100%",display:"flex","flex-direction":"column"},Header:{"text-align":"center"},Action:{Show:{width:"100%"}}};(new 
class{constructor(){this.MainContent=document.getElementById("site-content"),this.Table={Data:[],Loading:!1,LoadingScreen:null,DOMTable:null,Class:null,Head:{Cells:[]},DataScreen:{Hidden:!0,Loading:!1,Data:null,Error:!1}}}PreparePropertyContainer(t,e,a){null===this.Table[t]?(this.Table[t]=g.NewElement(e,a),this.MainContent.appendChild(this.Table[t])):this.Table[t].innerHTML=""}async StartLoading(){if(this.Table.Loading=!0,null===this.Table.LoadingScreen){this.PreparePropertyContainer("LoadingScreen","div",{...v,...y});let t=g.NewElement("div",T);t.innerHTML='<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" style="margin: auto; background: none; display: block; shape-rendering: auto;" width="177px" height="177px" viewBox="0 0 100 100" preserveAspectRatio="xMidYMid"><circle cx="50" cy="50" r="34" stroke-width="8" stroke="#85a2b6" stroke-dasharray="53.40707511102649 53.40707511102649" fill="none" stroke-linecap="round"> <animateTransform attributeName="transform" type="rotate" dur="1.3513513513513513s" repeatCount="indefinite" keyTimes="0;1" values="0 50 50;360 50 50"></animateTransform></circle><circle cx="50" cy="50" r="25" stroke-width="8" stroke="#bbcedd" stroke-dasharray="39.269908169872416 39.269908169872416" stroke-dashoffset="39.269908169872416" fill="none" stroke-linecap="round"> <animateTransform attributeName="transform" type="rotate" dur="1.3513513513513513s" repeatCount="indefinite" keyTimes="0;1" values="0 50 50;-360 50 50"></animateTransform></circle>\x3c!-- [ldio] generated by https://loading.io/ --\x3e</svg>',g.SetStyle(this.Table.LoadingScreen,C),this.Table.LoadingScreen.appendChild(t)}else g.SetStyle(this.Table.LoadingScreen,M)}async StopLoading(){this.Table.Loading=!1,g.SetStyle(this.Table.LoadingScreen.children[0],w),g.SetStyle(this.Table.LoadingScreen,M)}async GetUsers(){this.StartLoading(),fetch("https://jsonplaceholder.typicode.com/users").then((async t=>{t.ok?(this.Table.Data=await 
t.json(),this.DisplayUsers()):(this.PreparePropertyContainer("DOMTable","div",v),this.CreateFullScreenMessage(this.Table.DOMTable,"Sorry, couldn't fetch your information",{},["ErrorMessage"]),this.StopLoading())})).catch((t=>{this.PreparePropertyContainer("DOMTable","div",v),this.CreateFullScreenMessage(this.Table.DOMTable,"Sorry, couldn't fetch your information. Reason: "+t.message,{},["ErrorMessage"]),this.StopLoading()}))}async LoadUserData(t){if(!this.Table.DataScreen.Loading){if(this.Table.DataScreen.Loading=!0,t.target.dataset.id===this.Table.DataScreen.Data&&!this.Table.DataScreen.Hidden)return g.SetStyle(this.Table.DOMTable.children[1].children[0],M),g.SetStyle(this.Table.DOMTable.children[1],S.Action.Hide),this.Table.DataScreen.Error=!1,this.Table.DataScreen.Hidden=!0,this.Table.DataScreen.Data=null,void(this.Table.DataScreen.Loading=!1);this.Table.DataScreen.Hidden&&(g.SetStyle(this.Table.DOMTable.children[1],S.Action.Show),this.Table.DataScreen.Hidden=!1),this.Table.DataScreen.Data=t.target.dataset.id,fetch("https://jsonplaceholder.typicode.com/users/"+t.target.dataset.id).then((async t=>{this.Table.DataScreen.Error&&g.SetStyle(this.Table.DOMTable.children[1].children[0],M),this.Table.DataScreen.Error=!1,this.Table.DOMTable.children[1].innerHTML="";let e=g.NewElement("div",{height:document.querySelector("#"+m).getBoundingClientRect().height+"px","margin-top":document.querySelector(".dataTable-top").getBoundingClientRect().height+"px"},["WP_Plugin-data-container"]);this.Table.DOMTable.children[1].appendChild(e),t=await t.json(),await this.IterateProperty(t,e)})).catch((t=>{this.Table.DataScreen.Error||(this.Table.DataScreen.Error=!0,this.Table.DOMTable.children[1].innerHTML="",this.CreateFullScreenMessage(this.Table.DOMTable.children[1],"Sorry, couldn't fetch your information. 
Reason: "+t.message,{},["ErrorMessage"])),g.SetStyle(this.Table.DOMTable.children[1].children[0],C)})),this.Table.DataScreen.Loading=!1}}async IterateProperty(t,e){Object.keys(t).forEach((a=>{let s=g.NewElement("div",{},["WP_Plugin-data-field"]);e.appendChild(s);let i=g.NewElement("div",{},["WP_Plugin-data-field-header"]);if(i.innerHTML=b.ToUpperFirstLetter(a),s.appendChild(i),Object.isExtensible(t[a])&&!Array.isArray(t[a]))g.SetStyle(i,L.Header),this.IterateProperty(t[a],e);else{let e=g.NewElement("div",{},["WP_Plugin-data-field-content"]);e.innerHTML=t[a],s.appendChild(e)}g.SetStyle(s,L.Action.Show)}))}async DisplayUsers(){if(this.PreparePropertyContainer("DOMTable","div",v),Array.isArray(this.Table.Data)&&this.Table.Data.length>0){await this.PrepareTable();let t=this.Table.DOMTable.children[0].children[1],e=0;this.Table.Data.forEach((a=>{let s=null,i=0,n=g.NewElement("tr");Object.keys(a).forEach((t=>{if(0===i&&(s=t),i>=4||Object.isExtensible(a[t])&&!Array.isArray(a[t]))return;let r=g.NewElement("td");if(i<3){let i=g.NewElement("a");i.innerHTML=a[t],i.setAttribute("data-id",a[s]),i.addEventListener("click",(()=>{console.log("1")})),e=i,r.appendChild(i)}else r.innerHTML=a[t];n.appendChild(r),i++})),t.appendChild(n)})),g.SetStyle(this.Table.DOMTable,D),this.StopLoading(),this.Table.Class=new d("#"+m);let a=this.LoadUserData.bind(this);this.Table.Class.on("datatable.init",(function(t){Array.from(document.querySelector("#"+m).children[1].children).forEach((t=>{Array.from(t.children).forEach((t=>{t.children.length>0&&t.children[0].addEventListener("click",a)}))}))}))}else this.CreateFullScreenMessage(this.Table.DOMTable,"Sorry, couldn't fetch your information",{},["ErrorMessage"]),this.StopLoading()}async CreateFullScreenMessage(t,e,a={background:"transparent",color:"black",border:"none"}){let s=g.NewElement("p",a);return s.innerHTML=e,g.SetStyle(t,C),t.appendChild(s),s}PreapreTableHeader(){let 
t=this.Table.Data,e=this.Table.Head.Cells;if(Array.isArray(t)&&t.length>0){let a=g.NewElement("tr");Object.keys(t[0]).forEach((s=>{if(!(e.length>=4||Object.isExtensible(t[0][s])&&!Array.isArray(t[0][s]))){let t=g.NewElement("th");t.innerHTML=b.ToUpperFirstLetter(s),a.appendChild(t),e.push(s)}})),this.Table.DOMTable.children[0].children[0].appendChild(a)}}async PrepareTable(){let t=g.NewElement("table");t.id=m,t.className=m,t.appendChild(g.NewElement("thead")),t.appendChild(g.NewElement("tbody")),this.Table.DOMTable.appendChild(t),this.Table.DOMTable.appendChild(g.NewElement("div",{},["WP_Plugin_data-screen"])),this.PreapreTableHeader()}}).GetUsers()})();
{
XY_Model_propare_state3_chi64_A0.py
import torch as tc import numpy as np import copy import os,sys import Circle_Function_Class_A0 as ev import matplotlib.pyplot as plt import matplotlib matplotlib.use('Agg') from torch.optim.lr_scheduler import StepLR import BasicFunSJR as bfs from CNNBTN import Paras_VL_CNN_BTN_Collected1chg1 import BasicFun as bf tmp = sys.argv[0][sys.argv[0].rfind(os.sep) + 1:] # 返回文件名 mark = tmp[-5] which_gpu = tmp[-4] # 调用固定 para = Paras_VL_CNN_BTN_Collected1chg1() para['dataset'] = 'fashion-mnist' para['device'] = bf.choose_device(which_gpu) para['log_name'] = './record' + mark + which_gpu start = tc.cuda.Event(enable_timing=True) end = tc.cuda.Event(enable_timing=True) os.environ['KMP_DUPLICATE_LIB_OK'] = 'TRUE' os.environ['CUDA_VISIBLE_DEVICE'] = '0' # tc.manual_seed(7) # 固定随机数,使产生的随机数可以复现 dtype = tc.float32 # float 监控norm mps_num = 48 lr = 1e-2 it_time = 50 pt_time = 50 # 交错优化所在的次数 dt_print = 10 step_size = it_time * pt_time // 5 # lr学习率递减的间隔epoch x1_axis = list() # 作图横轴 优化次数 identity_4 = tc.eye(4, dtype=dtype).to(para['device']) # 第二层演化小量变化的单位阵量子门 vol = tc.tensor(1e-3, dtype=dtype).to(para['device']) # 为其小量变化幅度, 对优化次数影响不大 con_vol = tc.tensor(1e-5, dtype=dtype).to(para['device']) entropy_list = list() average = tc.tensor(0, dtype=dtype).to(para['device']) # 计算纠缠熵 所用到的初始值 k_bood = 64 file_name = r'./tar_data.npz' out_file_name = r'./layer_out_data.npz' Loss_accuracy_range = 0.0001 # 控制Loss精度的范围,达到精度范围自动跳出循环 base_it_time = it_time//3 # 进行优化的最少次数,与分层优化有关 center_position = 24 layer_num = 3 # 控制不同层的门进行优化 gatenum = (mps_num - 1)*layer_num # 控制变分参数量子门的个数 tar_mpslist = list() ini_state = list() y_loss_layer = list() # 分层交错进行 每层的精度 y_loss_conda = list() # 协同优化 的精度 read_gatenum = (mps_num - 1)*(layer_num -1) zero_gatetensor = tc.zeros(gatenum, 4, 4) conba_gatalist = list() layer_gatelist = list() # 在后续被reshape成(2, 4, 2)的三阶tensor layer_gatelist_0 = list() # 将门分层储存 layer_gatelist_1 = list() # 将门分层储存 layer_gatelist_2 = list() # 将门分层储存 layer_gatelist_3 = list() # 将门分层储存 layer_gatelist_4 
= list() # 将门分层储存 layer_gatelist_5 = list() # 将门分层储存 layer_optimize = list() # 分层存储优化器 loss_ = list([list([]), list([]), list([]), list([]), list([]), list([]), list([]), list([]), list([]), list([]), list([]), list([]), list([])]) half_entropy_list = list([]) # 制作热图 half_entropy_list.append(tc.zeros([pt_time+1, mps_num-1])) # 最后一次为目标纠缠熵 number_list = list([0]) print('The quantum circuit is' + str(layer_num)) print('lr=:' + str(lr) + ', k_bood=: ' + str(k_bood) + ', A small amount of vol per unit door is: ' + str(vol)) data = np.load(file_name) tar_mpslist.append(tc.from_numpy(data['tar_mpslist0']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist1']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist2']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist3']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist4']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist5']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist6']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist7']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist8']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist9']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist10']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist11']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist12']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist13']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist14']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist15']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist16']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist17']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist18']).to(para['device'])) 
tar_mpslist.append(tc.from_numpy(data['tar_mpslist19']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist20']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist21']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist22']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist23']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist24']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist25']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist26']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist27']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist28']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist29']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist30']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist31']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist32']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist33']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist34']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist35']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist36']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist37']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist38']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist39']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist40']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist41']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist42']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist43']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist44']).to(para['device'])) 
tar_mpslist.append(tc.from_numpy(data['tar_mpslist45']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist46']).to(para['device'])) tar_mpslist.append(tc.from_numpy(data['tar_mpslist47']).to(para['device'])) def fprint(content, file=None, print_screen=True, append=True): if file is None: file = './record.log' if append: way = 'ab' else: way = 'wb' with open(file, way, buffering=0) as log: log.write((content + '\n').encode(encoding='utf-8')) if print_screen: print(content) def mps_norm(tar_tensor_): # 对目标量子态进行归一化 log归一化 tv = tc.einsum('asb,asd->bd', tar_tensor_[0].data, tar_tensor_[0].data) t_norm = tc.norm(tv) tv = tv / t_norm tar_tensor_[0] = tar_tensor_[0].data / tc.sqrt(t_norm) for gt in range(1, mps_num): if gt < mps_num - 1: tv = tc.einsum('ac,asb,csd->bd', tv, tar_tensor_[gt].data, tar_tensor_[gt].data) else: tv = tc.einsum('ac,asb,csd->bd', tv, tar_tensor_[gt].data, tar_tensor_[gt].data) norm_t = tc.norm(tv) tv = tv / norm_t tar_tensor_[gt] = tar_tensor_[gt] / tc.sqrt(norm_t) def qr_left_and_right_location(MPS_list, location, vol, feature_num=2): # 对目标MPS进行正交,并求解其纠缠熵 # print('location', location) for k in range(location): # print('k', k) q, r = tc.qr(MPS_list[k].reshape(-1, MPS_list[k].shape[2])) r = r MPS_list[k] = q.reshape(-1, feature_num, q.shape[1]) MPS_list[k + 1] = tc.einsum('nl, lmk-> nmk', [r, MPS_list[k + 1]]) for i in range(len(MPS_list) - 1, location, -1): # print('i', i) q, r = tc.qr(MPS_list[i].reshape(MPS_list[i].shape[0], -1).t()) q_shape = q.t().shape MPS_list[i] = q.t().reshape(q_shape[0], feature_num, -1) r = r MPS_list[i - 1] = tc.einsum('ldk, nk-> ldn', [MPS_list[i - 1], r]) MPS_list[location] = MPS_list[location]/tc.norm(MPS_list[location]) # u, s, v = tc.svd(MPS_list[location].reshape(-1, MPS_list[location].shape[2])) u, s, v = tc.svd(MPS_list[location].reshape(MPS_list[location].shape[0], -1)) s = s[s > vol] y = (-1) * tc.sum(tc.pow(s, 2) * tc.log(tc.pow(s, 2)), dim=0).item() return y, MPS_list # y 返回纠缠熵 , 
mps_list返回正交化的目标mps的list() def half_entropy(out_mps): for ht in range(1, mps_num): h_entropy = qr_left_and_right_location(out_mps, ht, 1e-16)[0] half_entropy_list[0][number_list[0], ht-1] = h_entropy number_list[0] = number_list[0] + 1 entro_tar = copy.deepcopy(tar_mpslist) for et in range(1, mps_num): entropy = qr_left_and_right_location(entro_tar, et, 1e-16)[0] entropy_list.append(entropy) for m in range(mps_num - 2): average_ = entropy_list[m] average = average + average_ average = average / (mps_num - 1) # 求解平均纠缠熵 center_
_left_and_right_location(entro_tar, center_position, 1e-16)[0] print('平均纠缠熵是:{}'.format(average)) print('正交中心为第' + str(center_position) + '个tensor的MPS纠缠熵是:{}'.format(center_entropy)) for nn in range(mps_num): # 初始真空零态 ini_state.append(tc.tensor([1, 0], dtype=dtype).reshape(1, 2, 1).to(para['device'])) read_memory_gate = bfs.load('read_memory_gate_data', 'gate') for vt in range(read_gatenum): # 为了分层优化的下一层结果比单层好,随机初始化小量微扰的单位阵 unitary_gate = read_memory_gate[vt].to(para['device']) unitary_gate.requires_grad = True layer_gatelist.append(unitary_gate) for jt in range(gatenum//layer_num): vol_gate = tc.mul(tc.rand((4, 4), dtype=dtype).to(para['device']), vol) unitary_gate = tc.add(vol_gate, identity_4) unitary_gate.requires_grad = True layer_gatelist.append(unitary_gate) mps_norm(ini_state) # 对初始量子态进行归一化 # lay_optimize_1 = tc.optim.Adam(layer_gatelist, lr=lr) # 分层优化的量子门参数,在分层优化结束之后进行协同优化 print('分层储存优化器进入list') for it in range(gatenum): # 将分层优化的loss的list 根据层数区分开 if it < (gatenum//layer_num)*1: layer_gatelist_0.append(layer_gatelist[it]) else: if it < (gatenum//layer_num)*2: layer_gatelist_1.append(layer_gatelist[it]) else: if it < (gatenum//layer_num)*3: layer_gatelist_2.append(layer_gatelist[it]) else: if it < (gatenum//layer_num)*4: layer_gatelist_3.append(layer_gatelist[it]) else: if it < (gatenum//layer_num)*5: layer_gatelist_4.append(layer_gatelist[it]) else: layer_gatelist_5.append(layer_gatelist[it]) lay_optimize_0 = tc.optim.Adam(layer_gatelist_0, lr=lr) # 分层优化的量子门参数,在分层优化结束之后进行协同优化 lay_optimize_1 = tc.optim.Adam(layer_gatelist_1, lr=lr) lay_optimize_2 = tc.optim.Adam(layer_gatelist_2, lr=lr) layer_optimize.append(lay_optimize_0) # 将三层优化器 layer_optimize.append(lay_optimize_1) layer_optimize.append(lay_optimize_2) scheduler_0 = StepLR(lay_optimize_0, step_size=step_size, gamma=0.1) scheduler_1 = StepLR(lay_optimize_1, step_size=step_size, gamma=0.1) scheduler_2 = StepLR(lay_optimize_2, step_size=step_size, gamma=0.1) scheduler = list() scheduler.append(scheduler_0) 
scheduler.append(scheduler_1) scheduler.append(scheduler_2) evo = ev.Evolve(mps_num, k_bood, 2, gatenum, layer_num) evo.init_tensor_list(copy.deepcopy(ini_state)) for bt in range(layer_num): print('初始化第' + str(bt) + '的学习率:', layer_optimize[bt].defaults['lr']) start.record() # 开始计算模型的运算时间花费 for pt in range(pt_time): # 交错优化所在的次数 fprint('Circle优化位于第' + str(pt) + '次', file=para['log_name']) for lay_num in range(layer_num): fprint('Circle优化位于第' + str(lay_num) + '层', file=para['log_name']) for vt in range(it_time): for llt in range(lay_num, lay_num + 1): # 先将优化层进行演化,演化完成后将其存进新的list,作为下一层初始 evo.layered_evolve_mps(layer_gatelist, llt) if vt == it_time - 1: evo.storage_layer_out_optimization(llt, 0) for at in range(lay_num + 1, layer_num): # 将不变分的量子门演化进入线路 evo.layered_evolve_mps(layer_gatelist, at) lay_loss = evo.log_fidelity(tar_mpslist) # 借助了mps跨越指数复杂度的优势 if ((vt + 1) % dt_print) == 0: if vt == 0: fprint('block') else: fprint('At t = ' + str(vt) + ', loss = ' + str(lay_loss.item()), file=para['log_name']) loss_[lay_num].append(lay_loss.item()) lay_loss.backward() layer_optimize[lay_num].step() layer_optimize[lay_num].zero_grad() if ((vt + 1) % dt_print) == 0: fprint("第%d个epoch的学习率:%f" % (vt, layer_optimize[lay_num].param_groups[0]['lr']), file=para['log_name']) scheduler[lay_num].step() tc.cuda.empty_cache() # 删除不必要的变量 if lay_num == layer_num-1: if vt == it_time - 1: half_entropy(evo.out_optimization()) if vt == it_time - 1: evo.read_layer_out_optimization(lay_num, 0) else: evo.read_layer_out_optimization(lay_num, 1) half_entropy(tar_mpslist) # 热图的最后一行为目标态纠缠的信息 bfs.save('.', 'out_memory_half_entropy_data', [half_entropy_list], ['half_entropy']) for dt in range(gatenum): zero_gatetensor[dt, :, :] = layer_gatelist[dt].data bfs.save('.', 'out_memory_gate_data', [zero_gatetensor], ['gate']) out_layer = evo.out_optimization() out_layer_numpy = list() for nt in range(mps_num): # 将目标MPS转存成numpy数组 out_layer_numpy.append(out_layer[nt].numpy()) np.savez(out_file_name, 
tar_mpslist0=out_layer_numpy[0], tar_mpslist1=out_layer_numpy[1], tar_mpslist2=out_layer_numpy[2], tar_mpslist3=out_layer_numpy[3], tar_mpslist4=out_layer_numpy[4], tar_mpslist5=out_layer_numpy[5], tar_mpslist6=out_layer_numpy[6], tar_mpslist7=out_layer_numpy[7], tar_mpslist8=out_layer_numpy[8], tar_mpslist9=out_layer_numpy[9], tar_mpslist10=out_layer_numpy[10], tar_mpslist11=out_layer_numpy[11], tar_mpslist12=out_layer_numpy[12], tar_mpslist13=out_layer_numpy[13], tar_mpslist14=out_layer_numpy[14], tar_mpslist15=out_layer_numpy[15], tar_mpslist16=out_layer_numpy[16], tar_mpslist17=out_layer_numpy[17], tar_mpslist18=out_layer_numpy[18], tar_mpslist19=out_layer_numpy[19], tar_mpslist20=out_layer_numpy[20], tar_mpslist21=out_layer_numpy[21], tar_mpslist22=out_layer_numpy[22], tar_mpslist23=out_layer_numpy[23], tar_mpslist24=out_layer_numpy[24], tar_mpslist25=out_layer_numpy[25], tar_mpslist26=out_layer_numpy[26], tar_mpslist27=out_layer_numpy[27], tar_mpslist28=out_layer_numpy[28], tar_mpslist29=out_layer_numpy[29], tar_mpslist30=out_layer_numpy[30], tar_mpslist31=out_layer_numpy[31], tar_mpslist32=out_layer_numpy[32], tar_mpslist33=out_layer_numpy[33], tar_mpslist34=out_layer_numpy[34], tar_mpslist35=out_layer_numpy[35], tar_mpslist36=out_layer_numpy[36], tar_mpslist37=out_layer_numpy[37], tar_mpslist38=out_layer_numpy[38], tar_mpslist39=out_layer_numpy[39], tar_mpslist40=out_layer_numpy[40], tar_mpslist41=out_layer_numpy[41], tar_mpslist42=out_layer_numpy[42], tar_mpslist43=out_layer_numpy[43], tar_mpslist44=out_layer_numpy[44], tar_mpslist45=out_layer_numpy[45], tar_mpslist46=out_layer_numpy[46], tar_mpslist47=out_layer_numpy[47]) for nt in range(mps_num): # 将目标MPS转存成numpy数组 tar_mpslist[nt] = tar_mpslist[nt].cpu().numpy() end.record() # 截至记录模型花费计算的时间 # Waits for everything to finish running tc.cuda.synchronize() # 等待当前设备上所有流中的所有核心完成。 print('Runtime: ', start.elapsed_time(end)) for i in range(pt_time*5): x1_axis.append(i*10) color_list = list(['deeppink', 'red', 
'gold', 'black', 'lime', 'peru', 'purple', 'blue']) plt.figure(num=1, figsize=(16, 12), dpi=100) plt.tick_params(labelsize=16) plt.xlabel("num of optimize", fontsize=20) # x轴上的名字 plt.ylabel("negative-logarithmic fidelities (NLFs) per site", fontsize=20) plt.grid(axis='x', c='g', linestyle='--', alpha=0.5) for kt in range(layer_num): plt.plot(x1_axis, loss_[kt], color=color_list[kt], linewidth=3, label=' Circle layered Optimize' + str(kt)) plt.legend(prop={'family': 'Times New Roman', 'size': 16}, loc='upper right') plt.savefig('./MPS_Step_3layer_Circle.jpg')
entropy = qr
cmdparse_test.go
// Copyright (c) 2014 The btcsuite developers // Use of this source code is governed by an ISC // license that can be found in the LICENSE file. package btcjson_test import ( "encoding/json" "math" "reflect" "testing" "github.com/cctip/bchd/btcjson" ) // TestAssignField tests the assignField function handles supported combinations // properly. func TestAssignField(t *testing.T) { t.Parallel() tests := []struct { name string dest interface{} src interface{} expected interface{} }{ { name: "same types", dest: int8(0), src: int8(100), expected: int8(100), }, { name: "same types - more source pointers", dest: int8(0), src: func() interface{} { i := int8(100) return &i }(), expected: int8(100), }, { name: "same types - more dest pointers", dest: func() interface{} { i := int8(0) return &i }(), src: int8(100), expected: int8(100), }, { name: "convertible types - more source pointers", dest: int16(0), src: func() interface{} { i := int8(100) return &i }(), expected: int16(100), }, { name: "convertible types - both pointers", dest: func() interface{} { i := int8(0) return &i }(), src: func() interface{} { i := int16(100) return &i }(), expected: int8(100), }, { name: "convertible types - int16 -> int8", dest: int8(0), src: int16(100), expected: int8(100), }, { name: "convertible types - int16 -> uint8", dest: uint8(0), src: int16(100), expected: uint8(100), }, { name: "convertible types - uint16 -> int8", dest: int8(0), src: uint16(100), expected: int8(100), }, { name: "convertible types - uint16 -> uint8", dest: uint8(0), src: uint16(100), expected: uint8(100), }, { name: "convertible types - float32 -> float64", dest: float64(0), src: float32(1.5), expected: float64(1.5), }, { name: "convertible types - float64 -> float32", dest: float32(0), src: float64(1.5), expected: float32(1.5), }, { name: "convertible types - string -> bool", dest: false, src: "true", expected: true, }, { name: "convertible types - string -> int8", dest: int8(0), src: "100", expected: int8(100), }, 
{ name: "convertible types - string -> uint8", dest: uint8(0), src: "100", expected: uint8(100), }, { name: "convertible types - string -> float32", dest: float32(0), src: "1.5", expected: float32(1.5), }, { name: "convertible types - typecase string -> string", dest: "", src: func() interface{} { type foo string return foo("foo") }(), expected: "foo", }, { name: "convertible types - string -> array", dest: [2]string{}, src: `["test","test2"]`, expected: [2]string{"test", "test2"}, }, { name: "convertible types - string -> slice", dest: []string{}, src: `["test","test2"]`, expected: []string{"test", "test2"}, }, { name: "convertible types - string -> struct", dest: struct{ A int }{}, src: `{"A":100}`, expected: struct{ A int }{100}, }, { name: "convertible types - string -> map", dest: map[string]float64{}, src: `{"1Address":1.5}`, expected: map[string]float64{"1Address": 1.5}, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { dst := reflect.New(reflect.TypeOf(test.dest)).Elem() src := reflect.ValueOf(test.src) err := btcjson.TstAssignField(1, "testField", dst, src) if err != nil { t.Errorf("Test #%d (%s) unexpected error: %v", i, test.name, err) continue } // Inidirect through to the base types to ensure their values // are the same. for dst.Kind() == reflect.Ptr { dst = dst.Elem() } if !reflect.DeepEqual(dst.Interface(), test.expected) { t.Errorf("Test #%d (%s) unexpected value - got %v, "+ "want %v", i, test.name, dst.Interface(), test.expected) continue } } } // TestAssignFieldErrors tests the assignField function error paths. 
func TestAssignFieldErrors(t *testing.T) { t.Parallel() tests := []struct { name string dest interface{} src interface{} err btcjson.Error }{ { name: "general incompatible int -> string", dest: string(rune(0)), src: int(0), err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "overflow source int -> dest int", dest: int8(0), src: int(128), err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "overflow source int -> dest uint", dest: uint8(0), src: int(256), err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "int -> float", dest: float32(0), src: int(256), err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "overflow source uint64 -> dest int64", dest: int64(0), src: uint64(1 << 63), err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "overflow source uint -> dest int", dest: int8(0), src: uint(128), err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "overflow source uint -> dest uint", dest: uint8(0), src: uint(256), err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "uint -> float", dest: float32(0), src: uint(256), err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "float -> int", dest: int(0), src: float32(1.0), err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "overflow float64 -> float32", dest: float32(0), src: float64(math.MaxFloat64), err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "invalid string -> bool", dest: true, src: "foo", err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "invalid string -> int", dest: int8(0), src: "foo", err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "overflow string -> int", dest: int8(0), src: "128", err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "invalid string -> uint", dest: uint8(0), src: "foo", err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "overflow string -> uint", dest: uint8(0), src: "256", err: 
btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "invalid string -> float", dest: float32(0), src: "foo", err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "overflow string -> float", dest: float32(0), src: "1.7976931348623157e+308", err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "invalid string -> array", dest: [3]int{}, src: "foo", err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "invalid string -> slice", dest: []int{}, src: "foo", err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "invalid string -> struct", dest: struct{ A int }{}, src: "foo", err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "invalid string -> map", dest: map[string]int{}, src: "foo", err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { dst := reflect.New(reflect.TypeOf(test.dest)).Elem() src := reflect.ValueOf(test.src) err := btcjson.TstAssignField(1, "testField", dst, src) if reflect.TypeOf(err) != reflect.TypeOf(test.err) { t.Errorf("Test #%d (%s) wrong error - got %T (%[3]v), "+ "want %T", i, test.name, err, test.err) continue } gotErrorCode := err.(btcjson.Error).ErrorCode if gotErrorCode != test.err.ErrorCode { t.Errorf("Test #%d (%s) mismatched error code - got "+ "%v (%v), want %v", i, test.name, gotErrorCode, err, test.err.ErrorCode) continue } } } // TestNewCmdErrors ensures the error paths of NewCmd behave as expected. 
func TestNewCmdErrors(t *testing.T) { t.Parallel() tests := []struct { name string method string args []interface{} err btcjson.Error }{ { name: "unregistered command", method: "boguscommand", args: []interface{}{}, err: btcjson.Error{ErrorCode: btcjson.ErrUnregisteredMethod}, }, { name: "too few parameters to command with required + optional", method: "getblock", args: []interface{}{}, err: btcjson.Error{ErrorCode: btcjson.ErrNumParams}, }, { name: "too many parameters to command with no optional", method: "getblockcount", args: []interface{}{"123"}, err: btcjson.Error{ErrorCode: btcjson.ErrNumParams}, }, { name: "incorrect parameter type", method: "getblock", args: []interface{}{1}, err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { _, err := btcjson.NewCmd(test.method, test.args...) if reflect.TypeOf(err) != reflect.TypeOf(test.err) { t.Errorf("Test #%d (%s) wrong error - got %T (%v), "+ "want %T", i, test.name, err, err, test.err) continue } gotErrorCode := err.(btcjson.Error).ErrorCode if gotErrorCode != test.err.ErrorCode { t.Errorf("Test #%d (%s) mismatched error code - got "+ "%v (%v), want %v", i, test.name, gotErrorCode, err, test.err.ErrorCode)
} // TestMarshalCmdErrors tests the error paths of the MarshalCmd function. func TestMarshalCmdErrors(t *testing.T) { t.Parallel() tests := []struct { name string id interface{} cmd interface{} err btcjson.Error }{ { name: "unregistered type", id: 1, cmd: (*int)(nil), err: btcjson.Error{ErrorCode: btcjson.ErrUnregisteredMethod}, }, { name: "nil instance of registered type", id: 1, cmd: (*btcjson.GetBlockCmd)(nil), err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "nil instance of registered type", id: []int{0, 1}, cmd: &btcjson.GetBlockCountCmd{}, err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { _, err := btcjson.MarshalCmd("1.0", test.id, test.cmd) if reflect.TypeOf(err) != reflect.TypeOf(test.err) { t.Errorf("Test #%d (%s) wrong error - got %T (%v), "+ "want %T", i, test.name, err, err, test.err) continue } gotErrorCode := err.(btcjson.Error).ErrorCode if gotErrorCode != test.err.ErrorCode { t.Errorf("Test #%d (%s) mismatched error code - got "+ "%v (%v), want %v", i, test.name, gotErrorCode, err, test.err.ErrorCode) continue } } } // TestUnmarshalCmdErrors tests the error paths of the UnmarshalCmd function. 
func TestUnmarshalCmdErrors(t *testing.T) { t.Parallel() tests := []struct { name string request btcjson.Request err btcjson.Error }{ { name: "unregistered type", request: btcjson.Request{ Jsonrpc: "1.0", Method: "bogusmethod", Params: nil, ID: nil, }, err: btcjson.Error{ErrorCode: btcjson.ErrUnregisteredMethod}, }, { name: "incorrect number of params", request: btcjson.Request{ Jsonrpc: "1.0", Method: "getblockcount", Params: []json.RawMessage{[]byte(`"bogusparam"`)}, ID: nil, }, err: btcjson.Error{ErrorCode: btcjson.ErrNumParams}, }, { name: "invalid type for a parameter", request: btcjson.Request{ Jsonrpc: "1.0", Method: "getblock", Params: []json.RawMessage{[]byte("1")}, ID: nil, }, err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, { name: "invalid JSON for a parameter", request: btcjson.Request{ Jsonrpc: "1.0", Method: "getblock", Params: []json.RawMessage{[]byte(`"1`)}, ID: nil, }, err: btcjson.Error{ErrorCode: btcjson.ErrInvalidType}, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { _, err := btcjson.UnmarshalCmd(&test.request) if reflect.TypeOf(err) != reflect.TypeOf(test.err) { t.Errorf("Test #%d (%s) wrong error - got %T (%v), "+ "want %T", i, test.name, err, err, test.err) continue } gotErrorCode := err.(btcjson.Error).ErrorCode if gotErrorCode != test.err.ErrorCode { t.Errorf("Test #%d (%s) mismatched error code - got "+ "%v (%v), want %v", i, test.name, gotErrorCode, err, test.err.ErrorCode) continue } } }
continue } }
debounce.js
define([ "./funcs", "./defer" ],function(funcs,defer){ function
(fn, wait,useAnimationFrame) { var timeout, defered, debounced = function () { var context = this, args = arguments; var later = function () { timeout = null; if (useAnimationFrame) { defered = defer(fn,args,context); } else { fn.apply(context, args); } }; cancel(); timeout = setTimeout(later, wait); return { cancel }; }, cancel = debounced.cancel = function () { if (timeout) { clearTimeout(timeout); } if (defered) { defered.cancel(); } timeout = void 0; defered = void 0; }; return debounced; } return funcs.debounce = debounce; })
debounce
env.d.ts
declare namespace NodeJS { interface ProcessEnv { PORT: string;
REDIS_URL: string; DATABASE_URL: string; SESSION_SECRET: string; TOKEN_SECRET: string; } }
db_manager.py
# -*- coding: utf-8 -*- # # db_manager.py # # The module is part of pydavis. # """ Interacting with MySQL databases through the DatabaseManager. """ __author__ = 'Severin E. R. Langberg' __email__ = '[email protected]' __status__ = 'Operational' import pymysql from pydavis import utils from datetime import datetime class DatabaseManager: """Handles interaction with MySQL database. Args: user (str): Follows from writing data to table. The MySQL username credential. Necessary for connecting to database. password (str): Follows from writing data to table. The MySQL password credential. Necessary for connecting to database. host (str): Specifies the host where the server is running. Uses localhost by default. port (int): Specifies port number. Uses 3306 by default. database (str): Follows from writing data to table. Name of an existing database, or the name of a database that will be created. """ _CHARS_LIMIT = 20 @classmethod def update_limit_varchars(cls, new_limit): """Defines the maximum number of characters assigned to string attributes in the created database tables. Args: new_limit (int): The maximum number of characters assigned to string attributes. """ utils._check_parameter('new_limit', int, new_limit) cls._CHARS_LIMIT = new_limit def __init__(self, user, password, host='localhost', port=3306): self.user = str(user) self.password = str(password) self.host = str(host) self.port = int(port) # NOTE: Variables set during instance. 
self._con = None self._cur = None self._query = None self._current_db = None @property def connection(self): """Returns database connection.""" return self._con @property def current_db(self): """Returns name of working database.""" return self._current_db @property def query(self): """Returns the latest MySQL query ready to be executed.""" return self._query @property def results(self): """Returns the result from a database query.""" return self._cur.fetchall() @property def limit_varchars(self): """Returns the current limit to number of characters in varchar variables.""" return self._CHARS_LIMIT def connect(self): """Connects to a MySQL server.""" if self.connection: print('Already connected to `{}`'.format(self.host)) return try: con = pymysql.connect(host=self.host, port=self.port, user=self.user, passwd=self.password) print('Connecting to: `{}`'.format(self.host)) self._con = con self._cur = self._con.cursor() except: raise utils.DatabaseConnectionError('Unable to connect to: `{}`' ''.format(self.host)) return self def _check_connection(self): # Checks if connected to a MySQL server. if self.connection:
else: raise utils.DatabaseConnectionError('Disconnected from {}' ''.format(self.host)) def _check_database(self): if self.current_db: return else: raise utils.MissingDatabaseError('Must specify working database') def execute(self): """Execute a MySQL command and commit changes to the database.""" self._check_connection() try: self._cur.execute(self._query) except: raise utils.DatabaseExecutionError('Unable to execute query:\n' '`{}`'.format(self._query)) # Commit changes to database. try: self._con.commit() except: raise utils.DatabaseCommitError('Unable to commit changes: `{}`' ''.format(self._db_name)) return self def create_database(self, database): """Creates a new database. Only enabled if connected to a MySQL server. Args: database (str): Name of the database to create. """ utils._check_parameter('database', str, database) self._current_db = database self._query = 'CREATE DATABASE IF NOT EXISTS {};'.format(database) return self def use_database(self, database): """Selects an existent database as working database. Only enabled if connected to a MySQL server and the database exists. Args: database (str): Name of the new working database. """ utils._check_parameter('database', str, database) self._current_db = database self._query = 'USE {}'.format(database) return self def drop_database(self, database): """Deletes a database. Only enabled if connected to a MySQL server. Args: database (str): Name of the database to delete. """ utils._check_parameter('database', str, database) # Resetting working DB variable. self._current_db = None self._query = 'DROP DATABASE IF EXISTS {};'.format(database) return self def create_table(self, table_name, table_columns): """Creates a table if connected to a MySQL server and a working database is set. Args: table_name (str): Name of the new table. table_columns (dict): The column labels and corresponding column data types as key-value pairs. The data types are given in Python format. 
""" self._check_database() utils._check_parameter('table_name', str, table_name) utils._check_parameter('table_columns', dict, table_columns) col_labels, col_dtypes = list(zip(*table_columns.items())) mysql_dtypes = self.convert_dtypes(col_dtypes) _columns = '' for label, dtype in zip(col_labels[:-1], mysql_dtypes[:-1]): _columns += ' {} {},'.format(label, dtype) _columns += ' {} {}'.format(col_labels[-1], mysql_dtypes[-1]) self._query = """CREATE TABLE IF NOT EXISTS {} ({}); """.format(table_name, _columns) return self def convert_dtypes(self, data_types): """Converts from Python to MySQL data types. Args: data_types (iterable): A container of Python data types that will be converted to MySQL data types. Returns: list: The corresponding MySQL data types. """ mysql_dtypes = [] for data_type in data_types: if data_type is datetime: mysql_dtypes.append('DATETIME') elif data_type is float: mysql_dtypes.append('FLOAT') elif data_type is int: mysql_dtypes.append('INT') elif data_type is str: mysql_dtypes.append('VARCHAR({})'.format(self._CHARS_LIMIT)) else: raise TypeError('Unable to recognize {} as data type' ''.format(data_type)) return mysql_dtypes def drop_table(self, table_name): """Deletes specified table from database. Only enabled if connected to a MySQL server and a working database is set. Args: table_name (str): Name of the table. """ self._check_database() utils._check_parameter('table_name', str, table_name) self._query = 'DROP TABLE IF EXISTS {};'.format(table_name) return self def describe_table(self, table_name): """Returns description of table content. Only enabled if connected to a MySQL server and a working database is set. Args: table_name (str): Name of the table. """ self._check_database() utils._check_parameter('table_name', str, table_name) self._query = 'DESCRIBE {};'.format(table_name) return self def insert_values(self, table_name, table_values): """Inserts entities into a table. Args: table_name (str): The name of the table. 
table_values (dict): The column labels and corresponding values as key-value pairs. """ self._check_database() utils._check_parameter('table_name', str, table_name) utils._check_parameter('table_values', dict, table_values) labels, values = list(zip(*table_values.items())) _columns, _values = '', '' for label, value in zip(labels[:-1], values[:-1]): _columns += "{}, ".format(str(label)) _values += "'{}', ".format(str(value)) _columns += "{}".format(str(labels[-1])) _values += "'{}'".format(str(values[-1])) self._query = """INSERT INTO {} ({}) VALUES ({}); """.format(table_name, _columns, _values) return self def add_constraints(self, table_name, constraints): raise NotImplementedError('Method currently not implemented.') def terminate_connection(self): """Shuts down connection to MySQL server.""" self._con.close() self._cur.close() print('Shutting down connection to: `{}`'.format(self.host)) # NOTE: Resetting connection variables. self._con = None self._cur = None return self
return
migrate.rs
use anyhow::{bail, Context}; use chrono::Utc; use console::style; use sqlx::migrate::{Migrate, MigrateError, MigrationType, Migrator}; use sqlx::{AnyConnection, Connection}; use std::fs::{self, File}; use std::io::Write; use std::path::Path; use std::time::Duration; fn create_file( migration_source: &str, file_prefix: &str, description: &str, migration_type: MigrationType, ) -> anyhow::Result<()> { use std::path::PathBuf; let mut file_name = file_prefix.to_string(); file_name.push_str("_"); file_name.push_str(&description.replace(' ', "_")); file_name.push_str(migration_type.suffix()); let mut path = PathBuf::new(); path.push(migration_source); path.push(&file_name); println!("Creating {}", style(path.display()).cyan()); let mut file = File::create(&path).context("Failed to create migration file")?; file.write_all(migration_type.file_content().as_bytes())?; Ok(()) } pub async fn add( migration_source: &str, description: &str, reversible: bool, ) -> anyhow::Result<()> { fs::create_dir_all(migration_source).context("Unable to create migrations directory")?; let migrator = Migrator::new(Path::new(migration_source)).await?; // This checks if all existing migrations are of the same type as the reverisble flag passed for migration in migrator.iter() { if migration.migration_type.is_reversible() != reversible { bail!(MigrateError::InvalidMixReversibleAndSimple); } } let dt = Utc::now(); let file_prefix = dt.format("%Y%m%d%H%M%S").to_string(); if reversible { create_file( migration_source, &file_prefix, description, MigrationType::ReversibleUp, )?; create_file( migration_source, &file_prefix, description, MigrationType::ReversibleDown, )?; } else { create_file( migration_source, &file_prefix, description, MigrationType::Simple, )?; } Ok(()) } pub async fn
(migration_source: &str, uri: &str) -> anyhow::Result<()> { let migrator = Migrator::new(Path::new(migration_source)).await?; let mut conn = AnyConnection::connect(uri).await?; conn.ensure_migrations_table().await?; let (version, _) = conn.version().await?.unwrap_or((0, false)); for migration in migrator.iter() { println!( "{}/{} {}", style(migration.version).cyan(), if version >= migration.version { style("installed").green() } else { style("pending").yellow() }, migration.description, ); } Ok(()) } pub async fn run(migration_source: &str, uri: &str, dry_run: bool) -> anyhow::Result<()> { let migrator = Migrator::new(Path::new(migration_source)).await?; let mut conn = AnyConnection::connect(uri).await?; conn.ensure_migrations_table().await?; let (version, dirty) = conn.version().await?.unwrap_or((0, false)); if dirty { bail!(MigrateError::Dirty(version)); } for migration in migrator.iter() { if migration.migration_type.is_down_migration() { // Skipping down migrations continue; } if migration.version > version { let elapsed = if dry_run { Duration::new(0, 0) } else { conn.apply(migration).await? 
}; let text = if dry_run { "Can apply" } else { "Applied" }; println!( "{} {}/{} {} {}", text, style(migration.version).cyan(), style(migration.migration_type.label()).green(), migration.description, style(format!("({:?})", elapsed)).dim() ); } else { conn.validate(migration).await?; } } Ok(()) } pub async fn revert(migration_source: &str, uri: &str, dry_run: bool) -> anyhow::Result<()> { let migrator = Migrator::new(Path::new(migration_source)).await?; let mut conn = AnyConnection::connect(uri).await?; conn.ensure_migrations_table().await?; let (version, dirty) = conn.version().await?.unwrap_or((0, false)); if dirty { bail!(MigrateError::Dirty(version)); } let mut is_applied = false; for migration in migrator.iter().rev() { if !migration.migration_type.is_down_migration() { // Skipping non down migration // This will skip any simple or up migration file continue; } if migration.version > version { // Skipping unapplied migrations continue; } let elapsed = if dry_run { Duration::new(0, 0) } else { conn.revert(migration).await? }; let text = if dry_run { "Can apply" } else { "Applied" }; println!( "{} {}/{} {} {}", text, style(migration.version).cyan(), style(migration.migration_type.label()).green(), migration.description, style(format!("({:?})", elapsed)).dim() ); is_applied = true; // Only a single migration will be reverted at a time, so we break break; } if !is_applied { println!("No migrations available to revert"); } Ok(()) }
info
fur_test.go
package fur

import (
	"testing"
	"time"

	"github.com/nicola-spb/locales"
	"github.com/nicola-spb/locales/currency"
)

func TestLocale(t *testing.T) {
	trans := New()
	expected := "fur"
	if trans.Locale() != expected {
		t.Errorf("Expected '%s' Got '%s'", expected, trans.Locale())
	}
}

// requireRules fails the test for every rule in expected that is missing from
// rules. No fixtures exist for "fur" yet, so callers currently pass nil.
func requireRules(t *testing.T, rules, expected []locales.PluralRule) {
	for _, want := range expected {
		found := locales.PluralRuleUnknown
		for _, rule := range rules {
			if rule == want {
				found = rule
				break
			}
		}
		if found == locales.PluralRuleUnknown {
			t.Errorf("Expected '%s' Got '%s'", want, found)
		}
	}
}

func TestPluralsRange(t *testing.T)    { requireRules(t, New().PluralsRange(), nil) }
func TestPluralsOrdinal(t *testing.T)  { requireRules(t, New().PluralsOrdinal(), nil) }
func TestPluralsCardinal(t *testing.T) { requireRules(t, New().PluralsCardinal(), nil) }

// The *PluralRule lookups take different argument shapes, so they stay spelled
// out; their fixture tables are empty templates awaiting locale data.

func TestRangePlurals(t *testing.T) {
	trans := New()
	for _, tt := range []struct {
		num1, num2 float64
		v1, v2     uint64
		expected   locales.PluralRule
	}{} {
		if rule := trans.RangePluralRule(tt.num1, tt.v1, tt.num2, tt.v2); rule != tt.expected {
			t.Errorf("Expected '%s' Got '%s'", tt.expected, rule)
		}
	}
}

func TestOrdinalPlurals(t *testing.T) {
	trans := New()
	for _, tt := range []struct {
		num      float64
		v        uint64
		expected locales.PluralRule
	}{} {
		if rule := trans.OrdinalPluralRule(tt.num, tt.v); rule != tt.expected {
			t.Errorf("Expected '%s' Got '%s'", tt.expected, rule)
		}
	}
}

func TestCardinalPlurals(t *testing.T) {
	trans := New()
	for _, tt := range []struct {
		num      float64
		v        uint64
		expected locales.PluralRule
	}{} {
		if rule := trans.CardinalPluralRule(tt.num, tt.v); rule != tt.expected {
			t.Errorf("Expected '%s' Got '%s'", tt.expected, rule)
		}
	}
}

// Round-trip checks: each indexed accessor must agree with the bulk slice.

func TestDaysAbbreviated(t *testing.T) {
	trans := New()
	for i, day := range trans.WeekdaysAbbreviated() {
		if s := trans.WeekdayAbbreviated(time.Weekday(i)); s != day {
			t.Errorf("Expected '%s' Got '%s'", day, s)
		}
	}
}

func TestDaysNarrow(t *testing.T) {
	trans := New()
	for i, day := range trans.WeekdaysNarrow() {
		if s := trans.WeekdayNarrow(time.Weekday(i)); s != day {
			t.Errorf("Expected '%s' Got '%s'", day, s)
		}
	}
}

func TestDaysShort(t *testing.T) {
	trans := New()
	for i, day := range trans.WeekdaysShort() {
		if s := trans.WeekdayShort(time.Weekday(i)); s != day {
			t.Errorf("Expected '%s' Got '%s'", day, s)
		}
	}
}

func TestDaysWide(t *testing.T) {
	trans := New()
	for i, day := range trans.WeekdaysWide() {
		if s := trans.WeekdayWide(time.Weekday(i)); s != day {
			t.Errorf("Expected '%s' Got '%s'", day, s)
		}
	}
}

func TestMonthsAbbreviated(t *testing.T) {
	trans := New()
	for i, month := range trans.MonthsAbbreviated() {
		if s := trans.MonthAbbreviated(time.Month(i + 1)); s != month {
			t.Errorf("Expected '%s' Got '%s'", month, s)
		}
	}
}

func TestMonthsNarrow(t *testing.T) {
	trans := New()
	for i, month := range trans.MonthsNarrow() {
		if s := trans.MonthNarrow(time.Month(i + 1)); s != month {
			t.Errorf("Expected '%s' Got '%s'", month, s)
		}
	}
}

func TestMonthsWide(t *testing.T) {
	trans := New()
	for i, month := range trans.MonthsWide() {
		if s := trans.MonthWide(time.Month(i + 1)); s != month {
			t.Errorf("Expected '%s' Got '%s'", month, s)
		}
	}
}

// checkFmt drives a date/time formatter over a (currently empty) fixture table.
func checkFmt(t *testing.T, format func(time.Time) string) {
	for _, tt := range []struct {
		t        time.Time
		expected string
	}{} {
		if s := format(tt.t); s != tt.expected {
			t.Errorf("Expected '%s' Got '%s'", tt.expected, s)
		}
	}
}

func TestFmtTimeFull(t *testing.T)   { checkFmt(t, New().FmtTimeFull) }
func TestFmtTimeLong(t *testing.T)   { checkFmt(t, New().FmtTimeLong) }
func TestFmtTimeMedium(t *testing.T) { checkFmt(t, New().FmtTimeMedium) }
func TestFmtTimeShort(t *testing.T)  { checkFmt(t, New().FmtTimeShort) }
func TestFmtDateFull(t *testing.T)   { checkFmt(t, New().FmtDateFull) }
func TestFmtDateLong(t *testing.T)   { checkFmt(t, New().FmtDateLong) }
func TestFmtDateMedium(t *testing.T) { checkFmt(t, New().FmtDateMedium) }
func TestFmtDateShort(t *testing.T)  { checkFmt(t, New().FmtDateShort) }

// (signature cut by the chunk boundary; the body appears out of order in a
// later chunk of this dataset row)
func TestFmtNumber(t *testing.T)
func TestFmtCurrency(t *testing.T) { tests := []struct { num float64 v uint64 currency currency.Type expected string }{ // { // num: 1123456.5643, // v: 2, // currency: currency.USD, // expected: "$1,123,456.56", // }, // { // num: 1123456.5643, // v: 1, // currency: currency.USD, // expected: "$1,123,456.60", // }, // { // num: 221123456.5643, // v: 3, // currency: currency.USD, // expected: "$221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.USD, // expected: "-$221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.CAD, // expected: "-CAD 221,123,456.564", // }, // { // num: 0, // v: 2, // currency: currency.USD, // expected: "$0.00", // }, // { // num: -0, // v: 2, // currency: currency.USD, // expected: "$0.00", // }, // { // num: -0, // v: 2, // currency: currency.CAD, // expected: "CAD 0.00", // }, // { // num: 1.23, // v: 0, // currency: currency.USD, // expected: "$1.00", // }, } trans := New() for _, tt := range tests { s := trans.FmtCurrency(tt.num, tt.v, tt.currency) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtAccounting(t *testing.T) { tests := []struct { num float64 v uint64 currency currency.Type expected string }{ // { // num: 1123456.5643, // v: 2, // currency: currency.USD, // expected: "$1,123,456.56", // }, // { // num: 1123456.5643, // v: 1, // currency: currency.USD, // expected: "$1,123,456.60", // }, // { // num: 221123456.5643, // v: 3, // currency: currency.USD, // expected: "$221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.USD, // expected: "($221,123,456.564)", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.CAD, // expected: "(CAD 221,123,456.564)", // }, // { // num: -0, // v: 2, // currency: currency.USD, // expected: "$0.00", // }, // { // num: -0, // v: 2, // currency: currency.CAD, // expected: "CAD 0.00", // }, // { // num: 1.23, // v: 0, // currency: currency.USD, 
// expected: "$1.00", // }, } trans := New() for _, tt := range tests { s := trans.FmtAccounting(tt.num, tt.v, tt.currency) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtPercent(t *testing.T) { tests := []struct { num float64 v uint64 expected string }{ // { // num: 15, // v: 0, // expected: "15%", // }, // { // num: 15, // v: 2, // expected: "15.00%", // }, // { // num: 434.45, // v: 0, // expected: "434%", // }, // { // num: 34.4, // v: 2, // expected: "34.40%", // }, // { // num: -34, // v: 0, // expected: "-34%", // }, } trans := New() for _, tt := range tests { s := trans.FmtPercent(tt.num, tt.v) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } }
{ tests := []struct { num float64 v uint64 expected string }{ // { // num: 1123456.5643, // v: 2, // expected: "1,123,456.56", // }, // { // num: 1123456.5643, // v: 1, // expected: "1,123,456.6", // }, // { // num: 221123456.5643, // v: 3, // expected: "221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // expected: "-221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // expected: "-221,123,456.564", // }, // { // num: 0, // v: 2, // expected: "0.00", // }, // { // num: -0, // v: 2, // expected: "0.00", // }, // { // num: -0, // v: 2, // expected: "0.00", // }, } trans := New() for _, tt := range tests { s := trans.FmtNumber(tt.num, tt.v) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } }
queue.py
'''
Created on Aug 9, 2012

:author: Sana Development Team
:version: 2.0
'''
from django.db import models

from mds.api.utils import make_uuid

# Django ``choices`` must be an iterable of (value, label) 2-tuples.
# BUG FIX: the original read ``((0,'Failed Dispatch'))``, which is a single
# 2-tuple (the outer parens are grouping, not a tuple) -- the trailing comma
# below is required to make it a 1-element tuple of 2-tuples.
QUEUE_STATUS = (
    (0, 'Failed Dispatch'),
)


class EncounterQueueElement(models.Model):
    """An element that is being processed."""

    class Meta:
        app_label = "core"

    # A universally unique identifier, assigned once at creation.
    uuid = models.SlugField(max_length=36, unique=True, default=make_uuid,
                            editable=False)
    # When the object was created.
    created = models.DateTimeField(auto_now_add=True)
    # Updated on modification.
    modified = models.DateTimeField(auto_now=True)
    # The URL of the cached object this element refers to.
    object_url = models.CharField(max_length=512)

    @property
    def object_uuid(self):
        # NOTE(review): placeholder -- presumably meant to extract the uuid
        # from ``object_url``; confirm intended behavior before relying on it.
        return ''

    # Dump of the form data for the object.
    cache = models.TextField(blank=True)
    # Current state in the queue (see QUEUE_STATUS).
    status = models.IntegerField(choices=QUEUE_STATUS)
    # Useful messages returned from processing.
    message = models.TextField(blank=True)
help-circle.e2e.ts
import { newE2EPage } from '@stencil/core/testing';

describe('icon-help-circle', () => {
  it('renders', async () => {
    const page = await newE2EPage();
    await page.setContent('<icon-help-circle></icon-help-circle>');

    // Once Stencil hydrates the component, the host carries both classes.
    const host = await page.find('icon-help-circle');
    expect(host).toHaveClass('hydrated');
    expect(host).toHaveClass('st-feather-icon');
  });

  it('renders one-word props', async () => {
    const page = await newE2EPage();
    await page.setContent('<icon-help-circle stroke="blue"></icon-help-circle>');

    // The attribute is kept on the host and forwarded to the inner svg.
    const host = await page.find('icon-help-circle');
    expect(host).toHaveClass('hydrated');
    expect(host).toEqualAttribute('stroke', 'blue');

    const inner = await page.find('icon-help-circle > svg');
    expect(inner).toEqualAttribute('stroke', 'blue');
  });

  it('renders dashed props', async () => {
    const page = await newE2EPage();
    await page.setContent('<icon-help-circle stroke-width="2"></icon-help-circle>');

    // Dash-cased attributes must survive the same round trip.
    const host = await page.find('icon-help-circle');
    expect(host).toHaveClass('hydrated');
    expect(host).toEqualAttribute('stroke-width', 2);

    const inner = await page.find('icon-help-circle > svg');
    expect(inner).toEqualAttribute('stroke-width', 2);
  });
});
RigidPatching.py
''' Copyright: 2016-2019 Thomas Kuestner ([email protected]) under Apache2 license @author: Thomas Kuestner ''' import numpy as np import tensorflow as tf import math from utils.Label import Label ######################################################################################################################################### #Function: fRigidPatching # #The function fRigidPatching is responsible for splitting the dicom numpy array in patches depending on the patchSize and the # #patchOverlap. Besides the function creates an 1D array with the corresponding labels. # # # #Input: dicom_numpy_array ---> 3D dicom array (height, width, number of slices) # # patchSize ---> size of patches, example: [40, 40], patchSize[0] = height, patchSize[1] = weight, height and weight can differ # # patchOverlap ---> the ratio for overlapping, example: 0.25 # # mask_numpy_array ---> 3D mask array contains information about the areas of artefacts. movement-artefact = 1, shim-artefact = 2 # # noise-artefact = 3 # # ratio_labeling ---> set the ratio of the number of 'Pixel-Artefacts' to the whole number of pixels of one patch # #Output: dPatches ---> 3D-Numpy-Array, which contain all Patches. 
#         dLabels  ---> 1D numpy array with the corresponding label per patch
###############################################################################
def fRigidPatching(dicom_numpy_array, patchSize, patchOverlap, mask_numpy_array, ratio_labeling):
    """Split a 3D DICOM volume into overlapping 2D patches and label each one.

    Args:
        dicom_numpy_array (np.ndarray): volume of shape (height, width, slices).
        patchSize (sequence of int): [patch_height, patch_width]; entries may differ.
        patchOverlap (float): overlap ratio between neighboring patches, e.g. 0.25.
        mask_numpy_array (np.ndarray): artefact mask of the same shape;
            1 = movement, 2 = shim, 3 = noise.
        ratio_labeling (float): minimum fraction of artefact pixels in a patch
            for the patch to inherit that artefact's label.

    Returns:
        tuple: (dPatches, dLabels, nbPatches) -- patches of shape
        (patchSize[0], patchSize[1], nbPatches), a 1D float label array
        (0..7 encoding the artefact combination), and the patch count.
    """
    # NOTE(review): unlike fRigidPatching3D this does not round dOverlap; for
    # overlap ratios giving non-integer pixel counts the 2D and 3D functions
    # disagree -- confirm which behavior is intended before changing it.
    dOverlap = np.multiply(patchSize, patchOverlap)
    dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap)))

    # Zero-pad so an integer number of strides fits along each in-plane axis.
    size_zero_pad = np.array(
        [math.ceil((dicom_numpy_array.shape[0] - dOverlap[0]) / dNotOverlap[0]) * dNotOverlap[0] + dOverlap[0],
         math.ceil((dicom_numpy_array.shape[1] - dOverlap[1]) / dNotOverlap[1]) * dNotOverlap[1] + dOverlap[1]])
    zero_pad = np.array([int(size_zero_pad[0]) - dicom_numpy_array.shape[0],
                         int(size_zero_pad[1]) - dicom_numpy_array.shape[1]])
    zero_pad_part = np.array([int(math.ceil(zero_pad[0] / 2)),
                              int(math.ceil(zero_pad[1] / 2))])

    # MODERNIZED: np.pad instead of the np.lib.pad alias (removed namespaces
    # in newer NumPy); identical behavior.
    pad_spec = ((zero_pad_part[0], zero_pad[0] - zero_pad_part[0]),
                (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]),
                (0, 0))
    Img_zero_pad = np.pad(dicom_numpy_array, pad_spec, mode='constant')
    Mask_zero_pad = np.pad(mask_numpy_array, pad_spec, mode='constant')

    nbPatches = int(((size_zero_pad[0] - patchSize[0]) / ((1 - patchOverlap) * patchSize[0]) + 1)
                    * ((size_zero_pad[1] - patchSize[1]) / ((1 - patchOverlap) * patchSize[1]) + 1)
                    * dicom_numpy_array.shape[2])
    dPatches = np.zeros((patchSize[0], patchSize[1], nbPatches), dtype=float)
    dLabels = np.zeros(nbPatches, dtype=float)

    # Label encoding for the (motion, shim, noise) flag combinations.
    # Note: this is NOT a bitmask (noise alone = 3, motion+shim = 4).
    label_map = {
        (True, False, False): 1,
        (False, True, False): 2,
        (False, False, True): 3,
        (True, True, False): 4,
        (True, False, True): 5,
        (False, True, True): 6,
        (True, True, True): 7,
    }
    threshold = int(ratio_labeling * patchSize[0] * patchSize[1])

    idxPatch = 0
    for iZ in range(0, dicom_numpy_array.shape[2], 1):
        for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])):
            for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])):
                dPatches[:, :, idxPatch] = Img_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ]
                dPatch_mask = Mask_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ]

                # BUG FIX: the original used .astype(np.int); the np.int alias
                # was removed in NumPy 1.24. Counting the boolean mask directly
                # is equivalent and version-safe. Per-patch debug prints removed.
                move_artefact = np.count_nonzero(dPatch_mask == 1) > threshold
                shim_artefact = np.count_nonzero(dPatch_mask == 2) > threshold
                noise_artefact = np.count_nonzero(dPatch_mask == 3) > threshold

                dLabels[idxPatch] = label_map.get((move_artefact, shim_artefact, noise_artefact), 0)
                idxPatch += 1

    print("Rigid done!")
    return dPatches, dLabels, nbPatches

###############################################################################
# Function: fRigidPatching3D
#
# Splits the dicom numpy array into 3D patches depending on patchSize and
# patchOverlap, and creates a 1D array with the corresponding labels.
# (header comment continues in the next chunk)
# Input:  dicom_numpy_array ---> 3D dicom array (height, width, number of slices)
#         patchSize         ---> e.g. [40, 40, 10]; entries may differ
#         patchOverlap      ---> overlap ratio, e.g. 0.25
#         mask_numpy_array  ---> artefact mask: 1 = movement, 2 = shim, 3 = noise
#         ratio_labeling    ---> fraction of artefact voxels needed to label a patch
# Output: dPatches ---> 4D numpy array containing all patches
#         dLabels  ---> 1D numpy array with the corresponding label per patch
###############################################################################
def fRigidPatching3D(dicom_numpy_array, patchSize, patchOverlap, mask_numpy_array, ratio_labeling):
    """Split a 3D volume into overlapping 3D patches and label each patch.

    Args:
        dicom_numpy_array (np.ndarray): volume of shape (height, width, slices).
        patchSize (sequence of int): [patch_h, patch_w, patch_d].
        patchOverlap (float): overlap ratio between neighboring patches.
        mask_numpy_array (np.ndarray): artefact mask of the same shape;
            1 = movement, 2 = shim, 3 = noise.
        ratio_labeling (float): minimum fraction of artefact voxels in a patch
            for the patch to inherit that artefact's label.

    Returns:
        tuple: (dPatches, dLabels, nbPatches) -- patches of shape
        (patchSize[0], patchSize[1], patchSize[2], int(nbPatches)), a 1D int
        label array, and the (float, as in the original) patch count.
    """
    dOverlap = np.round(np.multiply(patchSize, patchOverlap))
    dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap)))

    # Zero-pad so an integer number of strides fits along every axis.
    size_zero_pad = np.array(
        [math.ceil((dicom_numpy_array.shape[0] - dOverlap[0]) / dNotOverlap[0]) * dNotOverlap[0] + dOverlap[0],
         math.ceil((dicom_numpy_array.shape[1] - dOverlap[1]) / dNotOverlap[1]) * dNotOverlap[1] + dOverlap[1],
         math.ceil((dicom_numpy_array.shape[2] - dOverlap[2]) / dNotOverlap[2]) * dNotOverlap[2] + dOverlap[2]])
    zero_pad = np.array([int(size_zero_pad[i]) - dicom_numpy_array.shape[i] for i in range(3)])
    zero_pad_part = np.array([int(math.ceil(zero_pad[i] / 2)) for i in range(3)])

    # MODERNIZED: np.pad instead of the np.lib.pad alias; identical behavior.
    pad_spec = tuple((zero_pad_part[i], zero_pad[i] - zero_pad_part[i]) for i in range(3))
    Img_zero_pad = np.pad(dicom_numpy_array, pad_spec, mode='constant')
    Mask_zero_pad = np.pad(mask_numpy_array, pad_spec, mode='constant')

    # NOTE(review): kept as float (no int()) to preserve the original return
    # type; callers cast with int(nbPatches) where needed.
    nbPatches = (((size_zero_pad[0] - patchSize[0]) / ((1 - patchOverlap) * patchSize[0]) + 1)
                 * ((size_zero_pad[1] - patchSize[1]) / ((1 - patchOverlap) * patchSize[1]) + 1)
                 * ((size_zero_pad[2] - patchSize[2]) / (np.round((1 - patchOverlap) * patchSize[2])) + 1))
    dPatches = np.zeros((patchSize[0], patchSize[1], patchSize[2], int(nbPatches)), dtype=float)
    dLabels = np.zeros(int(nbPatches), dtype=int)

    # Same non-bitmask label encoding as the 2D variant.
    label_map = {
        (True, False, False): 1,
        (False, True, False): 2,
        (False, False, True): 3,
        (True, True, False): 4,
        (True, False, True): 5,
        (False, True, True): 6,
        (True, True, True): 7,
    }
    threshold = int(ratio_labeling * patchSize[0] * patchSize[1] * patchSize[2])

    idxPatch = 0
    for iZ in range(0, int(size_zero_pad[2] - dOverlap[2]), int(dNotOverlap[2])):
        for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])):
            for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])):
                dPatches[:, :, :, idxPatch] = Img_zero_pad[iY:iY + patchSize[0],
                                                           iX:iX + patchSize[1],
                                                           iZ:iZ + patchSize[2]]
                dPatch_mask = Mask_zero_pad[iY:iY + patchSize[0],
                                            iX:iX + patchSize[1],
                                            iZ:iZ + patchSize[2]]

                # BUG FIX: .astype(np.int) removed (np.int gone since NumPy
                # 1.24); counting the boolean mask is equivalent. The many
                # per-iteration debug prints were also removed.
                move_artefact = np.count_nonzero(dPatch_mask == 1) > threshold
                shim_artefact = np.count_nonzero(dPatch_mask == 2) > threshold
                noise_artefact = np.count_nonzero(dPatch_mask == 3) > threshold

                dLabels[idxPatch] = label_map.get((move_artefact, shim_artefact, noise_artefact), 0)
                idxPatch += 1

    print("Rigid done!")
    return dPatches, dLabels, nbPatches


def fRigidPatching3DN(dicom_numpy_array, patchSize, patchOverlap, mask_numpy_array, ratio_labeling):
    # NOTE(review): the body of this function is not visible in this chunk
    # (it continues beyond the boundary); placeholder keeps the module valid.
    ...
def fRigidPatching_maskLabeling(dicom_numpy_array, patchSize, patchOverlap, mask_numpy_array, ratio_labeling, dataset): dPatches = None move_artefact = False shim_artefact = False noise_artefact = False #body region bodyRegion, bodyRegionLabel = dataset.getBodyRegion() # MRT weighting label (T1, T2) weighting, weightingLabel = dataset.getMRTWeighting() #dOverlap = np.multiply(patchSize, patchOverlap) dOverlap = np.round(np.multiply(patchSize, patchOverlap)) #dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap))) dNotOverlap = [patchSize[0]-dOverlap[0], patchSize[1]-dOverlap[1]] size_zero_pad = np.array( ([math.ceil((dicom_numpy_array.shape[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[0], math.ceil((dicom_numpy_array.shape[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1]])) zero_pad = np.array( ([int(size_zero_pad[0]) - dicom_numpy_array.shape[0], int(size_zero_pad[1]) - dicom_numpy_array.shape[1]])) zero_pad_part = np.array(([int(math.ceil(zero_pad[0] / 2)), int(math.ceil(zero_pad[1] / 2))])) Img_zero_pad = np.lib.pad(dicom_numpy_array, ( (zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (0, 0)), mode='constant') Mask_zero_pad = np.lib.pad(mask_numpy_array, ((zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (0, 0)), mode='constant') nbPatches = int(((size_zero_pad[0]-patchSize[0])/((1-patchOverlap)*patchSize[0])+1)*((size_zero_pad[1]-patchSize[1])/((1-patchOverlap)*patchSize[1])+1)*dicom_numpy_array.shape[2]) nbPatches_in_Y = int((size_zero_pad[0]-dOverlap[0])/dNotOverlap[0]) nbPatches_in_X = int((size_zero_pad[1]-dOverlap[1])/dNotOverlap[1]) nbPatches_in_Z = dicom_numpy_array.shape[2] nbPatches = nbPatches_in_X*nbPatches_in_Y*nbPatches_in_Z dPatches = np.zeros((patchSize[0], patchSize[1], nbPatches), dtype=float) # dtype=np.float32 #dLabels = np.zeros((nbPatches), dtype=float) # dtype = float dLabels = 
np.zeros((nbPatches), dtype=np.dtype('i4')) idxPatch = 0 for iZ in range(0, dicom_numpy_array.shape[2], 1): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): dPatch = Img_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ] dPatches[:, :, idxPatch] = dPatch #if idxPatch == 7678: # print() dPatch_mask = Mask_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ] patch_number_value = patchSize[0] * patchSize[1] if np.count_nonzero((dPatch_mask == 1).astype(np.int)) > int(ratio_labeling * patch_number_value): move_artefact = True if np.count_nonzero((dPatch_mask == 2).astype(np.int)) > int(ratio_labeling * patch_number_value): shim_artefact = True if np.count_nonzero((dPatch_mask == 3).astype(np.int)) > int(ratio_labeling * patch_number_value): noise_artefact = True label = Label.REFERENCE if move_artefact == True and shim_artefact != True and noise_artefact != True: label = Label.MOTION elif move_artefact != True and shim_artefact == True and noise_artefact != True: label = Label.SHIM elif move_artefact != True and shim_artefact != True and noise_artefact == True: label = Label.NOISE elif move_artefact == True and shim_artefact == True and noise_artefact != True: label = Label.MOTION_AND_SHIM elif move_artefact == True and shim_artefact != True and noise_artefact == True: label = Label.MOTION_AND_NOISE elif move_artefact != True and shim_artefact == True and noise_artefact == True: label = Label.SHIM_AND_NOISE elif move_artefact == True and shim_artefact == True and noise_artefact == True: label = Label.MOTION_AND_SHIM_AND_NOISE # calculate final label label = label + bodyRegionLabel + weightingLabel #print(label) dLabels[idxPatch] = label idxPatch += 1 move_artefact = False shim_artefact = False noise_artefact = False print("Rigid patching done for %s!" 
% dataset.getPathdata()) #print(dLabels) #return dPatches, dLabels, nbPatches return dPatches, dLabels def fRigidPatching_patchLabeling(dicom_numpy_array, patchSize, patchOverlap, ratio_labeling): dPatches = None move_artefact = False shim_artefact = False noise_artefact = False dLabels = [] dOverlap = np.multiply(patchSize, patchOverlap) dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap))) size_zero_pad = np.array( ([math.ceil((dicom_numpy_array.shape[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[ 0], math.ceil((dicom_numpy_array.shape[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1]])) zero_pad = np.array( ([int(size_zero_pad[0]) - dicom_numpy_array.shape[0], int(size_zero_pad[1]) - dicom_numpy_array.shape[1]])) zero_pad_part = np.array(([int(math.ceil(zero_pad[0] / 2)), int(math.ceil(zero_pad[1] / 2))])) Img_zero_pad = np.lib.pad(dicom_numpy_array, ( (zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (0, 0)), mode='constant') for iZ in range(0, dicom_numpy_array.shape[2], 1): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): dPatch = Img_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ] dPatch = dPatch[:, :, np.newaxis] if dPatches is None: dPatches = dPatch else: dPatches = np.concatenate((dPatches, dPatch), axis=2) dLabels = np.ones((dPatches.shape[2]), dtype=np.dtype('i4')) return dPatches, dLabels ######################################################################################################################################### #Function: fRigidPatching3D # #The function fRigidPatching3D is responsible for splitting the dicom numpy array in patches depending on the patchSize and the # #patchOverlap. Besides the function creates an 1D array with the corresponding labels. 
# # # #Input: dicom_numpy_array ---> 3D dicom array (height, width, number of slices) # # patchSize ---> size of patches, example: [40, 40], patchSize[0] = height, patchSize[1] = weight, height and weight can differ # # patchOverlap ---> the ratio for overlapping, example: 0.25 # # mask_numpy_array ---> 3D mask array contains information about the areas of artefacts. movement-artefact = 1, shim-artefact = 2 # # noise-artefact = 3 # # ratio_labeling ---> set the ratio of the number of 'Pixel-Artefacts' to the whole number of pixels of one patch # #Output: dPatches ---> 3D-Numpy-Array, which contain all Patches. # # dLabels ---> 1D-Numpy-Array with all corresponding labels # ######################################################################################################################################### def fRigidPatching3D_maskLabeling(dicom_numpy_array, patchSize, patchOverlap, mask_numpy_array, ratio_labeling, dataset=None, dopatching=True): #ToDo odd patch size not supported! move_artefact = False shim_artefact = False noise_artefact = False if isinstance(dataset, int): # already pre-processed label bodyRegionLabel + weightingLabel bodyRegionweightingLabel = dataset else: # body region bodyRegion, bodyRegionLabel = dataset.getBodyRegion() # MRT weighting label (T1, T2) weighting, weightingLabel = dataset.getMRTWeighting() dOverlap = np.round(np.multiply(patchSize, patchOverlap)) dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap))) size_zero_pad = np.array(([math.ceil((dicom_numpy_array.shape[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[0], math.ceil((dicom_numpy_array.shape[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1], math.ceil((dicom_numpy_array.shape[2] - dOverlap[2]) / (dNotOverlap[2])) * dNotOverlap[2] + dOverlap[2]])) zero_pad = np.array(([int(size_zero_pad[0]) - dicom_numpy_array.shape[0], int(size_zero_pad[1]) - dicom_numpy_array.shape[1], int(size_zero_pad[2]) - 
dicom_numpy_array.shape[2]])) zero_pad_part = np.array(([int(math.ceil(zero_pad[0] / 2)), int(math.ceil(zero_pad[1] / 2)), int(math.ceil(zero_pad[2] / 2))])) Img_zero_pad = np.lib.pad(dicom_numpy_array, ((zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (zero_pad_part[2], zero_pad[2] - zero_pad_part[2])), mode='constant') Mask_zero_pad = np.lib.pad(mask_numpy_array, ((zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (zero_pad_part[2], zero_pad[2] - zero_pad_part[2])), mode='constant') nbPatches = ((size_zero_pad[0]-patchSize[0])/((1-patchOverlap)*patchSize[0])+1)*((size_zero_pad[1]-patchSize[1])/((1-patchOverlap)*patchSize[1])+1)*((size_zero_pad[2]-patchSize[2])/(np.round((1-patchOverlap)*patchSize[2]))+1) nbPatches_in_Y = int((size_zero_pad[0] - dOverlap[0]) / dNotOverlap[0]) nbPatches_in_X = int((size_zero_pad[1] - dOverlap[1]) / dNotOverlap[1]) nbPatches_in_Z = int((size_zero_pad[2] - dOverlap[2]) / dNotOverlap[2]) nbPatches = nbPatches_in_X * nbPatches_in_Y * nbPatches_in_Z dPatches = np.zeros((patchSize[0], patchSize[1], patchSize[2], int(nbPatches)), dtype=float) dLabels = np.zeros((int(nbPatches)), dtype = int) #float idxPatch = 0 for iZ in range(0, int(size_zero_pad[2] - dOverlap[2]), int(dNotOverlap[2])): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): dPatch = Img_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ:iZ + patchSize[2]] dPatches[:,:,:,idxPatch] = dPatch dPatch_mask = Mask_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ:iZ + patchSize[2]] patch_number_value = patchSize[0] * patchSize[1]*patchSize[2] if np.count_nonzero((dPatch_mask==1).astype(np.int)) > int(ratio_labeling*patch_number_value): move_artefact = True if np.count_nonzero((dPatch_mask==2).astype(np.int)) > int(ratio_labeling*patch_number_value): 
shim_artefact = True if np.count_nonzero((dPatch_mask==3).astype(np.int)) > int(ratio_labeling*patch_number_value): noise_artefact = True label = Label.REFERENCE if move_artefact == True and shim_artefact != True and noise_artefact != True: label = Label.MOTION elif move_artefact != True and shim_artefact == True and noise_artefact != True: label = Label.SHIM elif move_artefact != True and shim_artefact != True and noise_artefact == True: label = Label.NOISE elif move_artefact == True and shim_artefact == True and noise_artefact != True: label = Label.MOTION_AND_SHIM elif move_artefact == True and shim_artefact != True and noise_artefact == True: label = Label.MOTION_AND_NOISE elif move_artefact != True and shim_artefact == True and noise_artefact == True: label = Label.SHIM_AND_NOISE elif move_artefact == True and shim_artefact == True and noise_artefact == True: label = Label.MOTION_AND_SHIM_AND_NOISE if isinstance(dataset, int): label = bodyRegionweightingLabel + label else: label = weightingLabel + bodyRegionLabel + label dLabels[idxPatch] = label idxPatch += 1 move_artefact = False shim_artefact = False noise_artefact = False if isinstance(dataset, int): return dPatches else: print("Rigid patching done for %s " % dataset.getPathdata()) #print(dLabels) return dPatches, dLabels#, nbPatches def fRigidPatching3D_maskLabeling_tf(dicom_tensor, patchSize, patchOverlap, mask_numpy_array, ratio_labeling, dataset=None, dopatching=True): #ToDo odd patch size not supported! 
dOverlap = tf.math.round(tf.math.multiply(patchSize, patchOverlap)) dNotOverlap = tf.math.round(tf.math.multiply(patchSize, (1 - patchOverlap))) imgShape = dicom_tensor.shape.as_list() size_zero_pad = np.array(([math.ceil((imgShape[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[0], math.ceil((imgShape[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1], math.ceil((imgShape[2] - dOverlap[2]) / (dNotOverlap[2])) * dNotOverlap[2] + dOverlap[2]])) zero_pad = np.array(([int(size_zero_pad[0]) - imgShape[0], int(size_zero_pad[1]) - imgShape[1], int(size_zero_pad[2]) - imgShape[2]])) zero_pad_part = np.array(([int(math.ceil(zero_pad[0] / 2)), int(math.ceil(zero_pad[1] / 2)), int(math.ceil(zero_pad[2] / 2))])) Img_zero_pad = tf.pad(dicom_tensor, tf.Variable((zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (zero_pad_part[2], zero_pad[2] - zero_pad_part[2])), mode='constant') #nbPatches = ((size_zero_pad[0]-patchSize[0])/((1-patchOverlap)*patchSize[0])+1)*((size_zero_pad[1]-patchSize[1])/((1-patchOverlap)*patchSize[1])+1)*((size_zero_pad[2]-patchSize[2])/(tf.math.round((1-patchOverlap)*patchSize[2]))+1) #nbPatches_in_Y = int((size_zero_pad[0] - dOverlap[0]) / dNotOverlap[0]) #nbPatches_in_X = int((size_zero_pad[1] - dOverlap[1]) / dNotOverlap[1]) #nbPatches_in_Z = int((size_zero_pad[2] - dOverlap[2]) / dNotOverlap[2]) #nbPatches = nbPatches_in_X * nbPatches_in_Y * nbPatches_in_Z #dPatches = tf.zeros((patchSize[0], patchSize[1], patchSize[2], int(nbPatches)), dtype=float) patch = [None for _ in range(fcalculatepatches(imgShape, patchSize, patchOverlap))] idxPatch = 0 for iZ in range(0, int(size_zero_pad[2] - dOverlap[2]), int(dNotOverlap[2])): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): patch[idxPatch] = tf.slice(Img_zero_pad, begin=[iY, iX, iZ], size=[patchSize[0], 
patchSize[1], patchSize[2]]) idxPatch += 1 dPatches = tf.stack(patch, axis=3) return dPatches def fcalculatepatches(imageSize, patchSize, patchOverlap): dOverlap = np.round(np.multiply(patchSize, patchOverlap)) dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap))) size_zero_pad = np.array( ([math.ceil((imageSize[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[0], math.ceil((imageSize[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1], math.ceil((imageSize[2] - dOverlap[2]) / (dNotOverlap[2])) * dNotOverlap[2] + dOverlap[2]])) idxPatch = 0 for iZ in range(0, int(size_zero_pad[2] - dOverlap[2]), int(dNotOverlap[2])): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): idxPatch += 1 return idxPatch
move_artefact = False shim_artefact = False noise_artefact = False #dLabels = [] dOverlap = np.multiply(patchSize, patchOverlap) dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap))) print(dOverlap,dNotOverlap) size_zero_pad = np.array(([math.ceil((dicom_numpy_array.shape[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[ 0], math.ceil((dicom_numpy_array.shape[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1], math.ceil((dicom_numpy_array.shape[2] - dOverlap[2]) / (dNotOverlap[2])) * dNotOverlap[2] + dOverlap[2]])) zero_pad = np.array(([int(size_zero_pad[0]) - dicom_numpy_array.shape[0], int(size_zero_pad[1]) - dicom_numpy_array.shape[1], int(size_zero_pad[2]) - dicom_numpy_array.shape[2]])) zero_pad_part = np.array(([int(math.ceil(zero_pad[0] / 2)), int(math.ceil(zero_pad[1] / 2)), int(math.ceil(zero_pad[2] / 2))])) Img_zero_pad = np.lib.pad(dicom_numpy_array, ( (zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (zero_pad_part[2], zero_pad[2] - zero_pad_part[2])), mode='constant') print(Img_zero_pad.shape) Mask_zero_pad = np.lib.pad(mask_numpy_array, ( (zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (zero_pad_part[2], zero_pad[2] - zero_pad_part[2])), mode='constant') nbPatches = ((size_zero_pad[0]-patchSize[0])/((1-patchOverlap)*patchSize[0])+1)*((size_zero_pad[1]-patchSize[1])/((1-patchOverlap)*patchSize[1])+1)*((size_zero_pad[2]-patchSize[2])/((1-patchOverlap)*patchSize[2])+1) print(((size_zero_pad[0]-patchSize[0])/((1-patchOverlap)*patchSize[0])+1)) print(((size_zero_pad[1]-patchSize[1])/((1-patchOverlap)*patchSize[1])+1)) print(((size_zero_pad[2]-patchSize[2])/((1-patchOverlap)*patchSize[2])+1)) print(int(patchSize[0]), int(patchSize[1]), int(patchSize[2]), int(nbPatches)) dPatches = np.zeros((int(patchSize[0]), int(patchSize[1]), int(patchSize[2]), int(nbPatches)), dtype=float) dLabels = 
np.zeros((int(nbPatches)), dtype = float) idxPatch = 0 for iZ in range(0, dicom_numpy_array.shape[2], int(dNotOverlap[2])): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): print(iX, iY, iZ) dPatch = Img_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ:iZ + patchSize[2]] print(idxPatch) print(dPatch.shape) dPatches[:,:,:,idxPatch] = dPatch dPatch_mask = Mask_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ:iZ + patchSize[2]] patch_number_value = patchSize[0] * patchSize[1]*patchSize[2] if np.count_nonzero((dPatch_mask==1).astype(np.int)) > int(ratio_labeling*patch_number_value): move_artefact = True if np.count_nonzero((dPatch_mask==2).astype(np.int)) > int(ratio_labeling*patch_number_value): shim_artefact = True if np.count_nonzero((dPatch_mask==3).astype(np.int)) > int(ratio_labeling*patch_number_value): noise_artefact = True label = 0 if move_artefact == True and shim_artefact != True and noise_artefact != True: label = 1 elif move_artefact != True and shim_artefact == True and noise_artefact != True: label = 2 elif move_artefact != True and shim_artefact != True and noise_artefact == True: label = 3 elif move_artefact == True and shim_artefact == True and noise_artefact != True: label = 4 elif move_artefact == True and shim_artefact != True and noise_artefact == True: label = 5 elif move_artefact != True and shim_artefact == True and noise_artefact == True: label = 6 elif move_artefact == True and shim_artefact == True and noise_artefact == True: label = 7 dLabels[idxPatch] = label idxPatch += 1 move_artefact = False shim_artefact = False noise_artefact = False print("Rigid patching done!") #print("Rigid done!") #print(dLabels) return dPatches, dLabels, nbPatches
common.go
package conf import ( "encoding/json" "strings" v2net "github.com/whatedcgveg/v2ray-core/common/net" "github.com/whatedcgveg/v2ray-core/common/protocol" ) type StringList []string func NewStringList(raw []string) *StringList { list := StringList(raw) return &list } func (v StringList) Len() int { return len(v) } func (v *StringList) UnmarshalJSON(data []byte) error { var strarray []string if err := json.Unmarshal(data, &strarray); err == nil { *v = *NewStringList(strarray) return nil } var rawstr string if err := json.Unmarshal(data, &rawstr); err == nil { strlist := strings.Split(rawstr, ",") *v = *NewStringList(strlist) return nil } return newError("unknown format of a string list: " + string(data)) } type Address struct { v2net.Address } func (v *Address) UnmarshalJSON(data []byte) error { var rawStr string if err := json.Unmarshal(data, &rawStr); err != nil { return err } v.Address = v2net.ParseAddress(rawStr) return nil } func (v *Address) Build() *v2net.IPOrDomain { return v2net.NewIPOrDomain(v.Address) } type Network string func (v Network) Build() v2net.Network { return v2net.ParseNetwork(string(v)) } type NetworkList []Network func (v *NetworkList) UnmarshalJSON(data []byte) error { var strarray []Network if err := json.Unmarshal(data, &strarray); err == nil { nl := NetworkList(strarray) *v = nl return nil } var rawstr Network if err := json.Unmarshal(data, &rawstr); err == nil { strlist := strings.Split(string(rawstr), ",") nl := make([]Network, len(strlist)) for idx, network := range strlist { nl[idx] = Network(network) } *v = nl return nil } return newError("unknown format of a string list: " + string(data)) } func (v *NetworkList) Build() *v2net.NetworkList { if v == nil { return &v2net.NetworkList{ Network: []v2net.Network{v2net.Network_TCP}, } } list := new(v2net.NetworkList) for _, network := range *v { list.Network = append(list.Network, network.Build()) } return list } func parseIntPort(data []byte) (v2net.Port, error)
func parseStringPort(data []byte) (v2net.Port, v2net.Port, error) { var s string err := json.Unmarshal(data, &s) if err != nil { return v2net.Port(0), v2net.Port(0), err } pair := strings.SplitN(s, "-", 2) if len(pair) == 0 { return v2net.Port(0), v2net.Port(0), newError("Config: Invalid port range: ", s) } if len(pair) == 1 { port, err := v2net.PortFromString(pair[0]) return port, port, err } fromPort, err := v2net.PortFromString(pair[0]) if err != nil { return v2net.Port(0), v2net.Port(0), err } toPort, err := v2net.PortFromString(pair[1]) if err != nil { return v2net.Port(0), v2net.Port(0), err } return fromPort, toPort, nil } type PortRange struct { From uint32 To uint32 } func (v *PortRange) Build() *v2net.PortRange { return &v2net.PortRange{ From: v.From, To: v.To, } } // UnmarshalJSON implements encoding/json.Unmarshaler.UnmarshalJSON func (v *PortRange) UnmarshalJSON(data []byte) error { port, err := parseIntPort(data) if err == nil { v.From = uint32(port) v.To = uint32(port) return nil } from, to, err := parseStringPort(data) if err == nil { v.From = uint32(from) v.To = uint32(to) if v.From > v.To { return newError("invalid port range ", v.From, " -> ", v.To) } return nil } return newError("invalid port range: ", string(data)) } type User struct { EmailString string `json:"email"` LevelByte byte `json:"level"` } func (v *User) Build() *protocol.User { return &protocol.User{ Email: v.EmailString, Level: uint32(v.LevelByte), } }
{ var intPort uint32 err := json.Unmarshal(data, &intPort) if err != nil { return v2net.Port(0), err } return v2net.PortFromInt(intPort) }
icon.service.ts
import { Injectable } from '@angular/core'; import { IconPrefix } from '@fortawesome/fontawesome-svg-core'; @Injectable({providedIn: 'root'}) export class FaIconService { defaultPrefix: IconPrefix = 'fas'; }
icon_reviews.rs
pub struct IconReviews { props: crate::Props, } impl yew::Component for IconReviews { type Properties = crate::Props; type Message = (); fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self { Self { props } } fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender { false } fn view(&self) -> yew::prelude::Html { yew::prelude::html! { <svg class=self.props.class.unwrap_or("") width=self.props.size.unwrap_or(24).to_string() height=self.props.size.unwrap_or(24).to_string() viewBox="0 0 24 24" fill=self.props.fill.unwrap_or("none") stroke=self.props.color.unwrap_or("currentColor") stroke-width=self.props.stroke_width.unwrap_or(2).to_string() stroke-linecap=self.props.stroke_linecap.unwrap_or("round") stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round") > <svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24" viewBox="0 0 24 24" width="24"><g><path d="M0,0h24v24H0V0z" fill="none"/></g><g><g><path d="M4,17.17L5.17,16H20V4H4V17.17z M10.43,8.43L12,5l1.57,3.43L17,10l-3.43,1.57 L12,15l-1.57-3.43L7,10L10.43,8.43z" enable-background="new" opacity=".3"/><path d="M20,2H4C2.9,2,2,2.9,2,4v18l4-4h14c1.1,0,2-0.9,2-2V4C22,2.9,21.1,2,20,2z M20,16H5.17L4,17.17V4h16V16z"/><polygon points="12,15 13.57,11.57 17,10 13.57,8.43 12,5 10.43,8.43 7,10 10.43,11.57"/></g></g></svg> </svg> } } }
{ true }
db-migrations.go
// This file is part of MinIO Operator // Copyright (c) 2022 MinIO, Inc. // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU Affero General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. package server import ( "context" "fmt" "log" "github.com/lib/pq" ) // dbMigration represents a DB migration using db-client c. // Note: a migration func should be idempotent. type dbMigration func(ctx context.Context, c *DBClient) error var allMigrations = []dbMigration{ addAccessKeyCol, // Add new migrations here below } func (c *DBClient) runMigrations(ctx context.Context) error { for _, migration := range allMigrations { if err := migration(ctx, c); err != nil { return err } } return nil } func duplicateColErr(err error) bool { if pqerr, ok := err.(*pq.Error); ok && pqerr.Code == "42701" { return true } return false } func duplicateTblErr(err error) bool
func (c *DBClient) runQueries(ctx context.Context, queries []string, ignoreErr func(error) bool) error { for _, query := range queries { if _, err := c.ExecContext(ctx, query); err != nil { if ignoreErr(err) { continue } return err } } return nil } // updateAccessKeyCol updates request_info records which where created before // the introduction of access_key column. func updateAccessKeyCol(ctx context.Context, c *DBClient) { updQ := `WITH req AS ( SELECT log->>'requestID' AS request_id, COALESCE( substring( log->'requestHeader'->>'Authorization', e'^AWS4-HMAC-SHA256\\s+Credential\\s*=\\s*([^/]+)' ), substring(log->'requestHeader'->>'Authorization', e'^AWS\\s+([^:]+)') ) AS access_key FROM audit_log_events AS a JOIN request_info AS b ON (a.event_time = b.time) WHERE b.access_key IS NULL ORDER BY event_time LIMIT $1 ) UPDATE request_info SET access_key = req.access_key FROM req WHERE request_info.request_id = req.request_id` for lim := 1000; ; { select { case <-ctx.Done(): return default: } res, err := c.ExecContext(ctx, updQ, lim) if err != nil { log.Printf("Failed to update access_key column in request_info: %v", err) return } if rows, err := res.RowsAffected(); err != nil { log.Printf("Failed to get rows affected: %v", err) return } else if rows < 1000 { break } } } // addAccessKeyCol adds a new column access_key, to request_info table to store // API requests access key/user information wherever applicable. func addAccessKeyCol(ctx context.Context, c *DBClient) error { queries := []string{ `ALTER table request_info ADD access_key text`, } err := c.runQueries(ctx, queries, func(err error) bool { if duplicateColErr(err) { return true } if duplicateTblErr(err) { return true } return false }) if err == nil { go updateAccessKeyCol(ctx, c) } return err } // CreateIndices creates table indexes for audit_log_events and request_info tables. // See auditLogIndices, reqInfoIndices functions for actual indices details. 
func (c *DBClient) CreateIndices(ctx context.Context) error { tables := []struct { t Table indices []indexOpts }{ { t: auditLogEventsTable, indices: auditLogIndices(), }, { t: requestInfoTable, indices: reqInfoIndices(), }, } for _, table := range tables { // The following procedure creates indices on all partitions of // this table. If an index was created on any of its partitions, // it checks if newer partitions were created meanwhile, so as // to create indices on those partitions too. for { partitions, err := c.getExistingPartitions(ctx, table.t) if err != nil { return err } var indexCreated bool for _, partition := range partitions { indexed, err := c.CreatePartitionIndices(ctx, table.indices, partition) if err != nil { return err } indexCreated = indexCreated || indexed } if !indexCreated { break } } // No more new non-indexed table partitions, creating // parent table indices. err := c.CreateParentIndices(ctx, table.indices) if err != nil { return err } } return nil } // CreatePartitionIndices creates all indices described by optses on partition. // It returns true if a new index was created on this partition. Note: this // function ignores the index already exists error. func (c *DBClient) CreatePartitionIndices(ctx context.Context, optses []indexOpts, partition string) (indexed bool, err error) { for _, opts := range optses { q := opts.createPartitionQuery(partition) _, err := c.ExecContext(ctx, q) if err == nil { indexed = true } if err != nil && !duplicateTblErr(err) { return indexed, err } } return indexed, nil } // CreateParentIndices creates all indices specified by optses on the parent table. func (c *DBClient) CreateParentIndices(ctx context.Context, optses []indexOpts) error { for _, opts := range optses { q := opts.createParentQuery() _, err := c.ExecContext(ctx, q) if err != nil && !duplicateTblErr(err) { return err } } return nil } // auditLogIndices is a slice of audit_log_events' table indices specified as // indexOpt values. 
func auditLogIndices() []indexOpts { return []indexOpts{ { tableName: "audit_log_events", indexSuffix: "log", col: idxCol{name: `(log->>'requestID')`}, idxType: "btree", }, { tableName: "audit_log_events", col: idxCol{ name: "event_time", order: colDesc, }, }, } } // reqInfoIndices is a slice of request_info's table indices specified as indexOpt values. func reqInfoIndices() []indexOpts { var idxOpts []indexOpts cols := []string{"access_key", "api_name", "bucket", "object", "request_id", "response_status", "time"} for _, col := range cols { idxOpts = append(idxOpts, indexOpts{ tableName: "request_info", col: idxCol{name: col}, }) } return idxOpts } type colOrder bool const ( colDesc colOrder = true ) type idxCol struct { name string order colOrder } func (col idxCol) colWithOrder() string { if col.order == colDesc { return fmt.Sprintf("(%s DESC)", col.name) } return fmt.Sprintf("(%s)", col.name) } // indexOpts type is used to specify a table index type indexOpts struct { tableName string indexSuffix string col idxCol idxType string } func (opts indexOpts) colWithOrder() string { return opts.col.colWithOrder() } func (opts indexOpts) createParentQuery() string { var idxName string if opts.indexSuffix != "" { idxName = fmt.Sprintf("%s_%s_index", opts.tableName, opts.indexSuffix) } else { idxName = fmt.Sprintf("%s_%s_index", opts.tableName, opts.col.name) } var q string if opts.idxType != "" { q = fmt.Sprintf("CREATE INDEX %s ON %s USING %s %s", idxName, opts.tableName, opts.idxType, opts.colWithOrder()) } else { q = fmt.Sprintf("CREATE INDEX %s ON %s %s", idxName, opts.tableName, opts.colWithOrder()) } return q } func (opts indexOpts) createPartitionQuery(partition string) string { var idxName string if opts.indexSuffix != "" { idxName = fmt.Sprintf("%s_%s_index", partition, opts.indexSuffix) } else { idxName = fmt.Sprintf("%s_%s_index", partition, opts.col.name) } var q string if opts.idxType != "" { q = fmt.Sprintf("CREATE INDEX CONCURRENTLY %s ON %s USING %s %s", 
idxName, partition, opts.idxType, opts.colWithOrder()) } else { q = fmt.Sprintf("CREATE INDEX CONCURRENTLY %s ON %s %s", idxName, partition, opts.colWithOrder()) } return q }
{ if pqerr, ok := err.(*pq.Error); ok && pqerr.Code == "42P07" { return true } return false }
weather.ts
import { ColorResolvable, CommandInteraction, MessageEmbed } from 'discord.js' import { Discord, SimpleCommand, SimpleCommandMessage, SlashOption, Slash, SimpleCommandOption } from 'discordx' import fetch from 'node-fetch' import hslRgb from 'hsl-rgb' interface weatherResponse { msg: MessageEmbed, ephemeral: boolean } interface weatherInfo { id: number, main: string, description: string, icon: string } interface weatherDirections { [key: string]: Array<number> } @Discord() class
{ @SimpleCommand('weather', { description: 'Get the weather for a location', argSplitter: '\n' }) async simple(@SimpleCommandOption('location', { type: 'STRING' }) text: string, command: SimpleCommandMessage) { if (!text) { return command.sendUsageSyntax() } const weatherInfo: weatherResponse = await this.handleInput(text) command.message.channel.send({ embeds: [weatherInfo.msg] }) } @Slash('weather', { description: 'Get the weather for a location' }) async slash( @SlashOption('location', { type: 'STRING' }) message: string, interaction: CommandInteraction ) { const weatherInfo: weatherResponse = await this.handleInput(message) interaction.reply({ embeds: [weatherInfo.msg], ephemeral: weatherInfo.ephemeral }) } private static aboutMessage = `Weather data provided by [OpenWeather (TM)](https://openweathermap.org) Data made available under the [Creative Commons Attribution-ShareAlike 4.0 International licence (CC BY-SA 4.0)](<https://creativecommons.org/licenses/by-sa/4.0/>)` private static helpMessage = `Acceptable input formatting: \`City name\` \`City name, State code\` \`City name, State code, Country code\` \`Zip/Postal code, Country code\` \n**Also acceptable**: \`about\` - Information on the API & data used \`help\` - This command` private static missingApiKey = `This function is currently unavailable.\n**Reason**: Weather API key is missing.` private static apiKey = process.env.OPEN_WEATHER_TOKEN ?? 
'' private static weatherURL = 'https://api.openweathermap.org/data/2.5/weather' // Very basic check for a postal code, working on the assumpion that they include numbers and cities don't private static possiblyAPostCode = /\d/ private determineType(searchLocation: string): string { if (Weather.possiblyAPostCode.test(searchLocation)) { // Post code return 'zip' } else { // City name return 'q' } } // The API doesn't seem to handle whitespace around commas too well // This is noticable when using a postal code // Not found: "bs1 4uz, gb", Found: "bs1 4uz,gb" private static commaWhitespaceRegex = /(\s{0,}\,\s{0,})/g private sortOutWhitespace(text: string): string { return text.replace(Weather.commaWhitespaceRegex, ',') } private buildRequestURL(searchLocation: string): string { const fragments: string = [ [this.determineType(searchLocation), encodeURIComponent(this.sortOutWhitespace(searchLocation))], ['units', 'metric'], ['appid', Weather.apiKey] ].map(frag => frag.join('=')).join('&') return `${Weather.weatherURL}?${fragments}` } // Text formatting private titleCase(text: string): string { return text .split(' ') .map(word => word.substring(0, 1).toUpperCase() + word.substring(1).toLowerCase() ) .join(' ') } // Wind private windLUT: weatherDirections = { N: [348.75, 11.25], NNE: [11.25, 33.75], NE: [33.75, 56.25], ENE: [56.25, 78.75], E: [78.75, 101.25], ESE: [101.25, 123.75], SE: [123.75, 146.25], SSE: [146.25, 168.75], S: [168.75, 191.25], SSW: [191.25, 213.75], SW: [213.75, 236.25], WSW: [236.25, 258.75], W: [258.75, 281.25], WNW: [281.25, 303.75], NW: [303.75, 326.25], NNW: [326.25, 348.75] } private cardinalDirection(degree: number): string { return Object.keys(this.windLUT).find(dir => { if (this.windLUT[dir][1] < this.windLUT[dir][0]) { return degree >= this.windLUT[dir][0] || degree <= this.windLUT[dir][1] } else { return degree >= this.windLUT[dir][0] && degree <= this.windLUT[dir][1] } }) || '?' 
} // Temperature private localiseTemperature(celsius: number): string { const fahrenheit = 32 + celsius * 1.8 return `${celsius.toFixed(1)}°C/${fahrenheit.toFixed(1)}°F` } // Speed private localiseMSec(mSec: number): string { const mph = mSec * 2.236936 const kph = mSec * 3.6 return `${kph.toFixed(1)} KPH/${mph.toFixed(1)} MPH` } // Distance private localiseMetres(metres: number): string { const kilometres = metres / 1000 const miles = kilometres * 0.621371 const feet = metres * 3.2808 const imperial = miles < 1 ? `${feet.toLocaleString(undefined, {maximumFractionDigits: 0})} ft` : `${miles.toFixed(1)} mi` return `${kilometres.toFixed(1)} km/${imperial}` } private visibilityText(visibility: number | undefined) { /* Fog: Less than 1 km (3,300 ft) Mist: Between 1 km (0.62 mi) and 2 km (1.2 mi) Haze: From 2 km (1.2 mi) to 5 km (3.1 mi) */ if (!visibility || visibility > 5000) { return '' } return `, **Visibility:** ${this.localiseMetres(visibility)}` } // Locations can be expericing more than one type of weather at the same time private weatherDescription(weather: Array<weatherInfo>): string { return weather.map(w => this.titleCase(w.description)).join(', ') } // Localise and format a time private timestampTo12Hour( timezoneOffset: number, timestamp: number = 0 ): string { const dateObj = new Date( timestamp === 0 ? (timezoneOffset * 1000) + Date.now() : (timezoneOffset + timestamp) * 1000 ) let hours = dateObj.getUTCHours() const amPM = hours > 12 ? 
'PM' : 'AM' hours %= 12 if (hours === 0) { hours = 12 } const mins = String(dateObj.getUTCMinutes()).padStart(2, '0') return `${hours}:${mins} ${amPM}` } private sunsetInfo( sunrise: number, sunset: number, latitude: number, timezoneOffset: number ): string { if (sunrise === 0 && sunset === 0) { const currentMonth = new Date().getMonth() if ( (latitude > 0 && (currentMonth < 2 || currentMonth > 8)) || (latitude < 0 && (currentMonth > 3 && currentMonth < 7)) ) { return '🌚 Polar night is occuring 🌚' } else { return '🌞 Midnight sun is occuring 🌞' } } return `**Sunrise:** ${this.timestampTo12Hour(timezoneOffset, sunrise)}, **Sunset:** ${this.timestampTo12Hour(timezoneOffset, sunset)}` } private colorFromTemp(celsius: number): ColorResolvable { const scaledCelsius: number = celsius > 0 ? celsius * 6 : celsius * 3.5 return hslRgb((200 - scaledCelsius) % 360, .75, .6) } private formatWeather(data: any): weatherResponse { const outputStrings: Array<string> = [ //`**${data.name}**, ${data.sys.country} — ${timestampTo12Hour(data.timezone)}`, `**Currently:** ${this.weatherDescription(data.weather)}`, `**Cloud Cover:** ${data.clouds.all}%${this.visibilityText(data.visibility)}`, `**Temp:** ${this.localiseTemperature(data.main.temp)}, **Feels like:** ${this.localiseTemperature(data.main.feels_like)}`, `**Min:** ${this.localiseTemperature(data.main.temp_min)}, **Max:** ${this.localiseTemperature(data.main.temp_max)}`, `**Humidity:** ${data.main.humidity}%`, `**Wind:** ${this.localiseMSec(data.wind.speed)} @ ${data.wind.deg}°/${this.cardinalDirection(data.wind.deg)}`, this.sunsetInfo(data.sys.sunrise, data.sys.sunset, data.coord.lat, data.timezone) ] const tmpCountry = data.sys.country ? 
`, ${data.sys.country}` : '' return { msg: new MessageEmbed() .setAuthor({ name: `${data.name}${tmpCountry} — ${this.timestampTo12Hour(data.timezone)}`, iconURL: `http://openweathermap.org/img/wn/${data.weather[0].icon}@2x.png` }) .setColor(this.colorFromTemp(data.main.temp)) .setDescription(outputStrings.join('\n')), ephemeral: false } } private newBasicEmbed( description: string = '', color: ColorResolvable = '#ffffff' ): MessageEmbed { return new MessageEmbed() .setColor(color) .setAuthor({ name: 'Weather' }) .setDescription(description) } private formatError(data: any): weatherResponse { let errorMsg: string if (data?.message) { // Pick the best emoji for the response const whichEmoji = data.message.endsWith('not found') ? '🔎' : '🤔' // I think saw a request time out, and the response included the URL, // so lets avoid showing the API key in that situation. errorMsg = `${whichEmoji} ${this.titleCase(data.message.replace(Weather.apiKey, '[redacted]'))}` } else { // Not the most helpful of error messages :) errorMsg = `😵 What happened?!` } return { msg: this.newBasicEmbed(errorMsg, '#e3377b'), ephemeral: true } } private async fetchWeather(searchLocation: string): Promise<any> { return await fetch(this.buildRequestURL(searchLocation)) .then(async (resp) => { if (resp.ok) { return await resp.json() } else { return Promise.reject(await resp.json()) } }) .catch(error => Promise.reject(error) ) } private async handleInput(searchLocation: string): Promise<weatherResponse> { if (searchLocation.toLowerCase() === 'about') { return { msg: this.newBasicEmbed(Weather.aboutMessage, '#eb6e4b'), ephemeral: false } } if (searchLocation.toLowerCase() === 'help') { return { msg: this.newBasicEmbed(Weather.helpMessage, '#47aeef'), ephemeral: false } } if (!Weather.apiKey) { return { msg: this.newBasicEmbed(Weather.missingApiKey, '#e3377b'), ephemeral: true } } if (searchLocation === "Stephenville" || searchLocation === "rex") { searchLocation = "Stephenville, CA" } let output: 
weatherResponse try { const weather = await this.fetchWeather(searchLocation) output = this.formatWeather(weather) } catch (error) { output = this.formatError(error) } return output } }
Weather
gpt2.py
from typing import Union import torch from torch import nn from ..composition import AdapterCompositionBlock, parse_composition from ..heads import CausalLMHead, ClassificationHead, MultiLabelClassificationHead from ..model_mixin import InvertibleAdaptersMixin, ModelAdaptersMixin from .bert import ( BertEncoderAdaptersMixin, BertOutputAdaptersMixin, BertSelfOutputAdaptersMixin, ModelWithFlexibleHeadsAdaptersMixin, ) class GPT2AttentionAdaptersModule(BertSelfOutputAdaptersMixin, nn.Module): """Adds attention adapters to the Transformer module of DistilBert.""" def __init__(self, parent): super().__init__() # keep a reference to the parent module without registering as a submodule object.__setattr__(self, "parent", parent) self.config = parent.config @property def transformer_layer_norm(self): return None class GPT2OutputAdaptersModule(BertOutputAdaptersMixin, nn.Module): """Adds output adapters to the Transformer module of DistilBert.""" def __init__(self, parent): super().__init__() # keep a reference to the parent module without registering as a submodule object.__setattr__(self, "parent", parent) self.config = parent.config @property def transformer_layer_norm(self): return None class GPT2DecoderBlockAdaptersMixin(BertEncoderAdaptersMixin): """Adds adapters to the TransformerBlock module of DistilBert.""" def _init_adapter_modules(self): self.attention_adapters = GPT2AttentionAdaptersModule(self) self.output_adapters = GPT2OutputAdaptersModule(self) self.attention_adapters._init_adapter_modules() self.output_adapters._init_adapter_modules() def add_fusion_layer(self, adapter_names): self.attention_adapters.add_fusion_layer(adapter_names) self.output_adapters.add_fusion_layer(adapter_names) def add_adapter(self, adapter_name: str, layer_idx: int): self.attention_adapters.add_adapter(adapter_name, layer_idx) self.output_adapters.add_adapter(adapter_name, layer_idx) def delete_adapter(self, adapter_name): self.attention_adapters.delete_adapter(adapter_name) 
self.output_adapters.delete_adapter(adapter_name) def delete_fusion_layer(self, adapter_names): self.attention_adapters.delete_fusion_layer(adapter_names) self.output_adapters.delete_fusion_layer(adapter_names) def enable_adapters(self, adapter_names: list, unfreeze_adapters: bool, unfreeze_attention: bool): self.attention_adapters.enable_adapters(adapter_names, unfreeze_adapters, unfreeze_attention) self.output_adapters.enable_adapters(adapter_names, unfreeze_adapters, unfreeze_attention) class GPT2ModelAdapterMixin(InvertibleAdaptersMixin, ModelAdaptersMixin): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def _init_adapter_modules(self): super()._init_adapter_modules() # add adapters specified in config; invertible adapter will only be added if required for adapter_name in self.config.adapters.adapters: self._add_adapter(adapter_name) # fusion if hasattr(self.config, "fusion_models"): for fusion_adapter_names in self.config.fusion_models: self.add_fusion_layer(fusion_adapter_names) def _add_adapter(self, adapter_name: str): adapter_config = self.config.adapters.get(adapter_name) leave_out = adapter_config.get("leave_out", []) for i, layer in enumerate(self.base_model.h): if i not in leave_out: layer.add_adapter(adapter_name, i) self.add_invertible_adapter(adapter_name) def train_adapter(self, adapter_setup: Union[list, AdapterCompositionBlock]): self.train() self.freeze_model(True) adapter_setup = parse_composition(adapter_setup) self.enable_adapters(adapter_setup, True, False) self.enable_invertible_adapters(adapter_setup.flatten()) # use the adapters to be trained by default in every forward pass self.set_active_adapters(adapter_setup) def train_adapter_fusion(self, adapter_setup: Union[list, AdapterCompositionBlock], unfreeze_adapters=False):
def enable_adapters( self, adapter_setup: AdapterCompositionBlock, unfreeze_adapters: bool, unfreeze_attention: bool ): for layer in self.base_model.h: layer.enable_adapters(adapter_setup, unfreeze_adapters, unfreeze_attention) def adjust_attention_mask_for_parallel(self, hidden_states, attention_mask): if attention_mask is not None and hidden_states.shape[0] != attention_mask.shape[0]: repeats = [1] * len(attention_mask.shape) repeats[0] = hidden_states.shape[0] // attention_mask.shape[0] attention_mask = attention_mask.repeat(*repeats) return attention_mask def _add_fusion_layer(self, adapter_names): for layer in self.base_model.h: layer.add_fusion_layer(adapter_names) def _delete_adapter(self, adapter_name: str): for layer in self.base_model.h: layer.delete_adapter(adapter_name) self.delete_invertible_adapter(adapter_name) def _delete_fusion_layer(self, adapter_names): for layer in self.base_model.h: layer.delete_fusion_layer(adapter_names) def get_fusion_regularization_loss(self): reg_loss = 0.0 target = torch.zeros((self.config.hidden_size, self.config.hidden_size)).fill_diagonal_(1.0).to(self.device) for _, v in self.base_model.h._modules.items(): for _, layer_fusion in v.output_adapters.adapter_fusion_layer.items(): if hasattr(layer_fusion, "value"): reg_loss += 0.01 * (target - layer_fusion.value.weight).pow(2).sum() for _, layer_fusion in v.attention_adapters.adapter_fusion_layer.items(): if hasattr(layer_fusion, "value"): reg_loss += 0.01 * (target - layer_fusion.value.weight).pow(2).sum() return reg_loss def get_adapter(self, name): return_adapters = {} for idx, layer in enumerate(self.h): adapters = { "attention": layer.attention_adapters.adapters, "output": layer.output_adapters.adapters, } for key, adapt in adapters.items(): if hasattr(adapt, name): if idx not in return_adapters: return_adapters[idx] = {} return_adapters[idx][key] = getattr(adapt, name) return return_adapters class GPT2ModelHeadsMixin(ModelWithFlexibleHeadsAdaptersMixin): """Adds 
flexible heads to a GPT-2 model.""" head_types = { "classification": ClassificationHead, "multilabel_classification": MultiLabelClassificationHead, "causal_lm": CausalLMHead, } def add_classification_head( self, head_name, num_labels=2, layers=2, activation_function="tanh", overwrite_ok=False, multilabel=False, id2label=None, ): """ Adds a sequence classification head on top of the model. Args: head_name (str): The name of the head. num_labels (int, optional): Number of classification labels. Defaults to 2. layers (int, optional): Number of layers. Defaults to 2. activation_function (str, optional): Activation function. Defaults to 'tanh'. overwrite_ok (bool, optional): Force overwrite if a head with the same name exists. Defaults to False. multilabel (bool, optional): Enable multilabel classification setup. Defaults to False. """ if multilabel: head = MultiLabelClassificationHead(self, head_name, num_labels, layers, activation_function, id2label) else: head = ClassificationHead(self, head_name, num_labels, layers, activation_function, id2label) self.add_prediction_head(head, overwrite_ok) def add_causal_lm_head(self, head_name, overwrite_ok=False): """ Adds a causal language modeling head on top of the model. Args: head_name (str): The name of the head. overwrite_ok (bool, optional): Force overwrite if a head with the same name exists. Defaults to False. """ head = CausalLMHead(self, head_name) self.add_prediction_head(head, overwrite_ok=overwrite_ok)
self.train() self.freeze_model(True) adapter_setup = parse_composition(adapter_setup) self.enable_adapters(adapter_setup, unfreeze_adapters, True) # use the adapters to be trained by default in every forward pass self.set_active_adapters(adapter_setup)
index.js
import { combineReducers } from 'redux' import { channels,channelHasErrored, channelIsLoading } from './channels' import visibilityFilter from './visibilityFilter' const twitchApp = combineReducers({
channels, visibilityFilter, channelHasErrored, channelIsLoading }) export default twitchApp
work-item.ts
export interface WorkItem { id: string; stageDates: Array<string>; name: string; type: string; url: string; attributes?: any; }; // Doesn't do a whole lot at the moment, might consider getting rid of it. const createWorkItem = ({id, stageDates, name, type, url}): WorkItem => { return ({ id, stageDates, name, type, url, }); }; const workItemToCSV = (workItem: WorkItem) => { let s = ''; s += `${workItem.id},`; s += `${workItem.url},`; s += `${(cleanString(workItem.name))}`; workItem.stageDates.forEach(stageDate => s += `,${stageDate}`); s += `,${workItem.type}`; // No attributes right now. // const attributeKeys = Object.keys(this.Attributes); // if (attributeKeys.length === 0) { // s += ','; // } else { // attributeKeys.forEach(attributeKey => { // s += `,${WorkItem.cleanString(this.Attributes[attributeKey])}`; // }); // } return s; };
.replace(/\\/g, '') .trim(); export { createWorkItem, workItemToCSV, };
const cleanString = (s: string = ''): string => s.replace(/"/g, '') .replace(/'/g, '') .replace(/,/g, '')
dyndns.py
#!/usr/bin/env python3 import sys import argparse from requests import get from transip_rest_client import TransipRestClient def getOptions(args=sys.argv[1:]): parser = argparse.ArgumentParser(description="DynDNS: Updates a DNS record for a dynamic IP address.") parser.add_argument("-u", "--user", help="Your username.", required=True) parser.add_argument("-k", "--key", help="Key file containing RSA private key.", required=True) parser.add_argument("-n", "--name", help="Name of the record (e.g. 'www').", required=True) parser.add_argument("-d", "--domain", help="Existing DNS domain (e.g. 'example.com').", required=True) parser.add_argument("-v", "--verbose", action='store_true', help="Verbose mode.") options = parser.parse_args(args) return options def find(arr , id): for x in arr: if x["name"] == id: return x def main(key, username, domain, name, verbose):
if __name__ == "__main__": options = getOptions() if options.verbose: print("Verbose output enabled.") main(options.key, options.user, options.domain, options.name, options.verbose)
with open(key, 'r') as f: my_RSA_key = f.read() if "BEGIN RSA PRIVATE KEY" not in my_RSA_key: print("Key in incorrect format, convert the key with the following command:") print("openssl rsa -in privatekey.txt -out rsaprivatekey.txt") return newIp = get('https://api.ipify.org').text if verbose: print(f"Retrieved IP from api.ipify.org: {newIp}") client = TransipRestClient(user=username, rsaprivate_key=my_RSA_key, global_key=True) entries = client.get_dns_entries(domain=domain) if verbose: print(f"Found {len(entries)} DNS entries") entry = find(entries, name) if entry is None: print(f"No ip found, adding {newIp}") client.post_dns_entry(domain=domain, name=name, expire=300, record_type='A', content=newIp) else: oldIp = entry["content"] if verbose: print(f"Found current IP in DNS entry: {oldIp}") if oldIp != newIp: print(f"Updating {oldIp} to {newIp}") client.patch_dns_entry(domain=domain, name=name, record_type='A', content=newIp) else: print(f"Not updating {oldIp}")
test_entrypoint.py
import json import pytest from ..entrypoint import ( AudiobooksEntryPoint, EbooksEntryPoint, EntryPoint, EverythingEntryPoint, MediumEntryPoint, ) from ..external_search import Filter from ..model import Edition, Work from ..testing import DatabaseTest class TestEntryPoint(DatabaseTest): def test_defaults(self): everything, ebooks, audiobooks = EntryPoint.ENTRY_POINTS assert EverythingEntryPoint == everything assert EbooksEntryPoint == ebooks assert AudiobooksEntryPoint == audiobooks display = EntryPoint.DISPLAY_TITLES assert "eBooks" == display[ebooks] assert "Audiobooks" == display[audiobooks] assert Edition.BOOK_MEDIUM == EbooksEntryPoint.INTERNAL_NAME assert Edition.AUDIO_MEDIUM == AudiobooksEntryPoint.INTERNAL_NAME assert "http://schema.org/CreativeWork" == everything.URI for ep in (EbooksEntryPoint, AudiobooksEntryPoint): assert ep.URI == Edition.medium_to_additional_type[ep.INTERNAL_NAME] def test_no_changes(self): # EntryPoint doesn't modify queries or search filters. qu = self._db.query(Edition) assert qu == EntryPoint.modify_database_query(self._db, qu) args = dict(arg="value") filter = object() assert filter == EverythingEntryPoint.modify_search_filter(filter) def test_register(self): class Mock(object): pass args = [Mock, "Mock!"] with pytest.raises(ValueError) as excinfo: EntryPoint.register(*args) assert "must define INTERNAL_NAME" in str(excinfo.value) # Test successful registration. Mock.INTERNAL_NAME = "a name" EntryPoint.register(*args) assert Mock in EntryPoint.ENTRY_POINTS assert "Mock!" == EntryPoint.DISPLAY_TITLES[Mock] assert Mock not in EntryPoint.DEFAULT_ENABLED # Can't register twice. with pytest.raises(ValueError) as excinfo: EntryPoint.register(*args) assert "Duplicate entry point internal name: a name" in str(excinfo.value) EntryPoint.unregister(Mock)
# Test successful registration as a default-enabled entry point. EntryPoint.register(*args, default_enabled=True) assert Mock in EntryPoint.DEFAULT_ENABLED # Can't register two different entry points with the same # display name. class Mock2(object): INTERNAL_NAME = "mock2" with pytest.raises(ValueError) as excinfo: EntryPoint.register(Mock2, "Mock!") assert "Duplicate entry point display name: Mock!" in str(excinfo.value) EntryPoint.unregister(Mock) assert Mock not in EntryPoint.DEFAULT_ENABLED class TestEverythingEntryPoint(DatabaseTest): def test_no_changes(self): # EverythingEntryPoint doesn't modify queries or searches # beyond the default behavior for any entry point. assert "All" == EverythingEntryPoint.INTERNAL_NAME qu = self._db.query(Edition) assert qu == EntryPoint.modify_database_query(self._db, qu) args = dict(arg="value") filter = object() assert filter == EverythingEntryPoint.modify_search_filter(filter) class TestMediumEntryPoint(DatabaseTest): def test_modify_database_query(self): # Create a video, and a entry point that contains videos. work = self._work(with_license_pool=True) work.license_pools[0].presentation_edition.medium = Edition.VIDEO_MEDIUM class Videos(MediumEntryPoint): INTERNAL_NAME = Edition.VIDEO_MEDIUM qu = self._db.query(Work) # The default entry points filter out the video. for entrypoint in EbooksEntryPoint, AudiobooksEntryPoint: modified = entrypoint.modify_database_query(self._db, qu) assert [] == modified.all() # But the video entry point includes it. 
videos = Videos.modify_database_query(self._db, qu) assert [work.id] == [x.id for x in videos] def test_modify_search_filter(self): class Mock(MediumEntryPoint): INTERNAL_NAME = object() filter = Filter(media=object()) Mock.modify_search_filter(filter) assert [Mock.INTERNAL_NAME] == filter.media class TestLibrary(DatabaseTest): """Test a Library's interaction with EntryPoints.""" def test_enabled_entrypoints(self): l = self._default_library setting = l.setting(EntryPoint.ENABLED_SETTING) # When the value is not set, the default is used. assert EntryPoint.DEFAULT_ENABLED == list(l.entrypoints) setting.value = None assert EntryPoint.DEFAULT_ENABLED == list(l.entrypoints) # Names that don't correspond to registered entry points are # ignored. Names that do are looked up. setting.value = json.dumps( ["no such entry point", AudiobooksEntryPoint.INTERNAL_NAME] ) assert [AudiobooksEntryPoint] == list(l.entrypoints) # An empty list is a valid value. setting.value = json.dumps([]) assert [] == list(l.entrypoints)
locales.ts
import {LocaliseApi} from "../api/localise-api"; import {resolve} from "path"; import {ensureDirSync, writeJSONSync} from "fs-extra"; import {flatten} from "flat"; import {get} from 'lodash'; import {IConfig} from "../conf/iconfig"; import {ILogger} from "../conf/ilog"; export class
{ readonly api: LocaliseApi; readonly logger: ILogger; constructor(readonly conf: IConfig) { this.api = new LocaliseApi(this.conf.localiseApiKey); this.logger = conf.logger; } /** import translated keys into localise project add tag "new" to new keys - ignore-new=false - ignore-existing=true - delete-absent=true */ async sync(locale: string, keys: { [key: string]: any }) { return this.api.sync(locale, keys); } /* Fetch and merge Locales from Localise Project, then write them to "outDir" It will ignore all keys not existed in the base * */ async fetchAndMerge(baseLanguageKeys: {[key: string]: any}, outDir: string) { ensureDirSync(outDir); const defaultLangJson = flatten(baseLanguageKeys); const contents = await this.api.getExportAllJson(); for (const locale in contents) { const localeCode = locale; const translated = contents[locale]; const result = {}; for (const trans in defaultLangJson) { result[trans] = get(translated, trans) || defaultLangJson[trans]; //translation is empty, so we use default value } const outputFilePath = resolve(outDir, `${localeCode}.json`); writeJSONSync(outputFilePath, result); this.logger.warn(`Pulled localise ${outputFilePath}`); } const avaiLocaleCodes = Object.keys(contents); const locales = (await this.api.getLocales()) .filter(l => avaiLocaleCodes.includes(l.code)) .map(l => { return {code: l.code, name: l.name}; }); writeJSONSync(`${outDir}/locales.json`, locales); this.logger.info(`Languages has been merged in ${outDir}`); } }
Locales
webpack.config.js
const path = require('path'); const webpack = require('webpack'); module.exports = { entry: { camera: ['./client/camera/entry.js'], projector: ['./client/projector/entry.js'], editor: ['./client/editor/entry.js'], }, output: { path: path.join(__dirname, 'www'),
{ test: /\.js$/, exclude: [/node_modules/], loader: 'babel-loader', query: { cacheDirectory: '.babel-cache', sourceMap: false, }, }, { test: /\.css$/, exclude: [/node_modules/], use: [ { loader: 'style-loader' }, { loader: 'css-loader', options: { localIdentName: '[path][name]--[local]--[hash:base64:10]', }, }, ], }, // Per https://github.com/devongovett/pdfkit/issues/659#issuecomment-321452649 { test: /node_modules\/(pdfkit|fontkit|png-js|linebreak|unicode-properties|brotli)\//, loader: 'transform-loader?brfs', }, { test: /node_modules\/unicode-properties.*\.json$/, use: 'json-loader', }, ], }, plugins: [ new require('copy-webpack-plugin')([ { from: 'node_modules/monaco-editor/min/vs', to: 'vs', }, ]), ], }; if (process.env.NODE_ENV !== 'production') { module.exports.plugins.push(new webpack.HotModuleReplacementPlugin()); Object.values(module.exports.entry).forEach(entry => { entry.unshift('webpack-hot-middleware/client?reload=true'); }); }
filename: '[name].js', }, module: { rules: [
tauruscombobox.py
#!/usr/bin/env python # -*- coding: utf-8 -*- ############################################################################# ## # This file is part of Taurus ## # http://taurus-scada.org ## # Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain ## # Taurus is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. ## # Taurus is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. ## # You should have received a copy of the GNU Lesser General Public License # along with Taurus. If not, see <http://www.gnu.org/licenses/>. ## ############################################################################# """This module provides a set of basic taurus widgets based on QCheckBox""" __all__ = ["TaurusAttrListComboBox", "TaurusValueComboBox"] __docformat__ = 'restructuredtext' from taurus.external.qt import Qt from taurus.core import DataType, TaurusEventType from taurus.core.taurusattribute import TaurusAttribute from taurus.qt.qtgui.base import TaurusBaseWidget, TaurusBaseWritableWidget from taurus.core.util import eventfilters class TaurusValueComboBox(Qt.QComboBox, TaurusBaseWritableWidget): '''This widget shows a combobox that offers a limited choice of values that can be set on an attribute.''' def __init__(self, parent=None, designMode=False): self._previousModelName = None self._lastValueByUser = None name = self.__class__.__name__ self.call__init__wo_kw(Qt.QComboBox, parent) self.call__init__(TaurusBaseWritableWidget, name, designMode=designMode) #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- # Helper methods 
#-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- def _setCurrentIndex(self, index): bs = self.blockSignals(True) try: self.setCurrentIndex(index) finally: self.blockSignals(bs) #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- # TaurusBaseWidget overwriting #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- def preAttach(self): '''reimplemented from :class:`TaurusBaseWritableWidget`''' TaurusBaseWritableWidget.preAttach(self) self.currentIndexChanged.connect(self.writeIndexValue) self.applied.connect(self.writeValue) def postDetach(self): '''reimplemented from :class:`TaurusBaseWritableWidget`''' TaurusBaseWritableWidget.postDetach(self) try: self.currentIndexChanged.disconnect(self.writeIndexValue) self.applied.disconnect(self.writeValue) except TypeError: # In new style-signal if a signal is disconnected without # previously was connected it, it raises a TypeError pass #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- # TaurusBaseWritableWidget overwriting / Pending operations #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- def getValue(self): """ Get the value that the widget is displaying now, not the value of the attribute. """ model = self.getModelObj() if model is None: return None dtype = model.type new_value = self.itemData(self.currentIndex()) if new_value is None: return None if dtype == DataType.Integer: func = int elif dtype == DataType.Float: func = float elif dtype == DataType.String: func = str elif dtype == DataType.Boolean: func = bool else: return None new_value = Qt.from_qvariant(new_value, func) return new_value def setValue(self, value): """ Set the value for the widget to display, not the value of the attribute. 
""" index = self.findData(Qt.QVariant(value)) self._setCurrentIndex(index) def updateStyle(self): '''reimplemented from :class:`TaurusBaseWritableWidget`''' if self.hasPendingOperations(): self.setStyleSheet('TaurusValueComboBox {color: blue; }') else: self.setStyleSheet('TaurusValueComboBox {}') super(TaurusValueComboBox, self).updateStyle() #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- # signals, gui events... things related to "write" in the end #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- @Qt.pyqtSlot(int, name='currentIndexChanged') def writeIndexValue(self, index): '''slot called to emit a valueChanged signal when the currentIndex is changed''' self.emitValueChanged() if self.getAutoApply(): self.applied.emit() def keyPressEvent(self, event): '''reimplemented to emit an 'applied()' signal when Enter (or Return) key is pressed''' if event.key() in [Qt.Qt.Key_Return, Qt.Qt.Key_Enter]: self.applied.emit() event.accept() else: return Qt.QComboBox.keyPressEvent(self, event) #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- # TaurusValueComboBox own interface #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- def setValueNames(self, names): ''' Sets the correspondence between the values to be applied and their associated text to show in the combobox. :param names: (sequence<tuple>) A sequence of (name,value) tuples, where each attribute value gets a name for display ''' bs = self.blockSignals(True) self.clear() self.blockSignals(bs) self.addValueNames(names) def addValueNames(self, names): ''' Add new value-name associations to the combobox. ... 
seealso: :meth:`setValueNames` :param names: (sequence<tuple>) A sequence of (name,value) tuples, where each attribute value gets a name for display ''' bs = self.blockSignals(True) try: for k, v in names: self.addItem(k, Qt.QVariant(v)) # Ok, now we should see if the current value matches any # of the newly added names. This is kinda a refresh: mv = self.getModelValueObj() if mv is not None: self.setValue(mv.wvalue) finally: self.blockSignals(bs) self.emitValueChanged() def getValueString(self, value, default='UNKNOWN(%s)'): """Returns the corresponding name in the combobox out of a value (or a default value if not found). :param value: value to look up :param default: (str) value in case it is not found. It accepts a '%s' placeholder which will be substituted with str(value). It defaults to 'UNKNOWN(%s)'. """ item = self.findData(Qt.QVariant(value)) if item < 0: if '%s' in default: return default % str(value) else: return default return str(self.itemText(item)) def teachDisplayTranslationToWidget(self, widget, default='UNKNOWN(%s)'): """ Makes a label object change the displayed text by the corresponding value of the combobox. This is implemented for the general case and may be not what you expect in some cases (as for example, it fires a fake periodic event which may be problematic if these are being filtered out). """ # We reimplement label.displayValue so that instead of the normal # value it displays the string it has associated in the combobox. widget.displayValue = lambda v: self.getValueString(v, default) # Simulate a first event. Otherwise the displayValue will be # the default, not modified by us model = widget.getModelObj() if model: widget.fireEvent(model, TaurusEventType.Periodic, model.getValueObj()) def setQModel(self, *args, **kwargs): '''access to :meth:`QCombobox.setModel` .. 
seealso: :meth:`setModel` ''' Qt.QComboBox.setModel(self, *args, **kwargs) def setModel(self, m): '''Reimplemented from :meth:`TaurusBaseWritableWidget.setModel` ''' if isinstance(m, Qt.QAbstractItemModel): self.warning( "Deprecation warning: use setQModel() if you want to set a Qt Item Model. The setModel() method is reserved for Taurus models") return Qt.QComboBox.setModel(self, m) ret = TaurusBaseWritableWidget.setModel(self, m) self.emitValueChanged() return ret @classmethod def getQtDesignerPluginInfo(cls): '''reimplemented from :class:`TaurusBaseWritableWidget`''' ret = TaurusBaseWritableWidget.getQtDesignerPluginInfo() ret['module'] = 'taurus.qt.qtgui.input' ret['icon'] = "designer:combobox.png" return ret #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- # QT properties #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- model = Qt.pyqtProperty("QString", TaurusBaseWidget.getModel, TaurusBaseWidget.setModel, TaurusBaseWidget.resetModel) useParentModel = Qt.pyqtProperty("bool", TaurusBaseWidget.getUseParentModel, TaurusBaseWidget.setUseParentModel, TaurusBaseWidget.resetUseParentModel) autoApply = Qt.pyqtProperty("bool", TaurusBaseWritableWidget.getAutoApply, TaurusBaseWritableWidget.setAutoApply, TaurusBaseWritableWidget.resetAutoApply) forcedApply = Qt.pyqtProperty("bool", TaurusBaseWritableWidget.getForcedApply, TaurusBaseWritableWidget.setForcedApply, TaurusBaseWritableWidget.resetForcedApply) class TaurusAttrListComboBox(Qt.QComboBox, TaurusBaseWidget): """Combobox whose items reflect the items read from a 1D attribute of dtype str """ def __init__(self, parent=None, designMode=False): name = self.__class__.__name__ self.call__init__wo_kw(Qt.QComboBox, parent) self.call__init__(TaurusBaseWidget, name) self.insertEventFilter(eventfilters.IGNORE_CONFIG) self.setSizeAdjustPolicy(Qt.QComboBox.AdjustToContents) self.defineStyle() self._lastAttrList = None def defineStyle(self): """Defines the initial style 
for the widget """ self.updateStyle() #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- # TaurusBaseWidget over writing #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- def getModelClass(self): """reimplemented from :class:`TaurusBaseWidget`""" return TaurusAttribute def handleEvent(self, evt_src, evt_type, evt_value): """reimplemented from :class:`TaurusBaseWidget`""" if evt_type == TaurusEventType.Error: attrList = [] elif evt_src is None or evt_value is None: attrList = [] else: attrList = list(evt_value.rvalue) attrList.sort() if attrList != self._lastAttrList: self._lastAttrList = attrList self.clear() self.addItems(attrList) self.updateStyle() def updateStyle(self): """reimplemented from :class:`TaurusBaseWidget`""" self.update() def setQModel(self, *args, **kwargs):
def setModel(self, m): """reimplemented from :class:`TaurusBaseWidget`""" if isinstance(m, Qt.QAbstractItemModel): self.warning(("Deprecation warning: use setQModel() if you" + " want to set a Qt Item Model. The setModel()" + " method is reserved for Taurus models")) return Qt.QAbstractItemView.setQModel(self, m) return TaurusBaseWidget.setModel(self, m) @classmethod def getQtDesignerPluginInfo(cls): """reimplemented from :class:`TaurusBaseWidget`""" ret = TaurusBaseWidget.getQtDesignerPluginInfo() ret['group'] = 'Taurus Input' ret['module'] = 'taurus.qt.qtgui.input' ret['icon'] = "designer:combobox.png" return ret #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- # QT properties #-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- model = Qt.pyqtProperty("QString", TaurusBaseWidget.getModel, TaurusBaseWidget.setModel, TaurusBaseWidget.resetModel) useParentModel = Qt.pyqtProperty("bool", TaurusBaseWidget.getUseParentModel, TaurusBaseWidget.setUseParentModel, TaurusBaseWidget.resetUseParentModel) ##################################################################### # Testing ##################################################################### def _taurusAttrListTest(): """tests taurusAttrList. 
Model: an attribute containing a list of strings""" from taurus.qt.qtgui.application import TaurusApplication a = TaurusApplication() # model = sys.argv[1] # model = "eval:['foo','bar']" model = "sys/tg_test/1/string_spectrum" w = TaurusAttrListComboBox() w.setModel(model) w.show() return a.exec_() def _taurusValueComboboxTest(): from taurus.qt.qtgui.application import TaurusApplication """tests TaurusValueCombobox """ # model = sys.argv[1] model = 'sys/tg_test/1/short_scalar' names = [ ('name0', 0), ('name1', 1), ('name2', 2), ('name3', 3) ] a = TaurusApplication() w = TaurusValueComboBox() w.setModel(model) w.addValueNames(names) #w.autoApply = True w.show() return a.exec_() if __name__ == '__main__': import sys # main = _taurusValueComboboxTest #uncomment to test TaurusValueCombobox main = _taurusAttrListTest # uncomment to testtaurusAttrList sys.exit(main())
"""access to :meth:`QAbstractItemView.setModel` .. seealso: :meth:`setModel` """ return Qt.QAbstractItemView.setModel(self, *args, **kwargs)
iter_chat_members.py
# Pyrogram - Telegram MTProto API Client Library for Python # Copyright (C) 2017-2021 Dan <https://github.com/delivrance> # # This file is part of Pyrogram. # # Pyrogram is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Pyrogram is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with Pyrogram. If not, see <http://www.gnu.org/licenses/>. from string import ascii_lowercase from typing import Union, AsyncGenerator, Optional from pyrogram import raw from pyrogram import types from pyrogram.scaffold import Scaffold class Filters: ALL = "all" KICKED = "kicked" RESTRICTED = "restricted" BOTS = "bots" RECENT = "recent" ADMINISTRATORS = "administrators" QUERIES = [""] + [str(i) for i in range(10)] + list(ascii_lowercase) QUERYABLE_FILTERS = (Filters.ALL, Filters.KICKED, Filters.RESTRICTED) class IterChatMembers(Scaffold): async def iter_chat_members( self, chat_id: Union[int, str], limit: int = 0, query: str = "", filter: str = Filters.RECENT, last_member_count: int = 0, # to speedup iteration for small chats ) -> Optional[AsyncGenerator["types.ChatMember", None]]: """Iterate through the members of a chat sequentially. This convenience method does the same as repeatedly calling :meth:`~pyrogram.Client.get_chat_members` in a loop, thus saving you from the hassle of setting up boilerplate code. It is useful for getting the whole members list of a chat with a single call. Parameters: chat_id (``int`` | ``str``): Unique identifier (int) or username (str) of the target chat. 
limit (``int``, *optional*): Limits the number of members to be retrieved. By default, no limit is applied and all members are returned [1]_. query (``str``, *optional*): Query string to filter members based on their display names and usernames. Defaults to "" (empty string) [2]_. filter (``str``, *optional*): Filter used to select the kind of members you want to retrieve. Only applicable for supergroups and channels. It can be any of the followings:
*"restricted"* - restricted members only, *"bots"* - bots only, *"recent"* - recent members only, *"administrators"* - chat administrators only. Defaults to *"recent"*. last_member_count (``int``): Last member count number .. [1] Server limit: on supergroups, you can get up to 10,000 members for a single query and up to 200 members on channels. .. [2] A query string is applicable only for *"all"*, *"kicked"* and *"restricted"* filters only. Returns: ``Generator``: A generator yielding :obj:`~pyrogram.types.ChatMember` objects. Example: .. code-block:: python # Iterate though all chat members for member in app.iter_chat_members("pyrogramchat"): print(member.user.first_name) # Iterate though all administrators for member in app.iter_chat_members("pyrogramchat", filter="administrators"): print(member.user.first_name) # Iterate though all bots for member in app.iter_chat_members("pyrogramchat", filter="bots"): print(member.user.first_name) """ current = 0 yielded = set() if query: queries = [query] else: if last_member_count > 200: queries = QUERIES else: queries = [query] # queries = [query] if query else QUERIES total = limit or (1 << 31) - 1 limit = min(200, total) resolved_chat_id = await self.resolve_peer(chat_id) if filter not in QUERYABLE_FILTERS: queries = [""] import arrow for q in queries: offset = 0 while True: # now=arrow.utcnow().timestamp() chat_members = await self.get_chat_members( chat_id=chat_id, offset=offset, limit=limit, query=q, filter=filter ) # print(f"got chat members in : {arrow.utcnow().timestamp()-now}") if not chat_members: break if isinstance(resolved_chat_id, raw.types.InputPeerChat): total = len(chat_members) offset += len(chat_members) for chat_member in chat_members: user_id = chat_member.user.id if user_id in yielded: continue yield chat_member yielded.add(chat_member.user.id) current += 1 if current >= total: return
*"all"* - all kind of members, *"kicked"* - kicked (banned) members only,
lib.rs
//! file_url //! //! Makes it easier to Path/PathBuf to/from file URLs. //! //! Author: Jared Adam Smith //! license: MIT //! © 2021 use std::error::Error; use std::fmt; use std::path::{Path, PathBuf}; use std::string::FromUtf8Error; use std::borrow::Cow; use lazy_static::lazy_static; use regex::Regex; use urlencoding::{decode, encode}; lazy_static! { // We don't want to percent encode the colon on a Windows drive letter. static ref WINDOWS_DRIVE: Regex = Regex::new(r"[a-zA-Z]:").unwrap(); static ref SEPARATOR: Regex = Regex::new(r"[/\\]").unwrap(); } static FORWARD_SLASH: &str = "/"; /// Error for file paths that don't decode to /// valid UTF-8 strings. #[derive(Debug)] pub struct UTFDecodeError { details: String, } impl UTFDecodeError { fn new(msg: &str) -> UTFDecodeError {
} impl fmt::Display for UTFDecodeError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.details) } } impl Error for UTFDecodeError { fn description(&self) -> &str { &self.details } } /// Percent-encodes the path component. Ignores /// Microsoft Windows drive letters and separator /// characters. /// /// # Example: /// ``` /// use file_url::encode_file_component; /// /// let enc = encode_file_component("some file.txt"); /// assert_eq!(enc, "some%20file.txt"); /// /// let windows_drive = encode_file_component("C:"); /// assert_eq!(windows_drive, "C:"); /// ``` pub fn encode_file_component(path_part: &str) -> Cow<str> { // If it's a separator char or a Windows drive return // as-is. if SEPARATOR.is_match(path_part) || WINDOWS_DRIVE.is_match(path_part) { Cow::from(path_part) } else { encode(path_part) } } /// Turns a file URL into a PathBuf. Note that because /// `std::path::PathBuf` is backed by a `std::ffi::OsString` /// the result is platform-dependent, i.e. Microsoft Windows /// paths will not be properly processed on Unix-like systems /// and vice-versa. Also note that because the bytes of a /// valid file path can be non-UTF8 we have to return a /// Result in case the string decode fails. /// /// # Examples: /// ``` /// use std::path::PathBuf; /// use file_url::file_url_to_pathbuf; /// /// let p_buf = file_url_to_pathbuf("file:///foo/bar%20baz.txt").unwrap(); /// assert_eq!(p_buf, PathBuf::from("/foo/bar baz.txt")); /// ``` pub fn file_url_to_pathbuf(file_url: &str) -> Result<PathBuf, FromUtf8Error> { SEPARATOR .split(file_url) .enumerate() .map(|(i, url_piece)| { if i == 0 && url_piece == "file:" { // File url should always be abspath Ok(String::from(FORWARD_SLASH)) } else { let dec_str = decode(url_piece); match dec_str { Ok(decoded) => Ok(decoded.into_owned()), Err(e) => Err(e), } } }) .collect() } /// Method for converting std::path::PathBuf and /// `std::path::Path` to a file URL. 
pub trait PathFileUrlExt { /// Assuming a PathBuf or Path is valid UTF8, converts /// to a file URL as an owned String. fn to_file_url(&self) -> Result<String, UTFDecodeError>; } /// Method for constructing a `std::path::PathBuf` from a file URL. pub trait PathFromFileUrlExt<PathBuf> { /// Constructs a PathBuf from the supplied &str. fn from_file_url(file_url: &str) -> Result<PathBuf, FromUtf8Error>; } impl PathFileUrlExt for Path { fn to_file_url(&self) -> Result<String, UTFDecodeError> { let path_parts: Result<PathBuf, UTFDecodeError> = self .components() .map(|part| match part.as_os_str().to_str() { Some(part) => Ok(encode_file_component(part).to_string()), None => Err(UTFDecodeError::new("File path not UTF-8 compatible!")), }) .collect(); match path_parts { // Unwrap shouldn't fail here since everything should be properly decoded. Ok(parts) => Ok(format!("file://{}", parts.to_str().unwrap())), Err(e) => Err(e), } } } impl PathFromFileUrlExt<PathBuf> for PathBuf { fn from_file_url(file_url: &str) -> Result<PathBuf, FromUtf8Error> { file_url_to_pathbuf(file_url) } } #[cfg(test)] mod tests { use super::*; use std::path::PathBuf; #[test] fn basic_pathbuf_to_url() { let p = PathBuf::from("/some/file.txt"); let url = p.to_file_url().unwrap(); let s = url.as_str(); assert_eq!(s, "file:///some/file.txt"); } #[test] fn oddball_pathbuf_to_url() { let p = PathBuf::from("/gi>/some & what.whtvr"); let url = p.to_file_url().unwrap(); let s = url.as_str(); assert_eq!(s, "file:///gi%3E/some%20%26%20what.whtvr"); } #[cfg(target_os = "windows")] #[test] fn windows_pathbuf_to_url() { let p = PathBuf::from(r"c:\WINDOWS\clock.avi"); let url = p.to_file_url().unwrap(); let s = url.as_str(); assert_eq!(s, "file:///c:/WINDOWS/clock.avi"); } #[test] fn basic_pathbuf_from_url() { let one = PathBuf::from("/some/file.txt"); let two = PathBuf::from_file_url("file:///some/file.txt").unwrap(); assert_eq!(one, two); } #[test] fn oddball_pathbuf_from_url() { let one = 
PathBuf::from_file_url("file:///gi%3E/some%20%26%20what.whtvr").unwrap(); let two = PathBuf::from("/gi>/some & what.whtvr"); assert_eq!(one, two); } #[test] fn basic_path_to_url() { let one = Path::new("/foo/bar.txt").to_file_url().unwrap(); let two = "file:///foo/bar.txt"; assert_eq!(one, two); } }
UTFDecodeError { details: msg.to_string(), } }
Feedback.tsx
import React from 'react'; import Base from './shared/Base'; import { HeaderSections, FEEDBACK_FORM_URL } from './shared/PlaynetConstants'; import './styles/Feedback.scss'; function Feedback(): JSX.Element { return ( <Base section={HeaderSections.FEEDBACK}> <div id={'feedback-container'}> <h2>Feedback</h2> <p> So that’s a very simple overview as to how Youtube works! <br/> If you liked the problems in that space, maybe you’d like computer science :) <br/> <br/> <a href={FEEDBACK_FORM_URL} className='playnet-button' id='feedback-link' target='_blank' rel='noreferrer'>Got any feedback? We’d love to improve!</a> </p> </div> </Base>
export default Feedback;
); }
__init__.py
# Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. DEPS = [ 'recipe_engine/cipd', 'recipe_engine/context', 'recipe_engine/json', 'recipe_engine/path', 'recipe_engine/platform', 'recipe_engine/step', ] from recipe_engine.recipe_api import Property from recipe_engine.config import ConfigGroup, Single PROPERTIES = { '$gn/macos_sdk': Property( help='Properties specifically for the macos_sdk module.', param_name='sdk_properties', kind=ConfigGroup( # pylint: disable=line-too-long
# # https://chrome-infra-packages.appspot.com/p/infra_internal/ios/xcode/mac/+/ # # For an up to date list of the latest SDK builds. sdk_version=Single(str), # The CIPD toolchain tool package and version. tool_pkg=Single(str), tool_ver=Single(str), ), default={ 'sdk_version': '10b61', 'tool_package': 'infra/tools/mac_toolchain/${platform}', 'tool_version': 'git_revision:434f5462a77e7103f9d610fa5cabc426bb21502e', }, ) }
# XCode build version number. Internally maps to an XCode build id like # '9c40b'. See
AllergyIntolerance_Reaction.rs
#![allow(unused_imports, non_camel_case_types)] use crate::models::r5::Annotation::Annotation; use crate::models::r5::CodeableConcept::CodeableConcept; use crate::models::r5::CodeableReference::CodeableReference; use crate::models::r5::Element::Element; use crate::models::r5::Extension::Extension; use serde_json::json; use serde_json::value::Value; use std::borrow::Cow; /// Risk of harmful or undesirable physiological response which is specific to an /// individual and associated with exposure to a substance. #[derive(Debug)] pub struct AllergyIntolerance_Reaction<'a> { pub(crate) value: Cow<'a, Value>, } impl AllergyIntolerance_Reaction<'_> { pub fn new(value: &Value) -> AllergyIntolerance_Reaction { AllergyIntolerance_Reaction { value: Cow::Borrowed(value), } } pub fn to_json(&self) -> Value { (*self.value).clone() } /// Extensions for description pub fn _description(&self) -> Option<Element> { if let Some(val) = self.value.get("_description") { return Some(Element { value: Cow::Borrowed(val), }); } return None; } /// Extensions for onset pub fn _onset(&self) -> Option<Element> { if let Some(val) = self.value.get("_onset") { return Some(Element { value: Cow::Borrowed(val), }); } return None; } /// Extensions for severity pub fn _severity(&self) -> Option<Element> { if let Some(val) = self.value.get("_severity") { return Some(Element { value: Cow::Borrowed(val), }); } return None; } /// Text description about the reaction as a whole, including details of the /// manifestation if required. pub fn description(&self) -> Option<&str> { if let Some(Value::String(string)) = self.value.get("description") { return Some(string); } return None; } /// Identification of the route by which the subject was exposed to the substance. 
pub fn exposure_route(&self) -> Option<CodeableConcept> { if let Some(val) = self.value.get("exposureRoute") { return Some(CodeableConcept { value: Cow::Borrowed(val), }); } return None; } /// May be used to represent additional information that is not part of the basic /// definition of the element. To make the use of extensions safe and manageable, /// there is a strict set of governance applied to the definition and use of /// extensions. Though any implementer can define an extension, there is a set of /// requirements that SHALL be met as part of the definition of the extension. pub fn extension(&self) -> Option<Vec<Extension>> { if let Some(Value::Array(val)) = self.value.get("extension") { return Some( val.into_iter() .map(|e| Extension { value: Cow::Borrowed(e), }) .collect::<Vec<_>>(), ); } return None; } /// Unique id for the element within a resource (for internal references). This may be /// any string value that does not contain spaces. pub fn id(&self) -> Option<&str> { if let Some(Value::String(string)) = self.value.get("id") { return Some(string); } return None; } /// Clinical symptoms and/or signs that are observed or associated with the adverse /// reaction event. pub fn manifestation(&self) -> Vec<CodeableReference> { self.value .get("manifestation") .unwrap() .as_array() .unwrap() .into_iter() .map(|e| CodeableReference { value: Cow::Borrowed(e), }) .collect::<Vec<_>>() } /// May be used to represent additional information that is not part of the basic /// definition of the element and that modifies the understanding of the element /// in which it is contained and/or the understanding of the containing element's /// descendants. Usually modifier elements provide negation or qualification. To make /// the use of extensions safe and manageable, there is a strict set of governance /// applied to the definition and use of extensions. 
Though any implementer can define /// an extension, there is a set of requirements that SHALL be met as part of the /// definition of the extension. Applications processing a resource are required to /// check for modifier extensions. Modifier extensions SHALL NOT change the meaning /// of any elements on Resource or DomainResource (including cannot change the meaning /// of modifierExtension itself). pub fn modifier_extension(&self) -> Option<Vec<Extension>> { if let Some(Value::Array(val)) = self.value.get("modifierExtension") { return Some( val.into_iter() .map(|e| Extension { value: Cow::Borrowed(e), }) .collect::<Vec<_>>(), ); } return None; } /// Additional text about the adverse reaction event not captured in other fields. pub fn note(&self) -> Option<Vec<Annotation>> { if let Some(Value::Array(val)) = self.value.get("note") { return Some( val.into_iter() .map(|e| Annotation { value: Cow::Borrowed(e), }) .collect::<Vec<_>>(), ); } return None; } /// Record of the date and/or time of the onset of the Reaction. pub fn onset(&self) -> Option<&str> { if let Some(Value::String(string)) = self.value.get("onset") { return Some(string); } return None; } /// Clinical assessment of the severity of the reaction event as a whole, potentially /// considering multiple different manifestations. pub fn severity(&self) -> Option<&str> { if let Some(Value::String(string)) = self.value.get("severity") { return Some(string); } return None; } /// Identification of the specific substance (or pharmaceutical product) considered /// to be responsible for the Adverse Reaction event. Note: the substance for a /// specific reaction may be different from the substance identified as the cause /// of the risk, but it must be consistent with it. For instance, it may be a /// more specific substance (e.g. a brand medication) or a composite product that /// includes the identified substance. It must be clinically safe to only process /// the 'code' and ignore the 'reaction.substance'. 
If a receiving system is unable /// to confirm that AllergyIntolerance.reaction.substance falls within the semantic /// scope of AllergyIntolerance.code, then the receiving system should ignore /// AllergyIntolerance.reaction.substance. pub fn substance(&self) -> Option<CodeableConcept> { if let Some(val) = self.value.get("substance") { return Some(CodeableConcept { value: Cow::Borrowed(val), }); } return None; } pub fn validate(&self) -> bool { if let Some(_val) = self._description() { if !_val.validate() { return false; } } if let Some(_val) = self._onset() { if !_val.validate()
} if let Some(_val) = self._severity() { if !_val.validate() { return false; } } if let Some(_val) = self.description() {} if let Some(_val) = self.exposure_route() { if !_val.validate() { return false; } } if let Some(_val) = self.extension() { if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) { return false; } } if let Some(_val) = self.id() {} if !self .manifestation() .into_iter() .map(|e| e.validate()) .all(|x| x == true) { return false; } if let Some(_val) = self.modifier_extension() { if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) { return false; } } if let Some(_val) = self.note() { if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) { return false; } } if let Some(_val) = self.onset() {} if let Some(_val) = self.severity() {} if let Some(_val) = self.substance() { if !_val.validate() { return false; } } return true; } } #[derive(Debug)] pub struct AllergyIntolerance_ReactionBuilder { pub(crate) value: Value, } impl AllergyIntolerance_ReactionBuilder { pub fn build(&self) -> AllergyIntolerance_Reaction { AllergyIntolerance_Reaction { value: Cow::Owned(self.value.clone()), } } pub fn with(existing: AllergyIntolerance_Reaction) -> AllergyIntolerance_ReactionBuilder { AllergyIntolerance_ReactionBuilder { value: (*existing.value).clone(), } } pub fn new(manifestation: Vec<CodeableReference>) -> AllergyIntolerance_ReactionBuilder { let mut __value: Value = json!({}); __value["manifestation"] = json!(manifestation .into_iter() .map(|e| e.value) .collect::<Vec<_>>()); return AllergyIntolerance_ReactionBuilder { value: __value }; } pub fn _description<'a>( &'a mut self, val: Element, ) -> &'a mut AllergyIntolerance_ReactionBuilder { self.value["_description"] = json!(val.value); return self; } pub fn _onset<'a>(&'a mut self, val: Element) -> &'a mut AllergyIntolerance_ReactionBuilder { self.value["_onset"] = json!(val.value); return self; } pub fn _severity<'a>(&'a mut self, val: Element) -> &'a mut 
AllergyIntolerance_ReactionBuilder { self.value["_severity"] = json!(val.value); return self; } pub fn description<'a>(&'a mut self, val: &str) -> &'a mut AllergyIntolerance_ReactionBuilder { self.value["description"] = json!(val); return self; } pub fn exposure_route<'a>( &'a mut self, val: CodeableConcept, ) -> &'a mut AllergyIntolerance_ReactionBuilder { self.value["exposureRoute"] = json!(val.value); return self; } pub fn extension<'a>( &'a mut self, val: Vec<Extension>, ) -> &'a mut AllergyIntolerance_ReactionBuilder { self.value["extension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>()); return self; } pub fn id<'a>(&'a mut self, val: &str) -> &'a mut AllergyIntolerance_ReactionBuilder { self.value["id"] = json!(val); return self; } pub fn modifier_extension<'a>( &'a mut self, val: Vec<Extension>, ) -> &'a mut AllergyIntolerance_ReactionBuilder { self.value["modifierExtension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>()); return self; } pub fn note<'a>( &'a mut self, val: Vec<Annotation>, ) -> &'a mut AllergyIntolerance_ReactionBuilder { self.value["note"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>()); return self; } pub fn onset<'a>(&'a mut self, val: &str) -> &'a mut AllergyIntolerance_ReactionBuilder { self.value["onset"] = json!(val); return self; } pub fn severity<'a>(&'a mut self, val: &str) -> &'a mut AllergyIntolerance_ReactionBuilder { self.value["severity"] = json!(val); return self; } pub fn substance<'a>( &'a mut self, val: CodeableConcept, ) -> &'a mut AllergyIntolerance_ReactionBuilder { self.value["substance"] = json!(val.value); return self; } }
{ return false; }
audit.rs
use super::Error as ModelError; use chrono::{DateTime, Utc}; use postgres::GenericConnection; use serde::{Deserialize, Serialize}; use serde_json::Value; use std::net::IpAddr; pub trait ActivityType { fn to_i32(&self) -> i32; } pub struct Activity<T: ActivityType, D: Serialize + Deserialize<'static>> { pub activity_type: T, pub client_addr: IpAddr, pub happened_time: DateTime<Utc>, pub details: D, } #[derive(Clone, Copy)] pub struct Signin; impl ActivityType for Signin { fn to_i32(&self) -> i32 { 0 } } #[derive(Debug, Serialize, Deserialize)] pub struct SigninActivityDetails { pub is_succeed: bool, } pub type SigninActivity = Activity<Signin, SigninActivityDetails>; #[derive(Clone, Copy)] pub struct ChangePassword; impl ActivityType for ChangePassword { fn
(&self) -> i32 { 1 } } #[derive(Debug, Serialize, Deserialize)] pub struct ChangePasswordActivityDetails { pub is_succeed: bool, } pub type ChangePasswordActivity = Activity<ChangePassword, ChangePasswordActivityDetails>; pub fn create<C: GenericConnection, T: ActivityType, D: Serialize + Deserialize<'static>>( pg_conn: &C, username: &str, activity: Activity<T, D>, ) -> Result<(), ModelError> { let details = serde_json::to_value(activity.details)?; let stmt = r#" INSERT INTO sso.audits(user_id, client_addr, type, details) VALUES((SELECT user_id FROM sso.accounts WHERE username = $1), $2, $3, $4) RETURNING * "#; let rows = pg_conn.query( &stmt, &[ &username, &activity.client_addr.to_string(), &activity.activity_type.to_i32(), &details, ], )?; if rows.len() != 1 { Err(ModelError::Unknown) } else { Ok(()) } } pub fn select<C: GenericConnection, T: ActivityType + Copy, D>( pg_conn: &C, username: &str, activity_type: T, happened_time: DateTime<Utc>, ) -> Result<Vec<Activity<T, D>>, ModelError> where for<'de> D: serde::Deserialize<'de> + Serialize, { let stmt = r#" SELECT happened_time, client_addr, details FROM sso.audits LEFT JOIN sso.accounts ON sso.audits.user_id = sso.accounts.user_id WHERE sso.accounts.username = $1 AND type = $2 AND happened_time > $3 "#; let rows = pg_conn.query(&stmt, &[&username, &activity_type.to_i32(), &happened_time])?; let activities = rows .iter() .map(|row| { let client_addr: String = row.get("client_addr"); let details: Value = row.get("details"); let activity = Activity { activity_type: activity_type, client_addr: client_addr.parse().unwrap(), happened_time: row.get("happened_time"), details: serde_json::from_value(details).unwrap(), }; activity }) .collect::<Vec<Activity<T, D>>>(); return Ok(activities); }
to_i32
deploy_helper.py
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2014, Jasper N. Brouwer <[email protected]> # (c) 2014, Ramon de la Fuente <[email protected]> # # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type DOCUMENTATION = ''' --- module: deploy_helper author: "Ramon de la Fuente (@ramondelafuente)" short_description: Manages some of the steps common in deploying projects. description: - The Deploy Helper manages some of the steps common in deploying software. It creates a folder structure, manages a symlink for the current release and cleans up old releases. - "Running it with the C(state=query) or C(state=present) will return the C(deploy_helper) fact. C(project_path), whatever you set in the path parameter, C(current_path), the path to the symlink that points to the active release, C(releases_path), the path to the folder to keep releases in, C(shared_path), the path to the folder to keep shared resources in, C(unfinished_filename), the file to check for to recognize unfinished builds, C(previous_release), the release the 'current' symlink is pointing to, C(previous_release_path), the full path to the 'current' symlink target, C(new_release), either the 'release' parameter or a generated timestamp, C(new_release_path), the path to the new release folder (not created by the module)." options: path: type: path required: True aliases: ['dest'] description: - the root path of the project. Alias I(dest). Returned in the C(deploy_helper.project_path) fact. state: type: str description: - the state of the project. 
C(query) will only gather facts, C(present) will create the project I(root) folder, and in it the I(releases) and I(shared) folders, C(finalize) will remove the unfinished_filename file, create a symlink to the newly deployed release and optionally clean old releases, C(clean) will remove failed & old releases, C(absent) will remove the project folder (synonymous to the M(ansible.builtin.file) module with C(state=absent)) choices: [ present, finalize, absent, clean, query ] default: present release: type: str description: - the release version that is being deployed. Defaults to a timestamp format %Y%m%d%H%M%S (i.e. '20141119223359'). This parameter is optional during C(state=present), but needs to be set explicitly for C(state=finalize). You can use the generated fact C(release={{ deploy_helper.new_release }}). releases_path: type: str description: - the name of the folder that will hold the releases. This can be relative to C(path) or absolute. Returned in the C(deploy_helper.releases_path) fact. default: releases shared_path: type: path description: - the name of the folder that will hold the shared resources. This can be relative to C(path) or absolute. If this is set to an empty string, no shared folder will be created. Returned in the C(deploy_helper.shared_path) fact. default: shared current_path: type: path description: - the name of the symlink that is created when the deploy is finalized. Used in C(finalize) and C(clean). Returned in the C(deploy_helper.current_path) fact. default: current unfinished_filename: type: str description: - the name of the file that indicates a deploy has not finished. All folders in the releases_path that contain this file will be deleted on C(state=finalize) with clean=True, or C(state=clean). This file is automatically deleted from the I(new_release_path) during C(state=finalize). default: DEPLOY_UNFINISHED clean: description: - Whether to run the clean procedure in case of C(state=finalize). 
type: bool default: 'yes' keep_releases: type: int description: - the number of old releases to keep when cleaning. Used in C(finalize) and C(clean). Any unfinished builds will be deleted first, so only correct releases will count. The current version will not count. default: 5 notes: - Facts are only returned for C(state=query) and C(state=present). If you use both, you should pass any overridden parameters to both calls, otherwise the second call will overwrite the facts of the first one. - When using C(state=clean), the releases are ordered by I(creation date). You should be able to switch to a new naming strategy without problems. - Because of the default behaviour of generating the I(new_release) fact, this module will not be idempotent unless you pass your own release name with C(release). Due to the nature of deploying software, this should not be much of a problem. extends_documentation_fragment: files ''' EXAMPLES = ''' # General explanation, starting with an example folder structure for a project: # root: # releases: # - 20140415234508 # - 20140415235146 # - 20140416082818 # # shared: # - sessions # - uploads # # current: releases/20140416082818 # The 'releases' folder holds all the available releases. A release is a complete build of the application being # deployed. This can be a clone of a repository for example, or a sync of a local folder on your filesystem. # Having timestamped folders is one way of having distinct releases, but you could choose your own strategy like # git tags or commit hashes. # # During a deploy, a new folder should be created in the releases folder and any build steps required should be # performed. Once the new build is ready, the deploy procedure is 'finalized' by replacing the 'current' symlink # with a link to this build. # # The 'shared' folder holds any resource that is shared between releases. Examples of this are web-server # session files, or files uploaded by users of your application. 
It's quite common to have symlinks from a release # folder pointing to a shared/subfolder, and creating these links would be automated as part of the build steps. # # The 'current' symlink points to one of the releases. Probably the latest one, unless a deploy is in progress. # The web-server's root for the project will go through this symlink, so the 'downtime' when switching to a new # release is reduced to the time it takes to switch the link. # # To distinguish between successful builds and unfinished ones, a file can be placed in the folder of the release # that is currently in progress. The existence of this file will mark it as unfinished, and allow an automated # procedure to remove it during cleanup. # Typical usage - name: Initialize the deploy root and gather facts community.general.deploy_helper: path: /path/to/root - name: Clone the project to the new release folder ansible.builtin.git: repo: ansible.builtin.git://foosball.example.org/path/to/repo.git dest: '{{ deploy_helper.new_release_path }}' version: v1.1.1 - name: Add an unfinished file, to allow cleanup on successful finalize ansible.builtin.file: path: '{{ deploy_helper.new_release_path }}/{{ deploy_helper.unfinished_filename }}' state: touch - name: Perform some build steps, like running your dependency manager for example composer: command: install working_dir: '{{ deploy_helper.new_release_path }}' - name: Create some folders in the shared folder ansible.builtin.file: path: '{{ deploy_helper.shared_path }}/{{ item }}' state: directory with_items: - sessions - uploads - name: Add symlinks from the new release to the shared folder ansible.builtin.file: path: '{{ deploy_helper.new_release_path }}/{{ item.path }}' src: '{{ deploy_helper.shared_path }}/{{ item.src }}' state: link with_items: - path: app/sessions src: sessions - path: web/uploads src: uploads - name: Finalize the deploy, removing the unfinished file and switching the symlink community.general.deploy_helper: path: /path/to/root 
release: '{{ deploy_helper.new_release }}' state: finalize # Retrieving facts before running a deploy - name: Run 'state=query' to gather facts without changing anything community.general.deploy_helper: path: /path/to/root state: query # Remember to set the 'release' parameter when you actually call 'state=present' later - name: Initialize the deploy root community.general.deploy_helper: path: /path/to/root release: '{{ deploy_helper.new_release }}' state: present # all paths can be absolute or relative (to the 'path' parameter) - community.general.deploy_helper: path: /path/to/root releases_path: /var/www/project/releases shared_path: /var/www/shared current_path: /var/www/active # Using your own naming strategy for releases (a version tag in this case): - community.general.deploy_helper: path: /path/to/root release: v1.1.1 state: present - community.general.deploy_helper: path: /path/to/root release: '{{ deploy_helper.new_release }}' state: finalize # Using a different unfinished_filename: - community.general.deploy_helper: path: /path/to/root unfinished_filename: README.md release: '{{ deploy_helper.new_release }}' state: finalize # Postponing the cleanup of older builds: - community.general.deploy_helper: path: /path/to/root release: '{{ deploy_helper.new_release }}' state: finalize clean: False - community.general.deploy_helper: path: /path/to/root state: clean # Or running the cleanup ahead of the new deploy - community.general.deploy_helper: path: /path/to/root state: clean - community.general.deploy_helper: path: /path/to/root state: present # Keeping more old releases: - community.general.deploy_helper: path: /path/to/root release: '{{ deploy_helper.new_release }}' state: finalize keep_releases: 10 # Or, if you use 'clean=false' on finalize: - community.general.deploy_helper: path: /path/to/root state: clean keep_releases: 10 # Removing the entire project root folder - community.general.deploy_helper: path: /path/to/root state: absent # Debugging the facts 
returned by the module - community.general.deploy_helper: path: /path/to/root - ansible.builtin.debug: var: deploy_helper ''' import os import shutil import time import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.common.text.converters import to_native class DeployHelper(object): def __init__(self, module): self.module = module self.file_args = module.load_file_common_arguments(module.params) self.clean = module.params['clean'] self.current_path = module.params['current_path'] self.keep_releases = module.params['keep_releases'] self.path = module.params['path'] self.release = module.params['release'] self.releases_path = module.params['releases_path'] self.shared_path = module.params['shared_path'] self.state = module.params['state'] self.unfinished_filename = module.params['unfinished_filename'] def gather_facts(self): current_path = os.path.join(self.path, self.current_path) releases_path = os.path.join(self.path, self.releases_path) if self.shared_path: shared_path = os.path.join(self.path, self.shared_path) else: shared_path = None previous_release, previous_release_path = self._get_last_release(current_path) if not self.release and (self.state == 'query' or self.state == 'present'): self.release = time.strftime("%Y%m%d%H%M%S") if self.release: new_release_path = os.path.join(releases_path, self.release) else: new_release_path = None return { 'project_path': self.path, 'current_path': current_path, 'releases_path': releases_path, 'shared_path': shared_path, 'previous_release': previous_release, 'previous_release_path': previous_release_path, 'new_release': self.release, 'new_release_path': new_release_path, 'unfinished_filename': self.unfinished_filename } def delete_path(self, path): if not os.path.lexists(path): return False if not os.path.isdir(path): self.module.fail_json(msg="%s exists but is not a directory" % path) if not self.module.check_mode: try: shutil.rmtree(path, ignore_errors=False) except Exception as 
e: self.module.fail_json(msg="rmtree failed: %s" % to_native(e), exception=traceback.format_exc()) return True def create_path(self, path): changed = False if not os.path.lexists(path): changed = True if not self.module.check_mode: os.makedirs(path) elif not os.path.isdir(path): self.module.fail_json(msg="%s exists but is not a directory" % path) changed += self.module.set_directory_attributes_if_different(self._get_file_args(path), changed) return changed def check_link(self, path): if os.path.lexists(path): if not os.path.islink(path): self.module.fail_json(msg="%s exists but is not a symbolic link" % path) def create_link(self, source, link_name): if os.path.islink(link_name): norm_link = os.path.normpath(os.path.realpath(link_name)) norm_source = os.path.normpath(os.path.realpath(source)) if norm_link == norm_source: changed = False else: changed = True if not self.module.check_mode: if not os.path.lexists(source): self.module.fail_json(msg="the symlink target %s doesn't exists" % source) tmp_link_name = link_name + '.' + self.unfinished_filename if os.path.islink(tmp_link_name): os.unlink(tmp_link_name) os.symlink(source, tmp_link_name) os.rename(tmp_link_name, link_name) else: changed = True if not self.module.check_mode: os.symlink(source, link_name) return changed def remove_unfinished_file(self, new_release_path): changed = False unfinished_file_path = os.path.join(new_release_path, self.unfinished_filename) if os.path.lexists(unfinished_file_path): changed = True if not self.module.check_mode: os.remove(unfinished_file_path) return changed def
(self, releases_path): changes = 0 for release in os.listdir(releases_path): if os.path.isfile(os.path.join(releases_path, release, self.unfinished_filename)): if self.module.check_mode: changes += 1 else: changes += self.delete_path(os.path.join(releases_path, release)) return changes def remove_unfinished_link(self, path): changed = False if not self.release: return changed tmp_link_name = os.path.join(path, self.release + '.' + self.unfinished_filename) if not self.module.check_mode and os.path.exists(tmp_link_name): changed = True os.remove(tmp_link_name) return changed def cleanup(self, releases_path, reserve_version): changes = 0 if os.path.lexists(releases_path): releases = [f for f in os.listdir(releases_path) if os.path.isdir(os.path.join(releases_path, f))] try: releases.remove(reserve_version) except ValueError: pass if not self.module.check_mode: releases.sort(key=lambda x: os.path.getctime(os.path.join(releases_path, x)), reverse=True) for release in releases[self.keep_releases:]: changes += self.delete_path(os.path.join(releases_path, release)) elif len(releases) > self.keep_releases: changes += (len(releases) - self.keep_releases) return changes def _get_file_args(self, path): file_args = self.file_args.copy() file_args['path'] = path return file_args def _get_last_release(self, current_path): previous_release = None previous_release_path = None if os.path.lexists(current_path): previous_release_path = os.path.realpath(current_path) previous_release = os.path.basename(previous_release_path) return previous_release, previous_release_path def main(): module = AnsibleModule( argument_spec=dict( path=dict(aliases=['dest'], required=True, type='path'), release=dict(type='str'), releases_path=dict(type='str', default='releases'), shared_path=dict(type='path', default='shared'), current_path=dict(type='path', default='current'), keep_releases=dict(type='int', default=5), clean=dict(type='bool', default=True), unfinished_filename=dict(type='str', 
default='DEPLOY_UNFINISHED'), state=dict(choices=['present', 'absent', 'clean', 'finalize', 'query'], default='present') ), required_if=[ ('state', 'finalize', ['release']), ], add_file_common_args=True, supports_check_mode=True ) deploy_helper = DeployHelper(module) facts = deploy_helper.gather_facts() result = { 'state': deploy_helper.state } changes = 0 if deploy_helper.state == 'query': result['ansible_facts'] = {'deploy_helper': facts} elif deploy_helper.state == 'present': deploy_helper.check_link(facts['current_path']) changes += deploy_helper.create_path(facts['project_path']) changes += deploy_helper.create_path(facts['releases_path']) if deploy_helper.shared_path: changes += deploy_helper.create_path(facts['shared_path']) result['ansible_facts'] = {'deploy_helper': facts} elif deploy_helper.state == 'finalize': if deploy_helper.keep_releases <= 0: module.fail_json(msg="'keep_releases' should be at least 1") changes += deploy_helper.remove_unfinished_file(facts['new_release_path']) changes += deploy_helper.create_link(facts['new_release_path'], facts['current_path']) if deploy_helper.clean: changes += deploy_helper.remove_unfinished_link(facts['project_path']) changes += deploy_helper.remove_unfinished_builds(facts['releases_path']) changes += deploy_helper.cleanup(facts['releases_path'], facts['new_release']) elif deploy_helper.state == 'clean': changes += deploy_helper.remove_unfinished_link(facts['project_path']) changes += deploy_helper.remove_unfinished_builds(facts['releases_path']) changes += deploy_helper.cleanup(facts['releases_path'], facts['new_release']) elif deploy_helper.state == 'absent': # destroy the facts result['ansible_facts'] = {'deploy_helper': []} changes += deploy_helper.delete_path(facts['project_path']) if changes > 0: result['changed'] = True else: result['changed'] = False module.exit_json(**result) if __name__ == '__main__': main()
remove_unfinished_builds
registry.rs
use std::collections::{BTreeMap, HashSet}; use std::fs::File; use std::io::{self, BufRead}; use std::iter::repeat; use std::path::PathBuf; use std::str; use std::time::Duration; use std::{cmp, env}; use anyhow::{bail, format_err}; use cargo_util::paths; use crates_io::{self, NewCrate, NewCrateDependency, Registry}; use curl::easy::{Easy, InfoType, SslOpt, SslVersion}; use log::{log, Level}; use percent_encoding::{percent_encode, NON_ALPHANUMERIC}; use crate::core::dependency::DepKind; use crate::core::manifest::ManifestMetadata; use crate::core::resolver::CliFeatures; use crate::core::source::Source; use crate::core::{Package, SourceId, Workspace}; use crate::ops; use crate::sources::{RegistrySource, SourceConfigMap, CRATES_IO_REGISTRY}; use crate::util::config::{self, Config, SslVersionConfig, SslVersionConfigRange}; use crate::util::errors::{CargoResult, CargoResultExt}; use crate::util::important_paths::find_root_manifest_for_wd; use crate::util::validate_package_name; use crate::util::IntoUrl; use crate::{drop_print, drop_println, version}; mod auth; /// Registry settings loaded from config files. /// /// This is loaded based on the `--registry` flag and the config settings. #[derive(Debug)] pub struct RegistryConfig { /// The index URL. If `None`, use crates.io. pub index: Option<String>, /// The authentication token. pub token: Option<String>, /// Process used for fetching a token. 
pub credential_process: Option<(PathBuf, Vec<String>)>, } pub struct PublishOpts<'cfg> { pub config: &'cfg Config, pub token: Option<String>, pub index: Option<String>, pub verify: bool, pub allow_dirty: bool, pub jobs: Option<u32>, pub targets: Vec<String>, pub dry_run: bool, pub registry: Option<String>, pub cli_features: CliFeatures, } pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> { let pkg = ws.current()?; let mut publish_registry = opts.registry.clone(); if let Some(ref allowed_registries) = *pkg.publish() { if publish_registry.is_none() && allowed_registries.len() == 1 { // If there is only one allowed registry, push to that one directly, // even though there is no registry specified in the command. let default_registry = &allowed_registries[0]; if default_registry != CRATES_IO_REGISTRY { // Don't change the registry for crates.io and don't warn the user. // crates.io will be defaulted even without this. opts.config.shell().note(&format!( "Found `{}` as only allowed registry. Publishing to it automatically.", default_registry ))?; publish_registry = Some(default_registry.clone()); } } let reg_name = publish_registry .clone() .unwrap_or_else(|| CRATES_IO_REGISTRY.to_string()); if !allowed_registries.contains(&reg_name) { bail!( "`{}` cannot be published.\n\ The registry `{}` is not listed in the `publish` value in Cargo.toml.", pkg.name(), reg_name ); } } let (mut registry, _reg_cfg, reg_id) = registry( opts.config, opts.token.clone(), opts.index.clone(), publish_registry, true, !opts.dry_run, )?; verify_dependencies(pkg, &registry, reg_id)?; // Prepare a tarball, with a non-suppressible warning if metadata // is missing since this is being put online. let tarball = ops::package( ws, &ops::PackageOpts { config: opts.config, verify: opts.verify, list: false, check_metadata: true, allow_dirty: opts.allow_dirty, targets: opts.targets.clone(), jobs: opts.jobs, cli_features: opts.cli_features.clone(), }, )? 
.unwrap(); // Upload said tarball to the specified destination opts.config .shell() .status("Uploading", pkg.package_id().to_string())?; transmit( opts.config, pkg, tarball.file(), &mut registry, reg_id, opts.dry_run, )?; Ok(()) } fn
( pkg: &Package, registry: &Registry, registry_src: SourceId, ) -> CargoResult<()> { for dep in pkg.dependencies().iter() { if dep.source_id().is_path() || dep.source_id().is_git() { if !dep.specified_req() { if !dep.is_transitive() { // dev-dependencies will be stripped in TomlManifest::prepare_for_publish continue; } let which = if dep.source_id().is_path() { "path" } else { "git" }; let dep_version_source = dep.registry_id().map_or_else( || "crates.io".to_string(), |registry_id| registry_id.display_registry_name(), ); bail!( "all dependencies must have a version specified when publishing.\n\ dependency `{}` does not specify a version\n\ Note: The published dependency will use the version from {},\n\ the `{}` specification will be removed from the dependency declaration.", dep.package_name(), dep_version_source, which, ) } // TomlManifest::prepare_for_publish will rewrite the dependency // to be just the `version` field. } else if dep.source_id() != registry_src { if !dep.source_id().is_registry() { // Consider making SourceId::kind a public type that we can // exhaustively match on. Using match can help ensure that // every kind is properly handled. panic!("unexpected source kind for dependency {:?}", dep); } // Block requests to send to crates.io with alt-registry deps. // This extra hostname check is mostly to assist with testing, // but also prevents someone using `--index` to specify // something that points to crates.io. if registry_src.is_default_registry() || registry.host_is_crates_io() { bail!("crates cannot be published to crates.io with dependencies sourced from other\n\ registries. 
`{}` needs to be published to crates.io before publishing this crate.\n\ (crate `{}` is pulled from {})", dep.package_name(), dep.package_name(), dep.source_id()); } } } Ok(()) } fn transmit( config: &Config, pkg: &Package, tarball: &File, registry: &mut Registry, registry_id: SourceId, dry_run: bool, ) -> CargoResult<()> { let deps = pkg .dependencies() .iter() .filter(|dep| { // Skip dev-dependency without version. dep.is_transitive() || dep.specified_req() }) .map(|dep| { // If the dependency is from a different registry, then include the // registry in the dependency. let dep_registry_id = match dep.registry_id() { Some(id) => id, None => SourceId::crates_io(config)?, }; // In the index and Web API, None means "from the same registry" // whereas in Cargo.toml, it means "from crates.io". let dep_registry = if dep_registry_id != registry_id { Some(dep_registry_id.url().to_string()) } else { None }; Ok(NewCrateDependency { optional: dep.is_optional(), default_features: dep.uses_default_features(), name: dep.package_name().to_string(), features: dep.features().iter().map(|s| s.to_string()).collect(), version_req: dep.version_req().to_string(), target: dep.platform().map(|s| s.to_string()), kind: match dep.kind() { DepKind::Normal => "normal", DepKind::Build => "build", DepKind::Development => "dev", } .to_string(), registry: dep_registry, explicit_name_in_toml: dep.explicit_name_in_toml().map(|s| s.to_string()), }) }) .collect::<CargoResult<Vec<NewCrateDependency>>>()?; let manifest = pkg.manifest(); let ManifestMetadata { ref authors, ref description, ref homepage, ref documentation, ref keywords, ref readme, ref repository, ref license, ref license_file, ref categories, ref badges, ref links, } = *manifest.metadata(); let readme_content = readme .as_ref() .map(|readme| { paths::read(&pkg.root().join(readme)) .chain_err(|| format!("failed to read `readme` file for package `{}`", pkg)) }) .transpose()?; if let Some(ref file) = *license_file { if 
!pkg.root().join(file).exists() { bail!("the license file `{}` does not exist", file) } } // Do not upload if performing a dry run if dry_run { config.shell().warn("aborting upload due to dry run")?; return Ok(()); } let string_features = match manifest.original().features() { Some(features) => features .iter() .map(|(feat, values)| { ( feat.to_string(), values.iter().map(|fv| fv.to_string()).collect(), ) }) .collect::<BTreeMap<String, Vec<String>>>(), None => BTreeMap::new(), }; let warnings = registry .publish( &NewCrate { name: pkg.name().to_string(), vers: pkg.version().to_string(), deps, features: string_features, authors: authors.clone(), description: description.clone(), homepage: homepage.clone(), documentation: documentation.clone(), keywords: keywords.clone(), categories: categories.clone(), readme: readme_content, readme_file: readme.clone(), repository: repository.clone(), license: license.clone(), license_file: license_file.clone(), badges: badges.clone(), links: links.clone(), v: None, }, tarball, ) .chain_err(|| format!("failed to publish to registry at {}", registry.host()))?; if !warnings.invalid_categories.is_empty() { let msg = format!( "the following are not valid category slugs and were \ ignored: {}. Please see https://crates.io/category_slugs \ for the list of all category slugs. \ ", warnings.invalid_categories.join(", ") ); config.shell().warn(&msg)?; } if !warnings.invalid_badges.is_empty() { let msg = format!( "the following are not valid badges and were ignored: {}. \ Either the badge type specified is unknown or a required \ attribute is missing. Please see \ https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata \ for valid badge types and their required attributes.", warnings.invalid_badges.join(", ") ); config.shell().warn(&msg)?; } if !warnings.other.is_empty() { for msg in warnings.other { config.shell().warn(&msg)?; } } Ok(()) } /// Returns the index and token from the config file for the given registry. 
/// /// `registry` is typically the registry specified on the command-line. If /// `None`, `index` is set to `None` to indicate it should use crates.io. pub fn registry_configuration( config: &Config, registry: Option<&str>, ) -> CargoResult<RegistryConfig> { let err_both = |token_key: &str, proc_key: &str| { Err(format_err!( "both `{TOKEN_KEY}` and `{PROC_KEY}` \ were specified in the config\n\ Only one of these values may be set, remove one or the other to proceed.", TOKEN_KEY = token_key, PROC_KEY = proc_key, )) }; // `registry.default` is handled in command-line parsing. let (index, token, process) = match registry { Some(registry) => { validate_package_name(registry, "registry name", "")?; let index = Some(config.get_registry_index(registry)?.to_string()); let token_key = format!("registries.{}.token", registry); let token = config.get_string(&token_key)?.map(|p| p.val); let process = if config.cli_unstable().credential_process { let mut proc_key = format!("registries.{}.credential-process", registry); let mut process = config.get::<Option<config::PathAndArgs>>(&proc_key)?; if process.is_none() && token.is_none() { // This explicitly ignores the global credential-process if // the token is set, as that is "more specific". proc_key = String::from("registry.credential-process"); process = config.get::<Option<config::PathAndArgs>>(&proc_key)?; } else if process.is_some() && token.is_some() { return err_both(&token_key, &proc_key); } process } else { None }; (index, token, process) } None => { // Use crates.io default. 
config.check_registry_index_not_set()?; let token = config.get_string("registry.token")?.map(|p| p.val); let process = if config.cli_unstable().credential_process { let process = config.get::<Option<config::PathAndArgs>>("registry.credential-process")?; if token.is_some() && process.is_some() { return err_both("registry.token", "registry.credential-process"); } process } else { None }; (None, token, process) } }; let credential_process = process.map(|process| (process.path.resolve_program(config), process.args)); Ok(RegistryConfig { index, token, credential_process, }) } /// Returns the `Registry` and `Source` based on command-line and config settings. /// /// * `token`: The token from the command-line. If not set, uses the token /// from the config. /// * `index`: The index URL from the command-line. This is ignored if /// `registry` is set. /// * `registry`: The registry name from the command-line. If neither /// `registry`, or `index` are set, then uses `crates-io`, honoring /// `[source]` replacement if defined. /// * `force_update`: If `true`, forces the index to be updated. /// * `validate_token`: If `true`, the token must be set. fn registry( config: &Config, token: Option<String>, index: Option<String>, registry: Option<String>, force_update: bool, validate_token: bool, ) -> CargoResult<(Registry, RegistryConfig, SourceId)> { if index.is_some() && registry.is_some() { // Otherwise we would silently ignore one or the other. 
bail!("both `--index` and `--registry` should not be set at the same time"); } // Parse all configuration options let reg_cfg = registry_configuration(config, registry.as_deref())?; let opt_index = reg_cfg.index.as_ref().or_else(|| index.as_ref()); let sid = get_source_id(config, opt_index, registry.as_ref())?; if !sid.is_remote_registry() { bail!( "{} does not support API commands.\n\ Check for a source-replacement in .cargo/config.", sid ); } let api_host = { let _lock = config.acquire_package_cache_lock()?; let mut src = RegistrySource::remote(sid, &HashSet::new(), config); // Only update the index if the config is not available or `force` is set. let cfg = src.config(); let mut updated_cfg = || { src.update() .chain_err(|| format!("failed to update {}", sid))?; src.config() }; let cfg = if force_update { updated_cfg()? } else { cfg.or_else(|_| updated_cfg())? }; cfg.and_then(|cfg| cfg.api) .ok_or_else(|| format_err!("{} does not support API commands", sid))? }; let token = if validate_token { if index.is_some() { if token.is_none() { bail!("command-line argument --index requires --token to be specified"); } token } else { // Check `is_default_registry` so that the crates.io index can // change config.json's "api" value, and this won't affect most // people. It will affect those using source replacement, but // hopefully that's a relatively small set of users. 
if token.is_none() && reg_cfg.token.is_some() && registry.is_none() && !sid.is_default_registry() && !crates_io::is_url_crates_io(&api_host) { config.shell().warn( "using `registry.token` config value with source \ replacement is deprecated\n\ This may become a hard error in the future; \ see <https://github.com/rust-lang/cargo/issues/xxx>.\n\ Use the --token command-line flag to remove this warning.", )?; reg_cfg.token.clone() } else { let token = auth::auth_token( config, token.as_deref(), reg_cfg.token.as_deref(), reg_cfg.credential_process.as_ref(), registry.as_deref(), &api_host, )?; log::debug!("found token {:?}", token); Some(token) } } } else { None }; let handle = http_handle(config)?; Ok((Registry::new_handle(api_host, token, handle), reg_cfg, sid)) } /// Creates a new HTTP handle with appropriate global configuration for cargo. pub fn http_handle(config: &Config) -> CargoResult<Easy> { let (mut handle, timeout) = http_handle_and_timeout(config)?; timeout.configure(&mut handle)?; Ok(handle) } pub fn http_handle_and_timeout(config: &Config) -> CargoResult<(Easy, HttpTimeout)> { if config.frozen() { bail!( "attempting to make an HTTP request, but --frozen was \ specified" ) } if !config.network_allowed() { bail!("can't make HTTP request in the offline mode") } // The timeout option for libcurl by default times out the entire transfer, // but we probably don't want this. Instead we only set timeouts for the // connect phase as well as a "low speed" timeout so if we don't receive // many bytes in a large-ish period of time then we time out. let mut handle = Easy::new(); let timeout = configure_http_handle(config, &mut handle)?; Ok((handle, timeout)) } pub fn needs_custom_http_transport(config: &Config) -> CargoResult<bool> { Ok(http_proxy_exists(config)? || *config.http_config()? 
!= Default::default() || env::var_os("HTTP_TIMEOUT").is_some()) } /// Configure a libcurl http handle with the defaults options for Cargo pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<HttpTimeout> { let http = config.http_config()?; if let Some(proxy) = http_proxy(config)? { handle.proxy(&proxy)?; } if let Some(cainfo) = &http.cainfo { let cainfo = cainfo.resolve_path(config); handle.cainfo(&cainfo)?; } if let Some(check) = http.check_revoke { handle.ssl_options(SslOpt::new().no_revoke(!check))?; } if let Some(user_agent) = &http.user_agent { handle.useragent(user_agent)?; } else { handle.useragent(&version().to_string())?; } fn to_ssl_version(s: &str) -> CargoResult<SslVersion> { let version = match s { "default" => SslVersion::Default, "tlsv1" => SslVersion::Tlsv1, "tlsv1.0" => SslVersion::Tlsv10, "tlsv1.1" => SslVersion::Tlsv11, "tlsv1.2" => SslVersion::Tlsv12, "tlsv1.3" => SslVersion::Tlsv13, _ => bail!( "Invalid ssl version `{}`,\ choose from 'default', 'tlsv1', 'tlsv1.0', 'tlsv1.1', 'tlsv1.2', 'tlsv1.3'.", s ), }; Ok(version) } if let Some(ssl_version) = &http.ssl_version { match ssl_version { SslVersionConfig::Single(s) => { let version = to_ssl_version(s.as_str())?; handle.ssl_version(version)?; } SslVersionConfig::Range(SslVersionConfigRange { min, max }) => { let min_version = min .as_ref() .map_or(Ok(SslVersion::Default), |s| to_ssl_version(s))?; let max_version = max .as_ref() .map_or(Ok(SslVersion::Default), |s| to_ssl_version(s))?; handle.ssl_min_max_version(min_version, max_version)?; } } } if let Some(true) = http.debug { handle.verbose(true)?; log::debug!("{:#?}", curl::Version::get()); handle.debug_function(|kind, data| { let (prefix, level) = match kind { InfoType::Text => ("*", Level::Debug), InfoType::HeaderIn => ("<", Level::Debug), InfoType::HeaderOut => (">", Level::Debug), InfoType::DataIn => ("{", Level::Trace), InfoType::DataOut => ("}", Level::Trace), InfoType::SslDataIn | InfoType::SslDataOut => return, 
_ => return, }; match str::from_utf8(data) { Ok(s) => { for mut line in s.lines() { if line.starts_with("Authorization:") { line = "Authorization: [REDACTED]"; } else if line[..line.len().min(10)].eq_ignore_ascii_case("set-cookie") { line = "set-cookie: [REDACTED]"; } log!(level, "http-debug: {} {}", prefix, line); } } Err(_) => { log!( level, "http-debug: {} ({} bytes of data)", prefix, data.len() ); } } })?; } HttpTimeout::new(config) } #[must_use] pub struct HttpTimeout { pub dur: Duration, pub low_speed_limit: u32, } impl HttpTimeout { pub fn new(config: &Config) -> CargoResult<HttpTimeout> { let config = config.http_config()?; let low_speed_limit = config.low_speed_limit.unwrap_or(10); let seconds = config .timeout .or_else(|| env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok())) .unwrap_or(30); Ok(HttpTimeout { dur: Duration::new(seconds, 0), low_speed_limit, }) } pub fn configure(&self, handle: &mut Easy) -> CargoResult<()> { // The timeout option for libcurl by default times out the entire // transfer, but we probably don't want this. Instead we only set // timeouts for the connect phase as well as a "low speed" timeout so // if we don't receive many bytes in a large-ish period of time then we // time out. handle.connect_timeout(self.dur)?; handle.low_speed_time(self.dur)?; handle.low_speed_limit(self.low_speed_limit)?; Ok(()) } } /// Finds an explicit HTTP proxy if one is available. /// /// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified /// via environment variables are picked up by libcurl. fn http_proxy(config: &Config) -> CargoResult<Option<String>> { let http = config.http_config()?; if let Some(s) = &http.proxy { return Ok(Some(s.clone())); } if let Ok(cfg) = git2::Config::open_default() { if let Ok(s) = cfg.get_string("http.proxy") { return Ok(Some(s)); } } Ok(None) } /// Determine if an http proxy exists. 
/// /// Checks the following for existence, in order: /// /// * cargo's `http.proxy` /// * git's `http.proxy` /// * `http_proxy` env var /// * `HTTP_PROXY` env var /// * `https_proxy` env var /// * `HTTPS_PROXY` env var fn http_proxy_exists(config: &Config) -> CargoResult<bool> { if http_proxy(config)?.is_some() { Ok(true) } else { Ok(["http_proxy", "HTTP_PROXY", "https_proxy", "HTTPS_PROXY"] .iter() .any(|v| env::var(v).is_ok())) } } pub fn registry_login( config: &Config, token: Option<String>, reg: Option<String>, ) -> CargoResult<()> { let (registry, reg_cfg, _) = registry(config, token.clone(), None, reg.clone(), false, false)?; let token = match token { Some(token) => token, None => { drop_println!( config, "please paste the API Token found on {}/me below", registry.host() ); let mut line = String::new(); let input = io::stdin(); input .lock() .read_line(&mut line) .chain_err(|| "failed to read stdin")?; // Automatically remove `cargo login` from an inputted token to // allow direct pastes from `registry.host()`/me. 
line.replace("cargo login", "").trim().to_string() } }; if let Some(old_token) = &reg_cfg.token { if old_token == &token { config.shell().status("Login", "already logged in")?; return Ok(()); } } auth::login( config, token, reg_cfg.credential_process.as_ref(), reg.as_deref(), registry.host(), )?; config.shell().status( "Login", format!( "token for `{}` saved", reg.as_ref().map_or("crates.io", String::as_str) ), )?; Ok(()) } pub fn registry_logout(config: &Config, reg: Option<String>) -> CargoResult<()> { let (registry, reg_cfg, _) = registry(config, None, None, reg.clone(), false, false)?; let reg_name = reg.as_deref().unwrap_or("crates.io"); if reg_cfg.credential_process.is_none() && reg_cfg.token.is_none() { config.shell().status( "Logout", format!("not currently logged in to `{}`", reg_name), )?; return Ok(()); } auth::logout( config, reg_cfg.credential_process.as_ref(), reg.as_deref(), registry.host(), )?; config.shell().status( "Logout", format!( "token for `{}` has been removed from local storage", reg_name ), )?; Ok(()) } pub struct OwnersOptions { pub krate: Option<String>, pub token: Option<String>, pub index: Option<String>, pub to_add: Option<Vec<String>>, pub to_remove: Option<Vec<String>>, pub list: bool, pub registry: Option<String>, } pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { let name = match opts.krate { Some(ref name) => name.clone(), None => { let manifest_path = find_root_manifest_for_wd(config.cwd())?; let ws = Workspace::new(&manifest_path, config)?; ws.current()?.package_id().name().to_string() } }; let (mut registry, _, _) = registry( config, opts.token.clone(), opts.index.clone(), opts.registry.clone(), true, true, )?; if let Some(ref v) = opts.to_add { let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>(); let msg = registry.add_owners(&name, &v).chain_err(|| { format!( "failed to invite owners to crate `{}` on registry at {}", name, registry.host() ) })?; config.shell().status("Owner", msg)?; } if let 
Some(ref v) = opts.to_remove { let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>(); config .shell() .status("Owner", format!("removing {:?} from crate {}", v, name))?; registry.remove_owners(&name, &v).chain_err(|| { format!( "failed to remove owners from crate `{}` on registry at {}", name, registry.host() ) })?; } if opts.list { let owners = registry.list_owners(&name).chain_err(|| { format!( "failed to list owners of crate `{}` on registry at {}", name, registry.host() ) })?; for owner in owners.iter() { drop_print!(config, "{}", owner.login); match (owner.name.as_ref(), owner.email.as_ref()) { (Some(name), Some(email)) => drop_println!(config, " ({} <{}>)", name, email), (Some(s), None) | (None, Some(s)) => drop_println!(config, " ({})", s), (None, None) => drop_println!(config), } } } Ok(()) } pub fn yank( config: &Config, krate: Option<String>, version: Option<String>, token: Option<String>, index: Option<String>, undo: bool, reg: Option<String>, ) -> CargoResult<()> { let name = match krate { Some(name) => name, None => { let manifest_path = find_root_manifest_for_wd(config.cwd())?; let ws = Workspace::new(&manifest_path, config)?; ws.current()?.package_id().name().to_string() } }; let version = match version { Some(v) => v, None => bail!("a version must be specified to yank"), }; let (mut registry, _, _) = registry(config, token, index, reg, true, true)?; if undo { config .shell() .status("Unyank", format!("{}:{}", name, version))?; registry.unyank(&name, &version).chain_err(|| { format!( "failed to undo a yank from the registry at {}", registry.host() ) })?; } else { config .shell() .status("Yank", format!("{}:{}", name, version))?; registry .yank(&name, &version) .chain_err(|| format!("failed to yank from the registry at {}", registry.host()))?; } Ok(()) } /// Gets the SourceId for an index or registry setting. /// /// The `index` and `reg` values are from the command-line or config settings. /// If both are None, returns the source for crates.io. 
fn get_source_id( config: &Config, index: Option<&String>, reg: Option<&String>, ) -> CargoResult<SourceId> { match (reg, index) { (Some(r), _) => SourceId::alt_registry(config, r), (_, Some(i)) => SourceId::for_registry(&i.into_url()?), _ => { let map = SourceConfigMap::new(config)?; let src = map.load(SourceId::crates_io(config)?, &HashSet::new())?; Ok(src.replaced_source_id()) } } } pub fn search( query: &str, config: &Config, index: Option<String>, limit: u32, reg: Option<String>, ) -> CargoResult<()> { fn truncate_with_ellipsis(s: &str, max_width: usize) -> String { // We should truncate at grapheme-boundary and compute character-widths, // yet the dependencies on unicode-segmentation and unicode-width are // not worth it. let mut chars = s.chars(); let mut prefix = (&mut chars).take(max_width - 1).collect::<String>(); if chars.next().is_some() { prefix.push('…'); } prefix } let (mut registry, _, source_id) = registry(config, None, index, reg, false, false)?; let (crates, total_crates) = registry.search(query, limit).chain_err(|| { format!( "failed to retrieve search results from the registry at {}", registry.host() ) })?; let names = crates .iter() .map(|krate| format!("{} = \"{}\"", krate.name, krate.max_version)) .collect::<Vec<String>>(); let description_margin = names.iter().map(|s| s.len() + 4).max().unwrap_or_default(); let description_length = cmp::max(80, 128 - description_margin); let descriptions = crates.iter().map(|krate| { krate .description .as_ref() .map(|desc| truncate_with_ellipsis(&desc.replace("\n", " "), description_length)) }); for (name, description) in names.into_iter().zip(descriptions) { let line = match description { Some(desc) => { let space = repeat(' ') .take(description_margin - name.len()) .collect::<String>(); name + &space + "# " + &desc } None => name, }; drop_println!(config, "{}", line); } let search_max_limit = 100; if total_crates > limit && limit < search_max_limit { drop_println!( config, "... 
and {} crates more (use --limit N to see more)", total_crates - limit ); } else if total_crates > limit && limit >= search_max_limit { let extra = if source_id.is_default_registry() { format!( " (go to https://crates.io/search?q={} to see more)", percent_encode(query.as_bytes(), NON_ALPHANUMERIC) ) } else { String::new() }; drop_println!( config, "... and {} crates more{}", total_crates - limit, extra ); } Ok(()) }
verify_dependencies
count_request_builder.go
package count import ( i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go" i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459 "github.com/microsoftgraph/msgraph-beta-sdk-go/models/odataerrors" ) // CountRequestBuilder provides operations to count the resources in the collection. type CountRequestBuilder struct { // Path parameters for the request pathParameters map[string]string // The request adapter to use to execute the requests. requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter // Url template to use to build the URL for the current request builder urlTemplate string } // CountRequestBuilderGetRequestConfiguration configuration for the request such as headers, query parameters, and middleware options. type CountRequestBuilderGetRequestConfiguration struct { // Request headers Headers map[string]string // Request options Options []i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestOption } // NewCountRequestBuilderInternal instantiates a new CountRequestBuilder and sets the default values. func NewCountRequestBuilderInternal(pathParameters map[string]string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*CountRequestBuilder) { m := &CountRequestBuilder{ } m.urlTemplate = "{+baseurl}/directory/recommendations/$count"; urlTplParams := make(map[string]string) for idx, item := range pathParameters { urlTplParams[idx] = item } m.pathParameters = urlTplParams; m.requestAdapter = requestAdapter; return m } // NewCountRequestBuilder instantiates a new CountRequestBuilder and sets the default values. func NewCountRequestBuilder(rawUrl string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*CountRequestBuilder)
// CreateGetRequestInformation get the number of the resource func (m *CountRequestBuilder) CreateGetRequestInformation()(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) { return m.CreateGetRequestInformationWithRequestConfiguration(nil); } // CreateGetRequestInformationWithRequestConfiguration get the number of the resource func (m *CountRequestBuilder) CreateGetRequestInformationWithRequestConfiguration(requestConfiguration *CountRequestBuilderGetRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) { requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation() requestInfo.UrlTemplate = m.urlTemplate requestInfo.PathParameters = m.pathParameters requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.GET if requestConfiguration != nil { requestInfo.AddRequestHeaders(requestConfiguration.Headers) requestInfo.AddRequestOptions(requestConfiguration.Options) } return requestInfo, nil } // Get get the number of the resource func (m *CountRequestBuilder) Get()(*int32, error) { return m.GetWithRequestConfigurationAndResponseHandler(nil, nil); } // GetWithRequestConfigurationAndResponseHandler get the number of the resource func (m *CountRequestBuilder) GetWithRequestConfigurationAndResponseHandler(requestConfiguration *CountRequestBuilderGetRequestConfiguration, responseHandler i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ResponseHandler)(*int32, error) { requestInfo, err := m.CreateGetRequestInformationWithRequestConfiguration(requestConfiguration); if err != nil { return nil, err } errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings { "4XX": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue, "5XX": 
i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue, } res, err := m.requestAdapter.SendPrimitiveAsync(requestInfo, "int32", responseHandler, errorMapping) if err != nil { return nil, err } return res.(*int32), nil }
{ urlParams := make(map[string]string) urlParams["request-raw-url"] = rawUrl return NewCountRequestBuilderInternal(urlParams, requestAdapter) }
login.go
package api import ( "crypto/tls" "database/sql" "fmt" "github.com/ansible-semaphore/semaphore/api/helpers" "github.com/ansible-semaphore/semaphore/db" "net/http" "net/mail" "strings" "time" log "github.com/Sirupsen/logrus" "github.com/ansible-semaphore/semaphore/util" sq "github.com/masterminds/squirrel" "golang.org/x/crypto/bcrypt" "gopkg.in/ldap.v2" ) func findLDAPUser(username, password string) (*db.User, error) { if !util.Config.LdapEnable { return nil, fmt.Errorf("LDAP not configured") } l, err := ldap.Dial("tcp", util.Config.LdapServer) if err != nil { return nil, err } defer l.Close() // Reconnect with TLS if needed if util.Config.LdapNeedTLS { // TODO: InsecureSkipVerify should be configurable tlsConf := tls.Config{ InsecureSkipVerify: true, //nolint: gas } if err = l.StartTLS(&tlsConf); err != nil { return nil, err } } // First bind with a read only user if err = l.Bind(util.Config.LdapBindDN, util.Config.LdapBindPassword); err != nil { return nil, err } // Search for the given username searchRequest := ldap.NewSearchRequest( util.Config.LdapSearchDN, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false, fmt.Sprintf(util.Config.LdapSearchFilter, username), []string{util.Config.LdapMappings.DN}, nil, ) sr, err := l.Search(searchRequest) if err != nil { return nil, err } if len(sr.Entries) != 1 { return nil, fmt.Errorf("User does not exist or too many entries returned") } // Bind as the user to verify their password userdn := sr.Entries[0].DN if err = l.Bind(userdn, password); err != nil { return nil, err } // Get user info and ensure authentication in case LDAP supports unauthenticated bind searchRequest = ldap.NewSearchRequest( util.Config.LdapSearchDN, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false, fmt.Sprintf(util.Config.LdapSearchFilter, username), []string{util.Config.LdapMappings.DN, util.Config.LdapMappings.Mail, util.Config.LdapMappings.UID, util.Config.LdapMappings.CN}, nil, ) sr, err = l.Search(searchRequest) if err != nil { 
return nil, err } ldapUser := db.User{ Username: sr.Entries[0].GetAttributeValue(util.Config.LdapMappings.UID), Created: time.Now(), Name: sr.Entries[0].GetAttributeValue(util.Config.LdapMappings.CN), Email: sr.Entries[0].GetAttributeValue(util.Config.LdapMappings.Mail), External: true, Alert: false, } log.Info("User " + ldapUser.Name + " with email " + ldapUser.Email + " authorized via LDAP correctly") return &ldapUser, nil } //nolint: gocyclo func login(w http.ResponseWriter, r *http.Request) { var login struct { Auth string `json:"auth" binding:"required"` Password string `json:"password" binding:"required"` } if !helpers.Bind(w, r, &login) { return } /* logic: - fetch user from ldap if enabled - fetch user from database by username/email - create user in database if doesn't exist & ldap record found - check password if non-ldap user - create session & send cookie */ login.Auth = strings.ToLower(login.Auth) var ldapUser *db.User if util.Config.LdapEnable { // search LDAP for users if lu, err := findLDAPUser(login.Auth, login.Password); err == nil { ldapUser = lu } else { log.Info(err.Error()) } } var user db.User q := sq.Select("*"). From("user") // determine if login.Auth is email or username if _, err := mail.ParseAddress(login.Auth); err == nil { q = q.Where("email=?", login.Auth) } else { q = q.Where("username=?", login.Auth) } query, args, err := q.ToSql() util.LogWarning(err) if err = helpers.Store(r).Sql().SelectOne(&user, query, args...); err != nil && err == sql.ErrNoRows
else if err != nil { panic(err) } // check if ldap user & no ldap user found if user.External && ldapUser == nil { w.WriteHeader(http.StatusUnauthorized) return } // non-ldap login if !user.External { if err = bcrypt.CompareHashAndPassword([]byte(user.Password), []byte(login.Password)); err != nil { w.WriteHeader(http.StatusUnauthorized) return } // authenticated. } newSession, err := helpers.Store(r).CreateSession(db.Session{ UserID: user.ID, Created: time.Now(), LastActive: time.Now(), IP: r.Header.Get("X-Real-IP"), UserAgent: r.Header.Get("user-agent"), Expired: false, }) if err != nil { panic(err) } encoded, err := util.Cookie.Encode("semaphore", map[string]interface{}{ "user": user.ID, "session": newSession.ID, }) if err != nil { panic(err) } http.SetCookie(w, &http.Cookie{ Name: "semaphore", Value: encoded, Path: "/", }) w.WriteHeader(http.StatusNoContent) } func logout(w http.ResponseWriter, r *http.Request) { http.SetCookie(w, &http.Cookie{ Name: "semaphore", Value: "", Expires: time.Now().Add(24 * 7 * time.Hour * -1), Path: "/", }) w.WriteHeader(http.StatusNoContent) }
{ if ldapUser != nil { // create new LDAP user user = *ldapUser if err = helpers.Store(r).Sql().Insert(&user); err != nil { panic(err) } } else { w.WriteHeader(http.StatusUnauthorized) return } }
Dashboard.js
import React from 'react'; import cx from 'classnames'; import { CSSTransition } from 'react-transition-group'; import { connect } from "react-redux"; import { startTask } from '../../state-management/actions/appStateActions'; import style from './Dashboard.css'; import DateDisplay from '../DateDisplay/DateDisplay'; import Button, { COLOR } from '../Button/Button'; class
extends React.Component { constructor(props) { super(props); this.state = { isStart: false }; } btnClick = () => { this.props.startTask(); } render() { const classNames= { enter: style.dashEnter, enterActive: style.dashEnterActive, exit: style.dashExit, exitActive: style.dashExitActive, } const { timerActive } = this.props; return ( <div className={style.container}> <CSSTransition classNames={classNames} in={!timerActive} timeout={500} unmountOnExit mountOnEnter > <div className={style.dashboard}> <DateDisplay /> <div className={style.totalHourContainer}> <div>Your total work hour(s):</div> <div className={style.totalHour}> 08:23 </div> </div> </div> </CSSTransition> <div className={cx(style.startCta, { [style.clicked]: timerActive})}> <Button onClick={this.btnClick} color={COLOR.GREEN}>Start New Task</Button> </div> </div> ) } } const mapStateToProps = state => ({ timerActive: state.appState.timerActive, timerRunning: state.appState.timerRunning, }); const mapDispatchToProps = dispatch => ({ startTask: () => dispatch(startTask()) }); export default connect(mapStateToProps, mapDispatchToProps)(Dashboard)
Dashboard
cross-env_vx.x.x.js
// flow-typed signature: 36fd028cc44b7a67a70f44c69f65595c // flow-typed version: <<STUB>>/cross-env_v^5.2.0/flow_v0.131.0 /** * This is an autogenerated libdef stub for: * * 'cross-env' * * Fill this stub out by replacing all the `any` types. * * Once filled out, we encourage you to share your work with the * community by sending a pull request to: * https://github.com/flowtype/flow-typed */ declare module 'cross-env' { declare module.exports: any; } /** * We include stubs for each file inside this npm package in case you need to * require those files directly. Feel free to delete any files that aren't * needed. */ declare module 'cross-env/dist/bin/cross-env-shell' { declare module.exports: any; } declare module 'cross-env/dist/bin/cross-env' { declare module.exports: any; } declare module 'cross-env/dist/command' { declare module.exports: any; } declare module 'cross-env/dist' {
declare module.exports: any; } declare module 'cross-env/dist/variable' { declare module.exports: any; } // Filename aliases declare module 'cross-env/dist/bin/cross-env-shell.js' { declare module.exports: $Exports<'cross-env/dist/bin/cross-env-shell'>; } declare module 'cross-env/dist/bin/cross-env.js' { declare module.exports: $Exports<'cross-env/dist/bin/cross-env'>; } declare module 'cross-env/dist/command.js' { declare module.exports: $Exports<'cross-env/dist/command'>; } declare module 'cross-env/dist/index' { declare module.exports: $Exports<'cross-env/dist'>; } declare module 'cross-env/dist/index.js' { declare module.exports: $Exports<'cross-env/dist'>; } declare module 'cross-env/dist/variable.js' { declare module.exports: $Exports<'cross-env/dist/variable'>; }
zz_generated.deepcopy.go
// +build !ignore_autogenerated // Code generated by deepcopy-gen. DO NOT EDIT. package v1alpha1 import ( runtime "k8s.io/apimachinery/pkg/runtime" ) // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *BackendModule) DeepCopyInto(out *BackendModule) { *out = *in out.TypeMeta = in.TypeMeta in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) return } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BackendModule. func (in *BackendModule) DeepCopy() *BackendModule { if in == nil { return nil } out := new(BackendModule) in.DeepCopyInto(out) return out } // DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. func (in *BackendModule) DeepCopyObject() runtime.Object { if c := in.DeepCopy(); c != nil
return nil } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *BackendModuleList) DeepCopyInto(out *BackendModuleList) { *out = *in out.TypeMeta = in.TypeMeta in.ListMeta.DeepCopyInto(&out.ListMeta) if in.Items != nil { in, out := &in.Items, &out.Items *out = make([]BackendModule, len(*in)) for i := range *in { (*in)[i].DeepCopyInto(&(*out)[i]) } } return } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BackendModuleList. func (in *BackendModuleList) DeepCopy() *BackendModuleList { if in == nil { return nil } out := new(BackendModuleList) in.DeepCopyInto(out) return out } // DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. func (in *BackendModuleList) DeepCopyObject() runtime.Object { if c := in.DeepCopy(); c != nil { return c } return nil }
{ return c }
host_test.go
package ice import ( "net" "testing" "gortc.io/ice/gather" ) func TestProcessDualStack(t *testing.T) { const maxCount = 100 tt := make([]struct { V4, V6 int }, 0, maxCount*maxCount) // Not checking v4=0 and v6=0 because that case is invalid for // the processDualStack function. for v4 := 1; v4 <= maxCount; v4++ { for v6 := 1; v6 <= maxCount; v6++ { tt = append(tt, struct{ V4, V6 int }{V4: v4, V6: v6}) } } for _, tc := range tt { var v4, v6, all []gather.Addr for i := 0; i < tc.V4; i++ { a := gather.Addr{ IP: make(net.IP, net.IPv4len), } // "marking" IP so we can count unique ip's. bin.PutUint32(a.IP, uint32(i)) v4 = append(v4, a) all = append(all, a) } for i := 0; i < tc.V6; i++ { a := gather.Addr{ IP: make(net.IP, net.IPv6len), } bin.PutUint32(a.IP, uint32(i)) v6 = append(v6, a) all = append(all, a) } // Checking that output length is equal to total length. result := processDualStack(all, v4, v6) if len(result) != len(all) { t.Errorf("v4: %d, v6: %d: expected %d, got %d", tc.V4, tc.V6, len(all), len(result)) } // Checking unique IP count. 
gotV4 := make(map[uint32]bool) gotV6 := make(map[uint32]bool) for _, r := range result { if r.IP.To4() == nil { gotV6[bin.Uint32(r.IP)] = true } else { gotV4[bin.Uint32(r.IP)] = true } } if len(gotV4) != len(v4) { t.Errorf("v4: %d, v6: %d: v4 expected %d, got %d", tc.V4, tc.V6, len(v4), len(gotV4)) } if len(gotV6) != len(v6) { t.Errorf("v4: %d, v6: %d: v6 expected %d, got %d", tc.V4, tc.V6, len(v6), len(gotV6)) } } } func TestGatherHostAddresses(t *testing.T) { type outputRow struct { IP string Preference int } for _, tc := range []struct { Name string Input []string Output []outputRow }{ { Name: "blank", }, { Name: "loopback", Input: []string{ "127.0.0.1", }, }, { Name: "Single IPv4", Input: []string{ "1.1.1.1", }, Output: []outputRow{ {"1.1.1.1", 65535}, }, }, { Name: "IPv4", Input: []string{ "1.1.1.1", "1.1.1.2", }, Output: []outputRow{ {"1.1.1.1", 2}, {"1.1.1.2", 1}, }, }, { Name: "Single IPv6", Input: []string{ "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa1", }, Output: []outputRow{ {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa1", 65535}, }, }, { Name: "IPv6", Input: []string{ "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa1", "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa2", }, Output: []outputRow{ {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa1", 2}, {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa2", 1}, }, }, { // If a host has two IPv4 addresses and six IPv6 addresses, it will // insert an IPv4 address after four IPv6 addresses by choosing the // appropriate local preference values when calculating the pair // priorities. 
Name: "2xIPv4 and 6xIPv6", Input: []string{ "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa1", "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa2", "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa3", "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa4", "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa5", "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa6", "1.1.1.1", "1.1.1.2", }, Output: []outputRow{ {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa1", 8}, {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa2", 7}, {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa3", 6}, {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa4", 5}, {"1.1.1.1", 4}, {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa5", 3}, {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa6", 2}, {"1.1.1.2", 1}, }, }, } { t.Run(tc.Name, func(t *testing.T) { gatherAddrs := make([]gather.Addr, len(tc.Input)) for i, ip := range tc.Input { gatherAddrs[i] = gather.Addr{ IP: net.ParseIP(ip), } } expected := make([]HostAddr, len(tc.Output)) for i, row := range tc.Output { expected[i] = HostAddr{ IP: net.ParseIP(row.IP), LocalPreference: row.Preference, } } gotAddr, err := HostAddresses(gatherAddrs) if err != nil { t.Fatal(err) } if len(gotAddr) != len(expected) { t.Fatalf("bad length: %d (got) != %d (expected)", len(gotAddr), len(expected), ) } for i := range gotAddr { got := gotAddr[i] exp := expected[i] if got.LocalPreference != exp.LocalPreference || !got.IP.Equal(exp.IP)
} }) } } func TestIsValidHostIP(t *testing.T) { for _, tc := range []struct { Name string IP net.IP V6 bool Valid bool }{ { Name: "blank", }, { Name: "127.0.0.1", IP: localIP, }, { Name: "v4", IP: net.IPv4(10, 0, 0, 1), Valid: true, }, { Name: "v4 for v6 only", IP: net.IPv4(10, 0, 0, 1), V6: true, }, { Name: "Site-local ipv6", IP: net.ParseIP("FEC0::ff:aa"), V6: true, }, { Name: "link-local ipv6", IP: net.ParseIP("fe80::50da:9baa:ef96:15c8"), }, { Name: "ipv4-mapped", IP: net.IPv4(10, 0, 0, 1).To16(), Valid: true, }, { Name: "ipv4-mapped for v6 only", IP: net.IPv4(10, 0, 0, 1).To16(), V6: true, }, } { t.Run(tc.Name, func(t *testing.T) { if v := IsHostIPValid(tc.IP, tc.V6); v != tc.Valid { t.Errorf("valid(%s, v6=%v) %v (got) != %v (expected)", tc.IP, tc.V6, v, tc.Valid, ) } }) } }
{ t.Errorf("[%d]: %s, %d (got) != %s, %d (expected)", i, got.IP, got.LocalPreference, exp.IP, exp.LocalPreference, ) }
Hero.mobile.tsx
import React, { CSSProperties } from 'react'; import { Row, Col, Image } from 'antd'; import { ButtonComponent } from '../Button'; interface ComponentProps { style?: CSSProperties; title?: string;
src?: string; label?: string; onClick?: () => void; } export const HeroMobileComponent = ({ title, src, label, onClick, }: ComponentProps) => { const adStr = ( <h1 style={{ fontWeight: 700, fontSize: 22, marginBottom: 10, textAlign: 'center', width: '80%', }} > {title} </h1> ); return ( <Row align="middle" justify="center" style={{ background: 'rgb(171, 217, 243)' }} > <Col md={24} style={{ display: 'flex', justifyContent: 'center' }}> <div style={{ display: 'flex', flexDirection: 'column', alignItems: 'center', paddingTop: 24, }} > {adStr} <ButtonComponent style={{ maxWidth: 140, color: '#000', borderWidth: 1, borderColor: '#000', }} type="default" shape="round" size="large" ghost={true} href="" label={label} onClick={onClick} /> </div> </Col> <Col md={24} style={{ display: 'flex', justifyContent: 'center' }}> <Image alt="hero" src={src} preview={false} /> </Col> </Row> ); };
store.go
package pgparty import ( "reflect" ) type ( sqlPattern = string ) type Store struct { modelDescriptions map[reflect.Type]*ModelDesc
queryReplacers map[sqlPattern]map[string]ReplaceEntry } func (s *Store) Init() { s.modelDescriptions = make(map[reflect.Type]*ModelDesc) s.queryReplacers = make(map[sqlPattern]map[string]ReplaceEntry) } func (s Store) ModelDescriptions() map[reflect.Type]*ModelDesc { return s.modelDescriptions } func (s Store) QueryReplacers() map[sqlPattern]map[string]ReplaceEntry { return s.queryReplacers } func (s Store) GetModelDescription(model Storable) (*ModelDesc, bool) { ret, ok := s.modelDescriptions[reflect.Indirect(reflect.ValueOf(model)).Type()] return ret, ok } // Получение описания модели из его reflect.Type func (s Store) GetModelDescriptionByType(typ reflect.Type) (*ModelDesc, bool) { ret, ok := s.modelDescriptions[typ] return ret, ok } func (s *Store) Close() { // log.Fatal("(s *Store) Close() unimplemented") }
k8s.go
package k8s import ( "encoding/json" "fmt" "k8s.io/apimachinery/pkg/util/intstr" "github.com/tsuru/nginx-operator/pkg/apis/nginx/v1alpha1" appv1 "k8s.io/api/apps/v1" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime/schema" ) const ( // Default docker image used for nginx defaultNginxImage = "nginx:latest" // Default port names used by the nginx container and the ClusterIP service defaultHTTPPortName = "http" defaultHTTPSPortName = "https" // Mount path where nginx.conf will be placed configMountPath = "/etc/nginx" // Mount path where certificate and key pair will be placed certMountPath = configMountPath + "/certs" // Annotation key used to stored the nginx that created the deployment generatedFromAnnotation = "nginx.tsuru.io/generated-from" ) // NewDeployment creates a deployment for a given Nginx resource. func NewDeployment(n *v1alpha1.Nginx) (*appv1.Deployment, error) { n.Spec.Image = valueOrDefault(n.Spec.Image, defaultNginxImage) deployment := appv1.Deployment{ TypeMeta: metav1.TypeMeta{ Kind: "Deployment", APIVersion: "apps/v1", }, ObjectMeta: metav1.ObjectMeta{ Name: n.Name + "-deployment", Namespace: n.Namespace, OwnerReferences: []metav1.OwnerReference{ *metav1.NewControllerRef(n, schema.GroupVersionKind{ Group: v1alpha1.SchemeGroupVersion.Group, Version: v1alpha1.SchemeGroupVersion.Version, Kind: "Nginx", }), }, }, Spec: appv1.DeploymentSpec{ Replicas: n.Spec.Replicas, Selector: &metav1.LabelSelector{ MatchLabels: LabelsForNginx(n.Name), }, Template: corev1.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Namespace: n.Namespace, Labels: LabelsForNginx(n.Name), }, Spec: corev1.PodSpec{ Containers: []corev1.Container{ { Name: "nginx", Image: n.Spec.Image, Ports: []corev1.ContainerPort{ { Name: defaultHTTPPortName, ContainerPort: int32(80), Protocol: corev1.ProtocolTCP, }, }, Resources: n.Spec.PodTemplate.Resources, ReadinessProbe: &corev1.Probe{ Handler: corev1.Handler{ HTTPGet: 
&corev1.HTTPGetAction{ Path: "/", Port: intstr.FromString(defaultHTTPPortName), Scheme: corev1.URISchemeHTTP, }, }, }, }, }, Affinity: n.Spec.PodTemplate.Affinity, }, }, }, } setupConfig(n.Spec.Config, &deployment) setupTLS(n.Spec.TLSSecret, &deployment) // This is done on the last step because n.Spec may have mutated during these methods if err := SetNginxSpec(&deployment.ObjectMeta, n.Spec); err != nil { return nil, err } return &deployment, nil } // NewService assembles the ClusterIP service for the Nginx func NewService(n *v1alpha1.Nginx) *corev1.Service { service := corev1.Service{ TypeMeta: metav1.TypeMeta{ Kind: "Service", APIVersion: "v1", }, ObjectMeta: metav1.ObjectMeta{ Name: n.Name + "-service", Namespace: n.Namespace, OwnerReferences: []metav1.OwnerReference{ *metav1.NewControllerRef(n, schema.GroupVersionKind{ Group: v1alpha1.SchemeGroupVersion.Group, Version: v1alpha1.SchemeGroupVersion.Version, Kind: "Nginx", }), }, Labels: LabelsForNginx(n.Name), }, Spec: corev1.ServiceSpec{ Ports: []corev1.ServicePort{ { Name: defaultHTTPPortName, Protocol: corev1.ProtocolTCP, TargetPort: intstr.FromString(defaultHTTPPortName), Port: int32(80), }, }, Selector: LabelsForNginx(n.Name), Type: corev1.ServiceTypeClusterIP, }, } if n.Spec.TLSSecret != nil { service.Spec.Ports = append(service.Spec.Ports, corev1.ServicePort{ Name: defaultHTTPSPortName, Protocol: corev1.ProtocolTCP, TargetPort: intstr.FromString(defaultHTTPSPortName), Port: int32(443), }) } return &service } // LabelsForNginx returns the labels for a Nginx CR with the given name func LabelsForNginx(name string) map[string]string { return map[string]string{ "nginx_cr": name, "app": "nginx", } } // ExtractNginxSpec extracts the nginx used to create the object func ExtractNginxSpec(o metav1.ObjectMeta) (v1alpha1.NginxSpec, error) { ann, ok := o.Annotations[generatedFromAnnotation] if !ok { return v1alpha1.NginxSpec{}, fmt.Errorf("missing %q annotation in deployment", generatedFromAnnotation) } var spec 
v1alpha1.NginxSpec if err := json.Unmarshal([]byte(ann), &spec); err != nil { return v1alpha1.NginxSpec{}, fmt.Errorf("failed to unmarshal nginx from annotation: %v", err) } return spec, nil } // SetNginxSpec sets the nginx spec into the object annotation to be later extracted func SetNginxSpec(o *metav1.ObjectMeta, spec v1alpha1.NginxSpec) error { if o.Annotations == nil { o.Annotations = make(map[string]string) } origSpec, err := json.Marshal(spec) if err != nil { return err } o.Annotations[generatedFromAnnotation] = string(origSpec) return nil } func setupConfig(conf *v1alpha1.ConfigRef, dep *appv1.Deployment) { if conf == nil { return } dep.Spec.Template.Spec.Containers[0].VolumeMounts = append(dep.Spec.Template.Spec.Containers[0].VolumeMounts, corev1.VolumeMount{ Name: "nginx-config", MountPath: configMountPath, }) switch conf.Kind { case v1alpha1.ConfigKindConfigMap: dep.Spec.Template.Spec.Volumes = append(dep.Spec.Template.Spec.Volumes, corev1.Volume{ Name: "nginx-config", VolumeSource: corev1.VolumeSource{ ConfigMap: &corev1.ConfigMapVolumeSource{ LocalObjectReference: corev1.LocalObjectReference{ Name: conf.Name, }, }, }, }) case v1alpha1.ConfigKindInline: if dep.Spec.Template.Annotations == nil { dep.Spec.Template.Annotations = make(map[string]string) } dep.Spec.Template.Annotations[conf.Name] = conf.Value dep.Spec.Template.Spec.Volumes = append(dep.Spec.Template.Spec.Volumes, corev1.Volume{ Name: "nginx-config", VolumeSource: corev1.VolumeSource{ DownwardAPI: &corev1.DownwardAPIVolumeSource{ Items: []corev1.DownwardAPIVolumeFile{ { Path: "nginx.conf", FieldRef: &corev1.ObjectFieldSelector{ FieldPath: fmt.Sprintf("metadata.annotations['%s']", conf.Name), }, }, }, }, }, }) } } // setupTLS appends an https port if TLS secrets are specified func
(secret *v1alpha1.TLSSecret, dep *appv1.Deployment) { if secret == nil { return } dep.Spec.Template.Spec.Containers[0].Ports = append(dep.Spec.Template.Spec.Containers[0].Ports, corev1.ContainerPort{ Name: defaultHTTPSPortName, ContainerPort: int32(443), Protocol: corev1.ProtocolTCP, }) dep.Spec.Template.Spec.Containers[0].ReadinessProbe = &corev1.Probe{ Handler: corev1.Handler{ HTTPGet: &corev1.HTTPGetAction{ Path: "/", Port: intstr.FromString(defaultHTTPSPortName), Scheme: corev1.URISchemeHTTPS, }, }, } dep.Spec.Template.Spec.Containers[0].VolumeMounts = append(dep.Spec.Template.Spec.Containers[0].VolumeMounts, corev1.VolumeMount{ Name: "nginx-certs", MountPath: certMountPath, }) secret.KeyField = valueOrDefault(secret.KeyField, "tls.key") secret.CertificateField = valueOrDefault(secret.CertificateField, "tls.crt") secret.KeyPath = valueOrDefault(secret.KeyPath, secret.KeyField) secret.CertificatePath = valueOrDefault(secret.CertificatePath, secret.CertificateField) dep.Spec.Template.Spec.Volumes = append(dep.Spec.Template.Spec.Volumes, corev1.Volume{ Name: "nginx-certs", VolumeSource: corev1.VolumeSource{ Secret: &corev1.SecretVolumeSource{ SecretName: secret.SecretName, Items: []corev1.KeyToPath{ {Key: secret.KeyField, Path: secret.KeyPath}, {Key: secret.CertificateField, Path: secret.CertificatePath}, }, }, }, }) } func valueOrDefault(value, def string) string { if value != "" { return value } return def }
setupTLS
lib.rs
//! Dummy backend implementation to test the code for compile errors //! outside of the graphics development environment. extern crate gfx_hal as hal; use std::borrow::{Borrow, BorrowMut}; use std::ops::Range; use hal::{ buffer, command, device, error, format, image, mapping, memory, pass, pool, pso, query, queue, }; use hal::range::RangeArg; /// Dummy backend. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum Backend { } impl hal::Backend for Backend { type PhysicalDevice = PhysicalDevice; type Device = Device; type Surface = Surface; type Swapchain = Swapchain; type QueueFamily = QueueFamily; type CommandQueue = RawCommandQueue; type CommandBuffer = RawCommandBuffer; type Memory = (); type CommandPool = RawCommandPool; type ShaderModule = (); type RenderPass = (); type Framebuffer = (); type UnboundBuffer = (); type Buffer = (); type BufferView = (); type UnboundImage = (); type Image = (); type ImageView = (); type Sampler = (); type ComputePipeline = (); type GraphicsPipeline = (); type PipelineLayout = (); type DescriptorSetLayout = (); type DescriptorPool = DescriptorPool; type DescriptorSet = (); type Fence = (); type Semaphore = (); type QueryPool = (); } /// Dummy physical device. pub struct PhysicalDevice; impl hal::PhysicalDevice<Backend> for PhysicalDevice { fn open( &self, _: &[(&QueueFamily, &[hal::QueuePriority])] ) -> Result<hal::Gpu<Backend>, error::DeviceCreationError> { unimplemented!() } fn format_properties(&self, _: Option<format::Format>) -> format::Properties { unimplemented!() } fn image_format_properties( &self, _: format::Format, _dim: u8, _: image:: Tiling, _: image::Usage, _: image::StorageFlags, ) -> Option<image::FormatProperties> { unimplemented!() } fn memory_properties(&self) -> hal::MemoryProperties { unimplemented!() } fn features(&self) -> hal::Features { unimplemented!() } fn limits(&self) -> hal::Limits { unimplemented!() } } /// Dummy command queue doing nothing. 
pub struct RawCommandQueue; impl queue::RawCommandQueue<Backend> for RawCommandQueue { unsafe fn submit_raw<IC>(&mut self, _: queue::RawSubmission<Backend, IC>, _: Option<&()>) where IC: IntoIterator, IC::Item: Borrow<RawCommandBuffer>, { unimplemented!() } fn present<IS, IW>(&mut self, _: IS, _: IW) where IS: IntoIterator, IS::Item: BorrowMut<Swapchain>, IW: IntoIterator, IW::Item: Borrow<()>, { unimplemented!() } fn wait_idle(&self) -> Result<(), error::HostExecutionError> { unimplemented!() } } /// Dummy device doing nothing. pub struct Device; impl hal::Device<Backend> for Device { fn create_command_pool(&self, _: queue::QueueFamilyId, _: pool::CommandPoolCreateFlags) -> RawCommandPool { unimplemented!() } fn destroy_command_pool(&self, _: RawCommandPool) { unimplemented!() } fn allocate_memory(&self, _: hal::MemoryTypeId, _: u64) -> Result<(), device::OutOfMemory> { unimplemented!() } fn create_render_pass<'a ,IA, IS, ID>(&self, _: IA, _: IS, _: ID) -> () where IA: IntoIterator, IA::Item: Borrow<pass::Attachment>, IS: IntoIterator, IS::Item: Borrow<pass::SubpassDesc<'a>>, ID: IntoIterator, ID::Item: Borrow<pass::SubpassDependency>, { unimplemented!() } fn create_pipeline_layout<IS, IR>(&self, _: IS, _: IR) -> () where IS: IntoIterator, IS::Item: Borrow<()>, IR: IntoIterator, IR::Item: Borrow<(pso::ShaderStageFlags, Range<u32>)>, { unimplemented!() } fn create_framebuffer<I>( &self, _: &(), _: I, _: image::Extent ) -> Result<(), device::FramebufferError> where I: IntoIterator, I::Item: Borrow<()>, { unimplemented!() } fn create_shader_module(&self, _: &[u8]) -> Result<(), device::ShaderError> { unimplemented!() } fn create_sampler(&self, _: image::SamplerInfo) -> () { unimplemented!() } fn create_buffer(&self, _: u64, _: buffer::Usage) -> Result<(), buffer::CreationError> { unimplemented!() } fn get_buffer_requirements(&self, _: &()) -> memory::Requirements { unimplemented!() } fn bind_buffer_memory(&self, _: &(), _: u64, _: ()) -> Result<(), device::BindError> 
{ unimplemented!() } fn create_buffer_view<R: RangeArg<u64>>(&self, _: &(), _: Option<format::Format>, _: R) -> Result<(), buffer::ViewError> { unimplemented!() } fn create_image( &self, _: image::Kind, _: image::Level, _: format::Format, _: image::Tiling, _: image::Usage, _: image::StorageFlags, ) -> Result<(), image::CreationError> { unimplemented!() } fn get_image_requirements(&self, _: &()) -> memory::Requirements { unimplemented!() } fn bind_image_memory(&self, _: &(), _: u64, _: ()) -> Result<(), device::BindError> { unimplemented!() } fn create_image_view( &self, _: &(), _: image::ViewKind, _: format::Format, _: format::Swizzle, _: image::SubresourceRange, ) -> Result<(), image::ViewError> { unimplemented!() } fn create_descriptor_pool<I>(&self, _: usize, _: I) -> DescriptorPool where I: IntoIterator,
I::Item: Borrow<pso::DescriptorRangeDesc>, { unimplemented!() } fn create_descriptor_set_layout<I>(&self, _: I) -> () where I: IntoIterator, I::Item: Borrow<pso::DescriptorSetLayoutBinding>, { unimplemented!() } fn write_descriptor_sets<'a, I, J>(&self, _: I) where I: IntoIterator<Item = pso::DescriptorSetWrite<'a, Backend, J>>, J: IntoIterator, J::Item: Borrow<pso::Descriptor<'a, Backend>>, { unimplemented!() } fn copy_descriptor_sets<'a, I>(&self, _: I) where I: IntoIterator, I::Item: Borrow<pso::DescriptorSetCopy<'a, Backend>> { unimplemented!() } fn create_semaphore(&self) -> () { unimplemented!() } fn create_fence(&self, _: bool) -> () { unimplemented!() } fn get_fence_status(&self, _: &()) -> bool { unimplemented!() } fn create_query_pool(&self, _: query::QueryType, _: u32) -> () { unimplemented!() } fn destroy_query_pool(&self, _: ()) { unimplemented!() } fn map_memory<R: RangeArg<u64>>(&self, _: &(), _: R) -> Result<*mut u8, mapping::Error> { unimplemented!() } fn unmap_memory(&self, _: &()) { unimplemented!() } fn flush_mapped_memory_ranges<'a, I, R>(&self, _: I) where I: IntoIterator, I::Item: Borrow<(&'a (), R)>, R: RangeArg<u64>, { unimplemented!() } fn invalidate_mapped_memory_ranges<'a, I, R>(&self, _: I) where I: IntoIterator, I::Item: Borrow<(&'a (), R)>, R: RangeArg<u64>, { unimplemented!() } fn free_memory(&self, _: ()) { unimplemented!() } fn destroy_shader_module(&self, _: ()) { unimplemented!() } fn destroy_render_pass(&self, _: ()) { unimplemented!() } fn destroy_pipeline_layout(&self, _: ()) { unimplemented!() } fn destroy_graphics_pipeline(&self, _: ()) { unimplemented!() } fn destroy_compute_pipeline(&self, _: ()) { unimplemented!() } fn destroy_framebuffer(&self, _: ()) { unimplemented!() } fn destroy_buffer(&self, _: ()) { unimplemented!() } fn destroy_buffer_view(&self, _: ()) { unimplemented!() } fn destroy_image(&self, _: ()) { unimplemented!() } fn destroy_image_view(&self, _: ()) { unimplemented!() } fn destroy_sampler(&self, _: ()) 
{ unimplemented!() } fn destroy_descriptor_pool(&self, _: DescriptorPool) { unimplemented!() } fn destroy_descriptor_set_layout(&self, _: ()) { unimplemented!() } fn destroy_fence(&self, _: ()) { unimplemented!() } fn destroy_semaphore(&self, _: ()) { unimplemented!() } fn create_swapchain( &self, _: &mut Surface, _: hal::SwapchainConfig, ) -> (Swapchain, hal::Backbuffer<Backend>) { unimplemented!() } fn destroy_swapchain(&self, _: Swapchain) { unimplemented!() } fn wait_idle(&self) -> Result<(), error::HostExecutionError> { unimplemented!() } } #[derive(Debug)] pub struct QueueFamily; impl queue::QueueFamily for QueueFamily { fn queue_type(&self) -> hal::QueueType { unimplemented!() } fn max_queues(&self) -> usize { unimplemented!() } fn id(&self) -> queue::QueueFamilyId { unimplemented!() } } /// Dummy raw command pool. pub struct RawCommandPool; impl pool::RawCommandPool<Backend> for RawCommandPool { fn reset(&mut self) { unimplemented!() } fn allocate(&mut self, _: usize, _: command::RawLevel) -> Vec<RawCommandBuffer> { unimplemented!() } unsafe fn free(&mut self, _: Vec<RawCommandBuffer>) { unimplemented!() } } /// Dummy command buffer, which ignores all the calls. 
#[derive(Clone)] pub struct RawCommandBuffer; impl command::RawCommandBuffer<Backend> for RawCommandBuffer { fn begin(&mut self, _: command::CommandBufferFlags, _: command::CommandBufferInheritanceInfo<Backend>) { unimplemented!() } fn finish(&mut self) { unimplemented!() } fn reset(&mut self, _: bool) { unimplemented!() } fn pipeline_barrier<'a, T>( &mut self, _: Range<pso::PipelineStage>, _: memory::Dependencies, _: T, ) where T: IntoIterator, T::Item: Borrow<memory::Barrier<'a, Backend>>, { unimplemented!() } fn fill_buffer(&mut self, _: &(), _: Range<buffer::Offset>, _: u32) { unimplemented!() } fn update_buffer(&mut self, _: &(), _: buffer::Offset, _: &[u8]) { unimplemented!() } fn clear_color_image_raw( &mut self, _: &(), _: image::Layout, _: image::SubresourceRange, _: command::ClearColorRaw, ) { unimplemented!() } fn clear_depth_stencil_image_raw( &mut self, _: &(), _: image::Layout, _: image::SubresourceRange, _: command::ClearDepthStencilRaw, ) { unimplemented!() } fn clear_attachments<T, U>(&mut self, _: T, _: U) where T: IntoIterator, T::Item: Borrow<command::AttachmentClear>, U: IntoIterator, U::Item: Borrow<pso::Rect>, { unimplemented!() } fn resolve_image<T>( &mut self, _: &(), _: image::Layout, _: &(), _: image::Layout, _: T, ) where T: IntoIterator, T::Item: Borrow<command::ImageResolve>, { unimplemented!() } fn blit_image<T>( &mut self, _: &(), _: image::Layout, _: &(), _: image::Layout, _: image::Filter, _: T, ) where T: IntoIterator, T::Item: Borrow<command::ImageBlit>, { unimplemented!() } fn bind_index_buffer(&mut self, _: buffer::IndexBufferView<Backend>) { unimplemented!() } fn bind_vertex_buffers(&mut self, _: pso::VertexBufferSet<Backend>) { unimplemented!() } fn set_viewports<T>(&mut self, _: u32, _: T) where T: IntoIterator, T::Item: Borrow<pso::Viewport>, { unimplemented!() } fn set_scissors<T>(&mut self, _: u32, _: T) where T: IntoIterator, T::Item: Borrow<pso::Rect>, { unimplemented!() } fn set_stencil_reference(&mut self, _: 
pso::StencilValue, _: pso::StencilValue) { unimplemented!() } fn set_blend_constants(&mut self, _: pso::ColorValue) { unimplemented!() } fn begin_render_pass_raw<T>( &mut self, _: &(), _: &(), _: pso::Rect, _: T, _: command::SubpassContents, ) where T: IntoIterator, T::Item: Borrow<command::ClearValueRaw>, { unimplemented!() } fn next_subpass(&mut self, _: command::SubpassContents) { unimplemented!() } fn end_render_pass(&mut self) { unimplemented!() } fn bind_graphics_pipeline(&mut self, _: &()) { unimplemented!() } fn bind_graphics_descriptor_sets<I>(&mut self, _: &(), _: usize, _: I) where I: IntoIterator, I::Item: Borrow<()>, { unimplemented!() } fn bind_compute_pipeline(&mut self, _: &()) { unimplemented!() } fn bind_compute_descriptor_sets<I>(&mut self, _: &(), _: usize, _: I) where I: IntoIterator, I::Item: Borrow<()>, { unimplemented!() } fn dispatch(&mut self, _: hal::WorkGroupCount) { unimplemented!() } fn dispatch_indirect(&mut self, _: &(), _: buffer::Offset) { unimplemented!() } fn copy_buffer<T>(&mut self, _: &(), _: &(), _: T) where T: IntoIterator, T::Item: Borrow<command::BufferCopy>, { unimplemented!() } fn copy_image<T>( &mut self, _: &(), _: image::Layout, _: &(), _: image::Layout, _: T, ) where T: IntoIterator, T::Item: Borrow<command::ImageCopy>, { unimplemented!() } fn copy_buffer_to_image<T>( &mut self, _: &(), _: &(), _: image::Layout, _: T, ) where T: IntoIterator, T::Item: Borrow<command::BufferImageCopy>, { unimplemented!() } fn copy_image_to_buffer<T>( &mut self, _: &(), _: image::Layout, _: &(), _: T, ) where T: IntoIterator, T::Item: Borrow<command::BufferImageCopy>, { unimplemented!() } fn draw(&mut self, _: Range<hal::VertexCount>, _: Range<hal::InstanceCount>, ) { unimplemented!() } fn draw_indexed( &mut self, _: Range<hal::IndexCount>, _: hal::VertexOffset, _: Range<hal::InstanceCount>, ) { unimplemented!() } fn draw_indirect(&mut self, _: &(), _: buffer::Offset, _: u32, _: u32) { unimplemented!() } fn draw_indexed_indirect( &mut 
self, _: &(), _: buffer::Offset, _: u32, _: u32, ) { unimplemented!() } fn begin_query( &mut self, _: query::Query<Backend>, _: query::QueryControl, ) { unimplemented!() } fn end_query( &mut self, _: query::Query<Backend>, ) { unimplemented!() } fn reset_query_pool( &mut self, _: &(), _: Range<query::QueryId>, ) { unimplemented!() } fn write_timestamp( &mut self, _: pso::PipelineStage, _: query::Query<Backend>, ) { unimplemented!() } fn push_graphics_constants( &mut self, _: &(), _: pso::ShaderStageFlags, _: u32, _: &[u32], ) { unimplemented!() } fn push_compute_constants( &mut self, _: &(), _: u32, _: &[u32], ) { unimplemented!() } fn execute_commands<I>( &mut self, _: I, ) where I: IntoIterator, I::Item: Borrow<RawCommandBuffer> { unimplemented!() } } // Dummy descriptor pool. #[derive(Debug)] pub struct DescriptorPool; impl pso::DescriptorPool<Backend> for DescriptorPool { fn reset(&mut self) { unimplemented!() } } /// Dummy surface. pub struct Surface; impl hal::Surface<Backend> for Surface { fn kind(&self) -> hal::image::Kind { unimplemented!() } fn capabilities_and_formats( &self, _: &PhysicalDevice, ) -> (hal::SurfaceCapabilities, Option<Vec<format::Format>>) { unimplemented!() } fn supports_queue_family(&self, _: &QueueFamily) -> bool { unimplemented!() } } /// Dummy swapchain. pub struct Swapchain; impl hal::Swapchain<Backend> for Swapchain { fn acquire_frame(&mut self, _: hal::FrameSync<Backend>) -> hal::Frame { unimplemented!() } } pub struct Instance; impl hal::Instance for Instance { type Backend = Backend; fn enumerate_adapters(&self) -> Vec<hal::Adapter<Backend>> { unimplemented!() } }
update.py
# Copyright (C) 2010 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import logging
from webkitpy.tool.steps.options import Options _log = logging.getLogger(__name__) class Update(AbstractStep): @classmethod def options(cls): return AbstractStep.options() + [ Options.non_interactive, Options.update, Options.quiet, ] def run(self, state): if not self._options.update: return _log.info("Updating working directory") self._tool.executive.run_and_throw_if_fail(self._update_command(), quiet=self._options.quiet, cwd=self._tool.scm().checkout_root) def _update_command(self): update_command = self._tool.deprecated_port().update_webkit_command(self._options.non_interactive) return update_command
from webkitpy.tool.steps.abstractstep import AbstractStep
fundrawtransaction.py
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Functional test for the fundrawtransaction RPC.

Exercises coin selection, change handling, option validation
(changeAddress, changePosition, includeWatching, feeRate,
reserveChangeKey, subtractFeeFromOutputs), fee parity with
sendtoaddress/sendmany, watch-only funding, and locked-wallet behavior.
"""

from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *


def get_unspent(listunspent, amount):
    """Return the first UTXO in *listunspent* whose amount equals *amount*.

    Raises AssertionError when no such UTXO exists, so a test fails loudly
    instead of operating on the wrong coin.
    """
    for utx in listunspent:
        if utx['amount'] == amount:
            return utx
    raise AssertionError('Could not find unspent with amount={}'.format(amount))


class RawTransactionsTest(BitcoinTestFramework):
    """End-to-end tests for fundrawtransaction across a 4-node network."""

    def __init__(self):
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 4

    def setup_network(self, split=False):
        """Start all nodes and connect them into a single (unsplit) network."""
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)

        connect_nodes_bi(self.nodes, 0, 1)
        connect_nodes_bi(self.nodes, 1, 2)
        connect_nodes_bi(self.nodes, 0, 2)
        connect_nodes_bi(self.nodes, 0, 3)

        self.is_network_split = False
        self.sync_all()

    def run_test(self):
        """Run every fundrawtransaction scenario sequentially.

        The scenarios share wallet state (balances, keypool, fee settings),
        so ordering matters and sections must not be reordered.
        """
        print("Mining blocks...")

        min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
        # This test is not meant to test fee estimation and we'd like
        # to be sure all txs are sent at a consistent desired feerate
        for node in self.nodes:
            node.settxfee(min_relay_tx_fee)

        # if the fee's positive delta is higher than this value tests will fail,
        # neg. delta always fail the tests.
        # The size of the signature of every input may be at most 2 bytes larger
        # than a minimum sized signature.

        #            = 2 bytes * minRelayTxFeePerByte
        feeTolerance = 2 * min_relay_tx_fee / 1000

        self.nodes[2].generate(1)
        self.sync_all()
        self.nodes[0].generate(121)
        self.sync_all()

        # ensure that setting changePosition in fundraw with an exact match is handled properly
        rawmatch = self.nodes[2].createrawtransaction([], {self.nodes[2].getnewaddress(): 500000})
        rawmatch = self.nodes[2].fundrawtransaction(rawmatch, {"changePosition": 1, "subtractFeeFromOutputs": [0]})
        assert_equal(rawmatch["changepos"], -1)

        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 15)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 50)
        self.nodes[0].generate(1)
        self.sync_all()

        ###############
        # simple test #
        ###############
        inputs = []
        outputs = {self.nodes[0].getnewaddress(): 1.0}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        assert(len(dec_tx['vin']) > 0)  #test that we have enough inputs

        ##############################
        # simple test with two coins #
        ##############################
        inputs = []
        outputs = {self.nodes[0].getnewaddress(): 22}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        assert(len(dec_tx['vin']) > 0)  #test if we have enough inputs

        ##############################
        # simple test with two coins #
        ##############################
        inputs = []
        outputs = {self.nodes[0].getnewaddress(): 26}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        assert(len(dec_tx['vin']) > 0)
        assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')

        ################################
        # simple test with two outputs #
        ################################
        inputs = []
        outputs = {self.nodes[0].getnewaddress(): 26, self.nodes[1].getnewaddress(): 25}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        for out in dec_tx['vout']:
            totalOut += out['value']

        assert(len(dec_tx['vin']) > 0)
        assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')

        #########################################################################
        # test a fundrawtransaction with a VIN greater than the required amount #
        #########################################################################
        utx = get_unspent(self.nodes[2].listunspent(), 50)

        inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
        outputs = {self.nodes[0].getnewaddress(): 10}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        for out in dec_tx['vout']:
            totalOut += out['value']

        assert_equal(fee + totalOut, utx['amount'])  #compare vin total and totalout+fee

        #####################################################################
        # test a fundrawtransaction with which will not get a change output #
        #####################################################################
        utx = get_unspent(self.nodes[2].listunspent(), 50)

        inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
        # Draupnir: Fee is exact, do not use tolerance
        # (reuses `fee` computed in the previous section)
        outputs = {self.nodes[0].getnewaddress(): Decimal(50) - fee}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        for out in dec_tx['vout']:
            totalOut += out['value']

        assert_equal(rawtxfund['changepos'], -1)
        assert_equal(fee + totalOut, utx['amount'])  #compare vin total and totalout+fee

        ####################################################
        # test a fundrawtransaction with an invalid option #
        ####################################################
        utx = get_unspent(self.nodes[2].listunspent(), 50)

        inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
        outputs = {self.nodes[0].getnewaddress(): Decimal(40)}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

        assert_raises_jsonrpc(-3, "Unexpected key foo", self.nodes[2].fundrawtransaction, rawtx, {'foo': 'bar'})

        ############################################################
        # test a fundrawtransaction with an invalid change address #
        ############################################################
        utx = get_unspent(self.nodes[2].listunspent(), 50)

        inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
        outputs = {self.nodes[0].getnewaddress(): Decimal(40)}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

        assert_raises_jsonrpc(-5, "changeAddress must be a valid draupnir address", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress': 'foobar'})

        ############################################################
        # test a fundrawtransaction with a provided change address #
        ############################################################
        utx = get_unspent(self.nodes[2].listunspent(), 50)

        inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
        # Draupnir: Reduce this output so the fee doesn't leave us with no change
        outputs = {self.nodes[0].getnewaddress(): Decimal(25)}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

        change = self.nodes[2].getnewaddress()
        assert_raises_jsonrpc(-8, "changePosition out of bounds", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress': change, 'changePosition': 2})
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 0})
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        out = dec_tx['vout'][0]
        assert_equal(change, out['scriptPubKey']['addresses'][0])

        #########################################################################
        # test a fundrawtransaction with a VIN smaller than the required amount #
        #########################################################################
        utx = get_unspent(self.nodes[2].listunspent(), 10)

        inputs = [{'txid': utx['txid'], 'vout': utx['vout']}]
        outputs = {self.nodes[0].getnewaddress(): 10}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)

        # 4-byte version + 1-byte vin count + 36-byte prevout then script_len
        # (splice a 1-byte "00" scriptSig into the serialized tx)
        rawtx = rawtx[:82] + "0100" + rawtx[84:]

        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
        assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        matchingOuts = 0
        for i, out in enumerate(dec_tx['vout']):
            totalOut += out['value']
            if out['scriptPubKey']['addresses'][0] in outputs:
                matchingOuts += 1
            else:
                assert_equal(i, rawtxfund['changepos'])

        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
        assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])

        assert_equal(matchingOuts, 1)
        assert_equal(len(dec_tx['vout']), 2)

        ###########################################
        # test a fundrawtransaction with two VINs #
        ###########################################
        utx = get_unspent(self.nodes[2].listunspent(), 10)
        utx2 = get_unspent(self.nodes[2].listunspent(), 50)

        inputs = [{'txid': utx['txid'], 'vout': utx['vout']}, {'txid': utx2['txid'], 'vout': utx2['vout']}]
        outputs = {self.nodes[0].getnewaddress(): 60}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        matchingOuts = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
            if out['scriptPubKey']['addresses'][0] in outputs:
                matchingOuts += 1

        assert_equal(matchingOuts, 1)
        assert_equal(len(dec_tx['vout']), 2)

        matchingIns = 0
        for vinOut in dec_tx['vin']:
            for vinIn in inputs:
                if vinIn['txid'] == vinOut['txid']:
                    matchingIns += 1

        assert_equal(matchingIns, 2)  #we now must see two vins identical to vins given as params

        #########################################################
        # test a fundrawtransaction with two VINs and two vOUTs #
        #########################################################
        utx = get_unspent(self.nodes[2].listunspent(), 10)
        utx2 = get_unspent(self.nodes[2].listunspent(), 50)

        inputs = [{'txid': utx['txid'], 'vout': utx['vout']}, {'txid': utx2['txid'], 'vout': utx2['vout']}]
        outputs = {self.nodes[0].getnewaddress(): 60, self.nodes[0].getnewaddress(): 10}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        matchingOuts = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
            if out['scriptPubKey']['addresses'][0] in outputs:
                matchingOuts += 1

        assert_equal(matchingOuts, 2)
        assert_equal(len(dec_tx['vout']), 3)

        ##############################################
        # test a fundrawtransaction with invalid vin #
        ##############################################
        listunspent = self.nodes[2].listunspent()
        inputs = [{'txid': "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout': 0}]  #invalid vin!
        outputs = {self.nodes[0].getnewaddress(): 10}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)

        assert_raises_jsonrpc(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx)

        ############################################################
        #compare fee of a standard pubkeyhash transaction
        inputs = []
        outputs = {self.nodes[1].getnewaddress(): 11}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[0].fundrawtransaction(rawTx)

        #create same transaction over sendtoaddress
        txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 11)
        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']

        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
        ############################################################

        ############################################################
        #compare fee of a standard pubkeyhash transaction with multiple outputs
        inputs = []
        outputs = {self.nodes[1].getnewaddress(): 110, self.nodes[1].getnewaddress(): 120, self.nodes[1].getnewaddress(): 10, self.nodes[1].getnewaddress(): 130, self.nodes[1].getnewaddress(): 20, self.nodes[1].getnewaddress(): 30}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[0].fundrawtransaction(rawTx)
        #create same transaction over sendtoaddress
        txId = self.nodes[0].sendmany("", outputs)
        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']

        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
        ############################################################

        ############################################################
        #compare fee of a 2of2 multisig p2sh transaction

        # create 2of2 addr
        addr1 = self.nodes[1].getnewaddress()
        addr2 = self.nodes[1].getnewaddress()

        addr1Obj = self.nodes[1].validateaddress(addr1)
        addr2Obj = self.nodes[1].validateaddress(addr2)

        mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])

        inputs = []
        # NOTE(review): the funded tx pays 1.1 while the sendtoaddress
        # comparison below pays 11; the fee comparison only holds because
        # tx size (not amount) drives the fee — confirm this is intended.
        outputs = {mSigObj: 1.1}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[0].fundrawtransaction(rawTx)

        #create same transaction over sendtoaddress
        txId = self.nodes[0].sendtoaddress(mSigObj, 11)
        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']

        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
        ############################################################

        ############################################################
        #compare fee of a standard pubkeyhash transaction

        # create 4of5 addr
        addr1 = self.nodes[1].getnewaddress()
        addr2 = self.nodes[1].getnewaddress()
        addr3 = self.nodes[1].getnewaddress()
        addr4 = self.nodes[1].getnewaddress()
        addr5 = self.nodes[1].getnewaddress()

        addr1Obj = self.nodes[1].validateaddress(addr1)
        addr2Obj = self.nodes[1].validateaddress(addr2)
        addr3Obj = self.nodes[1].validateaddress(addr3)
        addr4Obj = self.nodes[1].validateaddress(addr4)
        addr5Obj = self.nodes[1].validateaddress(addr5)

        mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])

        inputs = []
        outputs = {mSigObj: 1.1}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[0].fundrawtransaction(rawTx)

        #create same transaction over sendtoaddress
        txId = self.nodes[0].sendtoaddress(mSigObj, 11)
        signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']

        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance)
        ############################################################

        ############################################################
        # spend a 2of2 multisig transaction over fundraw

        # create 2of2 addr
        addr1 = self.nodes[2].getnewaddress()
        addr2 = self.nodes[2].getnewaddress()

        addr1Obj = self.nodes[2].validateaddress(addr1)
        addr2Obj = self.nodes[2].validateaddress(addr2)

        mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])

        # send 1.2 BTC to msig addr
        txId = self.nodes[0].sendtoaddress(mSigObj, 12)
        self.sync_all()
        self.nodes[1].generate(1)
        self.sync_all()

        oldBalance = self.nodes[1].getbalance()
        inputs = []
        outputs = {self.nodes[1].getnewaddress(): 11}
        rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[2].fundrawtransaction(rawTx)

        signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
        txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
        self.sync_all()
        self.nodes[1].generate(1)
        self.sync_all()

        # make sure funds are received at node1
        assert_equal(oldBalance + Decimal('11.0000000'), self.nodes[1].getbalance())

        ############################################################
        # locked wallet test
        self.nodes[1].encryptwallet("test")
        self.nodes.pop(1)
        # indices shift after pop(1): nodes[1]/nodes[2] are the old 2/3
        stop_node(self.nodes[0], 0)
        stop_node(self.nodes[1], 2)
        stop_node(self.nodes[2], 3)

        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
        # This test is not meant to test fee estimation and we'd like
        # to be sure all txs are sent at a consistent desired feerate
        for node in self.nodes:
            node.settxfee(min_relay_tx_fee)

        connect_nodes_bi(self.nodes, 0, 1)
        connect_nodes_bi(self.nodes, 1, 2)
        connect_nodes_bi(self.nodes, 0, 2)
        connect_nodes_bi(self.nodes, 0, 3)
        self.is_network_split = False
        self.sync_all()

        # drain the keypool
        self.nodes[1].getnewaddress()
        inputs = []
        outputs = {self.nodes[0].getnewaddress(): 11}
        rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
        # fund a transaction that requires a new key for the change output
        # creating the key must be impossible because the wallet is locked
        # BUGFIX: this previously passed stale `rawtx` (the invalid-vin tx from
        # an earlier section), which fails with "Insufficient funds" for the
        # wrong reason and never exercised the locked-wallet keypool path.
        assert_raises_jsonrpc(-4, "Insufficient funds", self.nodes[1].fundrawtransaction, rawTx)

        #refill the keypool
        self.nodes[1].walletpassphrase("test", 100)
        self.nodes[1].walletlock()

        assert_raises_jsonrpc(-13, "walletpassphrase", self.nodes[1].sendtoaddress, self.nodes[0].getnewaddress(), 12)

        oldBalance = self.nodes[0].getbalance()

        inputs = []
        outputs = {self.nodes[0].getnewaddress(): 11}
        rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[1].fundrawtransaction(rawTx)

        #now we need to unlock
        self.nodes[1].walletpassphrase("test", 600)
        signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
        txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
        self.nodes[1].generate(1)
        self.sync_all()

        # make sure funds are received at node1
        assert_equal(oldBalance + Decimal('500011.00000000'), self.nodes[0].getbalance())

        ###############################################
        # multiple (~19) inputs tx test | Compare fee #
        ###############################################

        #empty node1, send some small coins from node0 to node1
        self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()

        for i in range(0, 20):
            self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 20)
        self.nodes[0].generate(1)
        self.sync_all()

        #fund a tx with ~20 small inputs
        inputs = []
        # Draupnir: TX size rounding gives us a fee of 4 RINGS
        outputs = {self.nodes[0].getnewaddress(): 15, self.nodes[0].getnewaddress(): 4}
        rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[1].fundrawtransaction(rawTx)

        #create same transaction over sendtoaddress
        txId = self.nodes[1].sendmany("", outputs)
        signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']

        #compare fee
        feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
        assert(feeDelta >= 0 and feeDelta <= feeTolerance * 19)  #~19 inputs

        #############################################
        # multiple (~19) inputs tx test | sign/send #
        #############################################

        #again, empty node1, send some small coins from node0 to node1
        self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()

        for i in range(0, 20):
            self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 2)
        self.nodes[0].generate(1)
        self.sync_all()

        #fund a tx with ~20 small inputs
        oldBalance = self.nodes[0].getbalance()

        inputs = []
        outputs = {self.nodes[0].getnewaddress(): 15, self.nodes[0].getnewaddress(): 4}
        rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
        fundedTx = self.nodes[1].fundrawtransaction(rawTx)
        fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
        txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(oldBalance + Decimal('500019.00000000'), self.nodes[0].getbalance())  #19+block reward

        #####################################################
        # test fundrawtransaction with OP_RETURN and no vin #
        #####################################################
        rawtx = "0100000000010000000000000000066a047465737400000000"
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)

        assert_equal(len(dec_tx['vin']), 0)
        assert_equal(len(dec_tx['vout']), 1)

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])

        assert_greater_than(len(dec_tx['vin']), 0)  # at least one vin
        assert_equal(len(dec_tx['vout']), 2)  # one change output added

        ##################################################
        # test a fundrawtransaction using only watchonly #
        ##################################################
        watchonly_address = self.nodes[0].getnewaddress()
        watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
        watchonly_amount = Decimal(2000)
        self.nodes[3].importpubkey(watchonly_pubkey, "", True)
        watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
        self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)

        self.nodes[0].generate(1)
        self.sync_all()

        inputs = []
        outputs = {self.nodes[2].getnewaddress(): watchonly_amount / 2}
        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)

        result = self.nodes[3].fundrawtransaction(rawtx, {'includeWatching': True})
        res_dec = self.nodes[0].decoderawtransaction(result["hex"])
        assert_equal(len(res_dec["vin"]), 1)
        assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)

        assert("fee" in result.keys())
        assert_greater_than(result["changepos"], -1)

        ###############################################################
        # test fundrawtransaction using the entirety of watched funds #
        ###############################################################
        inputs = []
        outputs = {self.nodes[2].getnewaddress(): watchonly_amount}
        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)

        # Backward compatibility test (2nd param is includeWatching)
        result = self.nodes[3].fundrawtransaction(rawtx, True)
        res_dec = self.nodes[0].decoderawtransaction(result["hex"])
        assert_equal(len(res_dec["vin"]), 2)
        assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)

        assert_greater_than(result["fee"], 0)
        assert_greater_than(result["changepos"], -1)
        assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)

        signedtx = self.nodes[3].signrawtransaction(result["hex"])
        assert(not signedtx["complete"])
        signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
        assert(signedtx["complete"])
        self.nodes[0].sendrawtransaction(signedtx["hex"])
        self.nodes[0].generate(1)
        self.sync_all()

        #######################
        # Test feeRate option #
        #######################

        # Make sure there is exactly one input so coin selection can't skew the result
        assert_equal(len(self.nodes[3].listunspent(1)), 1)

        inputs = []
        outputs = {self.nodes[3].getnewaddress(): 1}
        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
        result = self.nodes[3].fundrawtransaction(rawtx)  # uses min_relay_tx_fee (set by settxfee)
        result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2 * min_relay_tx_fee})
        result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10 * min_relay_tx_fee})
        result_fee_rate = result['fee'] * 1000 / round_tx_size(count_bytes(result['hex']))
        assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate)
        assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate)

        #############################
        # Test address reuse option #
        #############################

        result3 = self.nodes[3].fundrawtransaction(rawtx, {"reserveChangeKey": False})
        res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
        changeaddress = ""
        for out in res_dec['vout']:
            if out['value'] > 1.0:
                changeaddress += out['scriptPubKey']['addresses'][0]
        assert(changeaddress != "")
        nextaddr = self.nodes[3].getnewaddress()
        # frt should not have removed the key from the keypool
        assert(changeaddress == nextaddr)

        result3 = self.nodes[3].fundrawtransaction(rawtx)
        res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
        changeaddress = ""
        for out in res_dec['vout']:
            if out['value'] > 1.0:
                changeaddress += out['scriptPubKey']['addresses'][0]
        assert(changeaddress != "")
        nextaddr = self.nodes[3].getnewaddress()
        # Now the change address key should be removed from the keypool
        assert(changeaddress != nextaddr)

        ######################################
        # Test subtractFeeFromOutputs option #
        ######################################

        # Make sure there is exactly one input so coin selection can't skew the result
        assert_equal(len(self.nodes[3].listunspent(1)), 1)

        inputs = []
        outputs = {self.nodes[2].getnewaddress(): 10}
        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)

        result = [self.nodes[3].fundrawtransaction(rawtx),  # uses min_relay_tx_fee (set by settxfee)
                  self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": []}),  # empty subtraction list
                  self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0]}),  # uses min_relay_tx_fee (set by settxfee)
                  self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2 * min_relay_tx_fee}),
                  self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2 * min_relay_tx_fee, "subtractFeeFromOutputs": [0]})]

        dec_tx = [self.nodes[3].decoderawtransaction(tx['hex']) for tx in result]
        output = [d['vout'][1 - r['changepos']]['value'] for d, r in zip(dec_tx, result)]
        change = [d['vout'][r['changepos']]['value'] for d, r in zip(dec_tx, result)]

        assert_equal(result[0]['fee'], result[1]['fee'], result[2]['fee'])
        assert_equal(result[3]['fee'], result[4]['fee'])
        assert_equal(change[0], change[1])
        assert_equal(output[0], output[1])
        assert_equal(output[0], output[2] + result[2]['fee'])
        assert_equal(change[0] + result[0]['fee'], change[2])
        assert_equal(output[3], output[4] + result[4]['fee'])
        assert_equal(change[3] + result[3]['fee'], change[4])

        inputs = []
        outputs = {self.nodes[2].getnewaddress(): value for value in (10, 11, 12, 13)}
        keys = list(outputs.keys())
        rawtx = self.nodes[3].createrawtransaction(inputs, outputs)

        result = [self.nodes[3].fundrawtransaction(rawtx),
                  # split the fee between outputs 0, 2, and 3, but not output 1
                  self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0, 2, 3]})]

        dec_tx = [self.nodes[3].decoderawtransaction(result[0]['hex']),
                  self.nodes[3].decoderawtransaction(result[1]['hex'])]

        # Nested list of non-change output amounts for each transaction
        output = [[out['value'] for i, out in enumerate(d['vout']) if i != r['changepos']]
                  for d, r in zip(dec_tx, result)]

        # List of differences in output amounts between normal and subtractFee transactions
        share = [o0 - o1 for o0, o1 in zip(output[0], output[1])]

        # output 1 is the same in both transactions
        assert_equal(share[1], 0)

        # the other 3 outputs are smaller as a result of subtractFeeFromOutputs
        assert_greater_than(share[0], 0)
        assert_greater_than(share[2], 0)
        assert_greater_than(share[3], 0)

        # outputs 2 and 3 take the same share of the fee
        assert_equal(share[2], share[3])

        # output 0 takes at least as much share of the fee, and no more than 2 satoshis more, than outputs 2 and 3
        assert_greater_than_or_equal(share[0], share[2])
        assert_greater_than_or_equal(share[2] + Decimal(2e-8), share[0])

        # the fee is the same in both transactions
        assert_equal(result[0]['fee'], result[1]['fee'])

        # the total subtracted from the outputs is equal to the fee
        assert_equal(share[0] + share[2] + share[3], result[0]['fee'])


if __name__ == '__main__':
    RawTransactionsTest().main()
cfg.pb.go
// Code generated by protoc-gen-gogo. DO NOT EDIT. // source: policy/v1beta1/cfg.proto package v1beta1 import ( fmt "fmt" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" github_com_gogo_protobuf_sortkeys "github.com/gogo/protobuf/sortkeys" github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" types "github.com/gogo/protobuf/types" io "io" _ "istio.io/gogo-genproto/googleapis/google/api" math "math" math_bits "math/bits" reflect "reflect" strconv "strconv" strings "strings" time "time" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf var _ = time.Kitchen // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package // Header operation type. type Rule_HeaderOperationTemplate_Operation int32 const ( // Replace a header by name. REPLACE Rule_HeaderOperationTemplate_Operation = 0 // Remove a header by name. Values are ignored. REMOVE Rule_HeaderOperationTemplate_Operation = 1 // Append values to the existing header values. APPEND Rule_HeaderOperationTemplate_Operation = 2 ) var Rule_HeaderOperationTemplate_Operation_name = map[int32]string{ 0: "REPLACE", 1: "REMOVE", 2: "APPEND", } var Rule_HeaderOperationTemplate_Operation_value = map[string]int32{ "REPLACE": 0, "REMOVE": 1, "APPEND": 2, } func (Rule_HeaderOperationTemplate_Operation) EnumDescriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{1, 0, 0} } // Fraction percentages support several fixed denominator values. type FractionalPercent_DenominatorType int32 const ( // 100. // // **Example**: 1/100 = 1%. HUNDRED FractionalPercent_DenominatorType = 0 // 10,000. // // **Example**: 1/10000 = 0.01%. 
TEN_THOUSAND FractionalPercent_DenominatorType = 1 ) var FractionalPercent_DenominatorType_name = map[int32]string{ 0: "HUNDRED", 1: "TEN_THOUSAND", } var FractionalPercent_DenominatorType_value = map[string]int32{ "HUNDRED": 0, "TEN_THOUSAND": 1, } func (FractionalPercent_DenominatorType) EnumDescriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{9, 0} } // AuthHeader specifies how to pass access token with authorization header. type Tls_AuthHeader int32 const ( // Access token is passed in authorization header as what it is // (authorization: some-token). PLAIN Tls_AuthHeader = 0 // Access token is passed to adapter as bearer token (i.e. authorization: // bearer some-token). BEARER Tls_AuthHeader = 1 ) var Tls_AuthHeader_name = map[int32]string{ 0: "PLAIN", 1: "BEARER", } var Tls_AuthHeader_value = map[string]int32{ "PLAIN": 0, "BEARER": 1, } func (Tls_AuthHeader) EnumDescriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{11, 0} } // AttributeManifest describes a set of Attributes produced by some component // of an Istio deployment. // // <!-- crd generation tags // +cue-gen:AttributeManifest:schema:istio.policy.v1beta1.AttributeManifest // +cue-gen:AttributeManifest:groupName:config.istio.io // +cue-gen:AttributeManifest:version:v1alpha2 // +cue-gen:AttributeManifest:storageVersion // +cue-gen:AttributeManifest:annotations:helm.sh/resource-policy=keep // +cue-gen:AttributeManifest:labels:app=mixer,chart=istio,heritage=Tiller,istio=core,package=istio.io.mixer,release=istio // +cue-gen:AttributeManifest:subresource:status // +cue-gen:AttributeManifest:scope:Namespaced // +cue-gen:AttributeManifest:resource:categories=istio-io,policy-istio-io // --> // // <!-- go code generation tags // +kubetype-gen // +kubetype-gen:groupVersion=config.istio.io/v1alpha2 // +genclient // +k8s:deepcopy-gen=true // --> type AttributeManifest struct { // The revision of this document. Assigned by server. 
Revision string `protobuf:"bytes,1,opt,name=revision,proto3" json:"revision,omitempty"` // Name of the component producing these attributes. This can be // the proxy (with the canonical name `istio-proxy`) or the name of an // `attributes` kind adapter in Mixer. Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` // The set of attributes this Istio component will be responsible for producing at runtime. // We map from attribute name to the attribute's specification. The name of an attribute, // which is how attributes are referred to in aspect configuration, must conform to: // // Name = IDENT { SEPARATOR IDENT }; // // Where `IDENT` must match the regular expression `[a-z][a-z0-9]+` and `SEPARATOR` must // match the regular expression `[\.-]`. // // Attribute names must be unique within a single Istio deployment. The set of canonical // attributes are described at [here](https://istio.io/docs/reference/config/policy-and-telemetry/attribute-vocabulary/). // Attributes not in that list should be named with a component-specific suffix such as // `request.count-my.component`. 
Attributes map[string]*AttributeManifest_AttributeInfo `protobuf:"bytes,3,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (m *AttributeManifest) Reset() { *m = AttributeManifest{} } func (*AttributeManifest) ProtoMessage() {} func (*AttributeManifest) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{0} } func (m *AttributeManifest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *AttributeManifest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_AttributeManifest.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *AttributeManifest) XXX_Merge(src proto.Message) { xxx_messageInfo_AttributeManifest.Merge(m, src) } func (m *AttributeManifest) XXX_Size() int { return m.Size() } func (m *AttributeManifest) XXX_DiscardUnknown() { xxx_messageInfo_AttributeManifest.DiscardUnknown(m) } var xxx_messageInfo_AttributeManifest proto.InternalMessageInfo func (m *AttributeManifest) GetRevision() string { if m != nil { return m.Revision } return "" } func (m *AttributeManifest) GetName() string { if m != nil { return m.Name } return "" } func (m *AttributeManifest) GetAttributes() map[string]*AttributeManifest_AttributeInfo { if m != nil { return m.Attributes } return nil } // AttributeInfo describes the schema of an Istio `Attribute`. // // # Istio Attributes // // Istio uses `attributes` to describe runtime activities of Istio services. // An Istio attribute carries a specific piece of information about an activity, // such as the error code of an API request, the latency of an API request, or the // original IP address of a TCP connection. The attributes are often generated // and consumed by different services. 
For example, a frontend service can // generate an authenticated user attribute and pass it to a backend service for // access control purpose. // // To simplify the system and improve developer experience, Istio uses // shared attribute definitions across all components. For example, the same // authenticated user attribute will be used for logging, monitoring, analytics, // billing, access control, auditing. Many Istio components provide their // functionality by collecting, generating, and operating on attributes. // For example, the proxy collects the error code attribute, and the logging // stores it into a log. // // # Design // // Each Istio attribute must conform to an `AttributeInfo` in an // `AttributeManifest` in the current Istio deployment at runtime. An // [`AttributeInfo`][istio.policy.v1beta1] is used to define an attribute's // metadata: the type of its value and a detailed description that explains // the semantics of the attribute type. Each attribute's name is globally unique; // in other words an attribute name can only appear once across all manifests. // // The runtime presentation of an attribute is intentionally left out of this // specification, because passing attribute using JSON, XML, or Protocol Buffers // does not change the semantics of the attribute. Different implementations // can choose different representations based on their needs. // // # HTTP Mapping // // Because many systems already have REST APIs, it makes sense to define a // standard HTTP mapping for Istio attributes that are compatible with typical // REST APIs. The design is to map one attribute to one HTTP header, the // attribute name and value becomes the HTTP header name and value. The actual // encoding scheme will be decided later. type AttributeManifest_AttributeInfo struct { // A human-readable description of the attribute's purpose. 
Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` // The type of data carried by this attribute. ValueType ValueType `protobuf:"varint,2,opt,name=value_type,json=valueType,proto3,enum=istio.policy.v1beta1.ValueType" json:"value_type,omitempty"` } func (m *AttributeManifest_AttributeInfo) Reset() { *m = AttributeManifest_AttributeInfo{} } func (*AttributeManifest_AttributeInfo) ProtoMessage() {} func (*AttributeManifest_AttributeInfo) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{0, 0} } func (m *AttributeManifest_AttributeInfo) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *AttributeManifest_AttributeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_AttributeManifest_AttributeInfo.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *AttributeManifest_AttributeInfo) XXX_Merge(src proto.Message) { xxx_messageInfo_AttributeManifest_AttributeInfo.Merge(m, src) } func (m *AttributeManifest_AttributeInfo) XXX_Size() int { return m.Size() } func (m *AttributeManifest_AttributeInfo) XXX_DiscardUnknown() { xxx_messageInfo_AttributeManifest_AttributeInfo.DiscardUnknown(m) } var xxx_messageInfo_AttributeManifest_AttributeInfo proto.InternalMessageInfo func (m *AttributeManifest_AttributeInfo) GetDescription() string { if m != nil { return m.Description } return "" } func (m *AttributeManifest_AttributeInfo) GetValueType() ValueType { if m != nil { return m.ValueType } return VALUE_TYPE_UNSPECIFIED } // A Rule is a selector and a set of intentions to be executed when the // selector is `true` // // The following example instructs Mixer to invoke `prometheus-handler` handler for all services and pass it the // instance constructed using the 'RequestCountByService' instance. 
// // ```yaml // - match: match(destination.service.host, "*") // actions: // - handler: prometheus-handler // instances: // - RequestCountByService // ``` // // <!-- crd generation tags // +cue-gen:Rule:schema:istio.policy.v1beta1.Rule // +cue-gen:Rule:groupName:config.istio.io // +cue-gen:Rule:version:v1alpha2 // +cue-gen:Rule:storageVersion // +cue-gen:Rule:annotations:helm.sh/resource-policy=keep // +cue-gen:Rule:labels:app=mixer,chart=istio,heritage=Tiller,istio=core,package=istio.io.mixer,release=istio // +cue-gen:Rule:subresource:status // +cue-gen:Rule:scope:Namespaced // +cue-gen:Rule:resource:categories=istio-io,policy-istio-io // --> // // <!-- go code generation tags // +kubetype-gen // +kubetype-gen:groupVersion=config.istio.io/v1alpha2 // +genclient // +k8s:deepcopy-gen=true // --> type Rule struct { // Match is an attribute based predicate. When Mixer receives a // request it evaluates the match expression and executes all the associated `actions` // if the match evaluates to true. // // A few example match: // // * an empty match evaluates to `true` // * `true`, a boolean literal; a rule with this match will always be executed // * `match(destination.service.host, "ratings.*")` selects any request targeting a service whose // name starts with "ratings" // * `attr1 == "20" && attr2 == "30"` logical AND, OR, and NOT are also available Match string `protobuf:"bytes,1,opt,name=match,proto3" json:"match,omitempty"` // The actions that will be executed when match evaluates to `true`. Actions []*Action `protobuf:"bytes,2,rep,name=actions,proto3" json:"actions,omitempty"` // Templatized operations on the request headers using values produced by the // rule actions. Require the check action result to be OK. 
RequestHeaderOperations []*Rule_HeaderOperationTemplate `protobuf:"bytes,3,rep,name=request_header_operations,json=requestHeaderOperations,proto3" json:"request_header_operations,omitempty"` // Templatized operations on the response headers using values produced by the // rule actions. Require the check action result to be OK. ResponseHeaderOperations []*Rule_HeaderOperationTemplate `protobuf:"bytes,4,rep,name=response_header_operations,json=responseHeaderOperations,proto3" json:"response_header_operations,omitempty"` // $hide_from_docs // Provides the ability to add a sampling configuration for Mixer rules. This sampling // will limit the scenarios in which the `actions` of the rule are executed. The sampling will // only take place after a `match` predicate has evaluated to true. // // Default behavior is no sampling (the `actions` are executed for all requests). Sampling *Sampling `protobuf:"bytes,5,opt,name=sampling,proto3" json:"sampling,omitempty"` } func (m *Rule) Reset() { *m = Rule{} } func (*Rule) ProtoMessage() {} func (*Rule) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{1} } func (m *Rule) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Rule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_Rule.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *Rule) XXX_Merge(src proto.Message) { xxx_messageInfo_Rule.Merge(m, src) } func (m *Rule) XXX_Size() int { return m.Size() } func (m *Rule) XXX_DiscardUnknown() { xxx_messageInfo_Rule.DiscardUnknown(m) } var xxx_messageInfo_Rule proto.InternalMessageInfo func (m *Rule) GetMatch() string { if m != nil { return m.Match } return "" } func (m *Rule) GetActions() []*Action { if m != nil { return m.Actions } return nil } func (m *Rule) GetRequestHeaderOperations() []*Rule_HeaderOperationTemplate { if m != nil { return 
m.RequestHeaderOperations } return nil } func (m *Rule) GetResponseHeaderOperations() []*Rule_HeaderOperationTemplate { if m != nil { return m.ResponseHeaderOperations } return nil } func (m *Rule) GetSampling() *Sampling { if m != nil { return m.Sampling } return nil } // A template for an HTTP header manipulation. Values in the template are expressions // that may reference action outputs by name. For example, if an action `x` produces an output // with a field `f`, then the header value expressions may use attribute `x.output.f` to reference // the field value: // // ```yaml // request_header_operations: // - name: x-istio-header // values: // - x.output.f // ``` // // If the header value expression evaluates to an empty string, and the operation is to either replace // or append a header, then the operation is not applied. This permits conditional behavior on behalf of the // adapter to optionally modify the headers. type Rule_HeaderOperationTemplate struct { // Header name literal value. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Header value expressions. Values []string `protobuf:"bytes,2,rep,name=values,proto3" json:"values,omitempty"` // Header operation type. Default operation is to replace the value of the header by name. 
Operation Rule_HeaderOperationTemplate_Operation `protobuf:"varint,3,opt,name=operation,proto3,enum=istio.policy.v1beta1.Rule_HeaderOperationTemplate_Operation" json:"operation,omitempty"` } func (m *Rule_HeaderOperationTemplate) Reset() { *m = Rule_HeaderOperationTemplate{} } func (*Rule_HeaderOperationTemplate) ProtoMessage() {} func (*Rule_HeaderOperationTemplate) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{1, 0} } func (m *Rule_HeaderOperationTemplate) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Rule_HeaderOperationTemplate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_Rule_HeaderOperationTemplate.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *Rule_HeaderOperationTemplate) XXX_Merge(src proto.Message) { xxx_messageInfo_Rule_HeaderOperationTemplate.Merge(m, src) } func (m *Rule_HeaderOperationTemplate) XXX_Size() int { return m.Size() } func (m *Rule_HeaderOperationTemplate) XXX_DiscardUnknown() { xxx_messageInfo_Rule_HeaderOperationTemplate.DiscardUnknown(m) } var xxx_messageInfo_Rule_HeaderOperationTemplate proto.InternalMessageInfo func (m *Rule_HeaderOperationTemplate) GetName() string { if m != nil { return m.Name } return "" } func (m *Rule_HeaderOperationTemplate) GetValues() []string { if m != nil { return m.Values } return nil } func (m *Rule_HeaderOperationTemplate) GetOperation() Rule_HeaderOperationTemplate_Operation { if m != nil { return m.Operation } return REPLACE } // Action describes which [Handler][istio.policy.v1beta1.Handler] to invoke and what data to pass to it for processing. // // The following example instructs Mixer to invoke 'prometheus-handler' handler and pass it the object // constructed using the instance 'RequestCountByService'. 
// // ```yaml // handler: prometheus-handler // instances: // - RequestCountByService // ``` type Action struct { // Fully qualified name of the handler to invoke. // Must match the `name` of a [Handler][istio.policy.v1beta1.Handler.name]. Handler string `protobuf:"bytes,2,opt,name=handler,proto3" json:"handler,omitempty"` // Each value must match the fully qualified name of the // [Instance][istio.policy.v1beta1.Instance.name]s. // Referenced instances are evaluated by resolving the attributes/literals for all the fields. // The constructed objects are then passed to the `handler` referenced within this action. Instances []string `protobuf:"bytes,3,rep,name=instances,proto3" json:"instances,omitempty"` // A handle to refer to the results of the action. Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` } func (m *Action) Reset() { *m = Action{} } func (*Action) ProtoMessage() {} func (*Action) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{2} } func (m *Action) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Action) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_Action.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *Action) XXX_Merge(src proto.Message) { xxx_messageInfo_Action.Merge(m, src) } func (m *Action) XXX_Size() int { return m.Size() } func (m *Action) XXX_DiscardUnknown() { xxx_messageInfo_Action.DiscardUnknown(m) } var xxx_messageInfo_Action proto.InternalMessageInfo func (m *Action) GetHandler() string { if m != nil { return m.Handler } return "" } func (m *Action) GetInstances() []string { if m != nil { return m.Instances } return nil } func (m *Action) GetName() string { if m != nil { return m.Name } return "" } // An Instance tells Mixer how to create instances for particular template. 
// // Instance is defined by the operator. Instance is defined relative to a known // template. Their purpose is to tell Mixer how to use attributes or literals to produce // instances of the specified template at runtime. // // The following example instructs Mixer to construct an instance associated with template // 'istio.mixer.adapter.metric.Metric'. It provides a mapping from the template's fields to expressions. // Instances produced with this instance can be referenced by [Actions][istio.policy.v1beta1.Action] using name // 'RequestCountByService' // // ```yaml // - name: RequestCountByService // template: istio.mixer.adapter.metric.Metric // params: // value: 1 // dimensions: // source: source.name // destination_ip: destination.ip // ``` // // <!-- crd generation tags // +cue-gen:Instance:schema:istio.policy.v1beta1.Instance // +cue-gen:Instance:groupName:config.istio.io // +cue-gen:Instance:version:v1alpha2 // +cue-gen:Instance:storageVersion // +cue-gen:Instance:annotations:helm.sh/resource-policy=keep // +cue-gen:Instance:labels:app=mixer,chart=istio,heritage=Tiller,istio=mixer-instance,package=instance,release=istio // +cue-gen:Instance:subresource:status // +cue-gen:Instance:scope:Namespaced // +cue-gen:Instance:resource:categories=istio-io,policy-istio-io // --> // // <!-- go code generation tags // +kubetype-gen // +kubetype-gen:groupVersion=config.istio.io/v1alpha2 // +genclient // +k8s:deepcopy-gen=true // --> type Instance struct { // The name of this instance // // Must be unique amongst other Instances in scope. Used by [Action][istio.policy.v1beta1.Action] to refer // to an instance produced by this instance. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // The name of the compiled in template this instance creates instances for. For referencing non compiled-in // templates, use the `template` field instead. // // The value must match the name of the available template Mixer is built with. 
CompiledTemplate string `protobuf:"bytes,67794676,opt,name=compiled_template,json=compiledTemplate,proto3" json:"compiled_template,omitempty"` // The name of the template this instance creates instances for. For referencing compiled-in // templates, use the `compiled_template` field instead. // // The value must match the name of the available template in scope. Template string `protobuf:"bytes,2,opt,name=template,proto3" json:"template,omitempty"` // Depends on referenced template. Struct representation of a // proto defined by the template; this varies depending on the value of field `template`. Params *types.Struct `protobuf:"bytes,3,opt,name=params,proto3" json:"params,omitempty"` // Defines attribute bindings to map the output of attribute-producing adapters back into // the attribute space. The variable `output` refers to the output template instance produced // by the adapter. // The following example derives `source.namespace` from `source.uid` in the context of Kubernetes: // ```yaml // params: // # Pass the required attribute data to the adapter // source_uid: source.uid | "" // attribute_bindings: // # Fill the new attributes from the adapter produced output // source.namespace: output.source_namespace // ``` AttributeBindings map[string]string `protobuf:"bytes,4,rep,name=attribute_bindings,json=attributeBindings,proto3" json:"attribute_bindings,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (m *Instance) Reset() { *m = Instance{} } func (*Instance) ProtoMessage() {} func (*Instance) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{3} } func (m *Instance) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Instance) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_Instance.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return 
b[:n], nil } } func (m *Instance) XXX_Merge(src proto.Message) { xxx_messageInfo_Instance.Merge(m, src) } func (m *Instance) XXX_Size() int { return m.Size() } func (m *Instance) XXX_DiscardUnknown() { xxx_messageInfo_Instance.DiscardUnknown(m) } var xxx_messageInfo_Instance proto.InternalMessageInfo func (m *Instance) GetName() string { if m != nil { return m.Name } return "" } func (m *Instance) GetCompiledTemplate() string { if m != nil { return m.CompiledTemplate } return "" } func (m *Instance) GetTemplate() string { if m != nil { return m.Template } return "" } func (m *Instance) GetParams() *types.Struct { if m != nil { return m.Params } return nil } func (m *Instance) GetAttributeBindings() map[string]string { if m != nil { return m.AttributeBindings } return nil } // Handler allows the operator to configure a specific adapter implementation. // Each adapter implementation defines its own `params` proto. // // In the following example we define a `metrics` handler for the `prometheus` adapter. // The example is in the form of a Kubernetes resource: // * The `metadata.name` is the name of the handler // * The `kind` refers to the adapter name // * The `spec` block represents adapter-specific configuration as well as the connection information // // ```yaml // # Sample-1: No connection specified (for compiled in adapters) // # Note: if connection information is not specified, the adapter configuration is directly inside // # `spec` block. 
This is going to be DEPRECATED in favor of Sample-2 // apiVersion: "config.istio.io/v1alpha2" // kind: handler // metadata: // name: requestcount // namespace: istio-system // spec: // compiledAdapter: prometheus // params: // metrics: // - name: request_count // instance_name: requestcount.metric.istio-system // kind: COUNTER // label_names: // - source_service // - source_version // - destination_service // - destination_version // --- // # Sample-2: With connection information (for out-of-process adapters) // # Note: Unlike sample-1, the adapter configuration is parallel to `connection` and is nested inside `param` block. // apiVersion: "config.istio.io/v1alpha2" // kind: handler // metadata: // name: requestcount // namespace: istio-system // spec: // compiledAdapter: prometheus // params: // param: // metrics: // - name: request_count // instance_name: requestcount.metric.istio-system // kind: COUNTER // label_names: // - source_service // - source_version // - destination_service // - destination_version // connection: // address: localhost:8090 // --- // ``` // // <!-- crd generation tags // +cue-gen:Handler:schema:istio.policy.v1beta1.Handler // +cue-gen:Handler:groupName:config.istio.io // +cue-gen:Handler:version:v1alpha2 // +cue-gen:Handler:storageVersion // +cue-gen:Handler:annotations:helm.sh/resource-policy=keep // +cue-gen:Handler:labels:app=mixer,chart=istio,heritage=Tiller,istio=mixer-handler,package=handler,release=istio // +cue-gen:Handler:subresource:status // +cue-gen:Handler:scope:Namespaced // +cue-gen:Handler:resource:categories=istio-io,policy-istio-io // --> // // <!-- go code generation tags // +kubetype-gen // +kubetype-gen:groupVersion=config.istio.io/v1alpha2 // +genclient // +k8s:deepcopy-gen=true // --> type Handler struct { // Must be unique in the entire Mixer configuration. Used by [Actions][istio.policy.v1beta1.Action.handler] // to refer to this handler. 
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // The name of the compiled in adapter this handler instantiates. For referencing non compiled-in // adapters, use the `adapter` field instead. // // The value must match the name of the available adapter Mixer is built with. An adapter's name is typically a // constant in its code. CompiledAdapter string `protobuf:"bytes,67794676,opt,name=compiled_adapter,json=compiledAdapter,proto3" json:"compiled_adapter,omitempty"` // The name of a specific adapter implementation. For referencing compiled-in // adapters, use the `compiled_adapter` field instead. // // An adapter's implementation name is typically a constant in its code. Adapter string `protobuf:"bytes,2,opt,name=adapter,proto3" json:"adapter,omitempty"` // Depends on adapter implementation. Struct representation of a // proto defined by the adapter implementation; this varies depending on the value of field `adapter`. Params *types.Struct `protobuf:"bytes,3,opt,name=params,proto3" json:"params,omitempty"` // Information on how to connect to the out-of-process adapter. // This is used if the adapter is not compiled into Mixer binary and is running as a separate process. 
Connection *Connection `protobuf:"bytes,4,opt,name=connection,proto3" json:"connection,omitempty"` } func (m *Handler) Reset() { *m = Handler{} } func (*Handler) ProtoMessage() {} func (*Handler) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{4} } func (m *Handler) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Handler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_Handler.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *Handler) XXX_Merge(src proto.Message) { xxx_messageInfo_Handler.Merge(m, src) } func (m *Handler) XXX_Size() int { return m.Size() } func (m *Handler) XXX_DiscardUnknown() { xxx_messageInfo_Handler.DiscardUnknown(m) } var xxx_messageInfo_Handler proto.InternalMessageInfo func (m *Handler) GetName() string { if m != nil { return m.Name } return "" } func (m *Handler) GetCompiledAdapter() string { if m != nil { return m.CompiledAdapter } return "" } func (m *Handler) GetAdapter() string { if m != nil { return m.Adapter } return "" } func (m *Handler) GetParams() *types.Struct { if m != nil { return m.Params } return nil } func (m *Handler) GetConnection() *Connection { if m != nil { return m.Connection } return nil } // Connection allows the operator to specify the endpoint for out-of-process infrastructure backend. // Connection is part of the handler custom resource and is specified alongside adapter specific configuration. type Connection struct { // The address of the backend. Address string `protobuf:"bytes,2,opt,name=address,proto3" json:"address,omitempty"` // Timeout for remote calls to the backend. Timeout *time.Duration `protobuf:"bytes,3,opt,name=timeout,proto3,stdduration" json:"timeout,omitempty"` // Auth config for the connection to the backend. If omitted, plain text will // be used. 
Authentication *Authentication `protobuf:"bytes,4,opt,name=authentication,proto3" json:"authentication,omitempty"` } func (m *Connection) Reset() { *m = Connection{} } func (*Connection) ProtoMessage() {} func (*Connection) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{5} } func (m *Connection) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Connection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_Connection.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *Connection) XXX_Merge(src proto.Message) { xxx_messageInfo_Connection.Merge(m, src) } func (m *Connection) XXX_Size() int { return m.Size() } func (m *Connection) XXX_DiscardUnknown() { xxx_messageInfo_Connection.DiscardUnknown(m) } var xxx_messageInfo_Connection proto.InternalMessageInfo func (m *Connection) GetAddress() string { if m != nil { return m.Address } return "" } func (m *Connection) GetTimeout() *time.Duration { if m != nil { return m.Timeout } return nil } func (m *Connection) GetAuthentication() *Authentication { if m != nil { return m.Authentication } return nil } // $hide_from_docs // Sampling provides configuration of sampling strategies for Rule actions. // Multiple sampling strategies are supported. When multiple strategies are configured, // a request must be selected by all configured sampling strategies. type Sampling struct { // Provides filtering of actions based on random selection per request. Random *RandomSampling `protobuf:"bytes,1,opt,name=random,proto3" json:"random,omitempty"` // Provides filtering of actions based on number of requests observed within // a configured time window. 
RateLimit *RateLimitSampling `protobuf:"bytes,2,opt,name=rate_limit,json=rateLimit,proto3" json:"rate_limit,omitempty"` } func (m *Sampling) Reset() { *m = Sampling{} } func (*Sampling) ProtoMessage() {} func (*Sampling) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{6} } func (m *Sampling) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Sampling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_Sampling.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *Sampling) XXX_Merge(src proto.Message) { xxx_messageInfo_Sampling.Merge(m, src) } func (m *Sampling) XXX_Size() int { return m.Size() } func (m *Sampling) XXX_DiscardUnknown() { xxx_messageInfo_Sampling.DiscardUnknown(m) } var xxx_messageInfo_Sampling proto.InternalMessageInfo func (m *Sampling) GetRandom() *RandomSampling { if m != nil { return m.Random } return nil } func (m *Sampling) GetRateLimit() *RateLimitSampling { if m != nil { return m.RateLimit } return nil } // $hide_from_docs // RandomSampling will filter based on the comparison of a randomly-generated value // against the threshold provided. // // Example: To restrict the execution of Rule actions to only 12.5% of requests, the // `sampling_rate` would be set `12.5`. // // This sampling configuration is meant to closely match the access log RuntimeFilter configuration // [supported by Envoy](https://github.com/envoyproxy/data-plane-api/blob/master/envoy/config/filter/accesslog/v2/accesslog.proto#L113) type RandomSampling struct { // Specifies an attribute expression to use to override the numerator in the `percent_sampled` field. // If this value is set, but no value is found OR if that value is not a numeric value, then // the derived sampling rate will be 0 (meaning no `Action`s are executed for a `Rule`). 
AttributeExpression string `protobuf:"bytes,1,opt,name=attribute_expression,json=attributeExpression,proto3" json:"attribute_expression,omitempty"` // The default sampling rate, expressed as a percentage. Defaults to 0% with a denominator // of 100. PercentSampled *FractionalPercent `protobuf:"bytes,2,opt,name=percent_sampled,json=percentSampled,proto3" json:"percent_sampled,omitempty"` // By default sampling will be based on the value of the request header `x-request-id`. // This behavior will cause consistent sampling across `Rule`s and for the full trace of a // request through a mesh (across hosts). If that value is not present and/or // `use_independent_randomness` is set to true, the sampling will be done based on the value of // attribute specified in `attribute_epxression`. If that attribute does not exist, the system // will behave as if the sampling rate was 0 (meaning no `Action`s are executed for a `Rule`). UseIndependentRandomness bool `protobuf:"varint,3,opt,name=use_independent_randomness,json=useIndependentRandomness,proto3" json:"use_independent_randomness,omitempty"` } func (m *RandomSampling) Reset() { *m = RandomSampling{} } func (*RandomSampling) ProtoMessage() {} func (*RandomSampling) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{7} } func (m *RandomSampling) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *RandomSampling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_RandomSampling.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *RandomSampling) XXX_Merge(src proto.Message) { xxx_messageInfo_RandomSampling.Merge(m, src) } func (m *RandomSampling) XXX_Size() int { return m.Size() } func (m *RandomSampling) XXX_DiscardUnknown() { xxx_messageInfo_RandomSampling.DiscardUnknown(m) } var xxx_messageInfo_RandomSampling proto.InternalMessageInfo 
// NOTE(review): generated protobuf code (gogo/protobuf) — regenerate from
// policy/v1beta1/cfg.proto rather than hand-editing. Getters below follow the
// generated convention of tolerating a nil receiver and returning the zero value.

func (m *RandomSampling) GetAttributeExpression() string {
	if m != nil {
		return m.AttributeExpression
	}
	return ""
}

func (m *RandomSampling) GetPercentSampled() *FractionalPercent {
	if m != nil {
		return m.PercentSampled
	}
	return nil
}

func (m *RandomSampling) GetUseIndependentRandomness() bool {
	if m != nil {
		return m.UseIndependentRandomness
	}
	return false
}

// $hide_from_docs
// RateLimitSampling provides the ability to limit the number of Rule action executions that
// occur over a period of time.
type RateLimitSampling struct {
	// Window in which to enforce the sampling rate.
	SamplingDuration time.Duration `protobuf:"bytes,1,opt,name=sampling_duration,json=samplingDuration,proto3,stdduration" json:"sampling_duration"`
	// Number of entries to allow during the `sampling_duration` before sampling is enforced.
	MaxUnsampledEntries int64 `protobuf:"varint,2,opt,name=max_unsampled_entries,json=maxUnsampledEntries,proto3" json:"max_unsampled_entries,omitempty"`
	// The rate at which to sample entries once the unsampled limit has been reached. Sampling will be enforced
	// as 1 per every `sampling_rate` entries allowed.
	SamplingRate int64 `protobuf:"varint,3,opt,name=sampling_rate,json=samplingRate,proto3" json:"sampling_rate,omitempty"`
}

func (m *RateLimitSampling) Reset()      { *m = RateLimitSampling{} }
func (*RateLimitSampling) ProtoMessage() {}
func (*RateLimitSampling) Descriptor() ([]byte, []int) {
	// Message index 8 within this file's descriptor.
	return fileDescriptor_dfb7777e81b6b919, []int{8}
}
func (m *RateLimitSampling) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *RateLimitSampling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		// Deterministic output is delegated to the reflection-based codec.
		return xxx_messageInfo_RateLimitSampling.Marshal(b, m, deterministic)
	} else {
		// Fast path: marshal backwards into the existing buffer's capacity.
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *RateLimitSampling) XXX_Merge(src proto.Message) {
	xxx_messageInfo_RateLimitSampling.Merge(m, src)
}
func (m *RateLimitSampling) XXX_Size() int {
	return m.Size()
}
func (m *RateLimitSampling) XXX_DiscardUnknown() {
	xxx_messageInfo_RateLimitSampling.DiscardUnknown(m)
}

var xxx_messageInfo_RateLimitSampling proto.InternalMessageInfo

func (m *RateLimitSampling) GetSamplingDuration() time.Duration {
	if m != nil {
		return m.SamplingDuration
	}
	return 0
}

func (m *RateLimitSampling) GetMaxUnsampledEntries() int64 {
	if m != nil {
		return m.MaxUnsampledEntries
	}
	return 0
}

func (m *RateLimitSampling) GetSamplingRate() int64 {
	if m != nil {
		return m.SamplingRate
	}
	return 0
}

// $hide_from_docs
// A fractional percentage is used in cases in which for performance reasons performing floating
// point to integer conversions during randomness calculations is undesirable. The message includes
// both a numerator and denominator that together determine the final fractional value.
//
// * **Example**: 1/100 = 1%.
// * **Example**: 3/10000 = 0.03%.
type FractionalPercent struct {
	// Specifies the numerator. Defaults to 0.
	Numerator uint32 `protobuf:"varint,1,opt,name=numerator,proto3" json:"numerator,omitempty"`
	// Specifies the denominator. If the denominator specified is less than the numerator, the final
	// fractional percentage is capped at 1 (100%).
	Denominator FractionalPercent_DenominatorType `protobuf:"varint,2,opt,name=denominator,proto3,enum=istio.policy.v1beta1.FractionalPercent_DenominatorType" json:"denominator,omitempty"`
}

func (m *FractionalPercent) Reset()      { *m = FractionalPercent{} }
func (*FractionalPercent) ProtoMessage() {}
func (*FractionalPercent) Descriptor() ([]byte, []int) {
	// Message index 9 within this file's descriptor.
	return fileDescriptor_dfb7777e81b6b919, []int{9}
}
func (m *FractionalPercent) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *FractionalPercent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_FractionalPercent.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *FractionalPercent) XXX_Merge(src proto.Message) {
	xxx_messageInfo_FractionalPercent.Merge(m, src)
}
func (m *FractionalPercent) XXX_Size() int {
	return m.Size()
}
func (m *FractionalPercent) XXX_DiscardUnknown() {
	xxx_messageInfo_FractionalPercent.DiscardUnknown(m)
}

var xxx_messageInfo_FractionalPercent proto.InternalMessageInfo

func (m *FractionalPercent) GetNumerator() uint32 {
	if m != nil {
		return m.Numerator
	}
	return 0
}

func (m *FractionalPercent) GetDenominator() FractionalPercent_DenominatorType {
	if m != nil {
		return m.Denominator
	}
	// HUNDRED is the zero value of the DenominatorType enum, matching proto3
	// default semantics for an unset field.
	return HUNDRED
}

// Authentication allows the operator to specify the authentication of
// connections to out-of-process infrastructure backend.
type Authentication struct {
	// Types that are valid to be assigned to AuthType:
	//	*Authentication_Tls
	//	*Authentication_Mutual
	AuthType isAuthentication_AuthType `protobuf_oneof:"auth_type"`
}

func (m *Authentication) Reset()      { *m = Authentication{} }
func (*Authentication) ProtoMessage() {}
func (*Authentication) Descriptor() ([]byte, []int) {
	// Message index 10 within this file's descriptor.
	return fileDescriptor_dfb7777e81b6b919, []int{10}
}
func (m *Authentication) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *Authentication) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		// Deterministic output is delegated to the reflection-based codec.
		return xxx_messageInfo_Authentication.Marshal(b, m, deterministic)
	} else {
		// Fast path: marshal backwards into the existing buffer's capacity.
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *Authentication) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Authentication.Merge(m, src)
}
func (m *Authentication) XXX_Size() int {
	return m.Size()
}
func (m *Authentication) XXX_DiscardUnknown() {
	xxx_messageInfo_Authentication.DiscardUnknown(m)
}

var xxx_messageInfo_Authentication proto.InternalMessageInfo

// isAuthentication_AuthType is the sealed interface implemented by the
// generated oneof wrapper types below.
type isAuthentication_AuthType interface {
	isAuthentication_AuthType()
	Equal(interface{}) bool
	MarshalTo([]byte) (int, error)
	Size() int
}

// Authentication_Tls wraps the `tls` branch of the `auth_type` oneof.
type Authentication_Tls struct {
	Tls *Tls `protobuf:"bytes,1,opt,name=tls,proto3,oneof"`
}

// Authentication_Mutual wraps the `mutual` branch of the `auth_type` oneof.
type Authentication_Mutual struct {
	Mutual *Mutual `protobuf:"bytes,2,opt,name=mutual,proto3,oneof"`
}

func (*Authentication_Tls) isAuthentication_AuthType()    {}
func (*Authentication_Mutual) isAuthentication_AuthType() {}

func (m *Authentication) GetAuthType() isAuthentication_AuthType {
	if m != nil {
		return m.AuthType
	}
	return nil
}

// GetTls returns the Tls branch, or nil if a different (or no) branch is set.
func (m *Authentication) GetTls() *Tls {
	if x, ok := m.GetAuthType().(*Authentication_Tls); ok {
		return x.Tls
	}
	return nil
}

// GetMutual returns the Mutual branch, or nil if a different (or no) branch is set.
func (m *Authentication) GetMutual() *Mutual {
	if x, ok := m.GetAuthType().(*Authentication_Mutual); ok {
		return x.Mutual
	}
	return nil
}

// XXX_OneofWrappers is for the internal use of the proto package.
func (*Authentication) XXX_OneofWrappers() []interface{} { return []interface{}{ (*Authentication_Tls)(nil), (*Authentication_Mutual)(nil), } } // Tls let operator specify client authentication setting when TLS is used for // connection to the backend. type Tls struct { // The path to the file holding additional CA certificates to well known // public certs. CaCertificates string `protobuf:"bytes,1,opt,name=ca_certificates,json=caCertificates,proto3" json:"ca_certificates,omitempty"` // Specifies how to get access token for client authn and authz. // // Types that are valid to be assigned to TokenSource: // *Tls_TokenPath // *Tls_Oauth TokenSource isTls_TokenSource `protobuf_oneof:"token_source"` // Specifies how to pass access token to the adapter backend. // // Types that are valid to be assigned to TokenType: // *Tls_AuthHeader_ // *Tls_CustomHeader TokenType isTls_TokenType `protobuf_oneof:"token_type"` // Used to configure mixer TLS client to verify the hostname on the returned // certificates. It is also included in the client's handshake to support SNI. 
ServerName string `protobuf:"bytes,6,opt,name=server_name,json=serverName,proto3" json:"server_name,omitempty"` } func (m *Tls) Reset() { *m = Tls{} } func (*Tls) ProtoMessage() {} func (*Tls) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{11} } func (m *Tls) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Tls) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_Tls.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *Tls) XXX_Merge(src proto.Message) { xxx_messageInfo_Tls.Merge(m, src) } func (m *Tls) XXX_Size() int { return m.Size() } func (m *Tls) XXX_DiscardUnknown() { xxx_messageInfo_Tls.DiscardUnknown(m) } var xxx_messageInfo_Tls proto.InternalMessageInfo type isTls_TokenSource interface { isTls_TokenSource() Equal(interface{}) bool MarshalTo([]byte) (int, error) Size() int } type isTls_TokenType interface { isTls_TokenType() Equal(interface{}) bool MarshalTo([]byte) (int, error) Size() int } type Tls_TokenPath struct { TokenPath string `protobuf:"bytes,2,opt,name=token_path,json=tokenPath,proto3,oneof"` } type Tls_Oauth struct { Oauth *OAuth `protobuf:"bytes,3,opt,name=oauth,proto3,oneof"` } type Tls_AuthHeader_ struct { AuthHeader Tls_AuthHeader `protobuf:"varint,4,opt,name=auth_header,json=authHeader,proto3,enum=istio.policy.v1beta1.Tls_AuthHeader,oneof"` } type Tls_CustomHeader struct { CustomHeader string `protobuf:"bytes,5,opt,name=custom_header,json=customHeader,proto3,oneof"` } func (*Tls_TokenPath) isTls_TokenSource() {} func (*Tls_Oauth) isTls_TokenSource() {} func (*Tls_AuthHeader_) isTls_TokenType() {} func (*Tls_CustomHeader) isTls_TokenType() {} func (m *Tls) GetTokenSource() isTls_TokenSource { if m != nil { return m.TokenSource } return nil } func (m *Tls) GetTokenType() isTls_TokenType { if m != nil { return m.TokenType } return nil } func (m *Tls) 
GetCaCertificates() string { if m != nil { return m.CaCertificates } return "" } func (m *Tls) GetTokenPath() string { if x, ok := m.GetTokenSource().(*Tls_TokenPath); ok { return x.TokenPath } return "" } func (m *Tls) GetOauth() *OAuth { if x, ok := m.GetTokenSource().(*Tls_Oauth); ok { return x.Oauth } return nil } func (m *Tls) GetAuthHeader() Tls_AuthHeader { if x, ok := m.GetTokenType().(*Tls_AuthHeader_); ok { return x.AuthHeader } return PLAIN } func (m *Tls) GetCustomHeader() string { if x, ok := m.GetTokenType().(*Tls_CustomHeader); ok { return x.CustomHeader } return "" } func (m *Tls) GetServerName() string { if m != nil { return m.ServerName } return "" } // XXX_OneofWrappers is for the internal use of the proto package. func (*Tls) XXX_OneofWrappers() []interface{} { return []interface{}{ (*Tls_TokenPath)(nil), (*Tls_Oauth)(nil), (*Tls_AuthHeader_)(nil), (*Tls_CustomHeader)(nil), } } // OAuth let operator specify config to fetch access token via oauth when using // TLS for connection to the backend. type OAuth struct { // OAuth client id for mixer. ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"` // The path to the file holding the client secret for oauth. ClientSecret string `protobuf:"bytes,2,opt,name=client_secret,json=clientSecret,proto3" json:"client_secret,omitempty"` // The Resource server's token endpoint URL. TokenUrl string `protobuf:"bytes,3,opt,name=token_url,json=tokenUrl,proto3" json:"token_url,omitempty"` // List of requested permissions. Scopes []string `protobuf:"bytes,4,rep,name=scopes,proto3" json:"scopes,omitempty"` // Additional parameters for requests to the token endpoint. 
EndpointParams map[string]string `protobuf:"bytes,5,rep,name=endpoint_params,json=endpointParams,proto3" json:"endpoint_params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (m *OAuth) Reset() { *m = OAuth{} } func (*OAuth) ProtoMessage() {} func (*OAuth) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{12} } func (m *OAuth) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *OAuth) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_OAuth.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *OAuth) XXX_Merge(src proto.Message) { xxx_messageInfo_OAuth.Merge(m, src) } func (m *OAuth) XXX_Size() int { return m.Size() } func (m *OAuth) XXX_DiscardUnknown() { xxx_messageInfo_OAuth.DiscardUnknown(m) } var xxx_messageInfo_OAuth proto.InternalMessageInfo func (m *OAuth) GetClientId() string { if m != nil { return m.ClientId } return "" } func (m *OAuth) GetClientSecret() string { if m != nil { return m.ClientSecret } return "" } func (m *OAuth) GetTokenUrl() string { if m != nil { return m.TokenUrl } return "" } func (m *OAuth) GetScopes() []string { if m != nil { return m.Scopes } return nil } func (m *OAuth) GetEndpointParams() map[string]string { if m != nil { return m.EndpointParams } return nil } // Mutual let operator specify TLS configuration for Mixer as client if mutual TLS is used to // secure connection to adapter backend. type Mutual struct { // The path to the file holding the private key for mutual TLS. If omitted, the // default Mixer private key will be used. PrivateKey string `protobuf:"bytes,1,opt,name=private_key,json=privateKey,proto3" json:"private_key,omitempty"` // The path to the file holding client certificate for mutual TLS. If omitted, the // default Mixer certificates will be used. 
ClientCertificate string `protobuf:"bytes,2,opt,name=client_certificate,json=clientCertificate,proto3" json:"client_certificate,omitempty"` // The path to the file holding additional CA certificates that are needed to // verify the presented adapter certificates. By default Mixer should already // include Istio CA certificates and system certificates in cert pool. CaCertificates string `protobuf:"bytes,3,opt,name=ca_certificates,json=caCertificates,proto3" json:"ca_certificates,omitempty"` // Used to configure mixer mutual TLS client to supply server name for SNI. // It is not used to verify the hostname of the peer certificate, since // Istio verifies whitelisted SAN fields in mutual TLS. ServerName string `protobuf:"bytes,4,opt,name=server_name,json=serverName,proto3" json:"server_name,omitempty"` } func (m *Mutual) Reset() { *m = Mutual{} } func (*Mutual) ProtoMessage() {} func (*Mutual) Descriptor() ([]byte, []int) { return fileDescriptor_dfb7777e81b6b919, []int{13} } func (m *Mutual) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } func (m *Mutual) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { return xxx_messageInfo_Mutual.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) if err != nil { return nil, err } return b[:n], nil } } func (m *Mutual) XXX_Merge(src proto.Message) { xxx_messageInfo_Mutual.Merge(m, src) } func (m *Mutual) XXX_Size() int { return m.Size() } func (m *Mutual) XXX_DiscardUnknown() { xxx_messageInfo_Mutual.DiscardUnknown(m) } var xxx_messageInfo_Mutual proto.InternalMessageInfo func (m *Mutual) GetPrivateKey() string { if m != nil { return m.PrivateKey } return "" } func (m *Mutual) GetClientCertificate() string { if m != nil { return m.ClientCertificate } return "" } func (m *Mutual) GetCaCertificates() string { if m != nil { return m.CaCertificates } return "" } func (m *Mutual) GetServerName() string { if m != nil { return m.ServerName } return "" } func init() 
{ proto.RegisterEnum("istio.policy.v1beta1.Rule_HeaderOperationTemplate_Operation", Rule_HeaderOperationTemplate_Operation_name, Rule_HeaderOperationTemplate_Operation_value) proto.RegisterEnum("istio.policy.v1beta1.FractionalPercent_DenominatorType", FractionalPercent_DenominatorType_name, FractionalPercent_DenominatorType_value) proto.RegisterEnum("istio.policy.v1beta1.Tls_AuthHeader", Tls_AuthHeader_name, Tls_AuthHeader_value) proto.RegisterType((*AttributeManifest)(nil), "istio.policy.v1beta1.AttributeManifest") proto.RegisterMapType((map[string]*AttributeManifest_AttributeInfo)(nil), "istio.policy.v1beta1.AttributeManifest.AttributesEntry") proto.RegisterType((*AttributeManifest_AttributeInfo)(nil), "istio.policy.v1beta1.AttributeManifest.AttributeInfo") proto.RegisterType((*Rule)(nil), "istio.policy.v1beta1.Rule") proto.RegisterType((*Rule_HeaderOperationTemplate)(nil), "istio.policy.v1beta1.Rule.HeaderOperationTemplate") proto.RegisterType((*Action)(nil), "istio.policy.v1beta1.Action") proto.RegisterType((*Instance)(nil), "istio.policy.v1beta1.Instance") proto.RegisterMapType((map[string]string)(nil), "istio.policy.v1beta1.Instance.AttributeBindingsEntry") proto.RegisterType((*Handler)(nil), "istio.policy.v1beta1.Handler") proto.RegisterType((*Connection)(nil), "istio.policy.v1beta1.Connection") proto.RegisterType((*Sampling)(nil), "istio.policy.v1beta1.Sampling") proto.RegisterType((*RandomSampling)(nil), "istio.policy.v1beta1.RandomSampling") proto.RegisterType((*RateLimitSampling)(nil), "istio.policy.v1beta1.RateLimitSampling") proto.RegisterType((*FractionalPercent)(nil), "istio.policy.v1beta1.FractionalPercent") proto.RegisterType((*Authentication)(nil), "istio.policy.v1beta1.Authentication") proto.RegisterType((*Tls)(nil), "istio.policy.v1beta1.Tls") proto.RegisterType((*OAuth)(nil), "istio.policy.v1beta1.OAuth") proto.RegisterMapType((map[string]string)(nil), "istio.policy.v1beta1.OAuth.EndpointParamsEntry") proto.RegisterType((*Mutual)(nil), 
"istio.policy.v1beta1.Mutual") } func init() { proto.RegisterFile("policy/v1beta1/cfg.proto", fileDescriptor_dfb7777e81b6b919) } var fileDescriptor_dfb7777e81b6b919 = []byte{ // 1557 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x57, 0xbf, 0x73, 0x1b, 0xc5, 0x17, 0xd7, 0x49, 0x96, 0x2c, 0x3d, 0xd9, 0xb2, 0xbc, 0xf1, 0x37, 0x56, 0x94, 0x7c, 0x65, 0xa3, 0xc0, 0x24, 0x4d, 0xa4, 0xd8, 0x81, 0x04, 0x32, 0x29, 0x90, 0x6c, 0x05, 0x79, 0xe2, 0xd8, 0x62, 0x6d, 0x07, 0x48, 0xc1, 0xcd, 0xfa, 0x6e, 0x6d, 0xdf, 0xe4, 0x7e, 0x71, 0xb7, 0xa7, 0x89, 0x3b, 0x0a, 0x7a, 0x52, 0x52, 0x53, 0x01, 0x0d, 0xff, 0x01, 0x75, 0x1a, 0x66, 0x32, 0xc3, 0x0c, 0xe3, 0x0a, 0x12, 0xa5, 0xa1, 0x61, 0x26, 0x05, 0x7f, 0x00, 0x73, 0xbb, 0x7b, 0x77, 0xb2, 0x2c, 0x65, 0x12, 0xba, 0xdb, 0xf7, 0x3e, 0xef, 0xf7, 0xdb, 0xf7, 0xf6, 0xa0, 0xe2, 0x3a, 0xa6, 0xa1, 0x1d, 0x37, 0xfb, 0x2b, 0xfb, 0x94, 0x91, 0x95, 0xa6, 0x76, 0x70, 0xd8, 0x70, 0x3d, 0x87, 0x39, 0x68, 0xc1, 0xf0, 0x99, 0xe1, 0x34, 0x04, 0xbf, 0x21, 0xf9, 0xd5, 0x85, 0x43, 0xe7, 0xd0, 0xe1, 0x80, 0x66, 0xf8, 0x25, 0xb0, 0xd5, 0xa5, 0x43, 0xc7, 0x39, 0x34, 0x69, 0x93, 0xb8, 0x46, 0xf3, 0xc0, 0xa0, 0xa6, 0xae, 0xee, 0xd3, 0x23, 0xd2, 0x37, 0x1c, 0x4f, 0x02, 0x2e, 0x49, 0x00, 0x3f, 0xed, 0x07, 0x07, 0x4d, 0x9f, 0x79, 0x81, 0xc6, 0x24, 0xb7, 0x36, 0xca, 0xd5, 0x03, 0x8f, 0x30, 0xc3, 0xb1, 0x23, 0xf5, 0x23, 0x4e, 0xf6, 0x89, 0x19, 0x50, 0x95, 0x1d, 0xbb, 0x54, 0x00, 0xea, 0xdf, 0x66, 0x60, 0xbe, 0xc5, 0x98, 0x67, 0xec, 0x07, 0x8c, 0xde, 0x27, 0xb6, 0x71, 0x40, 0x7d, 0x86, 0xaa, 0x90, 0xf7, 0x68, 0xdf, 0xf0, 0x0d, 0xc7, 0xae, 0x28, 0xcb, 0xca, 0xd5, 0x02, 0x8e, 0xcf, 0x68, 0x11, 0xa6, 0x6c, 0x62, 0xd1, 0x4a, 0x3a, 0xa4, 0xb7, 0x33, 0xcf, 0x5b, 0x69, 0xcc, 0x09, 0xe8, 0x33, 0x00, 0x12, 0x69, 0xf2, 0x2b, 0x99, 0xe5, 0xcc, 0xd5, 0xe2, 0xea, 0xad, 0xc6, 0xb8, 0x5c, 0x34, 0xce, 0x58, 0x4c, 0x28, 0x7e, 0xc7, 0x66, 0xde, 0x31, 0x1e, 0x52, 0x55, 0xed, 0xc3, 0x6c, 0xcc, 0xde, 0xb0, 0x0f, 0x1c, 0xb4, 0x0c, 
0x45, 0x9d, 0xfa, 0x9a, 0x67, 0xb8, 0x2c, 0xf1, 0x70, 0x98, 0x84, 0xd6, 0x00, 0x92, 0x50, 0xb9, 0xab, 0xa5, 0xd5, 0xa5, 0xf1, 0xbe, 0x3c, 0x08, 0x71, 0xbb, 0xc7, 0x2e, 0x15, 0xb1, 0x14, 0xfa, 0xd1, 0xb9, 0xca, 0x60, 0x6e, 0xc4, 0x2d, 0x54, 0x86, 0xcc, 0x23, 0x7a, 0x2c, 0x2d, 0x86, 0x9f, 0xe8, 0x1e, 0x64, 0xb9, 0x04, 0x37, 0x52, 0x5c, 0xfd, 0xe0, 0xad, 0x03, 0x0e, 0x23, 0xc2, 0x42, 0xc7, 0xed, 0xf4, 0x87, 0x4a, 0xfd, 0xd7, 0x29, 0x98, 0xc2, 0x81, 0x49, 0xd1, 0x02, 0x64, 0x2d, 0xc2, 0xb4, 0x23, 0x69, 0x4d, 0x1c, 0xd0, 0x4d, 0x98, 0x26, 0x5a, 0x18, 0xa3, 0x5f, 0x49, 0xf3, 0x14, 0x5f, 0x9a, 0x60, 0x91, 0x83, 0x70, 0x04, 0x46, 0x36, 0x5c, 0xf0, 0xe8, 0x57, 0x01, 0xf5, 0x99, 0x7a, 0x44, 0x89, 0x4e, 0x3d, 0xd5, 0x71, 0xa9, 0xe8, 0x95, 0xa8, 0x58, 0xab, 0xe3, 0x35, 0x85, 0xce, 0x34, 0xba, 0x5c, 0x66, 0x3b, 0x12, 0xd9, 0xa5, 0x96, 0x6b, 0x12, 0x46, 0xf1, 0xa2, 0x54, 0x3a, 0xc2, 0xf7, 0x91, 0x0b, 0x55, 0x8f, 0xfa, 0xae, 0x63, 0xfb, 0x74, 0x8c, 0xc1, 0xa9, 0xff, 0x6c, 0xb0, 0x12, 0x69, 0x3d, 0x63, 0xf1, 0x36, 0xe4, 0x7d, 0x62, 0xb9, 0xa6, 0x61, 0x1f, 0x56, 0xb2, 0xbc, 0x18, 0xb5, 0xf1, 0xfa, 0x77, 0x24, 0x0a, 0xc7, 0xf8, 0xea, 0x89, 0x02, 0x8b, 0x13, 0x2c, 0xc6, 0x0d, 0xaf, 0x8c, 0x36, 0xfc, 0x79, 0xc8, 0xf1, 0xb2, 0x89, 0x4a, 0x14, 0xb0, 0x3c, 0xa1, 0x87, 0x50, 0x88, 0x43, 0xad, 0x64, 0x78, 0xef, 0xdd, 0x79, 0xfb, 0x48, 0x1b, 0x31, 0x05, 0x27, 0xea, 0xea, 0xd7, 0xa1, 0x10, 0xd3, 0x51, 0x11, 0xa6, 0x71, 0xa7, 0xb7, 0xd9, 0x5a, 0xeb, 0x94, 0x53, 0x08, 0x20, 0x87, 0x3b, 0xf7, 0xb7, 0x1f, 0x74, 0xca, 0x4a, 0xf8, 0xdd, 0xea, 0xf5, 0x3a, 0x5b, 0xeb, 0xe5, 0x74, 0xfd, 0x4b, 0xc8, 0x89, 0x5e, 0x40, 0xff, 0x87, 0xe9, 0x23, 0x62, 0xeb, 0x26, 0xf5, 0x86, 0x2f, 0x6f, 0x44, 0x43, 0xef, 0x40, 0xc1, 0xb0, 0x7d, 0x46, 0x6c, 0x4d, 0x5e, 0x5f, 0x09, 0x48, 0xa8, 0x08, 0xc9, 0x54, 0x4c, 0xf1, 0x8e, 0xe4, 0xdf, 0xf5, 0xdf, 0xd3, 0x90, 0xdf, 0x90, 0x88, 0xc9, 0xb9, 0xba, 0x01, 0xf3, 0x9a, 0x63, 0xb9, 0x86, 0x49, 0x75, 0x95, 0xc9, 0x08, 0x2b, 0xff, 0xfc, 0xfd, 0xe3, 0x72, 0x02, 0x2c, 0x47, 0x80, 
0x38, 0xf3, 0x55, 0xc8, 0xc7, 0xd8, 0xb4, 0x18, 0x43, 0xd1, 0x19, 0xbd, 0x0f, 0x39, 0x97, 0x78, 0xc4, 0xf2, 0x79, 0x86, 0x8b, 0xab, 0x8b, 0x0d, 0x31, 0x0a, 0x1b, 0xd1, 0x28, 0x6c, 0xec, 0xf0, 0x41, 0x29, 0x74, 0x4b, 0x2c, 0xd2, 0x01, 0xc5, 0x83, 0x45, 0xdd, 0x37, 0x6c, 0xdd, 0xb0, 0x0f, 0xa3, 0x6e, 0x9c, 0x70, 0x75, 0xa3, 0xd8, 0x92, 0x1b, 0xdb, 0x96, 0x72, 0x62, 0x52, 0xcd, 0x93, 0x51, 0x7a, 0x75, 0x1d, 0xce, 0x8f, 0x07, 0x8f, 0x99, 0x1f, 0x0b, 0xc3, 0xf3, 0xa3, 0x30, 0x3c, 0x08, 0x06, 0x0a, 0x4c, 0x77, 0x65, 0x6d, 0x26, 0xe6, 0x75, 0x05, 0xe2, 0xb4, 0xa9, 0x44, 0x27, 0x2e, 0xa3, 0xde, 0x48, 0x5a, 0xe7, 0x22, 0x7e, 0x4b, 0xb0, 0x51, 0x05, 0xa6, 0x23, 0xa4, 0xb0, 0x19, 0x1d, 0x51, 0xf3, 0x0d, 0x73, 0x1a, 0xa7, 0xf3, 0x63, 0x00, 0xcd, 0xb1, 0x6d, 0xca, 0xfb, 0x8b, 0x77, 0x45, 0x71, 0x75, 0x79, 0x7c, 0x1a, 0xd7, 0x62, 0x1c, 0x1e, 0x92, 0xa9, 0xff, 0xac, 0x00, 0x24, 0x2c, 0xe1, 0x9b, 0xee, 0x51, 0xdf, 0x4f, 0x7c, 0xe3, 0x47, 0xf4, 0x11, 0x4c, 0x33, 0xc3, 0xa2, 0x4e, 0xc0, 0xa4, 0x73, 0x17, 0xce, 0x38, 0xb7, 0x2e, 0x77, 0x5f, 0x7b, 0xea, 0xbb, 0x3f, 0x97, 0x14, 0x1c, 0xe1, 0xd1, 0x26, 0x94, 0x48, 0xc0, 0x8e, 0xa8, 0xcd, 0x0c, 0x8d, 0x0c, 0x79, 0xfa, 0xee, 0x84, 0xc9, 0x79, 0x0a, 0x8b, 0x47, 0x64, 0xeb, 0x4f, 0x14, 0xc8, 0x47, 0x13, 0x04, 0xdd, 0x81, 0x9c, 0x47, 0x6c, 0xdd, 0xb1, 0x78, 0x65, 0x26, 0xaa, 0xc4, 0x1c, 0x13, 0xcf, 0x1d, 0x29, 0x83, 0xee, 0x02, 0x78, 0x84, 0x51, 0xd5, 0x34, 0x2c, 0x83, 0xc9, 0x05, 0x72, 0x65, 0x92, 0x06, 0x46, 0x37, 0x43, 0x58, 0xac, 0xa4, 0xe0, 0x45, 0xa4, 0xfa, 0x6f, 0x0a, 0x94, 0x4e, 0x9b, 0x40, 0x2b, 0xb0, 0x90, 0x34, 0x3a, 0x7d, 0xec, 0x86, 0x39, 0x4c, 0x76, 0xe5, 0xb9, 0x98, 0xd7, 0x89, 0x59, 0xa8, 0x07, 0x73, 0x2e, 0xf5, 0x34, 0x6a, 0x33, 0x95, 0xcf, 0x45, 0xaa, 0xbf, 0xde, 0xa5, 0xbb, 0x9e, 0xd8, 0x2d, 0xc4, 0xec, 0x09, 0x31, 0x5c, 0x92, 0xf2, 0x3b, 0x42, 0x1c, 0xdd, 0x81, 0x6a, 0xe0, 0x53, 0xd5, 0xb0, 0x75, 0xea, 0x52, 0x5b, 0x0f, 0x35, 0x8b, 0xc8, 0xed, 0xb0, 0xc0, 0x61, 0x19, 0xf3, 0xb8, 0x12, 0xf8, 0x74, 0x23, 0x01, 0xe0, 0x98, 
0x5f, 0xff, 0x45, 0x81, 0xf9, 0x33, 0x61, 0xa3, 0x1e, 0xcc, 0x47, 0x53, 0x5b, 0x8d, 0x1e, 0x3b, 0x32, 0xf9, 0xaf, 0xe9, 0x88, 0xfc, 0xd3, 0x3f, 0x96, 0x52, 0xbc, 0x2b, 0xca, 0x91, 0x74, 0xc4, 0x43, 0xab, 0xf0, 0x3f, 0x8b, 0x3c, 0x56, 0x03, 0x5b, 0x46, 0xad, 0x52, 0x9b, 0x79, 0x06, 0x15, 0x1d, 0x98, 0xc1, 0xe7, 0x2c, 0xf2, 0x78, 0x2f, 0xe2, 0x75, 0x04, 0x0b, 0x5d, 0x86, 0xd9, 0xd8, 0x8b, 0xb0, 0x0e, 0x3c, 0x98, 0x0c, 0x9e, 0x89, 0x88, 0xa1, 0xdf, 0x3c, 0x80, 0x33, 0x49, 0x42, 0x97, 0xa0, 0x60, 0x07, 0x56, 0x38, 0xc2, 0x1d, 0x8f, 0x3b, 0x3e, 0x8b, 0x13, 0x02, 0xfa, 0x22, 0x7c, 0xda, 0xd8, 0x8e, 0x65, 0xd8, 0x9c, 0x2f, 0x5e, 0x2e, 0xb7, 0xde, 0xb0, 0x00, 0x8d, 0xf5, 0x44, 0x34, 0x7c, 0xc1, 0xe0, 0x61, 0x5d, 0xf5, 0xeb, 0x30, 0x37, 0xc2, 0x0f, 0x17, 0x48, 0x77, 0x6f, 0x6b, 0x1d, 0x77, 0xd6, 0xcb, 0x29, 0x54, 0x86, 0x99, 0xdd, 0xce, 0x96, 0xba, 0xdb, 0xdd, 0xde, 0xdb, 0x69, 0x6d, 0xad, 0x97, 0x95, 0xfa, 0x37, 0x0a, 0x94, 0x4e, 0xdf, 0x06, 0x74, 0x0d, 0x32, 0xcc, 0xf4, 0xe3, 0x84, 0x8f, 0xf5, 0x6b, 0xd7, 0xf4, 0xbb, 0x29, 0x1c, 0xe2, 0xd0, 0x4d, 0xc8, 0x59, 0x01, 0x0b, 0x88, 0x29, 0x5b, 0x69, 0xc2, 0x63, 0xe5, 0x3e, 0xc7, 0x74, 0x53, 0x58, 0xa2, 0xdb, 0x45, 0x28, 0x84, 0xd7, 0x8e, 0x3f, 0xdf, 0xea, 0x27, 0x69, 0xc8, 0xec, 0x9a, 0x3e, 0xba, 0x02, 0x73, 0x1a, 0x51, 0x35, 0xea, 0x31, 0xe3, 0x20, 0xf4, 0x87, 0xfa, 0xb2, 0x9d, 0x4b, 0x1a, 0x59, 0x1b, 0xa2, 0xa2, 0x25, 0x00, 0xe6, 0x3c, 0xa2, 0xb6, 0xea, 0x12, 0x76, 0x24, 0x06, 0x49, 0x37, 0x85, 0x0b, 0x9c, 0xd6, 0x23, 0xec, 0x08, 0xdd, 0x80, 0xac, 0x13, 0xea, 0x97, 0xa3, 0xe4, 0xe2, 0x78, 0xaf, 0xb6, 0xc3, 0xd8, 0xbb, 0x29, 0x2c, 0xb0, 0xe8, 0x13, 0x28, 0x72, 0x9f, 0xc4, 0x6b, 0x86, 0xcf, 0x90, 0xd2, 0xa4, 0x0b, 0xbf, 0x6b, 0xfa, 0x7c, 0x8e, 0x88, 0xdd, 0xde, 0x55, 0x30, 0x90, 0xf8, 0x84, 0xde, 0x83, 0x59, 0x2d, 0xf0, 0x99, 0x63, 0x45, 0xaa, 0xb2, 0xdc, 0x43, 0x05, 0xcf, 0x08, 0xb2, 0x84, 0x2d, 0x41, 0xd1, 0xa7, 0x5e, 0x9f, 0x7a, 0x2a, 0x1f, 0xfd, 0x39, 0x1e, 0x2a, 0x08, 0xd2, 0x56, 0xb8, 0x79, 0x2f, 0x03, 0x24, 0x36, 0x50, 
0x01, 0xb2, 0xbd, 0xcd, 0xd6, 0xc6, 0x96, 0x78, 0x0a, 0xb4, 0x3b, 0x2d, 0xdc, 0xc1, 0x65, 0xa5, 0x5d, 0x82, 0x19, 0x91, 0x0b, 0xdf, 0x09, 0x3c, 0x8d, 0xb6, 0x67, 0xa2, 0xdc, 0xf0, 0xd4, 0xfe, 0x94, 0x86, 0x2c, 0x0f, 0x13, 0x2d, 0x43, 0x41, 0x33, 0x8d, 0xf0, 0x8a, 0x1a, 0xfa, 0xf0, 0x9a, 0xc9, 0x0b, 0xea, 0x86, 0x8e, 0xae, 0xc2, 0xac, 0x44, 0xf8, 0x54, 0xf3, 0x28, 0x1b, 0x7e, 0x44, 0xcc, 0x08, 0xce, 0x0e, 0x67, 0x84, 0xba, 0x84, 0x8d, 0xc0, 0x33, 0x79, 0x8a, 0x23, 0x5d, 0x9c, 0xba, 0xe7, 0x99, 0xe1, 0xd3, 0xc9, 0xd7, 0x1c, 0x97, 0x8a, 0xdd, 0x5b, 0xc0, 0xf2, 0x84, 0x3e, 0x87, 0x39, 0x6a, 0xeb, 0xae, 0x63, 0xd8, 0x4c, 0x95, 0xab, 0x28, 0xcb, 0x97, 0x73, 0xf3, 0x35, 0x25, 0x6a, 0x74, 0xa4, 0x48, 0x8f, 0x4b, 0x88, 0xb5, 0x5c, 0xa2, 0xa7, 0x88, 0xd5, 0x16, 0x9c, 0x1b, 0x03, 0x7b, 0xab, 0x85, 0xfc, 0xbd, 0x02, 0x39, 0xd1, 0xa9, 0x61, 0x6d, 0x5c, 0xcf, 0xe8, 0x87, 0xc3, 0x3b, 0x11, 0x07, 0x49, 0xba, 0x47, 0x8f, 0xd1, 0x35, 0x40, 0x32, 0x59, 0x43, 0xfd, 0x2a, 0x55, 0xce, 0x0b, 0xce, 0x50, 0xcb, 0x8e, 0x6b, 0xed, 0xcc, 0x84, 0xd6, 0x3e, 0xd5, 0x14, 0x53, 0xa3, 0x4d, 0xd1, 0xfe, 0xf4, 0xd9, 0x8b, 0x5a, 0xea, 0xe4, 0x45, 0x2d, 0xf5, 0xea, 0x45, 0x4d, 0xf9, 0x7a, 0x50, 0x53, 0x7e, 0x18, 0xd4, 0x94, 0xa7, 0x83, 0x9a, 0xf2, 0x6c, 0x50, 0x53, 0x9e, 0x0f, 0x6a, 0xca, 0x5f, 0x83, 0x5a, 0xea, 0xd5, 0xa0, 0xa6, 0x3c, 0x79, 0x59, 0x4b, 0x3d, 0x7b, 0x59, 0x4b, 0x9d, 0xbc, 0xac, 0xa5, 0x1e, 0x5e, 0x14, 0xd9, 0x35, 0x1c, 0xfe, 0x23, 0x7a, 0xfa, 0xa7, 0x71, 0x3f, 0xc7, 0xe7, 0xe9, 0x8d, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x8b, 0x0c, 0x72, 0x92, 0xf2, 0x0e, 0x00, 0x00, } func (x Rule_HeaderOperationTemplate_Operation) String() string { s, ok := Rule_HeaderOperationTemplate_Operation_name[int32(x)] if ok { return s } return strconv.Itoa(int(x)) } func (x FractionalPercent_DenominatorType) String() string { s, ok := FractionalPercent_DenominatorType_name[int32(x)] if ok { return s } return strconv.Itoa(int(x)) } func (x Tls_AuthHeader) String() string { s, ok := 
Tls_AuthHeader_name[int32(x)] if ok { return s } return strconv.Itoa(int(x)) } func (this *AttributeManifest) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*AttributeManifest) if !ok { that2, ok := that.(AttributeManifest) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Revision != that1.Revision { return false } if this.Name != that1.Name { return false } if len(this.Attributes) != len(that1.Attributes) { return false } for i := range this.Attributes { if !this.Attributes[i].Equal(that1.Attributes[i]) { return false } } return true } func (this *AttributeManifest_AttributeInfo) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*AttributeManifest_AttributeInfo) if !ok { that2, ok := that.(AttributeManifest_AttributeInfo) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Description != that1.Description { return false } if this.ValueType != that1.ValueType { return false } return true } func (this *Rule) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Rule) if !ok { that2, ok := that.(Rule) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Match != that1.Match { return false } if len(this.Actions) != len(that1.Actions) { return false } for i := range this.Actions { if !this.Actions[i].Equal(that1.Actions[i]) { return false } } if len(this.RequestHeaderOperations) != len(that1.RequestHeaderOperations) { return false } for i := range this.RequestHeaderOperations { if !this.RequestHeaderOperations[i].Equal(that1.RequestHeaderOperations[i]) { return false } } if len(this.ResponseHeaderOperations) != len(that1.ResponseHeaderOperations) { return false } for i := range this.ResponseHeaderOperations { if 
!this.ResponseHeaderOperations[i].Equal(that1.ResponseHeaderOperations[i]) { return false } } if !this.Sampling.Equal(that1.Sampling) { return false } return true } func (this *Rule_HeaderOperationTemplate) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Rule_HeaderOperationTemplate) if !ok { that2, ok := that.(Rule_HeaderOperationTemplate) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Name != that1.Name { return false } if len(this.Values) != len(that1.Values) { return false } for i := range this.Values { if this.Values[i] != that1.Values[i] { return false } } if this.Operation != that1.Operation { return false } return true } func (this *Action) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Action) if !ok { that2, ok := that.(Action) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Handler != that1.Handler { return false } if len(this.Instances) != len(that1.Instances) { return false } for i := range this.Instances { if this.Instances[i] != that1.Instances[i] { return false } } if this.Name != that1.Name { return false } return true } func (this *Instance) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Instance) if !ok { that2, ok := that.(Instance) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Name != that1.Name { return false } if this.CompiledTemplate != that1.CompiledTemplate { return false } if this.Template != that1.Template { return false } if !this.Params.Equal(that1.Params) { return false } if len(this.AttributeBindings) != len(that1.AttributeBindings) { return false } for i := range this.AttributeBindings { if this.AttributeBindings[i] != that1.AttributeBindings[i] { return 
false } } return true } func (this *Handler) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Handler) if !ok { that2, ok := that.(Handler) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Name != that1.Name { return false } if this.CompiledAdapter != that1.CompiledAdapter { return false } if this.Adapter != that1.Adapter { return false } if !this.Params.Equal(that1.Params) { return false } if !this.Connection.Equal(that1.Connection) { return false } return true } func (this *Connection) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Connection) if !ok { that2, ok := that.(Connection) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Address != that1.Address { return false } if this.Timeout != nil && that1.Timeout != nil { if *this.Timeout != *that1.Timeout { return false } } else if this.Timeout != nil { return false } else if that1.Timeout != nil { return false } if !this.Authentication.Equal(that1.Authentication) { return false } return true } func (this *Sampling) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Sampling) if !ok { that2, ok := that.(Sampling) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if !this.Random.Equal(that1.Random) { return false } if !this.RateLimit.Equal(that1.RateLimit) { return false } return true } func (this *RandomSampling) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*RandomSampling) if !ok { that2, ok := that.(RandomSampling) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.AttributeExpression != that1.AttributeExpression { return false } if 
!this.PercentSampled.Equal(that1.PercentSampled) { return false } if this.UseIndependentRandomness != that1.UseIndependentRandomness { return false } return true } func (this *RateLimitSampling) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*RateLimitSampling) if !ok { that2, ok := that.(RateLimitSampling) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.SamplingDuration != that1.SamplingDuration { return false } if this.MaxUnsampledEntries != that1.MaxUnsampledEntries { return false } if this.SamplingRate != that1.SamplingRate { return false } return true } func (this *FractionalPercent) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*FractionalPercent) if !ok { that2, ok := that.(FractionalPercent) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.Numerator != that1.Numerator { return false } if this.Denominator != that1.Denominator { return false } return true } func (this *Authentication) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Authentication) if !ok { that2, ok := that.(Authentication) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if that1.AuthType == nil { if this.AuthType != nil { return false } } else if this.AuthType == nil { return false } else if !this.AuthType.Equal(that1.AuthType) { return false } return true } func (this *Authentication_Tls) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Authentication_Tls) if !ok { that2, ok := that.(Authentication_Tls) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if !this.Tls.Equal(that1.Tls) { return false } return true } 
func (this *Authentication_Mutual) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Authentication_Mutual) if !ok { that2, ok := that.(Authentication_Mutual) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if !this.Mutual.Equal(that1.Mutual) { return false } return true } func (this *Tls) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Tls) if !ok { that2, ok := that.(Tls) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.CaCertificates != that1.CaCertificates { return false } if that1.TokenSource == nil { if this.TokenSource != nil { return false } } else if this.TokenSource == nil { return false } else if !this.TokenSource.Equal(that1.TokenSource) { return false } if that1.TokenType == nil { if this.TokenType != nil { return false } } else if this.TokenType == nil { return false } else if !this.TokenType.Equal(that1.TokenType) { return false } if this.ServerName != that1.ServerName { return false } return true } func (this *Tls_TokenPath) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Tls_TokenPath) if !ok { that2, ok := that.(Tls_TokenPath) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.TokenPath != that1.TokenPath { return false } return true } func (this *Tls_Oauth) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Tls_Oauth) if !ok { that2, ok := that.(Tls_Oauth) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if !this.Oauth.Equal(that1.Oauth) { return false } return true } func (this *Tls_AuthHeader_) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := 
that.(*Tls_AuthHeader_) if !ok { that2, ok := that.(Tls_AuthHeader_) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.AuthHeader != that1.AuthHeader { return false } return true } func (this *Tls_CustomHeader) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Tls_CustomHeader) if !ok { that2, ok := that.(Tls_CustomHeader) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.CustomHeader != that1.CustomHeader { return false } return true } func (this *OAuth) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*OAuth) if !ok { that2, ok := that.(OAuth) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.ClientId != that1.ClientId { return false } if this.ClientSecret != that1.ClientSecret { return false } if this.TokenUrl != that1.TokenUrl { return false } if len(this.Scopes) != len(that1.Scopes) { return false } for i := range this.Scopes { if this.Scopes[i] != that1.Scopes[i] { return false } } if len(this.EndpointParams) != len(that1.EndpointParams) { return false } for i := range this.EndpointParams { if this.EndpointParams[i] != that1.EndpointParams[i] { return false } } return true } func (this *Mutual) Equal(that interface{}) bool { if that == nil { return this == nil } that1, ok := that.(*Mutual) if !ok { that2, ok := that.(Mutual) if ok { that1 = &that2 } else { return false } } if that1 == nil { return this == nil } else if this == nil { return false } if this.PrivateKey != that1.PrivateKey { return false } if this.ClientCertificate != that1.ClientCertificate { return false } if this.CaCertificates != that1.CaCertificates { return false } if this.ServerName != that1.ServerName { return false } return true } func (this *AttributeManifest) 
GoString() string { if this == nil { return "nil" } s := make([]string, 0, 7) s = append(s, "&v1beta1.AttributeManifest{") s = append(s, "Revision: "+fmt.Sprintf("%#v", this.Revision)+",\n") s = append(s, "Name: "+fmt.Sprintf("%#v", this.Name)+",\n") keysForAttributes := make([]string, 0, len(this.Attributes)) for k, _ := range this.Attributes { keysForAttributes = append(keysForAttributes, k) } github_com_gogo_protobuf_sortkeys.Strings(keysForAttributes) mapStringForAttributes := "map[string]*AttributeManifest_AttributeInfo{" for _, k := range keysForAttributes { mapStringForAttributes += fmt.Sprintf("%#v: %#v,", k, this.Attributes[k]) } mapStringForAttributes += "}" if this.Attributes != nil { s = append(s, "Attributes: "+mapStringForAttributes+",\n") } s = append(s, "}") return strings.Join(s, "") } func (this *AttributeManifest_AttributeInfo) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 6) s = append(s, "&v1beta1.AttributeManifest_AttributeInfo{") s = append(s, "Description: "+fmt.Sprintf("%#v", this.Description)+",\n") s = append(s, "ValueType: "+fmt.Sprintf("%#v", this.ValueType)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *Rule) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 9) s = append(s, "&v1beta1.Rule{") s = append(s, "Match: "+fmt.Sprintf("%#v", this.Match)+",\n") if this.Actions != nil { s = append(s, "Actions: "+fmt.Sprintf("%#v", this.Actions)+",\n") } if this.RequestHeaderOperations != nil { s = append(s, "RequestHeaderOperations: "+fmt.Sprintf("%#v", this.RequestHeaderOperations)+",\n") } if this.ResponseHeaderOperations != nil { s = append(s, "ResponseHeaderOperations: "+fmt.Sprintf("%#v", this.ResponseHeaderOperations)+",\n") } if this.Sampling != nil { s = append(s, "Sampling: "+fmt.Sprintf("%#v", this.Sampling)+",\n") } s = append(s, "}") return strings.Join(s, "") } func (this *Rule_HeaderOperationTemplate) GoString() string { if this == nil { return "nil" } s := 
make([]string, 0, 7) s = append(s, "&v1beta1.Rule_HeaderOperationTemplate{") s = append(s, "Name: "+fmt.Sprintf("%#v", this.Name)+",\n") s = append(s, "Values: "+fmt.Sprintf("%#v", this.Values)+",\n") s = append(s, "Operation: "+fmt.Sprintf("%#v", this.Operation)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *Action) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 7) s = append(s, "&v1beta1.Action{") s = append(s, "Handler: "+fmt.Sprintf("%#v", this.Handler)+",\n") s = append(s, "Instances: "+fmt.Sprintf("%#v", this.Instances)+",\n") s = append(s, "Name: "+fmt.Sprintf("%#v", this.Name)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *Instance) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 9) s = append(s, "&v1beta1.Instance{") s = append(s, "Name: "+fmt.Sprintf("%#v", this.Name)+",\n") s = append(s, "CompiledTemplate: "+fmt.Sprintf("%#v", this.CompiledTemplate)+",\n") s = append(s, "Template: "+fmt.Sprintf("%#v", this.Template)+",\n") if this.Params != nil { s = append(s, "Params: "+fmt.Sprintf("%#v", this.Params)+",\n") } keysForAttributeBindings := make([]string, 0, len(this.AttributeBindings)) for k, _ := range this.AttributeBindings { keysForAttributeBindings = append(keysForAttributeBindings, k) } github_com_gogo_protobuf_sortkeys.Strings(keysForAttributeBindings) mapStringForAttributeBindings := "map[string]string{" for _, k := range keysForAttributeBindings { mapStringForAttributeBindings += fmt.Sprintf("%#v: %#v,", k, this.AttributeBindings[k]) } mapStringForAttributeBindings += "}" if this.AttributeBindings != nil { s = append(s, "AttributeBindings: "+mapStringForAttributeBindings+",\n") } s = append(s, "}") return strings.Join(s, "") } func (this *Handler) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 9) s = append(s, "&v1beta1.Handler{") s = append(s, "Name: "+fmt.Sprintf("%#v", this.Name)+",\n") s = append(s, "CompiledAdapter: 
"+fmt.Sprintf("%#v", this.CompiledAdapter)+",\n") s = append(s, "Adapter: "+fmt.Sprintf("%#v", this.Adapter)+",\n") if this.Params != nil { s = append(s, "Params: "+fmt.Sprintf("%#v", this.Params)+",\n") } if this.Connection != nil { s = append(s, "Connection: "+fmt.Sprintf("%#v", this.Connection)+",\n") } s = append(s, "}") return strings.Join(s, "") } func (this *Connection) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 7) s = append(s, "&v1beta1.Connection{") s = append(s, "Address: "+fmt.Sprintf("%#v", this.Address)+",\n") s = append(s, "Timeout: "+fmt.Sprintf("%#v", this.Timeout)+",\n") if this.Authentication != nil { s = append(s, "Authentication: "+fmt.Sprintf("%#v", this.Authentication)+",\n") } s = append(s, "}") return strings.Join(s, "") } func (this *Sampling) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 6) s = append(s, "&v1beta1.Sampling{") if this.Random != nil { s = append(s, "Random: "+fmt.Sprintf("%#v", this.Random)+",\n") } if this.RateLimit != nil { s = append(s, "RateLimit: "+fmt.Sprintf("%#v", this.RateLimit)+",\n") } s = append(s, "}") return strings.Join(s, "") } func (this *RandomSampling) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 7) s = append(s, "&v1beta1.RandomSampling{") s = append(s, "AttributeExpression: "+fmt.Sprintf("%#v", this.AttributeExpression)+",\n") if this.PercentSampled != nil { s = append(s, "PercentSampled: "+fmt.Sprintf("%#v", this.PercentSampled)+",\n") } s = append(s, "UseIndependentRandomness: "+fmt.Sprintf("%#v", this.UseIndependentRandomness)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *RateLimitSampling) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 7) s = append(s, "&v1beta1.RateLimitSampling{") s = append(s, "SamplingDuration: "+fmt.Sprintf("%#v", this.SamplingDuration)+",\n") s = append(s, "MaxUnsampledEntries: "+fmt.Sprintf("%#v", this.MaxUnsampledEntries)+",\n") s = 
append(s, "SamplingRate: "+fmt.Sprintf("%#v", this.SamplingRate)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *FractionalPercent) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 6) s = append(s, "&v1beta1.FractionalPercent{") s = append(s, "Numerator: "+fmt.Sprintf("%#v", this.Numerator)+",\n") s = append(s, "Denominator: "+fmt.Sprintf("%#v", this.Denominator)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *Authentication) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 6) s = append(s, "&v1beta1.Authentication{") if this.AuthType != nil { s = append(s, "AuthType: "+fmt.Sprintf("%#v", this.AuthType)+",\n") } s = append(s, "}") return strings.Join(s, "") } func (this *Authentication_Tls) GoString() string { if this == nil { return "nil" } s := strings.Join([]string{`&v1beta1.Authentication_Tls{` + `Tls:` + fmt.Sprintf("%#v", this.Tls) + `}`}, ", ") return s } func (this *Authentication_Mutual) GoString() string { if this == nil { return "nil" } s := strings.Join([]string{`&v1beta1.Authentication_Mutual{` + `Mutual:` + fmt.Sprintf("%#v", this.Mutual) + `}`}, ", ") return s } func (this *Tls) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 10) s = append(s, "&v1beta1.Tls{") s = append(s, "CaCertificates: "+fmt.Sprintf("%#v", this.CaCertificates)+",\n") if this.TokenSource != nil { s = append(s, "TokenSource: "+fmt.Sprintf("%#v", this.TokenSource)+",\n") } if this.TokenType != nil { s = append(s, "TokenType: "+fmt.Sprintf("%#v", this.TokenType)+",\n") } s = append(s, "ServerName: "+fmt.Sprintf("%#v", this.ServerName)+",\n") s = append(s, "}") return strings.Join(s, "") } func (this *Tls_TokenPath) GoString() string { if this == nil { return "nil" } s := strings.Join([]string{`&v1beta1.Tls_TokenPath{` + `TokenPath:` + fmt.Sprintf("%#v", this.TokenPath) + `}`}, ", ") return s } func (this *Tls_Oauth) GoString() string { if this == nil { return "nil" } 
s := strings.Join([]string{`&v1beta1.Tls_Oauth{` + `Oauth:` + fmt.Sprintf("%#v", this.Oauth) + `}`}, ", ") return s } func (this *Tls_AuthHeader_) GoString() string { if this == nil { return "nil" } s := strings.Join([]string{`&v1beta1.Tls_AuthHeader_{` + `AuthHeader:` + fmt.Sprintf("%#v", this.AuthHeader) + `}`}, ", ") return s } func (this *Tls_CustomHeader) GoString() string { if this == nil { return "nil" } s := strings.Join([]string{`&v1beta1.Tls_CustomHeader{` + `CustomHeader:` + fmt.Sprintf("%#v", this.CustomHeader) + `}`}, ", ") return s } func (this *OAuth) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 9) s = append(s, "&v1beta1.OAuth{") s = append(s, "ClientId: "+fmt.Sprintf("%#v", this.ClientId)+",\n") s = append(s, "ClientSecret: "+fmt.Sprintf("%#v", this.ClientSecret)+",\n") s = append(s, "TokenUrl: "+fmt.Sprintf("%#v", this.TokenUrl)+",\n") s = append(s, "Scopes: "+fmt.Sprintf("%#v", this.Scopes)+",\n") keysForEndpointParams := make([]string, 0, len(this.EndpointParams)) for k, _ := range this.EndpointParams { keysForEndpointParams = append(keysForEndpointParams, k) } github_com_gogo_protobuf_sortkeys.Strings(keysForEndpointParams) mapStringForEndpointParams := "map[string]string{" for _, k := range keysForEndpointParams { mapStringForEndpointParams += fmt.Sprintf("%#v: %#v,", k, this.EndpointParams[k]) } mapStringForEndpointParams += "}" if this.EndpointParams != nil { s = append(s, "EndpointParams: "+mapStringForEndpointParams+",\n") } s = append(s, "}") return strings.Join(s, "") } func (this *Mutual) GoString() string { if this == nil { return "nil" } s := make([]string, 0, 8) s = append(s, "&v1beta1.Mutual{") s = append(s, "PrivateKey: "+fmt.Sprintf("%#v", this.PrivateKey)+",\n") s = append(s, "ClientCertificate: "+fmt.Sprintf("%#v", this.ClientCertificate)+",\n") s = append(s, "CaCertificates: "+fmt.Sprintf("%#v", this.CaCertificates)+",\n") s = append(s, "ServerName: "+fmt.Sprintf("%#v", this.ServerName)+",\n") s = 
append(s, "}") return strings.Join(s, "") } func valueToGoStringCfg(v interface{}, typ string) string { rv := reflect.ValueOf(v) if rv.IsNil() { return "nil" } pv := reflect.Indirect(rv).Interface() return fmt.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, pv) } func (m *AttributeManifest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *AttributeManifest) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *AttributeManifest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.Attributes) > 0 { for k := range m.Attributes { v := m.Attributes[k] baseI := i if v != nil { { size, err := v.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintCfg(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x12 } i -= len(k) copy(dAtA[i:], k) i = encodeVarintCfg(dAtA, i, uint64(len(k))) i-- dAtA[i] = 0xa i = encodeVarintCfg(dAtA, i, uint64(baseI-i)) i-- dAtA[i] = 0x1a } } if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) i = encodeVarintCfg(dAtA, i, uint64(len(m.Name))) i-- dAtA[i] = 0x12 } if len(m.Revision) > 0 { i -= len(m.Revision) copy(dAtA[i:], m.Revision) i = encodeVarintCfg(dAtA, i, uint64(len(m.Revision))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *AttributeManifest_AttributeInfo) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *AttributeManifest_AttributeInfo) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *AttributeManifest_AttributeInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.ValueType != 0 { i = encodeVarintCfg(dAtA, 
i, uint64(m.ValueType)) i-- dAtA[i] = 0x10 } if len(m.Description) > 0 { i -= len(m.Description) copy(dAtA[i:], m.Description) i = encodeVarintCfg(dAtA, i, uint64(len(m.Description))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *Rule) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *Rule) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Rule) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.Sampling != nil { { size, err := m.Sampling.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintCfg(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x2a } if len(m.ResponseHeaderOperations) > 0 { for iNdEx := len(m.ResponseHeaderOperations) - 1; iNdEx >= 0; iNdEx-- { { size, err := m.ResponseHeaderOperations[iNdEx].MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintCfg(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x22 } } if len(m.RequestHeaderOperations) > 0 { for iNdEx := len(m.RequestHeaderOperations) - 1; iNdEx >= 0; iNdEx-- { { size, err := m.RequestHeaderOperations[iNdEx].MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintCfg(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x1a } } if len(m.Actions) > 0 { for iNdEx := len(m.Actions) - 1; iNdEx >= 0; iNdEx-- { { size, err := m.Actions[iNdEx].MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintCfg(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x12 } } if len(m.Match) > 0 { i -= len(m.Match) copy(dAtA[i:], m.Match) i = encodeVarintCfg(dAtA, i, uint64(len(m.Match))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *Rule_HeaderOperationTemplate) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := 
m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *Rule_HeaderOperationTemplate) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Rule_HeaderOperationTemplate) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.Operation != 0 { i = encodeVarintCfg(dAtA, i, uint64(m.Operation)) i-- dAtA[i] = 0x18 } if len(m.Values) > 0 { for iNdEx := len(m.Values) - 1; iNdEx >= 0; iNdEx-- { i -= len(m.Values[iNdEx]) copy(dAtA[i:], m.Values[iNdEx]) i = encodeVarintCfg(dAtA, i, uint64(len(m.Values[iNdEx]))) i-- dAtA[i] = 0x12 } } if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) i = encodeVarintCfg(dAtA, i, uint64(len(m.Name))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *Action) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *Action) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Action) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) i = encodeVarintCfg(dAtA, i, uint64(len(m.Name))) i-- dAtA[i] = 0x22 } if len(m.Instances) > 0 { for iNdEx := len(m.Instances) - 1; iNdEx >= 0; iNdEx-- { i -= len(m.Instances[iNdEx]) copy(dAtA[i:], m.Instances[iNdEx]) i = encodeVarintCfg(dAtA, i, uint64(len(m.Instances[iNdEx]))) i-- dAtA[i] = 0x1a } } if len(m.Handler) > 0 { i -= len(m.Handler) copy(dAtA[i:], m.Handler) i = encodeVarintCfg(dAtA, i, uint64(len(m.Handler))) i-- dAtA[i] = 0x12 } return len(dAtA) - i, nil } func (m *Instance) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m 
*Instance) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Instance) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.CompiledTemplate) > 0 { i -= len(m.CompiledTemplate) copy(dAtA[i:], m.CompiledTemplate) i = encodeVarintCfg(dAtA, i, uint64(len(m.CompiledTemplate))) i-- dAtA[i] = 0x2 i-- dAtA[i] = 0x82 i-- dAtA[i] = 0xce i-- dAtA[i] = 0xef i-- dAtA[i] = 0xa2 } if len(m.AttributeBindings) > 0 { for k := range m.AttributeBindings { v := m.AttributeBindings[k] baseI := i i -= len(v) copy(dAtA[i:], v) i = encodeVarintCfg(dAtA, i, uint64(len(v))) i-- dAtA[i] = 0x12 i -= len(k) copy(dAtA[i:], k) i = encodeVarintCfg(dAtA, i, uint64(len(k))) i-- dAtA[i] = 0xa i = encodeVarintCfg(dAtA, i, uint64(baseI-i)) i-- dAtA[i] = 0x22 } } if m.Params != nil { { size, err := m.Params.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintCfg(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x1a } if len(m.Template) > 0 { i -= len(m.Template) copy(dAtA[i:], m.Template) i = encodeVarintCfg(dAtA, i, uint64(len(m.Template))) i-- dAtA[i] = 0x12 } if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) i = encodeVarintCfg(dAtA, i, uint64(len(m.Name))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *Handler) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *Handler) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Handler) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if len(m.CompiledAdapter) > 0 { i -= len(m.CompiledAdapter) copy(dAtA[i:], m.CompiledAdapter) i = encodeVarintCfg(dAtA, i, uint64(len(m.CompiledAdapter))) i-- dAtA[i] = 0x2 i-- dAtA[i] = 0x82 i-- dAtA[i] = 0xce i-- dAtA[i] = 0xef i-- dAtA[i] = 0xa2 
} if m.Connection != nil { { size, err := m.Connection.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintCfg(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x22 } if m.Params != nil { { size, err := m.Params.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintCfg(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x1a } if len(m.Adapter) > 0 { i -= len(m.Adapter) copy(dAtA[i:], m.Adapter) i = encodeVarintCfg(dAtA, i, uint64(len(m.Adapter))) i-- dAtA[i] = 0x12 } if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) i = encodeVarintCfg(dAtA, i, uint64(len(m.Name))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } func (m *Connection) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *Connection) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } func (m *Connection) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int _ = l if m.Authentication != nil { { size, err := m.Authentication.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } i -= size i = encodeVarintCfg(dAtA, i, uint64(size)) } i-- dAtA[i] = 0x22 } if m.Timeout != nil { n7, err7 := github_com_gogo_protobuf_types.StdDurationMarshalTo(*m.Timeout, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdDuration(*m.Timeout):]) if err7 != nil { return 0, err7 } i -= n7 i = encodeVarintCfg(dAtA, i, uint64(n7)) i-- dAtA[i] = 0x1a } if len(m.Address) > 0 { i -= len(m.Address) copy(dAtA[i:], m.Address) i = encodeVarintCfg(dAtA, i, uint64(len(m.Address))) i-- dAtA[i] = 0x12 } return len(dAtA) - i, nil } func (m *Sampling) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) if err != nil { return nil, err } return dAtA[:n], nil } func (m *Sampling) MarshalTo(dAtA 
[]byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer encodes Sampling into the tail of dAtA, writing fields
// in reverse field-number order (the gogo/protobuf backward-fill scheme) and
// returning the number of bytes written.
func (m *Sampling) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.RateLimit != nil {
		{
			size, err := m.RateLimit.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintCfg(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x12 // tag: field 2, wire type 2 (length-delimited)
	}
	if m.Random != nil {
		{
			size, err := m.Random.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintCfg(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0xa // tag: field 1, wire type 2
	}
	return len(dAtA) - i, nil
}

// Marshal allocates an exactly-sized buffer and serializes m into it.
func (m *RandomSampling) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo serializes m into the front of dAtA (dAtA must be large enough).
func (m *RandomSampling) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer encodes RandomSampling backwards into the tail of dAtA.
func (m *RandomSampling) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.UseIndependentRandomness {
		i--
		if m.UseIndependentRandomness {
			dAtA[i] = 1
		} else {
			dAtA[i] = 0
		}
		i--
		dAtA[i] = 0x18 // tag: field 3, wire type 0 (varint/bool)
	}
	if m.PercentSampled != nil {
		{
			size, err := m.PercentSampled.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintCfg(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x12
	}
	if len(m.AttributeExpression) > 0 {
		i -= len(m.AttributeExpression)
		copy(dAtA[i:], m.AttributeExpression)
		i = encodeVarintCfg(dAtA, i, uint64(len(m.AttributeExpression)))
		i--
		dAtA[i] = 0xa
	}
	return len(dAtA) - i, nil
}

// Marshal allocates an exactly-sized buffer and serializes m into it.
func (m *RateLimitSampling) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo serializes m into the front of dAtA.
func (m *RateLimitSampling) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer encodes RateLimitSampling backwards into the tail of dAtA.
// SamplingDuration is a non-nullable std duration, so it is always emitted.
func (m *RateLimitSampling) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.SamplingRate != 0 {
		i = encodeVarintCfg(dAtA, i, uint64(m.SamplingRate))
		i--
		dAtA[i] = 0x18
	}
	if m.MaxUnsampledEntries != 0 {
		i = encodeVarintCfg(dAtA, i, uint64(m.MaxUnsampledEntries))
		i--
		dAtA[i] = 0x10
	}
	n11, err11 := github_com_gogo_protobuf_types.StdDurationMarshalTo(m.SamplingDuration, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdDuration(m.SamplingDuration):])
	if err11 != nil {
		return 0, err11
	}
	i -= n11
	i = encodeVarintCfg(dAtA, i, uint64(n11))
	i--
	dAtA[i] = 0xa
	return len(dAtA) - i, nil
}

// Marshal allocates an exactly-sized buffer and serializes m into it.
func (m *FractionalPercent) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo serializes m into the front of dAtA.
func (m *FractionalPercent) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer encodes FractionalPercent backwards into the tail of dAtA.
func (m *FractionalPercent) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.Denominator != 0 {
		i = encodeVarintCfg(dAtA, i, uint64(m.Denominator))
		i--
		dAtA[i] = 0x10
	}
	if m.Numerator != 0 {
		i = encodeVarintCfg(dAtA, i, uint64(m.Numerator))
		i--
		dAtA[i] = 0x8
	}
	return len(dAtA) - i, nil
}

// Marshal allocates an exactly-sized buffer and serializes m into it.
func (m *Authentication) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo serializes m into the front of dAtA.
func (m *Authentication) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer encodes Authentication backwards; the oneof AuthType
// wrapper (Tls or Mutual) writes its own tag.
func (m *Authentication) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.AuthType != nil {
		{
			size := m.AuthType.Size()
			i -= size
			if _, err := m.AuthType.MarshalTo(dAtA[i:]); err != nil {
				return 0, err
			}
		}
	}
	return len(dAtA) - i, nil
}

// MarshalTo serializes the Tls oneof arm of Authentication.
func (m *Authentication_Tls) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}

func (m *Authentication_Tls) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	if m.Tls != nil {
		{
			size, err := m.Tls.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintCfg(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0xa
	}
	return len(dAtA) - i, nil
}

// MarshalTo serializes the Mutual oneof arm of Authentication.
func (m *Authentication_Mutual) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}

func (m *Authentication_Mutual) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	if m.Mutual != nil {
		{
			size, err := m.Mutual.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintCfg(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x12
	}
	return len(dAtA) - i, nil
}

// Marshal allocates an exactly-sized buffer and serializes m into it.
func (m *Tls) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo serializes m into the front of dAtA.
func (m *Tls) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer encodes Tls backwards; TokenSource and TokenType are
// oneof wrappers that write their own tags.
func (m *Tls) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if len(m.ServerName) > 0 {
		i -= len(m.ServerName)
		copy(dAtA[i:], m.ServerName)
		i = encodeVarintCfg(dAtA, i, uint64(len(m.ServerName)))
		i--
		dAtA[i] = 0x32 // tag: field 6, wire type 2
	}
	if m.TokenType != nil {
		{
			size := m.TokenType.Size()
			i -= size
			if _, err := m.TokenType.MarshalTo(dAtA[i:]); err != nil {
				return 0, err
			}
		}
	}
	if m.TokenSource != nil {
		{
			size := m.TokenSource.Size()
			i -= size
			if _, err := m.TokenSource.MarshalTo(dAtA[i:]); err != nil {
				return 0, err
			}
		}
	}
	if len(m.CaCertificates) > 0 {
		i -= len(m.CaCertificates)
		copy(dAtA[i:], m.CaCertificates)
		i = encodeVarintCfg(dAtA, i, uint64(len(m.CaCertificates)))
		i--
		dAtA[i] = 0xa
	}
	return len(dAtA) - i, nil
}

// MarshalTo serializes the TokenPath oneof arm of Tls.TokenSource.
func (m *Tls_TokenPath) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}

func (m *Tls_TokenPath) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	i -= len(m.TokenPath)
	copy(dAtA[i:], m.TokenPath)
	i = encodeVarintCfg(dAtA, i, uint64(len(m.TokenPath)))
	i--
	dAtA[i] = 0x12
	return len(dAtA) - i, nil
}

// MarshalTo serializes the Oauth oneof arm of Tls.TokenSource.
func (m *Tls_Oauth) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}

func (m *Tls_Oauth) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	if m.Oauth != nil {
		{
			size, err := m.Oauth.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintCfg(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x1a
	}
	return len(dAtA) - i, nil
}

// MarshalTo serializes the AuthHeader oneof arm of Tls.TokenType.
func (m *Tls_AuthHeader_) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}

func (m *Tls_AuthHeader_) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	i = encodeVarintCfg(dAtA, i, uint64(m.AuthHeader))
	i--
	dAtA[i] = 0x20 // tag: field 4, wire type 0 (enum varint)
	return len(dAtA) - i, nil
}

// MarshalTo serializes the CustomHeader oneof arm of Tls.TokenType.
func (m *Tls_CustomHeader) MarshalTo(dAtA []byte) (int, error) {
	return m.MarshalToSizedBuffer(dAtA[:m.Size()])
}

func (m *Tls_CustomHeader) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	i -= len(m.CustomHeader)
	copy(dAtA[i:], m.CustomHeader)
	i = encodeVarintCfg(dAtA, i, uint64(len(m.CustomHeader)))
	i--
	dAtA[i] = 0x2a
	return len(dAtA) - i, nil
}

// Marshal allocates an exactly-sized buffer and serializes m into it.
func (m *OAuth) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo serializes m into the front of dAtA.
func (m *OAuth) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer encodes OAuth backwards; each EndpointParams map entry
// is emitted as a nested (key=1, value=2) message under field 5.
// NOTE(review): map iteration order is randomized in Go, so the byte order of
// EndpointParams entries is not deterministic across calls.
func (m *OAuth) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if len(m.EndpointParams) > 0 {
		for k := range m.EndpointParams {
			v := m.EndpointParams[k]
			baseI := i
			i -= len(v)
			copy(dAtA[i:], v)
			i = encodeVarintCfg(dAtA, i, uint64(len(v)))
			i--
			dAtA[i] = 0x12
			i -= len(k)
			copy(dAtA[i:], k)
			i = encodeVarintCfg(dAtA, i, uint64(len(k)))
			i--
			dAtA[i] = 0xa
			i = encodeVarintCfg(dAtA, i, uint64(baseI-i))
			i--
			dAtA[i] = 0x2a
		}
	}
	if len(m.Scopes) > 0 {
		for iNdEx := len(m.Scopes) - 1; iNdEx >= 0; iNdEx-- {
			i -= len(m.Scopes[iNdEx])
			copy(dAtA[i:], m.Scopes[iNdEx])
			i = encodeVarintCfg(dAtA, i, uint64(len(m.Scopes[iNdEx])))
			i--
			dAtA[i] = 0x22
		}
	}
	if len(m.TokenUrl) > 0 {
		i -= len(m.TokenUrl)
		copy(dAtA[i:], m.TokenUrl)
		i = encodeVarintCfg(dAtA, i, uint64(len(m.TokenUrl)))
		i--
		dAtA[i] = 0x1a
	}
	if len(m.ClientSecret) > 0 {
		i -= len(m.ClientSecret)
		copy(dAtA[i:], m.ClientSecret)
		i = encodeVarintCfg(dAtA, i, uint64(len(m.ClientSecret)))
		i--
		dAtA[i] = 0x12
	}
	if len(m.ClientId) > 0 {
		i -= len(m.ClientId)
		copy(dAtA[i:], m.ClientId)
		i = encodeVarintCfg(dAtA, i, uint64(len(m.ClientId)))
		i--
		dAtA[i] = 0xa
	}
	return len(dAtA) - i, nil
}

// Marshal allocates an exactly-sized buffer and serializes m into it.
func (m *Mutual) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo serializes m into the front of dAtA.
func (m *Mutual) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer encodes Mutual backwards into the tail of dAtA.
func (m *Mutual) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if len(m.ServerName) > 0 {
		i -= len(m.ServerName)
		copy(dAtA[i:], m.ServerName)
		i = encodeVarintCfg(dAtA, i, uint64(len(m.ServerName)))
		i--
		dAtA[i] = 0x22
	}
	if len(m.CaCertificates) > 0 {
		i -= len(m.CaCertificates)
		copy(dAtA[i:], m.CaCertificates)
		i = encodeVarintCfg(dAtA, i, uint64(len(m.CaCertificates)))
		i--
		dAtA[i] = 0x1a
	}
	if len(m.ClientCertificate) > 0 {
		i -= len(m.ClientCertificate)
		copy(dAtA[i:], m.ClientCertificate)
		i = encodeVarintCfg(dAtA, i, uint64(len(m.ClientCertificate)))
		i--
		dAtA[i] = 0x12
	}
	if len(m.PrivateKey) > 0 {
		i -= len(m.PrivateKey)
		copy(dAtA[i:], m.PrivateKey)
		i = encodeVarintCfg(dAtA, i, uint64(len(m.PrivateKey)))
		i--
		dAtA[i] = 0xa
	}
	return len(dAtA) - i, nil
}

// encodeVarintCfg writes v as a protobuf varint ending just before offset
// (backward-fill convention) and returns the new write position.
func encodeVarintCfg(dAtA []byte, offset int, v uint64) int {
	offset -= sovCfg(v)
	base := offset
	for v >= 1<<7 {
		dAtA[offset] = uint8(v&0x7f | 0x80)
		v >>= 7
		offset++
	}
	dAtA[offset] = uint8(v)
	return base
}

func (m *AttributeManifest) 
Size() (n int) {
	// Size returns the serialized byte length of AttributeManifest;
	// the method header for this function is on the preceding chunk line.
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Revision)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	l = len(m.Name)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	if len(m.Attributes) > 0 {
		for k, v := range m.Attributes {
			_ = k
			_ = v
			l = 0
			if v != nil {
				l = v.Size()
				l += 1 + sovCfg(uint64(l))
			}
			mapEntrySize := 1 + len(k) + sovCfg(uint64(len(k))) + l
			n += mapEntrySize + 1 + sovCfg(uint64(mapEntrySize))
		}
	}
	return n
}

// Size returns the serialized byte length of AttributeManifest_AttributeInfo.
func (m *AttributeManifest_AttributeInfo) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Description)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	if m.ValueType != 0 {
		n += 1 + sovCfg(uint64(m.ValueType))
	}
	return n
}

// Size returns the serialized byte length of Rule.
func (m *Rule) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Match)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	if len(m.Actions) > 0 {
		for _, e := range m.Actions {
			l = e.Size()
			n += 1 + l + sovCfg(uint64(l))
		}
	}
	if len(m.RequestHeaderOperations) > 0 {
		for _, e := range m.RequestHeaderOperations {
			l = e.Size()
			n += 1 + l + sovCfg(uint64(l))
		}
	}
	if len(m.ResponseHeaderOperations) > 0 {
		for _, e := range m.ResponseHeaderOperations {
			l = e.Size()
			n += 1 + l + sovCfg(uint64(l))
		}
	}
	if m.Sampling != nil {
		l = m.Sampling.Size()
		n += 1 + l + sovCfg(uint64(l))
	}
	return n
}

// Size returns the serialized byte length of Rule_HeaderOperationTemplate.
func (m *Rule_HeaderOperationTemplate) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Name)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	if len(m.Values) > 0 {
		for _, s := range m.Values {
			l = len(s)
			n += 1 + l + sovCfg(uint64(l))
		}
	}
	if m.Operation != 0 {
		n += 1 + sovCfg(uint64(m.Operation))
	}
	return n
}

// Size returns the serialized byte length of Action.
func (m *Action) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Handler)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	if len(m.Instances) > 0 {
		for _, s := range m.Instances {
			l = len(s)
			n += 1 + l + sovCfg(uint64(l))
		}
	}
	l = len(m.Name)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	return n
}

// Size returns the serialized byte length of Instance.
func (m *Instance) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Name)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	l = len(m.Template)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	if m.Params != nil {
		l = m.Params.Size()
		n += 1 + l + sovCfg(uint64(l))
	}
	if len(m.AttributeBindings) > 0 {
		for k, v := range m.AttributeBindings {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovCfg(uint64(len(k))) + 1 + len(v) + sovCfg(uint64(len(v)))
			n += mapEntrySize + 1 + sovCfg(uint64(mapEntrySize))
		}
	}
	l = len(m.CompiledTemplate)
	if l > 0 {
		// 5-byte tag: CompiledTemplate has a very high field number.
		n += 5 + l + sovCfg(uint64(l))
	}
	return n
}

// Size returns the serialized byte length of Handler.
func (m *Handler) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Name)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	l = len(m.Adapter)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	if m.Params != nil {
		l = m.Params.Size()
		n += 1 + l + sovCfg(uint64(l))
	}
	if m.Connection != nil {
		l = m.Connection.Size()
		n += 1 + l + sovCfg(uint64(l))
	}
	l = len(m.CompiledAdapter)
	if l > 0 {
		// 5-byte tag: CompiledAdapter has a very high field number.
		n += 5 + l + sovCfg(uint64(l))
	}
	return n
}

// Size returns the serialized byte length of Connection.
func (m *Connection) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.Address)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	if m.Timeout != nil {
		l = github_com_gogo_protobuf_types.SizeOfStdDuration(*m.Timeout)
		n += 1 + l + sovCfg(uint64(l))
	}
	if m.Authentication != nil {
		l = m.Authentication.Size()
		n += 1 + l + sovCfg(uint64(l))
	}
	return n
}

// Size returns the serialized byte length of Sampling.
func (m *Sampling) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.Random != nil {
		l = m.Random.Size()
		n += 1 + l + sovCfg(uint64(l))
	}
	if m.RateLimit != nil {
		l = m.RateLimit.Size()
		n += 1 + l + sovCfg(uint64(l))
	}
	return n
}

// Size returns the serialized byte length of RandomSampling.
func (m *RandomSampling) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.AttributeExpression)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	if m.PercentSampled != nil {
		l = m.PercentSampled.Size()
		n += 1 + l + sovCfg(uint64(l))
	}
	if m.UseIndependentRandomness {
		n += 2
	}
	return n
}

// Size returns the serialized byte length of RateLimitSampling.
func (m *RateLimitSampling) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = github_com_gogo_protobuf_types.SizeOfStdDuration(m.SamplingDuration)
	n += 1 + l + sovCfg(uint64(l))
	if m.MaxUnsampledEntries != 0 {
		n += 1 + sovCfg(uint64(m.MaxUnsampledEntries))
	}
	if m.SamplingRate != 0 {
		n += 1 + sovCfg(uint64(m.SamplingRate))
	}
	return n
}

// Size returns the serialized byte length of FractionalPercent.
func (m *FractionalPercent) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.Numerator != 0 {
		n += 1 + sovCfg(uint64(m.Numerator))
	}
	if m.Denominator != 0 {
		n += 1 + sovCfg(uint64(m.Denominator))
	}
	return n
}

// Size returns the serialized byte length of Authentication (oneof delegates).
func (m *Authentication) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.AuthType != nil {
		n += m.AuthType.Size()
	}
	return n
}

func (m *Authentication_Tls) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.Tls != nil {
		l = m.Tls.Size()
		n += 1 + l + sovCfg(uint64(l))
	}
	return n
}

func (m *Authentication_Mutual) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.Mutual != nil {
		l = m.Mutual.Size()
		n += 1 + l + sovCfg(uint64(l))
	}
	return n
}

// Size returns the serialized byte length of Tls (oneofs delegate).
func (m *Tls) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.CaCertificates)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	if m.TokenSource != nil {
		n += m.TokenSource.Size()
	}
	if m.TokenType != nil {
		n += m.TokenType.Size()
	}
	l = len(m.ServerName)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	return n
}

func (m *Tls_TokenPath) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.TokenPath)
	n += 1 + l + sovCfg(uint64(l))
	return n
}

func (m *Tls_Oauth) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.Oauth != nil {
		l = m.Oauth.Size()
		n += 1 + l + sovCfg(uint64(l))
	}
	return n
}

func (m *Tls_AuthHeader_) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	n += 1 + sovCfg(uint64(m.AuthHeader))
	return n
}

func (m *Tls_CustomHeader) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.CustomHeader)
	n += 1 + l + sovCfg(uint64(l))
	return n
}

// Size returns the serialized byte length of OAuth.
func (m *OAuth) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.ClientId)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	l = len(m.ClientSecret)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	l = len(m.TokenUrl)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	if len(m.Scopes) > 0 {
		for _, s := range m.Scopes {
			l = len(s)
			n += 1 + l + sovCfg(uint64(l))
		}
	}
	if len(m.EndpointParams) > 0 {
		for k, v := range m.EndpointParams {
			_ = k
			_ = v
			mapEntrySize := 1 + len(k) + sovCfg(uint64(len(k))) + 1 + len(v) + sovCfg(uint64(len(v)))
			n += mapEntrySize + 1 + sovCfg(uint64(mapEntrySize))
		}
	}
	return n
}

// Size returns the serialized byte length of Mutual.
func (m *Mutual) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.PrivateKey)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	l = len(m.ClientCertificate)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	l = len(m.CaCertificates)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	l = len(m.ServerName)
	if l > 0 {
		n += 1 + l + sovCfg(uint64(l))
	}
	return n
}

// sovCfg returns the number of bytes needed to varint-encode x.
func sovCfg(x uint64) (n int) {
	return (math_bits.Len64(x|1) + 6) / 7
}

// sozCfg returns the varint size of x after zig-zag encoding.
func sozCfg(x uint64) (n int) {
	return sovCfg(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}

// String renders AttributeManifest for debugging; map keys are sorted so the
// output is deterministic.
func (this *AttributeManifest) String() string {
	if this == nil {
		return "nil"
	}
	keysForAttributes := make([]string, 0, len(this.Attributes))
	for k, _ := range this.Attributes {
		keysForAttributes = append(keysForAttributes, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForAttributes)
	mapStringForAttributes := "map[string]*AttributeManifest_AttributeInfo{"
	for _, k := range keysForAttributes {
		mapStringForAttributes += fmt.Sprintf("%v: %v,", k, this.Attributes[k])
	}
	mapStringForAttributes += "}"
	s := strings.Join([]string{`&AttributeManifest{`,
		`Revision:` + fmt.Sprintf("%v", this.Revision) + `,`,
		`Name:` + fmt.Sprintf("%v", this.Name) + `,`,
		`Attributes:` + mapStringForAttributes + `,`,
		`}`,
	}, "")
	return s
}

// String renders AttributeManifest_AttributeInfo for debugging.
func (this *AttributeManifest_AttributeInfo) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&AttributeManifest_AttributeInfo{`,
		`Description:` + fmt.Sprintf("%v", this.Description) + `,`,
		`ValueType:` + fmt.Sprintf("%v", 
this.ValueType) + `,`,
		`}`,
	}, "")
	return s
	// NOTE(review): the closing "}" and the "func (this *Rule) String() string {"
	// header appear to be missing at this chunk seam — verify against the
	// original generated file before relying on this view.
	if this == nil {
		return "nil"
	}
	repeatedStringForActions := "[]*Action{"
	for _, f := range this.Actions {
		repeatedStringForActions += strings.Replace(f.String(), "Action", "Action", 1) + ","
	}
	repeatedStringForActions += "}"
	repeatedStringForRequestHeaderOperations := "[]*Rule_HeaderOperationTemplate{"
	for _, f := range this.RequestHeaderOperations {
		repeatedStringForRequestHeaderOperations += strings.Replace(fmt.Sprintf("%v", f), "Rule_HeaderOperationTemplate", "Rule_HeaderOperationTemplate", 1) + ","
	}
	repeatedStringForRequestHeaderOperations += "}"
	repeatedStringForResponseHeaderOperations := "[]*Rule_HeaderOperationTemplate{"
	for _, f := range this.ResponseHeaderOperations {
		repeatedStringForResponseHeaderOperations += strings.Replace(fmt.Sprintf("%v", f), "Rule_HeaderOperationTemplate", "Rule_HeaderOperationTemplate", 1) + ","
	}
	repeatedStringForResponseHeaderOperations += "}"
	s := strings.Join([]string{`&Rule{`,
		`Match:` + fmt.Sprintf("%v", this.Match) + `,`,
		`Actions:` + repeatedStringForActions + `,`,
		`RequestHeaderOperations:` + repeatedStringForRequestHeaderOperations + `,`,
		`ResponseHeaderOperations:` + repeatedStringForResponseHeaderOperations + `,`,
		`Sampling:` + strings.Replace(this.Sampling.String(), "Sampling", "Sampling", 1) + `,`,
		`}`,
	}, "")
	return s
}

// String renders Rule_HeaderOperationTemplate for debugging.
func (this *Rule_HeaderOperationTemplate) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Rule_HeaderOperationTemplate{`,
		`Name:` + fmt.Sprintf("%v", this.Name) + `,`,
		`Values:` + fmt.Sprintf("%v", this.Values) + `,`,
		`Operation:` + fmt.Sprintf("%v", this.Operation) + `,`,
		`}`,
	}, "")
	return s
}

// String renders Action for debugging.
func (this *Action) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Action{`,
		`Handler:` + fmt.Sprintf("%v", this.Handler) + `,`,
		`Instances:` + fmt.Sprintf("%v", this.Instances) + `,`,
		`Name:` + fmt.Sprintf("%v", this.Name) + `,`,
		`}`,
	}, "")
	return s
}

// String renders Instance for debugging; map keys are sorted for determinism.
func (this *Instance) String() string {
	if this == nil {
		return "nil"
	}
	keysForAttributeBindings := make([]string, 0, len(this.AttributeBindings))
	for k, _ := range this.AttributeBindings {
		keysForAttributeBindings = append(keysForAttributeBindings, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForAttributeBindings)
	mapStringForAttributeBindings := "map[string]string{"
	for _, k := range keysForAttributeBindings {
		mapStringForAttributeBindings += fmt.Sprintf("%v: %v,", k, this.AttributeBindings[k])
	}
	mapStringForAttributeBindings += "}"
	s := strings.Join([]string{`&Instance{`,
		`Name:` + fmt.Sprintf("%v", this.Name) + `,`,
		`Template:` + fmt.Sprintf("%v", this.Template) + `,`,
		`Params:` + strings.Replace(fmt.Sprintf("%v", this.Params), "Struct", "types.Struct", 1) + `,`,
		`AttributeBindings:` + mapStringForAttributeBindings + `,`,
		`CompiledTemplate:` + fmt.Sprintf("%v", this.CompiledTemplate) + `,`,
		`}`,
	}, "")
	return s
}

// String renders Handler for debugging.
func (this *Handler) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Handler{`,
		`Name:` + fmt.Sprintf("%v", this.Name) + `,`,
		`Adapter:` + fmt.Sprintf("%v", this.Adapter) + `,`,
		`Params:` + strings.Replace(fmt.Sprintf("%v", this.Params), "Struct", "types.Struct", 1) + `,`,
		`Connection:` + strings.Replace(this.Connection.String(), "Connection", "Connection", 1) + `,`,
		`CompiledAdapter:` + fmt.Sprintf("%v", this.CompiledAdapter) + `,`,
		`}`,
	}, "")
	return s
}

// String renders Connection for debugging.
func (this *Connection) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Connection{`,
		`Address:` + fmt.Sprintf("%v", this.Address) + `,`,
		`Timeout:` + strings.Replace(fmt.Sprintf("%v", this.Timeout), "Duration", "types.Duration", 1) + `,`,
		`Authentication:` + strings.Replace(this.Authentication.String(), "Authentication", "Authentication", 1) + `,`,
		`}`,
	}, "")
	return s
}

// String renders Sampling for debugging.
func (this *Sampling) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Sampling{`,
		`Random:` + strings.Replace(this.Random.String(), "RandomSampling", "RandomSampling", 1) + `,`,
		`RateLimit:` + strings.Replace(this.RateLimit.String(), "RateLimitSampling", "RateLimitSampling", 1) + `,`,
		`}`,
	}, "")
	return s
}

// String renders RandomSampling for debugging.
func (this *RandomSampling) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&RandomSampling{`,
		`AttributeExpression:` + fmt.Sprintf("%v", this.AttributeExpression) + `,`,
		`PercentSampled:` + strings.Replace(this.PercentSampled.String(), "FractionalPercent", "FractionalPercent", 1) + `,`,
		`UseIndependentRandomness:` + fmt.Sprintf("%v", this.UseIndependentRandomness) + `,`,
		`}`,
	}, "")
	return s
}

// String renders RateLimitSampling for debugging.
func (this *RateLimitSampling) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&RateLimitSampling{`,
		`SamplingDuration:` + strings.Replace(strings.Replace(fmt.Sprintf("%v", this.SamplingDuration), "Duration", "types.Duration", 1), `&`, ``, 1) + `,`,
		`MaxUnsampledEntries:` + fmt.Sprintf("%v", this.MaxUnsampledEntries) + `,`,
		`SamplingRate:` + fmt.Sprintf("%v", this.SamplingRate) + `,`,
		`}`,
	}, "")
	return s
}

// String renders FractionalPercent for debugging.
func (this *FractionalPercent) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&FractionalPercent{`,
		`Numerator:` + fmt.Sprintf("%v", this.Numerator) + `,`,
		`Denominator:` + fmt.Sprintf("%v", this.Denominator) + `,`,
		`}`,
	}, "")
	return s
}

// String renders Authentication for debugging.
func (this *Authentication) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Authentication{`,
		`AuthType:` + fmt.Sprintf("%v", this.AuthType) + `,`,
		`}`,
	}, "")
	return s
}

func (this *Authentication_Tls) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Authentication_Tls{`,
		`Tls:` + strings.Replace(fmt.Sprintf("%v", this.Tls), "Tls", "Tls", 1) + `,`,
		`}`,
	}, "")
	return s
}

func (this *Authentication_Mutual) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Authentication_Mutual{`,
		`Mutual:` + strings.Replace(fmt.Sprintf("%v", this.Mutual), "Mutual", "Mutual", 1) + `,`,
		`}`,
	}, "")
	return s
}

// String renders Tls for debugging.
func (this *Tls) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Tls{`,
		`CaCertificates:` + fmt.Sprintf("%v", this.CaCertificates) + `,`,
		`TokenSource:` + fmt.Sprintf("%v", this.TokenSource) + `,`,
		`TokenType:` + fmt.Sprintf("%v", this.TokenType) + `,`,
		`ServerName:` + fmt.Sprintf("%v", this.ServerName) + `,`,
		`}`,
	}, "")
	return s
}

func (this *Tls_TokenPath) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Tls_TokenPath{`,
		`TokenPath:` + fmt.Sprintf("%v", this.TokenPath) + `,`,
		`}`,
	}, "")
	return s
}

func (this *Tls_Oauth) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Tls_Oauth{`,
		`Oauth:` + strings.Replace(fmt.Sprintf("%v", this.Oauth), "OAuth", "OAuth", 1) + `,`,
		`}`,
	}, "")
	return s
}

func (this *Tls_AuthHeader_) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Tls_AuthHeader_{`,
		`AuthHeader:` + fmt.Sprintf("%v", this.AuthHeader) + `,`,
		`}`,
	}, "")
	return s
}

func (this *Tls_CustomHeader) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Tls_CustomHeader{`,
		`CustomHeader:` + fmt.Sprintf("%v", this.CustomHeader) + `,`,
		`}`,
	}, "")
	return s
}

// String renders OAuth for debugging; map keys are sorted for determinism.
func (this *OAuth) String() string {
	if this == nil {
		return "nil"
	}
	keysForEndpointParams := make([]string, 0, len(this.EndpointParams))
	for k, _ := range this.EndpointParams {
		keysForEndpointParams = append(keysForEndpointParams, k)
	}
	github_com_gogo_protobuf_sortkeys.Strings(keysForEndpointParams)
	mapStringForEndpointParams := "map[string]string{"
	for _, k := range keysForEndpointParams {
		mapStringForEndpointParams += fmt.Sprintf("%v: %v,", k, this.EndpointParams[k])
	}
	mapStringForEndpointParams += "}"
	s := strings.Join([]string{`&OAuth{`,
		`ClientId:` + fmt.Sprintf("%v", this.ClientId) + `,`,
		`ClientSecret:` + fmt.Sprintf("%v", this.ClientSecret) + `,`,
		`TokenUrl:` + fmt.Sprintf("%v", this.TokenUrl) + `,`,
		`Scopes:` + fmt.Sprintf("%v", this.Scopes) + `,`,
		`EndpointParams:` + mapStringForEndpointParams + `,`,
		`}`,
	}, "")
return s
}

// String renders Mutual for debugging.
func (this *Mutual) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&Mutual{`,
		`PrivateKey:` + fmt.Sprintf("%v", this.PrivateKey) + `,`,
		`ClientCertificate:` + fmt.Sprintf("%v", this.ClientCertificate) + `,`,
		`CaCertificates:` + fmt.Sprintf("%v", this.CaCertificates) + `,`,
		`ServerName:` + fmt.Sprintf("%v", this.ServerName) + `,`,
		`}`,
	}, "")
	return s
}

// valueToStringCfg dereferences a pointer value for printing, or "nil".
func valueToStringCfg(v interface{}) string {
	rv := reflect.ValueOf(v)
	if rv.IsNil() {
		return "nil"
	}
	pv := reflect.Indirect(rv).Interface()
	return fmt.Sprintf("*%v", pv)
}

// Unmarshal decodes the protobuf wire format in dAtA into m, accumulating
// fields (repeated/map entries are appended; unknown fields are skipped).
func (m *AttributeManifest) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Read the field tag varint.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowCfg
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: AttributeManifest: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: AttributeManifest: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Revision", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthCfg
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthCfg
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Revision = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthCfg
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthCfg
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Name = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 3:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Attributes", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthCfg
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthCfg
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Attributes == nil {
				m.Attributes = make(map[string]*AttributeManifest_AttributeInfo)
			}
			// Decode one map entry: nested message with key=field 1, value=field 2.
			var mapkey string
			var mapvalue *AttributeManifest_AttributeInfo
			for iNdEx < postIndex {
				entryPreIndex := iNdEx
				var wire uint64
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return ErrIntOverflowCfg
					}
					if iNdEx >= l {
						return io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					wire |= uint64(b&0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				fieldNum := int32(wire >> 3)
				if fieldNum == 1 {
					var stringLenmapkey uint64
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowCfg
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						stringLenmapkey |= uint64(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					intStringLenmapkey := int(stringLenmapkey)
					if intStringLenmapkey < 0 {
						return ErrInvalidLengthCfg
					}
					postStringIndexmapkey := iNdEx + intStringLenmapkey
					if postStringIndexmapkey < 0 {
						return ErrInvalidLengthCfg
					}
					if postStringIndexmapkey > l {
						return io.ErrUnexpectedEOF
					}
					mapkey = string(dAtA[iNdEx:postStringIndexmapkey])
					iNdEx = postStringIndexmapkey
				} else if fieldNum == 2 {
					var mapmsglen int
					for shift := uint(0); ; shift += 7 {
						if shift >= 64 {
							return ErrIntOverflowCfg
						}
						if iNdEx >= l {
							return io.ErrUnexpectedEOF
						}
						b := dAtA[iNdEx]
						iNdEx++
						mapmsglen |= int(b&0x7F) << shift
						if b < 0x80 {
							break
						}
					}
					if mapmsglen < 0 {
						return ErrInvalidLengthCfg
					}
					postmsgIndex := iNdEx + mapmsglen
					if postmsgIndex < 0 {
						return ErrInvalidLengthCfg
					}
					if postmsgIndex > l {
						return io.ErrUnexpectedEOF
					}
					mapvalue = &AttributeManifest_AttributeInfo{}
					if err := mapvalue.Unmarshal(dAtA[iNdEx:postmsgIndex]); err != nil {
						return err
					}
					iNdEx = postmsgIndex
				} else {
					// Unknown field inside the map entry: skip it.
					iNdEx = entryPreIndex
					skippy, err := skipCfg(dAtA[iNdEx:])
					if err != nil {
						return err
					}
					if skippy < 0 {
						return ErrInvalidLengthCfg
					}
					if (iNdEx + skippy) > postIndex {
						return io.ErrUnexpectedEOF
					}
					iNdEx += skippy
				}
			}
			m.Attributes[mapkey] = mapvalue
			iNdEx = postIndex
		default:
			// Unknown field: skip it wholesale.
			iNdEx = preIndex
			skippy, err := skipCfg(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthCfg
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthCfg
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}

// Unmarshal decodes the protobuf wire format in dAtA into m.
func (m *AttributeManifest_AttributeInfo) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowCfg
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: AttributeInfo: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: AttributeInfo: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Description", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthCfg
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthCfg
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Description = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field ValueType", wireType)
			}
			m.ValueType = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.ValueType |= ValueType(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		default:
			iNdEx = preIndex
			skippy, err := skipCfg(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthCfg
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthCfg
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}

// Unmarshal decodes the protobuf wire format in dAtA into m.
func (m *Rule) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowCfg
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: Rule: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: Rule: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Match", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthCfg
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthCfg
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Match = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Actions", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthCfg
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthCfg
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Actions = append(m.Actions, &Action{})
			if err := m.Actions[len(m.Actions)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field RequestHeaderOperations", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthCfg
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthCfg
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.RequestHeaderOperations = append(m.RequestHeaderOperations, &Rule_HeaderOperationTemplate{})
			if err := m.RequestHeaderOperations[len(m.RequestHeaderOperations)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 4:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ResponseHeaderOperations", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthCfg
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthCfg
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.ResponseHeaderOperations = append(m.ResponseHeaderOperations, &Rule_HeaderOperationTemplate{})
			if err := m.ResponseHeaderOperations[len(m.ResponseHeaderOperations)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 5:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Sampling", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthCfg
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthCfg
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Sampling == nil {
				m.Sampling = &Sampling{}
			}
			if err := m.Sampling.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			iNdEx = preIndex
			skippy, err := skipCfg(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthCfg
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthCfg
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}

// Unmarshal decodes the protobuf wire format in dAtA into m.
func (m *Rule_HeaderOperationTemplate) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowCfg
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: HeaderOperationTemplate: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: HeaderOperationTemplate: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthCfg
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthCfg
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Name = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Values", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthCfg
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthCfg
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Values = append(m.Values, string(dAtA[iNdEx:postIndex]))
			iNdEx = postIndex
		case 3:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Operation", wireType)
			}
			m.Operation = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCfg
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.Operation |= Rule_HeaderOperationTemplate_Operation(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		default:
			iNdEx = preIndex
			skippy, err := skipCfg(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthCfg
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthCfg
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return 
io.ErrUnexpectedEOF } return nil } func (m *Action) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: Action: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: Action: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Handler", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.Handler = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Instances", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.Instances = append(m.Instances, string(dAtA[iNdEx:postIndex])) iNdEx = postIndex case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) } var stringLen 
uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.Name = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *Instance) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: Instance: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: Instance: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.Name = 
string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Template", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.Template = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Params", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } if m.Params == nil { m.Params = &types.Struct{} } if err := m.Params.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field AttributeBindings", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } if m.AttributeBindings == nil { m.AttributeBindings = make(map[string]string) } var mapkey string var mapvalue string for iNdEx < postIndex { entryPreIndex := iNdEx var wire uint64 for shift := 
uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) if fieldNum == 1 { var stringLenmapkey uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLenmapkey |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLenmapkey := int(stringLenmapkey) if intStringLenmapkey < 0 { return ErrInvalidLengthCfg } postStringIndexmapkey := iNdEx + intStringLenmapkey if postStringIndexmapkey < 0 { return ErrInvalidLengthCfg } if postStringIndexmapkey > l { return io.ErrUnexpectedEOF } mapkey = string(dAtA[iNdEx:postStringIndexmapkey]) iNdEx = postStringIndexmapkey } else if fieldNum == 2 { var stringLenmapvalue uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLenmapvalue |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLenmapvalue := int(stringLenmapvalue) if intStringLenmapvalue < 0 { return ErrInvalidLengthCfg } postStringIndexmapvalue := iNdEx + intStringLenmapvalue if postStringIndexmapvalue < 0 { return ErrInvalidLengthCfg } if postStringIndexmapvalue > l { return io.ErrUnexpectedEOF } mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue]) iNdEx = postStringIndexmapvalue } else { iNdEx = entryPreIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > postIndex { return io.ErrUnexpectedEOF } iNdEx += skippy } } m.AttributeBindings[mapkey] = mapvalue iNdEx = postIndex case 67794676: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field CompiledTemplate", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if 
iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.CompiledTemplate = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *Handler) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: Handler: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: Handler: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.Name = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong 
wireType = %d for field Adapter", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.Adapter = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Params", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } if m.Params == nil { m.Params = &types.Struct{} } if err := m.Params.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Connection", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } if m.Connection == nil { m.Connection = &Connection{} } if err := m.Connection.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 67794676: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field CompiledAdapter", wireType) } var stringLen uint64 for shift 
:= uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.CompiledAdapter = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *Connection) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: Connection: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: Connection: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.Address = 
string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Timeout", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } if m.Timeout == nil { m.Timeout = new(time.Duration) } if err := github_com_gogo_protobuf_types.StdDurationUnmarshal(m.Timeout, dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Authentication", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } if m.Authentication == nil { m.Authentication = &Authentication{} } if err := m.Authentication.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *Sampling) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := 
dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: Sampling: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: Sampling: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Random", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } if m.Random == nil { m.Random = &RandomSampling{} } if err := m.Random.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field RateLimit", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } if m.RateLimit == nil { m.RateLimit = &RateLimitSampling{} } if err := m.RateLimit.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *RandomSampling) 
Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: RandomSampling: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: RandomSampling: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field AttributeExpression", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.AttributeExpression = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field PercentSampled", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } if m.PercentSampled == nil { m.PercentSampled = &FractionalPercent{} } if err := m.PercentSampled.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 3: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field 
UseIndependentRandomness", wireType) } var v int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ v |= int(b&0x7F) << shift if b < 0x80 { break } } m.UseIndependentRandomness = bool(v != 0) default: iNdEx = preIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *RateLimitSampling) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: RateLimitSampling: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: RateLimitSampling: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field SamplingDuration", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } if err := github_com_gogo_protobuf_types.StdDurationUnmarshal(&m.SamplingDuration, dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 2: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field 
MaxUnsampledEntries", wireType) } m.MaxUnsampledEntries = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ m.MaxUnsampledEntries |= int64(b&0x7F) << shift if b < 0x80 { break } } case 3: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field SamplingRate", wireType) } m.SamplingRate = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ m.SamplingRate |= int64(b&0x7F) << shift if b < 0x80 { break } } default: iNdEx = preIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *FractionalPercent) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: FractionalPercent: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: FractionalPercent: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field Numerator", wireType) } m.Numerator = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ m.Numerator |= uint32(b&0x7F) << shift if b < 0x80 { break } } case 2: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for 
field Denominator", wireType) } m.Denominator = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ m.Denominator |= FractionalPercent_DenominatorType(b&0x7F) << shift if b < 0x80 { break } } default: iNdEx = preIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *Authentication) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: Authentication: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: Authentication: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Tls", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } v := &Tls{} if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } m.AuthType = &Authentication_Tls{v} iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Mutual", wireType) } var msglen int for shift 
:= uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } v := &Mutual{} if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } m.AuthType = &Authentication_Mutual{v} iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *Tls) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: Tls: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: Tls: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field CaCertificates", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.CaCertificates = 
string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field TokenPath", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.TokenSource = &Tls_TokenPath{string(dAtA[iNdEx:postIndex])} iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Oauth", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } v := &OAuth{} if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } m.TokenSource = &Tls_Oauth{v} iNdEx = postIndex case 4: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field AuthHeader", wireType) } var v Tls_AuthHeader for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ v |= Tls_AuthHeader(b&0x7F) << shift if b < 0x80 { break } } m.TokenType = &Tls_AuthHeader_{v} case 5: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field CustomHeader", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << 
shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.TokenType = &Tls_CustomHeader{string(dAtA[iNdEx:postIndex])} iNdEx = postIndex case 6: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ServerName", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.ServerName = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *OAuth) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: OAuth: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: OAuth: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ClientId", wireType) } var stringLen uint64 for shift := 
uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.ClientId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ClientSecret", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.ClientSecret = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field TokenUrl", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.TokenUrl = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Scopes", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= 
uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.Scopes = append(m.Scopes, string(dAtA[iNdEx:postIndex])) iNdEx = postIndex case 5: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field EndpointParams", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= int(b&0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } if m.EndpointParams == nil { m.EndpointParams = make(map[string]string) } var mapkey string var mapvalue string for iNdEx < postIndex { entryPreIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) if fieldNum == 1 { var stringLenmapkey uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLenmapkey |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLenmapkey := int(stringLenmapkey) if intStringLenmapkey < 0 { return ErrInvalidLengthCfg } postStringIndexmapkey := iNdEx + intStringLenmapkey if postStringIndexmapkey < 0 { return ErrInvalidLengthCfg } if postStringIndexmapkey > l { return io.ErrUnexpectedEOF } mapkey = string(dAtA[iNdEx:postStringIndexmapkey]) iNdEx = postStringIndexmapkey } else if fieldNum == 2 { var stringLenmapvalue uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return 
ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLenmapvalue |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLenmapvalue := int(stringLenmapvalue) if intStringLenmapvalue < 0 { return ErrInvalidLengthCfg } postStringIndexmapvalue := iNdEx + intStringLenmapvalue if postStringIndexmapvalue < 0 { return ErrInvalidLengthCfg } if postStringIndexmapvalue > l { return io.ErrUnexpectedEOF } mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue]) iNdEx = postStringIndexmapvalue } else { iNdEx = entryPreIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > postIndex { return io.ErrUnexpectedEOF } iNdEx += skippy } } m.EndpointParams[mapkey] = mapvalue iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *Mutual) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: Mutual: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: Mutual: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field PrivateKey", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := 
dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.PrivateKey = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ClientCertificate", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.ClientCertificate = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field CaCertificates", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.CaCertificates = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ServerName", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowCfg } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= uint64(b&0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 
0 { return ErrInvalidLengthCfg } postIndex := iNdEx + intStringLen if postIndex < 0 { return ErrInvalidLengthCfg } if postIndex > l { return io.ErrUnexpectedEOF } m.ServerName = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipCfg(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) < 0 { return ErrInvalidLengthCfg } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func skipCfg(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 for iNdEx < l { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowCfg } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } wireType := int(wire & 0x7) switch wireType { case 0: for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowCfg } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } iNdEx++ if dAtA[iNdEx-1] < 0x80 { break } } return iNdEx, nil case 1: iNdEx += 8 return iNdEx, nil case 2: var length int for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowCfg } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ length |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if length < 0 { return 0, ErrInvalidLengthCfg } iNdEx += length if iNdEx < 0 { return 0, ErrInvalidLengthCfg } return iNdEx, nil case 3: for { var innerWire uint64 var start int = iNdEx for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowCfg } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ innerWire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } innerWireType := int(innerWire & 0x7) if innerWireType == 4 { break } next, err := skipCfg(dAtA[start:]) if err != nil { return 0, err } iNdEx = start + next if iNdEx < 0 { return 0, 
ErrInvalidLengthCfg } } return iNdEx, nil case 4: return iNdEx, nil case 5: iNdEx += 4 return iNdEx, nil default: return 0, fmt.Errorf("proto: illegal wireType %d", wireType) } } panic("unreachable") } var ( ErrInvalidLengthCfg = fmt.Errorf("proto: negative length found during unmarshaling") ErrIntOverflowCfg = fmt.Errorf("proto: integer overflow") )
} func (this *Rule) String() string {
mathutil.go
// Copyright (c) 2014 The mathutil Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Package mathutil provides utilities supplementing the standard 'math' and // 'math/rand' packages. // // Release history and compatibility issues // // 2018-10-21 Added BinaryLog // // 2018-04-25: New functions for determinig Max/Min of nullable values. Ex: // func MaxPtr(a, b *int) *int { // func MinPtr(a, b *int) *int { // func MaxBytePtr(a, b *byte) *byte { // func MinBytePtr(a, b *byte) *byte { // ... // // 2017-10-14: New variadic functions for Max/Min. Ex: // func MaxVal(val int, vals ...int) int { // func MinVal(val int, vals ...int) int { // func MaxByteVal(val byte, vals ...byte) byte { // func MinByteVal(val byte, vals ...byte) byte { // ... // // 2016-10-10: New functions QuadPolyDiscriminant and QuadPolyFactors. // // 2013-12-13: The following functions have been REMOVED // // func Uint64ToBigInt(n uint64) *big.Int // func Uint64FromBigInt(n *big.Int) (uint64, bool) // // 2013-05-13: The following functions are now DEPRECATED // // func Uint64ToBigInt(n uint64) *big.Int // func Uint64FromBigInt(n *big.Int) (uint64, bool) // // These functions will be REMOVED with Go release 1.1+1. // // 2013-01-21: The following functions have been REMOVED // // func MaxInt() int // func MinInt() int // func MaxUint() uint // func UintPtrBits() int // // They are now replaced by untyped constants // // MaxInt // MinInt // MaxUint // UintPtrBits // // Additionally one more untyped constant was added // // IntBits // // This change breaks any existing code depending on the above removed // functions. They should have not been published in the first place, that was // unfortunate. Instead, defining such architecture and/or implementation // specific integer limits and bit widths as untyped constants improves // performance and allows for static dead code elimination if it depends on // these values. 
Thanks to minux for pointing it out in the mail list // (https://groups.google.com/d/msg/golang-nuts/tlPpLW6aJw8/NT3mpToH-a4J). // // 2012-12-12: The following functions will be DEPRECATED with Go release // 1.0.3+1 and REMOVED with Go release 1.0.3+2, b/c of // http://code.google.com/p/go/source/detail?r=954a79ee3ea8 // // func Uint64ToBigInt(n uint64) *big.Int // func Uint64FromBigInt(n *big.Int) (uint64, bool) package mathutil // import "modernc.org/mathutil" import ( "math" "math/big" ) // Architecture and/or implementation specific integer limits and bit widths. const ( MaxInt = 1<<(IntBits-1) - 1 MinInt = -MaxInt - 1 MaxUint = 1<<IntBits - 1 IntBits = 1 << (^uint(0)>>32&1 + ^uint(0)>>16&1 + ^uint(0)>>8&1 + 3) UintPtrBits = 1 << (^uintptr(0)>>32&1 + ^uintptr(0)>>16&1 + ^uintptr(0)>>8&1 + 3) ) var ( _m1 = big.NewInt(-1) _1 = big.NewInt(1) _2 = big.NewInt(2) ) // GCDByte returns the greatest common divisor of a and b. Based on: // http://en.wikipedia.org/wiki/Euclidean_algorithm#Implementations func GCDByte(a, b byte) byte { for b != 0 { a, b = b, a%b } return a } // GCDUint16 returns the greatest common divisor of a and b. func GCDUint16(a, b uint16) uint16 { for b != 0 { a, b = b, a%b } return a } // GCDUint32 returns the greatest common divisor of a and b. func GCDUint32(a, b uint32) uint32 { for b != 0 { a, b = b, a%b } return a } // GCDUint64 returns the greatest common divisor of a and b. func GCDUint64(a, b uint64) uint64 { for b != 0 { a, b = b, a%b } return a } // ISqrt returns floor(sqrt(n)). Typical run time is few hundreds of ns. func ISqrt(n uint32) (x uint32) { if n == 0 { return } if n >= math.MaxUint16*math.MaxUint16 { return math.MaxUint16 } var px, nx uint32 for x = n; ; px, x = x, nx { nx = (x + n/x) / 2 if nx == x || nx == px { break } } return } // SqrtUint64 returns floor(sqrt(n)). Typical run time is about 0.5 µs. 
func SqrtUint64(n uint64) (x uint64) { if n == 0 { return } if n >= math.MaxUint32*math.MaxUint32 { return math.MaxUint32 } var px, nx uint64 for x = n; ; px, x = x, nx { nx = (x + n/x) / 2 if nx == x || nx == px { break } } return } // SqrtBig returns floor(sqrt(n)). It panics on n < 0. func SqrtBig(n *big.Int) (x *big.Int) { switch n.Sign() { case -1: panic(-1) case 0: return big.NewInt(0) } var px, nx big.Int x = big.NewInt(0) x.SetBit(x, n.BitLen()/2+1, 1) for { nx.Rsh(nx.Add(x, nx.Div(n, x)), 1) if nx.Cmp(x) == 0 || nx.Cmp(&px) == 0 { break } px.Set(x) x.Set(&nx) } return } // Log2Byte returns log base 2 of n. It's the same as index of the highest // bit set in n. For n == 0 -1 is returned. func Log2Byte(n byte) int { return log2[n] } // Log2Uint16 returns log base 2 of n. It's the same as index of the highest // bit set in n. For n == 0 -1 is returned. func Log2Uint16(n uint16) int { if b := n >> 8; b != 0 { return log2[b] + 8 } return log2[n] } // Log2Uint32 returns log base 2 of n. It's the same as index of the highest // bit set in n. For n == 0 -1 is returned. func Log2Uint32(n uint32) int { if b := n >> 24; b != 0 { return log2[b] + 24 } if b := n >> 16; b != 0 { return log2[b] + 16 } if b := n >> 8; b != 0 { return log2[b] + 8 } return log2[n] } // Log2Uint64 returns log base 2 of n. It's the same as index of the highest // bit set in n. For n == 0 -1 is returned. func Log2Uint64(n uint64) int { if b := n >> 56; b != 0 { return log2[b] + 56 } if b := n >> 48; b != 0 { return log2[b] + 48 } if b := n >> 40; b != 0 { return log2[b] + 40 } if b := n >> 32; b != 0 { return log2[b] + 32 } if b := n >> 24; b != 0 { return log2[b] + 24 } if b := n >> 16; b != 0 { return log2[b] + 16 } if b := n >> 8; b != 0 { return log2[b] + 8 } return log2[n] } // ModPowByte computes (b^e)%m. It panics for m == 0 || b == e == 0. 
// // See also: http://en.wikipedia.org/wiki/Modular_exponentiation#Right-to-left_binary_method func ModPowByte(b, e, m byte) byte { if b == 0 && e == 0 { panic(0) } if m == 1 { return 0 } r := uint16(1) for b, m := uint16(b), uint16(m); e > 0; b, e = b*b%m, e>>1 { if e&1 == 1 { r = r * b % m } } return byte(r) } // ModPowUint16 computes (b^e)%m. It panics for m == 0 || b == e == 0. func ModPowUint16(b, e, m uint16) uint16 { if b == 0 && e == 0 { panic(0) } if m == 1 { return 0 } r := uint32(1) for b, m := uint32(b), uint32(m); e > 0; b, e = b*b%m, e>>1 { if e&1 == 1 { r = r * b % m } } return uint16(r) } // ModPowUint32 computes (b^e)%m. It panics for m == 0 || b == e == 0. func ModPowUint32(b, e, m uint32) uint32 { if b == 0 && e == 0 { panic(0) } if m == 1 { return 0 } r := uint64(1) for b, m := uint64(b), uint64(m); e > 0; b, e = b*b%m, e>>1 { if e&1 == 1 { r = r * b % m } } return uint32(r) } // ModPowUint64 computes (b^e)%m. It panics for m == 0 || b == e == 0. func ModPowUint64(b, e, m uint64) (r uint64) { if b == 0 && e == 0 { panic(0) } if m == 1 { return 0 } return modPowBigInt(big.NewInt(0).SetUint64(b), big.NewInt(0).SetUint64(e), big.NewInt(0).SetUint64(m)).Uint64() } func m
b, e, m *big.Int) (r *big.Int) { r = big.NewInt(1) for i, n := 0, e.BitLen(); i < n; i++ { if e.Bit(i) != 0 { r.Mod(r.Mul(r, b), m) } b.Mod(b.Mul(b, b), m) } return } // ModPowBigInt computes (b^e)%m. Returns nil for e < 0. It panics for m == 0 || b == e == 0. func ModPowBigInt(b, e, m *big.Int) (r *big.Int) { if b.Sign() == 0 && e.Sign() == 0 { panic(0) } if m.Cmp(_1) == 0 { return big.NewInt(0) } if e.Sign() < 0 { return } return modPowBigInt(big.NewInt(0).Set(b), big.NewInt(0).Set(e), m) } var uint64ToBigIntDelta big.Int func init() { uint64ToBigIntDelta.SetBit(&uint64ToBigIntDelta, 63, 1) } var uintptrBits int func init() { x := uint64(math.MaxUint64) uintptrBits = BitLenUintptr(uintptr(x)) } // UintptrBits returns the bit width of an uintptr at the executing machine. func UintptrBits() int { return uintptrBits } // AddUint128_64 returns the uint128 sum of uint64 a and b. func AddUint128_64(a, b uint64) (hi uint64, lo uint64) { lo = a + b if lo < a { hi = 1 } return hi, lo } // MulUint128_64 returns the uint128 bit product of uint64 a and b. func MulUint128_64(a, b uint64) (hi, lo uint64) { /* 2^(2 W) ahi bhi + 2^W alo bhi + 2^W ahi blo + alo blo FEDCBA98 76543210 FEDCBA98 76543210 ---- alo*blo ---- ---- alo*bhi ---- ---- ahi*blo ---- ---- ahi*bhi ---- */ const w = 32 const m = 1<<w - 1 ahi, bhi, alo, blo := a>>w, b>>w, a&m, b&m lo = alo * blo mid1 := alo * bhi mid2 := ahi * blo c1, lo := AddUint128_64(lo, mid1<<w) c2, lo := AddUint128_64(lo, mid2<<w) _, hi = AddUint128_64(ahi*bhi, mid1>>w+mid2>>w+c1+c2) return } // PowerizeBigInt returns (e, p) such that e is the smallest number for which p // == b^e is greater or equal n. For n < 0 or b < 2 (0, nil) is returned. // // NOTE: Run time for large values of n (above about 2^1e6 ~= 1e300000) can be // significant and/or unacceptabe. For any smaller values of n the function // typically performs in sub second time. For "small" values of n (cca bellow // 2^1e3 ~= 1e300) the same can be easily below 10 µs. 
// // A special (and trivial) case of b == 2 is handled separately and performs // much faster. func PowerizeBigInt(b, n *big.Int) (e uint32, p *big.Int) { switch { case b.Cmp(_2) < 0 || n.Sign() < 0: return case n.Sign() == 0 || n.Cmp(_1) == 0: return 0, big.NewInt(1) case b.Cmp(_2) == 0: p = big.NewInt(0) e = uint32(n.BitLen() - 1) p.SetBit(p, int(e), 1) if p.Cmp(n) < 0 { p.Mul(p, _2) e++ } return } bw := b.BitLen() nw := n.BitLen() p = big.NewInt(1) var bb, r big.Int for { switch p.Cmp(n) { case -1: x := uint32((nw - p.BitLen()) / bw) if x == 0 { x = 1 } e += x switch x { case 1: p.Mul(p, b) default: r.Set(_1) bb.Set(b) e := x for { if e&1 != 0 { r.Mul(&r, &bb) } if e >>= 1; e == 0 { break } bb.Mul(&bb, &bb) } p.Mul(p, &r) } case 0, 1: return } } } // PowerizeUint32BigInt returns (e, p) such that e is the smallest number for // which p == b^e is greater or equal n. For n < 0 or b < 2 (0, nil) is // returned. // // More info: see PowerizeBigInt. func PowerizeUint32BigInt(b uint32, n *big.Int) (e uint32, p *big.Int) { switch { case b < 2 || n.Sign() < 0: return case n.Sign() == 0 || n.Cmp(_1) == 0: return 0, big.NewInt(1) case b == 2: p = big.NewInt(0) e = uint32(n.BitLen() - 1) p.SetBit(p, int(e), 1) if p.Cmp(n) < 0 { p.Mul(p, _2) e++ } return } var bb big.Int bb.SetInt64(int64(b)) return PowerizeBigInt(&bb, n) } /* ProbablyPrimeUint32 returns true if n is prime or n is a pseudoprime to base a. It implements the Miller-Rabin primality test for one specific value of 'a' and k == 1. 
Wrt pseudocode shown at http://en.wikipedia.org/wiki/Miller-Rabin_primality_test#Algorithm_and_running_time Input: n > 3, an odd integer to be tested for primality; Input: k, a parameter that determines the accuracy of the test Output: composite if n is composite, otherwise probably prime write n − 1 as 2^s·d with d odd by factoring powers of 2 from n − 1 LOOP: repeat k times: pick a random integer a in the range [2, n − 2] x ← a^d mod n if x = 1 or x = n − 1 then do next LOOP for r = 1 .. s − 1 x ← x^2 mod n if x = 1 then return composite if x = n − 1 then do next LOOP return composite return probably prime ... this function behaves like passing 1 for 'k' and additionally a fixed/non-random 'a'. Otherwise it's the same algorithm. See also: http://mathworld.wolfram.com/Rabin-MillerStrongPseudoprimeTest.html */ func ProbablyPrimeUint32(n, a uint32) bool { d, s := n-1, 0 for ; d&1 == 0; d, s = d>>1, s+1 { } x := uint64(ModPowUint32(a, d, n)) if x == 1 || uint32(x) == n-1 { return true } for ; s > 1; s-- { if x = x * x % uint64(n); x == 1 { return false } if uint32(x) == n-1 { return true } } return false } // ProbablyPrimeUint64_32 returns true if n is prime or n is a pseudoprime to // base a. It implements the Miller-Rabin primality test for one specific value // of 'a' and k == 1. See also ProbablyPrimeUint32. func ProbablyPrimeUint64_32(n uint64, a uint32) bool { d, s := n-1, 0 for ; d&1 == 0; d, s = d>>1, s+1 { } x := ModPowUint64(uint64(a), d, n) if x == 1 || x == n-1 { return true } bx, bn := big.NewInt(0).SetUint64(x), big.NewInt(0).SetUint64(n) for ; s > 1; s-- { if x = bx.Mod(bx.Mul(bx, bx), bn).Uint64(); x == 1 { return false } if x == n-1 { return true } } return false } // ProbablyPrimeBigInt_32 returns true if n is prime or n is a pseudoprime to // base a. It implements the Miller-Rabin primality test for one specific value // of 'a' and k == 1. See also ProbablyPrimeUint32. 
func ProbablyPrimeBigInt_32(n *big.Int, a uint32) bool { var d big.Int d.Set(n) d.Sub(&d, _1) // d <- n-1 s := 0 for ; d.Bit(s) == 0; s++ { } nMinus1 := big.NewInt(0).Set(&d) d.Rsh(&d, uint(s)) x := ModPowBigInt(big.NewInt(int64(a)), &d, n) if x.Cmp(_1) == 0 || x.Cmp(nMinus1) == 0 { return true } for ; s > 1; s-- { if x = x.Mod(x.Mul(x, x), n); x.Cmp(_1) == 0 { return false } if x.Cmp(nMinus1) == 0 { return true } } return false } // ProbablyPrimeBigInt returns true if n is prime or n is a pseudoprime to base // a. It implements the Miller-Rabin primality test for one specific value of // 'a' and k == 1. See also ProbablyPrimeUint32. func ProbablyPrimeBigInt(n, a *big.Int) bool { var d big.Int d.Set(n) d.Sub(&d, _1) // d <- n-1 s := 0 for ; d.Bit(s) == 0; s++ { } nMinus1 := big.NewInt(0).Set(&d) d.Rsh(&d, uint(s)) x := ModPowBigInt(a, &d, n) if x.Cmp(_1) == 0 || x.Cmp(nMinus1) == 0 { return true } for ; s > 1; s-- { if x = x.Mod(x.Mul(x, x), n); x.Cmp(_1) == 0 { return false } if x.Cmp(nMinus1) == 0 { return true } } return false } // Max returns the larger of a and b. func Max(a, b int) int { if a > b { return a } return b } // Min returns the smaller of a and b. func Min(a, b int) int { if a < b { return a } return b } // MaxPtr returns a pointer to the larger of a and b, or nil. func MaxPtr(a, b *int) *int { if a == nil { return b } if b == nil { return a } if *a > *b { return a } return b } // MinPtr returns a pointer to the smaller of a and b, or nil. func MinPtr(a, b *int) *int { if a == nil { return b } if b == nil { return a } if *a < *b { return a } return b } // MaxVal returns the largest argument passed. func MaxVal(val int, vals ...int) int { res := val for _, v := range vals { if v > res { res = v } } return res } // MinVal returns the smallest argument passed. func MinVal(val int, vals ...int) int { res := val for _, v := range vals { if v < res { res = v } } return res } // Clamp returns a value restricted between lo and hi. 
func Clamp(v, lo, hi int) int { return Min(Max(v, lo), hi) } // UMax returns the larger of a and b. func UMax(a, b uint) uint { if a > b { return a } return b } // UMin returns the smaller of a and b. func UMin(a, b uint) uint { if a < b { return a } return b } // UMaxPtr returns a pointer to the larger of a and b, or nil. func UMaxPtr(a, b *uint) *uint { if a == nil { return b } if b == nil { return a } if *a > *b { return a } return b } // UMinPtr returns a pointer to the smaller of a and b, or nil. func UMinPtr(a, b *uint) *uint { if a == nil { return b } if b == nil { return a } if *a < *b { return a } return b } // UMaxVal returns the largest argument passed. func UMaxVal(val uint, vals ...uint) uint { res := val for _, v := range vals { if v > res { res = v } } return res } // UMinVal returns the smallest argument passed. func UMinVal(val uint, vals ...uint) uint { res := val for _, v := range vals { if v < res { res = v } } return res } // UClamp returns a value restricted between lo and hi. func UClamp(v, lo, hi uint) uint { return UMin(UMax(v, lo), hi) } // MaxByte returns the larger of a and b. func MaxByte(a, b byte) byte { if a > b { return a } return b } // MinByte returns the smaller of a and b. func MinByte(a, b byte) byte { if a < b { return a } return b } // MaxBytePtr returns a pointer to the larger of a and b, or nil. func MaxBytePtr(a, b *byte) *byte { if a == nil { return b } if b == nil { return a } if *a > *b { return a } return b } // MinBytePtr returns a pointer to the smaller of a and b, or nil. func MinBytePtr(a, b *byte) *byte { if a == nil { return b } if b == nil { return a } if *a < *b { return a } return b } // MaxByteVal returns the largest argument passed. func MaxByteVal(val byte, vals ...byte) byte { res := val for _, v := range vals { if v > res { res = v } } return res } // MinByteVal returns the smallest argument passed. 
func MinByteVal(val byte, vals ...byte) byte { res := val for _, v := range vals { if v < res { res = v } } return res } // ClampByte returns a value restricted between lo and hi. func ClampByte(v, lo, hi byte) byte { return MinByte(MaxByte(v, lo), hi) } // MaxInt8 returns the larger of a and b. func MaxInt8(a, b int8) int8 { if a > b { return a } return b } // MinInt8 returns the smaller of a and b. func MinInt8(a, b int8) int8 { if a < b { return a } return b } // MaxInt8Ptr returns a pointer to the larger of a and b, or nil. func MaxInt8Ptr(a, b *int8) *int8 { if a == nil { return b } if b == nil { return a } if *a > *b { return a } return b } // MinInt8Ptr returns a pointer to the smaller of a and b, or nil. func MinInt8Ptr(a, b *int8) *int8 { if a == nil { return b } if b == nil { return a } if *a < *b { return a } return b } // MaxInt8Val returns the largest argument passed. func MaxInt8Val(val int8, vals ...int8) int8 { res := val for _, v := range vals { if v > res { res = v } } return res } // MinInt8Val returns the smallest argument passed. func MinInt8Val(val int8, vals ...int8) int8 { res := val for _, v := range vals { if v < res { res = v } } return res } // ClampInt8 returns a value restricted between lo and hi. func ClampInt8(v, lo, hi int8) int8 { return MinInt8(MaxInt8(v, lo), hi) } // MaxUint16 returns the larger of a and b. func MaxUint16(a, b uint16) uint16 { if a > b { return a } return b } // MinUint16 returns the smaller of a and b. func MinUint16(a, b uint16) uint16 { if a < b { return a } return b } // MaxUint16Ptr returns a pointer to the larger of a and b, or nil. func MaxUint16Ptr(a, b *uint16) *uint16 { if a == nil { return b } if b == nil { return a } if *a > *b { return a } return b } // MinUint16Ptr returns a pointer to the smaller of a and b, or nil. func MinUint16Ptr(a, b *uint16) *uint16 { if a == nil { return b } if b == nil { return a } if *a < *b { return a } return b } // MaxUint16Val returns the largest argument passed. 
func MaxUint16Val(val uint16, vals ...uint16) uint16 { res := val for _, v := range vals { if v > res { res = v } } return res } // MinUint16Val returns the smallest argument passed. func MinUint16Val(val uint16, vals ...uint16) uint16 { res := val for _, v := range vals { if v < res { res = v } } return res } // ClampUint16 returns a value restricted between lo and hi. func ClampUint16(v, lo, hi uint16) uint16 { return MinUint16(MaxUint16(v, lo), hi) } // MaxInt16 returns the larger of a and b. func MaxInt16(a, b int16) int16 { if a > b { return a } return b } // MinInt16 returns the smaller of a and b. func MinInt16(a, b int16) int16 { if a < b { return a } return b } // MaxInt16Ptr returns a pointer to the larger of a and b, or nil. func MaxInt16Ptr(a, b *int16) *int16 { if a == nil { return b } if b == nil { return a } if *a > *b { return a } return b } // MinInt16Ptr returns a pointer to the smaller of a and b, or nil. func MinInt16Ptr(a, b *int16) *int16 { if a == nil { return b } if b == nil { return a } if *a < *b { return a } return b } // MaxInt16Val returns the largest argument passed. func MaxInt16Val(val int16, vals ...int16) int16 { res := val for _, v := range vals { if v > res { res = v } } return res } // MinInt16Val returns the smallest argument passed. func MinInt16Val(val int16, vals ...int16) int16 { res := val for _, v := range vals { if v < res { res = v } } return res } // ClampInt16 returns a value restricted between lo and hi. func ClampInt16(v, lo, hi int16) int16 { return MinInt16(MaxInt16(v, lo), hi) } // MaxUint32 returns the larger of a and b. func MaxUint32(a, b uint32) uint32 { if a > b { return a } return b } // MinUint32 returns the smaller of a and b. func MinUint32(a, b uint32) uint32 { if a < b { return a } return b } // MaxUint32Ptr returns a pointer to the larger of a and b, or nil. 
func MaxUint32Ptr(a, b *uint32) *uint32 { if a == nil { return b } if b == nil { return a } if *a > *b { return a } return b } // MinUint32Ptr returns a pointer to the smaller of a and b, or nil. func MinUint32Ptr(a, b *uint32) *uint32 { if a == nil { return b } if b == nil { return a } if *a < *b { return a } return b } // MaxUint32Val returns the largest argument passed. func MaxUint32Val(val uint32, vals ...uint32) uint32 { res := val for _, v := range vals { if v > res { res = v } } return res } // MinUint32Val returns the smallest argument passed. func MinUint32Val(val uint32, vals ...uint32) uint32 { res := val for _, v := range vals { if v < res { res = v } } return res } // ClampUint32 returns a value restricted between lo and hi. func ClampUint32(v, lo, hi uint32) uint32 { return MinUint32(MaxUint32(v, lo), hi) } // MaxInt32 returns the larger of a and b. func MaxInt32(a, b int32) int32 { if a > b { return a } return b } // MinInt32 returns the smaller of a and b. func MinInt32(a, b int32) int32 { if a < b { return a } return b } // MaxInt32Ptr returns a pointer to the larger of a and b, or nil. func MaxInt32Ptr(a, b *int32) *int32 { if a == nil { return b } if b == nil { return a } if *a > *b { return a } return b } // MinInt32Ptr returns a pointer to the smaller of a and b, or nil. func MinInt32Ptr(a, b *int32) *int32 { if a == nil { return b } if b == nil { return a } if *a < *b { return a } return b } // MaxInt32Val returns the largest argument passed. func MaxInt32Val(val int32, vals ...int32) int32 { res := val for _, v := range vals { if v > res { res = v } } return res } // MinInt32Val returns the smallest argument passed. func MinInt32Val(val int32, vals ...int32) int32 { res := val for _, v := range vals { if v < res { res = v } } return res } // ClampInt32 returns a value restricted between lo and hi. func ClampInt32(v, lo, hi int32) int32 { return MinInt32(MaxInt32(v, lo), hi) } // MaxUint64 returns the larger of a and b. 
func MaxUint64(a, b uint64) uint64 { if a > b { return a } return b } // MinUint64 returns the smaller of a and b. func MinUint64(a, b uint64) uint64 { if a < b { return a } return b } // MaxUint64Ptr returns a pointer to the larger of a and b, or nil. func MaxUint64Ptr(a, b *uint64) *uint64 { if a == nil { return b } if b == nil { return a } if *a > *b { return a } return b } // MinUint64Ptr returns a pointer to the smaller of a and b, or nil. func MinUint64Ptr(a, b *uint64) *uint64 { if a == nil { return b } if b == nil { return a } if *a < *b { return a } return b } // MaxUint64Val returns the largest argument passed. func MaxUint64Val(val uint64, vals ...uint64) uint64 { res := val for _, v := range vals { if v > res { res = v } } return res } // MinUint64Val returns the smallest argument passed. func MinUint64Val(val uint64, vals ...uint64) uint64 { res := val for _, v := range vals { if v < res { res = v } } return res } // ClampUint64 returns a value restricted between lo and hi. func ClampUint64(v, lo, hi uint64) uint64 { return MinUint64(MaxUint64(v, lo), hi) } // MaxInt64 returns the larger of a and b. func MaxInt64(a, b int64) int64 { if a > b { return a } return b } // MinInt64 returns the smaller of a and b. func MinInt64(a, b int64) int64 { if a < b { return a } return b } // MaxInt64Ptr returns a pointer to the larger of a and b, or nil. func MaxInt64Ptr(a, b *int64) *int64 { if a == nil { return b } if b == nil { return a } if *a > *b { return a } return b } // MinInt64Ptr returns a pointer to the smaller of a and b, or nil. func MinInt64Ptr(a, b *int64) *int64 { if a == nil { return b } if b == nil { return a } if *a < *b { return a } return b } // MaxInt64Val returns the largest argument passed. func MaxInt64Val(val int64, vals ...int64) int64 { res := val for _, v := range vals { if v > res { res = v } } return res } // MinInt64Val returns the smallest argument passed. 
func MinInt64Val(val int64, vals ...int64) int64 { res := val for _, v := range vals { if v < res { res = v } } return res } // ClampInt64 returns a value restricted between lo and hi. func ClampInt64(v, lo, hi int64) int64 { return MinInt64(MaxInt64(v, lo), hi) } // ToBase produces n in base b. For example // // ToBase(2047, 22) -> [1, 5, 4] // // 1 * 22^0 1 // 5 * 22^1 110 // 4 * 22^2 1936 // ---- // 2047 // // ToBase panics for bases < 2. func ToBase(n *big.Int, b int) []int { var nn big.Int nn.Set(n) if b < 2 { panic("invalid base") } k := 1 switch nn.Sign() { case -1: nn.Neg(&nn) k = -1 case 0: return []int{0} } bb := big.NewInt(int64(b)) var r []int rem := big.NewInt(0) for nn.Sign() != 0 { nn.QuoRem(&nn, bb, rem) r = append(r, k*int(rem.Int64())) } return r }
odPowBigInt(
product.service.ts
import {Injectable} from '@angular/core'; import {AngularFireDatabase, FirebaseListObservable} from 'angularfire2/database'; import {Observable} from 'rxjs/Observable'; import 'rxjs/add/operator/catch'; import 'rxjs/add/observable/empty'; import 'rxjs/add/observable/throw'; export interface Product { // Unique Id
id: string; // Ref on category belongs to categoryId: string; // The title title: string; // Price price: number; // Mark product with specialproce isSpecial: boolean; // Description desc: string; // Path to small image imageS: string; // Path to large image imageL: string; } @Injectable() export class ProductService { // URL to Products web api private productsUrl = 'products'; constructor(private db: AngularFireDatabase) { } getProducts(category?: string, search?: string): Observable<Product[]> { if (category || search) { let query = <any>{}; if (category) { query.orderByChild = 'categoryId'; query.equalTo = category; } else { query.orderByChild = 'title'; query.startAt = search.toUpperCase(); query.endAt = query.startAt + '\uf8ff'; } return this.db .list(this.productsUrl, { query: query }) .catch(this.handleError); } else { return Observable.empty(); } } getProduct(id: string): Observable<Product> { return this.db .list(this.productsUrl, { query: { orderByChild: 'id', equalTo: id } }) .map((products: Product[]) => { return products[0]; }) .catch(this.handleError); } private handleError(error: any): Observable<any> { let errMsg = (error.message) ? error.message : error.status ? `${error.status} - ${error.statusText}` : 'Server error'; window.alert(`An error occurred: ${errMsg}`); return Observable.throw(errMsg); } }
replicaset.go
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // This file was automatically generated by lister-gen package v1beta2 import ( v1beta2 "k8s.io/api/apps/v1beta2" "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" "k8s.io/client-go/tools/cache" ) // ReplicaSetLister helps list ReplicaSets. type ReplicaSetLister interface { // List lists all ReplicaSets in the indexer. List(selector labels.Selector) (ret []*v1beta2.ReplicaSet, err error) // ReplicaSets returns an object that can list and get ReplicaSets. ReplicaSets(namespace string) ReplicaSetNamespaceLister ReplicaSetListerExpansion } // replicaSetLister implements the ReplicaSetLister interface. type replicaSetLister struct { indexer cache.Indexer } // NewReplicaSetLister returns a new ReplicaSetLister. func NewReplicaSetLister(indexer cache.Indexer) ReplicaSetLister { return &replicaSetLister{indexer: indexer} } // List lists all ReplicaSets in the indexer. func (s *replicaSetLister) List(selector labels.Selector) (ret []*v1beta2.ReplicaSet, err error) { err = cache.ListAll(s.indexer, selector, func(m interface{}) { ret = append(ret, m.(*v1beta2.ReplicaSet)) }) return ret, err } // ReplicaSets returns an object that can list and get ReplicaSets. func (s *replicaSetLister) ReplicaSets(namespace string) ReplicaSetNamespaceLister { return replicaSetNamespaceLister{indexer: s.indexer, namespace: namespace} } // ReplicaSetNamespaceLister helps list and get ReplicaSets. 
type ReplicaSetNamespaceLister interface { // List lists all ReplicaSets in the indexer for a given namespace. List(selector labels.Selector) (ret []*v1beta2.ReplicaSet, err error) // Get retrieves the ReplicaSet from the indexer for a given namespace and name. Get(name string) (*v1beta2.ReplicaSet, error) ReplicaSetNamespaceListerExpansion } // replicaSetNamespaceLister implements the ReplicaSetNamespaceLister // interface. type replicaSetNamespaceLister struct { indexer cache.Indexer namespace string } // List lists all ReplicaSets in the indexer for a given namespace. func (s replicaSetNamespaceLister) List(selector labels.Selector) (ret []*v1beta2.ReplicaSet, err error) { err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { ret = append(ret, m.(*v1beta2.ReplicaSet)) }) return ret, err } // Get retrieves the ReplicaSet from the indexer for a given namespace and name. func (s replicaSetNamespaceLister) Get(name string) (*v1beta2.ReplicaSet, error) { obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) if err != nil
if !exists { return nil, errors.NewNotFound(v1beta2.Resource("replicaset"), name) } return obj.(*v1beta2.ReplicaSet), nil }
{ return nil, err }
AppRow.js
import React, { memo, useMemo, useCallback } from "react"; import { View, StyleSheet } from "react-native"; import type { App } from "@ledgerhq/live-common/lib/types/manager"; import type { State, Action } from "@ledgerhq/live-common/lib/apps"; import { useNotEnoughMemoryToInstall } from "@ledgerhq/live-common/lib/apps/react"; import { Trans } from "react-i18next"; import { useTheme } from "@react-navigation/native"; import LText from "../../../components/LText"; import Touchable from "../../../components/Touchable"; import Warning from "../../../icons/Warning"; import AppIcon from "./AppIcon"; import AppStateButton from "./AppStateButton"; import ByteSize from "../../../components/ByteSize"; type Props = { app: App, state: State, dispatch: Action => void, isInstalledView: boolean, setAppInstallWithDependencies: ({ app: App, dependencies: App[] }) => void, setAppUninstallWithDependencies: ({ dependents: App[], app: App }) => void, setStorageWarning: () => void, managerTabs: *, optimisticState: State, }; const AppRow = ({ app, state, dispatch, isInstalledView, setAppInstallWithDependencies, setAppUninstallWithDependencies, setStorageWarning, optimisticState, }: Props) => { const { name, bytes, version: appVersion, displayName } = app; const { installed, deviceInfo } = state; const isInstalled = useMemo(() => installed.find(i => i.name === name), [ installed, name, ]); const version = (isInstalled && isInstalled.version) || appVersion; const availableVersion = (isInstalled && isInstalled.availableVersion) || appVersion; const notEnoughMemoryToInstall = useNotEnoughMemoryToInstall( optimisticState, name, ); const onSizePress = useCallback(() => setStorageWarning(name), [ setStorageWarning, name, ]); const { colors } = useTheme(); return ( <View style={styles.root}> <View style={[ styles.item, { backgroundColor: colors.card, borderBottomColor: colors.lightFog, }, ]} > <AppIcon app={app} /> <View style={styles.labelContainer}> <LText numberOfLines={1} bold> 
{displayName} </LText> <LText numberOfLines={1} style={styles.versionText} color="grey"> {version}{" "} {isInstalled && !isInstalled.updated && ( <Trans i18nKey="manager.appList.versionNew" values={{ newVersion: availableVersion !== version ? ` ${availableVersion}` : "", }} /> )} </LText> </View> {!isInstalled && notEnoughMemoryToInstall ? ( <Touchable activeOpacity={0.5} onPress={onSizePress} style={styles.warnText} event="ManagerAppNotEnoughMemory" eventProperties={{ appName: name }} > <Warning size={16} color={colors.lightOrange} /> <LText semiBold style={[styles.versionText, styles.sizeText, styles.warnText]} color="grey" > <ByteSize value={bytes} deviceModel={state.deviceModel} firmwareVersion={deviceInfo.version} formatFunction={Math.ceil} /> </LText> </Touchable> ) : ( <LText style={[ styles.versionText, styles.sizeText, notEnoughMemoryToInstall ? styles.warnText : {}, ]} color={notEnoughMemoryToInstall ? "lightOrange" : "grey"} > <ByteSize value={bytes} deviceModel={state.deviceModel} firmwareVersion={deviceInfo.version} formatFunction={Math.ceil} /> </LText> )} <AppStateButton app={app} state={state} dispatch={dispatch} notEnoughMemoryToInstall={notEnoughMemoryToInstall} isInstalled={!!isInstalled} isInstalledView={isInstalledView} setAppInstallWithDependencies={setAppInstallWithDependencies} setAppUninstallWithDependencies={setAppUninstallWithDependencies} /> </View> </View> ); }; const styles = StyleSheet.create({ root: { height: 64, }, item: { flexDirection: "row", alignItems: "center", justifyContent: "flex-start", paddingVertical: 14, paddingHorizontal: 16, borderRadius: 0, height: 64, borderBottomWidth: 1, }, labelContainer: { flexGrow: 0, flexShrink: 1, flexBasis: "40%", flexDirection: "column", alignItems: "flex-start", justifyContent: "center", paddingHorizontal: 10, }, versionText: { fontSize: 12, fontWeight: "bold", }, sizeText: { fontSize: 12, width: 44, marginHorizontal: 10, }, warnText: { flexDirection: "row", alignItems: "center", 
justifyContent: "center", }, installedLabel: { flexGrow: 1, flexShrink: 0, flexBasis: "auto",
justifyContent: "flex-end", borderRadius: 4, overflow: "hidden", paddingHorizontal: 10, }, appButton: { flexGrow: 1, flexShrink: 0, flexBasis: "auto", alignItems: "flex-start", height: 38, paddingHorizontal: 10, paddingVertical: 12, zIndex: 5, }, }); export default memo(AppRow);
flexDirection: "row", alignItems: "center",
build_status.pb.go
// Code generated by protoc-gen-go. // source: google/devtools/build/v1/build_status.proto // DO NOT EDIT! package build import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" import _ "google.golang.org/genproto/googleapis/api/annotations" import _ "github.com/golang/protobuf/ptypes/any" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // The end result of the Build. type BuildStatus_Result int32 const ( // Unspecified or unknown. BuildStatus_UNKNOWN_STATUS BuildStatus_Result = 0 // Build was successful and tests (if requested) all pass. BuildStatus_COMMAND_SUCCEEDED BuildStatus_Result = 1 // Build error and/or test failure. BuildStatus_COMMAND_FAILED BuildStatus_Result = 2 // Unable to obtain a result due to input provided by the user. BuildStatus_USER_ERROR BuildStatus_Result = 3 // Unable to obtain a result due to a failure within the build system. BuildStatus_SYSTEM_ERROR BuildStatus_Result = 4 // Build required too many resources, such as build tool RAM. BuildStatus_RESOURCE_EXHAUSTED BuildStatus_Result = 5 // An invocation attempt time exceeded its deadline. BuildStatus_INVOCATION_DEADLINE_EXCEEDED BuildStatus_Result = 6 // Build request time exceeded the request_deadline BuildStatus_REQUEST_DEADLINE_EXCEEDED BuildStatus_Result = 8 // The build was cancelled by a call to CancelBuild. 
BuildStatus_CANCELLED BuildStatus_Result = 7 ) var BuildStatus_Result_name = map[int32]string{ 0: "UNKNOWN_STATUS", 1: "COMMAND_SUCCEEDED", 2: "COMMAND_FAILED", 3: "USER_ERROR", 4: "SYSTEM_ERROR", 5: "RESOURCE_EXHAUSTED", 6: "INVOCATION_DEADLINE_EXCEEDED", 8: "REQUEST_DEADLINE_EXCEEDED", 7: "CANCELLED", } var BuildStatus_Result_value = map[string]int32{ "UNKNOWN_STATUS": 0, "COMMAND_SUCCEEDED": 1, "COMMAND_FAILED": 2, "USER_ERROR": 3, "SYSTEM_ERROR": 4, "RESOURCE_EXHAUSTED": 5, "INVOCATION_DEADLINE_EXCEEDED": 6, "REQUEST_DEADLINE_EXCEEDED": 8, "CANCELLED": 7, } func (x BuildStatus_Result) String() string { return proto.EnumName(BuildStatus_Result_name, int32(x)) } func (BuildStatus_Result) EnumDescriptor() ([]byte, []int) { return fileDescriptor1, []int{0, 0} } // Status used for both invocation attempt and overall build completion. type BuildStatus struct { // The end result. Result BuildStatus_Result `protobuf:"varint,1,opt,name=result,enum=google.devtools.build.v1.BuildStatus_Result" json:"result,omitempty"` } func (m *BuildStatus) Reset() { *m = BuildStatus{} } func (m *BuildStatus) String() string { return proto.CompactTextString(m) } func (*BuildStatus) ProtoMessage() {} func (*BuildStatus) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{0} } func (m *BuildStatus) GetResult() BuildStatus_Result { if m != nil
return BuildStatus_UNKNOWN_STATUS } func init() { proto.RegisterType((*BuildStatus)(nil), "google.devtools.build.v1.BuildStatus") proto.RegisterEnum("google.devtools.build.v1.BuildStatus_Result", BuildStatus_Result_name, BuildStatus_Result_value) } func init() { proto.RegisterFile("google/devtools/build/v1/build_status.proto", fileDescriptor1) } var fileDescriptor1 = []byte{ // 370 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0x5f, 0x4b, 0xe3, 0x40, 0x14, 0xc5, 0x37, 0xdd, 0xdd, 0xec, 0xee, 0xec, 0x6e, 0xc9, 0x0e, 0xac, 0xb4, 0xa5, 0x42, 0xe9, 0x93, 0xa0, 0x4c, 0xa8, 0x3e, 0x8a, 0x0f, 0x69, 0xe6, 0x8a, 0xc1, 0x76, 0x52, 0x67, 0x12, 0xff, 0xbd, 0x84, 0xd4, 0xc6, 0x10, 0x88, 0x99, 0xd2, 0x4c, 0x0a, 0x7e, 0x22, 0x3f, 0x8f, 0xdf, 0xc6, 0x47, 0xc9, 0x9f, 0x42, 0x41, 0xfb, 0x76, 0x73, 0xcf, 0xef, 0xdc, 0x13, 0xce, 0xa0, 0xc3, 0x58, 0xca, 0x38, 0x8d, 0xcc, 0x45, 0xb4, 0x56, 0x52, 0xa6, 0xb9, 0x39, 0x2f, 0x92, 0x74, 0x61, 0xae, 0x47, 0xf5, 0x10, 0xe4, 0x2a, 0x54, 0x45, 0x4e, 0x96, 0x2b, 0xa9, 0x24, 0xee, 0xd4, 0x30, 0xd9, 0xc0, 0xa4, 0x62, 0xc8, 0x7a, 0xd4, 0xeb, 0x37, 0x67, 0xc2, 0x65, 0x62, 0x86, 0x59, 0x26, 0x55, 0xa8, 0x12, 0x99, 0x35, 0xbe, 0x5e, 0xb7, 0x51, 0xab, 0xaf, 0x79, 0xf1, 0x68, 0x86, 0xd9, 0x73, 0x2d, 0x0d, 0x5f, 0x5a, 0xe8, 0xf7, 0xb8, 0xbc, 0x22, 0xaa, 0x20, 0x4c, 0x91, 0xbe, 0x8a, 0xf2, 0x22, 0x55, 0x1d, 0x6d, 0xa0, 0x1d, 0xb4, 0x8f, 0x8f, 0xc8, 0xae, 0x4c, 0xb2, 0x65, 0x23, 0xbc, 0xf2, 0xf0, 0xc6, 0x3b, 0x7c, 0xd5, 0x90, 0x5e, 0xaf, 0x30, 0x46, 0x6d, 0x9f, 0x5d, 0x32, 0xf7, 0x86, 0x05, 0xc2, 0xb3, 0x3c, 0x5f, 0x18, 0x5f, 0xf0, 0x7f, 0xf4, 0xcf, 0x76, 0xa7, 0x53, 0x8b, 0xd1, 0x40, 0xf8, 0xb6, 0x0d, 0x40, 0x81, 0x1a, 0x5a, 0x89, 0x6e, 0xd6, 0xe7, 0x96, 0x33, 0x01, 0x6a, 0xb4, 0x70, 0x1b, 0x21, 0x5f, 0x00, 0x0f, 0x80, 0x73, 0x97, 0x1b, 0x5f, 0xb1, 0x81, 0xfe, 0x88, 0x3b, 0xe1, 0xc1, 0xb4, 0xd9, 0x7c, 0xc3, 0x7b, 0x08, 0x73, 0x10, 0xae, 0xcf, 0x6d, 0x08, 0xe0, 0xf6, 0xc2, 0xf2, 0x85, 0x07, 
0xd4, 0xf8, 0x8e, 0x07, 0xa8, 0xef, 0xb0, 0x6b, 0xd7, 0xb6, 0x3c, 0xc7, 0x65, 0x01, 0x05, 0x8b, 0x4e, 0x1c, 0x56, 0x22, 0x4d, 0x9e, 0x8e, 0xf7, 0x51, 0x97, 0xc3, 0x95, 0x0f, 0xc2, 0xfb, 0x44, 0xfe, 0x89, 0xff, 0xa2, 0x5f, 0xb6, 0xc5, 0x6c, 0x98, 0x94, 0x7f, 0xf2, 0x63, 0xac, 0x50, 0xff, 0x41, 0x3e, 0xed, 0xac, 0x63, 0x6c, 0x6c, 0xf5, 0x31, 0x2b, 0xbb, 0x9d, 0x69, 0xf7, 0x67, 0x0d, 0x1d, 0xcb, 0x34, 0xcc, 0x62, 0x22, 0x57, 0xb1, 0x19, 0x47, 0x59, 0xd5, 0xbc, 0x59, 0x4b, 0xe1, 0x32, 0xc9, 0x3f, 0x3e, 0xfe, 0x69, 0x35, 0xbc, 0x69, 0xda, 0x5c, 0xaf, 0xe0, 0x93, 0xf7, 0x00, 0x00, 0x00, 0xff, 0xff, 0xde, 0x3c, 0xd5, 0xd5, 0x28, 0x02, 0x00, 0x00, }
{ return m.Result }
webhook_models.py
# -*- coding: utf-8 -*- from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ from .base_models import IcebergBaseModel WEBHOOK_MAX_TRIGGER_AGGREGATION = getattr(settings, 'WEBHOOK_MAX_TRIGGER_AGGREGATION', 200) WEBHOOK_DEFAULT_AGGREGATION_DELAY = getattr(settings, 'WEBHOOK_DEFAULT_AGGREGATION_DELAY', 5*60) WEBHOOK_DEFAULT_RETRY_DELAY = getattr(settings, 'WEBHOOK_DEFAULT_RETRY_DELAY', 15*60) WEBHOOK_DEFAULT_MAX_ATTEMPTS = getattr(settings, 'WEBHOOK_DEFAULT_MAX_ATTEMPTS', 5) WEBHOOK_DEFAULT_MAX_TRIGGER_AGGREGATION = getattr(settings, 'WEBHOOK_DEFAULT_MAX_TRIGGER_AGGREGATION', 10) ICEBERG_WEBHOOK_MODEL = getattr(settings, "ICEBERG_WEBHOOK_MODEL", "django_iceberg.IcebergWebhook") import logging logger = logging.getLogger(__name__) class AbstractIcebergWebhook(IcebergBaseModel): """ Abstract model that can store an Iceberg Webhook """ class Meta: abstract = True EVENT_CHOICES = ( ('cart_status_changed', ('A cart status has changed')), ('merchant_order_authorized', ('A new merchant order is now authorized')), ('merchant_order_confirmed', ('A merchant order is now confirmed')), ('merchant_order_cancelled', ('An authorized merchant order is now cancelled')), ('merchant_order_sent', ('A merchant order is now sent')), ('merchant_order_received', ('A merchant order is now received')), ('order_item_cancelled', ('An authorized order item is now cancelled')), ('new_merchant_available', ('A new merchant is available for your application')), ('merchant_activated', ('A merchant is now activated')), ('merchant_paused', ('A merchant is now paused')), ('merchant_stopped', ('A merchant is now stopped')), ('bank_account_saved', ("A merchant saved bank information.")), ('user_payment_card_created', ('A user has added a new payment card')), ('user_payment_card_status_changed', ('A user payment card has its status changed')), ('payment_status_changed', ('A payment object has its status updated')), ('product_updated', ('An 
active product has been updated')), ('product_offer_updated', ('An active product offer has been updated')), ('user_profile_updated', ('A user profile has been updated')), ('order_authorized', ('A new order is now authorized')), ('order_confirmed', ('An order is now confirmed')), ('order_cancelled', ('An authorized order is now cancelled')), ('order_sent', ('An order is now sent')), ('order_received', ('An order is now received')), ('return_opened', ('A new return request has been opened')), ('return_reopened', ('A closed or cancelled return request has been reopened and set to accepted status')), ('return_accepted', ('An open return request is now accepted')), ('return_cancelled', ('An open or accepted return request has been cancelled')), ('return_package_received', ('An accepted return request\'s package was received.')), ('return_closed_by_seller', ('A return request has been closed by seller')), ('return_closed_by_buyer', ('A return request has been closed by buyer')), ('return_request_closed', ('A return request has been closed either by buyer or by seller')), ('package_tracking_status_changed', ('A tracked package has its status updated')), ('package_tracking_overall_status_changed', ('A tracked package has its overall status updated')), ('package_tracking_number_added', ('A tracked package has now a tracking number')), ('new_package_tracking', ('A new package is being tracked')), ('new_message', ('A new message has been sent over the marketplace')), ('message_read', ('A message has been read on the marketplace')), ('message_closed', ('A thread of messages is closed on the marketplace')), ) event = models.CharField(max_length=100, choices=EVENT_CHOICES, db_index=True) url = models.URLField('Target URL', max_length=255) active_merchant_only = models.BooleanField( _('Limit this webhook to active merchant(s) ?'), default=True ) aggregation_delay = models.PositiveIntegerField( _('Delay in seconds before triggering the aggregated webhook'), 
default=WEBHOOK_DEFAULT_AGGREGATION_DELAY ) application_id = models.PositiveIntegerField(blank=True, null=True) merchant_id = models.PositiveIntegerField(blank=True, null=True) comment = models.CharField(max_length=255, null=True, blank=True) label = models.CharField(max_length=128, null=True, blank=True, db_index=True) created_at = models.DateTimeField(blank=True, null=True) max_attempts = models.PositiveSmallIntegerField( _('Maximum Attempts'), default = WEBHOOK_DEFAULT_MAX_ATTEMPTS ) new_attempt_delay = models.PositiveIntegerField( _('Delay in seconds before retrying to fire the webhook'), default = WEBHOOK_DEFAULT_RETRY_DELAY ) max_trigger_aggregation = models.PositiveSmallIntegerField( _('Maximum number of triggers that can be aggregated (1 if no aggregation)'), default = WEBHOOK_DEFAULT_MAX_TRIGGER_AGGREGATION ) status = models.CharField(null=True, blank=True, max_length=20, db_index=True) updated_at = models.DateTimeField(blank=True, null=True) user_id = models.PositiveIntegerField(blank=True, null=True) version = models.CharField( _('Version of the webhook (different formats)'), max_length=10, blank=True, null=True ) def __unicode__(self): return u"[%s]%s" % (self.id, self.event) def save(self, api_handler=None, iceberg_sync=True, *args, **kwargs): """ if an api_handler is given, update or create the webhook on iceberg """ self.full_clean() super(AbstractIcebergWebhook, self).save(*args, **kwargs) if not iceberg_sync: return if api_handler: self.create_or_update_on_iceberg(api_handler) else: if self.iceberg_id is None: logger.warn("No api_handler given as save() params, not created on Iceberg.\ Call self.create_or_update_on_iceberg to create it") else: logger.warn("No api_handler given as save() params, not updated on Iceberg.\ Call self.create_or_update_on_iceberg to update it") def create_or_update_on_iceberg(self, api_handler): iceberg_webhook = api_handler.Webhook.find(self.iceberg_id) if self.iceberg_id else api_handler.Webhook() 
iceberg_webhook.application = api_handler.Application.find(self.application_id) if self.application_id else None iceberg_webhook.merchant = api_handler.Store.find(self.merchant_id) if self.merchant_id else None iceberg_webhook.event = self.event iceberg_webhook.url = self.url iceberg_webhook.active_merchant_only = self.active_merchant_only iceberg_webhook.aggregation_delay = self.aggregation_delay iceberg_webhook.comment = self.comment iceberg_webhook.label = self.label iceberg_webhook.max_attempts = self.max_attempts iceberg_webhook.new_attempt_delay = self.new_attempt_delay iceberg_webhook.max_trigger_aggregation = self.max_trigger_aggregation iceberg_webhook.save() self.iceberg_id = iceberg_webhook.id ## calling iceberg_sync() to update the fields of 'self' to the actual values (some fields might be uneditable) self.iceberg_sync(api_handler) def delete(self, api_handler=None, *args, **kwargs): """ if an api_handler is given, try to delete the webhook on iceberg """ if api_handler: self._iceberg_delete(api_handler, fail_silently=True) super(AbstractIcebergWebhook, self).delete(*args, **kwargs) def
(self, api_handler): if self.iceberg_id is None: raise Exception("%s instance has no iceberg_id, can't sync" % self.__class__.__name__) iceberg_webhook = api_handler.Webhook.find(self.iceberg_id) self.application_id = iceberg_webhook.application.id if iceberg_webhook.application else None self.merchant_id = iceberg_webhook.merchant.id if iceberg_webhook.merchant else None self.url = iceberg_webhook.url self.event = iceberg_webhook.event self.status = iceberg_webhook.status self.max_attempts = iceberg_webhook.max_attempts self.new_attempt_delay = iceberg_webhook.new_attempt_delay self.label = iceberg_webhook.label self.version = iceberg_webhook.version self.max_trigger_aggregation = iceberg_webhook.max_trigger_aggregation self.aggregation_delay = iceberg_webhook.aggregation_delay self.active_merchant_only = iceberg_webhook.active_merchant_only self.created_at = iceberg_webhook.created_at self.updated_at = iceberg_webhook.updated_at super(AbstractIcebergWebhook, self).save() ## just calling the original save() def _iceberg_delete(self, api_handler, fail_silently=False): try: if self.iceberg_id is None: raise Exception("%s instance has no iceberg_id, can't sync" % self.__class__.__name__) iceberg_webhook = api_handler.Webhook.find(self.iceberg_id) iceberg_webhook.delete() except Exception as err: if not fail_silently: raise logger.warn("Couldnt delete webhook %s on iceberg: %s", self.iceberg_id, err) if ICEBERG_WEBHOOK_MODEL == "django_iceberg.IcebergWebhook": ### if defined as ICEBERG_WEBHOOK_MODEL, defining non abstract model class IcebergWebhook(AbstractIcebergWebhook): pass
iceberg_sync
handlers.py
from aiogram import Dispatcher, types from aiogram.dispatcher import FSMContext from delivery_bots.bots.tgbot.settings import TgBotSettings from delivery_bots.bots.tgbot.states import BotState async def handle_payment(query: types.CallbackQuery, state: FSMContext): """Handle payment.""" current_state = await state.get_data() payment_token = TgBotSettings().payment_token delivery_total_amount = current_state['delivery_total_amount'] order_description = current_state['order_description'] order_total_amount = current_state['order_total_amount'] await query.message.bot.send_invoice( query.from_user.id, title='Оплата заказа', description=order_description, provider_token=payment_token, currency='RUB', is_flexible=False, prices=[ types.LabeledPrice(label='Заказ', amount=order_total_amount), types.LabeledPrice(label='Доставка', amount=delivery_total_amount), ], start_parameter='create_invoice_pizza', payload='pizza_order', need_phone_number=True, ) async def handle_pre_checkout(query: types.CallbackQuery, pre_checkout_query: types.PreCheckoutQuery): """Handle pre checkout.""
'Hoooooray! Спасибо за оплату! Мы обработаем ваш заказ на `{} {}`' + ' быстро настолько, насколько это возможно! Оставайтесь на связи.' ).format( message.successful_payment.total_amount / 100, message.successful_payment.currency, ), ) def register_payment_handler(dp: Dispatcher): """Register payment handler.""" dp.register_callback_query_handler(handle_payment, state=BotState.payment) dp.pre_checkout_query_handler(handle_pre_checkout, lambda query: True, state=BotState.payment) dp.message_handler(handle_got_payment, content_types=types.ContentTypes.SUCCESSFUL_PAYMENT, state=BotState.payment)
" await query.message.bot.answer_pre_checkout_query( pre_checkout_query.id, ok=True, error_message=( 'Инопланетяне пытались украсть вашу карту, но мы успешно защитили ваши учетные данные,' + 'попробуем заплатить еще раз через несколько минут, нам нужен небольшой отдых.' ), ) async def handle_got_payment(message: types.Message): """Handle got payment.""" await message.bot.send_message( message.chat.id, (
account.ts
import { API_ROOT } from 'src/constants'; import Request, { setData, setMethod, setURL } from 'src/request'; import { updateAccountSchema, UpdateAccountSettingsSchema } from './account.schema'; import { Account, AccountSettings, CancelAccount, CancelAccountPayload, NetworkUtilization } from './types'; /** * getAccountInfo * * Return account information, * including contact and billing info. * */ export const getAccountInfo = () => { return Request<Account>(setURL(`${API_ROOT}/account`), setMethod('GET')); }; /** * getNetworkUtilization * * Return your current network transfer quota and usage. * */ export const getNetworkUtilization = () => Request<NetworkUtilization>( setURL(`${API_ROOT}/account/transfer`), setMethod('GET') ); /** * updateAccountInfo * * Update your contact or billing information. * */ export const updateAccountInfo = (data: Partial<Account>) => Request<Account>( setURL(`${API_ROOT}/account`), setMethod('PUT'), setData(data, updateAccountSchema) ); /** * getAccountSettings * * Retrieve general account-level settings. * */ export const getAccountSettings = () => Request<AccountSettings>( setURL(`${API_ROOT}/account/settings`), setMethod('GET') );
/** * updateAccountSettings * * Update a user's account settings. * */ export const updateAccountSettings = (data: Partial<AccountSettings>) => Request<AccountSettings>( setURL(`${API_ROOT}/account/settings`), setMethod('PUT'), setData(data, UpdateAccountSettingsSchema) ); /** * cancelAccount * * Cancels an account and returns a survey monkey link for a user to fill out */ export const cancelAccount = (data: CancelAccountPayload) => { return Request<CancelAccount>( setURL(`${API_ROOT}/account/cancel`), setMethod('POST'), setData(data) ); };
enchant.go
package protocol import ( "bytes" "encoding/binary" ) // EnchantmentOption represents a single option in the enchantment table for a single item. type EnchantmentOption struct { // Cost is the cost of the option. This is the amount of XP levels required to select this enchantment // option. Cost uint32 // Enchantments holds the enchantments that will be applied to the item when this option is clicked. Enchantments ItemEnchantments // Name is a name that will be translated to the 'Standard Galactic Alphabet' // (https://minecraft.gamepedia.com/Enchanting_Table#Standard_Galactic_Alphabet) client-side. The names // generally have no meaning, such as: // 'animal imbue range galvanize ' // 'bless inside creature shrink ' // 'elder free of inside ' Name string // RecipeNetworkID is a unique network ID for this enchantment option. When enchanting, the client // will submit this network ID in a ItemStackRequest packet with the CraftRecipe action, so that the // server knows which enchantment was selected. // Note that this ID should still be unique with other actual recipes. It's recommended to start counting // for enchantment network IDs from the counter used for producing network IDs for the normal recipes. RecipeNetworkID uint32 } // WriteEnchantOption writes an EnchantmentOption x to Buffer dst. func WriteEnchantOption(dst *bytes.Buffer, x EnchantmentOption) error { return chainErr( WriteVaruint32(dst, x.Cost), WriteItemEnchants(dst, x.Enchantments), WriteString(dst, x.Name), WriteVaruint32(dst, x.RecipeNetworkID), ) } // EnchantOption reads an EnchantmentOption x from Buffer src. 
func EnchantOption(src *bytes.Buffer, x *EnchantmentOption) error { return chainErr( Varuint32(src, &x.Cost), ItemEnchants(src, &x.Enchantments), String(src, &x.Name), Varuint32(src, &x.RecipeNetworkID), ) } const ( EnchantmentSlotNone = 0 EnchantmentSlotAll = 0xffff EnchantmentSlotArmour = EnchantmentSlotHelmet | EnchantmentSlotChestplate | EnchantmentSlotLeggings | EnchantmentSlotBoots EnchantmentSlotHelmet = 0x1 EnchantmentSlotChestplate = 0x2 EnchantmentSlotLeggings = 0x4 EnchantmentSlotBoots = 0x8 EnchantmentSlotSword = 0x10 EnchantmentSlotBow = 0x20 EnchantmentSlotToolOther = EnchantmentSlotHoe | EnchantmentSlotShears | EnchantmentSlotFlintAndSteel EnchantmentSlotHoe = 0x40 EnchantmentSlotShears = 0x80 EnchantmentSlotFlintAndSteel = 0x100 EnchantmentSlotDig = EnchantmentSlotAxe | EnchantmentSlotPickaxe | EnchantmentSlotShovel EnchantmentSlotAxe = 0x200 EnchantmentSlotPickaxe = 0x400 EnchantmentSlotShovel = 0x800 EnchantmentSlotFishingRod = 0x1000 EnchantmentSlotCarrotOnAStick = 0x2000 EnchantmentSlotElytra = 0x4000 EnchantmentSlotTrident = 0x8000 ) // ItemEnchantments holds information on the enchantments that are applied to an item when a specific button // is clicked in the enchantment table. type ItemEnchantments struct { // Slot is the enchantment slot of the item that was put into the enchantment table, for which the // following enchantments will apply. // The possible slots can be found above. Slot int32 // Enchantments is an array of 3 slices of enchantment instances. Each array represents enchantments that // will be added to the item with a different activation type. 
The arrays in which enchantments are sent // by the vanilla server are as follows: // slice 1 { protection, fire protection, feather falling, blast protection, projectile protection, // thorns, respiration, depth strider, aqua affinity, frost walker, soul speed } // slice 2 { sharpness, smite, bane of arthropods, fire aspect, looting, silk touch, unbreaking, fortune, // flame, luck of the sea, impaling } // slice 3 { knockback, efficiency, power, punch, infinity, lure, mending, curse of binding, // curse of vanishing, riptide, loyalty, channeling, multishot, piercing, quick charge } // The first slice holds armour enchantments, the differences between the slice 2 and slice 3 are more // vaguely defined. Enchantments [3][]EnchantmentInstance } // WriteItemEnchants writes an ItemEnchantments x to Buffer dst. func WriteItemEnchants(dst *bytes.Buffer, x ItemEnchantments) error { if err := binary.Write(dst, binary.LittleEndian, x.Slot); err != nil { return err } for _, enchantments := range x.Enchantments { if err := WriteVaruint32(dst, uint32(len(enchantments))); err != nil { return err } for _, enchantment := range enchantments { if err := WriteEnchant(dst, enchantment); err != nil { return err } } } return nil } // ItemEnchants reads an ItemEnchantments x from Buffer src. func ItemEnchants(src *bytes.Buffer, x *ItemEnchantments) error { if err := binary.Read(src, binary.LittleEndian, &x.Slot); err != nil { return err } for i := 0; i < 3; i++ { var l uint32 if err := Varuint32(src, &l); err != nil { return err } x.Enchantments[i] = make([]EnchantmentInstance, l) for j := uint32(0); j < l; j++ { if err := Enchant(src, &x.Enchantments[i][j]); err != nil { return err } } } return nil } // EnchantmentInstance represents a single enchantment instance with the type of the enchantment and its // level. type EnchantmentInstance struct { Type byte Level byte } // WriteEnchant writes an EnchantmentInstance x to Buffer dst. 
func WriteEnchant(dst *bytes.Buffer, x EnchantmentInstance) error { dst.WriteByte(x.Type) dst.WriteByte(x.Level) return nil } // Enchant reads an EnchantmentInstance x from Buffer src. func
(src *bytes.Buffer, x *EnchantmentInstance) error { return chainErr( binary.Read(src, binary.LittleEndian, &x.Type), binary.Read(src, binary.LittleEndian, &x.Level), ) }
Enchant
language.ts
/* eslint-disable import/no-cycle */ import { CollapsableMenus } from '../containers/Toolbar'; import { ComponentTypes } from '../components'; export function getComponentHelperTextByComponentType(type: string, language: any): string { switch (type) { case ComponentTypes.Header: { return language.ux_editor.helper_text_for_header; } case ComponentTypes.Input: { return language.ux_editor.helper_text_for_input; } case ComponentTypes.Checkboxes: { return language.ux_editor.helper_text_for_check_box; } case ComponentTypes.RadioButtons: { return language.ux_editor.helper_text_for_radio_button; } case ComponentTypes.AttachmentList: { return language.ux_editor.helper_text_for_attachment_list; } default: { // Several components does not yet have a helper text, a default is shown. return language.ux_editor.helper_text_default; } } } export function getComponentTitleByComponentType(type: string, language: any): string { switch (type) { case ComponentTypes.Checkboxes: { return language.ux_editor.component_checkbox; } case ComponentTypes.Dropdown: { return language.ux_editor.component_dropdown; } case ComponentTypes.FileUpload: { return language.ux_editor.component_file_upload; } case ComponentTypes.Header: { return language.ux_editor.component_header; } case ComponentTypes.Input: { return language.ux_editor.component_input; } case ComponentTypes.Datepicker: { return language.ux_editor.component_datepicker; } case ComponentTypes.Button: { return language.ux_editor.component_button; } case ComponentTypes.TextArea: { return language.ux_editor.component_text_area; } case ComponentTypes.RadioButtons: { return language.ux_editor.component_radio_button;
case ComponentTypes.Paragraph: { return language.ux_editor.component_paragraph; } case ComponentTypes.AddressComponent: { return language.ux_editor.component_advanced_address; } case ComponentTypes.Group: { return language.ux_editor.component_group; } case ComponentTypes.NavigationButtons: { return language.ux_editor.component_navigation_buttons; } case ComponentTypes.AttachmentList: { return language.ux_editor.component_attachment_list; } default: { return ''; } } } export function getCollapsableMenuTitleByType(menu: CollapsableMenus, language: any): string { switch (menu) { case CollapsableMenus.Components: { return language.ux_editor.collapsable_schema_components; } case CollapsableMenus.Texts: { return language.ux_editor.collapsable_text_components; } case CollapsableMenus.AdvancedComponents: { return language.ux_editor.collapsable_text_advanced_components; } case CollapsableMenus.Widgets: { return language.ux_editor.collapsable_text_widgets; } default: { return ''; } } } export function truncate(s: string, size: number) { if (s && s.length > size) { return (`${s.substring(0, size)}...`); } return s; } export function getTextResource(resourceKey: string, textResources: ITextResource[]): string { const textResource = textResources.find((resource) => resource.id === resourceKey); return textResource ? textResource.value : resourceKey; } export function formatCreateTextLabel(textToCreate: string, language: any): string { return language.general.create.concat(' ', textToCreate); }
}
from_into.rs
// The From trait is used for value-to-value conversions. // If From is implemented correctly for a type, the Into trait should work conversely. // You can read more about it at https://doc.rust-lang.org/std/convert/trait.From.html #[derive(Debug)] struct Person { name: String, age: usize, } // We implement the Default trait to use it as a fallback // when the provided string is not convertible into a Person object impl Default for Person { fn default() -> Person { Person { name: String::from("John"), age: 30, } } } // Your task is to complete this implementation // in order for the line `let p = Person::from("Mark,20")` to compile // Please note that you'll need to parse the age component into a `usize` // with something like `"4".parse::<usize>()`. The outcome of this needs to // be handled appropriately. // // Steps: // 1. If the length of the provided string is 0, then return the default of Person // 2. Split the given string on the commas present in it // 3. Extract the first element from the split operation and use it as the name // 4. If the name is empty, then return the default of Person // 5. 
Extract the other element from the split operation and parse it into a `usize` as the age // If while parsing the age, something goes wrong, then return the default of Person // Otherwise, then return an instantiated Person object with the results impl From<&str> for Person { fn from(s: &str) -> Person { if s.len() == 0 { return Person::default(); } let parse_person = |s: &str| -> Option<Person> { let mut parts = s.split(','); let name = parts.next()?.to_string(); if name.len() == 0 { return None; } let person = Some(Person { name, age: parts.next()?.parse::<usize>().ok()?, }); match parts.next() { None => person, _ => None, } }; parse_person(s).unwrap_or(Person::default()) } } fn main() { // Use the `from` function let p1 = Person::from("Mark,20"); // Since From is implemented for Person, we should be able to use Into let p2: Person = "Gerald,70".into(); println!("{:?}", p1); println!("{:?}", p2); } #[cfg(test)] mod tests { use super::*; #[test] fn test_default() { // Test that the default person is 30 year old John let dp = Person::default(); assert_eq!(dp.name, "John"); assert_eq!(dp.age, 30); } #[test] fn test_bad_convert() { // Test that John is returned when bad string is provided let p = Person::from(""); assert_eq!(p.name, "John"); assert_eq!(p.age, 30); } #[test] fn test_good_convert() { // Test that "Mark,20" works let p = Person::from("Mark,20"); assert_eq!(p.name, "Mark"); assert_eq!(p.age, 20); } #[test] fn test_bad_age() { // Test that "Mark,twenty" will return the default person due to an error in parsing age let p = Person::from("Mark,twenty"); assert_eq!(p.name, "John"); assert_eq!(p.age, 30); } #[test] fn test_missing_comma_and_age() { let p: Person = Person::from("Mark"); assert_eq!(p.name, "John"); assert_eq!(p.age, 30); } #[test] fn test_missing_age() { let p: Person = Person::from("Mark,"); assert_eq!(p.name, "John"); assert_eq!(p.age, 30); } #[test] fn test_missing_name() { let p: Person = Person::from(",1"); assert_eq!(p.name, "John"); 
assert_eq!(p.age, 30); } #[test] fn test_missing_name_and_age() { let p: Person = Person::from(","); assert_eq!(p.name, "John"); assert_eq!(p.age, 30); } #[test] fn test_missing_name_and_invalid_age() { let p: Person = Person::from(",one"); assert_eq!(p.name, "John"); assert_eq!(p.age, 30); } #[test] fn
() { let p: Person = Person::from("Mike,32,"); assert_eq!(p.name, "John"); assert_eq!(p.age, 30); } #[test] fn test_trailing_comma_and_some_string() { let p: Person = Person::from("Mike,32,man"); assert_eq!(p.name, "John"); assert_eq!(p.age, 30); } }
test_trailing_comma
next.config.js
// $FlowIssue this is what mapbox/rehype-prism uses under the hood
"styled-template-string": { pattern: /(styled(\.\w+|\([^\)]*\))(\.\w+(\([^\)]*\))*)*|css|injectGlobal|createGlobalStyle|keyframes|\.extend|\.withComponent)`(?:\$\{[^}]+\}|\\\\|\\?[^\\])*?`/, lookbehind: true, greedy: true, inside: { interpolation: { pattern: /\$\{[^}]+\}/, inside: { "interpolation-punctuation": { pattern: /^\$\{|\}$/, alias: "punctuation" }, rest: refract.languages.jsx } }, string: { pattern: /[^$;]+/, inside: refract.languages.css, alias: "language-css" } } } }; refract.languages.insertBefore("jsx", "template-string", styledHighlight); refract.languages.insertBefore("js", "template-string", styledHighlight); const withMDX = require("@next/mdx")({ extension: /\.mdx?$/, options: { // $FlowIssue hastPlugins: [require("@mapbox/rehype-prism")] } }); const fs = require("fs"); const { join } = require("path"); const generateJsonFeed = require("./data/generate-json-feed"); const { promisify } = require("util"); const copyFile = promisify(fs.copyFile); const staticFilesToCopy = ["favicon.ico"]; module.exports = withMDX({ pageExtensions: ["js", "jsx", "mdx", "md"], exportPathMap: async function( defaultPathMap, { dev, dir, outDir, distDir, buildId } ) { if (dev) return defaultPathMap; generateJsonFeed(outDir); await Promise.all( staticFilesToCopy.map(file => copyFile(join(dir, file), join(outDir, file)) ) ); return defaultPathMap; }, webpack(config, options) { config.module.rules.push({ test: /.svg$/, use: [ { loader: "@svgr/webpack", options: { icon: true } } ] }); config.module.rules.push({ test: /.css$/, use: "raw-loader" }); return config; } });
const refract = require("refractor"); // NOTE: This highlights template-strings as strings of CSS const styledHighlight = {
no-amd-name.js
/** * @fileoverview Enforce no naming of AMD modules * @author Adam Davies */ 'use strict'; // ------------------------------------------------------------------------------ // Requirements // ------------------------------------------------------------------------------ const RuleTester = require('eslint').RuleTester; const rule = require('../../lib/rules/no-amd-name'); const parserOptions = { ecmaVersion: 2015, sourceType: 'module' };
// Tests // ------------------------------------------------------------------------------ const ruleTester = new RuleTester({ parserOptions }); ruleTester.run('no-amd-name', rule, { valid: [ { code: 'define([], function() {});' }, { code: 'define(["N/search"], function(search) {});' }, { code: 'define("moduleName");' }, { code: 'define("moduleName", ["N/search"]);' } ], invalid: [ { code: 'define("moduleName", [], function() {});', errors: [{ messageId: 'noModuleName' }], output: 'define([], function() {});' }, { code: 'define("moduleName", ["N/search"], function(search) {});', errors: [{ messageId: 'noModuleName' }], output: 'define(["N/search"], function(search) {});' } ] });
// ------------------------------------------------------------------------------
contract_with_abi_and_structs.rs
//! Main entry point for ContractMonitor
abigen!(VerifierContract, "ethers-contract/tests/solidity-contracts/verifier_abi.json"); /// This example only demonstrates how to use generated structs for solidity functions that /// have structs as input. #[tokio::main] async fn main() -> Result<(), Box<dyn std::error::Error>> { let ganache = Ganache::new().spawn(); let provider = Provider::<Http>::try_from(ganache.endpoint())?.interval(Duration::from_millis(10u64)); let wallet: LocalWallet = ganache.keys()[0].clone().into(); let client = SignerMiddleware::new(provider, wallet); let client = Arc::new(client); let contract = VerifierContract::new(Address::zero(), client); // NOTE: this is all just dummy data let g1 = G1Point { x: U256::zero(), y: U256::zero() }; let g2 = G2Point { x: [U256::zero(), U256::zero()], y: [U256::zero(), U256::zero()] }; let vk = VerifyingKey { alfa_1: g1.clone(), beta_2: g2.clone(), gamma_2: g2.clone(), delta_2: g2.clone(), ic: vec![g1.clone()], }; let proof = Proof { a: g1.clone(), b: g2, c: g1 }; let _ = contract.verify(vec![], proof, vk); Ok(()) }
use ethers::{prelude::*, utils::Ganache}; use std::{convert::TryFrom, sync::Arc, time::Duration};
answer.py
from typing import List class Solution: def searchRange(self, nums: List[int], target: int) -> List[int]: if not nums: return [-1, -1] N = len(nums) left = 0 right = N - 1 while left < right: mid = (left + right) // 2 if nums[mid] == target: right = mid elif nums[mid] < target: left = mid + 1 else: right = mid - 1 start = -1 if nums[left] != target else left left = 0 right = N - 1 while left < right: mid = (left + right + 1) // 2 if nums[mid] == target: left = mid elif nums[mid] < target: left = mid + 1 else: right = mid - 1 end = -1 if nums[right] != target else right return [start, end] if __name__ == "__main__": s = Solution() result = s.searchRange([2, 2, 3], 3)
print(result)
utils.go
// Copyright 2017 Cwen. All rights reserved. // Use of this source code is governed by a MIT style // license that can be found in the LICENSE file. package justso import "path" func joinPaths(absolutePath, relativePath string) string { if len(relativePath) == 0 { return absolutePath }
appendSlash := lastChar(relativePath) == '/' && lastChar(finalPath) != '/' if appendSlash { return finalPath + "/" } return finalPath } func lastChar(str string) uint8 { size := len(str) if size == 0 { panic("The length of the string can't be 0") } return str[size-1] }
finalPath := path.Join(absolutePath, relativePath)
try.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from pyteaser import SummarizeUrl from scipy import spatial import re, math from collections import Counter #http://stackoverflow.com/questions/15173225/how-to-calculate-cosine-similarity-given-2-sentence-strings-python WORD = re.compile(r'\w+') def get_cosine(vec1, vec2): intersection = set(vec1.keys()) & set(vec2.keys()) numerator = sum([vec1[x] * vec2[x] for x in intersection]) sum1 = sum([vec1[x]**2 for x in vec1.keys()]) sum2 = sum([vec2[x]**2 for x in vec2.keys()]) denominator = math.sqrt(sum1) * math.sqrt(sum2) if not denominator: return 0.0 else: return float(numerator) / denominator def text_to_vector(text): words = WORD.findall(text) return Counter(words)
sums = " ".join(summaries) print sums.replace('\n', '') url2 = 'https://www.svd.se/bjorn-granath-ar-dod/om/kultur:scen' summaries2 = SummarizeUrl(url2) sums2 = " ".join(summaries2) print sums2.replace('\n', '') url3 = 'https://www.dn.se/kultur-noje/bjorn-granath-ar-dod/' summaries3 = SummarizeUrl(url3) sums3 = " ".join(summaries3) print sums3.replace('\n', '') vector1 = text_to_vector(sums) vector2 = text_to_vector(sums2) vector3 = text_to_vector(sums3) print 'Cosine:', get_cosine(vector1, vector2) print 'Cosine:', get_cosine(vector1, vector3) print 'Cosine:', get_cosine(vector2, vector3) #result = 1 - spatial.distance.cosine(sums, sums) #print result
url = 'http://www.svt.se/kultur/bjorn-granath-har-avlidit' summaries = SummarizeUrl(url)
add_target_request.rs
/* * Hetzner Cloud API * * Copied from the official API documentation for the Public Hetzner Cloud. * * The version of the OpenAPI document: 0.4.0 * * Generated by: https://openapi-generator.tech */ /// AddTargetRequest : Request for POST https://api.hetzner.cloud/v1/load_balancers/{id}/actions/add_target #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct AddTargetRequest { /// Type of the resource #[serde(rename = "type")] pub _type: Type, #[serde(rename = "server", skip_serializing_if = "Option::is_none")] pub server: Option<Box<crate::models::AddTargetRequestServer>>,
pub label_selector: Option<Box<crate::models::LabelSelector>>, #[serde(rename = "ip", skip_serializing_if = "Option::is_none")] pub ip: Option<Box<crate::models::AddTargetRequestIp>>, } impl AddTargetRequest { /// Request for POST https://api.hetzner.cloud/v1/load_balancers/{id}/actions/add_target pub fn new(_type: Type) -> AddTargetRequest { AddTargetRequest { _type, server: None, use_private_ip: None, label_selector: None, ip: None, } } } /// Type of the resource #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Type { #[serde(rename = "ip")] Ip, #[serde(rename = "label_selector")] LabelSelector, #[serde(rename = "server")] Server, }
/// Use the private network IP instead of the public IP of the Server, requires the Server and Load Balancer to be in the same network. Default value is false. #[serde(rename = "use_private_ip", skip_serializing_if = "Option::is_none")] pub use_private_ip: Option<bool>, #[serde(rename = "label_selector", skip_serializing_if = "Option::is_none")]
user_test.go
package dao import ( "fmt" "gim/internal/business/model" "gim/pkg/db" "testing" ) func init() { fmt.Println("init db") db.InitByTest() } func TestUserDao_Add(t *testing.T) { id, err := UserDao.Add(model.User{ PhoneNumber: "18829291351", Nickname: "Alber", Sex: 1, AvatarUrl: "AvatarUrl", Extra: "Extra", }) fmt.Printf("%+v\n %+v\n ", id, err) } func TestUserDao_Get(t *testing.T) { user, err := UserDao.Get(1) fmt.Printf("%+v\n %+v\n ", user, err) } func TestUserDao_GetByIds(t *testing.T) { users, err := UserDao.GetByIds([]int64{1, 2, 3}) fmt.Printf("%+v\n %+v\n ", users, err) } func TestUserDao_GetByPhoneNumber(t *testing.T) { user, err := UserDao.GetByPhoneNumber("18829291351") fmt.Printf("%+v\n %+v\n ", user, err) } func TestUserDao_Update(t *testing.T) { fmt.Println(UserDao.Update(model.User{ Id: 1, PhoneNumber: "18829291352", Nickname: "alber", Sex: 2, AvatarUrl: "url", Extra: "e", })) }
fmt.Printf("%+v\n %+v\n ", users, err) }
func TestUserDao_Search(t *testing.T) { users, err := UserDao.Search("哈哈哈")
aether_anemo.rs
use num_derive::FromPrimitive; use crate::attribute::{Attribute, AttributeName}; use crate::character::character_common_data::CharacterCommonData; use crate::character::{CharacterConfig, CharacterName, CharacterStaticData}; use crate::character::character_sub_stat::CharacterSubStatFamily; use crate::character::characters::albedo::AlbedoRoleEnum; use crate::character::skill_config::CharacterSkillConfig; use crate::character::traits::{CharacterSkillMap, CharacterSkillMapItem, CharacterTrait}; use crate::common::{ChangeAttribute, Element, SkillType, WeaponType}; use crate::damage::damage_builder::DamageBuilder; use crate::damage::DamageContext; use crate::target_functions::TargetFunction; use crate::team::TeamQuantization; use crate::weapon::weapon_common_data::WeaponCommonData; pub struct AetherAnemoSkillType { pub normal_dmg1: [f64; 15], pub normal_dmg2: [f64; 15], pub normal_dmg3: [f64; 15], pub normal_dmg4: [f64; 15], pub normal_dmg5: [f64; 15], pub charged_dmg11: [f64; 15], pub charged_dmg12: [f64; 15], pub plunging_dmg1: [f64; 15], pub plunging_dmg2: [f64; 15], pub plunging_dmg3: [f64; 15], pub elemental_skill_dmg1: [f64; 15], pub elemental_skill_dmg2: [f64; 15], pub elemental_skill_dmg3: [f64; 15], pub elemental_skill_dmg4: [f64; 15], pub elemental_burst_dmg1: [f64; 15], pub elemental_burst_dmg2: [f64; 15], } const AETHER_ANEMO_SKILL: AetherAnemoSkillType = AetherAnemoSkillType { normal_dmg1: [0.4446, 0.4808, 0.517, 0.5687, 0.6049, 0.6463, 0.7031, 0.76, 0.8169, 0.8789, 0.9409, 1.003, 1.065, 1.1271, 1.1891], normal_dmg2: [0.4343, 0.4697, 0.505, 0.5555, 0.5909, 0.6313, 0.6868, 0.7423, 0.7979, 0.8585, 0.9191, 0.9797, 1.0403, 1.1009, 1.1615], normal_dmg3: [0.5298, 0.5729, 0.616, 0.6776, 0.7207, 0.77, 0.8378, 0.9055, 0.9733, 1.0472, 1.1211, 1.195, 1.269, 1.3429, 1.4168], normal_dmg4: [0.5831, 0.6305, 0.678, 0.7458, 0.7933, 0.8475, 0.9221, 0.9967, 1.0712, 1.1526, 1.234, 1.3153, 1.3967, 1.478, 1.5594], normal_dmg5: [0.7078, 0.7654, 0.823, 0.9053, 0.9629, 1.0288, 
1.1193, 1.2098, 1.3003, 1.3991, 1.4979, 1.5966, 1.6954, 1.7941, 1.8929], charged_dmg11: [0.559, 0.6045, 0.65, 0.715, 0.7605, 0.8125, 0.884, 0.9555, 1.027, 1.105, 1.183, 1.261, 1.339, 1.417, 1.495], charged_dmg12: [0.6072, 0.6566, 0.706, 0.7766, 0.826, 0.8825, 0.9602, 1.0378, 1.1155, 1.2002, 1.2849, 1.3696, 1.4544, 1.5391, 1.6238], plunging_dmg1: [0.6393, 0.6914, 0.7434, 0.8177, 0.8698, 0.9293, 1.011, 1.0928, 1.1746, 1.2638, 1.353, 1.4422, 1.5314, 1.6206, 1.7098], plunging_dmg2: [1.2784, 1.3824, 1.4865, 1.6351, 1.7392, 1.8581, 2.0216, 2.1851, 2.3486, 2.527, 2.7054, 2.8838, 3.0622, 3.2405, 3.4189], plunging_dmg3: [1.5968, 1.7267, 1.8567, 2.0424, 2.1723, 2.3209, 2.5251, 2.7293, 2.9336, 3.1564, 3.3792, 3.602, 3.8248, 4.0476, 4.2704], elemental_skill_dmg1: [0.12, 0.129, 0.138, 0.15, 0.159, 0.168, 0.18, 0.192, 0.204, 0.216, 0.228, 0.24, 0.255, 0.27, 0.285], elemental_skill_dmg2: [0.168, 0.1806, 0.1932, 0.21, 0.2226, 0.2352, 0.252, 0.2688, 0.2856, 0.3024, 0.3192, 0.336, 0.357, 0.378, 0.399], elemental_skill_dmg3: [1.76, 1.892, 2.024, 2.2, 2.332, 2.464, 2.64, 2.816, 2.992, 3.168, 3.344, 3.52, 3.74, 3.96, 4.18], elemental_skill_dmg4: [1.92, 2.064, 2.208, 2.4, 2.544, 2.688, 2.88, 3.072, 3.264, 3.456, 3.648, 3.84, 4.08, 4.32, 4.56], elemental_burst_dmg1: [0.808, 0.8686, 0.9292, 1.01, 1.0706, 1.1312, 1.212, 1.2928, 1.3736, 1.4544, 1.5352, 1.616, 1.717, 1.818, 1.919], elemental_burst_dmg2: [0.248, 0.2666, 0.2852, 0.31, 0.3286, 0.3472, 0.372, 0.3968, 0.4216, 0.4464, 0.4712, 0.496, 0.527, 0.558, 0.589] }; pub struct AetherAnemoEffect { pub c2: bool } impl<A: Attribute> ChangeAttribute<A> for AetherAnemoEffect { fn change_attribute(&self, attribute: &mut A) { if self.c2 { attribute.set_value_by(AttributeName::Recharge, "命座2革新的旋风", 0.16); } } } #[derive(Copy, Clone, Eq, PartialEq)] #[derive(FromPrimitive)] pub enum AetherAnemoDamageEnum { Normal1, Normal2, Normal3, Normal4, Normal5, Charged11, Charged12, Plunging1, Plunging2, Plunging3, E1, E2, E3, E4, Q1, Q2Pyro, Q2Cryo, 
Q2Electro, Q2Hydro } impl Into<usize> for AetherAnemoDamageEnum { fn into(self) -> usize { self as usize } } impl AetherAnemoDamageEnum { pub fn get_element(&self) -> Element { use AetherAnemoDamageEnum::*; match *self { Normal1 | Normal2 | Normal3 | Normal4 | Normal5 | Charged11 | Charged12 | Plunging1 | Plunging2 | Plunging3 => Element::Physical, E1 | E2 | E3 | E4 | Q1 => Element::Anemo, Q2Pyro => Element::Pyro, Q2Electro => Element::Electro, Q2Cryo => Element::Cryo, Q2Hydro => Element::Hydro } } pub fn get_skill_type(&self) -> SkillType { use AetherAnemoDamageEnum::*; match *self { Normal1 | Normal2 | Normal3 | Normal4 | Normal5 => SkillType::NormalAttack, Charged11 | Charged12 => SkillType::ChargedAttack, Plunging1 | Plunging2 | Plunging3 => SkillType::PlungingAttack, E1 | E2 | E3 | E4 => SkillType::ElementalSkill, Q1 | Q2Hydro | Q2Cryo | Q2Electro | Q2Pyro => SkillType::ElementalBurst } } } pub enum AetherAnemoRoleEnum { Sub } pub struct AetherAnemo; impl CharacterTrait for AetherAnemo { const STATIC_DATA: CharacterStaticData = CharacterStaticData { name: CharacterName::AetherAnemo, chs: "空-风", element: Element::Anemo, hp: [912, 2342, 3024, 4529, 5031, 5766, 6411, 7164, 7648, 8401, 8885, 9638, 10122, 10875], atk: [18, 46, 59, 88, 98, 113, 125, 140, 149, 164, 174, 188, 198, 212], def: [57, 147, 190, 284, 315, 362, 402, 450, 480, 527, 558, 605, 635, 683], sub_stat: CharacterSubStatFamily::ATK240, weapon_type: WeaponType::Sword, star: 5, skill_name1: "普通攻击·异邦铁风", skill_name2: "风涡剑", skill_name3: "风息激荡" }; type SkillType = AetherAnemoSkillType; const SKILL: Self::SkillType = AETHER_ANEMO_SKILL; type DamageEnumType = AetherAnemoDamageEnum; type RoleEnum = AlbedoRoleEnum; #[cfg(not(target_family = "wasm"))] const SKILL_MAP: CharacterSkillMap = CharacterSkillMap { skill1: Some(&[ CharacterSkillMapItem { index: AetherAnemoDamageEnum::Normal1 as usize, chs: "一段伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::Normal2 as usize, chs: "二段伤害" }, 
CharacterSkillMapItem { index: AetherAnemoDamageEnum::Normal3 as usize, chs: "三段伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::Normal4 as usize, chs: "四段伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::Normal5 as usize, chs: "五段伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::Charged11 as usize, chs: "重击伤害-1" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::Charged12 as usize, chs: "重击伤害-2" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::Plunging1 as usize, chs: "下坠期间伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::Plunging2 as usize, chs: "低空坠地冲击伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::Plunging3 as usize, chs: "高空坠地冲击伤害" }, ]), skill2: Some(&[ CharacterSkillMapItem { index: AetherAnemoDamageEnum::E1 as usize, chs: "初始切割伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::E2 as usize, chs: "最大切割伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::E3 as usize, chs: "初始爆风伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::E4 as usize, chs: "最大爆风伤害" }, ]), skill3: Some(&[ CharacterSkillMapItem { index: AetherAnemoDamageEnum::Q1 as usize, chs: "龙卷风伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::Q2Pyro as usize, chs: "附加火元素伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::Q2Hydro as usize, chs: "附加水元素伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::Q2Electro as usize, chs: "附加雷元素伤害" }, CharacterSkillMapItem { index: AetherAnemoDamageEnum::Q2Cryo as usize, chs: "附加冰元素伤害" }, ]) }; fn damage_internal<D: DamageBuilder>(context: &DamageContext<'_, D::AttributeType>, s: usize, config: &CharacterSkillConfig) -> D::Result { let s: AetherAnemoDamageEnum = num::FromPrimitive::from_usize(s).unwrap(); let (s1, s2, s3) = context.character_common_data.get_3_skill(); use AetherAnemoDamageEnum::*; let ratio = match s { Normal1 => AETHER_ANEMO_SKILL.normal_dmg1[s1], Normal2 => AETHER_ANEMO_SKILL.normal_dmg2[s1], Normal3 => 
AETHER_ANEMO_SKILL.normal_dmg3[s1], Normal4 => AETHER_ANEMO_SKILL.normal_dmg4[s1], Normal5 => AETHER_ANEMO_SKILL.normal_dmg5[s1], Charged11 => AETHER_ANEMO_SKILL.charged_dmg11[s1], Charged12 => AETHER_ANEMO_SKILL.charged_dmg12[s1], Plunging1 => AETHER_ANEMO_SKILL.plunging_dmg1[s1], Plunging2 => AETHER_ANEMO_SKILL.plunging_dmg2[s1], Plunging3 => AETHER_ANEMO_SKILL.plunging_dmg3[s1], E1 => AETHER_ANEMO_SKILL.elemental_skill_dmg1[s2], E2 => AETHER_ANEMO_SKILL.elemental_skill_dmg2[s2], E3 => AETHER_ANEMO_SKILL.elemental_skill_dmg3[s2], E4 => AETHER_ANEMO_SKILL.elemental_skill_dmg4[s2], Q1 => AETHER_ANEMO_SKILL.elemental_burst_dmg1[s3], Q2Cryo | Q2Electro | Q2Pyro | Q2Hydro => AETHER_ANEMO_SKILL.elemental_burst_dmg2[s3] }; let mut builder = D::new(); builder.add_atk_ratio("技能倍率", ratio); builder.damage( &context.attribute, &context.enemy, s.get_element(), s.get_skill_type(), context.character_common_data.level ) } fn new_effect<A: Attribute>(common_data: &CharacterCommonData, _config: &CharacterConfig) -> Option<Box<dyn ChangeAttribute<A>>> { Some(Box::new(AetherAnemoEffect { c2: common_data.constellation >= 2 })) } fn get_target_function_by_role(role_index: usize, team: &TeamQuantization, c: &CharacterCommonData, w: &WeaponCommonData) -> Box<dyn TargetFunction> { todo!() } }
fs_test.go
// Copyright 2016 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package hugofs import (
qt "github.com/frankban/quicktest" "github.com/gohugoio/hugo/htesting/hqt" "github.com/spf13/afero" ) func TestIsOsFs(t *testing.T) { c := qt.New(t) c.Assert(IsOsFs(Os), qt.Equals, true) c.Assert(IsOsFs(&afero.MemMapFs{}), qt.Equals, false) c.Assert(IsOsFs(afero.NewBasePathFs(&afero.MemMapFs{}, "/public")), qt.Equals, false) c.Assert(IsOsFs(afero.NewBasePathFs(Os, t.TempDir())), qt.Equals, true) } func TestNewDefault(t *testing.T) { c := qt.New(t) v := config.NewWithTestDefaults() v.Set("workingDir", t.TempDir()) f := NewDefault(v) c.Assert(f.Source, qt.IsNotNil) c.Assert(f.Source, hqt.IsSameType, new(afero.OsFs)) c.Assert(f.Os, qt.IsNotNil) c.Assert(f.WorkingDirReadOnly, qt.IsNotNil) c.Assert(f.WorkingDirReadOnly, hqt.IsSameType, new(afero.BasePathFs)) c.Assert(IsOsFs(f.Source), qt.IsTrue) c.Assert(IsOsFs(f.WorkingDirReadOnly), qt.IsTrue) c.Assert(IsOsFs(f.PublishDir), qt.IsTrue) c.Assert(IsOsFs(f.Os), qt.IsTrue) } func TestNewMem(t *testing.T) { c := qt.New(t) v := config.NewWithTestDefaults() f := NewMem(v) c.Assert(f.Source, qt.Not(qt.IsNil)) c.Assert(f.Source, hqt.IsSameType, new(afero.MemMapFs)) c.Assert(f.PublishDir, qt.Not(qt.IsNil)) c.Assert(f.PublishDir, hqt.IsSameType, new(afero.BasePathFs)) c.Assert(f.Os, hqt.IsSameType, new(afero.OsFs)) c.Assert(f.WorkingDirReadOnly, qt.IsNotNil) c.Assert(IsOsFs(f.Source), qt.IsFalse) c.Assert(IsOsFs(f.WorkingDirReadOnly), qt.IsFalse) c.Assert(IsOsFs(f.PublishDir), qt.IsFalse) c.Assert(IsOsFs(f.Os), qt.IsTrue) }
"testing" "github.com/gohugoio/hugo/config"
shootout-spectralnorm.rs
// The Computer Language Benchmarks Game // http://benchmarksgame.alioth.debian.org/ // // contributed by the Rust Project Developers // Copyright (c) 2012-2014 The Rust Project Developers // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // - Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // - Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in // the documentation and/or other materials provided with the // distribution. // // - Neither the name of "The Computer Language Benchmarks Game" nor // the name of "The Computer Language Shootout Benchmarks" nor the // names of its contributors may be used to endorse or promote // products derived from this software without specific prior // written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED // OF THE POSSIBILITY OF SUCH DAMAGE. 
// no-pretty-expanded FIXME #15189 #![allow(non_snake_case)] #![feature(unboxed_closures)] use std::iter::{repeat, AdditiveIterator}; use std::thread; use std::mem; use std::num::Float; use std::os; use std::env; use std::raw::Repr; use std::simd::f64x2; fn main() { let mut args = env::args(); let answer = spectralnorm(if env::var_os("RUST_BENCH").is_some() { 5500 } else if args.len() < 2 { 2000 } else { args.nth(1).unwrap().parse().unwrap() }); println!("{:.9}", answer); } fn spectralnorm(n: uint) -> f64 { assert!(n % 2 == 0, "only even lengths are accepted"); let mut u = repeat(1.0).take(n).collect::<Vec<_>>(); let mut v = u.clone(); let mut tmp = v.clone(); for _ in 0..10 { mult_AtAv(&u, &mut v, &mut tmp); mult_AtAv(&v, &mut u, &mut tmp); } (dot(&u, &v) / dot(&v, &v)).sqrt() } fn mult_AtAv(v: &[f64], out: &mut [f64], tmp: &mut [f64]) { mult_Av(v, tmp); mult_Atv(tmp, out); } fn mult_Av(v: &[f64], out: &mut [f64]) { parallel(out, |start, out| mult(v, out, start, |i, j| A(i, j))); } fn mult_Atv(v: &[f64], out: &mut [f64]) { parallel(out, |start, out| mult(v, out, start, |i, j| A(j, i))); } fn mult<F>(v: &[f64], out: &mut [f64], start: uint, a: F) where F: Fn(uint, uint) -> f64 { for (i, slot) in out.iter_mut().enumerate().map(|(i, s)| (i + start, s)) { let mut sum = f64x2(0.0, 0.0); for (j, chunk) in v.chunks(2).enumerate().map(|(j, s)| (2 * j, s)) { let top = f64x2(chunk[0], chunk[1]); let bot = f64x2(a(i, j), a(i, j + 1)); sum += top / bot; } let f64x2(a, b) = sum; *slot = a + b; } } fn A(i: uint, j: uint) -> f64
fn dot(v: &[f64], u: &[f64]) -> f64 { v.iter().zip(u.iter()).map(|(a, b)| *a * *b).sum() } // Executes a closure in parallel over the given mutable slice. The closure `f` // is run in parallel and yielded the starting index within `v` as well as a // sub-slice of `v`. fn parallel<'a,T, F>(v: &mut [T], ref f: F) where T: Send + Sync + 'a, F: Fn(uint, &mut [T]) + Sync + 'a { let size = v.len() / os::num_cpus() + 1; v.chunks_mut(size).enumerate().map(|(i, chunk)| { thread::scoped(move|| { f(i * size, chunk) }) }).collect::<Vec<_>>(); }
{ ((i + j) * (i + j + 1) / 2 + i + 1) as f64 }
po_boxes.py
import random import six from geodata.addresses.config import address_config from geodata.addresses.numbering import NumberedComponent, Digits, sample_alphabet, latin_alphabet from geodata.encoding import safe_decode from geodata.math.sampling import cdf, weighted_choice class POBox(NumberedComponent): @classmethod def random_digits(cls, num_digits): # Note: PO Boxes can have leading zeros but not important for the parser # since it only cares about how many digits there are in a number low = 10 ** (num_digits - 1) high = (10 ** num_digits) - 1 return random.randint(low, high) @classmethod
return six.u('').join([prefix, safe_decode(cls.random_digits(num_digits))]) @classmethod def random_digits_with_suffix(cls, num_digits, suffix=six.u('')): return six.u('').join([safe_decode(cls.random_digits(num_digits)), suffix]) @classmethod def random_letter(cls, language, country=None): alphabet = address_config.get_property('alphabet', language, country=country, default=latin_alphabet) return sample_alphabet(alphabet) @classmethod def random(cls, language, country=None): num_type, num_type_props = cls.choose_alphanumeric_type('po_boxes.alphanumeric', language, country=country) if num_type is None: return None if num_type != cls.ALPHA: digit_config = address_config.get_property('po_boxes.digits', language, country=country, default=[]) values = [] probs = [] for val in digit_config: values.append(val['length']) probs.append(val['probability']) probs = cdf(probs) num_digits = weighted_choice(values, probs) digits = cls.random_digits(num_digits) number = Digits.rewrite(digits, language, num_type_props) if num_type == cls.NUMERIC: return safe_decode(number) else: letter = cls.random_letter(language, country=country) whitespace_probability = float(num_type_props.get('whitespace_probability', 0.0)) whitespace_phrase = six.u(' ') if whitespace_probability and random.random() < whitespace_probability else six.u('') if num_type == cls.ALPHA_PLUS_NUMERIC: return six.u('{}{}{}').format(letter, whitespace_phrase, number) elif num_type == cls.NUMERIC_PLUS_ALPHA: return six.u('{}{}{}').format(number, whitespace_phrase, letter) else: return cls.random_letter(language, country=country) @classmethod def phrase(cls, box_number, language, country=None): if box_number is None: return None return cls.numeric_phrase('po_boxes.alphanumeric', safe_decode(box_number), language, dictionaries=['post_office'], country=country)
def random_digits_with_prefix(cls, num_digits, prefix=six.u('')):
user.entity.ts
import { Column, Entity, Index, OneToMany } from 'typeorm';
import { HistoryEntity } from './history.entity'; // eslint-disable-next-line import/no-cycle import { InvitationEntity } from './invitation.entity'; // eslint-disable-next-line import/no-cycle import { PushNotificationEntity } from './pushNotification.entity'; // eslint-disable-next-line import/no-cycle import { RefreshTokenEntity } from './refreshToken.entity'; @Entity('users') export class UserEntity extends BaseEntity { @Column({ unique: true, }) @Index({ unique: true }) public email: string; @Column({ type: 'varchar', }) public password: string; @Column({ type: 'varchar', }) public firstName: string; @Column({ type: 'varchar', }) public lastName: string; @Column({ type: 'enum', array: true, nullable: true, enum: Role, }) public roles: Array<Role>; @Column({ type: 'varchar', unique: true, nullable: true, }) public externalIntegrationsToken: string; @OneToMany(() => HistoryEntity, (history) => history.user, { onDelete: 'CASCADE' }) public history: Array<HistoryEntity>; @OneToMany(() => RefreshTokenEntity, (token) => token.user, { onDelete: 'CASCADE' }) public refreshTokens: Promise<[RefreshTokenEntity]>; @OneToMany(() => InvitationEntity, (invitation) => invitation.createdBy, { onDelete: 'CASCADE' }) public createdInvitations: Array<InvitationEntity>; @OneToMany(() => InvitationEntity, (invitation) => invitation.updatedBy, { onDelete: 'CASCADE' }) public updatedInvitations: Array<InvitationEntity>; @OneToMany(() => PushNotificationEntity, (pushNotification) => pushNotification.user, { onDelete: 'CASCADE', }) public pushNotifications: Array<InvitationEntity>; }
import { Role } from '../../../enums/role.enum'; import { BaseEntity } from './base.entity'; // eslint-disable-next-line import/no-cycle
__init__.py
default_app_config = 'gateway.apps.GatewayConfig'
wheel.py
'''Wheels support.''' from distutils.util import get_platform import email import itertools import os import re import zipfile from pkg_resources import Distribution, PathMetadata, parse_version from pkg_resources.extern.six import PY3 from setuptools import Distribution as SetuptoolsDistribution from setuptools import pep425tags from setuptools.command.egg_info import write_requirements WHEEL_NAME = re.compile( r"""^(?P<project_name>.+?)-(?P<version>\d.*?) ((-(?P<build>\d.*?))?-(?P<py_version>.+?)-(?P<abi>.+?)-(?P<platform>.+?) )\.whl$""", re.VERBOSE).match class Wheel(object): def __init__(self, filename): match = WHEEL_NAME(os.path.basename(filename)) if match is None: raise ValueError('invalid wheel name: %r' % filename) self.filename = filename for k, v in match.groupdict().items(): setattr(self, k, v) def tags(self): '''List tags (py_version, abi, platform) supported by this wheel.''' return itertools.product(self.py_version.split('.'), self.abi.split('.'), self.platform.split('.')) def is_compatible(self): '''Is the wheel is compatible with the current platform?''' supported_tags = pep425tags.get_supported() return next((True for t in self.tags() if t in supported_tags), False) def egg_name(self):
def install_as_egg(self, destination_eggdir): '''Install wheel as an egg directory.''' with zipfile.ZipFile(self.filename) as zf: dist_basename = '%s-%s' % (self.project_name, self.version) dist_info = '%s.dist-info' % dist_basename dist_data = '%s.data' % dist_basename def get_metadata(name): with zf.open('%s/%s' % (dist_info, name)) as fp: value = fp.read().decode('utf-8') if PY3 else fp.read() return email.parser.Parser().parsestr(value) wheel_metadata = get_metadata('WHEEL') dist_metadata = get_metadata('METADATA') # Check wheel format version is supported. wheel_version = parse_version(wheel_metadata.get('Wheel-Version')) if not parse_version('1.0') <= wheel_version < parse_version('2.0dev0'): raise ValueError('unsupported wheel format version: %s' % wheel_version) # Extract to target directory. os.mkdir(destination_eggdir) zf.extractall(destination_eggdir) # Convert metadata. dist_info = os.path.join(destination_eggdir, dist_info) dist = Distribution.from_location( destination_eggdir, dist_info, metadata=PathMetadata(destination_eggdir, dist_info) ) # Note: we need to evaluate and strip markers now, # as we can't easily convert back from the syntax: # foobar; "linux" in sys_platform and extra == 'test' def raw_req(req): req.marker = None return str(req) install_requires = list(sorted(map(raw_req, dist.requires()))) extras_require = { extra: list(sorted( req for req in map(raw_req, dist.requires((extra,))) if req not in install_requires )) for extra in dist.extras } egg_info = os.path.join(destination_eggdir, 'EGG-INFO') os.rename(dist_info, egg_info) os.rename(os.path.join(egg_info, 'METADATA'), os.path.join(egg_info, 'PKG-INFO')) setup_dist = SetuptoolsDistribution(attrs=dict( install_requires=install_requires, extras_require=extras_require, )) write_requirements(setup_dist.get_command_obj('egg_info'), None, os.path.join(egg_info, 'requires.txt')) # Move data entries to their correct location. 
dist_data = os.path.join(destination_eggdir, dist_data) dist_data_scripts = os.path.join(dist_data, 'scripts') if os.path.exists(dist_data_scripts): egg_info_scripts = os.path.join(destination_eggdir, 'EGG-INFO', 'scripts') os.mkdir(egg_info_scripts) for entry in os.listdir(dist_data_scripts): # Remove bytecode, as it's not properly handled # during easy_install scripts install phase. if entry.endswith('.pyc'): os.unlink(os.path.join(dist_data_scripts, entry)) else: os.rename(os.path.join(dist_data_scripts, entry), os.path.join(egg_info_scripts, entry)) os.rmdir(dist_data_scripts) for subdir in filter(os.path.exists, ( os.path.join(dist_data, d) for d in ('data', 'headers', 'purelib', 'platlib') )): for entry in os.listdir(subdir): os.rename(os.path.join(subdir, entry), os.path.join(destination_eggdir, entry)) os.rmdir(subdir) if os.path.exists(dist_data): os.rmdir(dist_data)
return Distribution( project_name=self.project_name, version=self.version, platform=(None if self.platform == 'any' else get_platform()), ).egg_name() + '.egg'
test_contents.py
#! /usr/bin/env python # $Id: test_contents.py 8771 2021-06-18 18:55:08Z milde $ # Author: David Goodger <[email protected]> # Copyright: This module has been placed in the public domain. """ Tests for `docutils.transforms.parts.Contents` (via `docutils.transforms.universal.LastReaderPending`). """ from __future__ import absolute_import if __name__ == '__main__': import __init__ from test_transforms import DocutilsTestSupport from docutils.transforms.references import Substitutions from docutils.parsers.rst import Parser def suite():
totest = {} totest['tables_of_contents'] = ((Substitutions,), [ ["""\ .. contents:: Title 1 ======= Paragraph 1. Title_ 2 -------- Paragraph 2. _`Title` 3 `````````` Paragraph 3. Title 4 ------- Paragraph 4. """, """\ <document source="test data"> <topic classes="contents" ids="contents" names="contents"> <title> Contents <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-1" refid="title-1"> Title 1 <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-2" refid="title-2"> Title 2 <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-3" refid="title-3"> Title 3 <list_item> <paragraph> <reference ids="toc-entry-4" refid="title-4"> Title 4 <section ids="title-1" names="title\\ 1"> <title refid="toc-entry-1"> Title 1 <paragraph> Paragraph 1. <section ids="title-2" names="title\\ 2"> <title> <reference name="Title" refname="title"> Title 2 <paragraph> Paragraph 2. <section ids="title-3" names="title\\ 3"> <title refid="toc-entry-3"> <target ids="title" names="title"> Title 3 <paragraph> Paragraph 3. <section ids="title-4" names="title\\ 4"> <title refid="toc-entry-4"> Title 4 <paragraph> Paragraph 4. """], ["""\ .. contents:: Table of Contents Title 1 ======= Paragraph 1. Title 2 ------- Paragraph 2. """, """\ <document source="test data"> <topic classes="contents" ids="table-of-contents" names="table\\ of\\ contents"> <title> Table of Contents <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-1" refid="title-1"> Title 1 <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-2" refid="title-2"> Title 2 <section ids="title-1" names="title\\ 1"> <title refid="toc-entry-1"> Title 1 <paragraph> Paragraph 1. <section ids="title-2" names="title\\ 2"> <title refid="toc-entry-2"> Title 2 <paragraph> Paragraph 2. """], ["""\ .. contents:: There's an image in Title 2 Title 1 ======= Paragraph 1. |Title 2| ========= Paragraph 2. .. 
|Title 2| image:: title2.png """, """\ <document source="test data"> <topic classes="contents" ids="there-s-an-image-in-title-2" names="there's\\ an\\ image\\ in\\ title\\ 2"> <title> There's an image in Title 2 <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-1" refid="title-1"> Title 1 <list_item> <paragraph> <reference ids="toc-entry-2" refid="title-2"> Title 2 <section ids="title-1" names="title\\ 1"> <title refid="toc-entry-1"> Title 1 <paragraph> Paragraph 1. <section ids="title-2" names="title\\ 2"> <title refid="toc-entry-2"> <image alt="Title 2" uri="title2.png"> <paragraph> Paragraph 2. <substitution_definition names="Title\\ 2"> <image alt="Title 2" uri="title2.png"> """], # emacs cruft: " ["""\ .. contents:: :depth: 2 Title 1 ======= Paragraph 1. Title 2 ------- Paragraph 2. Title 3 ``````` Paragraph 3. Title 4 ------- Paragraph 4. """, """\ <document source="test data"> <topic classes="contents" ids="contents" names="contents"> <title> Contents <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-1" refid="title-1"> Title 1 <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-2" refid="title-2"> Title 2 <list_item> <paragraph> <reference ids="toc-entry-3" refid="title-4"> Title 4 <section ids="title-1" names="title\\ 1"> <title refid="toc-entry-1"> Title 1 <paragraph> Paragraph 1. <section ids="title-2" names="title\\ 2"> <title refid="toc-entry-2"> Title 2 <paragraph> Paragraph 2. <section ids="title-3" names="title\\ 3"> <title> Title 3 <paragraph> Paragraph 3. <section ids="title-4" names="title\\ 4"> <title refid="toc-entry-3"> Title 4 <paragraph> Paragraph 4. """], ["""\ Title 1 ======= .. contents:: :local: Paragraph 1. Title 2 ------- Paragraph 2. Title 3 ``````` Paragraph 3. Title 4 ------- Paragraph 4. 
""", """\ <document source="test data"> <section ids="title-1" names="title\\ 1"> <title> Title 1 <topic classes="contents local" ids="contents" names="contents"> <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-1" refid="title-2"> Title 2 <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-2" refid="title-3"> Title 3 <list_item> <paragraph> <reference ids="toc-entry-3" refid="title-4"> Title 4 <paragraph> Paragraph 1. <section ids="title-2" names="title\\ 2"> <title refid="toc-entry-1"> Title 2 <paragraph> Paragraph 2. <section ids="title-3" names="title\\ 3"> <title refid="toc-entry-2"> Title 3 <paragraph> Paragraph 3. <section ids="title-4" names="title\\ 4"> <title refid="toc-entry-3"> Title 4 <paragraph> Paragraph 4. """], ["""\ .. contents:: :local: Test duplicate name "Contents". Section -------- Paragraph. """, """\ <document source="test data"> <topic classes="contents local" ids="contents" names="contents"> <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-1" refid="section"> Section <paragraph> Test duplicate name "Contents". <section ids="section" names="section"> <title refid="toc-entry-1"> Section <paragraph> Paragraph. """], ["""\ .. contents:: :backlinks: top Section -------- Paragraph. """, """\ <document source="test data"> <topic classes="contents" ids="contents" names="contents"> <title> Contents <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-1" refid="section"> Section <section ids="section" names="section"> <title refid="contents"> Section <paragraph> Paragraph. """], ["""\ .. contents:: :backlinks: none Section -------- Paragraph. """, """\ <document source="test data"> <topic classes="contents" ids="contents" names="contents"> <title> Contents <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-1" refid="section"> Section <section ids="section" names="section"> <title> Section <paragraph> Paragraph. """], ["""\ .. contents:: Degenerate case, no table of contents generated. 
""", """\ <document source="test data"> <paragraph> Degenerate case, no table of contents generated. """], ["""\ Title 1 ======= Paragraph 1. .. sidebar:: Contents .. contents:: :local: Title 2 ------- Paragraph 2. Title 3 ``````` Paragraph 3. """, """\ <document source="test data"> <section ids="title-1" names="title\\ 1"> <title> Title 1 <paragraph> Paragraph 1. <sidebar> <title> Contents <topic classes="contents local" ids="contents" names="contents"> <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-1" refid="title-2"> Title 2 <bullet_list> <list_item> <paragraph> <reference ids="toc-entry-2" refid="title-3"> Title 3 <section ids="title-2" names="title\\ 2"> <title refid="toc-entry-1"> Title 2 <paragraph> Paragraph 2. <section ids="title-3" names="title\\ 3"> <title refid="toc-entry-2"> Title 3 <paragraph> Paragraph 3. """], ]) if __name__ == '__main__': import unittest unittest.main(defaultTest='suite')
parser = Parser() s = DocutilsTestSupport.TransformTestSuite(parser) s.generateTests(totest) return s
sign.rs
// Copyright 2020 - Nym Technologies SA <[email protected]> // SPDX-License-Identifier: Apache-2.0 use crate::commands::*; use crate::config::{persistence::pathfinder::MixNodePathfinder, Config}; use clap::{App, Arg, ArgMatches}; use colored::Colorize; use config::NymConfig; use crypto::asymmetric::identity; use log::error; const SIGN_TEXT_ARG_NAME: &str = "text"; const SIGN_ADDRESS_ARG_NAME: &str = "address"; pub fn command_args<'a, 'b>() -> App<'a, 'b> { App::new("sign") .about("Sign text to prove ownership of this mixnode") .arg( Arg::with_name(ID_ARG_NAME) .long(ID_ARG_NAME) .help("The id of the mixnode you want to sign with") .takes_value(true) .required(true), ) .arg( Arg::with_name(SIGN_ADDRESS_ARG_NAME) .long(SIGN_ADDRESS_ARG_NAME) .help("Signs your blockchain address with your identity key") .takes_value(true) .conflicts_with(SIGN_TEXT_ARG_NAME), ) .arg( Arg::with_name(SIGN_TEXT_ARG_NAME) .long(SIGN_TEXT_ARG_NAME) .help("Signs an arbitrary piece of text with your identity key") .takes_value(true) .conflicts_with(SIGN_ADDRESS_ARG_NAME), ) } pub fn load_identity_keys(pathfinder: &MixNodePathfinder) -> identity::KeyPair { let identity_keypair: identity::KeyPair = pemstore::load_keypair(&pemstore::KeyPairPath::new( pathfinder.private_identity_key().to_owned(), pathfinder.public_identity_key().to_owned(), )) .expect("Failed to read stored identity key files"); identity_keypair } fn print_signed_address(private_key: &identity::PrivateKey, raw_address: &str) -> String { let trimmed = raw_address.trim(); validate_bech32_address_or_exit(trimmed); let signature = private_key.sign_text(trimmed); println!( "The base58-encoded signature on '{}' is: {}", trimmed, signature ); signature } fn print_signed_text(private_key: &identity::PrivateKey, text: &str) { println!( "Signing the text {:?} using your mixnode's Ed25519 identity key...", text ); let signature = private_key.sign_text(text); println!( "The base58-encoded signature on '{}' is: {}", text, signature ) } pub fn 
execute(matches: &ArgMatches) { let id = matches.value_of(ID_ARG_NAME).unwrap(); let config = match Config::load_from_file(Some(id)) { Ok(cfg) => cfg, Err(err) => { error!("Failed to load config for {}. Are you sure you have run `init` before? (Error was: {})", id, err); return; } }; let pathfinder = MixNodePathfinder::new_from_config(&config); let identity_keypair = load_identity_keys(&pathfinder); if let Some(text) = matches.value_of(SIGN_TEXT_ARG_NAME) { print_signed_text(identity_keypair.private_key(), text) } else if let Some(address) = matches.value_of(SIGN_ADDRESS_ARG_NAME) { print_signed_address(identity_keypair.private_key(), address); } else { let error_message = format!( "You must specify either '--{}' or '--{}' argument!", SIGN_TEXT_ARG_NAME, SIGN_ADDRESS_ARG_NAME
) .red(); println!("{}", error_message); } }
skip.ts
namespace $ { /** * Ignore changes inside decorated action. * Usefull inside $mol_story_tell. */ export class
extends $mol_wrapper { static override wrap< This , Args extends unknown[] , Result >( task : ( this: This , ... args: Args )=> Result ) { return function( this: This , ... args: Args ) { const current = $mol_story_current $mol_story_current = null try { return task.call( this , ... args ) } finally { $mol_story_current = current } } } } }
$mol_story_skip
treemap.js
/* ------------------------------------------------------------------------------ * * # D3.js - zoomable treemap * * Demo of treemap setup with zoom and .json data source * * Version: 1.0 * Latest update: August 1, 2015 * * ---------------------------------------------------------------------------- */ $(function () { // Create Uniform checkbox $(".treemap_actions").uniform({ radioClass: 'choice' }); // Initialize chart treemap('#d3-treemap', 800); // Chart setup function treemap(element, height) { // Basic setup // ------------------------------ // Define main variables var d3Container = d3.select(element), width = d3Container.node().getBoundingClientRect().width, root, node; // Construct scales // ------------------------------ // Horizontal var x = d3.scale.linear() .range([0, width]); // Vertical var y = d3.scale.linear().range([0, height]); // Colors var color = d3.scale.category20(); // Create chart // ------------------------------ // Add SVG element var container = d3Container.append("svg"); // Add SVG group var svg = container .attr("width", width) .attr("height", height) .append("g") .attr("transform", "translate(.5,.5)") .style("font-size", 12) .style("overflow", "hidden") .style("text-indent", 2); // Construct chart layout // ------------------------------ // Treemap var treemap = d3.layout.treemap() .round(false) .size([width, height]) .sticky(true) .value(function(d) { return d.size; }); // Load data // ------------------------------ d3.json("assets/demo_data/d3/other/treemap.json", function(data) { node = root = data; var nodes = treemap.nodes(root) .filter(function(d) { return !d.children; }); // Add cells // ------------------------------ // Bind data var cell = svg.selectAll(".d3-treemap-cell") .data(nodes) .enter() .append("g") .attr("class", "d3-treemap-cell") .attr("transform", function(d) { return "translate(" + d.x + "," + d.y + ")"; }) .style("cursor", "pointer") .on("click", function(d) { return zoom(node == d.parent ? 
root : d.parent); }); // Append cell rects cell.append("rect") .attr("width", function(d) { return d.dx - 1; }) .attr("height", function(d) { return d.dy - 1; }) .style("fill", function(d, i) { return color(i); }); // Append text cell.append("text") .attr("x", function(d) { return d.dx / 2; }) .attr("y", function(d) { return d.dy / 2; }) .attr("dy", ".35em") .attr("text-anchor", "middle") .text(function(d) { return d.name; }) .style("fill", "#fff") .style("opacity", function(d) { d.width = this.getComputedTextLength(); return d.dx > d.width ? 1 : 0; }); }); // Change data // ------------------------------ d3.selectAll(".treemap_actions").on("change", change); // Change data function function
() { treemap.value(this.value == "size" ? size : count).nodes(root); zoom(node); } // Size function size(d) { return d.size; } // Count function count(d) { return 1; } // Zoom function zoom(d) { var kx = width / d.dx, ky = height / d.dy; x.domain([d.x, d.x + d.dx]); y.domain([d.y, d.y + d.dy]); // Cell transition var t = svg.selectAll(".d3-treemap-cell").transition() .duration(500) .attr("transform", function(d) { return "translate(" + x(d.x) + "," + y(d.y) + ")"; }); // Cell rect transition t.select("rect") .attr("width", function(d) { return kx * d.dx - 1; }) .attr("height", function(d) { return ky * d.dy - 1; }); // Text transition t.select("text") .attr("x", function(d) { return kx * d.dx / 2; }) .attr("y", function(d) { return ky * d.dy / 2; }) .style("opacity", function(d) { return kx * d.dx > d.width ? 1 : 0; }); node = d; d3.event.stopPropagation(); } // Add click event d3.select(window).on("click", function() { zoom(root); }); // Resize chart // ------------------------------ // Call function on window resize d3.select(window).on('resize', resize); // Call function on sidebar width change d3.select('.sidebar-control').on('click', resize); // Resize function // // Since D3 doesn't support SVG resize by default, // we need to manually specify parts of the graph that need to // be updated on window resize function resize() { // Layout variables width = d3Container.node().getBoundingClientRect().width; // Layout // ------------------------- // Main svg width container.attr("width", width); // Width of appended group svg.attr("width", width); // Horizontal range x.range([0, width]); // Redraw chart zoom(root); } } });
change
kv.go
// Copyright 2017 The etcd Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package namespace import ( "context" pb "go.etcd.io/etcd/api/v3/etcdserverpb" "go.etcd.io/etcd/api/v3/v3rpc/rpctypes" "go.etcd.io/etcd/client/v3" ) type kvPrefix struct { clientv3.KV pfx string } // NewKV wraps a KV instance so that all requests // are prefixed with a given string. func NewKV(kv clientv3.KV, prefix string) clientv3.KV
func (kv *kvPrefix) Put(ctx context.Context, key, val string, opts ...clientv3.OpOption) (*clientv3.PutResponse, error) { if len(key) == 0 { return nil, rpctypes.ErrEmptyKey } op := kv.prefixOp(clientv3.OpPut(key, val, opts...)) r, err := kv.KV.Do(ctx, op) if err != nil { return nil, err } put := r.Put() kv.unprefixPutResponse(put) return put, nil } func (kv *kvPrefix) Get(ctx context.Context, key string, opts ...clientv3.OpOption) (*clientv3.GetResponse, error) { if len(key) == 0 && !(clientv3.IsOptsWithFromKey(opts) || clientv3.IsOptsWithPrefix(opts)) { return nil, rpctypes.ErrEmptyKey } r, err := kv.KV.Do(ctx, kv.prefixOp(clientv3.OpGet(key, opts...))) if err != nil { return nil, err } get := r.Get() kv.unprefixGetResponse(get) return get, nil } func (kv *kvPrefix) Delete(ctx context.Context, key string, opts ...clientv3.OpOption) (*clientv3.DeleteResponse, error) { if len(key) == 0 && !(clientv3.IsOptsWithFromKey(opts) || clientv3.IsOptsWithPrefix(opts)) { return nil, rpctypes.ErrEmptyKey } r, err := kv.KV.Do(ctx, kv.prefixOp(clientv3.OpDelete(key, opts...))) if err != nil { return nil, err } del := r.Del() kv.unprefixDeleteResponse(del) return del, nil } func (kv *kvPrefix) Do(ctx context.Context, op clientv3.Op) (clientv3.OpResponse, error) { if len(op.KeyBytes()) == 0 && !op.IsTxn() { return clientv3.OpResponse{}, rpctypes.ErrEmptyKey } r, err := kv.KV.Do(ctx, kv.prefixOp(op)) if err != nil { return r, err } switch { case r.Get() != nil: kv.unprefixGetResponse(r.Get()) case r.Put() != nil: kv.unprefixPutResponse(r.Put()) case r.Del() != nil: kv.unprefixDeleteResponse(r.Del()) case r.Txn() != nil: kv.unprefixTxnResponse(r.Txn()) } return r, nil } type txnPrefix struct { clientv3.Txn kv *kvPrefix } func (kv *kvPrefix) Txn(ctx context.Context) clientv3.Txn { return &txnPrefix{kv.KV.Txn(ctx), kv} } func (txn *txnPrefix) If(cs ...clientv3.Cmp) clientv3.Txn { txn.Txn = txn.Txn.If(txn.kv.prefixCmps(cs)...) 
return txn } func (txn *txnPrefix) Then(ops ...clientv3.Op) clientv3.Txn { txn.Txn = txn.Txn.Then(txn.kv.prefixOps(ops)...) return txn } func (txn *txnPrefix) Else(ops ...clientv3.Op) clientv3.Txn { txn.Txn = txn.Txn.Else(txn.kv.prefixOps(ops)...) return txn } func (txn *txnPrefix) Commit() (*clientv3.TxnResponse, error) { resp, err := txn.Txn.Commit() if err != nil { return nil, err } txn.kv.unprefixTxnResponse(resp) return resp, nil } func (kv *kvPrefix) prefixOp(op clientv3.Op) clientv3.Op { if !op.IsTxn() { begin, end := kv.prefixInterval(op.KeyBytes(), op.RangeBytes()) op.WithKeyBytes(begin) op.WithRangeBytes(end) return op } cmps, thenOps, elseOps := op.Txn() return clientv3.OpTxn(kv.prefixCmps(cmps), kv.prefixOps(thenOps), kv.prefixOps(elseOps)) } func (kv *kvPrefix) unprefixGetResponse(resp *clientv3.GetResponse) { for i := range resp.Kvs { resp.Kvs[i].Key = resp.Kvs[i].Key[len(kv.pfx):] } } func (kv *kvPrefix) unprefixPutResponse(resp *clientv3.PutResponse) { if resp.PrevKv != nil { resp.PrevKv.Key = resp.PrevKv.Key[len(kv.pfx):] } } func (kv *kvPrefix) unprefixDeleteResponse(resp *clientv3.DeleteResponse) { for i := range resp.PrevKvs { resp.PrevKvs[i].Key = resp.PrevKvs[i].Key[len(kv.pfx):] } } func (kv *kvPrefix) unprefixTxnResponse(resp *clientv3.TxnResponse) { for _, r := range resp.Responses { switch tv := r.Response.(type) { case *pb.ResponseOp_ResponseRange: if tv.ResponseRange != nil { kv.unprefixGetResponse((*clientv3.GetResponse)(tv.ResponseRange)) } case *pb.ResponseOp_ResponsePut: if tv.ResponsePut != nil { kv.unprefixPutResponse((*clientv3.PutResponse)(tv.ResponsePut)) } case *pb.ResponseOp_ResponseDeleteRange: if tv.ResponseDeleteRange != nil { kv.unprefixDeleteResponse((*clientv3.DeleteResponse)(tv.ResponseDeleteRange)) } case *pb.ResponseOp_ResponseTxn: if tv.ResponseTxn != nil { kv.unprefixTxnResponse((*clientv3.TxnResponse)(tv.ResponseTxn)) } default: } } } func (kv *kvPrefix) prefixInterval(key, end []byte) (pfxKey []byte, pfxEnd 
[]byte) { return prefixInterval(kv.pfx, key, end) } func (kv *kvPrefix) prefixCmps(cs []clientv3.Cmp) []clientv3.Cmp { newCmps := make([]clientv3.Cmp, len(cs)) for i := range cs { newCmps[i] = cs[i] pfxKey, endKey := kv.prefixInterval(cs[i].KeyBytes(), cs[i].RangeEnd) newCmps[i].WithKeyBytes(pfxKey) if len(cs[i].RangeEnd) != 0 { newCmps[i].RangeEnd = endKey } } return newCmps } func (kv *kvPrefix) prefixOps(ops []clientv3.Op) []clientv3.Op { newOps := make([]clientv3.Op, len(ops)) for i := range ops { newOps[i] = kv.prefixOp(ops[i]) } return newOps }
{ return &kvPrefix{kv, prefix} }
crypto-currency.service.ts
import { Injectable, Logger } from '@nestjs/common'; import { CoinApiRepository } from 'src/coin-api/coin-api.repository'; import { IAssetData } from 'src/coin-api/interfaces/asset-data.interface'; import { IAssetIcon } from 'src/coin-api/interfaces/asset-icon.interface'; import { BaseError } from 'src/errors/base-error.abstract-error'; import { FilterDto } from 'src/shared/dtos/filter.dto'; import { CryptoCurrencyRepository } from './crypto-currency.repository'; import { CryptoCurrencyDto } from './dtos/crypto-currency.dto'; import { GetCryptoCurrencyBySymbolDto } from './dtos/get-crypto-currency-by-symbol.dto'; import { SearchCryptoCurrencyDto } from './dtos/search-crypto-currency.dto'; import { UpdateCryptoCurrencyDto } from './dtos/update-crypto-currency.dto'; import { IFormattedAsset } from './interfaces/formatted-asset.interface'; @Injectable() export class
{ constructor( private readonly cryptoCurrencyRepository: CryptoCurrencyRepository, private readonly coinApiRepository: CoinApiRepository, ) {} public async getOneCryptoCurrencyBySymbol( getOneEntityDto: GetCryptoCurrencyBySymbolDto, ) { return await this.cryptoCurrencyRepository.getOneEntity(getOneEntityDto); } public async getCryptoCurrencies( filter: FilterDto, ): Promise<[BaseError, CryptoCurrencyDto[]]> { return await this.cryptoCurrencyRepository.getEntities(filter); } // EXTRA public async searchCryptoCurrency( input: SearchCryptoCurrencyDto, ): Promise<[BaseError, CryptoCurrencyDto[]]> { return await this.cryptoCurrencyRepository.searchCryptoCurrency(input); } public async updateCryptoCurrencyData(): Promise<[BaseError, boolean]> { const logger = new Logger('CryptoCurrencyService'); logger.log('Updating CryptoCurrency data...'); const [err, asset] = await this.cryptoCurrencyRepository.getOneEntity({}); if (err) { return [err, null]; } const today = new Date(Date.now()).getDate(); if (asset.lastUpdated === today) { logger.log('Already up to date'); return [null, true]; } const [getCryptoErr, assets] = await this._getCryptoCurrencies(); if (getCryptoErr) { return [getCryptoErr, null]; } const updates: UpdateCryptoCurrencyDto[] = assets.map(asset => { return { getOneEntityDto: { symbol: asset.symbol, }, updateEntityPayload: { icon: asset.icon, price: asset.price, name: asset.name, symbol: asset.symbol, lastUpdated: today, }, }; }); const res = await this.cryptoCurrencyRepository.bulkUpdate(updates); logger.log('CryptoCurrency data updated'); return [null, res]; } private async _getCryptoCurrencies(): Promise< [BaseError, IFormattedAsset[]] > { try { const { data: icons } = await this.coinApiRepository.getAssetsIcons(); const { data: assets } = await this.coinApiRepository.getAssets(); const res = this.formatAssets(icons, assets); return [null, res]; } catch (error) { return [error, null]; } } private formatAssets( iconsData: IAssetIcon[], assetsData: 
IAssetData[], ): IFormattedAsset[] { const assetIconMap: Map<string, IAssetIcon> = new Map(); iconsData.forEach(el => { assetIconMap.set(el.asset_id, el); }); const formattedAssets: IFormattedAsset[] = []; assetsData.forEach(el => { if (el.type_is_crypto === 1) { const asset: IFormattedAsset = { name: el.name, price: el.price_usd ?? 0, symbol: el.asset_id, icon: assetIconMap.get(el.asset_id)?.url, }; formattedAssets.push(asset); } }); return formattedAssets; } }
CryptoCurrencyService
remove.rs
use crate::entity_id::EntityId; use crate::sparse_set::SparseSet; use crate::view::ViewMut; /// Removes component from entities. pub trait Remove { /// Type of the removed component. type Out; /// Removes component in `entity`, if the entity had a component, they will be returned. /// Multiple components can be removed at the same time using a tuple. /// /// ### Example /// ``` /// use shipyard::{Remove, ViewMut, World}; /// /// let mut world = World::new(); /// /// let entity = world.add_entity((0usize, 1u32)); /// /// let (mut usizes, mut u32s) = world.borrow::<(ViewMut<usize>, ViewMut<u32>)>().unwrap(); /// /// let old = (&mut usizes, &mut u32s).remove(entity); /// assert_eq!(old, (Some(0), Some(1))); /// ``` fn remove(&mut self, entity: EntityId) -> Self::Out; } impl Remove for () { type Out = (); #[inline] fn remove(&mut self, _: EntityId) -> Self::Out
} impl<T: 'static> Remove for ViewMut<'_, T> { type Out = Option<T>; #[inline] fn remove(&mut self, entity: EntityId) -> Self::Out { SparseSet::remove(&mut *self, entity) } } impl<T: 'static> Remove for &mut ViewMut<'_, T> { type Out = Option<T>; #[inline] fn remove(&mut self, entity: EntityId) -> Self::Out { SparseSet::remove(&mut *self, entity) } } macro_rules! impl_remove_component { ($(($storage: ident, $index: tt))+) => { impl<$($storage: Remove),+> Remove for ($($storage,)+) { type Out = ($($storage::Out,)+); #[inline] fn remove(&mut self, entity: EntityId) -> Self::Out { ($( self.$index.remove(entity), )+) } } } } macro_rules! remove_component { ($(($storage: ident, $index: tt))+; ($storage1: ident, $index1: tt) $(($queue_type: ident, $queue_index: tt))*) => { impl_remove_component![$(($storage, $index))*]; remove_component![$(($storage, $index))* ($storage1, $index1); $(($queue_type, $queue_index))*]; }; ($(($storage: ident, $index: tt))+;) => { impl_remove_component![$(($storage, $index))*]; } } remove_component![(ViewA, 0); (ViewB, 1) (ViewC, 2) (ViewD, 3) (ViewE, 4) (ViewF, 5) (ViewG, 6) (ViewH, 7) (ViewI, 8) (ViewJ, 9)];
{}
2-add-two-numbers.py
# Definition for singly-linked list. # class ListNode: # def __init__(self, val=0, next=None): # self.val = val # self.next = next class Solution: def
(self, l1: Optional[ListNode], l2: Optional[ListNode]) -> Optional[ListNode]: n = cur = ListNode(-1) carry = 0 while l1 or l2 or carry: if l1: carry += l1.val l1 = l1.next if l2: carry += l2.val l2 = l2.next cur.next = ListNode(carry % 10) cur = cur.next carry = carry // 10 return n.next
addTwoNumbers
WriteaFunction.py
def is_leap(year): leap = False
leap = False return leap year = int(input())
if year>=1900: if year%4 == 0: leap = True if year%100 == 0 and year%400 != 0: