file_name: string (length 3-137)
prefix: string (length 0-918k)
suffix: string (length 0-962k)
middle: string (length 0-812k)
0004_local_authorities_and_gss_codes.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('postcode_api', '0003_populate_postcode_area'),
    ]

    operations = [
        migrations.CreateModel(
            name='LocalAuthority',
            fields=[
                ('gss_code', models.CharField(
                    max_length=9, serialize=False, primary_key=True,
                    db_index=True)),
                ('name', models.CharField(max_length=128, db_index=True)),
            ],
            options={},
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='PostcodeGssCode',
            fields=[
                ('postcode_index', models.CharField(
                    max_length=7, db_index=True)),
                ('local_authority_gss_code', models.CharField(
                    max_length=9, serialize=False, primary_key=True,
                    db_index=True)),
            ],
            options={},
            bases=(models.Model,),
        ),
        migrations.AlterField(
            model_name='address',
            name='postcode_area',
            field=models.CharField(
                default=b'', max_length=4, db_index=True, blank=True),
            preserve_default=True,
        ),
    ]
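The migration above only creates the two empty lookup tables. A minimal sketch of a follow-up data migration that could populate them, assuming the standard Django RunPython pattern (the file name, GSS code, and authority name are illustrative):

# 0005_populate_local_authorities.py (hypothetical follow-up)
from django.db import migrations


def load_local_authorities(apps, schema_editor):
    # Use the historical model so the migration stays valid as models evolve.
    LocalAuthority = apps.get_model('postcode_api', 'LocalAuthority')
    LocalAuthority.objects.get_or_create(
        gss_code='E09000033', defaults={'name': 'Westminster'})


class Migration(migrations.Migration):

    dependencies = [
        ('postcode_api', '0004_local_authorities_and_gss_codes'),
    ]

    operations = [
        migrations.RunPython(load_local_authorities, migrations.RunPython.noop),
    ]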
list_test.go
package single

import (
	"strings"
	"testing"

	"github.com/stretchr/testify/require"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

func reverse(s []string) {
	for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 {
		s[i], s[j] = s[j], s[i]
	}
}

func TestInsertHead(t *testing.T) {
	tests := []struct {
		sep  string
		want string
	}{
		{sep: "hello"},
	}
	for _, test := range tests {
		var want []string
		for _, s := range test.sep {
			want = append(want, string(s))
		}
		reverse(want)
		test.want = strings.Join(want, "->")

		l := NewLinkList()
		for _, str := range test.sep {
			l.InsertHead(string(str))
		}
		out := l.Print()
		if test.want != out {
			t.Errorf("err: LIST: %q,out=%q,want %q", test.sep, out, test.want)
		}
		t.Logf("info: LIST: %q,length=%d", test.sep, l.len)
	}
}

func TestInsertTail(t *testing.T) {
	tests := []struct {
		sep  string
		want string
	}{
		{sep: "hello"},
	}
	for _, test := range tests {
		var want []string
		for _, s := range test.sep {
			want = append(want, string(s))
		}
		test.want = strings.Join(want, "->")

		l := NewLinkList()
		for _, str := range test.sep {
			l.InsertTail(string(str))
		}
		out := l.Print()
		if test.want != out {
			t.Errorf("err: LIST: %q,out=%q,want %q", test.sep, out, test.want)
		}
		t.Logf("info: LIST: %q,length=%d", test.sep, l.len)
	}
}

func TestSearchListByNode(t *testing.T) {
	l := NewLinkList()
	strs := "hello"
	for _, str := range strs {
		l.InsertTail(string(str))
	}
	tests := []struct {
		sep string
	}{
		{sep: "h"}, {sep: "o"}, {sep: "a"}, {sep: "c"},
	}
	for _, test := range tests {
		if !strings.Contains(strs, test.sep) {
			continue
		}
		node := l.SearchListByValue(test.sep)
		require.NotNilf(t, node, "LIST: %q, %q should be found", strs, test.sep)
		out := l.SearchListByNode(node)
		require.Truef(t, out, "LIST: %q, %q should be found", strs, test.sep)
	}
}

func TestSearchListByValue(t *testing.T) {
	l := NewLinkList()
	node := l.SearchListByValue("h")
	require.Nil(t, node)

	strs := "hello"
	for _, str := range strs {
		l.InsertTail(string(str))
	}
	tests := []struct {
		sep string
	}{
		{sep: "h"}, {sep: "o"}, {sep: "a"}, {sep: "c"},
	}
	for _, test := range tests {
		node := l.SearchListByValue(test.sep)
		if !strings.Contains(strs, test.sep) {
			require.Nil(t, node)
			continue
		}
		require.NotNilf(t, node, "LIST: %q, %q should be found", strs, test.sep)
	}
}

func TestSwap(t *testing.T) {
	l := NewLinkList()
	strs := "6415273"
	for _, str := range strs {
		l.InsertTail(string(str))
	}
	require.Equal(t, "6->4->1->5->2->7->3", l.Print())

	l.Swap(l.head, l.head.next)
	require.Equal(t, "4->6->1->5->2->7->3", l.Print())

	node := l.head
	for ; nil != node.next.next.next; node = node.next {
	}
	require.Equal(t, "2", node.value)
	l.Swap(node, node.next)
	require.Equal(t, "4->6->1->5->2->3->7", l.Print())
}

func TestDeleteNodeByValue(t *testing.T) {
	l := NewLinkList()
	strs := "hello"
	for _, str := range strs {
		l.InsertTail(string(str))
	}
	tests := []struct {
		sep string
	}{
		{sep: "o"}, {sep: "l"}, {sep: "l"}, {sep: "a"}, {sep: "e"}, {sep: "h"},
	}
	for _, test := range tests {
		out := l.DeleteNodeByValue(test.sep)
		if strings.Contains(strs, test.sep) {
			require.Equal(t, true, out)
		} else {
			require.Equal(t, false, out)
		}
	}
}

func TestPrint(t *testing.T) {
	tests := []struct {
		sep  string
		want string
	}{
		{sep: "hello"},
		{sep: "world,lee"},
	}
	for _, test := range tests {
		var want []string
		for _, s := range test.sep {
			want = append(want, string(s))
		}
		test.want = strings.Join(want, "->")

		l := NewLinkList()
		for _, str := range test.sep {
			l.InsertTail(string(str))
		}
		out := l.Print()
		if test.want != out {
			t.Errorf("LIST: %q,out=%q,want %q", test.sep, out, test.want)
		}
	}
}

func TestReverse(t *testing.T) {
	tests := []struct {
		sep  string
		want string
	}{
		{sep: ""}, {sep: "h"}, {sep: "ab"}, {sep: "abc"}, {sep: "abcdef"},
	}
	for _, test := range tests {
		var want []string
		for _, s := range test.sep {
			want = append(want, string(s))
		}
		reverse(want)
		test.want = strings.Join(want, "->")

		l := NewLinkList()
		for _, str := range test.sep {
			l.InsertTail(string(str))
		}
		l.Reverse()
		require.Equal(t, test.want, l.Print())
	}
}

func TestHasCycle1(t *testing.T) {
	l := NewLinkList()
	for _, str := range "abcdef" {
		l.InsertTail(string(str))
	}
	node := l.head
	var tmp *ListNode
	var i int
	for ; nil != node.next; node = node.next {
		if 3 == i {
			tmp = node
		}
		i++
	}

	// the next of the last node is the head, so the list has a cycle
	node.next = l.head
	out := l.HasCycle1()
	require.Equal(t, true, out)

	// the next of the last node is itself, so the list has a cycle
	node.next = node
	out = l.HasCycle1()
	require.Equal(t, true, out)

	// the next of the last node is a node between the first and the last, so the list has a cycle
	node.next = tmp
	out = l.HasCycle1()
	require.Equal(t, true, out)

	// dismantle the cycle
	node.next = nil
	out = l.HasCycle1()
	require.Equal(t, false, out)
}

var _ = Describe("TestHasCycle2", func() {
	It("only head node", func() {
		l := NewLinkList()
		out := l.HasCycle2()
		Expect(out).Should(Equal(false))
	})
	It("only one node except for the head", func() {
		l := NewLinkList()
		for _, str := range "a" {
			l.InsertTail(string(str))
		}
		node := l.head.next

		// the next of the last node is the head, so the list has a cycle
		node.next = l.head
		out := l.HasCycle2()
		Expect(out).Should(Equal(true))

		// dismantle the cycle
		node.next = nil
		out = l.HasCycle2()
		Expect(out).Should(Equal(false))
	})
	It("at least two nodes except for the head", func() {
		l := NewLinkList()
		for _, str := range "abcdef" {
			l.InsertTail(string(str))
		}
		node := l.head
		var tmp *ListNode
		var i int
		for ; nil != node.next; node = node.next {
			if 3 == i {
				tmp = node
			}
			i++
		}

		// the next of the last node is the head, so the list has a cycle
		node.next = l.head
		out := l.HasCycle2()
		Expect(out).Should(Equal(true))

		// the next of the last node is itself, so the list has a cycle
		node.next = node
		out = l.HasCycle2()
		Expect(out).Should(Equal(true))

		// the next of the last node is a node between the first and the last, so the list has a cycle
		node.next = tmp
		out = l.HasCycle2()
		Expect(out).Should(Equal(true))

		// dismantle the cycle
		node.next = nil
		out = l.HasCycle2()
		Expect(out).Should(Equal(false))
	})
})

var _ = Describe("TestMergeSortedList", func() {
	It("l1 and l2 have no nodes, the new list has no nodes", func() {
		l1 := NewLinkList()
		l2 := NewLinkList()
		l3 := l1.MergeSortedList(l2)
		Expect(len(l3.Print())).Should(Equal(0))
		Expect(l3.len).Should(Equal(uint32(0)))
	})
	It("l1 has no nodes and l2 has nodes, the new list has l2's nodes", func() {
		l1 := NewLinkList()
		l2 := NewLinkList()
		for _, str := range "a" {
			l2.InsertTail(string(str))
		}
		l3 := l1.MergeSortedList(l2)
		Expect(l3.Print()).Should(Equal("a"))
		Expect(l3.len).Should(Equal(uint32(1)))
	})
	It("l2 has no nodes and l1 has nodes, the new list has l1's nodes", func() {
		l1 := NewLinkList()
		l2 := NewLinkList()
		for _, str := range "c" {
			l1.InsertTail(string(str))
		}
		l3 := l1.MergeSortedList(l2)
		Expect(l3.Print()).Should(Equal("c"))
		Expect(l3.len).Should(Equal(uint32(1)))
	})
	It("l1 and l2 both have nodes, the new list has l1+l2's nodes", func() {
		l1 := NewLinkList()
		l2 := NewLinkList()
		for _, str := range "1357" {
			l1.InsertTail(string(str))
		}
		for _, str := range "2468" {
			l2.InsertTail(string(str))
		}
		l3 := l1.MergeSortedList(l2)
		Expect(l3.Print()).Should(Equal("1->2->3->4->5->6->7->8"))
		Expect(l3.len).Should(Equal(uint32(8)))

		l1 = NewLinkList()
		l2 = NewLinkList()
		for _, str := range "13" {
			l1.InsertTail(string(str))
		}
		for _, str := range "2468" {
			l2.InsertTail(string(str))
		}
		l3 = l1.MergeSortedList(l2)
		Expect(l3.Print()).Should(Equal("1->2->3->4->6->8"))
		Expect(l3.len).Should(Equal(uint32(6)))
	})
})

var _ = Describe("TestDeleteBottomN", func() {
	It("only head node", func() {
		l := NewLinkList()
		err := l.DeleteBottomN(2)
		Expect(err).Should(MatchError("N can not be greater than the length of the list"))
	})
	It("only one node except for the head", func() {
		l := NewLinkList()
		for _, str := range "a" {
			l.InsertTail(string(str))
		}
		err := l.DeleteBottomN(1)
		Expect(err).NotTo(HaveOccurred())
		Expect(l.Print()).Should(Equal(""))
	})
	It("at least two nodes except for the head", func() {
		l := NewLinkList()
		for _, str := range "abcde" {
			l.InsertTail(string(str))
		}

		// delete the first node from the bottom
		err := l.DeleteBottomN(1)
		Expect(err).NotTo(HaveOccurred())
		Expect(l.Print()).Should(Equal("a->b->c->d"))

		// delete the first node
		err = l.DeleteBottomN(4)
		Expect(err).NotTo(HaveOccurred())
		Expect(l.Print()).Should(Equal("b->c->d"))
	})
})

var _ = Describe("TestDeleteBottomN1", func() {
	It("only head node", func() {
		l := NewLinkList()
		err := l.DeleteBottomN1(2)
		Expect(err).Should(MatchError("N can not be greater than the length of the list"))
	})
	It("only one node except for the head", func() {
		l := NewLinkList()
		for _, str := range "a" {
			l.InsertTail(string(str))
		}
		err := l.DeleteBottomN1(1)
		Expect(err).NotTo(HaveOccurred())
		Expect(l.Print()).Should(Equal(""))
	})
	It("at least two nodes except for the head", func() {
		l := NewLinkList()
		for _, str := range "abcde" {
			l.InsertTail(string(str))
		}

		// delete the first node from the bottom
		err := l.DeleteBottomN1(1)
		Expect(err).NotTo(HaveOccurred())
		Expect(l.Print()).Should(Equal("a->b->c->d"))

		// delete the first node
		err = l.DeleteBottomN1(4)
		Expect(err).NotTo(HaveOccurred())
		Expect(l.Print()).Should(Equal("b->c->d"))
	})
})

var _ = Describe("FindMiddleNode", func() {
	It("only head node", func() {
		l := NewLinkList()
		var nilNode *ListNode
		out := l.FindMiddleNode()
		Expect(out).Should(Equal(nilNode))
	})
	It("only one node except for the head", func() {
		l := NewLinkList()
		for _, str := range "a" {
			l.InsertTail(string(str))
		}
		out := l.FindMiddleNode()
		Expect(out.value).Should(Equal("a"))
	})
	It("at least two nodes except for the head", func() {
		l := NewLinkList()
		for _, str := range "abcdef" {
			l.InsertTail(string(str))
		}
		out := l.FindMiddleNode()
		Expect(out.value).Should(Equal("c"))

		l = NewLinkList()
		for _, str := range "abcdefg" {
			l.InsertTail(string(str))
		}
		out = l.FindMiddleNode()
		Expect(out.value).Should(Equal("d"))
	})
})
FastaParser.ts
namespace MIME {

    /**
     * The fasta sequence parser and data model
    */
    export interface FastaSeq {
        headers: string[];
        sequence: string;
    }

    export function ParseFasta(stream: string): FastaSeq[] {
        const seq: FastaSeq[] = [];
        // split on newlines with a regular expression and drop blank lines
        const lines: string[] = $from(stream.split(/\n/))
            .Where(l => !Strings.Empty(l, true))
            .ToArray();
        let header: string;
        let seqBuffer: string = "";
        const isnull = function () {
            return Strings.Empty(header) && Strings.Empty(seqBuffer);
        }

        for (let i: number = 0; i < lines.length; i++) {
            const line: string = lines[i];

            if (line.charAt(0) == ">") {
                // the start of a new sequence
                if (!isnull()) {
                    seq.push(<FastaSeq>{
                        headers: header.split("|"),
                        sequence: seqBuffer
                    });
                }
                header = line.substr(1);
                seqBuffer = "";
            } else {
                seqBuffer = seqBuffer + line;
            }
        }

        if (!isnull()) {
            seq.push(<FastaSeq>{
                headers: header.split("|"),
                sequence: seqBuffer
            });
        }

        return seq;
    }
}
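For comparison, the same split-and-accumulate parse sketched in Python (a hypothetical helper, not part of the TypeScript module; lines before the first ">" are ignored here):

def parse_fasta(stream):
    seqs, header, buf = [], None, ""
    for line in (l for l in stream.split("\n") if l.strip()):
        if line.startswith(">"):          # start of a new sequence
            if header is not None:
                seqs.append({"headers": header.split("|"), "sequence": buf})
            header, buf = line[1:], ""
        else:
            buf += line
    if header is not None:
        seqs.append({"headers": header.split("|"), "sequence": buf})
    return seqs

# parse_fasta(">sp|P12345\nMKV\nLAA\n")
# -> [{'headers': ['sp', 'P12345'], 'sequence': 'MKVLAA'}]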
debug.js
const debug = require('debug');
module.exports = module => debug(`${global.APP_NAME}:${module}`);
sender.py
#!/usr/bin/python3
from Sprko.ui import arguments, console
from requests import post as sendPostRequest
from urllib3 import disable_warnings
from modules import readData

# skip ssl errors
disable_warnings()

msg = console.msg

telegram_config = readData.sender('telegram')
telegram_bot_api = telegram_config['bot_api']
telegram_chat_id = telegram_config['chat_id']
slack_config = readData.sender('slack')
slack_secret = slack_config['secret']


class contact():

    def telegram(telegram_bot, telegram_chat, message):
        if telegram_config['enable']:
            telegram_api_url = "https://api.telegram.org/bot{}/sendMessage".format(str(telegram_bot))
            post_data = {"chat_id": str(telegram_chat), "text": str(message)}
            try:
                response = sendPostRequest(telegram_api_url, data=post_data, verify=False)
                if str(response.status_code)[0] in ("3", "4", "5"):
                    print(msg.faield("Telegram send notification"))
            except Exception as e:
                with open("log/errors.log", "a+") as log_file:
                    log_file.writelines("error sender.py [telegram]: " + telegram_api_url + ": " + str(e) + "\n")

    def slack(slack_secret, message):
        if slack_config['enable']:
            slack_api_url = "https://hooks.slack.com/services/{}".format(slack_secret)
            json_data = {"text": str(message)}
            try:
                response = sendPostRequest(slack_api_url, json=json_data, verify=False)
                if str(response.status_code)[0] in ("3", "4", "5"):
                    print(msg.faield("Slack send notification"))
            except Exception as e:
                with open("log/errors.log", "a+") as log_file:
                    log_file.writelines("error sender.py: [slack]" + slack_api_url + ": " + str(e) + "\n")
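A quick usage sketch, assuming both channels are enabled in the config read by readData.sender (the message text is illustrative):

if __name__ == "__main__":
    message = "scan finished"
    # both helpers are plain functions on the class, called without an instance
    contact.telegram(telegram_bot_api, telegram_chat_id, message)
    contact.slack(slack_secret, message)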
website_script.py
# Copyright (c) 2015, Dataent Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt

from __future__ import unicode_literals
import dataent
from dataent.utils import strip
from dataent.website.doctype.website_theme.website_theme import get_active_theme

no_sitemap = 1
base_template_path = "templates/www/website_script.js"


def get_context(context):
    context.javascript = dataent.db.get_single_value('Website Script', 'javascript') or ""

    theme = get_active_theme()
    js = strip(theme and theme.js or "")
    if js:
        context.javascript += "\n" + js

    if not dataent.conf.developer_mode:
        context["google_analytics_id"] = (dataent.db.get_single_value("Website Settings", "google_analytics_id")
            or dataent.conf.get("google_analytics_id"))
raw.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Unix-specific primitives available on all unix platforms

#![stable(feature = "raw_ext", since = "1.1.0")]
#![rustc_deprecated(since = "1.8.0",
                    reason = "these type aliases are no longer supported by \
                              the standard library, the `libc` crate on \
                              crates.io should be used instead for the correct \
                              definitions")]
#![allow(deprecated)]

#[stable(feature = "raw_ext", since = "1.1.0")] pub type uid_t = u32;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type gid_t = u32;
#[stable(feature = "raw_ext", since = "1.1.0")] pub type pid_t = i32;

#[doc(inline)]
#[stable(feature = "pthread_t", since = "1.8.0")]
pub use sys::platform::raw::pthread_t;
#[doc(inline)]
#[stable(feature = "raw_ext", since = "1.1.0")]
pub use sys::platform::raw::{dev_t, ino_t, mode_t, nlink_t, off_t, blksize_t};
#[doc(inline)]
#[stable(feature = "raw_ext", since = "1.1.0")]
pub use sys::platform::raw::{blkcnt_t, time_t};
sceneInfoController.js
import SceneInfoView from '../views/sceneInfoView.js';
import { NarrativeEvents } from '../aux/consts.js';
import { extractEntities } from '../aux/extractEntities.js';

const SceneInfoController = (narrative) => {
  const sceneInfoView = SceneInfoView();

  function init() {
    sceneInfoView.resetFocusEvent.addListener(resetSceneFocus);
    sceneInfoView.scrollEvent.addListener(scrollScenes);
    narrative
      .getNarrativeEvent(NarrativeEvents.ACTIVE_SCENE_CHANGE_EVENT)
      .addListener(showActiveScene);
  }

  function resetSceneFocus() {
    narrative.resetSceneFocus();
  }

  function scrollScenes(direction) {
    narrative.scrollScene(direction);
  }

  function showActiveScene(scene) {
    sceneInfoView.update(
      scene ? scene.title : undefined,
      scene ? scene.date : undefined,
      scene
        ? extractEntities(
            scene.description,
            scene.characters.map((character) => ({
              name: character.name,
              color:
                character.affiliation === 'none'
                  ? character.color
                  : character.affiliation,
              synonyms: character.synonyms,
            }))
          )
        : undefined,
      scene && scene.location ? scene.location.where : undefined
    );
  }

  function run() {
    sceneInfoView.init();
  }

  return {
    init,
    run,
  };
};

export default SceneInfoController;
keyable.rs
//! The implementation for #[derive(Keyable)]

use crate::attr::{FieldKind, Fields};
use proc_macro2::Ident;
use quote::{quote, quote_spanned};
use syn::{spanned::Spanned, Data, DataStruct};

static DERIVED_MIRROR_STRUCT_PREFIX: &str = "KeyableDerivedMirrorOf_";

pub(crate) fn derive_keyable_impl(
    input: syn::DeriveInput,
) -> Result<proc_macro2::TokenStream, syn::Error> {
    match &input.data {
        Data::Struct(s) => derive_struct(&input, s),
        Data::Enum(e) => Err(syn::Error::new(
            e.enum_token.span(),
            "Keyable cannot currently be derived for enums",
        )),
        Data::Union(u) => Err(syn::Error::new(
            u.union_token.span(),
            "Data implementations cannot be derived from unions",
        )),
    }
}

fn derive_struct(
    input: &syn::DeriveInput,
    s: &DataStruct,
) -> Result<proc_macro2::TokenStream, syn::Error> {
    let ident = &input.ident;
    let impl_generics = add_generic_bounds(&input.generics, quote!(::keypath::Keyable));
    let (_, ty_generics, where_clause) = &input.generics.split_for_impl();
    let fields = Fields::parse_ast(&s.fields)?;

    let get_field_arms = fields.iter().map(|fld| fld.match_arms(quote!(get_field)));
    let get_mut_field_arms = fields
        .iter()
        .map(|fld| fld.match_arms(quote!(get_field_mut)));

    let (fragment_decl, typed_trait_decl) =
        mirror_struct(ident, &input.vis, &input.generics, &fields)?;

    let res = quote! {
        impl<#impl_generics> ::keypath::internals::RawKeyable for #ident #ty_generics #where_clause {
            fn as_any(&self) -> &dyn ::std::any::Any {
                self
            }

            fn as_any_mut(&mut self) -> &mut dyn ::std::any::Any {
                self
            }

            fn get_field(&self, ident: &[::keypath::internals::PathComponent]) -> Result<&dyn ::keypath::internals::RawKeyable, ::keypath::FieldError> {
                match ident.split_first() {
                    None => Ok(self),
                    #( #get_field_arms )*
                    Some((field, rest)) => Err(
                        ::keypath::FieldErrorKind::InvalidField(field.clone()).into_error(self, rest.len())
                    ),
                }
            }

            fn get_field_mut(&mut self, ident: &[::keypath::internals::PathComponent]) -> Result<&mut dyn ::keypath::internals::RawKeyable, ::keypath::FieldError> {
                match ident.split_first() {
                    None => Ok(self),
                    #( #get_mut_field_arms )*
                    Some((field, rest)) => Err(
                        ::keypath::FieldErrorKind::InvalidField(field.clone()).into_error(self, rest.len())
                    ),
                }
            }
        }

        #fragment_decl

        impl<#impl_generics> ::keypath::Keyable for #ident #ty_generics #where_clause {
            #typed_trait_decl
        }

        impl<Value: 'static, #impl_generics> std::ops::Index<&::keypath::KeyPath<#ident #ty_generics, Value>> for #ident #ty_generics #where_clause {
            type Output = Value;
            fn index(&self, index: &::keypath::KeyPath<#ident #ty_generics, Value>) -> &Self::Output {
                self.item_at_path(index)
            }
        }

        impl<Value: 'static, #impl_generics> std::ops::IndexMut<&::keypath::KeyPath<#ident #ty_generics, Value>> for #ident #ty_generics #where_clause {
            fn index_mut(&mut self, index: &::keypath::KeyPath<#ident #ty_generics, Value>) -> &mut Self::Output {
                self.item_at_path_mut(index)
            }
        }
    };
    Ok(res)
}

fn mirror_struct(
    base_ident: &Ident,
    base_vis: &syn::Visibility,
    generics: &syn::Generics,
    fields: &Fields,
) -> Result<(proc_macro2::TokenStream, proc_macro2::TokenStream), syn::Error> {
    let (_, ty_generics, _) = generics.split_for_impl();
    let impl_generics = add_generic_bounds(generics, quote!(::keypath::Keyable));
    let mirror_ident = mirror_ident_for_base_ident(base_ident);
    let field_decls = fields.generate_mirror_decls();

    let struct_decl = match fields.kind {
        FieldKind::Named => {
            quote!(pub struct #mirror_ident <#impl_generics>{#field_decls})
        }
        FieldKind::Unnamed => {
            quote!(pub struct #mirror_ident <#impl_generics>(#field_decls);)
        }
    };
    let struct_decl = quote!(#[allow(non_camel_case_types)] #struct_decl);

    let generic_idents = get_generic_idents(generics);
    let optional_const_token = if generic_idents.is_empty() {
        quote!(const)
    } else {
        proc_macro2::TokenStream::new()
    };

    let struct_field_init = fields.generate_mirror_inits(&generic_idents);
    let struct_init = match fields.kind {
        FieldKind::Named => quote!(Self {#struct_field_init}),
        FieldKind::Unnamed => quote!(Self (#struct_field_init)),
    };

    let tokens = quote!(
        #struct_decl

        impl<#impl_generics> #mirror_ident #ty_generics {
            #optional_const_token fn new() -> Self {
                #struct_init
            }

            #base_vis #optional_const_token fn to_key_path_with_root<Root>(self, fields: &'static [::keypath::internals::PathComponent]) -> ::keypath::KeyPath<Root, #base_ident #ty_generics> {
                ::keypath::KeyPath::__conjure_from_abyss(fields)
            }
        }
    );

    let trait_impl = quote!(
        type Mirror = #mirror_ident #ty_generics;
        fn mirror() -> #mirror_ident #ty_generics {
            #mirror_ident::new()
        }
    );

    Ok((tokens, trait_impl))
}

fn mirror_ident_for_base_ident(ident: &Ident) -> Ident {
    Ident::new(
        &format!("{}{}", DERIVED_MIRROR_STRUCT_PREFIX, ident),
        ident.span(),
    )
}

fn get_generic_idents(generics: &syn::Generics) -> Vec<Ident> {
    generics
        .params
        .iter()
        .filter_map(|gp| match gp {
            syn::GenericParam::Type(ty) => Some(ty.ident.clone()),
            _ => None,
        })
        .collect()
}

fn add_generic_bounds(
    generics: &syn::Generics,
    with_bounds: proc_macro2::TokenStream,
) -> proc_macro2::TokenStream {
    let res = generics.params.iter().map(|gp| {
        use syn::GenericParam::*;
        match gp {
            Type(ty) => {
                let ident = &ty.ident;
                let bounds = &ty.bounds;
                if bounds.is_empty() {
                    quote_spanned!(ty.span()=> #ident : #with_bounds)
                } else {
                    quote_spanned!(ty.span()=> #ident : #bounds + #with_bounds)
                }
            }
            Lifetime(lf) => quote!(#lf),
            Const(cst) => quote!(#cst),
        }
    });
    quote!( #( #res, )* )
}
config.rs
// Copyright 2020 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use { anyhow::{format_err, Context, Error}, cm_rust::{CapabilityName, CapabilityTypeName, FidlIntoNative}, cm_types::{Name, Url}, fidl::encoding::decode_persistent, fidl_fuchsia_component_decl as fdecl, fidl_fuchsia_component_internal::{ self as component_internal, BuiltinBootResolver, BuiltinPkgResolver, CapabilityPolicyAllowlists, DebugRegistrationPolicyAllowlists, LogDestination, OutDirContents, RealmBuilderResolverAndRunner, }, moniker::{AbsoluteMoniker, AbsoluteMonikerBase, ExtendedMoniker, MonikerError}, std::{ collections::{HashMap, HashSet}, convert::TryFrom, iter::FromIterator, path::Path, str::FromStr, }, thiserror::Error, }; /// Runtime configuration options. /// This configuration intended to be "global", in that the same configuration /// is applied throughout a given running instance of component_manager. #[derive(Debug, PartialEq, Eq)] pub struct RuntimeConfig { /// How many children, maximum, are returned by a call to `ChildIterator.next()`. pub list_children_batch_size: usize, /// Security policy configuration. pub security_policy: SecurityPolicy, /// If true, component manager will be in debug mode. In this mode, component manager /// provides the `EventSource` protocol and exposes this protocol. The root component /// must be manually started using the LifecycleController protocol in the hub. /// /// This is done so that an external component (say an integration test) can subscribe /// to events before the root component has started. pub debug: bool, /// If true, component_manager will serve an instance of fuchsia.process.Launcher and use this /// launcher for the built-in ELF component runner. The root component can additionally /// use and/or offer this service using '/builtin/fuchsia.process.Launcher' from realm. // This flag exists because the built-in process launcher *only* works when // component_manager runs under a job that has ZX_POL_NEW_PROCESS set to allow, like the root // job. Otherwise, the component_manager process cannot directly create process through // zx_process_create. When we run component_manager elsewhere, like in test environments, it // has to use the fuchsia.process.Launcher service provided through its namespace instead. pub use_builtin_process_launcher: bool, /// If true, component_manager will maintain a UTC kernel clock and vend write handles through /// an instance of `fuchsia.time.Maintenance`. This flag should only be used with the top-level /// component_manager. pub maintain_utc_clock: bool, // The number of threads to use for running component_manager's executor. // Value defaults to 1. pub num_threads: usize, /// The list of capabilities offered from component manager's namespace. pub namespace_capabilities: Vec<cm_rust::CapabilityDecl>, /// The list of capabilities offered from component manager as built-in capabilities. pub builtin_capabilities: Vec<cm_rust::CapabilityDecl>, /// Which builtin resolver to use for the fuchsia-pkg scheme. If not supplied this defaults to /// the NONE option. pub builtin_pkg_resolver: BuiltinPkgResolver, /// Determine what content to expose through the component manager's /// outgoing directory. pub out_dir_contents: OutDirContents, /// URL of the root component to launch. This field is used if no URL /// is passed to component manager. If value is passed in both places, then /// an error is raised. 
pub root_component_url: Option<Url>, /// Path to the component ID index, parsed from /// `fuchsia.component.internal.RuntimeConfig.component_id_index_path`. pub component_id_index_path: Option<String>, /// Where to log to. pub log_destination: LogDestination, /// If true, component manager will log all events dispatched in the topology. pub log_all_events: bool, /// Which builtin resolver to use for the fuchsia-boot scheme. If not supplied this defaults to /// the NONE option. pub builtin_boot_resolver: BuiltinBootResolver, /// If true, allow components to set the `OnTerminate=REBOOT` option. /// /// This lets a parent component designate that the system should reboot if a child terminates /// (except when it's shut down). pub reboot_on_terminate_enabled: bool, /// If and how the realm builder resolver and runner are enabled. pub realm_builder_resolver_and_runner: RealmBuilderResolverAndRunner, } /// A single security policy allowlist entry. #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub enum AllowlistEntry { /// Allow the component with this exact AbsoluteMoniker. /// Example string form in config: "/foo/bar", "/foo/bar/baz" Exact(AbsoluteMoniker), /// Allow any components that are children of this AbsoluteMoniker. In other words, a /// prefix match against the target moniker. /// Example string form in config: "/foo/**", "/foo/bar/**" Realm(AbsoluteMoniker), /// Allow any components that are in AbsoluteMoniker's collection with the given name. /// Also a prefix match against the target moniker but additionally scoped to a specific /// collection. /// Example string form in config: "/foo/tests:**", "/bootstrap/drivers:**" Collection(AbsoluteMoniker, String), } /// Runtime security policy. #[derive(Debug, Default, PartialEq, Eq)] pub struct SecurityPolicy { /// Allowlists for Zircon job policy. pub job_policy: JobPolicyAllowlists, /// Capability routing policies. The key contains all the information required /// to uniquely identify any routable capability and the set of monikers /// define the set of component paths that are allowed to access this specific /// capability. pub capability_policy: HashMap<CapabilityAllowlistKey, HashSet<AllowlistEntry>>, /// Debug Capability routing policies. The key contains all the information required /// to uniquely identify any routable capability and the set of (monikers, environment_name) /// define the set of components which were allowed to register it as a debug capability in /// their environment `environment_name`. pub debug_capability_policy: HashMap<CapabilityAllowlistKey, HashSet<(AbsoluteMoniker, String)>>, /// Allowlists component child policy. These allowlists control what components are allowed /// to set privileged options on their children. pub child_policy: ChildPolicyAllowlists, } /// Allowlists for Zircon job policy. Part of runtime security policy. #[derive(Debug, Default, PartialEq, Eq)] pub struct JobPolicyAllowlists { /// Entries for components allowed to be given the ZX_POL_AMBIENT_MARK_VMO_EXEC job policy. /// /// Components must request this policy by including "job_policy_ambient_mark_vmo_exec: true" in /// their manifest's program object and must be using the ELF runner. /// This is equivalent to the v1 'deprecated-ambient-replace-as-executable' feature. pub ambient_mark_vmo_exec: Vec<AllowlistEntry>, /// Entries for components allowed to have their original process marked as critical to /// component_manager's job. 
/// /// Components must request this critical marking by including "main_process_critical: true" in /// their manifest's program object and must be using the ELF runner. pub main_process_critical: Vec<AllowlistEntry>, /// Entries for components allowed to call zx_process_create directly (e.g., do not have /// ZX_POL_NEW_PROCESS set to ZX_POL_ACTION_DENY). /// /// Components must request this policy by including "job_policy_create_raw_processes: true" in /// their manifest's program object and must be using the ELF runner. pub create_raw_processes: Vec<AllowlistEntry>, } /// Allowlists for child option policy. Part of runtime security policy. #[derive(Debug, Default, PartialEq, Eq, Clone)] pub struct ChildPolicyAllowlists { /// Absolute monikers of component instances allowed to have the /// `on_terminate=REBOOT` in their `children` declaration. pub reboot_on_terminate: Vec<AllowlistEntry>, } /// The available capability sources for capability allow lists. This is a strict /// subset of all possible Ref types, with equality support. #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub enum CapabilityAllowlistSource { Self_, Framework, Capability, } /// Allowlist key for capability routing policy. Part of the runtime /// security policy. This defines all the required keying information to lookup /// whether a capability exists in the policy map or not. #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub struct CapabilityAllowlistKey { pub source_moniker: ExtendedMoniker, pub source_name: CapabilityName, pub source: CapabilityAllowlistSource, pub capability: CapabilityTypeName, } impl Default for RuntimeConfig { fn default() -> Self { Self { list_children_batch_size: 1000, // security_policy must default to empty to ensure that it fails closed if no // configuration is present or it fails to load. security_policy: Default::default(), debug: false, use_builtin_process_launcher: false, maintain_utc_clock: false, num_threads: 1, namespace_capabilities: vec![], builtin_capabilities: vec![], builtin_pkg_resolver: BuiltinPkgResolver::None, out_dir_contents: OutDirContents::None, root_component_url: Default::default(), component_id_index_path: None, log_destination: LogDestination::Syslog, log_all_events: false, builtin_boot_resolver: BuiltinBootResolver::None, reboot_on_terminate_enabled: false, realm_builder_resolver_and_runner: RealmBuilderResolverAndRunner::None, } } } impl RuntimeConfig { /// Load RuntimeConfig from the '--config' command line arg. Path must /// point to binary encoded fuchsia.component.internal.Config file. /// Otherwise, an Error is returned. pub fn load_from_file<P: AsRef<Path>>(path: P) -> Result<Self, Error> { let raw_content = std::fs::read(path)?; Ok(Self::try_from(decode_persistent::<component_internal::Config>(&raw_content)?)?) } pub fn load_from_bytes(bytes: &Vec<u8>) -> Result<Self, Error> { Ok(Self::try_from(decode_persistent::<component_internal::Config>(&bytes)?)?) 
} fn translate_namespace_capabilities( capabilities: Option<Vec<fdecl::Capability>>, ) -> Result<Vec<cm_rust::CapabilityDecl>, Error> { let capabilities = capabilities.unwrap_or(vec![]); if let Some(c) = capabilities.iter().find(|c| { !matches!(c, fdecl::Capability::Protocol(_) | fdecl::Capability::Directory(_)) }) { return Err(format_err!("Type unsupported for namespace capability: {:?}", c)); } cm_fidl_validator::validate_capabilities(&capabilities, false)?; Ok(capabilities.into_iter().map(FidlIntoNative::fidl_into_native).collect()) } fn translate_builtin_capabilities( capabilities: Option<Vec<fdecl::Capability>>, ) -> Result<Vec<cm_rust::CapabilityDecl>, Error> { let capabilities = capabilities.unwrap_or(vec![]); cm_fidl_validator::validate_capabilities(&capabilities, true)?; Ok(capabilities.into_iter().map(FidlIntoNative::fidl_into_native).collect()) } } #[derive(Debug, Clone, Error, PartialEq, Eq)] pub enum AllowlistEntryError { #[error("Moniker parsing error in realm allowlist entry: {0:?}")] RealmEntryInvalidMoniker(String, #[source] MonikerError), #[error("Collection allowlist entry missing a realm: {0:?}")] CollectionEntryMissingRealm(String), #[error("Invalid collection name ({1:?}) in allowlist entry: {0:?}")] InvalidCollectionName(String, String), #[error("Moniker parsing error in collection allowlist entry: {0:?}")] CollectionEntryInvalidMoniker(String, #[source] MonikerError), #[error("Moniker parsing error in allowlist entry: {0:?}")] OtherInvalidMoniker(String, #[source] MonikerError), } fn parse_allowlist_entries(strs: &Option<Vec<String>>) -> Result<Vec<AllowlistEntry>, Error> { let strs = match strs { Some(strs) => strs, None => return Ok(Vec::new()), }; strs.iter() .map(|s| { if let Some(prefix) = s.strip_suffix("/**") { let realm = if prefix.is_empty() { AbsoluteMoniker::root() } else { AbsoluteMoniker::parse_str(prefix) .map_err(|e| AllowlistEntryError::RealmEntryInvalidMoniker(s.clone(), e))? }; Ok(AllowlistEntry::Realm(realm)) } else if let Some(prefix) = s.strip_suffix(":**") { let (realm, collection) = prefix .rsplit_once('/') .ok_or_else(|| AllowlistEntryError::CollectionEntryMissingRealm(s.clone()))?; Name::from_str(&collection).map_err(|_| { AllowlistEntryError::InvalidCollectionName(s.clone(), collection.into()) })?; let realm = if realm.is_empty() { AbsoluteMoniker::root() } else { AbsoluteMoniker::parse_str(realm).map_err(|e| { AllowlistEntryError::CollectionEntryInvalidMoniker(s.clone(), e) })? 
}; Ok(AllowlistEntry::Collection(realm, collection.to_string())) } else { let realm = AbsoluteMoniker::parse_str(s.as_str()) .map_err(|e| AllowlistEntryError::OtherInvalidMoniker(s.clone(), e))?; Ok(AllowlistEntry::Exact(realm)) } }) .collect() } fn as_usize_or_default(value: Option<u32>, default: usize) -> usize { match value { Some(value) => value as usize, None => default, } } #[derive(Debug, Clone, Error, PartialEq, Eq)] pub enum PolicyConfigError { #[error("Capability source name was empty in a capability policy entry.")] EmptyCapabilitySourceName, #[error("Capability type was empty in a capability policy entry.")] EmptyAllowlistedCapability, #[error("Debug registration type was empty in a debug policy entry.")] EmptyAllowlistedDebugRegistration, #[error("Environment name was empty in a debug policy entry.")] EmptyTargetMonikerDebugRegistration, #[error("Target moniker was empty in a debug policy entry.")] EmptyEnvironmentNameDebugRegistration, #[error("Capability from type was empty in a capability policy entry.")] EmptyFromType, #[error("Capability source_moniker was empty in a capability policy entry.")] EmptySourceMoniker, #[error("Invalid source capability.")] InvalidSourceCapability, #[error("Unsupported allowlist capability type")] UnsupportedAllowlistedCapability, } impl TryFrom<component_internal::Config> for RuntimeConfig { type Error = Error; fn try_from(config: component_internal::Config) -> Result<Self, Error> { let default = RuntimeConfig::default(); let list_children_batch_size = as_usize_or_default(config.list_children_batch_size, default.list_children_batch_size); let num_threads = as_usize_or_default(config.num_threads, default.num_threads); let root_component_url = match config.root_component_url { Some(url) => Some(Url::new(url)?), None => None, }; let security_policy = if let Some(security_policy) = config.security_policy { SecurityPolicy::try_from(security_policy).context("Unable to parse security policy")? 
} else { SecurityPolicy::default() }; let log_all_events = if let Some(log_all_events) = config.log_all_events { log_all_events } else { false }; Ok(RuntimeConfig { list_children_batch_size, security_policy, namespace_capabilities: Self::translate_namespace_capabilities( config.namespace_capabilities, )?, builtin_capabilities: Self::translate_builtin_capabilities( config.builtin_capabilities, )?, debug: config.debug.unwrap_or(default.debug), use_builtin_process_launcher: config .use_builtin_process_launcher .unwrap_or(default.use_builtin_process_launcher), maintain_utc_clock: config.maintain_utc_clock.unwrap_or(default.maintain_utc_clock), num_threads, builtin_pkg_resolver: config .builtin_pkg_resolver .unwrap_or(default.builtin_pkg_resolver), out_dir_contents: config.out_dir_contents.unwrap_or(default.out_dir_contents), root_component_url, component_id_index_path: config.component_id_index_path, log_destination: config.log_destination.unwrap_or(default.log_destination), log_all_events, builtin_boot_resolver: config .builtin_boot_resolver .unwrap_or(default.builtin_boot_resolver), reboot_on_terminate_enabled: config .reboot_on_terminate_enabled .unwrap_or(default.reboot_on_terminate_enabled), realm_builder_resolver_and_runner: config .realm_builder_resolver_and_runner .unwrap_or(default.realm_builder_resolver_and_runner), }) } } fn parse_capability_policy( capability_policy: Option<CapabilityPolicyAllowlists>, ) -> Result<HashMap<CapabilityAllowlistKey, HashSet<AllowlistEntry>>, Error> { let capability_policy = if let Some(capability_policy) = capability_policy { if let Some(allowlist) = capability_policy.allowlist { let mut policies = HashMap::new(); for e in allowlist.into_iter() { let source_moniker = ExtendedMoniker::parse_str( e.source_moniker .as_deref() .ok_or(Error::new(PolicyConfigError::EmptySourceMoniker))?, )?; let source_name = if let Some(source_name) = e.source_name { Ok(CapabilityName(source_name)) } else { Err(PolicyConfigError::EmptyCapabilitySourceName) }?; let source = match e.source { Some(fdecl::Ref::Self_(_)) => Ok(CapabilityAllowlistSource::Self_), Some(fdecl::Ref::Framework(_)) => Ok(CapabilityAllowlistSource::Framework), Some(fdecl::Ref::Capability(_)) => Ok(CapabilityAllowlistSource::Capability), _ => Err(Error::new(PolicyConfigError::InvalidSourceCapability)), }?; let capability = if let Some(capability) = e.capability.as_ref() { match &capability { component_internal::AllowlistedCapability::Directory(_) => { Ok(CapabilityTypeName::Directory) } component_internal::AllowlistedCapability::Event(_) => { Ok(CapabilityTypeName::Event) } component_internal::AllowlistedCapability::Protocol(_) => { Ok(CapabilityTypeName::Protocol) } component_internal::AllowlistedCapability::Service(_) => { Ok(CapabilityTypeName::Service) } component_internal::AllowlistedCapability::Storage(_) => { Ok(CapabilityTypeName::Storage) } component_internal::AllowlistedCapability::Runner(_) => { Ok(CapabilityTypeName::Runner) } component_internal::AllowlistedCapability::Resolver(_) => { Ok(CapabilityTypeName::Resolver) } _ => Err(Error::new(PolicyConfigError::EmptyAllowlistedCapability)), } } else { Err(Error::new(PolicyConfigError::EmptyAllowlistedCapability)) }?; let target_monikers = HashSet::from_iter(parse_allowlist_entries(&e.target_monikers)?); policies.insert( CapabilityAllowlistKey { source_moniker, source_name, source, capability }, target_monikers, ); } policies } else { HashMap::new() } } else { HashMap::new() }; Ok(capability_policy) } fn parse_debug_capability_policy( 
debug_registration_policy: Option<DebugRegistrationPolicyAllowlists>, ) -> Result<HashMap<CapabilityAllowlistKey, HashSet<(AbsoluteMoniker, String)>>, Error> { let debug_capability_policy = if let Some(debug_capability_policy) = debug_registration_policy { if let Some(allowlist) = debug_capability_policy.allowlist { let mut policies: HashMap<CapabilityAllowlistKey, HashSet<(AbsoluteMoniker, String)>> = HashMap::new(); for e in allowlist.into_iter() { let source_moniker = ExtendedMoniker::parse_str( e.source_moniker .as_deref() .ok_or(Error::new(PolicyConfigError::EmptySourceMoniker))?, )?; let source_name = if let Some(source_name) = e.source_name.as_ref() { Ok(CapabilityName(source_name.clone())) } else { Err(PolicyConfigError::EmptyCapabilitySourceName) }?; let capability = if let Some(capability) = e.debug.as_ref() { match &capability { component_internal::AllowlistedDebugRegistration::Protocol(_) => { Ok(CapabilityTypeName::Protocol) } _ => Err(Error::new(PolicyConfigError::EmptyAllowlistedDebugRegistration)), } } else { Err(Error::new(PolicyConfigError::EmptyAllowlistedDebugRegistration)) }?; let target_moniker = AbsoluteMoniker::parse_str( e.target_moniker .as_deref() .ok_or(PolicyConfigError::EmptyTargetMonikerDebugRegistration)?, )?; let environment_name = e .environment_name .ok_or(PolicyConfigError::EmptyEnvironmentNameDebugRegistration)?; let key = CapabilityAllowlistKey { source_moniker, source_name, source: CapabilityAllowlistSource::Self_, capability, }; let value = (target_moniker, environment_name); if let Some(h) = policies.get_mut(&key) { h.insert(value); } else { policies.insert(key, vec![value].into_iter().collect()); } } policies } else { HashMap::new() } } else { HashMap::new() }; Ok(debug_capability_policy) } impl TryFrom<component_internal::SecurityPolicy> for SecurityPolicy { type Error = Error; fn try_from(security_policy: component_internal::SecurityPolicy) -> Result<Self, Error> { let job_policy = if let Some(job_policy) = &security_policy.job_policy { let ambient_mark_vmo_exec = parse_allowlist_entries(&job_policy.ambient_mark_vmo_exec)?; let main_process_critical = parse_allowlist_entries(&job_policy.main_process_critical)?; let create_raw_processes = parse_allowlist_entries(&job_policy.create_raw_processes)?; JobPolicyAllowlists { ambient_mark_vmo_exec, main_process_critical, create_raw_processes, } } else { JobPolicyAllowlists::default() }; let capability_policy = parse_capability_policy(security_policy.capability_policy)?; let debug_capability_policy = parse_debug_capability_policy(security_policy.debug_registration_policy)?; let child_policy = if let Some(child_policy) = &security_policy.child_policy { let reboot_on_terminate = parse_allowlist_entries(&child_policy.reboot_on_terminate)?; ChildPolicyAllowlists { reboot_on_terminate } } else { ChildPolicyAllowlists::default() }; Ok(SecurityPolicy { job_policy, capability_policy, debug_capability_policy, child_policy }) } } #[cfg(test)] mod tests { use { super::*, assert_matches::assert_matches, cm_types::ParseError, fidl_fuchsia_io as fio, std::path::PathBuf, tempfile::TempDir, }; const FOO_PKG_URL: &str = "fuchsia-pkg://fuchsia.com/foo#meta/foo.cmx"; macro_rules! test_function_ok { ( $function:path, $($test_name:ident => ($input:expr, $expected:expr)),+ ) => { $( #[test] fn $test_name() { assert_matches!($function($input), Ok(v) if v == $expected); } )+ }; } macro_rules! 
test_function_err { ( $function:path, $($test_name:ident => ($input:expr, $type:ty, $expected:expr)),+ ) => { $( #[test] fn $test_name() { assert_eq!(*$function($input).unwrap_err().downcast_ref::<$type>().unwrap(), $expected); } )+ }; } macro_rules! test_config_ok { ( $($test_name:ident => ($input:expr, $expected:expr)),+ $(,)? ) => { test_function_ok! { RuntimeConfig::try_from, $($test_name => ($input, $expected)),+ } }; } macro_rules! test_config_err { ( $($test_name:ident => ($input:expr, $type:ty, $expected:expr)),+ $(,)? ) => { test_function_err! { RuntimeConfig::try_from, $($test_name => ($input, $type, $expected)),+ } }; } test_config_ok! { all_fields_none => (component_internal::Config { debug: None, list_children_batch_size: None, security_policy: None, maintain_utc_clock: None, use_builtin_process_launcher: None, num_threads: None, namespace_capabilities: None, builtin_capabilities: None, builtin_pkg_resolver: None, out_dir_contents: None, root_component_url: None, component_id_index_path: None, reboot_on_terminate_enabled: None, ..component_internal::Config::EMPTY }, RuntimeConfig::default()), all_leaf_nodes_none => (component_internal::Config { debug: Some(false), list_children_batch_size: Some(5), maintain_utc_clock: Some(false), builtin_pkg_resolver: None, use_builtin_process_launcher: Some(true), security_policy: Some(component_internal::SecurityPolicy { job_policy: Some(component_internal::JobPolicyAllowlists { main_process_critical: None, ambient_mark_vmo_exec: None, create_raw_processes: None, ..component_internal::JobPolicyAllowlists::EMPTY }), capability_policy: None, ..component_internal::SecurityPolicy::EMPTY }), num_threads: Some(10), namespace_capabilities: None, builtin_capabilities: None, out_dir_contents: None, root_component_url: None, component_id_index_path: None, log_destination: None, log_all_events: None, reboot_on_terminate_enabled: None, ..component_internal::Config::EMPTY }, RuntimeConfig { debug: false, list_children_batch_size: 5, maintain_utc_clock: false, use_builtin_process_launcher:true, num_threads: 10, builtin_pkg_resolver: BuiltinPkgResolver::None, ..Default::default() }), all_fields_some => ( component_internal::Config { debug: Some(true), list_children_batch_size: Some(42), maintain_utc_clock: Some(true), use_builtin_process_launcher: Some(false), builtin_pkg_resolver: Some(component_internal::BuiltinPkgResolver::None), security_policy: Some(component_internal::SecurityPolicy { job_policy: Some(component_internal::JobPolicyAllowlists { main_process_critical: Some(vec!["/something/important".to_string()]), ambient_mark_vmo_exec: Some(vec!["/".to_string(), "/foo/bar".to_string()]), create_raw_processes: Some(vec!["/another/thing".to_string()]), ..component_internal::JobPolicyAllowlists::EMPTY }), capability_policy: Some(component_internal::CapabilityPolicyAllowlists { allowlist: Some(vec![ component_internal::CapabilityAllowlistEntry { source_moniker: Some("<component_manager>".to_string()), source_name: Some("fuchsia.kernel.RootResource".to_string()), source: Some(fdecl::Ref::Self_(fdecl::SelfRef {})), capability: Some(component_internal::AllowlistedCapability::Protocol(component_internal::AllowlistedProtocol::EMPTY)), target_monikers: Some(vec![ "/bootstrap".to_string(), "/core/**".to_string(), "/core/test_manager/tests:**".to_string() ]), ..component_internal::CapabilityAllowlistEntry::EMPTY }, component_internal::CapabilityAllowlistEntry { source_moniker: Some("/foo/bar".to_string()), source_name: Some("running".to_string()), source: 
Some(fdecl::Ref::Framework(fdecl::FrameworkRef {})), capability: Some(component_internal::AllowlistedCapability::Event(component_internal::AllowlistedEvent::EMPTY)), target_monikers: Some(vec![ "/foo/bar".to_string(), "/foo/bar/**".to_string() ]), ..component_internal::CapabilityAllowlistEntry::EMPTY }, ]), ..component_internal::CapabilityPolicyAllowlists::EMPTY}), debug_registration_policy: Some(component_internal::DebugRegistrationPolicyAllowlists{ allowlist: Some(vec![ component_internal::DebugRegistrationAllowlistEntry { source_moniker: Some("/foo/bar/baz".to_string()), source_name: Some("fuchsia.foo.bar".to_string()), debug: Some(component_internal::AllowlistedDebugRegistration::Protocol(component_internal::AllowlistedProtocol::EMPTY)), target_moniker: Some("/foo/bar".to_string()), environment_name: Some("bar_env1".to_string()), ..component_internal::DebugRegistrationAllowlistEntry::EMPTY }, component_internal::DebugRegistrationAllowlistEntry { source_moniker: Some("/foo/bar/baz".to_string()), source_name: Some("fuchsia.foo.bar".to_string()), debug: Some(component_internal::AllowlistedDebugRegistration::Protocol(component_internal::AllowlistedProtocol::EMPTY)), target_moniker: Some("/foo".to_string()), environment_name: Some("foo_env1".to_string()), ..component_internal::DebugRegistrationAllowlistEntry::EMPTY }, component_internal::DebugRegistrationAllowlistEntry { source_moniker: Some("/foo/bar/baz".to_string()), source_name: Some("fuchsia.foo.bar".to_string()), debug: Some(component_internal::AllowlistedDebugRegistration::Protocol(component_internal::AllowlistedProtocol::EMPTY)), target_moniker: Some("/foo".to_string()), environment_name: Some("foo_env2".to_string()), ..component_internal::DebugRegistrationAllowlistEntry::EMPTY }, component_internal::DebugRegistrationAllowlistEntry { source_moniker: Some("/foo/bar".to_string()), source_name: Some("fuchsia.foo.baz".to_string()), debug: Some(component_internal::AllowlistedDebugRegistration::Protocol(component_internal::AllowlistedProtocol::EMPTY)), target_moniker: Some("/root".to_string()), environment_name: Some("root_env".to_string()), ..component_internal::DebugRegistrationAllowlistEntry::EMPTY }, ]), ..component_internal::DebugRegistrationPolicyAllowlists::EMPTY}), child_policy: Some(component_internal::ChildPolicyAllowlists { reboot_on_terminate: Some(vec!["/something/important".to_string()]), ..component_internal::ChildPolicyAllowlists::EMPTY }), ..component_internal::SecurityPolicy::EMPTY }), num_threads: Some(24), namespace_capabilities: Some(vec![ fdecl::Capability::Protocol(fdecl::Protocol { name: Some("foo_svc".into()), source_path: Some("/svc/foo".into()), ..fdecl::Protocol::EMPTY }), fdecl::Capability::Directory(fdecl::Directory { name: Some("bar_dir".into()), source_path: Some("/bar".into()), rights: Some(fio::Operations::CONNECT), ..fdecl::Directory::EMPTY }), ]), builtin_capabilities: Some(vec![ fdecl::Capability::Protocol(fdecl::Protocol { name: Some("foo_protocol".into()), source_path: None, ..fdecl::Protocol::EMPTY }), fdecl::Capability::Event(fdecl::Event { name: Some("bar_event".into()), ..fdecl::Event::EMPTY }), ]), out_dir_contents: Some(component_internal::OutDirContents::Svc), root_component_url: Some(FOO_PKG_URL.to_string()), component_id_index_path: Some("/boot/config/component_id_index".to_string()), log_destination: Some(component_internal::LogDestination::Klog), log_all_events: Some(true), builtin_boot_resolver: Some(component_internal::BuiltinBootResolver::None), reboot_on_terminate_enabled: Some(true), 
realm_builder_resolver_and_runner: Some(component_internal::RealmBuilderResolverAndRunner::None), ..component_internal::Config::EMPTY }, RuntimeConfig { debug: true, list_children_batch_size: 42, maintain_utc_clock: true, use_builtin_process_launcher: false, security_policy: SecurityPolicy { job_policy: JobPolicyAllowlists { ambient_mark_vmo_exec: vec![ AllowlistEntry::Exact(AbsoluteMoniker::root()), AllowlistEntry::Exact(AbsoluteMoniker::from(vec!["foo", "bar"])), ], main_process_critical: vec![ AllowlistEntry::Exact(AbsoluteMoniker::from(vec!["something", "important"])), ], create_raw_processes: vec![ AllowlistEntry::Exact(AbsoluteMoniker::from(vec!["another", "thing"])), ], }, capability_policy: HashMap::from_iter(vec![ (CapabilityAllowlistKey { source_moniker: ExtendedMoniker::ComponentManager, source_name: CapabilityName::from("fuchsia.kernel.RootResource"), source: CapabilityAllowlistSource::Self_, capability: CapabilityTypeName::Protocol, }, HashSet::from_iter(vec![ AllowlistEntry::Exact(AbsoluteMoniker::from(vec!["bootstrap"])), AllowlistEntry::Realm(AbsoluteMoniker::from(vec!["core"])), AllowlistEntry::Collection(AbsoluteMoniker::from(vec!["core", "test_manager"]), "tests".into()), ].iter().cloned()) ), (CapabilityAllowlistKey { source_moniker: ExtendedMoniker::ComponentInstance(AbsoluteMoniker::from(vec!["foo", "bar"])), source_name: CapabilityName::from("running"), source: CapabilityAllowlistSource::Framework, capability: CapabilityTypeName::Event, }, HashSet::from_iter(vec![ AllowlistEntry::Exact(AbsoluteMoniker::from(vec!["foo", "bar"])), AllowlistEntry::Realm(AbsoluteMoniker::from(vec!["foo", "bar"])), ].iter().cloned()) ), ].iter().cloned()), debug_capability_policy: HashMap::from_iter(vec![ (CapabilityAllowlistKey { source_moniker: ExtendedMoniker::ComponentInstance(AbsoluteMoniker::from(vec!["foo", "bar", "baz"])), source_name: CapabilityName::from("fuchsia.foo.bar"), source: CapabilityAllowlistSource::Self_, capability: CapabilityTypeName::Protocol, }, HashSet::from_iter(vec![ (AbsoluteMoniker::from(vec!["foo", "bar"]),"bar_env1".to_string()), (AbsoluteMoniker::from(vec!["foo"]),"foo_env1".to_string()), (AbsoluteMoniker::from(vec!["foo"]),"foo_env2".to_string()) ].iter().cloned()) ), (CapabilityAllowlistKey { source_moniker: ExtendedMoniker::ComponentInstance(AbsoluteMoniker::from(vec!["foo", "bar"])), source_name: CapabilityName::from("fuchsia.foo.baz"), source: CapabilityAllowlistSource::Self_, capability: CapabilityTypeName::Protocol, },
HashSet::from_iter(vec![ (AbsoluteMoniker::from(vec!["root"]),"root_env".to_string()), ].iter().cloned())
), ].iter().cloned()), child_policy: ChildPolicyAllowlists { reboot_on_terminate: vec![ AllowlistEntry::Exact(AbsoluteMoniker::from(vec!["something", "important"])), ], }, }, num_threads: 24, namespace_capabilities: vec![ cm_rust::CapabilityDecl::Protocol(cm_rust::ProtocolDecl { name: "foo_svc".into(), source_path: Some("/svc/foo".parse().unwrap()), }), cm_rust::CapabilityDecl::Directory(cm_rust::DirectoryDecl { name: "bar_dir".into(), source_path: Some("/bar".parse().unwrap()), rights: fio::Operations::CONNECT, }), ], builtin_capabilities: vec![ cm_rust::CapabilityDecl::Protocol(cm_rust::ProtocolDecl { name: "foo_protocol".into(), source_path: None, }), cm_rust::CapabilityDecl::Event(cm_rust::EventDecl { name: "bar_event".into(), }), ], builtin_pkg_resolver: BuiltinPkgResolver::None, out_dir_contents: OutDirContents::Svc, root_component_url: Some(Url::new(FOO_PKG_URL.to_string()).unwrap()), component_id_index_path: Some("/boot/config/component_id_index".to_string()), log_destination: LogDestination::Klog, log_all_events: true, builtin_boot_resolver: BuiltinBootResolver::None, reboot_on_terminate_enabled: true, realm_builder_resolver_and_runner: RealmBuilderResolverAndRunner::None, } ), } test_config_err! { invalid_job_policy => (component_internal::Config { debug: None, list_children_batch_size: None, maintain_utc_clock: None, use_builtin_process_launcher: None, builtin_pkg_resolver: None, security_policy: Some(component_internal::SecurityPolicy { job_policy: Some(component_internal::JobPolicyAllowlists { main_process_critical: None, ambient_mark_vmo_exec: Some(vec!["/".to_string(), "bad".to_string()]), create_raw_processes: None, ..component_internal::JobPolicyAllowlists::EMPTY }), capability_policy: None, ..component_internal::SecurityPolicy::EMPTY }), num_threads: None, namespace_capabilities: None, builtin_capabilities: None, out_dir_contents: None, root_component_url: None, component_id_index_path: None, reboot_on_terminate_enabled: None, ..component_internal::Config::EMPTY }, AllowlistEntryError, AllowlistEntryError::OtherInvalidMoniker( "bad".into(), MonikerError::InvalidMoniker { rep: "bad".into()})), invalid_capability_policy_empty_allowlist_cap => (component_internal::Config { debug: None, list_children_batch_size: None, maintain_utc_clock: None, use_builtin_process_launcher: None, builtin_pkg_resolver: None, security_policy: Some(component_internal::SecurityPolicy { job_policy: None, capability_policy: Some(component_internal::CapabilityPolicyAllowlists { allowlist: Some(vec![ component_internal::CapabilityAllowlistEntry { source_moniker: Some("<component_manager>".to_string()), source_name: Some("fuchsia.kernel.RootResource".to_string()), source: Some(fdecl::Ref::Self_(fdecl::SelfRef{})), capability: None, target_monikers: Some(vec!["/core".to_string()]), ..component_internal::CapabilityAllowlistEntry::EMPTY }]), ..component_internal::CapabilityPolicyAllowlists::EMPTY }), ..component_internal::SecurityPolicy::EMPTY }), num_threads: None, namespace_capabilities: None, builtin_capabilities: None, out_dir_contents: None, root_component_url: None, component_id_index_path: None, ..component_internal::Config::EMPTY }, PolicyConfigError, PolicyConfigError::EmptyAllowlistedCapability), invalid_capability_policy_empty_source_moniker => (component_internal::Config { debug: None, list_children_batch_size: None, maintain_utc_clock: None, use_builtin_process_launcher: None, builtin_pkg_resolver: None, security_policy: Some(component_internal::SecurityPolicy { job_policy: None, 
capability_policy: Some(component_internal::CapabilityPolicyAllowlists { allowlist: Some(vec![ component_internal::CapabilityAllowlistEntry { source_moniker: None, source_name: Some("fuchsia.kernel.RootResource".to_string()), capability: Some(component_internal::AllowlistedCapability::Protocol(component_internal::AllowlistedProtocol::EMPTY)), target_monikers: Some(vec!["/core".to_string()]), ..component_internal::CapabilityAllowlistEntry::EMPTY }]), ..component_internal::CapabilityPolicyAllowlists::EMPTY }), ..component_internal::SecurityPolicy::EMPTY }), num_threads: None, namespace_capabilities: None, builtin_capabilities: None, out_dir_contents: None, root_component_url: None, component_id_index_path: None, reboot_on_terminate_enabled: None, ..component_internal::Config::EMPTY }, PolicyConfigError, PolicyConfigError::EmptySourceMoniker), invalid_root_component_url => (component_internal::Config { debug: None, list_children_batch_size: None, maintain_utc_clock: None, use_builtin_process_launcher: None, builtin_pkg_resolver: None, security_policy: None, num_threads: None, namespace_capabilities: None, builtin_capabilities: None, out_dir_contents: None, root_component_url: Some("invalid url".to_string()), component_id_index_path: None, reboot_on_terminate_enabled: None, ..component_internal::Config::EMPTY }, ParseError, ParseError::InvalidValue), } fn write_config_to_file( tmp_dir: &TempDir, mut config: component_internal::Config, ) -> Result<PathBuf, Error> { let path = tmp_dir.path().join("test_config.fidl"); let content = fidl::encoding::encode_persistent_with_context( &fidl::encoding::Context { wire_format_version: fidl::encoding::WireFormatVersion::V2 }, &mut config, )?; std::fs::write(&path, &content)?; Ok(path) } #[test] fn config_from_file_no_arg() -> Result<(), Error> { assert_matches!(RuntimeConfig::load_from_file::<PathBuf>(Default::default()), Err(_)); Ok(()) } #[test] fn config_from_file_missing() -> Result<(), Error> { let path = PathBuf::from(&"/foo/bar".to_string()); assert_matches!(RuntimeConfig::load_from_file(&path), Err(_)); Ok(()) } #[test] fn config_from_file_valid() -> Result<(), Error> { let tempdir = TempDir::new().expect("failed to create temp directory"); let path = write_config_to_file( &tempdir, component_internal::Config { debug: None, list_children_batch_size: Some(42), security_policy: None, namespace_capabilities: None, builtin_capabilities: None, maintain_utc_clock: None, use_builtin_process_launcher: None, num_threads: None, builtin_pkg_resolver: None, out_dir_contents: None, root_component_url: Some(FOO_PKG_URL.to_string()), ..component_internal::Config::EMPTY }, )?; let expected = RuntimeConfig { list_children_batch_size: 42, root_component_url: Some(Url::new(FOO_PKG_URL.to_string())?), ..Default::default() }; assert_matches!( RuntimeConfig::load_from_file(&path) , Ok(v) if v == expected); Ok(()) } #[test] fn config_from_file_invalid() -> Result<(), Error> { let tempdir = TempDir::new().expect("failed to create temp directory"); let path = tempdir.path().join("test_config.fidl"); // Add config file containing garbage data. std::fs::write(&path, &vec![0xfa, 0xde])?; assert_matches!(RuntimeConfig::load_from_file(&path), Err(_)); Ok(()) } macro_rules! test_entries_ok { ( $($test_name:ident => ($input:expr, $expected:expr)),+ $(,)? ) => { test_function_ok! { parse_allowlist_entries, $($test_name => ($input, $expected)),+ } }; } macro_rules! test_entries_err { ( $($test_name:ident => ($input:expr, $type:ty, $expected:expr)),+ $(,)? ) => { test_function_err! 
{ parse_allowlist_entries, $($test_name => ($input, $type, $expected)),+ } }; } test_entries_ok! { missing_entries => (&None, vec![]), empty_entries => (&Some(vec![]), vec![]), all_entry_types => (&Some(vec![ "/core".into(), "/**".into(), "/foo/**".into(), "/coll:**".into(), "/core/test_manager/tests:**".into(), ]), vec![ AllowlistEntry::Exact(AbsoluteMoniker::from(vec!["core"])), AllowlistEntry::Realm(AbsoluteMoniker::root()), AllowlistEntry::Realm(AbsoluteMoniker::from(vec!["foo"])), AllowlistEntry::Collection(AbsoluteMoniker::root(), "coll".into()), AllowlistEntry::Collection(AbsoluteMoniker::from(vec!["core", "test_manager"]), "tests".into()) ]) } test_entries_err! { invalid_realm_entry => ( &Some(vec!["/foo/**".into(), "bar/**".into()]), AllowlistEntryError, AllowlistEntryError::RealmEntryInvalidMoniker( "bar/**".into(), MonikerError::InvalidMoniker { rep: "bar".into() })), invalid_realm_in_collection_entry => ( &Some(vec!["/foo/coll:**".into(), "bar/coll:**".into()]), AllowlistEntryError, AllowlistEntryError::CollectionEntryInvalidMoniker( "bar/coll:**".into(), MonikerError::InvalidMoniker { rep: "bar".into() })), missing_realm_in_collection_entry => ( &Some(vec!["coll:**".into()]), AllowlistEntryError, AllowlistEntryError::CollectionEntryMissingRealm("coll:**".into())), missing_collection_name => ( &Some(vec!["/foo/coll:**".into(), "/:**".into()]), AllowlistEntryError, AllowlistEntryError::InvalidCollectionName( "/:**".into(), "".into(), )), invalid_collection_name => ( &Some(vec!["/foo/coll:**".into(), "/*:**".into()]), AllowlistEntryError, AllowlistEntryError::InvalidCollectionName( "/*:**".into(), "*".into(), )), invalid_exact_entry => ( &Some(vec!["/foo/bar*".into()]), AllowlistEntryError, AllowlistEntryError::OtherInvalidMoniker( "/foo/bar*".into(), MonikerError::InvalidMonikerPart("bar*".into()))), } }
HashSet::from_iter(vec![ (AbsoluteMoniker::from(vec!["root"]), "root_env".to_string()), ].iter().cloned())
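// A minimal sketch of the three allowlist pattern shapes exercised by the tests above,
// assuming `parse_allowlist_entries` returns `Result<Vec<AllowlistEntry>, AllowlistEntryError>`
// (the `test_entries_ok!` macro unwraps the Ok case):
//   "/core"      -> AllowlistEntry::Exact      (one specific moniker)
//   "/core/**"   -> AllowlistEntry::Realm      (the realm and all of its descendants)
//   "/core/c:**" -> AllowlistEntry::Collection (any instance in collection `c` under /core)
#[test]
fn allowlist_entry_shapes_sketch() {
    let entries =
        parse_allowlist_entries(&Some(vec!["/core".into(), "/core/**".into()])).unwrap();
    assert_eq!(
        entries,
        vec![
            AllowlistEntry::Exact(AbsoluteMoniker::from(vec!["core"])),
            AllowlistEntry::Realm(AbsoluteMoniker::from(vec!["core"])),
        ]
    );
}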
user.service.ts
import { HttpClient } from '@angular/common/http'; import { Injectable } from '@angular/core'; import { Observable } from 'rxjs'; import { USER_API } from './api/user.api'; @Injectable({ providedIn: 'root' }) export class UserService { constructor(private _httpc: HttpClient) { }
saveNewConnection(payload: any): Observable<any> { return this._httpc.post(USER_API.addConnection(payload.userId), payload.user); } getAllConnectionsOfUser(userId: number): Observable<any> { return this._httpc.get(USER_API.getAllConnections(userId)); } getSearchResultForUserWithKeyword(userId: number, keyword: string): Observable<any> { return this._httpc.get(USER_API.searchURIForUserConSearch(userId, keyword)); } getUserProfileById(userId: number): Observable<any> { return this._httpc.get(USER_API.getProfileById(userId)); } updateUserInfo(userId: number, payload: any): Observable<any> { return this._httpc.post(USER_API.updateUserInfo(userId), payload); } updateUserProfileImage(userId: number, imgData: any): Observable<any> { return this._httpc.post(USER_API.uploadUserProfileImg(userId), imgData); } updateUserStatus(userId: number, status: string): Observable<any> { return this._httpc.put(USER_API.updateUserStatus(userId), status); } }
searchUserByUserName(username: string): Observable<any> { return this._httpc.get(USER_API.searchUser(username)); }
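// A minimal consumption sketch (the component below is illustrative and not part of
// this service): UserService is injected and one of its calls is subscribed to.
// HttpClient observables are cold, so no request is sent until subscribe().
import { Component, OnInit } from '@angular/core';
import { UserService } from './user.service';

@Component({ selector: 'app-connections', template: '' })
export class ConnectionsComponent implements OnInit {
  connections: any[] = [];

  constructor(private userService: UserService) { }

  ngOnInit(): void {
    this.userService.getAllConnectionsOfUser(1)
      .subscribe(connections => this.connections = connections);
  }
}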
pprint.py
from __future__ import absolute_import, division, print_function def profile2d(p, vmin=None, vmax=None): from dials.array_family import flex import string if vmin is None: vmin = flex.min(p) if vmax is None: vmax = flex.max(p) assert vmax >= vmin dv = vmax - vmin if dv == 0: c = 0 m = 0 else: m = 35.0 / dv c = -m * vmin lookup = string.digits + string.ascii_uppercase ny, nx = p.all() text = "" for j in range(ny): for i in range(nx): v = int(m * p[j, i] + c) if v < 0: v = 0 elif v > 35: v = 35 t = lookup[v] text += t + " " text += "\n" return text def profile3d(p, vmin=None, vmax=None):
if __name__ == "__main__": from dials.array_family import flex a1 = flex.double( [ [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 1, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], ] ) a2 = flex.double( [ [0, 0, 0, 0, 0], [0, 0, 1, 0, 0], [0, 1, 2, 1, 0], [0, 0, 1, 0, 0], [0, 0, 0, 0, 0], ] ) a3 = flex.double( [ [0, 0, 1, 0, 0], [0, 1, 2, 1, 0], [1, 2, 5, 2, 1], [0, 1, 2, 1, 0], [0, 0, 1, 0, 0], ] ) a4 = flex.double( [ [0, 0, 0, 0, 0], [0, 0, 1, 0, 0], [0, 1, 2, 1, 0], [0, 0, 1, 0, 0], [0, 0, 0, 0, 0], ] ) a5 = flex.double( [ [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 1, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], ] ) a1.reshape(flex.grid(1, 5, 5)) a2.reshape(flex.grid(1, 5, 5)) a3.reshape(flex.grid(1, 5, 5)) a4.reshape(flex.grid(1, 5, 5)) a5.reshape(flex.grid(1, 5, 5)) a = flex.double(flex.grid(5, 5, 5)) a[0:1, :, :] = a1 a[1:2, :, :] = a2 a[2:3, :, :] = a3 a[3:4, :, :] = a4 a[4:5, :, :] = a5 a = a * 1000 print(profile3d(a))
""" Print a 3D profile. """ from dials.array_family import flex if vmin is None: vmin = flex.min(p) if vmax is None: vmax = flex.max(p) nz, ny, nx = p.all() text = [] for k in range(nz): p2 = p[k : k + 1, :, :] p2.reshape(flex.grid(ny, nx)) text.append(profile2d(p2, vmin=vmin, vmax=vmax)) return "\n".join(text)
index.js
"use strict" const punycodeRegex = require("punycode-regex")()
module.exports = (string) => punycodeRegex.test(string)
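// Usage sketch (the "./index" require path is illustrative): the exported predicate
// reports whether a string matches the punycode ("xn--" prefixed) pattern supplied
// by the punycode-regex package.
const containsPunycode = require("./index")

console.log(containsPunycode("xn--maana-pta")) // true  ("mañana" punycoded)
console.log(containsPunycode("example"))       // false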
util-deps.js
/** * util-deps.js - The parser for dependencies * ref: tests/research/parse-dependencies/test.html */ var REQUIRE_RE = /"(?:\\"|[^"])*"|'(?:\\'|[^'])*'|\/\*[\S\s]*?\*\/|\/(?:\\\/|[^/\r\n])+\/(?=[^\/])|\/\/.*|\.\s*require|(?:^|[^$])\brequire\s*\(\s*(["'])(.+?)\1\s*\)/g, SLASH_RE = /\\\\/g, REQUIRE_NAME_RE = /^function[\s]*\([\s]*([^\s,\)]+)/;
var requireName = REQUIRE_NAME_RE.exec(code), RE = REQUIRE_RE; // no dependencies if( !requireName ){ return []; } if((requireName = requireName[1]) !== 'require'){ // reconstruct require regexp RE = RE .toString() // for compressed code // replace arg 'require' with actual name .replace(/require/g, requireName); // remove head & tail // '/xxxxx/g' -> 'xxxxx' RE = RE.slice(1, RE.length - 2); RE = new RegExp(RE, 'g'); } // grep deps by using regexp match var ret = []; code.replace(SLASH_RE, '') .replace(RE, function(m, m1, m2) { m2 && ret.push(m2); }); return ret; }
function parseDependencies(code) { // get require function name // in compress code, require function name is no longer 'require'
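// Usage sketch: extracting dependency ids from a factory function's source. The code
// must start with "function" (e.g. obtained via factory.toString()) so REQUIRE_NAME_RE
// can pick out the local name of `require`; string literals and comments are consumed
// by the leading alternatives of REQUIRE_RE, so requires inside them are ignored.
var deps = parseDependencies(
  "function (require) {\n" +
  "  var a = require('./a') // require('./skipped-in-comment')\n" +
  "  var b = require(\"./b\")\n" +
  "}"
)
// deps -> ['./a', './b']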
client.rs
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. #[derive(Debug)] pub(crate) struct Handle { pub(crate) client: aws_smithy_client::Client< aws_smithy_client::erase::DynConnector, aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>, >, pub(crate) conf: crate::Config, } /// Client for AWS Savings Plans /// /// Client for invoking operations on AWS Savings Plans. Each operation on AWS Savings Plans is a method on this /// struct. `.send()` MUST be invoked on the generated operations to dispatch the request to the service. /// /// # Examples /// **Constructing a client and invoking an operation** /// ```rust,no_run /// # async fn docs() { /// // create a shared configuration. This can be used & shared between multiple service clients. /// let shared_config = aws_config::load_from_env().await; /// let client = aws_sdk_savingsplans::Client::new(&shared_config); /// // invoke an operation /// /* let rsp = client /// .<operation_name>(). /// .<param>("some value") /// .send().await; */ /// # } /// ``` /// **Constructing a client with custom configuration** /// ```rust,no_run /// use aws_config::RetryConfig; /// # async fn docs() { /// let shared_config = aws_config::load_from_env().await; /// let config = aws_sdk_savingsplans::config::Builder::from(&shared_config) /// .retry_config(RetryConfig::disabled()) /// .build(); /// let client = aws_sdk_savingsplans::Client::from_conf(config); /// # } /// ``` #[derive(std::fmt::Debug)] pub struct Client { handle: std::sync::Arc<Handle>, } impl std::clone::Clone for Client { fn clone(&self) -> Self { Self { handle: self.handle.clone(), } } } #[doc(inline)] pub use aws_smithy_client::Builder; impl From< aws_smithy_client::Client< aws_smithy_client::erase::DynConnector, aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>, >, > for Client { fn from( client: aws_smithy_client::Client< aws_smithy_client::erase::DynConnector, aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>, >, ) -> Self { Self::with_config(client, crate::Config::builder().build()) } } impl Client { /// Creates a client with the given service configuration. pub fn with_config( client: aws_smithy_client::Client< aws_smithy_client::erase::DynConnector, aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>, >, conf: crate::Config, ) -> Self { Self { handle: std::sync::Arc::new(Handle { client, conf }), } } /// Returns the client's configuration. pub fn conf(&self) -> &crate::Config { &self.handle.conf } } impl Client { /// Constructs a fluent builder for the [`CreateSavingsPlan`](crate::client::fluent_builders::CreateSavingsPlan) operation. /// /// - The fluent builder is configurable: /// - [`savings_plan_offering_id(impl Into<String>)`](crate::client::fluent_builders::CreateSavingsPlan::savings_plan_offering_id) / [`set_savings_plan_offering_id(Option<String>)`](crate::client::fluent_builders::CreateSavingsPlan::set_savings_plan_offering_id): <p>The ID of the offering.</p> /// - [`commitment(impl Into<String>)`](crate::client::fluent_builders::CreateSavingsPlan::commitment) / [`set_commitment(Option<String>)`](crate::client::fluent_builders::CreateSavingsPlan::set_commitment): <p>The hourly commitment, in USD. This is a value between 0.001 and 1 million. 
You cannot specify more than five digits after the decimal point.</p> /// - [`upfront_payment_amount(impl Into<String>)`](crate::client::fluent_builders::CreateSavingsPlan::upfront_payment_amount) / [`set_upfront_payment_amount(Option<String>)`](crate::client::fluent_builders::CreateSavingsPlan::set_upfront_payment_amount): <p>The up-front payment amount. This is a whole number between 50 and 99 percent of the total value of the Savings Plan. This parameter is supported only if the payment option is <code>Partial Upfront</code>.</p> /// - [`purchase_time(DateTime)`](crate::client::fluent_builders::CreateSavingsPlan::purchase_time) / [`set_purchase_time(Option<DateTime>)`](crate::client::fluent_builders::CreateSavingsPlan::set_purchase_time): <p>The time at which to purchase the Savings Plan, in UTC format (YYYY-MM-DDTHH:MM:SSZ).</p> /// - [`client_token(impl Into<String>)`](crate::client::fluent_builders::CreateSavingsPlan::client_token) / [`set_client_token(Option<String>)`](crate::client::fluent_builders::CreateSavingsPlan::set_client_token): <p>Unique, case-sensitive identifier that you provide to ensure the idempotency of the request.</p> /// - [`tags(HashMap<String, String>)`](crate::client::fluent_builders::CreateSavingsPlan::tags) / [`set_tags(Option<HashMap<String, String>>)`](crate::client::fluent_builders::CreateSavingsPlan::set_tags): <p>One or more tags.</p> /// - On success, responds with [`CreateSavingsPlanOutput`](crate::output::CreateSavingsPlanOutput) with field(s): /// - [`savings_plan_id(Option<String>)`](crate::output::CreateSavingsPlanOutput::savings_plan_id): <p>The ID of the Savings Plan.</p> /// - On failure, responds with [`SdkError<CreateSavingsPlanError>`](crate::error::CreateSavingsPlanError) pub fn create_savings_plan(&self) -> fluent_builders::CreateSavingsPlan { fluent_builders::CreateSavingsPlan::new(self.handle.clone()) } /// Constructs a fluent builder for the [`DeleteQueuedSavingsPlan`](crate::client::fluent_builders::DeleteQueuedSavingsPlan) operation. /// /// - The fluent builder is configurable: /// - [`savings_plan_id(impl Into<String>)`](crate::client::fluent_builders::DeleteQueuedSavingsPlan::savings_plan_id) / [`set_savings_plan_id(Option<String>)`](crate::client::fluent_builders::DeleteQueuedSavingsPlan::set_savings_plan_id): <p>The ID of the Savings Plan.</p> /// - On success, responds with [`DeleteQueuedSavingsPlanOutput`](crate::output::DeleteQueuedSavingsPlanOutput) /// - On failure, responds with [`SdkError<DeleteQueuedSavingsPlanError>`](crate::error::DeleteQueuedSavingsPlanError) pub fn delete_queued_savings_plan(&self) -> fluent_builders::DeleteQueuedSavingsPlan { fluent_builders::DeleteQueuedSavingsPlan::new(self.handle.clone()) } /// Constructs a fluent builder for the [`DescribeSavingsPlanRates`](crate::client::fluent_builders::DescribeSavingsPlanRates) operation. 
/// /// - The fluent builder is configurable: /// - [`savings_plan_id(impl Into<String>)`](crate::client::fluent_builders::DescribeSavingsPlanRates::savings_plan_id) / [`set_savings_plan_id(Option<String>)`](crate::client::fluent_builders::DescribeSavingsPlanRates::set_savings_plan_id): <p>The ID of the Savings Plan.</p> /// - [`filters(Vec<SavingsPlanRateFilter>)`](crate::client::fluent_builders::DescribeSavingsPlanRates::filters) / [`set_filters(Option<Vec<SavingsPlanRateFilter>>)`](crate::client::fluent_builders::DescribeSavingsPlanRates::set_filters): <p>The filters.</p> /// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::DescribeSavingsPlanRates::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::DescribeSavingsPlanRates::set_next_token): <p>The token for the next page of results.</p> /// - [`max_results(i32)`](crate::client::fluent_builders::DescribeSavingsPlanRates::max_results) / [`set_max_results(Option<i32>)`](crate::client::fluent_builders::DescribeSavingsPlanRates::set_max_results): <p>The maximum number of results to return with a single call. To retrieve additional results, make another call with the returned token value.</p> /// - On success, responds with [`DescribeSavingsPlanRatesOutput`](crate::output::DescribeSavingsPlanRatesOutput) with field(s): /// - [`savings_plan_id(Option<String>)`](crate::output::DescribeSavingsPlanRatesOutput::savings_plan_id): <p>The ID of the Savings Plan.</p> /// - [`search_results(Option<Vec<SavingsPlanRate>>)`](crate::output::DescribeSavingsPlanRatesOutput::search_results): <p>Information about the Savings Plans rates.</p> /// - [`next_token(Option<String>)`](crate::output::DescribeSavingsPlanRatesOutput::next_token): <p>The token to use to retrieve the next page of results. This value is null when there are no more results to return.</p> /// - On failure, responds with [`SdkError<DescribeSavingsPlanRatesError>`](crate::error::DescribeSavingsPlanRatesError) pub fn describe_savings_plan_rates(&self) -> fluent_builders::DescribeSavingsPlanRates { fluent_builders::DescribeSavingsPlanRates::new(self.handle.clone()) } /// Constructs a fluent builder for the [`DescribeSavingsPlans`](crate::client::fluent_builders::DescribeSavingsPlans) operation. /// /// - The fluent builder is configurable: /// - [`savings_plan_arns(Vec<String>)`](crate::client::fluent_builders::DescribeSavingsPlans::savings_plan_arns) / [`set_savings_plan_arns(Option<Vec<String>>)`](crate::client::fluent_builders::DescribeSavingsPlans::set_savings_plan_arns): <p>The Amazon Resource Names (ARN) of the Savings Plans.</p> /// - [`savings_plan_ids(Vec<String>)`](crate::client::fluent_builders::DescribeSavingsPlans::savings_plan_ids) / [`set_savings_plan_ids(Option<Vec<String>>)`](crate::client::fluent_builders::DescribeSavingsPlans::set_savings_plan_ids): <p>The IDs of the Savings Plans.</p> /// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::DescribeSavingsPlans::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::DescribeSavingsPlans::set_next_token): <p>The token for the next page of results.</p> /// - [`max_results(i32)`](crate::client::fluent_builders::DescribeSavingsPlans::max_results) / [`set_max_results(Option<i32>)`](crate::client::fluent_builders::DescribeSavingsPlans::set_max_results): <p>The maximum number of results to return with a single call. 
To retrieve additional results, make another call with the returned token value.</p> /// - [`states(Vec<SavingsPlanState>)`](crate::client::fluent_builders::DescribeSavingsPlans::states) / [`set_states(Option<Vec<SavingsPlanState>>)`](crate::client::fluent_builders::DescribeSavingsPlans::set_states): <p>The states.</p> /// - [`filters(Vec<SavingsPlanFilter>)`](crate::client::fluent_builders::DescribeSavingsPlans::filters) / [`set_filters(Option<Vec<SavingsPlanFilter>>)`](crate::client::fluent_builders::DescribeSavingsPlans::set_filters): <p>The filters.</p> /// - On success, responds with [`DescribeSavingsPlansOutput`](crate::output::DescribeSavingsPlansOutput) with field(s): /// - [`savings_plans(Option<Vec<SavingsPlan>>)`](crate::output::DescribeSavingsPlansOutput::savings_plans): <p>Information about the Savings Plans.</p> /// - [`next_token(Option<String>)`](crate::output::DescribeSavingsPlansOutput::next_token): <p>The token to use to retrieve the next page of results. This value is null when there are no more results to return.</p> /// - On failure, responds with [`SdkError<DescribeSavingsPlansError>`](crate::error::DescribeSavingsPlansError) pub fn describe_savings_plans(&self) -> fluent_builders::DescribeSavingsPlans { fluent_builders::DescribeSavingsPlans::new(self.handle.clone()) } /// Constructs a fluent builder for the [`DescribeSavingsPlansOfferingRates`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates) operation. /// /// - The fluent builder is configurable: /// - [`savings_plan_offering_ids(Vec<String>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::savings_plan_offering_ids) / [`set_savings_plan_offering_ids(Option<Vec<String>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::set_savings_plan_offering_ids): <p>The IDs of the offerings.</p> /// - [`savings_plan_payment_options(Vec<SavingsPlanPaymentOption>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::savings_plan_payment_options) / [`set_savings_plan_payment_options(Option<Vec<SavingsPlanPaymentOption>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::set_savings_plan_payment_options): <p>The payment options.</p> /// - [`savings_plan_types(Vec<SavingsPlanType>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::savings_plan_types) / [`set_savings_plan_types(Option<Vec<SavingsPlanType>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::set_savings_plan_types): <p>The plan types.</p> /// - [`products(Vec<SavingsPlanProductType>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::products) / [`set_products(Option<Vec<SavingsPlanProductType>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::set_products): <p>The AWS products.</p> /// - [`service_codes(Vec<SavingsPlanRateServiceCode>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::service_codes) / [`set_service_codes(Option<Vec<SavingsPlanRateServiceCode>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::set_service_codes): <p>The services.</p> /// - [`usage_types(Vec<String>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::usage_types) / [`set_usage_types(Option<Vec<String>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::set_usage_types): <p>The usage details of the line item in the billing report.</p> /// - [`operations(Vec<String>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::operations) 
/ [`set_operations(Option<Vec<String>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::set_operations): <p>The specific AWS operation for the line item in the billing report.</p> /// - [`filters(Vec<SavingsPlanOfferingRateFilterElement>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::filters) / [`set_filters(Option<Vec<SavingsPlanOfferingRateFilterElement>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::set_filters): <p>The filters.</p> /// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::set_next_token): <p>The token for the next page of results.</p> /// - [`max_results(i32)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::max_results) / [`set_max_results(i32)`](crate::client::fluent_builders::DescribeSavingsPlansOfferingRates::set_max_results): <p>The maximum number of results to return with a single call. To retrieve additional results, make another call with the returned token value.</p> /// - On success, responds with [`DescribeSavingsPlansOfferingRatesOutput`](crate::output::DescribeSavingsPlansOfferingRatesOutput) with field(s): /// - [`search_results(Option<Vec<SavingsPlanOfferingRate>>)`](crate::output::DescribeSavingsPlansOfferingRatesOutput::search_results): <p>Information about the Savings Plans offering rates.</p> /// - [`next_token(Option<String>)`](crate::output::DescribeSavingsPlansOfferingRatesOutput::next_token): <p>The token to use to retrieve the next page of results. This value is null when there are no more results to return.</p> /// - On failure, responds with [`SdkError<DescribeSavingsPlansOfferingRatesError>`](crate::error::DescribeSavingsPlansOfferingRatesError) pub fn describe_savings_plans_offering_rates( &self, ) -> fluent_builders::DescribeSavingsPlansOfferingRates { fluent_builders::DescribeSavingsPlansOfferingRates::new(self.handle.clone()) } /// Constructs a fluent builder for the [`DescribeSavingsPlansOfferings`](crate::client::fluent_builders::DescribeSavingsPlansOfferings) operation. 
/// /// - The fluent builder is configurable: /// - [`offering_ids(Vec<String>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::offering_ids) / [`set_offering_ids(Option<Vec<String>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_offering_ids): <p>The IDs of the offerings.</p> /// - [`payment_options(Vec<SavingsPlanPaymentOption>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::payment_options) / [`set_payment_options(Option<Vec<SavingsPlanPaymentOption>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_payment_options): <p>The payment options.</p> /// - [`product_type(SavingsPlanProductType)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::product_type) / [`set_product_type(Option<SavingsPlanProductType>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_product_type): <p>The product type.</p> /// - [`plan_types(Vec<SavingsPlanType>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::plan_types) / [`set_plan_types(Option<Vec<SavingsPlanType>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_plan_types): <p>The plan type.</p> /// - [`durations(Vec<i64>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::durations) / [`set_durations(Option<Vec<i64>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_durations): <p>The durations, in seconds.</p> /// - [`currencies(Vec<CurrencyCode>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::currencies) / [`set_currencies(Option<Vec<CurrencyCode>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_currencies): <p>The currencies.</p> /// - [`descriptions(Vec<String>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::descriptions) / [`set_descriptions(Option<Vec<String>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_descriptions): <p>The descriptions.</p> /// - [`service_codes(Vec<String>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::service_codes) / [`set_service_codes(Option<Vec<String>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_service_codes): <p>The services.</p> /// - [`usage_types(Vec<String>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::usage_types) / [`set_usage_types(Option<Vec<String>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_usage_types): <p>The usage details of the line item in the billing report.</p> /// - [`operations(Vec<String>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::operations) / [`set_operations(Option<Vec<String>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_operations): <p>The specific AWS operation for the line item in the billing report.</p> /// - [`filters(Vec<SavingsPlanOfferingFilterElement>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::filters) / [`set_filters(Option<Vec<SavingsPlanOfferingFilterElement>>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_filters): <p>The filters.</p> /// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_next_token): <p>The token for the next page of results.</p> /// - [`max_results(i32)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::max_results) / 
[`set_max_results(i32)`](crate::client::fluent_builders::DescribeSavingsPlansOfferings::set_max_results): <p>The maximum number of results to return with a single call. To retrieve additional results, make another call with the returned token value.</p> /// - On success, responds with [`DescribeSavingsPlansOfferingsOutput`](crate::output::DescribeSavingsPlansOfferingsOutput) with field(s): /// - [`search_results(Option<Vec<SavingsPlanOffering>>)`](crate::output::DescribeSavingsPlansOfferingsOutput::search_results): <p>Information about the Savings Plans offerings.</p> /// - [`next_token(Option<String>)`](crate::output::DescribeSavingsPlansOfferingsOutput::next_token): <p>The token to use to retrieve the next page of results. This value is null when there are no more results to return.</p> /// - On failure, responds with [`SdkError<DescribeSavingsPlansOfferingsError>`](crate::error::DescribeSavingsPlansOfferingsError) pub fn describe_savings_plans_offerings( &self, ) -> fluent_builders::DescribeSavingsPlansOfferings { fluent_builders::DescribeSavingsPlansOfferings::new(self.handle.clone()) } /// Constructs a fluent builder for the [`ListTagsForResource`](crate::client::fluent_builders::ListTagsForResource) operation. /// /// - The fluent builder is configurable: /// - [`resource_arn(impl Into<String>)`](crate::client::fluent_builders::ListTagsForResource::resource_arn) / [`set_resource_arn(Option<String>)`](crate::client::fluent_builders::ListTagsForResource::set_resource_arn): <p>The Amazon Resource Name (ARN) of the resource.</p> /// - On success, responds with [`ListTagsForResourceOutput`](crate::output::ListTagsForResourceOutput) with field(s): /// - [`tags(Option<HashMap<String, String>>)`](crate::output::ListTagsForResourceOutput::tags): <p>Information about the tags.</p> /// - On failure, responds with [`SdkError<ListTagsForResourceError>`](crate::error::ListTagsForResourceError) pub fn list_tags_for_resource(&self) -> fluent_builders::ListTagsForResource { fluent_builders::ListTagsForResource::new(self.handle.clone()) } /// Constructs a fluent builder for the [`TagResource`](crate::client::fluent_builders::TagResource) operation. /// /// - The fluent builder is configurable: /// - [`resource_arn(impl Into<String>)`](crate::client::fluent_builders::TagResource::resource_arn) / [`set_resource_arn(Option<String>)`](crate::client::fluent_builders::TagResource::set_resource_arn): <p>The Amazon Resource Name (ARN) of the resource.</p> /// - [`tags(HashMap<String, String>)`](crate::client::fluent_builders::TagResource::tags) / [`set_tags(Option<HashMap<String, String>>)`](crate::client::fluent_builders::TagResource::set_tags): <p>One or more tags. For example, { "tags": {"key1":"value1", "key2":"value2"} }.</p> /// - On success, responds with [`TagResourceOutput`](crate::output::TagResourceOutput) /// - On failure, responds with [`SdkError<TagResourceError>`](crate::error::TagResourceError) pub fn tag_resource(&self) -> fluent_builders::TagResource { fluent_builders::TagResource::new(self.handle.clone()) } /// Constructs a fluent builder for the [`UntagResource`](crate::client::fluent_builders::UntagResource) operation. 
/// /// - The fluent builder is configurable: /// - [`resource_arn(impl Into<String>)`](crate::client::fluent_builders::UntagResource::resource_arn) / [`set_resource_arn(Option<String>)`](crate::client::fluent_builders::UntagResource::set_resource_arn): <p>The Amazon Resource Name (ARN) of the resource.</p> /// - [`tag_keys(Vec<String>)`](crate::client::fluent_builders::UntagResource::tag_keys) / [`set_tag_keys(Option<Vec<String>>)`](crate::client::fluent_builders::UntagResource::set_tag_keys): <p>The tag keys.</p> /// - On success, responds with [`UntagResourceOutput`](crate::output::UntagResourceOutput) /// - On failure, responds with [`SdkError<UntagResourceError>`](crate::error::UntagResourceError) pub fn untag_resource(&self) -> fluent_builders::UntagResource { fluent_builders::UntagResource::new(self.handle.clone()) } } pub mod fluent_builders { //! //! Utilities to ergonomically construct a request to the service. //! //! Fluent builders are created through the [`Client`](crate::client::Client) by calling //! one of its operation methods. After parameters are set using the builder methods, //! the `send` method can be called to initiate the request. //! /// Fluent builder constructing a request to `CreateSavingsPlan`. /// /// <p>Creates a Savings Plan.</p> #[derive(std::clone::Clone, std::fmt::Debug)] pub struct CreateSavingsPlan { handle: std::sync::Arc<super::Handle>, inner: crate::input::create_savings_plan_input::Builder, } impl CreateSavingsPlan { /// Creates a new `CreateSavingsPlan`. pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::CreateSavingsPlanOutput, aws_smithy_http::result::SdkError<crate::error::CreateSavingsPlanError>, > { let op = self .inner .build() .map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))? .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The ID of the offering.</p> pub fn savings_plan_offering_id(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.savings_plan_offering_id(input.into()); self } /// <p>The ID of the offering.</p> pub fn set_savings_plan_offering_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_savings_plan_offering_id(input); self } /// <p>The hourly commitment, in USD. This is a value between 0.001 and 1 million. You cannot specify more than five digits after the decimal point.</p> pub fn commitment(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.commitment(input.into()); self } /// <p>The hourly commitment, in USD. This is a value between 0.001 and 1 million. You cannot specify more than five digits after the decimal point.</p> pub fn set_commitment(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_commitment(input); self } /// <p>The up-front payment amount. 
This is a whole number between 50 and 99 percent of the total value of the Savings Plan. This parameter is supported only if the payment option is <code>Partial Upfront</code>.</p> pub fn upfront_payment_amount(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.upfront_payment_amount(input.into()); self } /// <p>The up-front payment amount. This is a whole number between 50 and 99 percent of the total value of the Savings Plan. This parameter is supported only if the payment option is <code>Partial Upfront</code>.</p> pub fn set_upfront_payment_amount( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_upfront_payment_amount(input); self } /// <p>The time at which to purchase the Savings Plan, in UTC format (YYYY-MM-DDTHH:MM:SSZ).</p> pub fn purchase_time(mut self, input: aws_smithy_types::DateTime) -> Self { self.inner = self.inner.purchase_time(input); self } /// <p>The time at which to purchase the Savings Plan, in UTC format (YYYY-MM-DDTHH:MM:SSZ).</p> pub fn set_purchase_time( mut self, input: std::option::Option<aws_smithy_types::DateTime>, ) -> Self { self.inner = self.inner.set_purchase_time(input); self } /// <p>Unique, case-sensitive identifier that you provide to ensure the idempotency of the request.</p> pub fn client_token(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.client_token(input.into()); self } /// <p>Unique, case-sensitive identifier that you provide to ensure the idempotency of the request.</p> pub fn set_client_token(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_client_token(input); self } /// Adds a key-value pair to `tags`. /// /// To override the contents of this collection use [`set_tags`](Self::set_tags). /// /// <p>One or more tags.</p> pub fn tags( mut self, k: impl Into<std::string::String>, v: impl Into<std::string::String>, ) -> Self { self.inner = self.inner.tags(k.into(), v.into()); self } /// <p>One or more tags.</p> pub fn set_tags( mut self, input: std::option::Option< std::collections::HashMap<std::string::String, std::string::String>, >, ) -> Self { self.inner = self.inner.set_tags(input); self } } /// Fluent builder constructing a request to `DeleteQueuedSavingsPlan`. /// /// <p>Deletes the queued purchase for the specified Savings Plan.</p> #[derive(std::clone::Clone, std::fmt::Debug)] pub struct DeleteQueuedSavingsPlan { handle: std::sync::Arc<super::Handle>, inner: crate::input::delete_queued_savings_plan_input::Builder, } impl DeleteQueuedSavingsPlan { /// Creates a new `DeleteQueuedSavingsPlan`. pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::DeleteQueuedSavingsPlanOutput, aws_smithy_http::result::SdkError<crate::error::DeleteQueuedSavingsPlanError>, > { let op = self .inner .build() .map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))? 
.make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The ID of the Savings Plan.</p> pub fn savings_plan_id(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.savings_plan_id(input.into()); self } /// <p>The ID of the Savings Plan.</p> pub fn set_savings_plan_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_savings_plan_id(input); self } } /// Fluent builder constructing a request to `DescribeSavingsPlanRates`. /// /// <p>Describes the specified Savings Plans rates.</p> #[derive(std::clone::Clone, std::fmt::Debug)] pub struct DescribeSavingsPlanRates { handle: std::sync::Arc<super::Handle>, inner: crate::input::describe_savings_plan_rates_input::Builder, } impl DescribeSavingsPlanRates { /// Creates a new `DescribeSavingsPlanRates`. pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::DescribeSavingsPlanRatesOutput, aws_smithy_http::result::SdkError<crate::error::DescribeSavingsPlanRatesError>, > { let op = self .inner .build() .map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))? .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The ID of the Savings Plan.</p> pub fn savings_plan_id(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.savings_plan_id(input.into()); self } /// <p>The ID of the Savings Plan.</p> pub fn set_savings_plan_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_savings_plan_id(input); self } /// Appends an item to `filters`. /// /// To override the contents of this collection use [`set_filters`](Self::set_filters). /// /// <p>The filters.</p> pub fn filters(mut self, input: crate::model::SavingsPlanRateFilter) -> Self { self.inner = self.inner.filters(input); self } /// <p>The filters.</p> pub fn set_filters( mut self, input: std::option::Option<std::vec::Vec<crate::model::SavingsPlanRateFilter>>, ) -> Self { self.inner = self.inner.set_filters(input); self } /// <p>The token for the next page of results.</p> pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.next_token(input.into()); self } /// <p>The token for the next page of results.</p> pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_next_token(input); self } /// <p>The maximum number of results to return with a single call. To retrieve additional results, make another call with the returned token value.</p> pub fn max_results(mut self, input: i32) -> Self { self.inner = self.inner.max_results(input); self } /// <p>The maximum number of results to return with a single call. 
To retrieve additional results, make another call with the returned token value.</p> pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self { self.inner = self.inner.set_max_results(input); self } } /// Fluent builder constructing a request to `DescribeSavingsPlans`. /// /// <p>Describes the specified Savings Plans.</p> #[derive(std::clone::Clone, std::fmt::Debug)] pub struct DescribeSavingsPlans { handle: std::sync::Arc<super::Handle>, inner: crate::input::describe_savings_plans_input::Builder, } impl DescribeSavingsPlans { /// Creates a new `DescribeSavingsPlans`. pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::DescribeSavingsPlansOutput, aws_smithy_http::result::SdkError<crate::error::DescribeSavingsPlansError>, > { let op = self .inner .build() .map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))? .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// Appends an item to `savingsPlanArns`. /// /// To override the contents of this collection use [`set_savings_plan_arns`](Self::set_savings_plan_arns). /// /// <p>The Amazon Resource Names (ARN) of the Savings Plans.</p> pub fn savings_plan_arns(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.savings_plan_arns(input.into()); self } /// <p>The Amazon Resource Names (ARN) of the Savings Plans.</p> pub fn set_savings_plan_arns( mut self, input: std::option::Option<std::vec::Vec<std::string::String>>, ) -> Self { self.inner = self.inner.set_savings_plan_arns(input); self } /// Appends an item to `savingsPlanIds`. /// /// To override the contents of this collection use [`set_savings_plan_ids`](Self::set_savings_plan_ids). /// /// <p>The IDs of the Savings Plans.</p> pub fn savings_plan_ids(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.savings_plan_ids(input.into()); self } /// <p>The IDs of the Savings Plans.</p> pub fn set_savings_plan_ids( mut self, input: std::option::Option<std::vec::Vec<std::string::String>>, ) -> Self { self.inner = self.inner.set_savings_plan_ids(input); self } /// <p>The token for the next page of results.</p> pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.next_token(input.into()); self } /// <p>The token for the next page of results.</p> pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_next_token(input); self } /// <p>The maximum number of results to return with a single call. To retrieve additional results, make another call with the returned token value.</p> pub fn max_results(mut self, input: i32) -> Self { self.inner = self.inner.max_results(input); self } /// <p>The maximum number of results to return with a single call. 
To retrieve additional results, make another call with the returned token value.</p> pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self { self.inner = self.inner.set_max_results(input); self } /// Appends an item to `states`. /// /// To override the contents of this collection use [`set_states`](Self::set_states). /// /// <p>The states.</p> pub fn states(mut self, input: crate::model::SavingsPlanState) -> Self { self.inner = self.inner.states(input); self } /// <p>The states.</p> pub fn set_states( mut self, input: std::option::Option<std::vec::Vec<crate::model::SavingsPlanState>>, ) -> Self { self.inner = self.inner.set_states(input); self } /// Appends an item to `filters`. /// /// To override the contents of this collection use [`set_filters`](Self::set_filters). /// /// <p>The filters.</p> pub fn filters(mut self, input: crate::model::SavingsPlanFilter) -> Self { self.inner = self.inner.filters(input); self } /// <p>The filters.</p> pub fn set_filters( mut self, input: std::option::Option<std::vec::Vec<crate::model::SavingsPlanFilter>>, ) -> Self { self.inner = self.inner.set_filters(input); self } } /// Fluent builder constructing a request to `DescribeSavingsPlansOfferingRates`. /// /// <p>Describes the specified Savings Plans offering rates.</p> #[derive(std::clone::Clone, std::fmt::Debug)] pub struct DescribeSavingsPlansOfferingRates { handle: std::sync::Arc<super::Handle>, inner: crate::input::describe_savings_plans_offering_rates_input::Builder, } impl DescribeSavingsPlansOfferingRates { /// Creates a new `DescribeSavingsPlansOfferingRates`. pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::DescribeSavingsPlansOfferingRatesOutput, aws_smithy_http::result::SdkError<crate::error::DescribeSavingsPlansOfferingRatesError>, > { let op = self .inner .build() .map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))? .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// Appends an item to `savingsPlanOfferingIds`. /// /// To override the contents of this collection use [`set_savings_plan_offering_ids`](Self::set_savings_plan_offering_ids). /// /// <p>The IDs of the offerings.</p> pub fn savings_plan_offering_ids(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.savings_plan_offering_ids(input.into()); self } /// <p>The IDs of the offerings.</p> pub fn set_savings_plan_offering_ids( mut self, input: std::option::Option<std::vec::Vec<std::string::String>>, ) -> Self { self.inner = self.inner.set_savings_plan_offering_ids(input); self } /// Appends an item to `savingsPlanPaymentOptions`. /// /// To override the contents of this collection use [`set_savings_plan_payment_options`](Self::set_savings_plan_payment_options). 
/// /// <p>The payment options.</p> pub fn savings_plan_payment_options( mut self, input: crate::model::SavingsPlanPaymentOption, ) -> Self { self.inner = self.inner.savings_plan_payment_options(input); self } /// <p>The payment options.</p> pub fn set_savings_plan_payment_options( mut self, input: std::option::Option<std::vec::Vec<crate::model::SavingsPlanPaymentOption>>, ) -> Self { self.inner = self.inner.set_savings_plan_payment_options(input); self } /// Appends an item to `savingsPlanTypes`. /// /// To override the contents of this collection use [`set_savings_plan_types`](Self::set_savings_plan_types). /// /// <p>The plan types.</p> pub fn savings_plan_types(mut self, input: crate::model::SavingsPlanType) -> Self { self.inner = self.inner.savings_plan_types(input); self } /// <p>The plan types.</p> pub fn set_savings_plan_types( mut self, input: std::option::Option<std::vec::Vec<crate::model::SavingsPlanType>>, ) -> Self { self.inner = self.inner.set_savings_plan_types(input); self } /// Appends an item to `products`. /// /// To override the contents of this collection use [`set_products`](Self::set_products). /// /// <p>The AWS products.</p> pub fn products(mut self, input: crate::model::SavingsPlanProductType) -> Self { self.inner = self.inner.products(input); self } /// <p>The AWS products.</p> pub fn set_products( mut self, input: std::option::Option<std::vec::Vec<crate::model::SavingsPlanProductType>>, ) -> Self { self.inner = self.inner.set_products(input); self } /// Appends an item to `serviceCodes`. /// /// To override the contents of this collection use [`set_service_codes`](Self::set_service_codes). /// /// <p>The services.</p> pub fn service_codes(mut self, input: crate::model::SavingsPlanRateServiceCode) -> Self { self.inner = self.inner.service_codes(input); self } /// <p>The services.</p> pub fn set_service_codes( mut self, input: std::option::Option<std::vec::Vec<crate::model::SavingsPlanRateServiceCode>>, ) -> Self { self.inner = self.inner.set_service_codes(input); self } /// Appends an item to `usageTypes`. /// /// To override the contents of this collection use [`set_usage_types`](Self::set_usage_types). /// /// <p>The usage details of the line item in the billing report.</p> pub fn usage_types(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.usage_types(input.into()); self } /// <p>The usage details of the line item in the billing report.</p> pub fn set_usage_types( mut self, input: std::option::Option<std::vec::Vec<std::string::String>>, ) -> Self { self.inner = self.inner.set_usage_types(input); self } /// Appends an item to `operations`. /// /// To override the contents of this collection use [`set_operations`](Self::set_operations). /// /// <p>The specific AWS operation for the line item in the billing report.</p> pub fn operations(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.operations(input.into()); self } /// <p>The specific AWS operation for the line item in the billing report.</p> pub fn set_operations( mut self, input: std::option::Option<std::vec::Vec<std::string::String>>, ) -> Self { self.inner = self.inner.set_operations(input); self } /// Appends an item to `filters`. /// /// To override the contents of this collection use [`set_filters`](Self::set_filters). 
/// /// <p>The filters.</p> pub fn filters( mut self, input: crate::model::SavingsPlanOfferingRateFilterElement, ) -> Self { self.inner = self.inner.filters(input); self } /// <p>The filters.</p> pub fn set_filters( mut self, input: std::option::Option< std::vec::Vec<crate::model::SavingsPlanOfferingRateFilterElement>, >, ) -> Self { self.inner = self.inner.set_filters(input); self } /// <p>The token for the next page of results.</p> pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.next_token(input.into()); self } /// <p>The token for the next page of results.</p> pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_next_token(input); self } /// <p>The maximum number of results to return with a single call. To retrieve additional results, make another call with the returned token value.</p> pub fn max_results(mut self, input: i32) -> Self { self.inner = self.inner.max_results(input); self } /// <p>The maximum number of results to return with a single call. To retrieve additional results, make another call with the returned token value.</p> pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self { self.inner = self.inner.set_max_results(input); self } } /// Fluent builder constructing a request to `DescribeSavingsPlansOfferings`. /// /// <p>Describes the specified Savings Plans offerings.</p> #[derive(std::clone::Clone, std::fmt::Debug)] pub struct DescribeSavingsPlansOfferings { handle: std::sync::Arc<super::Handle>, inner: crate::input::describe_savings_plans_offerings_input::Builder, } impl DescribeSavingsPlansOfferings { /// Creates a new `DescribeSavingsPlansOfferings`. pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::DescribeSavingsPlansOfferingsOutput, aws_smithy_http::result::SdkError<crate::error::DescribeSavingsPlansOfferingsError>, > { let op = self .inner .build() .map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))? .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// Appends an item to `offeringIds`. /// /// To override the contents of this collection use [`set_offering_ids`](Self::set_offering_ids). /// /// <p>The IDs of the offerings.</p> pub fn offering_ids(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.offering_ids(input.into()); self } /// <p>The IDs of the offerings.</p> pub fn set_offering_ids( mut self, input: std::option::Option<std::vec::Vec<std::string::String>>, ) -> Self { self.inner = self.inner.set_offering_ids(input); self } /// Appends an item to `paymentOptions`. /// /// To override the contents of this collection use [`set_payment_options`](Self::set_payment_options). 
/// /// <p>The payment options.</p> pub fn payment_options(mut self, input: crate::model::SavingsPlanPaymentOption) -> Self { self.inner = self.inner.payment_options(input); self } /// <p>The payment options.</p> pub fn set_payment_options( mut self, input: std::option::Option<std::vec::Vec<crate::model::SavingsPlanPaymentOption>>, ) -> Self { self.inner = self.inner.set_payment_options(input); self } /// <p>The product type.</p> pub fn product_type(mut self, input: crate::model::SavingsPlanProductType) -> Self { self.inner = self.inner.product_type(input); self } /// <p>The product type.</p> pub fn set_product_type( mut self, input: std::option::Option<crate::model::SavingsPlanProductType>, ) -> Self { self.inner = self.inner.set_product_type(input); self } /// Appends an item to `planTypes`. /// /// To override the contents of this collection use [`set_plan_types`](Self::set_plan_types). /// /// <p>The plan type.</p> pub fn plan_types(mut self, input: crate::model::SavingsPlanType) -> Self { self.inner = self.inner.plan_types(input); self } /// <p>The plan type.</p> pub fn set_plan_types( mut self, input: std::option::Option<std::vec::Vec<crate::model::SavingsPlanType>>, ) -> Self { self.inner = self.inner.set_plan_types(input); self } /// Appends an item to `durations`. /// /// To override the contents of this collection use [`set_durations`](Self::set_durations). /// /// <p>The durations, in seconds.</p> pub fn durations(mut self, input: i64) -> Self { self.inner = self.inner.durations(input); self } /// <p>The durations, in seconds.</p> pub fn set_durations(mut self, input: std::option::Option<std::vec::Vec<i64>>) -> Self { self.inner = self.inner.set_durations(input); self } /// Appends an item to `currencies`. /// /// To override the contents of this collection use [`set_currencies`](Self::set_currencies). /// /// <p>The currencies.</p> pub fn currencies(mut self, input: crate::model::CurrencyCode) -> Self { self.inner = self.inner.currencies(input); self } /// <p>The currencies.</p> pub fn set_currencies( mut self, input: std::option::Option<std::vec::Vec<crate::model::CurrencyCode>>, ) -> Self { self.inner = self.inner.set_currencies(input); self } /// Appends an item to `descriptions`. /// /// To override the contents of this collection use [`set_descriptions`](Self::set_descriptions). /// /// <p>The descriptions.</p> pub fn descriptions(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.descriptions(input.into()); self } /// <p>The descriptions.</p> pub fn set_descriptions( mut self, input: std::option::Option<std::vec::Vec<std::string::String>>, ) -> Self { self.inner = self.inner.set_descriptions(input); self } /// Appends an item to `serviceCodes`. /// /// To override the contents of this collection use [`set_service_codes`](Self::set_service_codes). /// /// <p>The services.</p> pub fn service_codes(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.service_codes(input.into()); self } /// <p>The services.</p> pub fn set_service_codes( mut self, input: std::option::Option<std::vec::Vec<std::string::String>>, ) -> Self { self.inner = self.inner.set_service_codes(input); self } /// Appends an item to `usageTypes`. /// /// To override the contents of this collection use [`set_usage_types`](Self::set_usage_types). 
/// /// <p>The usage details of the line item in the billing report.</p> pub fn usage_types(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.usage_types(input.into()); self } /// <p>The usage details of the line item in the billing report.</p> pub fn set_usage_types( mut self, input: std::option::Option<std::vec::Vec<std::string::String>>, ) -> Self { self.inner = self.inner.set_usage_types(input); self } /// Appends an item to `operations`. /// /// To override the contents of this collection use [`set_operations`](Self::set_operations). /// /// <p>The specific AWS operation for the line item in the billing report.</p> pub fn operations(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.operations(input.into()); self } /// <p>The specific AWS operation for the line item in the billing report.</p> pub fn set_operations( mut self, input: std::option::Option<std::vec::Vec<std::string::String>>, ) -> Self { self.inner = self.inner.set_operations(input); self } /// Appends an item to `filters`. /// /// To override the contents of this collection use [`set_filters`](Self::set_filters). /// /// <p>The filters.</p> pub fn filters(mut self, input: crate::model::SavingsPlanOfferingFilterElement) -> Self { self.inner = self.inner.filters(input); self } /// <p>The filters.</p> pub fn set_filters( mut self, input: std::option::Option< std::vec::Vec<crate::model::SavingsPlanOfferingFilterElement>, >, ) -> Self { self.inner = self.inner.set_filters(input); self } /// <p>The token for the next page of results.</p> pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.next_token(input.into()); self } /// <p>The token for the next page of results.</p> pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_next_token(input); self } /// <p>The maximum number of results to return with a single call. To retrieve additional results, make another call with the returned token value.</p> pub fn max_results(mut self, input: i32) -> Self { self.inner = self.inner.max_results(input); self } /// <p>The maximum number of results to return with a single call. To retrieve additional results, make another call with the returned token value.</p> pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self { self.inner = self.inner.set_max_results(input); self } } /// Fluent builder constructing a request to `ListTagsForResource`. /// /// <p>Lists the tags for the specified resource.</p> #[derive(std::clone::Clone, std::fmt::Debug)] pub struct ListTagsForResource { handle: std::sync::Arc<super::Handle>, inner: crate::input::list_tags_for_resource_input::Builder, } impl ListTagsForResource { /// Creates a new `ListTagsForResource`. pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::ListTagsForResourceOutput, aws_smithy_http::result::SdkError<crate::error::ListTagsForResourceError>, > { let op = self .inner .build() .map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))? .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The Amazon Resource Name (ARN) of the resource.</p> pub fn resource_arn(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.resource_arn(input.into()); self } /// <p>The Amazon Resource Name (ARN) of the resource.</p> pub fn set_resource_arn(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_resource_arn(input); self } } /// Fluent builder constructing a request to `TagResource`. /// /// <p>Adds the specified tags to the specified resource.</p> #[derive(std::clone::Clone, std::fmt::Debug)] pub struct TagResource { handle: std::sync::Arc<super::Handle>, inner: crate::input::tag_resource_input::Builder, } impl TagResource { /// Creates a new `TagResource`. pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::TagResourceOutput, aws_smithy_http::result::SdkError<crate::error::TagResourceError>, > { let op = self .inner .build() .map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))? .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The Amazon Resource Name (ARN) of the resource.</p> pub fn resource_arn(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.resource_arn(input.into()); self } /// <p>The Amazon Resource Name (ARN) of the resource.</p> pub fn set_resource_arn(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_resource_arn(input); self } /// Adds a key-value pair to `tags`. /// /// To override the contents of this collection use [`set_tags`](Self::set_tags). /// /// <p>One or more tags. For example, { "tags": {"key1":"value1", "key2":"value2"} }.</p> pub fn tags( mut self, k: impl Into<std::string::String>, v: impl Into<std::string::String>, ) -> Self { self.inner = self.inner.tags(k.into(), v.into()); self } /// <p>One or more tags. For example, { "tags": {"key1":"value1", "key2":"value2"} }.</p> pub fn set_tags( mut self, input: std::option::Option< std::collections::HashMap<std::string::String, std::string::String>, >, ) -> Self { self.inner = self.inner.set_tags(input); self } } /// Fluent builder constructing a request to `UntagResource`. /// /// <p>Removes the specified tags from the specified resource.</p> #[derive(std::clone::Clone, std::fmt::Debug)] pub struct UntagResource { handle: std::sync::Arc<super::Handle>, inner: crate::input::untag_resource_input::Builder, } impl UntagResource { /// Creates a new `UntagResource`. 
pub(crate) fn
(handle: std::sync::Arc<super::Handle>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::UntagResourceOutput, aws_smithy_http::result::SdkError<crate::error::UntagResourceError>, > { let op = self .inner .build() .map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))? .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The Amazon Resource Name (ARN) of the resource.</p> pub fn resource_arn(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.resource_arn(input.into()); self } /// <p>The Amazon Resource Name (ARN) of the resource.</p> pub fn set_resource_arn(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_resource_arn(input); self } /// Appends an item to `tagKeys`. /// /// To override the contents of this collection use [`set_tag_keys`](Self::set_tag_keys). /// /// <p>The tag keys.</p> pub fn tag_keys(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.tag_keys(input.into()); self } /// <p>The tag keys.</p> pub fn set_tag_keys( mut self, input: std::option::Option<std::vec::Vec<std::string::String>>, ) -> Self { self.inner = self.inner.set_tag_keys(input); self } } } impl Client { /// Creates a client with the given service config and connector override. pub fn from_conf_conn<C, E>(conf: crate::Config, conn: C) -> Self where C: aws_smithy_client::bounds::SmithyConnector<Error = E> + Send + 'static, E: Into<aws_smithy_http::result::ConnectorError>, { let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default(); let timeout_config = conf.timeout_config.as_ref().cloned().unwrap_or_default(); let sleep_impl = conf.sleep_impl.clone(); let mut builder = aws_smithy_client::Builder::new() .connector(aws_smithy_client::erase::DynConnector::new(conn)) .middleware(aws_smithy_client::erase::DynMiddleware::new( crate::middleware::DefaultMiddleware::new(), )); builder.set_retry_config(retry_config.into()); builder.set_timeout_config(timeout_config); if let Some(sleep_impl) = sleep_impl { builder.set_sleep_impl(Some(sleep_impl)); } let client = builder.build(); Self { handle: std::sync::Arc::new(Handle { client, conf }), } } /// Creates a new client from a shared config. #[cfg(any(feature = "rustls", feature = "native-tls"))] pub fn new(sdk_config: &aws_types::sdk_config::SdkConfig) -> Self { Self::from_conf(sdk_config.into()) } /// Creates a new client from the service [`Config`](crate::Config). 
#[cfg(any(feature = "rustls", feature = "native-tls"))] pub fn from_conf(conf: crate::Config) -> Self { let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default(); let timeout_config = conf.timeout_config.as_ref().cloned().unwrap_or_default(); let sleep_impl = conf.sleep_impl.clone(); let mut builder = aws_smithy_client::Builder::dyn_https().middleware( aws_smithy_client::erase::DynMiddleware::new( crate::middleware::DefaultMiddleware::new(), ), ); builder.set_retry_config(retry_config.into()); builder.set_timeout_config(timeout_config); // the builder maintains a try-state. To avoid suppressing the warning when sleep is unset, // only set it if we actually have a sleep impl. if let Some(sleep_impl) = sleep_impl { builder.set_sleep_impl(Some(sleep_impl)); } let client = builder.build(); Self { handle: std::sync::Arc::new(Handle { client, conf }), } } }
new
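// --- Editor addition: hedged usage sketch, not part of the generated file ---
// Shows how the fluent builders above are typically driven from a consumer
// crate. `Client::new(&sdk_config)` is defined above; `client.tag_resource()`
// follows the usual smithy-generated accessor convention (an assumption here),
// and the ARN and tag values are illustrative placeholders.
async fn tag_savings_plan_example(client: &Client) -> Result<(), Box<dyn std::error::Error>> {
    client
        .tag_resource()
        .resource_arn("arn:aws:savingsplans::123456789012:savingsplan/example") // placeholder ARN
        .tags("team", "platform") // appends one key-value pair to `tags`
        .send()
        .await?;
    Ok(())
}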
lab2_1_1_7.py
print("Hello, Python!") print("Zoltan") #print(Zoltan) #print "Zoltan"
on the tree ''' )
print('Zoltan') print(''' Alma
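# Editor note (hedged): the commented-out lines in this lesson fail for
# different reasons in Python 3. print(Zoltan) raises NameError because
# Zoltan is an unquoted, undefined name, and print "Zoltan" is a SyntaxError
# because print is a function, not a statement, in Python 3. Triple-quoted
# strings like the one above may span multiple lines:
print('''Alma
on the tree''')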
polling.rs
use async_trait::async_trait; use bytes::{BufMut, Bytes, BytesMut}; use futures_util::{stream::once, FutureExt, Stream, StreamExt, TryFutureExt, TryStreamExt}; use http::HeaderMap; use native_tls::TlsConnector; use reqwest::{Client, ClientBuilder}; use std::{pin::Pin, sync::Arc}; use tokio::sync::RwLock; use url::Url; use crate::{asynchronous::transport::AsyncTransport, error::Result, Error}; /// An asynchronous polling type. Makes use of the nonblocking reqwest types and /// methods. #[derive(Clone, Debug)] pub struct PollingTransport { client: Client, base_url: Arc<RwLock<Url>>, } impl PollingTransport { pub fn new( base_url: Url, tls_config: Option<TlsConnector>, opening_headers: Option<HeaderMap>, ) -> Self { let client = match (tls_config, opening_headers) { (Some(config), Some(map)) => ClientBuilder::new() .use_preconfigured_tls(config) .default_headers(map) .build() .unwrap(), (Some(config), None) => ClientBuilder::new() .use_preconfigured_tls(config) .build() .unwrap(), (None, Some(map)) => ClientBuilder::new().default_headers(map).build().unwrap(), (None, None) => Client::new(), }; let mut url = base_url; url.query_pairs_mut().append_pair("transport", "polling"); PollingTransport { client, base_url: Arc::new(RwLock::new(url)), } } } #[async_trait] impl AsyncTransport for PollingTransport { async fn emit(&self, data: Bytes, is_binary_att: bool) -> Result<()> { let data_to_send = if is_binary_att { // the binary attachment gets `base64` encoded let mut packet_bytes = BytesMut::with_capacity(data.len() + 1); packet_bytes.put_u8(b'b'); let encoded_data = base64::encode(data); packet_bytes.put(encoded_data.as_bytes()); packet_bytes.freeze() } else { data }; let status = self .client .post(self.address().await?) .body(data_to_send) .send() .await? .status() .as_u16(); if status != 200 { let error = Error::IncompleteHttp(status); return Err(error); } Ok(()) } fn stream(&self) -> Result<Pin<Box<dyn Stream<Item = Result<Bytes>> + '_>>> { let stream = self .address() .into_stream() .map(|address| match address { Ok(addr) => self .client .get(addr) .send() .map_err(Error::IncompleteResponseFromReqwest) .left_future(), Err(err) => async { Err(err) }.right_future(), }) .then(|resp| async { match resp.await { Ok(val) => val .bytes_stream() .map_err(Error::IncompleteResponseFromReqwest) .left_stream(), Err(err) => once(async { Err(err) }).right_stream(), } }) .flatten(); Ok(Box::pin(stream)) } async fn base_url(&self) -> Result<Url> { Ok(self.base_url.read().await.clone()) } async fn set_base_url(&self, base_url: Url) -> Result<()> { let mut url = base_url; if !url .query_pairs() .any(|(k, v)| k == "transport" && v == "polling") { url.query_pairs_mut().append_pair("transport", "polling"); } *self.base_url.write().await = url; Ok(()) } } #[cfg(test)] mod test {
use crate::asynchronous::transport::AsyncTransport; use super::*; use std::str::FromStr; #[tokio::test] async fn polling_transport_base_url() -> Result<()> { let url = crate::test::engine_io_server()?.to_string(); let transport = PollingTransport::new(Url::from_str(&url[..]).unwrap(), None, None); assert_eq!( transport.base_url().await?.to_string(), url.clone() + "?transport=polling" ); transport .set_base_url(Url::parse("https://127.0.0.1")?) .await?; assert_eq!( transport.base_url().await?.to_string(), "https://127.0.0.1/?transport=polling" ); assert_ne!(transport.base_url().await?.to_string(), url); transport .set_base_url(Url::parse("http://127.0.0.1/?transport=polling")?) .await?; assert_eq!( transport.base_url().await?.to_string(), "http://127.0.0.1/?transport=polling" ); assert_ne!(transport.base_url().await?.to_string(), url); Ok(()) } }
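// --- Editor addition: hedged usage sketch ---
// Builds the transport against a placeholder engine.io endpoint and emits a
// plain-text packet; `new` and `emit` are defined above. Passing `true` as
// the second `emit` argument would take the `b`-prefixed base64 path instead.
async fn polling_example() -> Result<()> {
    let url = Url::parse("http://localhost:4201/engine.io/").unwrap(); // placeholder address
    let transport = PollingTransport::new(url, None, None);
    transport.emit(Bytes::from("4hello"), false).await?; // "4" is the engine.io message type
    Ok(())
}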
cube-controller.ts
import { PositionedCube } from "./positioned-cube"; type Pos = { x: number; y: number; angle: number; }; const calculateDistance = (pos1: Pos, pos2: Pos) => { const diffX = pos1.x - pos2.x; const diffY = pos1.y - pos2.y; return Math.sqrt(diffX * diffX + diffY * diffY); }; const calculateSpeed = (pos1: Pos, pos2: Pos, isLargeTurn: boolean) => { const diffX = pos1.x - pos2.x; const diffY = pos1.y - pos2.y; const distance = Math.sqrt(diffX * diffX + diffY * diffY); if (distance < 50) { return { left: 0, right: 0 }; // stop } let relAngle = (Math.atan2(diffY, diffX) * 180) / Math.PI - pos2.angle; relAngle = relAngle % 360; if (relAngle < -180) { relAngle += 360; } else if (relAngle > 180) { relAngle -= 360; } if (isLargeTurn !== undefined && isLargeTurn) { relAngle /= 2; } let ratio = 1 - Math.abs(relAngle) / 90; let speed = 80; if (relAngle > 0) { return { left: speed, right: speed * ratio }; } else { return { left: speed * ratio, right: speed }; } }; export class
{ public cubes: { [key: string]: PositionedCube } = {}; public cubesOnMat: string[] = []; private timer?: NodeJS.Timeout; start = () => { this.timer = setInterval(this.loop, 50); }; stop = () => { this.timer && clearInterval(this.timer); }; setCubes = (cubes: PositionedCube[]) => { cubes.forEach((cube) => { if (this.cubes[cube.cube.id] === undefined) { this.cubes[cube.cube.id] = cube; cube.on("onMat", () => this.onMat(cube)); cube.on("offMat", () => this.offMat(cube)); } }); }; private loop = () => { if (this.cubesOnMat.length < 2) { return; } this.cubesOnMat.reduce((acc, cur) => { const accPos = this.cubes[acc].position; const curPos = this.cubes[cur].position; if (accPos === null || curPos === null) { return cur; } const speed = calculateSpeed(accPos, curPos, true); this.cubes[cur].cube.move({ ...speed, duration: 100 }); return cur; }, this.cubesOnMat[this.cubesOnMat.length - 1]); }; private onMat = (cube: PositionedCube) => { if (this.cubesOnMat.includes(cube.cube.id)) { return; } const nearest = this.cubesOnMat.reduce<{ index: number; distance: number; }>( (acc, cur, index) => { const curPos = this.cubes[cur].position; if (curPos === null || cube.position === null) { return acc; } const distance = calculateDistance(curPos, cube.position); return acc.distance > distance ? { index: index, distance: distance } : acc; }, { index: -1, distance: 5000 /* large enough */ } ); this.cubesOnMat.splice(nearest.index + 1, 0, cube.cube.id); }; private offMat = (cube: PositionedCube) => { this.cubesOnMat = this.cubesOnMat.filter((id) => { return id !== cube.cube.id; }); }; }
CubeController
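// --- Editor addition: hedged worked example ---
// Illustrates the differential-drive math in calculateSpeed (module-private
// above, so this only runs inside the same file). With the follower at the
// origin facing 0 degrees and the leader 200 units straight ahead, relAngle
// is 0, ratio is 1 and both wheels run at 80; at relAngle = 45 degrees the
// inner wheel drops to 40 so the cube arcs toward the leader, and inside 50
// units both wheels stop.
const straightAhead = calculateSpeed(
  { x: 200, y: 0, angle: 0 }, // leader position
  { x: 0, y: 0, angle: 0 },   // follower position
  false
);
// straightAhead => { left: 80, right: 80 }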
test_open_application.py
from pytest_bdd import scenarios, when, then, parsers import ui_automation_tests.shared.tools.helpers as utils from ui_automation_tests.pages.generic_application.task_list import TaskListPage from ui_automation_tests.pages.open_application.country_contract_types import OpenApplicationCountryContractTypes from ui_automation_tests.pages.open_application.country_contract_types_summary import ( OpenApplicationCountryContractTypesSummaryPage, ) from ui_automation_tests.pages.exporter_hub_page import ExporterHubPage from ui_automation_tests.pages.generic_application.ultimate_end_users import GenericApplicationUltimateEndUsers from ui_automation_tests.shared import functions from ui_automation_tests.conftest import ( enter_type_of_application, enter_application_name, enter_permanent_or_temporary, choose_open_licence_category, answer_firearms_question, ) from ui_automation_tests.pages.apply_for_a_licence_page import ApplyForALicencePage from ui_automation_tests.pages.open_application.countries import OpenApplicationCountriesPage from ui_automation_tests.pages.open_application.goods_countries_page import GoodsCountriesPage from ui_automation_tests.pages.open_application.goods_types import OpenApplicationGoodsTypesPage from ui_automation_tests.pages.standard_application.goods import StandardApplicationGoodsPage scenarios( "../features/submit_open_application.feature", "../features/edit_open_application.feature", strict_gherkin=False ) @then(parsers.parse('I see my goods type added at position "{position}" with a description and a control code')) def i_see_the_goods_types_list(driver, position, context): goods_type_page = OpenApplicationGoodsTypesPage(driver) good_type = goods_type_page.get_text_of_goods_type_info(int(position)) assert context.good_description in good_type assert context.control_code in good_type @then(parsers.parse("I see a list of the preselected media products")) def i_see_the_goods_types_list_media_oiel(driver, context): goods_type_page = OpenApplicationGoodsTypesPage(driver) goods_types = goods_type_page.get_number_of_goods() assert len(goods_types) == 7 @then(parsers.parse("I see a list of the preselected cryptographic products")) def i_see_the_goods_types_list_cryptographic_oiel(driver, context): goods_type_page = OpenApplicationGoodsTypesPage(driver) goods_types = goods_type_page.get_number_of_goods() assert len(goods_types) == 4 @then("I should see a list of countries") def i_should_see_a_list_of_countries(driver): application_countries_list = OpenApplicationCountriesPage(driver) page_countries = application_countries_list.get_countries_names() assert len(page_countries) == 273 assert "United Kingdom" not in page_countries @then("I should see a list of all countries that have been preselected") def i_should_see_a_list_of_preselected_countries(driver): application_countries_list = OpenApplicationCountriesPage(driver) page_countries = application_countries_list.get_static_destinations_list() assert len(page_countries) == 273 assert "United Kingdom" not in page_countries @then("I should see a list of the countries permitted for a cryptographic OIEL") def i_should_see_a_list_of_countries_cryptographic_oiel(driver): application_countries_list = OpenApplicationCountriesPage(driver) page_countries = application_countries_list.get_static_destinations_list() assert len(page_countries) == 213 assert "United Kingdom" not in page_countries @then("I should see the UK Continental Shelf as the only permitted destination") def i_should_see_a_list_of_countries_uk_continental_shelf_oiel(driver):
application_countries_list = OpenApplicationCountriesPage(driver) page_countries = application_countries_list.get_static_destinations_list() assert len(page_countries) == 1 assert page_countries[0] == "UK Continental Shelf" @when(parsers.parse('I select "{country}" from the country list')) def i_select_country_from_the_country_list(driver, country): application_countries_list = OpenApplicationCountriesPage(driver) application_countries_list.select_country(country) assert utils.find_element_by_href(driver, "#" + country).is_displayed() @when(parsers.parse('I search for country "{country}"')) def search_for_country(driver, country): OpenApplicationCountriesPage(driver).search_for_country(country) @then(parsers.parse('only "{country}" is displayed in country list')) def search_country_result(driver, country): assert ( country == OpenApplicationCountriesPage(driver).get_text_of_countries_list() ), "Country not searched correctly" @when("I click select all countries") def select_all_countries(driver): page = OpenApplicationCountriesPage(driver) page.click_select_all()
@then("all checkboxes are selected") def all_selected(driver): page = OpenApplicationCountriesPage(driver) assert page.get_number_of_checkboxes(checked=False) == page.get_number_of_checkboxes(checked=True) @when("I select that I want to add the same sectors and contract types to all countries") def select_yes_to_all_countries_with_the_same_contract_types(driver): OpenApplicationCountryContractTypes(driver).select_same_contract_types_for_all_countries_radio_button() @when("I select contract types for all countries") def select_contract_types_for_all_countries(driver, context): page = OpenApplicationCountryContractTypes(driver) context.contract_types = [ {"id": "Navy", "value": "Navy"}, { "id": "Aircraft-manufacturers,-maintainers-or-operators", "value": "Aircraft manufacturers, maintainers or operators", }, {"id": "Pharmaceutical-or-medical", "value": "Pharmaceutical or medical"}, ] page.select_contract_type(context.contract_types[0]["id"]) page.select_contract_type(context.contract_types[1]["id"]) page.select_contract_type(context.contract_types[2]["id"]) page.select_other_contract_type_and_fill_in_details() functions.click_submit(driver) @then("I should see all countries and the chosen contract types on the destination summary list") def i_should_see_destinations_summary_countries_contract_types(driver, context): page = OpenApplicationCountryContractTypesSummaryPage(driver) countries_and_contract_types = page.get_countries_with_respective_contract_types() assert len(countries_and_contract_types) == 273 assert "United Kingdom" not in countries_and_contract_types for country_with_contract_types in countries_and_contract_types: for contract_type in context.contract_types: assert contract_type["value"] in country_with_contract_types[1] @then( "I should see the UK Continental Shelf as the only destination and the chosen contract types on the destination summary list" ) def i_should_see_destinations_summary_uk_continental_shelf_contract_types(driver, context): page = OpenApplicationCountryContractTypesSummaryPage(driver) countries_and_contract_types = page.get_countries_with_respective_contract_types() assert len(countries_and_contract_types) == 1 assert countries_and_contract_types[0][0] == "UK Continental Shelf" for country_with_contract_types in countries_and_contract_types: for contract_type in context.contract_types: assert contract_type["value"] in country_with_contract_types[1] @when(parsers.parse('I "{assign_or_unassign}" all countries to all goods with link')) def assign_all_with_link(driver, assign_or_unassign): countries_page = GoodsCountriesPage(driver) if assign_or_unassign == "assign": countries_page.select_all_link() countries_page.click_save() else: countries_page.deselect_all_link() @when("I click Add goods type button") def click_goods_type_button(driver): OpenApplicationGoodsTypesPage(driver).click_add_good_button() @then(parsers.parse('I see all countries are "{assigned_or_unassigned}" to all goods')) def see_all_or_no_selected(driver, assigned_or_unassigned): countries_page = GoodsCountriesPage(driver) if assigned_or_unassigned == "assigned": assert countries_page.all_selected() else: assert countries_page.all_deselected() @when(parsers.parse('I create an open application of a "{export_type}" export type')) # noqa def create_open_app(driver, export_type, context): # noqa ExporterHubPage(driver).click_apply_for_a_licence() ApplyForALicencePage(driver).select_licence_type("export_licence") functions.click_submit(driver) enter_type_of_application(driver, "oiel", context) 
choose_open_licence_category(driver, "military", context) enter_permanent_or_temporary(driver, export_type, context) enter_application_name(driver, context) answer_firearms_question(driver) @when(parsers.parse('I create an open application for an export licence of the "{licence_type}" licence type')) # noqa def create_open_app_of_specific_type(driver, licence_type, context): # noqa ExporterHubPage(driver).click_apply_for_a_licence() ApplyForALicencePage(driver).select_licence_type("export_licence") functions.click_submit(driver) enter_type_of_application(driver, "oiel", context) choose_open_licence_category(driver, licence_type, context) if licence_type in ["military", "uk_continental_shelf"]: enter_permanent_or_temporary(driver, "permanent", context) enter_application_name(driver, context) if licence_type in ["military", "uk_continental_shelf"]: answer_firearms_question(driver) @when("I click on the add button") def i_click_on_the_add_button(driver): GenericApplicationUltimateEndUsers(driver).click_add_ultimate_recipient_button() @when("I remove a good type from the application") def i_remove_a_good_from_the_application(driver): remove_good_link = StandardApplicationGoodsPage(driver).find_remove_goods_type_link() driver.execute_script("arguments[0].click();", remove_good_link) @then("no goods types are left on the application") def no_goods_types_are_left_on_the_application(driver): assert OpenApplicationGoodsTypesPage(driver).find_remove_goods_type_link() is None @then(parsers.parse('I cannot see the sections "{sections}"')) # noqa def sections_did_not_appear_on_task_list(driver, sections): # noqa sections = sections.split(", ") for section in sections: assert TaskListPage(driver).get_section(section) is None
store.ts
import wallpapers from "./data/wallpapers" interface AppState { [key: string]: any } let appState: AppState = { darktheme: false, transparency: false, backgroundImage: wallpapers[15].path, volume: 100, brightness: 100, nightlight: false } try { const data = localStorage.getItem('preferences') if (data) appState = JSON.parse(data) } catch (e) { /* ignore missing or corrupt saved preferences and keep the defaults */ } export function getState(keys: string | Array<string>): AppState { if (typeof keys === 'string') { return { [keys]: appState[keys] } } if (keys instanceof Array) { const data: AppState = {} for (let index = 0; index < keys.length; index++) { const key = keys[index] data[key] = appState[key] }
return data } return appState } export function getStates() { return appState } export function setState(state: AppState): void { appState = { ...appState, ...state } localStorage.setItem('preferences', JSON.stringify(appState)) }
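// --- Editor addition: hedged usage sketch ---
// getState accepts one key or an array of keys and always returns an object
// keyed by those names; setState shallow-merges and persists the result to
// localStorage under 'preferences'. The keys below come from the defaults
// defined above.
const { volume } = getState('volume')               // => { volume: 100 }
const prefs = getState(['darktheme', 'brightness']) // => { darktheme: false, brightness: 100 }
setState({ volume: 80 })                            // merged, then written to localStorage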
context.rs
use crate::ffi; use crate::device::Device; use crate::{Error, Result}; pub struct Context { raw_context: *mut ffi::nfc_context, } impl Context { pub fn
() -> Self { let mut new_context: *mut ffi::nfc_context = std::ptr::null_mut(); unsafe { ffi::nfc_init(&mut new_context); } if new_context.is_null() { // for context: the standard library assumes malloc // will never fail, and that's the only way context // can be null, so at that point either we'll crash // or something else will panic!("Context should never be null"); } Context { raw_context: new_context, } } pub fn open_device(&mut self, connstring: &str) -> Result<Device> { let mut connarr = crate::util::str_to_connarr(connstring); let device = unsafe { ffi::nfc_open(self.raw_context, connarr.as_mut_ptr()) }; if device.is_null() { // for context, unfortunately we don't get any error info // from trying to open a device, we just get a perror, which // is pleasant. Err(Error::new( "Unable to open device, check STDERR for details!", )) } else { Ok(Device { raw_device: device, _phantom: std::marker::PhantomData, }) } } } impl Drop for Context { fn drop(&mut self) { if !self.raw_context.is_null() { unsafe { ffi::nfc_exit(self.raw_context); } } } }
new
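// --- Editor addition: hedged usage sketch ---
// Typical lifecycle: create a context, open a device by libnfc connstring,
// and let Drop call nfc_exit. The connstring is an illustrative placeholder;
// as the comment above notes, open failures only surface via libnfc's perror.
fn open_example() -> Result<()> {
    let mut context = Context::new();
    let _device = context.open_device("pn532_uart:/dev/ttyUSB0")?; // placeholder connstring
    Ok(())
}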
Cert_5_5_02_LeaderReboot.py
#!/usr/bin/env python3
# Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # import unittest import config import thread_cert from pktverify.consts import MLE_ADVERTISEMENT, MLE_PARENT_REQUEST, MLE_PARENT_RESPONSE, MLE_CHILD_UPDATE_RESPONSE, MLE_CHILD_ID_REQUEST, MLE_CHILD_ID_RESPONSE, MLE_LINK_REQUEST, MLE_LINK_ACCEPT_AND_REQUEST, ADDR_SOL_URI, SOURCE_ADDRESS_TLV, MODE_TLV, TIMEOUT_TLV, CHALLENGE_TLV, RESPONSE_TLV, LINK_LAYER_FRAME_COUNTER_TLV, MLE_FRAME_COUNTER_TLV, ROUTE64_TLV, ADDRESS16_TLV, LEADER_DATA_TLV, NETWORK_DATA_TLV, TLV_REQUEST_TLV, SCAN_MASK_TLV, CONNECTIVITY_TLV, LINK_MARGIN_TLV, VERSION_TLV, ADDRESS_REGISTRATION_TLV, ACTIVE_TIMESTAMP_TLV from pktverify.packet_verifier import PacketVerifier from pktverify.null_field import nullField LEADER = 1 ROUTER = 2 ED = 3 class Cert_5_5_2_LeaderReboot(thread_cert.TestCase): TOPOLOGY = { LEADER: { 'name': 'LEADER', 'mode': 'rsdn', 'panid': 0xface, 'router_selection_jitter': 1, 'whitelist': [ROUTER] }, ROUTER: { 'name': 'ROUTER', 'mode': 'rsdn', 'panid': 0xface, 'router_selection_jitter': 1, 'whitelist': [LEADER, ED] }, ED: { 'name': 'MED', 'is_mtd': True, 'mode': 'rsn', 'panid': 0xface, 'whitelist': [ROUTER] }, } def _setUpLeader(self): self.nodes[LEADER].add_whitelist(self.nodes[ROUTER].get_addr64()) self.nodes[LEADER].enable_whitelist() self.nodes[LEADER].set_router_selection_jitter(1) def test(self): self.nodes[LEADER].start() self.simulator.go(5) self.assertEqual(self.nodes[LEADER].get_state(), 'leader') self.nodes[ROUTER].start() self.simulator.go(5) self.assertEqual(self.nodes[ROUTER].get_state(), 'router') self.nodes[ED].start() self.simulator.go(5) self.assertEqual(self.nodes[ED].get_state(), 'child') self.nodes[LEADER].reset() self._setUpLeader() self.simulator.go(140) self.assertEqual(self.nodes[ROUTER].get_state(), 'leader') self.nodes[LEADER].start() self.simulator.go(5) self.assertEqual(self.nodes[LEADER].get_state(), 'router') addrs = self.nodes[ED].get_addrs() for addr in addrs: self.assertTrue(self.nodes[ROUTER].ping(addr)) def verify(self, pv): pkts = pv.pkts pv.summary.show() LEADER = pv.vars['LEADER'] ROUTER = pv.vars['ROUTER'] MED = pv.vars['MED'] 
leader_pkts = pkts.filter_wpan_src64(LEADER) _rpkts = pkts.filter_wpan_src64(ROUTER) # Step 2: The DUT MUST send properly formatted MLE Advertisements _rpkts.filter_mle_cmd(MLE_CHILD_ID_RESPONSE).must_next() _lpkts = leader_pkts.range(_rpkts.index) _lpkts.filter_mle_cmd(MLE_ADVERTISEMENT).must_next().must_verify( lambda p: {SOURCE_ADDRESS_TLV, LEADER_DATA_TLV, ROUTE64_TLV} == set(p.mle.tlv.type)) _rpkts.filter_mle_cmd(MLE_ADVERTISEMENT).must_next().must_verify( lambda p: {SOURCE_ADDRESS_TLV, LEADER_DATA_TLV, ROUTE64_TLV} == set(p.mle.tlv.type)) # Step 4: Router_1 MUST attempt to reattach to its original partition by # sending MLE Parent Requests to the All-Routers multicast # address (FFxx::xx) with a hop limit of 255. _rpkts.filter_mle_cmd(MLE_PARENT_REQUEST).must_next().must_verify( lambda p: {MODE_TLV, CHALLENGE_TLV, SCAN_MASK_TLV, VERSION_TLV} == set(p.mle.tlv.type)) lreset_start = _rpkts.index # Step 5: Leader MUST NOT respond to the MLE Parent Requests _lpkts.filter_mle_cmd(MLE_PARENT_RESPONSE).must_not_next() # Step 6:Router_1 MUST attempt to attach to any other Partition # within range by sending a MLE Parent Request. _rpkts.filter_mle_cmd(MLE_PARENT_REQUEST).must_next().must_verify( lambda p: {MODE_TLV, CHALLENGE_TLV, SCAN_MASK_TLV, VERSION_TLV} == set(p.mle.tlv.type)) lreset_stop = _rpkts.index # Step 3: The Leader MUST stop sending MLE advertisements. leader_pkts.range(lreset_start, lreset_stop).filter_mle_cmd(MLE_ADVERTISEMENT).must_not_next() # Step 7: Take over leader role of a new Partition and # begin transmitting MLE Advertisements with _rpkts.save_index(): _rpkts.filter_mle_cmd(MLE_ADVERTISEMENT).must_next().must_verify( lambda p: {SOURCE_ADDRESS_TLV, LEADER_DATA_TLV, ROUTE64_TLV} == set(p.mle.tlv.type)) # Step 8: Router_1 MUST respond with an MLE Child Update Response, # with the updated TLVs of the new partition _rpkts.filter_mle_cmd(MLE_CHILD_UPDATE_RESPONSE).must_next().must_verify( lambda p: {SOURCE_ADDRESS_TLV, MODE_TLV, LEADER_DATA_TLV, ADDRESS_REGISTRATION_TLV} < set(p.mle.tlv.type)) # Step 9: The Leader MUST send properly formatted MLE Parent # Requests to the All-Routers multicast address _lpkts.filter_mle_cmd(MLE_PARENT_REQUEST).must_next().must_verify( lambda p: {MODE_TLV, CHALLENGE_TLV, SCAN_MASK_TLV, VERSION_TLV} == set(p.mle.tlv.type)) # Step 10: Router_1 MUST send an MLE Parent Response _rpkts.filter_mle_cmd(MLE_PARENT_RESPONSE).must_next().must_verify( lambda p: { SOURCE_ADDRESS_TLV, LEADER_DATA_TLV, LINK_LAYER_FRAME_COUNTER_TLV, RESPONSE_TLV, CHALLENGE_TLV, LINK_MARGIN_TLV, CONNECTIVITY_TLV, VERSION_TLV } < set(p.mle.tlv.type)) # Step 11: Leader send MLE Child ID Request _lpkts.filter_mle_cmd(MLE_CHILD_ID_REQUEST).must_next().must_verify( lambda p: { RESPONSE_TLV, LINK_LAYER_FRAME_COUNTER_TLV, MODE_TLV, TIMEOUT_TLV, VERSION_TLV, TLV_REQUEST_TLV, ADDRESS16_TLV, NETWORK_DATA_TLV, ROUTE64_TLV, ACTIVE_TIMESTAMP_TLV } < set(p.mle.tlv.type)) #Step 12: Router_1 send MLE Child ID Response _rpkts.filter_mle_cmd(MLE_CHILD_ID_RESPONSE).must_next().must_verify( lambda p: {SOURCE_ADDRESS_TLV, LEADER_DATA_TLV, ADDRESS16_TLV, NETWORK_DATA_TLV, ROUTE64_TLV} < set( p.mle.tlv.type)) #Step 13: Leader send an Address Solicit Request _lpkts.filter_coap_request(ADDR_SOL_URI).must_next().must_verify( lambda p: p.coap.tlv.ext_mac_addr and p.coap.tlv.rloc16 is not nullField and p.coap.tlv.status != 0) #Step 14: Router_1 send an Address Solicit Response _rpkts.filter_coap_ack( ADDR_SOL_URI).must_next().must_verify(lambda p: p.coap.tlv.router_mask_assigned and p.coap.tlv.rloc16 is 
not nullField and p.coap.tlv.status == 0) #Step 15: Leader Send a Multicast Link Request _lpkts.filter_mle_cmd(MLE_LINK_REQUEST).must_next().must_verify( lambda p: {VERSION_TLV, TLV_REQUEST_TLV, SOURCE_ADDRESS_TLV, LEADER_DATA_TLV, CHALLENGE_TLV} < set( p.mle.tlv.type)) #Step 16: Router_1 send a Unicast Link Accept _rpkts.filter_mle_cmd(MLE_LINK_ACCEPT_AND_REQUEST).must_next().must_verify(lambda p: { VERSION_TLV, SOURCE_ADDRESS_TLV, RESPONSE_TLV, MLE_FRAME_COUNTER_TLV, LINK_MARGIN_TLV, LEADER_DATA_TLV } < set(p.mle.tlv.type)) #Step 17: Router_1 MUST respond with an ICMPv6 Echo Reply _rpkts.filter_ping_request().filter_wpan_dst64(MED).must_next() if __name__ == '__main__': unittest.main()
# # Copyright (c) 2016, The OpenThread Authors. # All rights reserved. #
agents.rs
//! Logic for Agents //! Agents are actors (such as users) that can edit content. //! https://docs.atomicdata.dev/commits/concepts.html use crate::{datetime_helpers, errors::AtomicResult, urls, Resource, Storelike}; #[derive(Clone, Debug)] pub struct Agent { /// Private key for signing commits pub private_key: Option<String>, /// Public key for verifying commit signatures pub public_key: String, /// URL of the Agent pub subject: String, pub created_at: i64, pub name: Option<String>, } impl Agent { /// Converts Agent to Resource. /// Does not include private key, only public. pub fn to_resource(&self, store: &impl Storelike) -> AtomicResult<Resource> { let mut agent = Resource::new_instance(urls::AGENT, store)?; agent.set_subject(self.subject.clone()); if let Some(name) = &self.name { agent.set_propval_string(crate::urls::NAME.into(), name, store)?; } agent.set_propval_string(crate::urls::PUBLIC_KEY.into(), &self.public_key, store)?; agent.set_propval_string( crate::urls::CREATED_AT.into(), &self.created_at.to_string(), store, )?; Ok(agent) } /// Creates a new Agent, generates a new Keypair. pub fn new(name: Option<&str>, store: &impl Storelike) -> AtomicResult<Agent> { let keypair = generate_keypair()?; Ok(Agent::new_from_private_key(name, store, &keypair.private)) } pub fn new_from_private_key( name: Option<&str>, store: &impl Storelike, private_key: &str, ) -> Agent { let keypair = generate_public_key(private_key); Agent { private_key: Some(keypair.private), public_key: keypair.public.clone(), subject: format!("{}/agents/{}", store.get_base_url(), keypair.public), name: name.map(|x| x.to_owned()), created_at: datetime_helpers::now(), } } pub fn new_from_public_key(store: &impl Storelike, public_key: &str) -> AtomicResult<Agent> { verify_public_key(public_key)?; Ok(Agent { private_key: None, public_key: public_key.into(), subject: format!("{}/agents/{}", store.get_base_url(), public_key), name: None, created_at: datetime_helpers::now(), }) } } /// A keypair, serialized using base64 pub struct Pair { pub private: String, pub public: String, } /// Returns a new random keypair. fn generate_keypair() -> AtomicResult<Pair> { use ring::signature::KeyPair; let rng = ring::rand::SystemRandom::new(); const SEED_LEN: usize = 32; let seed: [u8; SEED_LEN] = ring::rand::generate(&rng) .map_err(|_| "Error generating random seed")? .expose(); let key_pair = ring::signature::Ed25519KeyPair::from_seed_unchecked(&seed) .map_err(|e| format!("Error generating keypair {}", e))?; Ok(Pair { private: base64::encode(&seed), public: base64::encode(&key_pair.public_key()), }) } /// Returns a Key Pair (including public key) from a private key, base64 encoded. pub fn generate_public_key(private_key: &str) -> Pair { use ring::signature::KeyPair; let private_key_bytes = base64::decode(private_key).unwrap(); let key_pair = ring::signature::Ed25519KeyPair::from_seed_unchecked(private_key_bytes.as_ref()) .map_err(|_| "Error generating keypair") .unwrap(); Pair { private: base64::encode(private_key_bytes), public: base64::encode(key_pair.public_key().as_ref()), } } /// Checks if the public key is a valid ED25519 base64 key. /// Not perfect - only checks byte length and parses base64. pub fn verify_public_key(public_key: &str) -> AtomicResult<()> { let pubkey_bin = base64::decode(public_key) .map_err(|e| format!("Invalid public key. Not valid Base64. {}", e))?; if pubkey_bin.len() != 32 { return Err(format!( "Invalid public key, should be 32 bytes long instead of {}.
Key: {}", pubkey_bin.len(), public_key ) .into()); } Ok(()) } #[cfg(test)] mod test { #[cfg(test)] use super::*; #[test] fn keypair() { let pair = generate_keypair().unwrap(); let regenerated_pair = generate_public_key(&pair.private); assert_eq!(pair.public, regenerated_pair.public); } #[test] fn generate_from_private_key() { let private_key = "CapMWIhFUT+w7ANv9oCPqrHrwZpkP2JhzF9JnyT6WcI="; let public_key = "7LsjMW5gOfDdJzK/atgjQ1t20J/rw8MjVg6xwqm+h8U="; let regenerated_pair = generate_public_key(private_key); assert_eq!(public_key, regenerated_pair.public); } #[test] fn
() { let valid_public_key = "7LsjMW5gOfDdJzK/atgjQ1t20J/rw8MjVg6xwqm+h8U="; let invalid_length = "7LsjMW5gOfDdJzK/atgjQ1t20J/rw8MjVg6xwm+h8U"; let invalid_char = "7LsjMW5gOfDdJzK/atgjQ1t20^/rw8MjVg6xwqm+h8U="; verify_public_key(valid_public_key).unwrap(); verify_public_key(invalid_length).unwrap_err(); verify_public_key(invalid_char).unwrap_err(); } }
verifies_public_keys
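// --- Editor addition: hedged usage sketch ---
// Creating an Agent generates an Ed25519 keypair and derives the subject URL
// from the store's base URL; `store` stands for any Storelike implementation
// available to the caller (assumed here).
fn agent_example(store: &impl Storelike) -> AtomicResult<()> {
    let agent = Agent::new(Some("alice"), store)?; // the name is optional
    verify_public_key(&agent.public_key)?;         // the 32-byte base64 check from above
    let _resource = agent.to_resource(store)?;     // exposes the public key only
    Ok(())
}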
sagas.ts
import { call, put, putResolve, delay, select, takeEvery, takeLatest } from 'redux-saga/effects' import { createAction, PayloadAction } from '@reduxjs/toolkit' import { actions, initialState } from './actions' import * as api from '../api' import { annotation_id_t } from '../annotations/model' import { selectors } from './' import { saga_actions as annotations_saga_actions } from '../annotations' import { actions as worldlines_actions } from '../worldlines' import { saga_actions as worldlines_saga_actions } from '../worldlines' import { worldline_id_t } from '../worldlines/model' import { selectors as annotations_selectors } from '../annotations' import { selectors as worldlines_selectors } from '../worldlines' import { get_annotations_state } from '../app/selectors' const prefix = "annotation_window/" const saga_action_types = { fetch_state: prefix + "fetch_state", set_selected_annotation: prefix + "set_selected_annotation", set_selected_worldline: prefix + "set_selected_worldline", center_on_current_annotation: prefix + "center_on_current_annotation", adjust_t: prefix + "adjust_t", click: prefix + "click", doubleclick: prefix + "doubleclick", save: prefix + "save", load: prefix + "load", rpc: prefix + "rpc", rpc_from_key: prefix + "rpc_from_key", } export type click_payload_t = { x?: number, y?: number, z?: number, } export const saga_actions = { fetch_state: createAction( saga_action_types.fetch_state), set_selected_annotation: createAction<annotation_id_t | null>( saga_action_types.set_selected_annotation), set_selected_worldline: createAction<worldline_id_t | null>( saga_action_types.set_selected_worldline), center_on_current_annotation: createAction( saga_action_types.center_on_current_annotation), adjust_t: createAction<number>( saga_action_types.adjust_t), click: createAction<click_payload_t>( saga_action_types.click), doubleclick: createAction<click_payload_t>( saga_action_types.doubleclick), save: createAction(saga_action_types.save), load: createAction(saga_action_types.load), rpc: createAction<[number, string]>(saga_action_types.rpc), rpc_from_key: createAction<string>(saga_action_types.rpc_from_key), } function* fetch_state() { const meta = yield call(api.fetch_metadata) const rpcs = yield call(api.fetch_rpcs) const new_state = { ...initialState, shape_x: meta.shape_x, shape_y: meta.shape_y, shape_z: meta.shape_z, shape_t: meta.shape_t, scale_x: 700 / meta.shape_x, scale_y: 700 / meta.shape_x, scale_z: 120 / meta.shape_z, rpc_list: rpcs.payload, } yield put(actions.set_state(new_state)) } function* set_selected_annotation( action: PayloadAction<annotation_id_t | null>) { if (!action.payload) { yield put(actions.set_selected_annotation_local(null)) yield put(actions.set_selected_worldline_local(null)) } else { const annotations = yield select(get_annotations_state) const a = annotations[action.payload] if (a === undefined) { console.error(`Annotation ${action.payload} not found.`) return } yield put(actions.set_selected_annotation_local(a.id)) if (a.worldline_id > 0) { yield put(actions.set_selected_worldline_local(a.worldline_id)) } yield put(saga_actions.center_on_current_annotation()) } } function* set_selected_worldline( action: PayloadAction<worldline_id_t | null>) { const new_id = action.payload if (new_id == null) { yield put(actions.set_selected_worldline_local(null)) return } const worldlines = yield select(worldlines_selectors.get_worldlines) if (!worldlines[new_id]) { window.alert(`Bad track: ${new_id}. 
Please select an existing one or ` + `create a new one.`) return } yield put(actions.set_selected_worldline_local(new_id)) yield put(actions.set_selected_annotation_local(null)) if (new_id !== null) yield put(worldlines_actions.update_worldline_local({ id: new_id, visible: true, })) yield put(saga_actions.center_on_current_annotation()) } function* center_on_current_annotation() { const a = yield select(annotations_selectors.get_selected_annotation) if (a !== null && a !== undefined) yield put(actions.center_on_annotation(a)) } function* adjust_t(action: PayloadAction<number>) { const t_idx = yield select(selectors.get_t_idx) yield put(actions.set_t_idx(t_idx + action.payload)) } function* click(action: PayloadAction<click_payload_t>) {
if (action.payload.z) yield put(actions.set_z(action.payload.z)) } function* doubleclick(action: PayloadAction<click_payload_t>) { yield put(saga_actions.click(action.payload)) yield put(annotations_saga_actions.insert_annotation_here()) } function* save() { yield call(api.save) } function* load() { yield call(api.load) yield put(annotations_saga_actions.get_annotations()) yield put(worldlines_saga_actions.get_worldlines()) } function* rpc(action: PayloadAction<[number, string]>) { const state = yield select(selectors.get_state) const rpc = state.rpc_list[action.payload[0]] const method = rpc.name const arg = action.payload[1] const result = yield call(api.rpc, method, arg, state) console.log(result) if (result.status === "ok") { for (let action of result.callbacks) { const try_int_payload = parseInt(action.payload) if (String(try_int_payload) === action.payload) action.payload = try_int_payload yield putResolve(action) yield delay(100) } } else if (result.status === "error") { window.alert(result.exception) console.error(result.traceback) } } function* rpc_from_key(action: PayloadAction<string>) { const state = yield select(selectors.get_state) const method_idx = state.rpc_keybindings[action.payload] const arg = state.rpc_args[action.payload] yield put(saga_actions.rpc([method_idx, arg])) } function* watch_fetch_state() { yield takeLatest(saga_action_types.fetch_state, fetch_state) } function* watch_set_selected_annotation() { yield takeLatest(saga_action_types.set_selected_annotation, set_selected_annotation) } function* watch_set_selected_worldline() { yield takeLatest(saga_action_types.set_selected_worldline, set_selected_worldline) } function* watch_center_on_current_annotation() { yield takeLatest(saga_action_types.center_on_current_annotation, center_on_current_annotation) } function* watch_adjust_t() { yield takeLatest(saga_action_types.adjust_t, adjust_t) } function* watch_click() { yield takeLatest(saga_action_types.click, click) } function* watch_doubleclick() { yield takeLatest(saga_action_types.doubleclick, doubleclick) } function* watch_save() { yield takeLatest(saga_action_types.save, save) } function* watch_load() { yield takeLatest(saga_action_types.load, load) } function* watch_rpc() { yield takeEvery(saga_action_types.rpc, rpc) } function* watch_rpc_from_key() { yield takeEvery(saga_action_types.rpc_from_key, rpc_from_key) } // These are the exported sagas. export const sagas = [ watch_fetch_state, watch_set_selected_annotation, watch_set_selected_worldline, watch_center_on_current_annotation, watch_adjust_t, watch_click, watch_doubleclick, watch_save, watch_load, watch_rpc, watch_rpc_from_key, ]
if (action.payload.x) yield put(actions.set_x(action.payload.x)) if (action.payload.y) yield put(actions.set_y(action.payload.y))
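// --- Editor addition: hedged usage sketch ---
// The exported `sagas` array holds watcher sagas; a conventional way to run
// them is to fork each one from a root saga on a redux-saga middleware.
// `createSagaMiddleware` comes from 'redux-saga', and the store wiring is an
// assumption about the surrounding app, not shown in this file.
import createSagaMiddleware from 'redux-saga'
import { all, fork } from 'redux-saga/effects'
import { sagas } from './sagas'

const sagaMiddleware = createSagaMiddleware()
sagaMiddleware.run(function* rootSaga() {
  yield all(sagas.map((saga) => fork(saga)))
})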
vertxEventBusService.spec.js
/* jshint camelcase: false, undef: true, unused: true, browser: true */ /* global module: false, describe: false, it: false, expect: false, beforeEach: false, inject: false, SockJS: false */ const SockJS = require('sockjs-client'); const enableUnhandledRejectionTracing = require('../util/unhandledRejectionTracing.js'); require('../../../src/module.js'); describe('integration of module::vertxEventBusService', function () { beforeEach(angular.mock.module('knalli.angular-vertxbus')); beforeEach(angular.mock.module('knalli.angular-vertxbus', function ($provide) { enableUnhandledRejectionTracing(angular, $provide); $provide.value('$log', { log: function () {}, debug: function () {}, info: function () {}, warn: function () {}, error: function () {} }); })); it('should have vertxEventBusService', angular.mock.inject(function (vertxEventBusService) { expect(vertxEventBusService).not.to.be(undefined); })); describe('vertxEventBusService', function () { var vertxEventBusService; beforeEach(angular.mock.inject(function (_vertxEventBusService_) { vertxEventBusService = _vertxEventBusService_; })); it('should be an object', function () { expect(typeof vertxEventBusService).to.be('object'); }); it('should have a method readyState()', function () { expect(vertxEventBusService.readyState).not.to.be(undefined); }); describe('readyState()', function () { it('should be a function', function () { expect(typeof vertxEventBusService.readyState).to.be('function'); }); }); describe('adding two handlers with the same address, different callbacks.', function () { it('both handlers should be called - with same address', function (done) { var abcCalled, abcCalled2; setTimeout(function () { var abcHandler = function (message) { abcCalled = message; }, abcHandler2 = function (message) { // use a copy of the data so that we don't change // the message sent to other callbacks. var copy = angular.copy(message); copy = copy + "-2"; abcCalled2 = copy; }; vertxEventBusService.addListener('abc', abcHandler); vertxEventBusService.addListener('abc', abcHandler2); // remove again! 
SockJS.currentMockInstance.onmessage({ data : JSON.stringify({ address : 'abc', body : '1x', replyAddress : undefined }) }); expect(abcCalled).to.be('1x'); expect(abcCalled2).to.be('1x-2'); // remove handlers vertxEventBusService.removeListener('abc', abcHandler); vertxEventBusService.removeListener('abc', abcHandler2); done(); }, 200); }); }); describe('adding two handlers with the same callback, different addresses.', function () { it('handler should be called twice - with two different values - two different addresses', function (done) { var singleCallbackValue; function FunctionHolder() { "use strict"; return { handler : function (message) { singleCallbackValue = message; } }; } setTimeout(function () { var funcOne = new FunctionHolder(); var funcTwo = new FunctionHolder(); vertxEventBusService.addListener('abc', funcOne.handler); vertxEventBusService.addListener('xyz', funcTwo.handler); SockJS.currentMockInstance.onmessage({ data : JSON.stringify({ address : 'abc', body : 'abc', replyAddress : undefined }) }); expect(singleCallbackValue).to.be('abc'); SockJS.currentMockInstance.onmessage({ data : JSON.stringify({ address : 'xyz', body : 'xyz', replyAddress : undefined }) }); expect(singleCallbackValue).to.be('xyz'); // remove handlers vertxEventBusService.removeListener('abc', funcOne.handler); vertxEventBusService.removeListener('xyz', funcTwo.handler); done(); }, 200); }); }); }); describe('vertxEventBusService', function () { describe('with disabled message queue (default)', function () { var vertxEventBus, vertxEventBusService, result; beforeEach(angular.mock.module('knalli.angular-vertxbus', function (vertxEventBusServiceProvider) { vertxEventBusServiceProvider.useMessageBuffer(0); })); beforeEach(angular.mock.inject(function (_vertxEventBus_, _vertxEventBusService_) { vertxEventBus = _vertxEventBus_; vertxEventBusService = _vertxEventBusService_; // Mock bus is closed _vertxEventBus_.readyState = function () { return _vertxEventBus_.EventBus.CLOSED; }; var sendCalls = 0; _vertxEventBus_.send = function (address, message, headers, replyHandler) { ++sendCalls; result = { reply : message }; if (replyHandler) { replyHandler(result); } }; // extend object vertxEventBus.getSendCalls = function () { return sendCalls; }; })); describe('should not dispatch send', function () { it('when eventbus is closed', function (done) { setTimeout(function () { vertxEventBusService.send('xyz', {data : 1}); setTimeout(function () { expect(result).to.be(undefined); expect(vertxEventBusService.delegate.getMessageQueueLength()).to.be(0); expect(vertxEventBus.getSendCalls()).to.be(0); done(); }, 1000); }, 200); }); }); }); describe('with enabled message queue (size 3)', function () { var vertxEventBus, vertxEventBusService, result; beforeEach(angular.mock.module('knalli.angular-vertxbus', function (vertxEventBusServiceProvider) { vertxEventBusServiceProvider.useMessageBuffer(3); })); beforeEach(angular.mock.inject(function (_vertxEventBus_, _vertxEventBusService_) { vertxEventBus = _vertxEventBus_; vertxEventBusService = _vertxEventBusService_; // Mock bus is closed vertxEventBus.readyState = function () { return vertxEventBus.EventBus.CLOSED; }; var sendCalls = 0; vertxEventBus.send = function (address, message, headers, replyHandler) { ++sendCalls; result = { reply : message }; if (replyHandler) { replyHandler(result); } }; // extend object vertxEventBus.getSendCalls = function () { return sendCalls; }; })); describe('when eventbus is closed', function () { it('should dispatch send as queued', function 
(done) { setTimeout(function () { vertxEventBusService.send('xyz', {data : 123}); setTimeout(function () { expect(result).to.be(undefined); expect(vertxEventBusService.delegate.getMessageQueueLength()).to.be(1); expect(vertxEventBus.getSendCalls()).to.be(0); done(); }, 1000); }, 200); }); it('should queue max 3 items', function (done) { setTimeout(function () { vertxEventBusService.send('xyz', {data : 1}); vertxEventBusService.send('xyz', {data : 2}); vertxEventBusService.send('xyz', {data : 3}); vertxEventBusService.send('xyz', {data : 4}); setTimeout(function () { expect(result).to.be(undefined); expect(vertxEventBusService.delegate.getMessageQueueLength()).to.be(3); expect(vertxEventBus.getSendCalls()).to.be(0); done(); }, 1000); }, 200); }); }); describe('should replay queued items', function () { it('when eventbus is reopened', function (done) { setTimeout(function () { vertxEventBusService.send('xyz', {data : 0}).then(null, angular.noop); vertxEventBusService.send('xyz', {data : 1}).then(null, angular.noop); vertxEventBusService.send('xyz', {data : 2}).then(null, angular.noop); vertxEventBusService.send('xyz', {data : 3}).then(null, angular.noop); // fake connect vertxEventBus.readyState = function () { return vertxEventBus.EventBus.OPEN; }; vertxEventBus.onopen(); setTimeout(function () { expect(result).to.eql({reply : {data : 3}}); expect(vertxEventBusService.delegate.getMessageQueueLength()).to.be(0); expect(vertxEventBus.getSendCalls()).to.be(3); done(); }, 1000); }, 200); }); }); }); describe('when the service is not connected correctly (stalled connection)', function () { var $rootScope, vertxEventBus, vertxEventBusService, $timeout; beforeEach(angular.mock.module('knalli.angular-vertxbus', function (vertxEventBusServiceProvider) { vertxEventBusServiceProvider.useMessageBuffer(0).useDebug(true); })); beforeEach(angular.mock.inject(function (_$rootScope_, _vertxEventBus_, _vertxEventBusService_, _$timeout_) { $rootScope = _$rootScope_; $timeout = _$timeout_; vertxEventBus = _vertxEventBus_; vertxEventBusService = _vertxEventBusService_; // Mock bus is opened (said to be) _vertxEventBus_.readyState = function () { return _vertxEventBus_.EventBus.OPEN; }; _vertxEventBusService_.getConnectionState = function () { return true; }; var sendCalls = 0; _vertxEventBus_.send = function () { // do nothing, let it timeout }; // extend object vertxEventBus.getSendCalls = function () { return sendCalls; }; })); describe('send() should call the error callback', function () { var $interval; beforeEach(angular.mock.inject(function (_$interval_) { $interval = _$interval_; // angular.mock.$interval })); it('via promise.then()', function (done) { var successCalled, errorCalled; setTimeout(function () { // very short timeout: 10 vertxEventBusService.send('xyz', {data : 1}, {}, {timeout : 10}).then(function () { successCalled = true; }, function () { errorCalled = true; }); $rootScope.$apply(); setTimeout(function () { $interval.flush(20); // goto T+20 expect(successCalled).to.be(undefined); expect(errorCalled).to.be(true); done(); }, 300); }, 200); }); it('via promise.then() without expecting reply', function (done) { var successCalled, errorCalled; setTimeout(function () { // very short timeout: 10 vertxEventBusService.send('xyz', {data : 1}, {}, {timeout : 10, expectReply : false}).then(function () { successCalled = true; }, function () { errorCalled = true; }); $rootScope.$apply(); setTimeout(function () { $interval.flush(20); // goto T+20 expect(successCalled).to.be(true); 
expect(errorCalled).to.be(undefined); done(); }, 300); }, 200); }); it('via promise.catch()', function (done) { var successCalled, errorCalled; setTimeout(function () { // very short timeout: 10 vertxEventBusService.send('xyz', {data : 1}, {}, {timeout : 10}).then(function () { successCalled = true; })['catch'](function () { errorCalled = true; }); $rootScope.$apply(); setTimeout(function () { $interval.flush(20); // goto T+20 expect(successCalled).to.be(undefined); expect(errorCalled).to.be(true); done(); }, 300); }, 200); }); }); }); describe('when the service is not connected correctly (send throws exception because not open)', function () { var $rootScope, vertxEventBus, vertxEventBusService, $timeout; beforeEach(angular.mock.module('knalli.angular-vertxbus', function (vertxEventBusServiceProvider) { vertxEventBusServiceProvider.useMessageBuffer(0).useDebug(true); })); beforeEach(angular.mock.inject(function (_$rootScope_, _vertxEventBus_, _vertxEventBusService_, _$timeout_) { $rootScope = _$rootScope_; $timeout = _$timeout_; vertxEventBus = _vertxEventBus_; vertxEventBusService = _vertxEventBusService_; // Mock bus is opened (said to be) _vertxEventBus_.readyState = function () { return _vertxEventBus_.EventBus.OPEN; }; _vertxEventBusService_.getConnectionState = function () { return true; }; var sendCalls = 0; _vertxEventBus_.send = function () { throw new Error('INVALID_STATE_ERR'); }; // extend object vertxEventBus.getSendCalls = function () { return sendCalls; }; })); describe('send() should call the error callback', function () { var $interval; beforeEach(angular.mock.inject(function (_$interval_) { $interval = _$interval_; // angular.mock.$interval })); it('via promise.then()', function (done) { var successCalled, errorCalled; setTimeout(function () { // very short timeout: 10 vertxEventBusService.send('xyz', {data : 1}, {}, {timeout : 10}).then(function () { successCalled = true; }, function () { errorCalled = true; }); $rootScope.$apply(); setTimeout(function () { $interval.flush(20); // goto T+20 expect(successCalled).to.be(undefined); expect(errorCalled).to.be(true); done(); }, 300); }, 200); }); it('via promise.then() without expecting reply', function (done) { var successCalled, errorCalled; setTimeout(function () { // very short timeout: 10 vertxEventBusService.send('xyz', {data : 1}, {}, {timeout : 10, expectReply : false}).then(function () { successCalled = true; }, function () { errorCalled = true; }); $rootScope.$apply(); setTimeout(function () { $interval.flush(20); // goto T+20 expect(successCalled).to.be(undefined); expect(errorCalled).to.be(true); done(); }, 300); }, 200); }); it('via promise.catch()', function (done) { var successCalled, errorCalled; setTimeout(function () { // very short timeout: 10 vertxEventBusService.send('xyz', {data : 1}, {}, {timeout : 10}).then(function () { successCalled = true; })['catch'](function () { errorCalled = true; }); $rootScope.$apply(); setTimeout(function () { $interval.flush(20); // goto T+20 expect(successCalled).to.be(undefined); expect(errorCalled).to.be(true); done(); }, 300); }, 200); }); }); }); describe('reconnect', function () { var $timeout, vertxEventBus, vertxEventBusService; beforeEach(angular.mock.inject(function (_vertxEventBus_, _vertxEventBusService_, _$timeout_) { $timeout = _$timeout_; vertxEventBus = _vertxEventBus_; vertxEventBusService = _vertxEventBusService_; // Mock bus is closed _vertxEventBus_.readyState = function () { return _vertxEventBus_.EventBus.OPEN; }; var sendCalls = 0; 
_vertxEventBus_.send = function () { // do nothing, let it timeout }; // extend object vertxEventBus.getSendCalls = function () { return sendCalls; }; })); it('should be a function', function () { expect(typeof vertxEventBus.reconnect).to.be('function'); }); // Reconnect should switch the connectivity; onopen() and onclose() // must be delegated transparently it('should re-add handler after a reconnect', function (done) { this.timeout(20000); var okHandler = false; var myHandler = function () { //$log.debug('[TEST] onhandle() called'); okHandler = true; }; setTimeout(function () { vertxEventBusService.addListener('lalelu', myHandler); vertxEventBus.reconnect(); setTimeout(function () { setTimeout(function () { SockJS.currentMockInstance.onmessage({ data : JSON.stringify({ address : 'lalelu', body : { data : '1x' }, replyAddress : undefined }) }); expect(okHandler).to.be(true); done(); }, 2100); $timeout.flush(); }, 100); }, 100); }); }); describe('after adding and removing a handler via "registerHandler"', function () { var vertxEventBusService; beforeEach(angular.mock.module('knalli.angular-vertxbus', function (vertxEventBusServiceProvider) { vertxEventBusServiceProvider.useMessageBuffer(0); })); beforeEach(angular.mock.inject(function (_vertxEventBusService_) { vertxEventBusService = _vertxEventBusService_; })); it('should not be called', function (done) { var abcCalled, xyzCalled;
var abcHandler = function (message) { abcCalled = message; }, xyzHandler = function (message) { xyzCalled = message; }; var abcFunct = vertxEventBusService.addListener('abc', abcHandler); var xyzFunct = vertxEventBusService.addListener('xyz', xyzHandler); // remove again! abcFunct(); xyzFunct(); SockJS.currentMockInstance.onmessage({ data : JSON.stringify({ address : 'xyz', body : { data : '1x' }, replyAddress : undefined }) }); expect(abcCalled).to.be(undefined); expect(xyzCalled).to.be(undefined); done(); }, 200); }); }); }); describe('after removing a registered handler via "unregisterHandler"', function () { var vertxEventBusService; beforeEach(angular.mock.module('knalli.angular-vertxbus', function (vertxEventBusServiceProvider) { vertxEventBusServiceProvider.useMessageBuffer(0); })); beforeEach(angular.mock.inject(function (_vertxEventBusService_) { vertxEventBusService = _vertxEventBusService_; })); it('should not be called', function (done) { var abcCalled, xyzCalled; setTimeout(function () { var abcHandler = function (message) { abcCalled = message; }, xyzHandler = function (message) { xyzCalled = message; }; vertxEventBusService.addListener('abc', abcHandler); vertxEventBusService.addListener('xyz', xyzHandler); // remove again! vertxEventBusService.removeListener('abc', abcHandler); vertxEventBusService.removeListener('xyz', xyzHandler); SockJS.currentMockInstance.onmessage({ data : JSON.stringify({ address : 'xyz', message : { data : '1x' } }) }); expect(abcCalled).to.be(undefined); expect(xyzCalled).to.be(undefined); done(); }, 200); }); }); describe('vertxEventBusService (bus online) send()', function () { var vertxEventBusService, vertxEventBus, $timeout, $rootScope, $log; beforeEach(angular.mock.module('knalli.angular-vertxbus', function (vertxEventBusServiceProvider) { vertxEventBusServiceProvider.useMessageBuffer(0).useDebug(true); })); beforeEach(angular.mock.inject(function (_vertxEventBus_, _vertxEventBusService_, _$timeout_, _$rootScope_, _$log_) { vertxEventBus = _vertxEventBus_; vertxEventBusService = _vertxEventBusService_; $timeout = _$timeout_; $rootScope = _$rootScope_; $log = _$log_; SockJS.currentMockInstance.$log = $log; })); it('should return a promise which will be resolved (success)', function (done) { setTimeout(function () { var results = { 'then' : 0, 'catch' : 0, 'finally' : 0 }; var promise = vertxEventBusService.send('xyz', {data : 123}); expect(promise).to.not.be(undefined); // looks like a promise? expect(typeof promise).to.be('object'); expect(typeof promise.then).to.be('function'); expect(typeof promise.catch).to.be('function'); expect(typeof promise.finally).to.be('function'); promise.then(function () { results.then++; }); promise.catch(function () { results.catch++; }); promise.finally(function () { results.finally++; }); $rootScope.$apply(); setTimeout(function () { expect(results.then).to.be(1); expect(results.catch).to.be(0); expect(results.finally).to.be(1); done(); }, 500); }, 200); }); it('should return a promise which will be rejected (failure in message)', function (done) { setTimeout(function () { var results = { 'then' : 0, 'catch' : 0, 'finally' : 0 }; var promise = vertxEventBusService.send('xyz', { data : 123, mockReply: { type: 'err', failureCode: 4711, failureType: 'whatever' } }); expect(promise).to.not.be(undefined); // looks like a promise? 
expect(typeof promise).to.be('object'); expect(typeof promise.then).to.be('function'); expect(typeof promise.catch).to.be('function'); expect(typeof promise.finally).to.be('function'); promise.then(function () { results.then++; }, angular.noop); // ignore error (because error is expected) promise.catch(function () { results.catch++; }); promise.finally(function () { results.finally++; }).then(null, angular.noop); // ignore error (because error is expected) $rootScope.$apply(); setTimeout(function () { expect(results.then).to.be(0); expect(results.catch).to.be(1); expect(results.finally).to.be(1); done(); }, 500); }, 200); }); }); describe('vertxEventBusService (bus offline) send()', function () { var vertxEventBusService, vertxEventBus, $timeout, $rootScope, $log; beforeEach(angular.mock.module('knalli.angular-vertxbus', function (vertxEventBusServiceProvider) { vertxEventBusServiceProvider.useMessageBuffer(0).useDebug(true); })); beforeEach(angular.mock.inject(function (_vertxEventBus_, _vertxEventBusService_, _$timeout_, _$rootScope_, _$log_) { vertxEventBus = _vertxEventBus_; vertxEventBusService = _vertxEventBusService_; $timeout = _$timeout_; $rootScope = _$rootScope_; $log = _$log_; SockJS.currentMockInstance.$log = $log; vertxEventBus.readyState = function () { return 3; }; })); it('should return a promise which will be rejected (fail)', function (done) { setTimeout(function () { var results = { 'then' : 0, 'catch' : 0, 'finally' : 0 }; var promise = vertxEventBusService.send('xyz', {data : 123}); expect(promise).to.not.be(undefined); // looks like a promise? expect(typeof promise).to.be('object'); expect(typeof promise.then).to.be('function'); expect(typeof promise.catch).to.be('function'); expect(typeof promise.finally).to.be('function'); promise.then(function () { results.then++; }, angular.noop); // ignore error (because error is expected) promise.catch(function () { results.catch++; }); promise.finally(function () { results.finally++; }).then(null, angular.noop); // ignore error (because error is expected) $rootScope.$apply(); setTimeout(function () { window.console.warn(results); expect(results.then).to.be(0); expect(results.catch).to.be(1); expect(results.finally).to.be(1); done(); }, 500); }, 200); }); }); });
setTimeout(function () {
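// Illustrative sketch, not part of the spec file above: the timeout specs all
// follow one recurring pattern, shown here in isolation. It assumes the same
// mocha/expect.js setup and the injected mocks ($interval, $rootScope,
// vertxEventBusService) used throughout the suites above.
it('rejects the send() promise once the timeout elapses (sketch)', function (done) {
  var errorCalled;
  setTimeout(function () {
    // Send with a very short timeout (10ms) and capture only the reject branch.
    vertxEventBusService.send('xyz', {data : 1}, {}, {timeout : 10})
      .then(null, function () { errorCalled = true; });
    $rootScope.$apply(); // propagate promise state through the digest cycle
    setTimeout(function () {
      $interval.flush(20); // advance angular-mock time past the 10ms deadline
      expect(errorCalled).to.be(true);
      done();
    }, 300);
  }, 200); // initial delay: wait for the mocked SockJS connection, as above
});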
viz_traj.py
#!/usr/bin/env python3 # -*-coding:utf-8 -*- # ============================================================================= """ @Author : Yujie He @File : viz_traj.py @Date created : 2022/02/25 @Maintainer : Yujie He @Email : [email protected] """ # ============================================================================= """ This module provides a script to visualize the robot trajectory and the k longest (default: 3) pedestrian trajectories around the Qolo robot. Example: python qolo/viz_traj.py -f 0410_mds --all --overwrite """ # ============================================================================= """ TODO: 1. plot vx/vy or linear/angular velocity """ # ============================================================================= import os import sys import argparse import numpy as np import pandas as pd import matplotlib.pyplot as plt from qolo.core.crowdbot_data import CrowdBotDatabase from qolo.utils.geo_util import quat2yaw from qolo.utils.file_io_util import ( load_json2dict, load_pkl2dict, ) from qolo.utils.res_plot_util import ( viz_qolo_ped_traj_full, viz_qolo_ped_traj_frame, get_nlongest_peds, viz_ped_speed, viz_ped_speed_vw, ) color_list = ['navy', 'blue', 'slateblue', 'violet', 'skyblue'] def main(): parser = argparse.ArgumentParser( description="visualize trajectories of pedestrians around Qolo" ) parser.add_argument( "-f", "--folder", default="0410_mds", type=str, help="different subfolder in rosbag/ dir", ) parser.add_argument( "--seq", default="2021-04-10-11-28-10", # 2021-04-10-10-38-36 2021-04-10-10-41-17 type=str, help="specific sequence in the subfolder", ) parser.add_argument( "--all", dest="process_all", action="store_true", help="Process all sequences instead of a single sequence", ) parser.set_defaults(process_all=False) parser.add_argument( "--overwrite", dest="overwrite", action="store_true", help="Whether to overwrite existing output (default: false)", ) parser.set_defaults(overwrite=False) args = parser.parse_args()
if args.seq is None or args.process_all: seqs = [cb_data.seqs[seq_idx] for seq_idx in range(cb_data.nr_seqs())] else: seqs = [args.seq] for seq_idx, seq in enumerate(seqs): sq_idx = cb_data.seqs.index(seq) seq_len = cb_data.nr_frames(sq_idx) print("({}/{}): {} with {} frames".format(seq_idx + 1, len(seqs), seq, seq_len)) # dest: path_img_path eval_res_dir = os.path.join(cb_data.metrics_dir) if not os.path.exists(eval_res_dir): print("Result images and npy will be saved in {}".format(eval_res_dir)) os.makedirs(eval_res_dir, exist_ok=True) path_img_path = os.path.join(eval_res_dir, seq, seq + "_traj.png") proc_path_img_path = os.path.join(eval_res_dir, seq, seq + "_traj_proc.png") # path_img_path = os.path.join(eval_res_dir, seq, seq + "_{}_traj.png".format(frame_id)) plot_exist = os.path.exists(path_img_path) if plot_exist and not args.overwrite: print("{} plots already generated!!!".format(seq)) print("Will not overwrite. If you want to overwrite, use flag --overwrite") continue # src 1: trajectory data traj_dir = os.path.join(cb_data.ped_data_dir, "traj") if not os.path.exists(traj_dir): sys.exit("Please use det2traj.py to extract pedestrian trajectories first!") traj_pkl_path = os.path.join(traj_dir, seq + '.pkl') # traj_json_path = os.path.join(traj_dir, seq + '.json') proc_traj_pkl_path = os.path.join(traj_dir, seq + '_proc.pkl') # src 2: qolo data tf_qolo_dir = os.path.join(cb_data.source_data_dir, "tf_qolo") pose_stamp_path = os.path.join(tf_qolo_dir, seq + "_tfqolo_sampled.npy") pose_stamped = np.load(pose_stamp_path, allow_pickle=True).item() # src 3: velocity path vel_dir = os.path.join(cb_data.ped_data_dir, "vel") vel_pkl_path = os.path.join(vel_dir, seq + '.pkl') trans_array = pose_stamped["position"] qolo_pose = { 'x': trans_array[:, 0], 'y': trans_array[:, 1], 'init_ori': pose_stamped["orientation"], } ped_traj_dict = load_pkl2dict(traj_pkl_path) # ped_traj_dict = load_json2dict(traj_json_path) ped_vel_dict = load_pkl2dict(vel_pkl_path) top_ids = get_nlongest_peds(ped_traj_dict, ped_num=5) viz_qolo_ped_traj_full( path_img_path, qolo_pose, ped_traj_dict, viz_ids=top_ids, color_list=color_list, ) # visualize processed trajectory proc_ped_traj_dict = load_pkl2dict(proc_traj_pkl_path) viz_qolo_ped_traj_full( proc_path_img_path, qolo_pose, proc_ped_traj_dict, viz_ids=top_ids, color_list=color_list, ) ped_vel_img_path1 = os.path.join(eval_res_dir, seq, seq + "_ped_vel.png") ped_vel_img_path2 = os.path.join(eval_res_dir, seq, seq + "_ped_vw.png") viz_ped_speed( ped_vel_img_path1, ped_vel_dict, viz_ids=top_ids, color_list=color_list, ) viz_ped_speed_vw( ped_vel_img_path2, ped_vel_dict, viz_ids=top_ids, color_list=color_list, ) if __name__ == "__main__": main()
cb_data = CrowdBotDatabase(args.folder)
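# Illustrative sketch, not part of viz_traj.py: get_nlongest_peds() is imported
# from qolo.utils.res_plot_util and its actual implementation is not shown in
# this file. Assuming ped_traj_dict maps pedestrian ids to per-track dicts that
# carry an 'x' sample list (an assumption, not confirmed by the source), a
# minimal ranking of the k longest tracks could look like:
def get_nlongest_peds_sketch(ped_traj_dict, ped_num=3):
    """Return ids of the ped_num pedestrians with the most recorded samples."""
    ranked = sorted(
        ped_traj_dict.keys(),
        key=lambda pid: len(ped_traj_dict[pid].get('x', [])),  # track-length proxy
        reverse=True,  # longest tracks first
    )
    return ranked[:ped_num]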
json.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Rust JSON serialization library // Copyright (c) 2011 Google Inc. #![forbid(non_camel_case_types)] #![allow(missing_docs)] //! JSON parsing and serialization //! //! # What is JSON? //! //! JSON (JavaScript Object Notation) is a way to write data in JavaScript. //! Like XML, it allows one to encode structured data in a text format that can be easily read by humans. //! Its simple syntax and native compatibility with JavaScript have made it a widely used format. //! //! Data types that can be encoded are JavaScript types (see the `Json` enum for more details): //! //! * `Boolean`: equivalent to rust's `bool` //! * `Number`: equivalent to rust's `f64` //! * `String`: equivalent to rust's `String` //! * `Array`: equivalent to rust's `Vec<T>`, but also allowing objects of different types in the //! same array //! * `Object`: equivalent to rust's `TreeMap<String, json::Json>` //! * `Null` //! //! An object is a series of string keys mapping to values, in `"key": value` format. //! Arrays are enclosed in square brackets ([ ... ]) and objects in curly brackets ({ ... }). //! A simple JSON document encoding a person, his/her age, address and phone numbers could look like: //! //! ```ignore //! { //! "FirstName": "John", //! "LastName": "Doe", //! "Age": 43, //! "Address": { //! "Street": "Downing Street 10", //! "City": "London", //! "Country": "Great Britain" //! }, //! "PhoneNumbers": [ //! "+44 1234567", //! "+44 2345678" //! ] //! } //! ``` //! //! # Rust Type-based Encoding and Decoding //! //! Rust provides a mechanism for low boilerplate encoding & decoding of values to and from JSON via //! the serialization API. //! To be able to encode a piece of data, it must implement the `serialize::Encodable` trait. //! To be able to decode a piece of data, it must implement the `serialize::Decodable` trait. //! The Rust compiler provides an annotation to automatically generate the code for these traits: //! `#[deriving(Decodable, Encodable)]` //! //! The JSON API provides an enum `json::Json` and a trait `ToJson` to encode objects. //! The `ToJson` trait provides a `to_json` method to convert an object into a `json::Json` value. //! A `json::Json` value can be encoded as a string or buffer using the functions described above. //! You can also use the `json::Encoder` object, which implements the `Encoder` trait. //! //! When using `ToJson` the `Encodable` trait implementation is not mandatory. //! //! # Examples of use //! //! ## Using Autoserialization //! //! Create a struct called `TestStruct` and serialize and deserialize it to and from JSON using the //! serialization API, using the derived serialization code. //! //! ```rust //! extern crate serialize; //! use serialize::json; //! //! // Automatically generate `Decodable` and `Encodable` trait implementations //! #[deriving(Decodable, Encodable)] //! pub struct TestStruct { //! data_int: u8, //! data_str: String, //! data_vector: Vec<u8>, //! } //! //! fn main() { //! let object = TestStruct { //! data_int: 1, //! data_str: "homura".to_string(), //! data_vector: vec![2,3,4,5], //!
}; //! //! // Serialize using `json::encode` //! let encoded = json::encode(&object); //! //! // Deserialize using `json::decode` //! let decoded: TestStruct = json::decode(encoded.as_slice()).unwrap(); //! } //! ``` //! //! ## Using the `ToJson` trait //! //! The examples above use the `ToJson` trait to generate the JSON string, which is required //! for custom mappings. //! //! ### Simple example of `ToJson` usage //! //! ```rust //! extern crate serialize; //! use serialize::json::{mod, ToJson, Json}; //! //! // A custom data structure //! struct ComplexNum { //! a: f64, //! b: f64, //! } //! //! // JSON value representation //! impl ToJson for ComplexNum { //! fn to_json(&self) -> Json { //! Json::String(format!("{}+{}i", self.a, self.b)) //! } //! } //! //! // Only generate `Encodable` trait implementation //! #[deriving(Encodable)] //! pub struct ComplexNumRecord { //! uid: u8, //! dsc: String, //! val: Json, //! } //! //! fn main() { //! let num = ComplexNum { a: 0.0001, b: 12.539 }; //! let data: String = json::encode(&ComplexNumRecord{ //! uid: 1, //! dsc: "test".to_string(), //! val: num.to_json(), //! }); //! println!("data: {}", data); //! // data: {"uid":1,"dsc":"test","val":"0.0001+12.539i"}; //! } //! ``` //! //! ### Verbose example of `ToJson` usage //! //! ```rust //! extern crate serialize; //! use std::collections::TreeMap; //! use serialize::json::{mod, Json, ToJson}; //! //! // Only generate `Decodable` trait implementation //! #[deriving(Decodable)] //! pub struct TestStruct { //! data_int: u8, //! data_str: String, //! data_vector: Vec<u8>, //! } //! //! // Specify encoding method manually //! impl ToJson for TestStruct { //! fn to_json(&self) -> Json { //! let mut d = TreeMap::new(); //! // All standard types implement `to_json()`, so use it //! d.insert("data_int".to_string(), self.data_int.to_json()); //! d.insert("data_str".to_string(), self.data_str.to_json()); //! d.insert("data_vector".to_string(), self.data_vector.to_json()); //! Json::Object(d) //! } //! } //! //! fn main() { //! // Serialize using `ToJson` //! let input_data = TestStruct { //! data_int: 1, //! data_str: "madoka".to_string(), //! data_vector: vec![2,3,4,5], //! }; //! let json_obj: Json = input_data.to_json(); //! let json_str: String = json_obj.to_string(); //! //! // Deserialize like before //! let decoded: TestStruct = json::decode(json_str.as_slice()).unwrap(); //! } //! ``` use self::JsonEvent::*; use self::StackElement::*; use self::ErrorCode::*; use self::ParserError::*; use self::DecoderError::*; use self::ParserState::*; use self::InternalStackElement::*; use std; use std::collections::{HashMap, TreeMap}; use std::{char, f64, fmt, io, num, str}; use std::mem::{swap, transmute}; use std::num::{Float, FPNaN, FPInfinite, Int}; use std::str::{FromStr, ScalarValue}; use std::string; use std::vec::Vec; use std::ops; use Encodable; /// Represents a json value #[deriving(Clone, PartialEq, PartialOrd)] pub enum Json { I64(i64), U64(u64), F64(f64), String(string::String), Boolean(bool), Array(self::Array), Object(self::Object), Null, } pub type Array = Vec<Json>; pub type Object = TreeMap<string::String, Json>; /// The errors that can arise while parsing a JSON stream.
#[deriving(Clone, PartialEq)] pub enum ErrorCode { InvalidSyntax, InvalidNumber, EOFWhileParsingObject, EOFWhileParsingArray, EOFWhileParsingValue, EOFWhileParsingString, KeyMustBeAString, ExpectedColon, TrailingCharacters, TrailingComma, InvalidEscape, InvalidUnicodeCodePoint, LoneLeadingSurrogateInHexEscape, UnexpectedEndOfHexEscape, UnrecognizedHex, NotFourDigit, NotUtf8, } impl Copy for ErrorCode {} #[deriving(Clone, PartialEq, Show)] pub enum ParserError { /// msg, line, col SyntaxError(ErrorCode, uint, uint), IoError(io::IoErrorKind, &'static str), } impl Copy for ParserError {} // Builder and Parser have the same errors. pub type BuilderError = ParserError; #[deriving(Clone, PartialEq, Show)] pub enum DecoderError { ParseError(ParserError), ExpectedError(string::String, string::String), MissingFieldError(string::String), UnknownVariantError(string::String), ApplicationError(string::String) } /// Returns a readable error string for a given error code. pub fn error_str(error: ErrorCode) -> &'static str { match error { InvalidSyntax => "invalid syntax", InvalidNumber => "invalid number", EOFWhileParsingObject => "EOF while parsing object", EOFWhileParsingArray => "EOF while parsing array", EOFWhileParsingValue => "EOF while parsing value", EOFWhileParsingString => "EOF while parsing string", KeyMustBeAString => "key must be a string", ExpectedColon => "expected `:`", TrailingCharacters => "trailing characters", TrailingComma => "trailing comma", InvalidEscape => "invalid escape", UnrecognizedHex => "invalid \\u escape (unrecognized hex)", NotFourDigit => "invalid \\u escape (not four digits)", NotUtf8 => "contents not utf-8", InvalidUnicodeCodePoint => "invalid Unicode code point", LoneLeadingSurrogateInHexEscape => "lone leading surrogate in hex escape", UnexpectedEndOfHexEscape => "unexpected end of hex escape", } } /// Shortcut function to decode a JSON `&str` into an object pub fn decode<T: ::Decodable<Decoder, DecoderError>>(s: &str) -> DecodeResult<T> { let json = match from_str(s) { Ok(x) => x, Err(e) => return Err(ParseError(e)) }; let mut decoder = Decoder::new(json); ::Decodable::decode(&mut decoder) } /// Shortcut function to encode a `T` into a JSON `String` pub fn encode<'a, T: Encodable<Encoder<'a>, io::IoError>>(object: &T) -> string::String { let buff = Encoder::buffer_encode(object); string::String::from_utf8(buff).unwrap() } impl fmt::Show for ErrorCode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { error_str(*self).fmt(f) } } fn io_error_to_error(io: io::IoError) -> ParserError { IoError(io.kind, io.desc) } impl std::error::Error for DecoderError { fn description(&self) -> &str { "decoder error" } fn detail(&self) -> Option<std::string::String> { Some(self.to_string()) } } pub type EncodeResult = io::IoResult<()>; pub type DecodeResult<T> = Result<T, DecoderError>; pub fn escape_bytes(wr: &mut io::Writer, bytes: &[u8]) -> Result<(), io::IoError> { try!(wr.write_str("\"")); let mut start = 0; for (i, byte) in bytes.iter().enumerate() { let escaped = match *byte { b'"' => "\\\"", b'\\' => "\\\\", b'\x08' => "\\b", b'\x0c' => "\\f", b'\n' => "\\n", b'\r' => "\\r", b'\t' => "\\t", _ => { continue; } }; if start < i { try!(wr.write(bytes[start..i])); } try!(wr.write_str(escaped)); start = i + 1; } if start != bytes.len() { try!(wr.write(bytes[start..])); } wr.write_str("\"") } fn escape_str(writer: &mut io::Writer, v: &str) -> Result<(), io::IoError> { escape_bytes(writer, v.as_bytes()) } fn escape_char(writer: &mut io::Writer, v: char) -> Result<(),
io::IoError> { let mut buf = [0, .. 4]; v.encode_utf8(&mut buf); escape_bytes(writer, &mut buf) } fn spaces(wr: &mut io::Writer, mut n: uint) -> Result<(), io::IoError> { const LEN: uint = 16; static BUF: [u8, ..LEN] = [b' ', ..LEN]; while n >= LEN { try!(wr.write(&BUF)); n -= LEN; } if n > 0 { wr.write(BUF[..n]) } else { Ok(()) } } fn fmt_number_or_null(v: f64) -> string::String { match v.classify() { FPNaN | FPInfinite => string::String::from_str("null"), _ if v.fract() != 0f64 => f64::to_str_digits(v, 6u), _ => f64::to_str_digits(v, 6u) + ".0", } } /// A structure for implementing serialization to JSON. pub struct Encoder<'a> { writer: &'a mut (io::Writer+'a), } impl<'a> Encoder<'a> { /// Creates a new JSON encoder whose output will be written to the writer /// specified. pub fn new(writer: &'a mut io::Writer) -> Encoder<'a> { Encoder { writer: writer } } /// Encode the specified struct into a json [u8] pub fn buffer_encode<T:Encodable<Encoder<'a>, io::IoError>>(object: &T) -> Vec<u8> { //Serialize the object in a string using a writer let mut m = Vec::new(); // FIXME(14302) remove the transmute and unsafe block. unsafe { let mut encoder = Encoder::new(&mut m as &mut io::Writer); // Vec<u8> never Errs let _ = object.encode(transmute(&mut encoder)); } m } } impl<'a> ::Encoder<io::IoError> for Encoder<'a> { fn emit_nil(&mut self) -> EncodeResult { write!(self.writer, "null") } fn emit_uint(&mut self, v: uint) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_u64(&mut self, v: u64) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_u32(&mut self, v: u32) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_u16(&mut self, v: u16) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_u8(&mut self, v: u8) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_int(&mut self, v: int) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_i64(&mut self, v: i64) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_i32(&mut self, v: i32) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_i16(&mut self, v: i16) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_i8(&mut self, v: i8) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_bool(&mut self, v: bool) -> EncodeResult { if v { write!(self.writer, "true") } else { write!(self.writer, "false") } } fn emit_f64(&mut self, v: f64) -> EncodeResult { write!(self.writer, "{}", fmt_number_or_null(v)) } fn emit_f32(&mut self, v: f32) -> EncodeResult { self.emit_f64(v as f64) } fn emit_char(&mut self, v: char) -> EncodeResult { escape_char(self.writer, v) } fn emit_str(&mut self, v: &str) -> EncodeResult { escape_str(self.writer, v) } fn emit_enum(&mut self, _name: &str, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { f(self) } fn emit_enum_variant(&mut self, name: &str, _id: uint, cnt: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { // enums are encoded as strings or objects // Bunny => "Bunny" // Kangaroo(34,"William") => {"variant": "Kangaroo", "fields": [34,"William"]} if cnt == 0 { escape_str(self.writer, name) } else { try!(write!(self.writer, "{{\"variant\":")); try!(escape_str(self.writer, name)); try!(write!(self.writer, ",\"fields\":[")); try!(f(self)); write!(self.writer, "]}}") } } fn emit_enum_variant_arg(&mut self, idx: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { if idx != 0 { try!(write!(self.writer, ",")); } f(self) } fn emit_enum_struct_variant(&mut self, name: &str, id: uint, cnt: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { 
self.emit_enum_variant(name, id, cnt, f) } fn emit_enum_struct_variant_field(&mut self, _: &str, idx: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { self.emit_enum_variant_arg(idx, f) } fn emit_struct(&mut self, _: &str, _: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { try!(write!(self.writer, "{{")); try!(f(self)); write!(self.writer, "}}") } fn emit_struct_field(&mut self, name: &str, idx: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { if idx != 0 { try!(write!(self.writer, ",")); } try!(escape_str(self.writer, name)); try!(write!(self.writer, ":")); f(self) } fn emit_tuple(&mut self, len: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { self.emit_seq(len, f) } fn emit_tuple_arg(&mut self, idx: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { self.emit_seq_elt(idx, f) } fn emit_tuple_struct(&mut self, _name: &str, len: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { self.emit_seq(len, f) } fn emit_tuple_struct_arg(&mut self, idx: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { self.emit_seq_elt(idx, f) } fn emit_option(&mut self, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { f(self) } fn emit_option_none(&mut self) -> EncodeResult { self.emit_nil() } fn emit_option_some(&mut self, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { f(self) } fn emit_seq(&mut self, _len: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { try!(write!(self.writer, "[")); try!(f(self)); write!(self.writer, "]") } fn emit_seq_elt(&mut self, idx: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { if idx != 0 { try!(write!(self.writer, ",")); } f(self) } fn emit_map(&mut self, _len: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { try!(write!(self.writer, "{{")); try!(f(self)); write!(self.writer, "}}") } fn emit_map_elt_key(&mut self, idx: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { if idx != 0 { try!(write!(self.writer, ",")) } // ref #12967, make sure to wrap a key in double quotes, // in the event that its of a type that omits them (eg numbers) let mut buf = Vec::new(); // FIXME(14302) remove the transmute and unsafe block. unsafe { let mut check_encoder = Encoder::new(&mut buf); try!(f(transmute(&mut check_encoder))); } let out = str::from_utf8(buf[]).unwrap(); let needs_wrapping = out.char_at(0) != '"' && out.char_at_reverse(out.len()) != '"'; if needs_wrapping { try!(write!(self.writer, "\"")); } try!(f(self)); if needs_wrapping { try!(write!(self.writer, "\"")); } Ok(()) } fn emit_map_elt_val(&mut self, _idx: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult { try!(write!(self.writer, ":")); f(self) } } /// Another encoder for JSON, but prints out human-readable JSON instead of /// compact data pub struct PrettyEncoder<'a> { writer: &'a mut (io::Writer+'a), curr_indent: uint, indent: uint, } impl<'a> PrettyEncoder<'a> { /// Creates a new encoder whose output will be written to the specified writer pub fn new<'a>(writer: &'a mut io::Writer) -> PrettyEncoder<'a> { PrettyEncoder { writer: writer, curr_indent: 0, indent: 2, } } /// Set the number of spaces to indent for each level. /// This is safe to set during encoding. pub fn set_indent<'a>(&mut self, indent: uint) { // self.indent very well could be 0 so we need to use checked division. 
let level = self.curr_indent.checked_div(self.indent).unwrap_or(0); self.indent = indent; self.curr_indent = level * self.indent; } } impl<'a> ::Encoder<io::IoError> for PrettyEncoder<'a> { fn emit_nil(&mut self) -> EncodeResult { write!(self.writer, "null") } fn emit_uint(&mut self, v: uint) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_u64(&mut self, v: u64) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_u32(&mut self, v: u32) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_u16(&mut self, v: u16) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_u8(&mut self, v: u8) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_int(&mut self, v: int) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_i64(&mut self, v: i64) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_i32(&mut self, v: i32) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_i16(&mut self, v: i16) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_i8(&mut self, v: i8) -> EncodeResult { write!(self.writer, "{}", v) } fn emit_bool(&mut self, v: bool) -> EncodeResult { if v { write!(self.writer, "true") } else { write!(self.writer, "false") } } fn emit_f64(&mut self, v: f64) -> EncodeResult { write!(self.writer, "{}", fmt_number_or_null(v)) } fn emit_f32(&mut self, v: f32) -> EncodeResult { self.emit_f64(v as f64) } fn emit_char(&mut self, v: char) -> EncodeResult { escape_char(self.writer, v) } fn emit_str(&mut self, v: &str) -> EncodeResult { escape_str(self.writer, v) } fn emit_enum(&mut self, _name: &str, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { f(self) } fn emit_enum_variant(&mut self, name: &str, _id: uint, cnt: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { if cnt == 0 { escape_str(self.writer, name) } else { try!(write!(self.writer, "{{\n")); self.curr_indent += self.indent; try!(spaces(self.writer, self.curr_indent)); try!(write!(self.writer, "\"variant\": ")); try!(escape_str(self.writer, name)); try!(write!(self.writer, ",\n")); try!(spaces(self.writer, self.curr_indent)); try!(write!(self.writer, "\"fields\": [\n")); self.curr_indent += self.indent; try!(f(self)); self.curr_indent -= self.indent; try!(write!(self.writer, "\n")); try!(spaces(self.writer, self.curr_indent)); self.curr_indent -= self.indent; try!(write!(self.writer, "]\n")); try!(spaces(self.writer, self.curr_indent)); write!(self.writer, "}}") } } fn emit_enum_variant_arg(&mut self, idx: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { if idx != 0 { try!(write!(self.writer, ",\n")); } try!(spaces(self.writer, self.curr_indent)); f(self) } fn emit_enum_struct_variant(&mut self, name: &str, id: uint, cnt: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { self.emit_enum_variant(name, id, cnt, f) } fn emit_enum_struct_variant_field(&mut self, _: &str, idx: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { self.emit_enum_variant_arg(idx, f) } fn emit_struct(&mut self, _: &str, len: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { if len == 0 { write!(self.writer, "{{}}") } else { try!(write!(self.writer, "{{")); self.curr_indent += self.indent; try!(f(self)); self.curr_indent -= self.indent; try!(write!(self.writer, "\n")); try!(spaces(self.writer, self.curr_indent)); write!(self.writer, "}}") } } fn emit_struct_field(&mut self, name: &str, idx: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { if idx == 0 { try!(write!(self.writer, "\n")); } else { 
try!(write!(self.writer, ",\n")); } try!(spaces(self.writer, self.curr_indent)); try!(escape_str(self.writer, name)); try!(write!(self.writer, ": ")); f(self) } fn emit_tuple(&mut self, len: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { self.emit_seq(len, f) } fn emit_tuple_arg(&mut self, idx: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { self.emit_seq_elt(idx, f) } fn emit_tuple_struct(&mut self, _: &str, len: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { self.emit_seq(len, f) } fn emit_tuple_struct_arg(&mut self, idx: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { self.emit_seq_elt(idx, f) } fn emit_option(&mut self, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { f(self) } fn emit_option_none(&mut self) -> EncodeResult { self.emit_nil() } fn emit_option_some(&mut self, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { f(self) } fn emit_seq(&mut self, len: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { if len == 0 { write!(self.writer, "[]") } else { try!(write!(self.writer, "[")); self.curr_indent += self.indent; try!(f(self)); self.curr_indent -= self.indent; try!(write!(self.writer, "\n")); try!(spaces(self.writer, self.curr_indent)); write!(self.writer, "]") } } fn emit_seq_elt(&mut self, idx: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { if idx == 0 { try!(write!(self.writer, "\n")); } else { try!(write!(self.writer, ",\n")); } try!(spaces(self.writer, self.curr_indent)); f(self) } fn emit_map(&mut self, len: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { if len == 0 { write!(self.writer, "{{}}") } else { try!(write!(self.writer, "{{")); self.curr_indent += self.indent; try!(f(self)); self.curr_indent -= self.indent; try!(write!(self.writer, "\n")); try!(spaces(self.writer, self.curr_indent)); write!(self.writer, "}}") } } fn emit_map_elt_key(&mut self, idx: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { if idx == 0 { try!(write!(self.writer, "\n")); } else { try!(write!(self.writer, ",\n")); } try!(spaces(self.writer, self.curr_indent)); // ref #12967, make sure to wrap a key in double quotes, // in the event that its of a type that omits them (eg numbers) let mut buf = Vec::new(); // FIXME(14302) remove the transmute and unsafe block. unsafe { let mut check_encoder = PrettyEncoder::new(&mut buf); try!(f(transmute(&mut check_encoder))); } let out = str::from_utf8(buf[]).unwrap(); let needs_wrapping = out.char_at(0) != '"' && out.char_at_reverse(out.len()) != '"'; if needs_wrapping { try!(write!(self.writer, "\"")); } try!(f(self)); if needs_wrapping { try!(write!(self.writer, "\"")); } Ok(()) } fn emit_map_elt_val(&mut self, _idx: uint, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult { try!(write!(self.writer, ": ")); f(self) } } impl<E: ::Encoder<S>, S> Encodable<E, S> for Json { fn encode(&self, e: &mut E) -> Result<(), S> { match *self { Json::I64(v) => v.encode(e), Json::U64(v) => v.encode(e), Json::F64(v) => v.encode(e), Json::String(ref v) => v.encode(e), Json::Boolean(v) => v.encode(e), Json::Array(ref v) => v.encode(e), Json::Object(ref v) => v.encode(e), Json::Null => e.emit_nil(), } } } impl Json { /// Encodes a json value into an io::writer. Uses a single line. pub fn to_writer(&self, writer: &mut io::Writer) -> EncodeResult { let mut encoder = Encoder::new(writer); self.encode(&mut encoder) } /// Encodes a json value into an io::writer. 
/// Pretty-prints in a more readable format. pub fn to_pretty_writer(&self, writer: &mut io::Writer) -> EncodeResult { let mut encoder = PrettyEncoder::new(writer); self.encode(&mut encoder) } /// Encodes a json value into a string pub fn to_pretty_str(&self) -> string::String { let mut s = Vec::new(); self.to_pretty_writer(&mut s as &mut io::Writer).unwrap(); string::String::from_utf8(s).unwrap() } /// If the Json value is an Object, returns the value associated with the provided key. /// Otherwise, returns None. pub fn find<'a>(&'a self, key: &str) -> Option<&'a Json>{ match self { &Json::Object(ref map) => map.get(key), _ => None } } /// Attempts to get a nested Json Object for each key in `keys`. /// If any key is found not to exist, find_path will return None. /// Otherwise, it will return the Json value associated with the final key. pub fn find_path<'a>(&'a self, keys: &[&str]) -> Option<&'a Json>{ let mut target = self; for key in keys.iter() { match target.find(*key) { Some(t) => { target = t; }, None => return None } } Some(target) } /// If the Json value is an Object, performs a depth-first search until /// a value associated with the provided key is found. If no value is found /// or the Json value is not an Object, returns None. pub fn search<'a>(&'a self, key: &str) -> Option<&'a Json> { match self { &Json::Object(ref map) => { match map.get(key) { Some(json_value) => Some(json_value), None => { for (_, v) in map.iter() { match v.search(key) { x if x.is_some() => return x, _ => () } } None } } }, _ => None } } /// Returns true if the Json value is an Object. Returns false otherwise. pub fn is_object<'a>(&'a self) -> bool { self.as_object().is_some() } /// If the Json value is an Object, returns the associated TreeMap. /// Returns None otherwise. pub fn as_object<'a>(&'a self) -> Option<&'a Object> { match self { &Json::Object(ref map) => Some(map), _ => None } } /// Returns true if the Json value is an Array. Returns false otherwise. pub fn is_array<'a>(&'a self) -> bool { self.as_array().is_some() } /// If the Json value is an Array, returns the associated vector. /// Returns None otherwise. pub fn as_array<'a>(&'a self) -> Option<&'a Array> { match self { &Json::Array(ref array) => Some(&*array), _ => None } } /// Returns true if the Json value is a String. Returns false otherwise. pub fn is_string<'a>(&'a self) -> bool { self.as_string().is_some() } /// If the Json value is a String, returns the associated str. /// Returns None otherwise. pub fn as_string<'a>(&'a self) -> Option<&'a str> { match *self { Json::String(ref s) => Some(s.as_slice()), _ => None } } /// Returns true if the Json value is a Number. Returns false otherwise. pub fn is_number(&self) -> bool { match *self { Json::I64(_) | Json::U64(_) | Json::F64(_) => true, _ => false, } } /// Returns true if the Json value is a i64. Returns false otherwise. pub fn is_i64(&self) -> bool { match *self { Json::I64(_) => true, _ => false, } } /// Returns true if the Json value is a u64. Returns false otherwise. pub fn is_u64(&self) -> bool { match *self { Json::U64(_) => true, _ => false, } } /// Returns true if the Json value is a f64. Returns false otherwise. pub fn is_f64(&self) -> bool { match *self { Json::F64(_) => true, _ => false, } } /// If the Json value is a number, return or cast it to a i64. /// Returns None otherwise. pub fn as_i64(&self) -> Option<i64> { match *self { Json::I64(n) => Some(n), Json::U64(n) => num::cast(n), _ => None } } /// If the Json value is a number, return or cast it to a u64. 
/// Returns None otherwise. pub fn as_u64(&self) -> Option<u64> { match *self { Json::I64(n) => num::cast(n), Json::U64(n) => Some(n), _ => None } } /// If the Json value is a number, return or cast it to a f64. /// Returns None otherwise. pub fn as_f64(&self) -> Option<f64> { match *self { Json::I64(n) => num::cast(n), Json::U64(n) => num::cast(n), Json::F64(n) => Some(n), _ => None } } /// Returns true if the Json value is a Boolean. Returns false otherwise. pub fn is_boolean(&self) -> bool { self.as_boolean().is_some() } /// If the Json value is a Boolean, returns the associated bool. /// Returns None otherwise. pub fn as_boolean(&self) -> Option<bool> { match self { &Json::Boolean(b) => Some(b), _ => None } } /// Returns true if the Json value is a Null. Returns false otherwise. pub fn is_null(&self) -> bool { self.as_null().is_some() } /// If the Json value is a Null, returns (). /// Returns None otherwise. pub fn as_null(&self) -> Option<()> { match self { &Json::Null => Some(()), _ => None } } } impl<'a> ops::Index<&'a str, Json> for Json { fn index<'a>(&'a self, idx: & &str) -> &'a Json { self.find(*idx).unwrap() } } impl ops::Index<uint, Json> for Json { fn index<'a>(&'a self, idx: &uint) -> &'a Json { match self { &Json::Array(ref v) => v.index(idx), _ => panic!("can only index Json with uint if it is an array") } } } /// The output of the streaming parser. #[deriving(PartialEq, Clone, Show)] pub enum JsonEvent { ObjectStart, ObjectEnd, ArrayStart, ArrayEnd, BooleanValue(bool), I64Value(i64), U64Value(u64), F64Value(f64), StringValue(string::String), NullValue, Error(ParserError), } #[deriving(PartialEq, Show)] enum ParserState { // Parse a value in an array, true means first element. ParseArray(bool), // Parse ',' or ']' after an element in an array. ParseArrayComma, // Parse a key:value in an object, true means first element. ParseObject(bool), // Parse ',' or ']' after an element in an object. ParseObjectComma, // Initial state. ParseStart, // Expecting the stream to end. ParseBeforeFinish, // Parsing can't continue. ParseFinished, } /// A Stack represents the current position of the parser in the logical /// structure of the JSON stream. /// For example foo.bar[3].x pub struct Stack { stack: Vec<InternalStackElement>, str_buffer: Vec<u8>, } /// StackElements compose a Stack. /// For example, Key("foo"), Key("bar"), Index(3) and Key("x") are the /// StackElements compositing the stack that represents foo.bar[3].x #[deriving(PartialEq, Clone, Show)] pub enum StackElement<'l> { Index(u32), Key(&'l str), } // Internally, Key elements are stored as indices in a buffer to avoid // allocating a string for every member of an object. #[deriving(PartialEq, Clone, Show)] enum InternalStackElement { InternalIndex(u32), InternalKey(u16, u16), // start, size } impl Stack { pub fn new() -> Stack { Stack { stack: Vec::new(), str_buffer: Vec::new() } } /// Returns The number of elements in the Stack. pub fn len(&self) -> uint { self.stack.len() } /// Returns true if the stack is empty. pub fn is_empty(&self) -> bool { self.stack.is_empty() } /// Provides access to the StackElement at a given index. /// lower indices are at the bottom of the stack while higher indices are /// at the top. pub fn get<'l>(&'l self, idx: uint) -> StackElement<'l> { match self.stack[idx] { InternalIndex(i) => Index(i), InternalKey(start, size) => { Key(str::from_utf8( self.str_buffer[start as uint .. start as uint + size as uint]).unwrap()) } } } /// Compares this stack with an array of StackElements. 
pub fn is_equal_to(&self, rhs: &[StackElement]) -> bool { if self.stack.len() != rhs.len() { return false; } for i in range(0, rhs.len()) { if self.get(i) != rhs[i] { return false; } } return true; } /// Returns true if the bottom-most elements of this stack are the same as /// the ones passed as parameter. pub fn starts_with(&self, rhs: &[StackElement]) -> bool { if self.stack.len() < rhs.len() { return false; } for i in range(0, rhs.len()) { if self.get(i) != rhs[i] { return false; } } return true; } /// Returns true if the top-most elements of this stack are the same as /// the ones passed as parameter. pub fn ends_with(&self, rhs: &[StackElement]) -> bool { if self.stack.len() < rhs.len() { return false; } let offset = self.stack.len() - rhs.len(); for i in range(0, rhs.len()) { if self.get(i + offset) != rhs[i] { return false; } } return true; } /// Returns the top-most element (if any). pub fn top<'l>(&'l self) -> Option<StackElement<'l>> { return match self.stack.last() { None => None, Some(&InternalIndex(i)) => Some(Index(i)), Some(&InternalKey(start, size)) => { Some(Key(str::from_utf8( self.str_buffer[start as uint .. (start+size) as uint] ).unwrap())) } } } // Used by Parser to insert Key elements at the top of the stack. fn push_key(&mut self, key: string::String) { self.stack.push(InternalKey(self.str_buffer.len() as u16, key.len() as u16)); for c in key.as_bytes().iter() { self.str_buffer.push(*c); } } // Used by Parser to insert Index elements at the top of the stack. fn push_index(&mut self, index: u32) { self.stack.push(InternalIndex(index)); } // Used by Parser to remove the top-most element of the stack. fn pop(&mut self) { assert!(!self.is_empty()); match *self.stack.last().unwrap() { InternalKey(_, sz) => { let new_size = self.str_buffer.len() - sz as uint; self.str_buffer.truncate(new_size); } InternalIndex(_) => {} } self.stack.pop(); } // Used by Parser to test whether the top-most element is an index. fn last_is_index(&self) -> bool { if self.is_empty() { return false; } return match *self.stack.last().unwrap() { InternalIndex(_) => true, _ => false, } } // Used by Parser to increment the index of the top-most element. fn bump_index(&mut self) { let len = self.stack.len(); let idx = match *self.stack.last().unwrap() { InternalIndex(i) => { i + 1 } _ => { panic!(); } }; self.stack[len - 1] = InternalIndex(idx); } } /// A streaming JSON parser implemented as an iterator of JsonEvent, consuming /// an iterator of char. pub struct Parser<T> { rdr: T, ch: Option<char>, line: uint, col: uint, // We maintain a stack representing where we are in the logical structure // of the JSON stream. stack: Stack, // A state machine is kept to make it possible to interrupt and resume parsing. state: ParserState, } impl<T: Iterator<char>> Iterator<JsonEvent> for Parser<T> { fn next(&mut self) -> Option<JsonEvent> { if self.state == ParseFinished { return None; } if self.state == ParseBeforeFinish { self.parse_whitespace(); // Make sure there are no trailing characters. if self.eof() { self.state = ParseFinished; return None; } else { return Some(self.error_event(TrailingCharacters)); } } return Some(self.parse()); } } impl<T: Iterator<char>> Parser<T> { /// Creates the JSON parser. pub fn new(rdr: T) -> Parser<T> { let mut p = Parser { rdr: rdr, ch: Some('\x00'), line: 1, col: 0, stack: Stack::new(), state: ParseStart, }; p.bump(); return p; } /// Provides access to the current position in the logical structure of the /// JSON stream.
pub fn stack<'l>(&'l self) -> &'l Stack { return &self.stack; } fn eof(&self) -> bool { self.ch.is_none() } fn ch_or_null(&self) -> char { self.ch.unwrap_or('\x00') } fn bump(&mut self) { self.ch = self.rdr.next(); if self.ch_is('\n') { self.line += 1u; self.col = 1u; } else { self.col += 1u; } } fn next_char(&mut self) -> Option<char> { self.bump(); self.ch } fn ch_is(&self, c: char) -> bool { self.ch == Some(c) } fn error<T>(&self, reason: ErrorCode) -> Result<T, ParserError> { Err(SyntaxError(reason, self.line, self.col)) } fn parse_whitespace(&mut self) { while self.ch_is(' ') || self.ch_is('\n') || self.ch_is('\t') || self.ch_is('\r') { self.bump(); } } fn parse_number(&mut self) -> JsonEvent { let mut neg = false; if self.ch_is('-') { self.bump(); neg = true; } let res = match self.parse_u64() { Ok(res) => res, Err(e) => { return Error(e); } }; if self.ch_is('.') || self.ch_is('e') || self.ch_is('E') { let mut res = res as f64; if self.ch_is('.') { res = match self.parse_decimal(res) { Ok(res) => res, Err(e) => { return Error(e); } }; } if self.ch_is('e') || self.ch_is('E') { res = match self.parse_exponent(res) { Ok(res) => res, Err(e) => { return Error(e); } }; } if neg { res *= -1.0; } F64Value(res) } else { if neg { let res = -(res as i64); // Make sure we didn't underflow. if res > 0 { Error(SyntaxError(InvalidNumber, self.line, self.col)) } else { I64Value(res) } } else { U64Value(res) } } } fn parse_u64(&mut self) -> Result<u64, ParserError> { let mut accum = 0; let last_accum = 0; // necessary to detect overflow. match self.ch_or_null() { '0' => { self.bump(); // A leading '0' must be the only digit before the decimal point. match self.ch_or_null() { '0' ... '9' => return self.error(InvalidNumber), _ => () } }, '1' ... '9' => { while !self.eof() { match self.ch_or_null() { c @ '0' ... '9' => { accum *= 10; accum += (c as u64) - ('0' as u64); // Detect overflow by comparing to the last value. if accum <= last_accum { return self.error(InvalidNumber); } self.bump(); } _ => break, } } } _ => return self.error(InvalidNumber), } Ok(accum) } fn parse_decimal(&mut self, mut res: f64) -> Result<f64, ParserError> { self.bump(); // Make sure a digit follows the decimal place. match self.ch_or_null() { '0' ... '9' => (), _ => return self.error(InvalidNumber) } let mut dec = 1.0; while !self.eof() { match self.ch_or_null() { c @ '0' ... '9' => { dec /= 10.0; res += (((c as int) - ('0' as int)) as f64) * dec; self.bump(); } _ => break, } } Ok(res) } fn parse_exponent(&mut self, mut res: f64) -> Result<f64, ParserError> { self.bump(); let mut exp = 0u; let mut neg_exp = false; if self.ch_is('+') { self.bump(); } else if self.ch_is('-') { self.bump(); neg_exp = true; } // Make sure a digit follows the exponent place. match self.ch_or_null() { '0' ... '9' => (), _ => return self.error(InvalidNumber) } while !self.eof() { match self.ch_or_null() { c @ '0' ... '9' => { exp *= 10; exp += (c as uint) - ('0' as uint); self.bump(); } _ => break } } let exp = 10_f64.powi(exp as i32); if neg_exp { res /= exp; } else { res *= exp; } Ok(res) } fn decode_hex_escape(&mut self) -> Result<u16, ParserError> { let mut i = 0u; let mut n = 0u16; while i < 4 && !self.eof() { self.bump(); n = match self.ch_or_null() { c @ '0' ... 
'9' => n * 16 + ((c as u16) - ('0' as u16)), 'a' | 'A' => n * 16 + 10, 'b' | 'B' => n * 16 + 11, 'c' | 'C' => n * 16 + 12, 'd' | 'D' => n * 16 + 13, 'e' | 'E' => n * 16 + 14, 'f' | 'F' => n * 16 + 15, _ => return self.error(InvalidEscape) }; i += 1u; } // Error out if we didn't parse 4 digits. if i != 4 { return self.error(InvalidEscape); } Ok(n) } fn parse_str(&mut self) -> Result<string::String, ParserError> { let mut escape = false; let mut res = string::String::new(); loop { self.bump(); if self.eof() { return self.error(EOFWhileParsingString); } if escape { match self.ch_or_null() { '"' => res.push('"'), '\\' => res.push('\\'), '/' => res.push('/'), 'b' => res.push('\x08'), 'f' => res.push('\x0c'), 'n' => res.push('\n'), 'r' => res.push('\r'), 't' => res.push('\t'), 'u' => match try!(self.decode_hex_escape()) { 0xDC00 ... 0xDFFF => { return self.error(LoneLeadingSurrogateInHexEscape) } // Non-BMP characters are encoded as a sequence of // two hex escapes, representing UTF-16 surrogates. n1 @ 0xD800 ... 0xDBFF => { match (self.next_char(), self.next_char()) { (Some('\\'), Some('u')) => (), _ => return self.error(UnexpectedEndOfHexEscape), } let buf = [n1, try!(self.decode_hex_escape())]; match str::utf16_items(buf.as_slice()).next() { Some(ScalarValue(c)) => res.push(c), _ => return self.error(LoneLeadingSurrogateInHexEscape), } } n => match char::from_u32(n as u32) { Some(c) => res.push(c), None => return self.error(InvalidUnicodeCodePoint), }, }, _ => return self.error(InvalidEscape), } escape = false; } else if self.ch_is('\\') { escape = true; } else { match self.ch { Some('"') => { self.bump(); return Ok(res); }, Some(c) => res.push(c), None => unreachable!() } } } } // Invoked at each iteration, consumes the stream until it has enough // information to return a JsonEvent. // Manages an internal state so that parsing can be interrupted and resumed. // Also keeps track of the position in the logical structure of the json // stream in the form of a stack that can be queried by the user using the // stack() method. fn parse(&mut self) -> JsonEvent { loop { // The only paths where the loop can spin a new iteration // are in the cases ParseArrayComma and ParseObjectComma if ',' // is parsed. In these cases the state is set to (respectively) // ParseArray(false) and ParseObject(false), which always return, // so there is no risk of getting stuck in an infinite loop. // All other paths return before the end of the loop's iteration.
self.parse_whitespace(); match self.state { ParseStart => { return self.parse_start(); } ParseArray(first) => { return self.parse_array(first); } ParseArrayComma => { match self.parse_array_comma_or_end() { Some(evt) => { return evt; } None => {} } } ParseObject(first) => { return self.parse_object(first); } ParseObjectComma => { self.stack.pop(); if self.ch_is(',') { self.state = ParseObject(false); self.bump(); } else { return self.parse_object_end(); } } _ => { return self.error_event(InvalidSyntax); } } } } fn parse_start(&mut self) -> JsonEvent { let val = self.parse_value(); self.state = match val { Error(_) => ParseFinished, ArrayStart => ParseArray(true), ObjectStart => ParseObject(true), _ => ParseBeforeFinish, }; return val; } fn parse_array(&mut self, first: bool) -> JsonEvent { if self.ch_is(']') { if !first { self.error_event(InvalidSyntax) } else { self.state = if self.stack.is_empty() { ParseBeforeFinish } else if self.stack.last_is_index() { ParseArrayComma } else { ParseObjectComma }; self.bump(); ArrayEnd } } else { if first { self.stack.push_index(0); } let val = self.parse_value(); self.state = match val { Error(_) => ParseFinished, ArrayStart => ParseArray(true), ObjectStart => ParseObject(true), _ => ParseArrayComma, }; val } } fn parse_array_comma_or_end(&mut self) -> Option<JsonEvent> { if self.ch_is(',') { self.stack.bump_index(); self.state = ParseArray(false); self.bump(); None } else if self.ch_is(']') { self.stack.pop(); self.state = if self.stack.is_empty() { ParseBeforeFinish } else if self.stack.last_is_index() { ParseArrayComma } else { ParseObjectComma }; self.bump(); Some(ArrayEnd) } else if self.eof() { Some(self.error_event(EOFWhileParsingArray)) } else { Some(self.error_event(InvalidSyntax)) } } fn parse_object(&mut self, first: bool) -> JsonEvent { if self.ch_is('}') { if !first { if self.stack.is_empty() { return self.error_event(TrailingComma); } else { self.stack.pop(); } } self.state = if self.stack.is_empty() { ParseBeforeFinish } else if self.stack.last_is_index() { ParseArrayComma } else { ParseObjectComma }; self.bump(); return ObjectEnd; } if self.eof() { return self.error_event(EOFWhileParsingObject); } if !self.ch_is('"') { return self.error_event(KeyMustBeAString); } let s = match self.parse_str() { Ok(s) => s, Err(e) => { self.state = ParseFinished; return Error(e); } }; self.parse_whitespace(); if self.eof() { return self.error_event(EOFWhileParsingObject); } else if self.ch_or_null() != ':' { return self.error_event(ExpectedColon); } self.stack.push_key(s); self.bump(); self.parse_whitespace(); let val = self.parse_value(); self.state = match val { Error(_) => ParseFinished, ArrayStart => ParseArray(true), ObjectStart => ParseObject(true), _ => ParseObjectComma, }; return val; } fn parse_object_end(&mut self) -> JsonEvent { if self.ch_is('}') { self.state = if self.stack.is_empty() { ParseBeforeFinish } else if self.stack.last_is_index() { ParseArrayComma } else { ParseObjectComma }; self.bump(); ObjectEnd } else if self.eof() { self.error_event(EOFWhileParsingObject) } else { self.error_event(InvalidSyntax) } } fn parse_value(&mut self) -> JsonEvent { if self.eof() { return self.error_event(EOFWhileParsingValue); } match self.ch_or_null() { 'n' => { self.parse_ident("ull", NullValue) } 't' => { self.parse_ident("rue", BooleanValue(true)) } 'f' => { self.parse_ident("alse", BooleanValue(false)) } '0' ... 
'9' | '-' => self.parse_number(), '"' => match self.parse_str() { Ok(s) => StringValue(s), Err(e) => Error(e), }, '[' => { self.bump(); ArrayStart } '{' => { self.bump(); ObjectStart } _ => { self.error_event(InvalidSyntax) } } } fn parse_ident(&mut self, ident: &str, value: JsonEvent) -> JsonEvent { if ident.chars().all(|c| Some(c) == self.next_char()) { self.bump(); value } else { Error(SyntaxError(InvalidSyntax, self.line, self.col)) } } fn error_event(&mut self, reason: ErrorCode) -> JsonEvent { self.state = ParseFinished; Error(SyntaxError(reason, self.line, self.col)) } } /// A Builder consumes a json::Parser to create a generic Json structure. pub struct Builder<T> { parser: Parser<T>, token: Option<JsonEvent>, } impl<T: Iterator<char>> Builder<T> { /// Create a JSON Builder. pub fn new(src: T) -> Builder<T> { Builder { parser: Parser::new(src), token: None, } } // Decode a Json value from a Parser. pub fn build(&mut self) -> Result<Json, BuilderError> { self.bump(); let result = self.build_value(); self.bump(); match self.token { None => {} Some(Error(e)) => { return Err(e); } ref tok => { panic!("unexpected token {}", tok.clone()); } } result } fn bump(&mut self) { self.token = self.parser.next(); } fn build_value(&mut self) -> Result<Json, BuilderError> { return match self.token { Some(NullValue) => Ok(Json::Null), Some(I64Value(n)) => Ok(Json::I64(n)), Some(U64Value(n)) => Ok(Json::U64(n)), Some(F64Value(n)) => Ok(Json::F64(n)), Some(BooleanValue(b)) => Ok(Json::Boolean(b)), Some(StringValue(ref mut s)) => { let mut temp = string::String::new(); swap(s, &mut temp); Ok(Json::String(temp)) } Some(Error(e)) => Err(e), Some(ArrayStart) => self.build_array(), Some(ObjectStart) => self.build_object(), Some(ObjectEnd) => self.parser.error(InvalidSyntax), Some(ArrayEnd) => self.parser.error(InvalidSyntax), None => self.parser.error(EOFWhileParsingValue), } } fn build_array(&mut self) -> Result<Json, BuilderError> { self.bump(); let mut values = Vec::new(); loop { if self.token == Some(ArrayEnd) { return Ok(Json::Array(values.into_iter().collect())); } match self.build_value() { Ok(v) => values.push(v), Err(e) => { return Err(e) } } self.bump(); } } fn build_object(&mut self) -> Result<Json, BuilderError> { self.bump(); let mut values = TreeMap::new(); loop { match self.token { Some(ObjectEnd) => { return Ok(Json::Object(values)); } Some(Error(e)) => { return Err(e); } None => { break; } _ => {} } let key = match self.parser.stack().top() { Some(Key(k)) => { k.to_string() } _ => { panic!("invalid state"); } }; match self.build_value() { Ok(value) => { values.insert(key, value); } Err(e) => { return Err(e); } } self.bump(); } return self.parser.error(EOFWhileParsingObject); } } /// Decodes a json value from an `&mut io::Reader` pub fn from_reader(rdr: &mut io::Reader) -> Result<Json, BuilderError> { let contents = match rdr.read_to_end() { Ok(c) => c, Err(e) => return Err(io_error_to_error(e)) }; let s = match str::from_utf8(contents.as_slice()) { Some(s) => s, _ => return Err(SyntaxError(NotUtf8, 0, 0)) }; let mut builder = Builder::new(s.chars()); builder.build() } /// Decodes a json value from a string pub fn from_str(s: &str) -> Result<Json, BuilderError> { let mut builder = Builder::new(s.chars()); builder.build() } /// A structure to decode JSON to values in rust. pub struct Decoder { stack: Vec<Json>, } impl Decoder { /// Creates a new decoder instance for decoding the specified JSON value. 
pub fn new(json: Json) -> Decoder { Decoder { stack: vec![json] } } } impl Decoder { fn pop(&mut self) -> Json { self.stack.pop().unwrap() } } macro_rules! expect( ($e:expr, Null) => ({ match $e { Json::Null => Ok(()), other => Err(ExpectedError("Null".into_string(), format!("{}", other))) } }); ($e:expr, $t:ident) => ({ match $e { Json::$t(v) => Ok(v), other => { Err(ExpectedError(stringify!($t).to_string(), format!("{}", other))) } } }) ) macro_rules! read_primitive { ($name:ident, $ty:ty) => { fn $name(&mut self) -> DecodeResult<$ty> { match self.pop() { Json::I64(f) => match num::cast(f) { Some(f) => Ok(f), None => Err(ExpectedError("Number".into_string(), format!("{}", f))), }, Json::U64(f) => match num::cast(f) { Some(f) => Ok(f), None => Err(ExpectedError("Number".into_string(), format!("{}", f))), }, Json::F64(f) => Err(ExpectedError("Integer".into_string(), format!("{}", f))), // re: #12967.. a type w/ numeric keys (ie HashMap<uint, V> etc) // is going to have a string here, as per JSON spec. Json::String(s) => match std::str::from_str(s.as_slice()) { Some(f) => Ok(f), None => Err(ExpectedError("Number".into_string(), s)), }, value => Err(ExpectedError("Number".into_string(), format!("{}", value))), } } } } impl ::Decoder<DecoderError> for Decoder { fn read_nil(&mut self) -> DecodeResult<()> { debug!("read_nil"); expect!(self.pop(), Null) } read_primitive!(read_uint, uint) read_primitive!(read_u8, u8) read_primitive!(read_u16, u16) read_primitive!(read_u32, u32) read_primitive!(read_u64, u64) read_primitive!(read_int, int) read_primitive!(read_i8, i8) read_primitive!(read_i16, i16) read_primitive!(read_i32, i32) read_primitive!(read_i64, i64) fn read_f32(&mut self) -> DecodeResult<f32> { self.read_f64().map(|x| x as f32) } fn read_f64(&mut self) -> DecodeResult<f64> { debug!("read_f64"); match self.pop() { Json::I64(f) => Ok(f as f64), Json::U64(f) => Ok(f as f64), Json::F64(f) => Ok(f), Json::String(s) => { // re: #12967.. a type w/ numeric keys (ie HashMap<uint, V> etc) // is going to have a string here, as per JSON spec. 
match std::str::from_str(s.as_slice()) { Some(f) => Ok(f), None => Err(ExpectedError("Number".into_string(), s)), } }, Json::Null => Ok(f64::NAN), value => Err(ExpectedError("Number".into_string(), format!("{}", value))) } } fn read_bool(&mut self) -> DecodeResult<bool> { debug!("read_bool"); expect!(self.pop(), Boolean) } fn read_char(&mut self) -> DecodeResult<char> { let s = try!(self.read_str()); { let mut it = s.chars(); match (it.next(), it.next()) { // exactly one character (Some(c), None) => return Ok(c), _ => () } } Err(ExpectedError("single character string".into_string(), format!("{}", s))) } fn read_str(&mut self) -> DecodeResult<string::String> { debug!("read_str"); expect!(self.pop(), String) } fn read_enum<T>(&mut self, name: &str, f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_enum({})", name); f(self) } fn read_enum_variant<T>(&mut self, names: &[&str], f: |&mut Decoder, uint| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_enum_variant(names={})", names); let name = match self.pop() { Json::String(s) => s, Json::Object(mut o) => { let n = match o.remove(&"variant".into_string()) { Some(Json::String(s)) => s, Some(val) => { return Err(ExpectedError("String".into_string(), format!("{}", val))) } None => { return Err(MissingFieldError("variant".into_string())) } }; match o.remove(&"fields".into_string()) { Some(Json::Array(l)) => { for field in l.into_iter().rev() { self.stack.push(field); } }, Some(val) => { return Err(ExpectedError("Array".into_string(), format!("{}", val))) } None => { return Err(MissingFieldError("fields".into_string())) } } n } json => { return Err(ExpectedError("String or Object".into_string(), format!("{}", json))) } }; let idx = match names.iter() .position(|n| str::eq_slice(*n, name.as_slice())) { Some(idx) => idx, None => return Err(UnknownVariantError(name)) }; f(self, idx) } fn read_enum_variant_arg<T>(&mut self, idx: uint, f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_enum_variant_arg(idx={})", idx); f(self) } fn read_enum_struct_variant<T>(&mut self, names: &[&str], f: |&mut Decoder, uint| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_enum_struct_variant(names={})", names); self.read_enum_variant(names, f) } fn read_enum_struct_variant_field<T>(&mut self, name: &str, idx: uint, f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_enum_struct_variant_field(name={}, idx={})", name, idx); self.read_enum_variant_arg(idx, f) } fn read_struct<T>(&mut self, name: &str, len: uint, f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_struct(name={}, len={})", name, len); let value = try!(f(self)); self.pop(); Ok(value) } fn read_struct_field<T>(&mut self, name: &str, idx: uint, f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_struct_field(name={}, idx={})", name, idx); let mut obj = try!(expect!(self.pop(), Object)); let value = match obj.remove(&name.to_string()) { None => { // Add a Null and try to parse it as an Option<_> // to get None as a default value. 
self.stack.push(Json::Null); match f(self) { Ok(x) => x, Err(_) => return Err(MissingFieldError(name.to_string())), } }, Some(json) => { self.stack.push(json); try!(f(self)) } }; self.stack.push(Json::Object(obj)); Ok(value) } fn read_tuple<T>(&mut self, tuple_len: uint, f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_tuple()"); self.read_seq(|d, len| { if len == tuple_len { f(d) } else { Err(ExpectedError(format!("Tuple{}", tuple_len), format!("Tuple{}", len))) } }) } fn read_tuple_arg<T>(&mut self, idx: uint, f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_tuple_arg(idx={})", idx); self.read_seq_elt(idx, f) } fn read_tuple_struct<T>(&mut self, name: &str, len: uint, f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_tuple_struct(name={})", name); self.read_tuple(len, f) } fn read_tuple_struct_arg<T>(&mut self, idx: uint, f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_tuple_struct_arg(idx={})", idx); self.read_tuple_arg(idx, f) } fn read_option<T>(&mut self, f: |&mut Decoder, bool| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_option()"); match self.pop() { Json::Null => f(self, false), value => { self.stack.push(value); f(self, true) } } } fn read_seq<T>(&mut self, f: |&mut Decoder, uint| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_seq()"); let array = try!(expect!(self.pop(), Array)); let len = array.len(); for v in array.into_iter().rev() { self.stack.push(v); } f(self, len) } fn read_seq_elt<T>(&mut self, idx: uint, f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_seq_elt(idx={})", idx); f(self) } fn read_map<T>(&mut self, f: |&mut Decoder, uint| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_map()"); let obj = try!(expect!(self.pop(), Object)); let len = obj.len(); for (key, value) in obj.into_iter() { self.stack.push(value); self.stack.push(Json::String(key)); } f(self, len) } fn read_map_elt_key<T>(&mut self, idx: uint, f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_map_elt_key(idx={})", idx); f(self) } fn read_map_elt_val<T>(&mut self, idx: uint, f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> { debug!("read_map_elt_val(idx={})", idx); f(self) } fn error(&mut self, err: &str) -> DecoderError { ApplicationError(err.to_string()) } } /// A trait for converting values to JSON pub trait ToJson for Sized? { /// Converts the value of `self` to an instance of JSON fn to_json(&self) -> Json; } macro_rules! to_json_impl_i64( ($($t:ty), +) => ( $(impl ToJson for $t { fn to_json(&self) -> Json { Json::I64(*self as i64) } })+ ) ) to_json_impl_i64!(int, i8, i16, i32, i64) macro_rules! to_json_impl_u64( ($($t:ty), +) => ( $(impl ToJson for $t { fn to_json(&self) -> Json { Json::U64(*self as u64) } })+ ) ) to_json_impl_u64!(uint, u8, u16, u32, u64) impl ToJson for Json { fn to_json(&self) -> Json { self.clone() } } impl ToJson for f32 { fn to_json(&self) -> Json { (*self as f64).to_json() } } impl ToJson for f64 { fn to_json(&self) -> Json { match self.classify() { FPNaN | FPInfinite => Json::Null, _ => Json::F64(*self) } } } impl ToJson for () { fn to_json(&self) -> Json { Json::Null } } impl ToJson for bool { fn to_json(&self) -> Json { Json::Boolean(*self) } } impl ToJson for str { fn to_json(&self) -> Json { Json::String(self.into_string()) } } impl ToJson for string::String { fn to_json(&self) -> Json { Json::String((*self).clone()) } } macro_rules! 
tuple_impl { // use variables to indicate the arity of the tuple ($($tyvar:ident),* ) => { // the trailing commas are for the 1 tuple impl< $( $tyvar : ToJson ),* > ToJson for ( $( $tyvar ),* , ) { #[inline] #[allow(non_snake_case)] fn to_json(&self) -> Json { match *self { ($(ref $tyvar),*,) => Json::Array(vec![$($tyvar.to_json()),*]) } } } } } tuple_impl!{A} tuple_impl!{A, B} tuple_impl!{A, B, C} tuple_impl!{A, B, C, D} tuple_impl!{A, B, C, D, E} tuple_impl!{A, B, C, D, E, F} tuple_impl!{A, B, C, D, E, F, G} tuple_impl!{A, B, C, D, E, F, G, H} tuple_impl!{A, B, C, D, E, F, G, H, I} tuple_impl!{A, B, C, D, E, F, G, H, I, J} tuple_impl!{A, B, C, D, E, F, G, H, I, J, K} tuple_impl!{A, B, C, D, E, F, G, H, I, J, K, L} impl<A: ToJson> ToJson for [A] { fn to_json(&self) -> Json { Json::Array(self.iter().map(|elt| elt.to_json()).collect()) } } impl<A: ToJson> ToJson for Vec<A> { fn to_json(&self) -> Json { Json::Array(self.iter().map(|elt| elt.to_json()).collect()) } } impl<A: ToJson> ToJson for TreeMap<string::String, A> { fn to_json(&self) -> Json { let mut d = TreeMap::new(); for (key, value) in self.iter() { d.insert((*key).clone(), value.to_json()); } Json::Object(d) } } impl<A: ToJson> ToJson for HashMap<string::String, A> { fn to_json(&self) -> Json { let mut d = TreeMap::new(); for (key, value) in self.iter() { d.insert((*key).clone(), value.to_json()); } Json::Object(d) } } impl<A:ToJson> ToJson for Option<A> { fn to_json(&self) -> Json { match *self { None => Json::Null, Some(ref value) => value.to_json() } } } impl fmt::Show for Json { /// Encodes a json value into a string fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.to_writer(f).map_err(|_| fmt::Error) } } impl FromStr for Json { fn from_str(s: &str) -> Option<Json> { from_str(s).ok() } } #[cfg(test)] mod tests { extern crate test; use self::Animal::*; use self::DecodeEnum::*; use self::test::Bencher; use {Encodable, Decodable}; use super::Json::*; use super::ErrorCode::*; use super::ParserError::*; use super::DecoderError::*; use super::JsonEvent::*; use super::ParserState::*; use super::StackElement::*; use super::InternalStackElement::*; use super::{PrettyEncoder, Json, from_str, DecodeResult, DecoderError, JsonEvent, Parser, StackElement, Stack, Encoder, Decoder}; use std::{i64, u64, f32, f64, io}; use std::collections::TreeMap; use std::num::Float; use std::string; #[deriving(Decodable, Eq, PartialEq, Show)] struct OptionData { opt: Option<uint>, } #[test] fn test_decode_option_none() { let s ="{}"; let obj: OptionData = super::decode(s).unwrap(); assert_eq!(obj, OptionData { opt: None }); } #[test] fn test_decode_option_some() { let s = "{ \"opt\": 10 }"; let obj: OptionData = super::decode(s).unwrap(); assert_eq!(obj, OptionData { opt: Some(10u) }); } #[test] fn test_decode_option_malformed() { check_err::<OptionData>("{ \"opt\": [] }", ExpectedError("Number".into_string(), "[]".into_string())); check_err::<OptionData>("{ \"opt\": false }", ExpectedError("Number".into_string(), "false".into_string())); } #[deriving(PartialEq, Encodable, Decodable, Show)] enum Animal { Dog, Frog(string::String, int) } #[deriving(PartialEq, Encodable, Decodable, Show)] struct Inner { a: (), b: uint, c: Vec<string::String>, } #[deriving(PartialEq, Encodable, Decodable, Show)] struct Outer { inner: Vec<Inner>, } fn mk_object(items: &[(string::String, Json)]) -> Json { let mut d = TreeMap::new(); for item in items.iter() { match *item { (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); }, } }; Object(d) } 
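    // Illustrative note (not part of the original suite): mk_object(&[]) builds
    // the same value as from_str("{}").unwrap(), so the tests below can state
    // expected objects directly without routing them through the parser.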
#[test] fn test_from_str_trait() { let s = "null"; assert!(::std::str::from_str::<Json>(s).unwrap() == from_str(s).unwrap()); } #[test] fn test_write_null() { assert_eq!(Null.to_string(), "null"); assert_eq!(Null.to_pretty_str(), "null"); } #[test] fn test_write_i64() { assert_eq!(U64(0).to_string(), "0"); assert_eq!(U64(0).to_pretty_str(), "0"); assert_eq!(U64(1234).to_string(), "1234"); assert_eq!(U64(1234).to_pretty_str(), "1234"); assert_eq!(I64(-5678).to_string(), "-5678"); assert_eq!(I64(-5678).to_pretty_str(), "-5678"); assert_eq!(U64(7650007200025252000).to_string(), "7650007200025252000"); assert_eq!(U64(7650007200025252000).to_pretty_str(), "7650007200025252000"); } #[test] fn test_write_f64() { assert_eq!(F64(3.0).to_string(), "3.0"); assert_eq!(F64(3.0).to_pretty_str(), "3.0"); assert_eq!(F64(3.1).to_string(), "3.1"); assert_eq!(F64(3.1).to_pretty_str(), "3.1"); assert_eq!(F64(-1.5).to_string(), "-1.5"); assert_eq!(F64(-1.5).to_pretty_str(), "-1.5"); assert_eq!(F64(0.5).to_string(), "0.5"); assert_eq!(F64(0.5).to_pretty_str(), "0.5"); assert_eq!(F64(f64::NAN).to_string(), "null"); assert_eq!(F64(f64::NAN).to_pretty_str(), "null"); assert_eq!(F64(f64::INFINITY).to_string(), "null"); assert_eq!(F64(f64::INFINITY).to_pretty_str(), "null"); assert_eq!(F64(f64::NEG_INFINITY).to_string(), "null"); assert_eq!(F64(f64::NEG_INFINITY).to_pretty_str(), "null"); } #[test] fn test_write_str() { assert_eq!(String("".into_string()).to_string(), "\"\""); assert_eq!(String("".into_string()).to_pretty_str(), "\"\""); assert_eq!(String("homura".into_string()).to_string(), "\"homura\""); assert_eq!(String("madoka".into_string()).to_pretty_str(), "\"madoka\""); } #[test] fn test_write_bool() { assert_eq!(Boolean(true).to_string(), "true"); assert_eq!(Boolean(true).to_pretty_str(), "true"); assert_eq!(Boolean(false).to_string(), "false"); assert_eq!(Boolean(false).to_pretty_str(), "false"); } #[test] fn test_write_array() { assert_eq!(Array(vec![]).to_string(), "[]"); assert_eq!(Array(vec![]).to_pretty_str(), "[]"); assert_eq!(Array(vec![Boolean(true)]).to_string(), "[true]"); assert_eq!( Array(vec![Boolean(true)]).to_pretty_str(), "\ [\n \ true\n\ ]" ); let long_test_array = Array(vec![ Boolean(false), Null, Array(vec![String("foo\nbar".into_string()), F64(3.5)])]); assert_eq!(long_test_array.to_string(), "[false,null,[\"foo\\nbar\",3.5]]"); assert_eq!( long_test_array.to_pretty_str(), "\ [\n \ false,\n \ null,\n \ [\n \ \"foo\\nbar\",\n \ 3.5\n \ ]\n\ ]" ); } #[test] fn test_write_object() { assert_eq!(mk_object(&[]).to_string(), "{}"); assert_eq!(mk_object(&[]).to_pretty_str(), "{}"); assert_eq!( mk_object(&[ ("a".into_string(), Boolean(true)) ]).to_string(), "{\"a\":true}" ); assert_eq!( mk_object(&[("a".into_string(), Boolean(true))]).to_pretty_str(), "\ {\n \ \"a\": true\n\ }" ); let complex_obj = mk_object(&[ ("b".into_string(), Array(vec![ mk_object(&[("c".into_string(), String("\x0c\r".into_string()))]), mk_object(&[("d".into_string(), String("".into_string()))]) ])) ]); assert_eq!( complex_obj.to_string(), "{\ \"b\":[\ {\"c\":\"\\f\\r\"},\ {\"d\":\"\"}\ ]\ }" ); assert_eq!( complex_obj.to_pretty_str(), "\ {\n \ \"b\": [\n \ {\n \ \"c\": \"\\f\\r\"\n \ },\n \ {\n \ \"d\": \"\"\n \ }\n \ ]\n\ }" ); let a = mk_object(&[ ("a".into_string(), Boolean(true)), ("b".into_string(), Array(vec![ mk_object(&[("c".into_string(), String("\x0c\r".into_string()))]), mk_object(&[("d".into_string(), String("".into_string()))]) ])) ]); // We can't compare the strings directly because the object fields be // 
printed in a different order. assert_eq!(a.clone(), from_str(a.to_string().as_slice()).unwrap()); assert_eq!(a.clone(), from_str(a.to_pretty_str().as_slice()).unwrap()); } fn with_str_writer(f: |&mut io::Writer|) -> string::String { use std::str; let mut m = Vec::new(); f(&mut m as &mut io::Writer); string::String::from_utf8(m).unwrap() } #[test] fn test_write_enum() { let animal = Dog; assert_eq!( with_str_writer(|writer| { let mut encoder = Encoder::new(writer); animal.encode(&mut encoder).unwrap(); }), "\"Dog\"" ); assert_eq!( with_str_writer(|writer| { let mut encoder = PrettyEncoder::new(writer); animal.encode(&mut encoder).unwrap(); }), "\"Dog\"" ); let animal = Frog("Henry".into_string(), 349); assert_eq!( with_str_writer(|writer| { let mut encoder = Encoder::new(writer); animal.encode(&mut encoder).unwrap(); }), "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}" ); assert_eq!( with_str_writer(|writer| { let mut encoder = PrettyEncoder::new(writer); animal.encode(&mut encoder).unwrap(); }), "{\n \ \"variant\": \"Frog\",\n \ \"fields\": [\n \ \"Henry\",\n \ 349\n \ ]\n\ }" ); } #[test] fn test_write_some() { let value = Some("jodhpurs".into_string()); let s = with_str_writer(|writer| { let mut encoder = Encoder::new(writer); value.encode(&mut encoder).unwrap(); }); assert_eq!(s, "\"jodhpurs\""); let value = Some("jodhpurs".into_string()); let s = with_str_writer(|writer| { let mut encoder = PrettyEncoder::new(writer); value.encode(&mut encoder).unwrap(); }); assert_eq!(s, "\"jodhpurs\""); } #[test] fn test_write_none() { let value: Option<string::String> = None; let s = with_str_writer(|writer| { let mut encoder = Encoder::new(writer); value.encode(&mut encoder).unwrap(); }); assert_eq!(s, "null"); let s = with_str_writer(|writer| { let mut encoder = Encoder::new(writer); value.encode(&mut encoder).unwrap(); }); assert_eq!(s, "null"); } #[test] fn test_trailing_characters() { assert_eq!(from_str("nulla"), Err(SyntaxError(TrailingCharacters, 1, 5))); assert_eq!(from_str("truea"), Err(SyntaxError(TrailingCharacters, 1, 5))); assert_eq!(from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6))); assert_eq!(from_str("1a"), Err(SyntaxError(TrailingCharacters, 1, 2))); assert_eq!(from_str("[]a"), Err(SyntaxError(TrailingCharacters, 1, 3))); assert_eq!(from_str("{}a"), Err(SyntaxError(TrailingCharacters, 1, 3))); } #[test] fn test_read_identifiers() { assert_eq!(from_str("n"), Err(SyntaxError(InvalidSyntax, 1, 2))); assert_eq!(from_str("nul"), Err(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(from_str("t"), Err(SyntaxError(InvalidSyntax, 1, 2))); assert_eq!(from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(from_str("f"), Err(SyntaxError(InvalidSyntax, 1, 2))); assert_eq!(from_str("faz"), Err(SyntaxError(InvalidSyntax, 1, 3))); assert_eq!(from_str("null"), Ok(Null)); assert_eq!(from_str("true"), Ok(Boolean(true))); assert_eq!(from_str("false"), Ok(Boolean(false))); assert_eq!(from_str(" null "), Ok(Null)); assert_eq!(from_str(" true "), Ok(Boolean(true))); assert_eq!(from_str(" false "), Ok(Boolean(false))); } #[test] fn test_decode_identifiers() { let v: () = super::decode("null").unwrap(); assert_eq!(v, ()); let v: bool = super::decode("true").unwrap(); assert_eq!(v, true); let v: bool = super::decode("false").unwrap(); assert_eq!(v, false); } #[test] fn test_read_number() { assert_eq!(from_str("+"), Err(SyntaxError(InvalidSyntax, 1, 1))); assert_eq!(from_str("."), Err(SyntaxError(InvalidSyntax, 1, 1))); assert_eq!(from_str("NaN"), Err(SyntaxError(InvalidSyntax, 
1, 1))); assert_eq!(from_str("-"), Err(SyntaxError(InvalidNumber, 1, 2))); assert_eq!(from_str("00"), Err(SyntaxError(InvalidNumber, 1, 2))); assert_eq!(from_str("1."), Err(SyntaxError(InvalidNumber, 1, 3))); assert_eq!(from_str("1e"), Err(SyntaxError(InvalidNumber, 1, 3))); assert_eq!(from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4))); assert_eq!(from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20))); assert_eq!(from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21))); assert_eq!(from_str("3"), Ok(U64(3))); assert_eq!(from_str("3.1"), Ok(F64(3.1))); assert_eq!(from_str("-1.2"), Ok(F64(-1.2))); assert_eq!(from_str("0.4"), Ok(F64(0.4))); assert_eq!(from_str("0.4e5"), Ok(F64(0.4e5))); assert_eq!(from_str("0.4e+15"), Ok(F64(0.4e15))); assert_eq!(from_str("0.4e-01"), Ok(F64(0.4e-01))); assert_eq!(from_str(" 3 "), Ok(U64(3))); assert_eq!(from_str("-9223372036854775808"), Ok(I64(i64::MIN))); assert_eq!(from_str("9223372036854775807"), Ok(U64(i64::MAX as u64))); assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX))); } #[test] fn test_decode_numbers() { let v: f64 = super::decode("3").unwrap(); assert_eq!(v, 3.0); let v: f64 = super::decode("3.1").unwrap(); assert_eq!(v, 3.1); let v: f64 = super::decode("-1.2").unwrap(); assert_eq!(v, -1.2); let v: f64 = super::decode("0.4").unwrap(); assert_eq!(v, 0.4); let v: f64 = super::decode("0.4e5").unwrap(); assert_eq!(v, 0.4e5); let v: f64 = super::decode("0.4e15").unwrap(); assert_eq!(v, 0.4e15); let v: f64 = super::decode("0.4e-01").unwrap(); assert_eq!(v, 0.4e-01); let v: u64 = super::decode("0").unwrap(); assert_eq!(v, 0); let v: u64 = super::decode("18446744073709551615").unwrap(); assert_eq!(v, u64::MAX); let v: i64 = super::decode("-9223372036854775808").unwrap(); assert_eq!(v, i64::MIN); let v: i64 = super::decode("9223372036854775807").unwrap(); assert_eq!(v, i64::MAX); let res: DecodeResult<i64> = super::decode("765.25252"); assert_eq!(res, Err(ExpectedError("Integer".into_string(), "765.25252".into_string()))); } #[test] fn test_read_str() { assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2))); assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5))); assert_eq!(from_str("\"\""), Ok(String("".into_string()))); assert_eq!(from_str("\"foo\""), Ok(String("foo".into_string()))); assert_eq!(from_str("\"\\\"\""), Ok(String("\"".into_string()))); assert_eq!(from_str("\"\\b\""), Ok(String("\x08".into_string()))); assert_eq!(from_str("\"\\n\""), Ok(String("\n".into_string()))); assert_eq!(from_str("\"\\r\""), Ok(String("\r".into_string()))); assert_eq!(from_str("\"\\t\""), Ok(String("\t".into_string()))); assert_eq!(from_str(" \"foo\" "), Ok(String("foo".into_string()))); assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".into_string()))); assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".into_string()))); } #[test] fn test_decode_str() { let s = [("\"\"", ""), ("\"foo\"", "foo"), ("\"\\\"\"", "\""), ("\"\\b\"", "\x08"), ("\"\\n\"", "\n"), ("\"\\r\"", "\r"), ("\"\\t\"", "\t"), ("\"\\u12ab\"", "\u{12ab}"), ("\"\\uAB12\"", "\u{AB12}")]; for &(i, o) in s.iter() { let v: string::String = super::decode(i).unwrap(); assert_eq!(v, o); } } #[test] fn test_read_array() { assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2))); assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3))); assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4))); assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4))); 
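        // The (line, column) pair in each SyntaxError is 1-based and points at
        // the character where parsing stopped, e.g. the stray `7` below.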
assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(from_str("[]"), Ok(Array(vec![]))); assert_eq!(from_str("[ ]"), Ok(Array(vec![]))); assert_eq!(from_str("[true]"), Ok(Array(vec![Boolean(true)]))); assert_eq!(from_str("[ false ]"), Ok(Array(vec![Boolean(false)]))); assert_eq!(from_str("[null]"), Ok(Array(vec![Null]))); assert_eq!(from_str("[3, 1]"), Ok(Array(vec![U64(3), U64(1)]))); assert_eq!(from_str("\n[3, 2]\n"), Ok(Array(vec![U64(3), U64(2)]))); assert_eq!(from_str("[2, [4, 1]]"), Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])]))); } #[test] fn test_decode_array() { let v: Vec<()> = super::decode("[]").unwrap(); assert_eq!(v, vec![]); let v: Vec<()> = super::decode("[null]").unwrap(); assert_eq!(v, vec![()]); let v: Vec<bool> = super::decode("[true]").unwrap(); assert_eq!(v, vec![true]); let v: Vec<int> = super::decode("[3, 1]").unwrap(); assert_eq!(v, vec![3, 1]); let v: Vec<Vec<uint>> = super::decode("[[3], [1, 2]]").unwrap(); assert_eq!(v, vec![vec![3], vec![1, 2]]); } #[test] fn test_decode_tuple() { let t: (uint, uint, uint) = super::decode("[1, 2, 3]").unwrap(); assert_eq!(t, (1u, 2, 3)) let t: (uint, string::String) = super::decode("[1, \"two\"]").unwrap(); assert_eq!(t, (1u, "two".into_string())); } #[test] fn test_decode_tuple_malformed_types() { assert!(super::decode::<(uint, string::String)>("[1, 2]").is_err()); } #[test] fn test_decode_tuple_malformed_length() { assert!(super::decode::<(uint, uint)>("[1, 2, 3]").is_err()); } #[test] fn test_read_object() { assert_eq!(from_str("{"), Err(SyntaxError(EOFWhileParsingObject, 1, 2))); assert_eq!(from_str("{ "), Err(SyntaxError(EOFWhileParsingObject, 1, 3))); assert_eq!(from_str("{1"), Err(SyntaxError(KeyMustBeAString, 1, 2))); assert_eq!(from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6))); assert_eq!(from_str("{\"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 5))); assert_eq!(from_str("{\"a\" "), Err(SyntaxError(EOFWhileParsingObject, 1, 6))); assert_eq!(from_str("{\"a\" 1"), Err(SyntaxError(ExpectedColon, 1, 6))); assert_eq!(from_str("{\"a\":"), Err(SyntaxError(EOFWhileParsingValue, 1, 6))); assert_eq!(from_str("{\"a\":1"), Err(SyntaxError(EOFWhileParsingObject, 1, 7))); assert_eq!(from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax, 1, 8))); assert_eq!(from_str("{\"a\":1,"), Err(SyntaxError(EOFWhileParsingObject, 1, 8))); assert_eq!(from_str("{}").unwrap(), mk_object(&[])); assert_eq!(from_str("{\"a\": 3}").unwrap(), mk_object(&[("a".into_string(), U64(3))])); assert_eq!(from_str( "{ \"a\": null, \"b\" : true }").unwrap(), mk_object(&[ ("a".into_string(), Null), ("b".into_string(), Boolean(true))])); assert_eq!(from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(), mk_object(&[ ("a".into_string(), Null), ("b".into_string(), Boolean(true))])); assert_eq!(from_str( "{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(), mk_object(&[ ("a".into_string(), F64(1.0)), ("b".into_string(), Array(vec![Boolean(true)])) ])); assert_eq!(from_str( "{\ \"a\": 1.0, \ \"b\": [\ true,\ \"foo\\nbar\", \ { \"c\": {\"d\": null} } \ ]\ }").unwrap(), mk_object(&[ ("a".into_string(), F64(1.0)), ("b".into_string(), Array(vec![ Boolean(true), String("foo\nbar".into_string()), mk_object(&[ ("c".into_string(), mk_object(&[("d".into_string(), Null)])) ]) ])) ])); } #[test] fn test_decode_struct() { let s = "{ \"inner\": [ { \"a\": null, \"b\": 2, \"c\": [\"abc\", \"xyz\"] } ] }"; let v: Outer = super::decode(s).unwrap(); assert_eq!( v, Outer { inner: vec![ Inner { a: (), b: 2, c: vec!["abc".into_string(), 
"xyz".into_string()] } ] } ); } #[deriving(Decodable)] struct FloatStruct { f: f64, a: Vec<f64> } #[test] fn test_decode_struct_with_nan() { let s = "{\"f\":null,\"a\":[null,123]}"; let obj: FloatStruct = super::decode(s).unwrap(); assert!(obj.f.is_nan()); assert!(obj.a[0].is_nan()); assert_eq!(obj.a[1], 123f64); } #[test] fn test_decode_option() { let value: Option<string::String> = super::decode("null").unwrap(); assert_eq!(value, None); let value: Option<string::String> = super::decode("\"jodhpurs\"").unwrap(); assert_eq!(value, Some("jodhpurs".into_string())); } #[test] fn test_decode_enum() { let value: Animal = super::decode("\"Dog\"").unwrap(); assert_eq!(value, Dog); let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"; let value: Animal = super::decode(s).unwrap(); assert_eq!(value, Frog("Henry".into_string(), 349)); } #[test] fn test_decode_map() { let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\ \"fields\":[\"Henry\", 349]}}"; let mut map: TreeMap<string::String, Animal> = super::decode(s).unwrap(); assert_eq!(map.remove(&"a".into_string()), Some(Dog)); assert_eq!(map.remove(&"b".into_string()), Some(Frog("Henry".into_string(), 349))); } #[test] fn test_multiline_errors() { assert_eq!(from_str("{\n \"foo\":\n \"bar\""), Err(SyntaxError(EOFWhileParsingObject, 3u, 8u))); } #[deriving(Decodable)] #[allow(dead_code)] struct DecodeStruct { x: f64, y: bool, z: string::String, w: Vec<DecodeStruct> } #[deriving(Decodable)] enum DecodeEnum { A(f64), B(string::String) } fn check_err<T: Decodable<Decoder, DecoderError>>(to_parse: &'static str, expected: DecoderError) { let res: DecodeResult<T> = match from_str(to_parse) { Err(e) => Err(ParseError(e)), Ok(json) => Decodable::decode(&mut Decoder::new(json)) }; match res { Ok(_) => panic!("`{}` parsed & decoded ok, expecting error `{}`", to_parse, expected), Err(ParseError(e)) => panic!("`{}` is not valid json: {}", to_parse, e), Err(e) => { assert_eq!(e, expected); } } } #[test] fn test_decode_errors_struct() { check_err::<DecodeStruct>("[]", ExpectedError("Object".into_string(), "[]".into_string())); check_err::<DecodeStruct>("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}", ExpectedError("Number".into_string(), "true".into_string())); check_err::<DecodeStruct>("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}", ExpectedError("Boolean".into_string(), "[]".into_string())); check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}", ExpectedError("String".into_string(), "{}".into_string())); check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}", ExpectedError("Array".into_string(), "null".into_string())); check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\"}", MissingFieldError("w".into_string())); } #[test] fn test_decode_errors_enum() { check_err::<DecodeEnum>("{}", MissingFieldError("variant".into_string())); check_err::<DecodeEnum>("{\"variant\": 1}", ExpectedError("String".into_string(), "1".into_string())); check_err::<DecodeEnum>("{\"variant\": \"A\"}", MissingFieldError("fields".into_string())); check_err::<DecodeEnum>("{\"variant\": \"A\", \"fields\": null}", ExpectedError("Array".into_string(), "null".into_string())); check_err::<DecodeEnum>("{\"variant\": \"C\", \"fields\": []}", UnknownVariantError("C".into_string())); } #[test] fn test_find(){ let json_value = from_str("{\"dog\" : \"cat\"}").unwrap(); let found_str = json_value.find("dog"); assert!(found_str.unwrap().as_string().unwrap() == "cat"); } #[test] fn test_find_path(){ let json_value = 
from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap(); let found_str = json_value.find_path(&["dog", "cat", "mouse"]); assert!(found_str.unwrap().as_string().unwrap() == "cheese"); } #[test] fn test_search(){ let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap(); let found_str = json_value.search("mouse").and_then(|j| j.as_string()); assert!(found_str.unwrap() == "cheese"); } #[test] fn test_index(){ let json_value = from_str("{\"animals\":[\"dog\",\"cat\",\"mouse\"]}").unwrap(); let ref array = json_value["animals"]; assert_eq!(array[0].as_string().unwrap(), "dog"); assert_eq!(array[1].as_string().unwrap(), "cat"); assert_eq!(array[2].as_string().unwrap(), "mouse"); } #[test] fn test_is_object(){ let json_value = from_str("{}").unwrap(); assert!(json_value.is_object()); } #[test] fn test_as_object(){ let json_value = from_str("{}").unwrap(); let json_object = json_value.as_object(); assert!(json_object.is_some()); } #[test] fn test_is_array(){ let json_value = from_str("[1, 2, 3]").unwrap(); assert!(json_value.is_array()); } #[test] fn test_as_array(){ let json_value = from_str("[1, 2, 3]").unwrap(); let json_array = json_value.as_array(); let expected_length = 3; assert!(json_array.is_some() && json_array.unwrap().len() == expected_length); } #[test] fn test_is_string(){ let json_value = from_str("\"dog\"").unwrap(); assert!(json_value.is_string()); } #[test] fn test_as_string(){ let json_value = from_str("\"dog\"").unwrap(); let json_str = json_value.as_string(); let expected_str = "dog"; assert_eq!(json_str, Some(expected_str)); } #[test] fn test_is_number(){ let json_value = from_str("12").unwrap(); assert!(json_value.is_number()); } #[test] fn test_is_i64(){ let json_value = from_str("-12").unwrap(); assert!(json_value.is_i64()); let json_value = from_str("12").unwrap(); assert!(!json_value.is_i64()); let json_value = from_str("12.0").unwrap(); assert!(!json_value.is_i64()); } #[test] fn test_is_u64(){ let json_value = from_str("12").unwrap(); assert!(json_value.is_u64()); let json_value = from_str("-12").unwrap(); assert!(!json_value.is_u64()); let json_value = from_str("12.0").unwrap(); assert!(!json_value.is_u64()); } #[test] fn
(){ let json_value = from_str("12").unwrap(); assert!(!json_value.is_f64()); let json_value = from_str("-12").unwrap(); assert!(!json_value.is_f64()); let json_value = from_str("12.0").unwrap(); assert!(json_value.is_f64()); let json_value = from_str("-12.0").unwrap(); assert!(json_value.is_f64()); } #[test] fn test_as_i64(){ let json_value = from_str("-12").unwrap(); let json_num = json_value.as_i64(); assert_eq!(json_num, Some(-12)); } #[test] fn test_as_u64(){ let json_value = from_str("12").unwrap(); let json_num = json_value.as_u64(); assert_eq!(json_num, Some(12)); } #[test] fn test_as_f64(){ let json_value = from_str("12.0").unwrap(); let json_num = json_value.as_f64(); assert_eq!(json_num, Some(12f64)); } #[test] fn test_is_boolean(){ let json_value = from_str("false").unwrap(); assert!(json_value.is_boolean()); } #[test] fn test_as_boolean(){ let json_value = from_str("false").unwrap(); let json_bool = json_value.as_boolean(); let expected_bool = false; assert!(json_bool.is_some() && json_bool.unwrap() == expected_bool); } #[test] fn test_is_null(){ let json_value = from_str("null").unwrap(); assert!(json_value.is_null()); } #[test] fn test_as_null(){ let json_value = from_str("null").unwrap(); let json_null = json_value.as_null(); let expected_null = (); assert!(json_null.is_some() && json_null.unwrap() == expected_null); } #[test] fn test_encode_hashmap_with_numeric_key() { use std::str::from_utf8; use std::io::Writer; use std::collections::HashMap; let mut hm: HashMap<uint, bool> = HashMap::new(); hm.insert(1, true); let mut mem_buf = Vec::new(); { let mut encoder = Encoder::new(&mut mem_buf as &mut io::Writer); hm.encode(&mut encoder).unwrap(); } let json_str = from_utf8(mem_buf[]).unwrap(); match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {}", json_str), _ => {} // it parsed and we are good to go } } #[test] fn test_prettyencode_hashmap_with_numeric_key() { use std::str::from_utf8; use std::io::Writer; use std::collections::HashMap; let mut hm: HashMap<uint, bool> = HashMap::new(); hm.insert(1, true); let mut mem_buf = Vec::new(); { let mut encoder = PrettyEncoder::new(&mut mem_buf as &mut io::Writer); hm.encode(&mut encoder).unwrap() } let json_str = from_utf8(mem_buf[]).unwrap(); match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {}", json_str), _ => {} // it parsed and we are good to go } } #[test] fn test_prettyencoder_indent_level_param() { use std::str::from_utf8; use std::collections::TreeMap; let mut tree = TreeMap::new(); tree.insert("hello".into_string(), String("guten tag".into_string())); tree.insert("goodbye".into_string(), String("sayonara".into_string())); let json = Array( // The following layout below should look a lot like // the pretty-printed JSON (indent * x) vec! ( // 0x String("greetings".into_string()), // 1x Object(tree), // 1x + 2x + 2x + 1x ) // 0x // End JSON array (7 lines) ); // Helper function for counting indents fn indents(source: &str) -> uint { let trimmed = source.trim_left_chars(' '); source.len() - trimmed.len() } // Test up to 4 spaces of indents (more?) 
for i in range(0, 4u) { let mut writer = Vec::new(); { let ref mut encoder = PrettyEncoder::new(&mut writer); encoder.set_indent(i); json.encode(encoder).unwrap(); } let printed = from_utf8(writer[]).unwrap(); // Check for indents at each line let lines: Vec<&str> = printed.lines().collect(); assert_eq!(lines.len(), 7); // JSON should be 7 lines assert_eq!(indents(lines[0]), 0 * i); // [ assert_eq!(indents(lines[1]), 1 * i); // "greetings", assert_eq!(indents(lines[2]), 1 * i); // { assert_eq!(indents(lines[3]), 2 * i); // "hello": "guten tag", assert_eq!(indents(lines[4]), 2 * i); // "goodbye": "sayonara" assert_eq!(indents(lines[5]), 1 * i); // }, assert_eq!(indents(lines[6]), 0 * i); // ] // Finally, test that the pretty-printed JSON is valid from_str(printed).ok().expect("Pretty-printed JSON is invalid!"); } } #[test] fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() { use std::collections::HashMap; use Decodable; let json_str = "{\"1\":true}"; let json_obj = match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {}", json_str), Ok(o) => o }; let mut decoder = Decoder::new(json_obj); let _hm: HashMap<uint, bool> = Decodable::decode(&mut decoder).unwrap(); } #[test] fn test_hashmap_with_numeric_key_will_error_with_string_keys() { use std::collections::HashMap; use Decodable; let json_str = "{\"a\":true}"; let json_obj = match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {}", json_str), Ok(o) => o }; let mut decoder = Decoder::new(json_obj); let result: Result<HashMap<uint, bool>, DecoderError> = Decodable::decode(&mut decoder); assert_eq!(result, Err(ExpectedError("Number".into_string(), "a".into_string()))); } fn assert_stream_equal(src: &str, expected: Vec<(JsonEvent, Vec<StackElement>)>) { let mut parser = Parser::new(src.chars()); let mut i = 0; loop { let evt = match parser.next() { Some(e) => e, None => { break; } }; let (ref expected_evt, ref expected_stack) = expected[i]; if !parser.stack().is_equal_to(expected_stack.as_slice()) { panic!("Parser stack is not equal to {}", expected_stack); } assert_eq!(&evt, expected_evt); i+=1; } } #[test] #[cfg_attr(target_word_size = "32", ignore)] // FIXME(#14064) fn test_streaming_parser() { assert_stream_equal( r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#, vec![ (ObjectStart, vec![]), (StringValue("bar".into_string()), vec![Key("foo")]), (ArrayStart, vec![Key("array")]), (U64Value(0), vec![Key("array"), Index(0)]), (U64Value(1), vec![Key("array"), Index(1)]), (U64Value(2), vec![Key("array"), Index(2)]), (U64Value(3), vec![Key("array"), Index(3)]), (U64Value(4), vec![Key("array"), Index(4)]), (U64Value(5), vec![Key("array"), Index(5)]), (ArrayEnd, vec![Key("array")]), (ArrayStart, vec![Key("idents")]), (NullValue, vec![Key("idents"), Index(0)]), (BooleanValue(true), vec![Key("idents"), Index(1)]), (BooleanValue(false), vec![Key("idents"), Index(2)]), (ArrayEnd, vec![Key("idents")]), (ObjectEnd, vec![]), ] ); } fn last_event(src: &str) -> JsonEvent { let mut parser = Parser::new(src.chars()); let mut evt = NullValue; loop { evt = match parser.next() { Some(e) => e, None => return evt, } } } #[test] #[cfg_attr(target_word_size = "32", ignore)] // FIXME(#14064) fn test_read_object_streaming() { assert_eq!(last_event("{ "), Error(SyntaxError(EOFWhileParsingObject, 1, 3))); assert_eq!(last_event("{1"), Error(SyntaxError(KeyMustBeAString, 1, 2))); assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6))); 
assert_eq!(last_event("{\"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 5))); assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6))); assert_eq!(last_event("{\"a\" 1"), Error(SyntaxError(ExpectedColon, 1, 6))); assert_eq!(last_event("{\"a\":"), Error(SyntaxError(EOFWhileParsingValue, 1, 6))); assert_eq!(last_event("{\"a\":1"), Error(SyntaxError(EOFWhileParsingObject, 1, 7))); assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax, 1, 8))); assert_eq!(last_event("{\"a\":1,"), Error(SyntaxError(EOFWhileParsingObject, 1, 8))); assert_eq!(last_event("{\"a\":1,}"), Error(SyntaxError(TrailingComma, 1, 8))); assert_stream_equal( "{}", vec![(ObjectStart, vec![]), (ObjectEnd, vec![])] ); assert_stream_equal( "{\"a\": 3}", vec![ (ObjectStart, vec![]), (U64Value(3), vec![Key("a")]), (ObjectEnd, vec![]), ] ); assert_stream_equal( "{ \"a\": null, \"b\" : true }", vec![ (ObjectStart, vec![]), (NullValue, vec![Key("a")]), (BooleanValue(true), vec![Key("b")]), (ObjectEnd, vec![]), ] ); assert_stream_equal( "{\"a\" : 1.0 ,\"b\": [ true ]}", vec![ (ObjectStart, vec![]), (F64Value(1.0), vec![Key("a")]), (ArrayStart, vec![Key("b")]), (BooleanValue(true),vec![Key("b"), Index(0)]), (ArrayEnd, vec![Key("b")]), (ObjectEnd, vec![]), ] ); assert_stream_equal( r#"{ "a": 1.0, "b": [ true, "foo\nbar", { "c": {"d": null} } ] }"#, vec![ (ObjectStart, vec![]), (F64Value(1.0), vec![Key("a")]), (ArrayStart, vec![Key("b")]), (BooleanValue(true), vec![Key("b"), Index(0)]), (StringValue("foo\nbar".into_string()), vec![Key("b"), Index(1)]), (ObjectStart, vec![Key("b"), Index(2)]), (ObjectStart, vec![Key("b"), Index(2), Key("c")]), (NullValue, vec![Key("b"), Index(2), Key("c"), Key("d")]), (ObjectEnd, vec![Key("b"), Index(2), Key("c")]), (ObjectEnd, vec![Key("b"), Index(2)]), (ArrayEnd, vec![Key("b")]), (ObjectEnd, vec![]), ] ); } #[test] #[cfg_attr(target_word_size = "32", ignore)] // FIXME(#14064) fn test_read_array_streaming() { assert_stream_equal( "[]", vec![ (ArrayStart, vec![]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "[ ]", vec![ (ArrayStart, vec![]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "[true]", vec![ (ArrayStart, vec![]), (BooleanValue(true), vec![Index(0)]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "[ false ]", vec![ (ArrayStart, vec![]), (BooleanValue(false), vec![Index(0)]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "[null]", vec![ (ArrayStart, vec![]), (NullValue, vec![Index(0)]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "[3, 1]", vec![ (ArrayStart, vec![]), (U64Value(3), vec![Index(0)]), (U64Value(1), vec![Index(1)]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "\n[3, 2]\n", vec![ (ArrayStart, vec![]), (U64Value(3), vec![Index(0)]), (U64Value(2), vec![Index(1)]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "[2, [4, 1]]", vec![ (ArrayStart, vec![]), (U64Value(2), vec![Index(0)]), (ArrayStart, vec![Index(1)]), (U64Value(4), vec![Index(1), Index(0)]), (U64Value(1), vec![Index(1), Index(1)]), (ArrayEnd, vec![Index(1)]), (ArrayEnd, vec![]), ] ); assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1, 2))); assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2))); assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3))); assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4))); assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4))); } #[test] fn test_trailing_characters_streaming() { 
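        // Mirrors test_trailing_characters above, but drives the streaming
        // Parser through last_event instead of going through from_str.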
assert_eq!(last_event("nulla"), Error(SyntaxError(TrailingCharacters, 1, 5))); assert_eq!(last_event("truea"), Error(SyntaxError(TrailingCharacters, 1, 5))); assert_eq!(last_event("falsea"), Error(SyntaxError(TrailingCharacters, 1, 6))); assert_eq!(last_event("1a"), Error(SyntaxError(TrailingCharacters, 1, 2))); assert_eq!(last_event("[]a"), Error(SyntaxError(TrailingCharacters, 1, 3))); assert_eq!(last_event("{}a"), Error(SyntaxError(TrailingCharacters, 1, 3))); } #[test] fn test_read_identifiers_streaming() { assert_eq!(Parser::new("null".chars()).next(), Some(NullValue)); assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true))); assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false))); assert_eq!(last_event("n"), Error(SyntaxError(InvalidSyntax, 1, 2))); assert_eq!(last_event("nul"), Error(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(last_event("t"), Error(SyntaxError(InvalidSyntax, 1, 2))); assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(last_event("f"), Error(SyntaxError(InvalidSyntax, 1, 2))); assert_eq!(last_event("faz"), Error(SyntaxError(InvalidSyntax, 1, 3))); } #[test] fn test_stack() { let mut stack = Stack::new(); assert!(stack.is_empty()); assert!(stack.len() == 0); assert!(!stack.last_is_index()); stack.push_index(0); stack.bump_index(); assert!(stack.len() == 1); assert!(stack.is_equal_to(&[Index(1)])); assert!(stack.starts_with(&[Index(1)])); assert!(stack.ends_with(&[Index(1)])); assert!(stack.last_is_index()); assert!(stack.get(0) == Index(1)); stack.push_key("foo".into_string()); assert!(stack.len() == 2); assert!(stack.is_equal_to(&[Index(1), Key("foo")])); assert!(stack.starts_with(&[Index(1), Key("foo")])); assert!(stack.starts_with(&[Index(1)])); assert!(stack.ends_with(&[Index(1), Key("foo")])); assert!(stack.ends_with(&[Key("foo")])); assert!(!stack.last_is_index()); assert!(stack.get(0) == Index(1)); assert!(stack.get(1) == Key("foo")); stack.push_key("bar".into_string()); assert!(stack.len() == 3); assert!(stack.is_equal_to(&[Index(1), Key("foo"), Key("bar")])); assert!(stack.starts_with(&[Index(1)])); assert!(stack.starts_with(&[Index(1), Key("foo")])); assert!(stack.starts_with(&[Index(1), Key("foo"), Key("bar")])); assert!(stack.ends_with(&[Key("bar")])); assert!(stack.ends_with(&[Key("foo"), Key("bar")])); assert!(stack.ends_with(&[Index(1), Key("foo"), Key("bar")])); assert!(!stack.last_is_index()); assert!(stack.get(0) == Index(1)); assert!(stack.get(1) == Key("foo")); assert!(stack.get(2) == Key("bar")); stack.pop(); assert!(stack.len() == 2); assert!(stack.is_equal_to(&[Index(1), Key("foo")])); assert!(stack.starts_with(&[Index(1), Key("foo")])); assert!(stack.starts_with(&[Index(1)])); assert!(stack.ends_with(&[Index(1), Key("foo")])); assert!(stack.ends_with(&[Key("foo")])); assert!(!stack.last_is_index()); assert!(stack.get(0) == Index(1)); assert!(stack.get(1) == Key("foo")); } #[test] fn test_to_json() { use std::collections::{HashMap,TreeMap}; use super::ToJson; let array2 = Array(vec!(U64(1), U64(2))); let array3 = Array(vec!(U64(1), U64(2), U64(3))); let object = { let mut tree_map = TreeMap::new(); tree_map.insert("a".into_string(), U64(1)); tree_map.insert("b".into_string(), U64(2)); Object(tree_map) }; assert_eq!(array2.to_json(), array2); assert_eq!(object.to_json(), object); assert_eq!(3_i.to_json(), I64(3)); assert_eq!(4_i8.to_json(), I64(4)); assert_eq!(5_i16.to_json(), I64(5)); assert_eq!(6_i32.to_json(), I64(6)); assert_eq!(7_i64.to_json(), I64(7)); 
assert_eq!(8_u.to_json(), U64(8)); assert_eq!(9_u8.to_json(), U64(9)); assert_eq!(10_u16.to_json(), U64(10)); assert_eq!(11_u32.to_json(), U64(11)); assert_eq!(12_u64.to_json(), U64(12)); assert_eq!(13.0_f32.to_json(), F64(13.0_f64)); assert_eq!(14.0_f64.to_json(), F64(14.0_f64)); assert_eq!(().to_json(), Null); assert_eq!(f32::INFINITY.to_json(), Null); assert_eq!(f64::NAN.to_json(), Null); assert_eq!(true.to_json(), Boolean(true)); assert_eq!(false.to_json(), Boolean(false)); assert_eq!("abc".to_json(), String("abc".into_string())); assert_eq!("abc".into_string().to_json(), String("abc".into_string())); assert_eq!((1u, 2u).to_json(), array2); assert_eq!((1u, 2u, 3u).to_json(), array3); assert_eq!([1u, 2].to_json(), array2); assert_eq!((&[1u, 2, 3]).to_json(), array3); assert_eq!((vec![1u, 2]).to_json(), array2); assert_eq!(vec!(1u, 2, 3).to_json(), array3); let mut tree_map = TreeMap::new(); tree_map.insert("a".into_string(), 1u); tree_map.insert("b".into_string(), 2); assert_eq!(tree_map.to_json(), object); let mut hash_map = HashMap::new(); hash_map.insert("a".into_string(), 1u); hash_map.insert("b".into_string(), 2); assert_eq!(hash_map.to_json(), object); assert_eq!(Some(15i).to_json(), I64(15)); assert_eq!(Some(15u).to_json(), U64(15)); assert_eq!(None::<int>.to_json(), Null); } #[bench] fn bench_streaming_small(b: &mut Bencher) { b.iter( || { let mut parser = Parser::new( r#"{ "a": 1.0, "b": [ true, "foo\nbar", { "c": {"d": null} } ] }"#.chars() ); loop { match parser.next() { None => return, _ => {} } } }); } #[bench] fn bench_small(b: &mut Bencher) { b.iter( || { let _ = from_str(r#"{ "a": 1.0, "b": [ true, "foo\nbar", { "c": {"d": null} } ] }"#); }); } fn big_json() -> string::String { let mut src = "[\n".into_string(); for _ in range(0i, 500) { src.push_str(r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": \ [1,2,3]},"#); } src.push_str("{}]"); return src; } #[bench] fn bench_streaming_large(b: &mut Bencher) { let src = big_json(); b.iter( || { let mut parser = Parser::new(src.chars()); loop { match parser.next() { None => return, _ => {} } } }); } #[bench] fn bench_large(b: &mut Bencher) { let src = big_json(); b.iter( || { let _ = from_str(src.as_slice()); }); } }
test_is_f64
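The Rust source above exercises both the tree-building `Builder`/`from_str` path and the streaming `Parser`. A minimal sketch of driving the streaming side directly, in the same pre-1.0 dialect as the source; the `extern crate serialize` / `serialize::json` paths are assumptions:

```rust
extern crate serialize; // assumed crate name for this era of the API

use serialize::json::Parser;

fn main() {
    // Pull one JsonEvent at a time; Parser::stack() would expose the
    // Key/Index position alongside each event (see test_streaming_parser).
    let mut parser = Parser::new(r#"{"a": [true, null]}"#.chars());
    loop {
        match parser.next() {
            Some(evt) => println!("{}", evt),
            None => break,
        }
    }
}
```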
EmotionTable.js
import React from 'react';
import './bootstrap.min.css';

class
extends React.Component {
    render() {
        // Returns the emotions as an HTML table
        return (
            <div>
                <table className="table table-bordered">
                    <tbody>
                        {
                            /* Build one table row per (emotion, score) pair
                               from the emotions prop. */
                            Object.entries(this.props.emotions).map(function(mapentry) {
                                return (
                                    <tr key={mapentry[0]}>
                                        <td>{mapentry[0]}</td>
                                        <td>{mapentry[1]}</td>
                                    </tr>
                                );
                            })
                        }
                    </tbody>
                </table>
            </div>
        );
    }
}

export default EmotionTable;
EmotionTable
index.rs
use std::ffi::{CStr, CString, OsString};
use std::marker;
use std::ops::Range;
use std::path::Path;
use std::ptr;
use std::slice;

use libc::{c_char, c_int, c_uint, c_void, size_t};

use crate::util::{self, Binding};
use crate::IntoCString;
use crate::{panic, raw, Error, IndexAddOption, IndexTime, Oid, Repository, Tree};

/// A structure to represent a git [index][1]
///
/// [1]: http://git-scm.com/book/en/Git-Internals-Git-Objects
pub struct Index {
    raw: *mut raw::git_index,
}

/// An iterator over the entries in an index
pub struct IndexEntries<'index> {
    range: Range<usize>,
    index: &'index Index,
}

/// An iterator over the conflicting entries in an index
pub struct IndexConflicts<'index> {
    conflict_iter: *mut raw::git_index_conflict_iterator,
    _marker: marker::PhantomData<&'index Index>,
}

/// A structure to represent the information returned when a conflict is detected in an index entry
pub struct IndexConflict {
    /// The ancestor index entry of the two conflicting index entries
    pub ancestor: Option<IndexEntry>,
    /// The index entry originating from the user's copy of the repository.
    /// Its contents conflict with 'their' index entry
    pub our: Option<IndexEntry>,
    /// The index entry originating from the external repository.
    /// Its contents conflict with 'our' index entry
    pub their: Option<IndexEntry>,
}

/// A callback function to filter index matches.
///
/// Used by `Index::{add_all,remove_all,update_all}`. The first argument is the
/// path, and the second is the pathspec that matched it. Return 0 to confirm
/// the operation on the item, > 0 to skip the item, and < 0 to abort the scan.
pub type IndexMatchedPath<'a> = dyn FnMut(&Path, &[u8]) -> i32 + 'a;

/// A structure to represent an entry or a file inside of an index.
///
/// All fields of an entry are public for modification and inspection. This is
/// also how a new index entry is created.
#[allow(missing_docs)]
pub struct IndexEntry {
    pub ctime: IndexTime,
    pub mtime: IndexTime,
    pub dev: u32,
    pub ino: u32,
    pub mode: u32,
    pub uid: u32,
    pub gid: u32,
    pub file_size: u32,
    pub id: Oid,
    pub flags: u16,
    pub flags_extended: u16,
    pub path: Vec<u8>,
}

impl Index {
    /// Creates a new in-memory index.
    ///
    /// This index object cannot be read/written to the filesystem, but may be
    /// used to perform in-memory index operations.
    pub fn new() -> Result<Index, Error> {
        crate::init();
        let mut raw = ptr::null_mut();
        unsafe {
            try_call!(raw::git_index_new(&mut raw));
            Ok(Binding::from_raw(raw))
        }
    }

    /// Create a new bare Git index object as a memory representation of the Git
    /// index file in 'index_path', without a repository to back it.
    ///
    /// Since there is no ODB or working directory behind this index, any Index
    /// methods which rely on these (e.g. add_path) will fail.
    ///
    /// If you need an index attached to a repository, use the `index()` method
    /// on `Repository`.
    pub fn open(index_path: &Path) -> Result<Index, Error> {
        crate::init();
        let mut raw = ptr::null_mut();
        let index_path = index_path.into_c_string()?;
        unsafe {
            try_call!(raw::git_index_open(&mut raw, index_path));
            Ok(Binding::from_raw(raw))
        }
    }

    /// Add or update an index entry from an in-memory struct
    ///
    /// If a previous index entry exists that has the same path and stage as the
    /// given 'source_entry', it will be replaced. Otherwise, the 'source_entry'
    /// will be added.
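    ///
    /// # Example
    ///
    /// A sketch that re-adds an existing entry under a new path (the path
    /// bytes are placeholders):
    ///
    /// ```no_run
    /// use git2::Repository;
    ///
    /// let repo = Repository::open("/path/to/a/repo").expect("failed to open");
    /// let mut index = repo.index().expect("cannot get the Index file");
    /// let mut entry = index.get(0).expect("index is empty");
    /// entry.path = b"copy-of-first-entry".to_vec();
    /// index.add(&entry).expect("failed to add the entry");
    /// ```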
    pub fn add(&mut self, entry: &IndexEntry) -> Result<(), Error> {
        let path = CString::new(&entry.path[..])?;

        // libgit2 encodes the length of the path in the lower bits of the
        // `flags` entry, so mask those out and recalculate here to ensure we
        // don't corrupt anything.
        let mut flags = entry.flags & !raw::GIT_INDEX_ENTRY_NAMEMASK;

        if entry.path.len() < raw::GIT_INDEX_ENTRY_NAMEMASK as usize {
            flags |= entry.path.len() as u16;
        } else {
            flags |= raw::GIT_INDEX_ENTRY_NAMEMASK;
        }

        unsafe {
            let raw = raw::git_index_entry {
                dev: entry.dev,
                ino: entry.ino,
                mode: entry.mode,
                uid: entry.uid,
                gid: entry.gid,
                file_size: entry.file_size,
                id: *entry.id.raw(),
                flags: flags,
                flags_extended: entry.flags_extended,
                path: path.as_ptr(),
                mtime: raw::git_index_time {
                    seconds: entry.mtime.seconds(),
                    nanoseconds: entry.mtime.nanoseconds(),
                },
                ctime: raw::git_index_time {
                    seconds: entry.ctime.seconds(),
                    nanoseconds: entry.ctime.nanoseconds(),
                },
            };
            try_call!(raw::git_index_add(self.raw, &raw));
            Ok(())
        }
    }

    /// Add or update an index entry from a buffer in memory
    ///
    /// This method will create a blob in the repository that owns the index and
    /// then add the index entry to the index. The path of the entry represents
    /// the position of the blob relative to the repository's root folder.
    ///
    /// If a previous index entry exists that has the same path as the given
    /// 'entry', it will be replaced. Otherwise, the 'entry' will be added.
    /// The id and the file_size of the 'entry' are updated with the real value
    /// of the blob.
    ///
    /// This forces the file to be added to the index, not looking at gitignore
    /// rules.
    ///
    /// If this file currently is the result of a merge conflict, this file will
    /// no longer be marked as conflicting. The data about the conflict will be
    /// moved to the "resolve undo" (REUC) section.
    pub fn add_frombuffer(&mut self, entry: &IndexEntry, data: &[u8]) -> Result<(), Error> {
        let path = CString::new(&entry.path[..])?;

        // libgit2 encodes the length of the path in the lower bits of the
        // `flags` entry, so mask those out and recalculate here to ensure we
        // don't corrupt anything.
        let mut flags = entry.flags & !raw::GIT_INDEX_ENTRY_NAMEMASK;

        if entry.path.len() < raw::GIT_INDEX_ENTRY_NAMEMASK as usize {
            flags |= entry.path.len() as u16;
        } else {
            flags |= raw::GIT_INDEX_ENTRY_NAMEMASK;
        }

        unsafe {
            let raw = raw::git_index_entry {
                dev: entry.dev,
                ino: entry.ino,
                mode: entry.mode,
                uid: entry.uid,
                gid: entry.gid,
                file_size: entry.file_size,
                id: *entry.id.raw(),
                flags: flags,
                flags_extended: entry.flags_extended,
                path: path.as_ptr(),
                mtime: raw::git_index_time {
                    seconds: entry.mtime.seconds(),
                    nanoseconds: entry.mtime.nanoseconds(),
                },
                ctime: raw::git_index_time {
                    seconds: entry.ctime.seconds(),
                    nanoseconds: entry.ctime.nanoseconds(),
                },
            };

            let ptr = data.as_ptr() as *const c_void;
            let len = data.len() as size_t;
            try_call!(raw::git_index_add_frombuffer(self.raw, &raw, ptr, len));
            Ok(())
        }
    }

    /// Add or update an index entry from a file on disk
    ///
    /// The file path must be relative to the repository's working folder and
    /// must be readable.
    ///
    /// This method will fail in bare index instances.
    ///
    /// This forces the file to be added to the index, not looking at gitignore
    /// rules.
    ///
    /// If this file currently is the result of a merge conflict, this file will
    /// no longer be marked as conflicting. The data about the conflict will be
    /// moved to the "resolve undo" (REUC) section.
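    ///
    /// # Example
    ///
    /// A minimal sketch (paths are placeholders):
    ///
    /// ```no_run
    /// use std::path::Path;
    /// use git2::Repository;
    ///
    /// let repo = Repository::open("/path/to/a/repo").expect("failed to open");
    /// let mut index = repo.index().expect("cannot get the Index file");
    /// index.add_path(Path::new("src/lib.rs")).expect("failed to add the file");
    /// index.write().expect("failed to write the index");
    /// ```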
    pub fn add_path(&mut self, path: &Path) -> Result<(), Error> {
        // Git apparently expects '/' to be separators for paths
        let mut posix_path = OsString::new();
        for (i, comp) in path.components().enumerate() {
            if i != 0 {
                posix_path.push("/");
            }
            posix_path.push(comp.as_os_str());
        }
        let posix_path = posix_path.into_c_string()?;
        unsafe {
            try_call!(raw::git_index_add_bypath(self.raw, posix_path));
            Ok(())
        }
    }

    /// Add or update index entries matching files in the working directory.
    ///
    /// This method will fail in bare index instances.
    ///
    /// The `pathspecs` are a list of file names or shell glob patterns that
    /// will be matched against files in the repository's working directory.
    /// Each file that matches will be added to the index (either updating an
    /// existing entry or adding a new entry). You can disable glob expansion
    /// and force exact matching with the `AddDisablePathspecMatch` flag.
    ///
    /// Files that are ignored will be skipped (unlike `add_path`). If a file is
    /// already tracked in the index, then it will be updated even if it is
    /// ignored. Pass the `AddForce` flag to skip the checking of ignore rules.
    ///
    /// To emulate `git add -A` and generate an error if the pathspec contains
    /// the exact path of an ignored file (when not using `AddForce`), add the
    /// `AddCheckPathspec` flag. This checks that each entry in `pathspecs`
    /// that is an exact match to a filename on disk is either not ignored or
    /// already in the index. If this check fails, the function will return
    /// an error.
    ///
    /// To emulate `git add -A` with the "dry-run" option, just use a callback
    /// function that always returns a positive value. See below for details.
    ///
    /// If any files are currently the result of a merge conflict, those files
    /// will no longer be marked as conflicting. The data about the conflicts
    /// will be moved to the "resolve undo" (REUC) section.
    ///
    /// If you provide a callback function, it will be invoked on each matching
    /// item in the working directory immediately before it is added to /
    /// updated in the index. Returning zero will add the item to the index,
    /// greater than zero will skip the item, and less than zero will abort the
    /// scan and return an error to the caller.
    ///
    /// # Example
    ///
    /// Emulate `git add *`:
    ///
    /// ```no_run
    /// use git2::{Index, IndexAddOption, Repository};
    ///
    /// let repo = Repository::open("/path/to/a/repo").expect("failed to open");
    /// let mut index = repo.index().expect("cannot get the Index file");
    /// index.add_all(["*"].iter(), IndexAddOption::DEFAULT, None);
    /// index.write();
    /// ```
    pub fn add_all<T, I>(
        &mut self,
        pathspecs: I,
        flag: IndexAddOption,
        mut cb: Option<&mut IndexMatchedPath<'_>>,
    ) -> Result<(), Error>
    where
        T: IntoCString,
        I: IntoIterator<Item = T>,
    {
        let (_a, _b, raw_strarray) = crate::util::iter2cstrs(pathspecs)?;
        let ptr = cb.as_mut();
        let callback = ptr
            .as_ref()
            .map(|_| index_matched_path_cb as extern "C" fn(_, _, _) -> _);
        unsafe {
            try_call!(raw::git_index_add_all(
                self.raw,
                &raw_strarray,
                flag.bits() as c_uint,
                callback,
                ptr.map(|p| p as *mut _).unwrap_or(ptr::null_mut()) as *mut c_void
            ));
        }
        Ok(())
    }

    /// Clear the contents (all the entries) of an index object.
    ///
    /// This clears the index object in memory; changes must be explicitly
    /// written to disk for them to take effect persistently via `write_*`.
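    ///
    /// # Example
    ///
    /// A sketch that empties the in-memory index and persists the result (the
    /// path is a placeholder):
    ///
    /// ```no_run
    /// use git2::Repository;
    ///
    /// let repo = Repository::open("/path/to/a/repo").expect("failed to open");
    /// let mut index = repo.index().expect("cannot get the Index file");
    /// index.clear().expect("failed to clear");
    /// index.write().expect("failed to write the index"); // persist the change
    /// ```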
pub fn clear(&mut self) -> Result<(), Error> {
        unsafe {
            try_call!(raw::git_index_clear(self.raw));
        }
        Ok(())
    }

    /// Get the count of entries currently in the index
    pub fn len(&self) -> usize {
        unsafe { raw::git_index_entrycount(&*self.raw) as usize }
    }

    /// Return `true` if there is no entry in the index
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Get one of the entries in the index by its position.
    pub fn get(&self, n: usize) -> Option<IndexEntry> {
        unsafe {
            let ptr = raw::git_index_get_byindex(self.raw, n as size_t);
            if ptr.is_null() {
                None
            } else {
                Some(Binding::from_raw(*ptr))
            }
        }
    }

    /// Get an iterator over the entries in this index.
    pub fn iter(&self) -> IndexEntries<'_> {
        IndexEntries {
            range: 0..self.len(),
            index: self,
        }
    }

    /// Get an iterator over the index entries that have conflicts
    pub fn conflicts(&self) -> Result<IndexConflicts<'_>, Error> {
        crate::init();
        let mut conflict_iter = ptr::null_mut();
        unsafe {
            try_call!(raw::git_index_conflict_iterator_new(
                &mut conflict_iter,
                self.raw
            ));
            Ok(Binding::from_raw(conflict_iter))
        }
    }

    /// Get one of the entries in the index by its path.
    pub fn get_path(&self, path: &Path, stage: i32) -> Option<IndexEntry> {
        let path = path.into_c_string().unwrap();
        unsafe {
            let ptr = call!(raw::git_index_get_bypath(self.raw, path, stage as c_int));
            if ptr.is_null() {
                None
            } else {
                Some(Binding::from_raw(*ptr))
            }
        }
    }

    /// Does this index have conflicts?
    ///
    /// Returns `true` if the index contains conflicts, `false` if it does not.
    pub fn has_conflicts(&self) -> bool {
        unsafe { raw::git_index_has_conflicts(self.raw) == 1 }
    }

    /// Get the full path to the index file on disk.
    ///
    /// Returns `None` if this is an in-memory index.
    pub fn path(&self) -> Option<&Path> {
        unsafe { crate::opt_bytes(self, raw::git_index_path(&*self.raw)).map(util::bytes2path) }
    }

    /// Update the contents of an existing index object in memory by reading
    /// from the hard disk.
    ///
    /// If force is true, this performs a "hard" read that discards in-memory
    /// changes and always reloads the on-disk index data. If there is no
    /// on-disk version, the index will be cleared.
    ///
    /// If force is false, this does a "soft" read that reloads the index data
    /// from disk only if it has changed since the last time it was loaded.
    /// Purely in-memory index data will be untouched. Be aware: if there are
    /// changes on disk, unwritten in-memory changes are discarded.
    pub fn read(&mut self, force: bool) -> Result<(), Error> {
        unsafe {
            try_call!(raw::git_index_read(self.raw, force));
        }
        Ok(())
    }

    /// Read a tree into the index file with stats
    ///
    /// The current index contents will be replaced by the specified tree.
    pub fn read_tree(&mut self, tree: &Tree<'_>) -> Result<(), Error> {
        unsafe {
            try_call!(raw::git_index_read_tree(self.raw, &*tree.raw()));
        }
        Ok(())
    }

    /// Remove an entry from the index
    pub fn remove(&mut self, path: &Path, stage: i32) -> Result<(), Error> {
        let path = path.into_c_string()?;
        unsafe {
            try_call!(raw::git_index_remove(self.raw, path, stage as c_int));
        }
        Ok(())
    }

    /// Remove an index entry corresponding to a file on disk.
    ///
    /// The file path must be relative to the repository's working folder. The
    /// file itself may or may not exist on disk.
    ///
    /// If this file currently is the result of a merge conflict, this file will
    /// no longer be marked as conflicting. The data about the conflict will be
    /// moved to the "resolve undo" (REUC) section.
pub fn remove_path(&mut self, path: &Path) -> Result<(), Error> {
        let path = path.into_c_string()?;
        unsafe {
            try_call!(raw::git_index_remove_bypath(self.raw, path));
        }
        Ok(())
    }

    /// Remove all entries from the index under a given directory.
    pub fn remove_dir(&mut self, path: &Path, stage: i32) -> Result<(), Error> {
        let path = path.into_c_string()?;
        unsafe {
            try_call!(raw::git_index_remove_directory(
                self.raw,
                path,
                stage as c_int
            ));
        }
        Ok(())
    }

    /// Remove all matching index entries.
    ///
    /// If you provide a callback function, it will be invoked on each matching
    /// item in the index immediately before it is removed. Return 0 to remove
    /// the item, > 0 to skip the item, and < 0 to abort the scan.
    pub fn remove_all<T, I>(
        &mut self,
        pathspecs: I,
        mut cb: Option<&mut IndexMatchedPath<'_>>,
    ) -> Result<(), Error>
    where
        T: IntoCString,
        I: IntoIterator<Item = T>,
    {
        let (_a, _b, raw_strarray) = crate::util::iter2cstrs(pathspecs)?;
        let ptr = cb.as_mut();
        let callback = ptr
            .as_ref()
            .map(|_| index_matched_path_cb as extern "C" fn(_, _, _) -> _);
        unsafe {
            try_call!(raw::git_index_remove_all(
                self.raw,
                &raw_strarray,
                callback,
                ptr.map(|p| p as *mut _).unwrap_or(ptr::null_mut()) as *mut c_void
            ));
        }
        Ok(())
    }

    /// Update all index entries to match the working directory
    ///
    /// This method will fail in bare index instances.
    ///
    /// This scans the existing index entries and synchronizes them with the
    /// working directory, deleting them if the corresponding working directory
    /// file no longer exists, or otherwise updating the information (including
    /// adding the latest version of the file to the ODB if needed).
    ///
    /// If you provide a callback function, it will be invoked on each matching
    /// item in the index immediately before it is updated (either refreshed or
    /// removed depending on working directory state). Return 0 to proceed with
    /// updating the item, > 0 to skip the item, and < 0 to abort the scan.
    pub fn update_all<T, I>(
        &mut self,
        pathspecs: I,
        mut cb: Option<&mut IndexMatchedPath<'_>>,
    ) -> Result<(), Error>
    where
        T: IntoCString,
        I: IntoIterator<Item = T>,
    {
        let (_a, _b, raw_strarray) = crate::util::iter2cstrs(pathspecs)?;
        let ptr = cb.as_mut();
        let callback = ptr
            .as_ref()
            .map(|_| index_matched_path_cb as extern "C" fn(_, _, _) -> _);
        unsafe {
            try_call!(raw::git_index_update_all(
                self.raw,
                &raw_strarray,
                callback,
                ptr.map(|p| p as *mut _).unwrap_or(ptr::null_mut()) as *mut c_void
            ));
        }
        Ok(())
    }

    /// Write an existing index object from memory back to disk using an atomic
    /// file lock.
    pub fn write(&mut self) -> Result<(), Error> {
        unsafe {
            try_call!(raw::git_index_write(self.raw));
        }
        Ok(())
    }

    /// Write the index as a tree.
    ///
    /// This method will scan the index and write a representation of its
    /// current state back to disk; it recursively creates tree objects for each
    /// of the subtrees stored in the index, but only returns the OID of the
    /// root tree. This is the OID that can be used e.g. to create a commit.
    ///
    /// The index instance cannot be bare, and needs to be associated to an
    /// existing repository.
    ///
    /// The index must not contain any file in conflict.
    pub fn write_tree(&mut self) -> Result<Oid, Error> {
        let mut raw = raw::git_oid {
            id: [0; raw::GIT_OID_RAWSZ],
        };
        unsafe {
            try_call!(raw::git_index_write_tree(&mut raw, self.raw));
            Ok(Binding::from_raw(&raw as *const _))
        }
    }

    /// Write the index as a tree to the given repository
    ///
    /// This is the same as `write_tree` except that the destination repository
    /// can be chosen.
pub fn write_tree_to(&mut self, repo: &Repository) -> Result<Oid, Error> { let mut raw = raw::git_oid { id: [0; raw::GIT_OID_RAWSZ], }; unsafe { try_call!(raw::git_index_write_tree_to(&mut raw, self.raw, repo.raw())); Ok(Binding::from_raw(&raw as *const _)) } } } impl Binding for Index { type Raw = *mut raw::git_index; unsafe fn from_raw(raw: *mut raw::git_index) -> Index { Index { raw: raw } } fn raw(&self) -> *mut raw::git_index { self.raw } } impl<'index> Binding for IndexConflicts<'index> { type Raw = *mut raw::git_index_conflict_iterator; unsafe fn from_raw(raw: *mut raw::git_index_conflict_iterator) -> IndexConflicts<'index> { IndexConflicts { conflict_iter: raw, _marker: marker::PhantomData, } } fn raw(&self) -> *mut raw::git_index_conflict_iterator
} extern "C" fn index_matched_path_cb( path: *const c_char, matched_pathspec: *const c_char, payload: *mut c_void, ) -> c_int { unsafe { let path = CStr::from_ptr(path).to_bytes(); let matched_pathspec = CStr::from_ptr(matched_pathspec).to_bytes(); panic::wrap(|| { let payload = payload as *mut &mut IndexMatchedPath<'_>; (*payload)(util::bytes2path(path), matched_pathspec) as c_int }) .unwrap_or(-1) } } impl Drop for Index { fn drop(&mut self) { unsafe { raw::git_index_free(self.raw) } } } impl<'index> Drop for IndexConflicts<'index> { fn drop(&mut self) { unsafe { raw::git_index_conflict_iterator_free(self.conflict_iter) } } } impl<'index> Iterator for IndexEntries<'index> { type Item = IndexEntry; fn next(&mut self) -> Option<IndexEntry> { self.range.next().map(|i| self.index.get(i).unwrap()) } } impl<'index> Iterator for IndexConflicts<'index> { type Item = Result<IndexConflict, Error>; fn next(&mut self) -> Option<Result<IndexConflict, Error>> { let mut ancestor = ptr::null(); let mut our = ptr::null(); let mut their = ptr::null(); unsafe { try_call_iter!(raw::git_index_conflict_next( &mut ancestor, &mut our, &mut their, self.conflict_iter )); Some(Ok(IndexConflict { ancestor: match ancestor.is_null() { false => Some(IndexEntry::from_raw(*ancestor)), true => None, }, our: match our.is_null() { false => Some(IndexEntry::from_raw(*our)), true => None, }, their: match their.is_null() { false => Some(IndexEntry::from_raw(*their)), true => None, }, })) } } } impl Binding for IndexEntry { type Raw = raw::git_index_entry; unsafe fn from_raw(raw: raw::git_index_entry) -> IndexEntry { let raw::git_index_entry { ctime, mtime, dev, ino, mode, uid, gid, file_size, id, flags, flags_extended, path, } = raw; // libgit2 encodes the length of the path in the lower bits of `flags`, // but if the length exceeds the number of bits then the path is // nul-terminated. 
let mut pathlen = (flags & raw::GIT_INDEX_ENTRY_NAMEMASK) as usize; if pathlen == raw::GIT_INDEX_ENTRY_NAMEMASK as usize { pathlen = CStr::from_ptr(path).to_bytes().len(); } let path = slice::from_raw_parts(path as *const u8, pathlen); IndexEntry { dev: dev, ino: ino, mode: mode, uid: uid, gid: gid, file_size: file_size, id: Binding::from_raw(&id as *const _), flags: flags, flags_extended: flags_extended, path: path.to_vec(), mtime: Binding::from_raw(mtime), ctime: Binding::from_raw(ctime), } } fn raw(&self) -> raw::git_index_entry { // not implemented, may require a CString in storage panic!() } } #[cfg(test)] mod tests { use std::fs::{self, File}; use std::path::Path; use tempfile::TempDir; use crate::{Index, IndexEntry, IndexTime, Oid, Repository, ResetType}; #[test] fn smoke() { let mut index = Index::new().unwrap(); assert!(index.add_path(&Path::new(".")).is_err()); index.clear().unwrap(); assert_eq!(index.len(), 0); assert!(index.get(0).is_none()); assert!(index.path().is_none()); assert!(index.read(true).is_err()); } #[test] fn smoke_from_repo() { let (_td, repo) = crate::test::repo_init(); let mut index = repo.index().unwrap(); assert_eq!( index.path().map(|s| s.to_path_buf()), Some(repo.path().join("index")) ); Index::open(&repo.path().join("index")).unwrap(); index.clear().unwrap(); index.read(true).unwrap(); index.write().unwrap(); index.write_tree().unwrap(); index.write_tree_to(&repo).unwrap(); } #[test] fn add_all() { let (_td, repo) = crate::test::repo_init(); let mut index = repo.index().unwrap(); let root = repo.path().parent().unwrap(); fs::create_dir(&root.join("foo")).unwrap(); File::create(&root.join("foo/bar")).unwrap(); let mut called = false; index .add_all( ["foo"].iter(), crate::IndexAddOption::DEFAULT, Some(&mut |a: &Path, b: &[u8]| { assert!(!called); called = true; assert_eq!(b, b"foo"); assert_eq!(a, Path::new("foo/bar")); 0 }), ) .unwrap(); assert!(called); called = false; index .remove_all( ["."].iter(), Some(&mut |a: &Path, b: &[u8]| { assert!(!called); called = true; assert_eq!(b, b"."); assert_eq!(a, Path::new("foo/bar")); 0 }), ) .unwrap(); assert!(called); } #[test] fn smoke_add() { let (_td, repo) = crate::test::repo_init(); let mut index = repo.index().unwrap(); let root = repo.path().parent().unwrap(); fs::create_dir(&root.join("foo")).unwrap(); File::create(&root.join("foo/bar")).unwrap(); index.add_path(Path::new("foo/bar")).unwrap(); index.write().unwrap(); assert_eq!(index.iter().count(), 1); // Make sure we can use this repo somewhere else now. 
let id = index.write_tree().unwrap(); let tree = repo.find_tree(id).unwrap(); let sig = repo.signature().unwrap(); let id = repo.refname_to_id("HEAD").unwrap(); let parent = repo.find_commit(id).unwrap(); let commit = repo .commit(Some("HEAD"), &sig, &sig, "commit", &tree, &[&parent]) .unwrap(); let obj = repo.find_object(commit, None).unwrap(); repo.reset(&obj, ResetType::Hard, None).unwrap(); let td2 = TempDir::new().unwrap(); let url = crate::test::path2url(&root); let repo = Repository::clone(&url, td2.path()).unwrap(); let obj = repo.find_object(commit, None).unwrap(); repo.reset(&obj, ResetType::Hard, None).unwrap(); } #[test] fn add_then_read() { let mut index = Index::new().unwrap(); assert!(index.add(&entry()).is_err()); let mut index = Index::new().unwrap(); let mut e = entry(); e.path = b"foobar".to_vec(); index.add(&e).unwrap(); let e = index.get(0).unwrap(); assert_eq!(e.path.len(), 6); } #[test] fn add_frombuffer_then_read() { let (_td, repo) = crate::test::repo_init(); let mut index = repo.index().unwrap(); let mut e = entry(); e.path = b"foobar".to_vec(); let content = b"the contents"; index.add_frombuffer(&e, content).unwrap(); let e = index.get(0).unwrap(); assert_eq!(e.path.len(), 6); let b = repo.find_blob(e.id).unwrap(); assert_eq!(b.content(), content); } fn entry() -> IndexEntry { IndexEntry { ctime: IndexTime::new(0, 0), mtime: IndexTime::new(0, 0), dev: 0, ino: 0, mode: 0o100644, uid: 0, gid: 0, file_size: 0, id: Oid::from_bytes(&[0; 20]).unwrap(), flags: 0, flags_extended: 0, path: Vec::new(), } } }
{ self.conflict_iter }
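A minimal sketch of consuming the conflict iterator defined above, using only the public pieces shown (`index()`, `has_conflicts`, `conflicts`, and the `ancestor`/`our`/`their` fields of `IndexConflict`); the repository value is assumed to come from `git2::Repository::open`:

use git2::Repository;

fn print_conflicts(repo: &Repository) -> Result<(), git2::Error> {
    let index = repo.index()?;
    if !index.has_conflicts() {
        return Ok(());
    }
    for conflict in index.conflicts()? {
        let conflict = conflict?;
        // Each stage is an Option<IndexEntry>; a missing side means the
        // path did not exist in that version of the merge.
        for (label, entry) in [
            ("ancestor", &conflict.ancestor),
            ("ours", &conflict.our),
            ("theirs", &conflict.their),
        ] {
            if let Some(e) = entry {
                println!("{}: {}", label, String::from_utf8_lossy(&e.path));
            }
        }
    }
    Ok(())
}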
swfobject.js
/** * SWFObject v1.5: Flash Player detection and embed - http://blog.deconcept.com/swfobject/ * * SWFObject is (c) 2007 Geoff Stearns and is released under the MIT License: * http://www.opensource.org/licenses/mit-license.php * */ if(typeof deconcept=="undefined"){ var deconcept=new Object(); } if(typeof deconcept.util=="undefined") { deconcept.util=new Object(); } if(typeof deconcept.SWFObjectUtil=="undefined") { deconcept.SWFObjectUtil=new Object(); } deconcept.SWFObject=function(_1,id,w,h,_5,c,_7,_8,_9,_a) { if(!document.getElementById){return; } this.DETECT_KEY=_a?_a:"detectflash";this.skipDetect=deconcept.util.getRequestParameter(this.DETECT_KEY);this.params=new Object();this.variables=new Object();this.attributes=new Array();if(_1){this.setAttribute("swf",_1); } if(id) { this.setAttribute("id",id); } if(w){this.setAttribute("width",w); } if(h){this.setAttribute("height",h); }if(_5) { this.setAttribute("version",new deconcept.PlayerVersion(_5.toString().split("."))); } this.installedVer=deconcept.SWFObjectUtil.getPlayerVersion(); if(!window.opera&&document.all&&this.installedVer.major>7){deconcept.SWFObject.doPrepUnload=true;
} var q=_7?_7:"high";this.addParam("quality",q);this.setAttribute("useExpressInstall",false);this.setAttribute("doExpressInstall",false);var _c=(_8)?_8:window.location;this.setAttribute("xiRedirectUrl",_c);this.setAttribute("redirectUrl","");if(_9) { this.setAttribute("redirectUrl",_9);}};deconcept.SWFObject.prototype={useExpressInstall:function(_d){this.xiSWFPath=!_d?"expressinstall.swf":_d;this.setAttribute("useExpressInstall",true);},setAttribute:function(_e,_f){this.attributes[_e]=_f;},getAttribute:function(_10) { return this.attributes[_10];},addParam:function(_11,_12){this.params[_11]=_12;},getParams:function(){return this.params;},addVariable:function(_13,_14){this.variables[_13]=_14;},getVariable:function(_15){return this.variables[_15];},getVariables:function(){return this.variables;},getVariablePairs:function(){var _16=new Array();var key;var _18=this.getVariables();for(key in _18){_16[_16.length]=key+"="+_18[key];}return _16;},getSWFHTML:function() { var _19="";if(navigator.plugins&&navigator.mimeTypes&&navigator.mimeTypes.length){if(this.getAttribute("doExpressInstall")){this.addVariable("MMplayerType","PlugIn");this.setAttribute("swf",this.xiSWFPath);}_19="<embed type=\"application/x-shockwave-flash\" src=\""+this.getAttribute("swf")+"\" width=\""+this.getAttribute("width")+"\" height=\""+this.getAttribute("height")+"\" style=\""+this.getAttribute("style")+"\"";_19+=" id=\""+this.getAttribute("id")+"\" name=\""+this.getAttribute("id")+"\" ";var _1a=this.getParams();for(var key in _1a){_19+=[key]+"=\""+_1a[key]+"\" "; } var _1c=this.getVariablePairs().join("&");if(_1c.length>0){_19+="flashvars=\""+_1c+"\"";}_19+="/>";}else{if(this.getAttribute("doExpressInstall")){this.addVariable("MMplayerType","ActiveX");this.setAttribute("swf",this.xiSWFPath);}_19="<object id=\""+this.getAttribute("id")+"\" classid=\"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000\" width=\""+this.getAttribute("width")+"\" height=\""+this.getAttribute("height")+"\" style=\""+this.getAttribute("style")+"\">";_19+="<param name=\"movie\" value=\""+this.getAttribute("swf")+"\" />"; var _1d=this.getParams();for(var key in _1d){_19+="<param name=\""+key+"\" value=\""+_1d[key]+"\" />";}var _1f=this.getVariablePairs().join("&");if(_1f.length>0){_19+="<param name=\"flashvars\" value=\""+_1f+"\" />";}_19+="</object>";}return _19;},write:function(_20) { if(this.getAttribute("useExpressInstall")) { var _21=new deconcept.PlayerVersion([6,0,65]);if(this.installedVer.versionIsValid(_21)&&!this.installedVer.versionIsValid(this.getAttribute("version"))){this.setAttribute("doExpressInstall",true);this.addVariable("MMredirectURL",escape(this.getAttribute("xiRedirectUrl"))); document.title=document.title.slice(0,47)+" - Flash Player Installation";this.addVariable("MMdoctitle",document.title);}}if(this.skipDetect||this.getAttribute("doExpressInstall")||this.installedVer.versionIsValid(this.getAttribute("version"))) { var n=(typeof _20=="string")?document.getElementById(_20):_20;n.innerHTML=this.getSWFHTML();return true;}else{if(this.getAttribute("redirectUrl")!=""){document.location.replace(this.getAttribute("redirectUrl"));}} return false;}};deconcept.SWFObjectUtil.getPlayerVersion=function() { var _23=new deconcept.PlayerVersion([0,0,0]); if(navigator.plugins&&navigator.mimeTypes.length) { var x=navigator.plugins["Shockwave Flash"]; if(x&&x.description){_23=new deconcept.PlayerVersion(x.description.replace(/([a-zA-Z]|\s)+/,"").replace(/(\s+r|\s+b[0-9]+)/,".").split("."));}} else { 
if(navigator.userAgent&&navigator.userAgent.indexOf("Windows CE")>=0){var axo=1;var _26=3;while(axo) {try {_26++;axo=new ActiveXObject("ShockwaveFlash.ShockwaveFlash."+_26);_23=new deconcept.PlayerVersion([_26,0,0]);}catch(e){axo=null;}}} else { try{var axo=new ActiveXObject("ShockwaveFlash.ShockwaveFlash.7"); }catch(e) { try{var axo=new ActiveXObject("ShockwaveFlash.ShockwaveFlash.6");_23=new deconcept.PlayerVersion([6,0,21]);axo.AllowScriptAccess="always";}catch(e){if(_23.major==6){return _23;}}try{axo=new ActiveXObject("ShockwaveFlash.ShockwaveFlash"); } catch(e){ } } if(axo!=null){_23=new deconcept.PlayerVersion(axo.GetVariable("$version").split(" ")[1].split(","));}}}return _23;};deconcept.PlayerVersion=function(_29){this.major=_29[0]!=null?parseInt(_29[0]):0;this.minor=_29[1]!=null?parseInt(_29[1]):0;this.rev=_29[2]!=null?parseInt(_29[2]):0;};deconcept.PlayerVersion.prototype.versionIsValid=function(fv){if(this.major<fv.major) { return false;}if(this.major>fv.major) { return true;}if(this.minor<fv.minor){return false;}if(this.minor>fv.minor){return true;}if(this.rev<fv.rev){return false;}return true; };deconcept.util={getRequestParameter:function(_2b) { var q=document.location.search||document.location.hash;if(_2b==null){return q;}if(q){var _2d=q.substring(1).split("&");for(var i=0;i<_2d.length;i++){if(_2d[i].substring(0,_2d[i].indexOf("="))==_2b){return _2d[i].substring((_2d[i].indexOf("=")+1));}}} return "";}};deconcept.SWFObjectUtil.cleanupSWFs=function() { var _2f=document.getElementsByTagName("OBJECT");for(var i=_2f.length-1;i>=0;i--){_2f[i].style.display="none";for(var x in _2f[i]){if(typeof _2f[i][x]=="function"){_2f[i][x]=function(){};}}} }; if(deconcept.SWFObject.doPrepUnload){if(!deconcept.unloadSet){deconcept.SWFObjectUtil.prepUnload=function(){__flash_unloadHandler=function(){};__flash_savedUnloadHandler=function(){};window.attachEvent("onunload",deconcept.SWFObjectUtil.cleanupSWFs);}; window.attachEvent("onbeforeunload",deconcept.SWFObjectUtil.prepUnload);deconcept.unloadSet=true;}} if(!document.getElementById&&document.all) { document.getElementById=function(id){return document.all[id];}; } var getQueryParamValue=deconcept.util.getRequestParameter; var FlashObject=deconcept.SWFObject; var SWFObject=deconcept.SWFObject;
} if(c) { this.addParam("bgcolor",c);
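A minimal usage sketch for the class above; the movie URL, element id, and color are illustrative. The constructor arguments map to (swf url, id, width, height, required player version, bgcolor), and the bgcolor branch in the snippet directly above is what turns the sixth argument into a "bgcolor" param:

var so = new deconcept.SWFObject("movie.swf", "mymovie", "400", "300", "8", "#336699");
so.addParam("wmode", "transparent");  // extra <param>/<embed> attribute
so.addVariable("greeting", "hello");  // becomes part of flashvars
// Replaces the contents of <div id="flashcontent"> with the generated
// <object>/<embed> markup, or redirects/express-installs as configured.
so.write("flashcontent");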
UserApi.ts
// TODO: better import syntax? import { BaseAPIRequestFactory, RequiredError } from './baseapi'; import {Configuration} from '../configuration'; import { RequestContext, HttpMethod, ResponseContext, HttpFile} from '../http/http'; import {ObjectSerializer} from '../models/ObjectSerializer'; import {ApiException} from './exception'; import {isCodeInRange} from '../util'; import { User } from '../models/User'; /** * no description */ export class UserApiRequestFactory extends BaseAPIRequestFactory { /** * This can only be done by the logged in user. * Create user * @param user Created user object */ public async createUser(user: User, options?: Configuration): Promise<RequestContext> { let config = options || this.configuration; // verify required parameter 'user' is not null or undefined if (user === null || user === undefined) { throw new RequiredError('Required parameter user was null or undefined when calling createUser.'); } // Path Params const localVarPath = '/user'; // Make Request Context const requestContext = config.baseServer.makeRequestContext(localVarPath, HttpMethod.POST); requestContext.setHeaderParam("Accept", "application/json, */*;q=0.8") // Query Params // Header Params // Form Params // Body Params const contentType = ObjectSerializer.getPreferredMediaType([ "application/json" ]); requestContext.setHeaderParam("Content-Type", contentType); const serializedBody = ObjectSerializer.stringify( ObjectSerializer.serialize(user, "User", ""), contentType ); requestContext.setBody(serializedBody); let authMethod = null; // Apply auth methods authMethod = config.authMethods["api_key"] if (authMethod) { await authMethod.applySecurityAuthentication(requestContext); } return requestContext; } /** * Creates list of users with given input array * @param user List of user object */ public async createUsersWithArrayInput(user: Array<User>, options?: Configuration): Promise<RequestContext> { let config = options || this.configuration; // verify required parameter 'user' is not null or undefined if (user === null || user === undefined) { throw new RequiredError('Required parameter user was null or undefined when calling createUsersWithArrayInput.'); } // Path Params const localVarPath = '/user/createWithArray'; // Make Request Context const requestContext = config.baseServer.makeRequestContext(localVarPath, HttpMethod.POST); requestContext.setHeaderParam("Accept", "application/json, */*;q=0.8") // Query Params
// Header Params // Form Params // Body Params const contentType = ObjectSerializer.getPreferredMediaType([ "application/json" ]); requestContext.setHeaderParam("Content-Type", contentType); const serializedBody = ObjectSerializer.stringify( ObjectSerializer.serialize(user, "Array<User>", ""), contentType ); requestContext.setBody(serializedBody); let authMethod = null; // Apply auth methods authMethod = config.authMethods["api_key"] if (authMethod) { await authMethod.applySecurityAuthentication(requestContext); } return requestContext; } /** * Creates list of users with given input array * @param user List of user object */ public async createUsersWithListInput(user: Array<User>, options?: Configuration): Promise<RequestContext> { let config = options || this.configuration; // verify required parameter 'user' is not null or undefined if (user === null || user === undefined) { throw new RequiredError('Required parameter user was null or undefined when calling createUsersWithListInput.'); } // Path Params const localVarPath = '/user/createWithList'; // Make Request Context const requestContext = config.baseServer.makeRequestContext(localVarPath, HttpMethod.POST); requestContext.setHeaderParam("Accept", "application/json, */*;q=0.8") // Query Params // Header Params // Form Params // Body Params const contentType = ObjectSerializer.getPreferredMediaType([ "application/json" ]); requestContext.setHeaderParam("Content-Type", contentType); const serializedBody = ObjectSerializer.stringify( ObjectSerializer.serialize(user, "Array<User>", ""), contentType ); requestContext.setBody(serializedBody); let authMethod = null; // Apply auth methods authMethod = config.authMethods["api_key"] if (authMethod) { await authMethod.applySecurityAuthentication(requestContext); } return requestContext; } /** * This can only be done by the logged in user. * Delete user * @param username The name that needs to be deleted */ public async deleteUser(username: string, options?: Configuration): Promise<RequestContext> { let config = options || this.configuration; // verify required parameter 'username' is not null or undefined if (username === null || username === undefined) { throw new RequiredError('Required parameter username was null or undefined when calling deleteUser.'); } // Path Params const localVarPath = '/user/{username}' .replace('{' + 'username' + '}', encodeURIComponent(String(username))); // Make Request Context const requestContext = config.baseServer.makeRequestContext(localVarPath, HttpMethod.DELETE); requestContext.setHeaderParam("Accept", "application/json, */*;q=0.8") // Query Params // Header Params // Form Params // Body Params let authMethod = null; // Apply auth methods authMethod = config.authMethods["api_key"] if (authMethod) { await authMethod.applySecurityAuthentication(requestContext); } return requestContext; } /** * Get user by user name * @param username The name that needs to be fetched. Use user1 for testing. 
*/ public async getUserByName(username: string, options?: Configuration): Promise<RequestContext> { let config = options || this.configuration; // verify required parameter 'username' is not null or undefined if (username === null || username === undefined) { throw new RequiredError('Required parameter username was null or undefined when calling getUserByName.'); } // Path Params const localVarPath = '/user/{username}' .replace('{' + 'username' + '}', encodeURIComponent(String(username))); // Make Request Context const requestContext = config.baseServer.makeRequestContext(localVarPath, HttpMethod.GET); requestContext.setHeaderParam("Accept", "application/json, */*;q=0.8") // Query Params // Header Params // Form Params // Body Params // Apply auth methods return requestContext; } /** * Logs user into the system * @param username The user name for login * @param password The password for login in clear text */ public async loginUser(username: string, password: string, options?: Configuration): Promise<RequestContext> { let config = options || this.configuration; // verify required parameter 'username' is not null or undefined if (username === null || username === undefined) { throw new RequiredError('Required parameter username was null or undefined when calling loginUser.'); } // verify required parameter 'password' is not null or undefined if (password === null || password === undefined) { throw new RequiredError('Required parameter password was null or undefined when calling loginUser.'); } // Path Params const localVarPath = '/user/login'; // Make Request Context const requestContext = config.baseServer.makeRequestContext(localVarPath, HttpMethod.GET); requestContext.setHeaderParam("Accept", "application/json, */*;q=0.8") // Query Params if (username !== undefined) { requestContext.setQueryParam("username", ObjectSerializer.serialize(username, "string", "")); } if (password !== undefined) { requestContext.setQueryParam("password", ObjectSerializer.serialize(password, "string", "")); } // Header Params // Form Params // Body Params // Apply auth methods return requestContext; } /** * Logs out current logged in user session */ public async logoutUser(options?: Configuration): Promise<RequestContext> { let config = options || this.configuration; // Path Params const localVarPath = '/user/logout'; // Make Request Context const requestContext = config.baseServer.makeRequestContext(localVarPath, HttpMethod.GET); requestContext.setHeaderParam("Accept", "application/json, */*;q=0.8") // Query Params // Header Params // Form Params // Body Params let authMethod = null; // Apply auth methods authMethod = config.authMethods["api_key"] if (authMethod) { await authMethod.applySecurityAuthentication(requestContext); } return requestContext; } /** * This can only be done by the logged in user. 
* Updated user * @param username name that need to be deleted * @param user Updated user object */ public async updateUser(username: string, user: User, options?: Configuration): Promise<RequestContext> { let config = options || this.configuration; // verify required parameter 'username' is not null or undefined if (username === null || username === undefined) { throw new RequiredError('Required parameter username was null or undefined when calling updateUser.'); } // verify required parameter 'user' is not null or undefined if (user === null || user === undefined) { throw new RequiredError('Required parameter user was null or undefined when calling updateUser.'); } // Path Params const localVarPath = '/user/{username}' .replace('{' + 'username' + '}', encodeURIComponent(String(username))); // Make Request Context const requestContext = config.baseServer.makeRequestContext(localVarPath, HttpMethod.PUT); requestContext.setHeaderParam("Accept", "application/json, */*;q=0.8") // Query Params // Header Params // Form Params // Body Params const contentType = ObjectSerializer.getPreferredMediaType([ "application/json" ]); requestContext.setHeaderParam("Content-Type", contentType); const serializedBody = ObjectSerializer.stringify( ObjectSerializer.serialize(user, "User", ""), contentType ); requestContext.setBody(serializedBody); let authMethod = null; // Apply auth methods authMethod = config.authMethods["api_key"] if (authMethod) { await authMethod.applySecurityAuthentication(requestContext); } return requestContext; } } export class UserApiResponseProcessor { /** * Unwraps the actual response sent by the server from the response context and deserializes the response content * to the expected objects * * @params response Response returned by the server for a request to createUser * @throws ApiException if the response code was not in [200, 299] */ public async createUser(response: ResponseContext): Promise< void> { const contentType = ObjectSerializer.normalizeMediaType(response.headers["content-type"]); if (isCodeInRange("0", response.httpStatusCode)) { throw new ApiException<string>(response.httpStatusCode, "successful operation"); } // Work around for missing responses in specification, e.g. for petstore.yaml if (response.httpStatusCode >= 200 && response.httpStatusCode <= 299) { return; } let body = response.body || ""; throw new ApiException<string>(response.httpStatusCode, "Unknown API Status Code!\nBody: \"" + body + "\""); } /** * Unwraps the actual response sent by the server from the response context and deserializes the response content * to the expected objects * * @params response Response returned by the server for a request to createUsersWithArrayInput * @throws ApiException if the response code was not in [200, 299] */ public async createUsersWithArrayInput(response: ResponseContext): Promise< void> { const contentType = ObjectSerializer.normalizeMediaType(response.headers["content-type"]); if (isCodeInRange("0", response.httpStatusCode)) { throw new ApiException<string>(response.httpStatusCode, "successful operation"); } // Work around for missing responses in specification, e.g. 
for petstore.yaml if (response.httpStatusCode >= 200 && response.httpStatusCode <= 299) { return; } let body = response.body || ""; throw new ApiException<string>(response.httpStatusCode, "Unknown API Status Code!\nBody: \"" + body + "\""); } /** * Unwraps the actual response sent by the server from the response context and deserializes the response content * to the expected objects * * @params response Response returned by the server for a request to createUsersWithListInput * @throws ApiException if the response code was not in [200, 299] */ public async createUsersWithListInput(response: ResponseContext): Promise< void> { const contentType = ObjectSerializer.normalizeMediaType(response.headers["content-type"]); if (isCodeInRange("0", response.httpStatusCode)) { throw new ApiException<string>(response.httpStatusCode, "successful operation"); } // Work around for missing responses in specification, e.g. for petstore.yaml if (response.httpStatusCode >= 200 && response.httpStatusCode <= 299) { return; } let body = response.body || ""; throw new ApiException<string>(response.httpStatusCode, "Unknown API Status Code!\nBody: \"" + body + "\""); } /** * Unwraps the actual response sent by the server from the response context and deserializes the response content * to the expected objects * * @params response Response returned by the server for a request to deleteUser * @throws ApiException if the response code was not in [200, 299] */ public async deleteUser(response: ResponseContext): Promise< void> { const contentType = ObjectSerializer.normalizeMediaType(response.headers["content-type"]); if (isCodeInRange("400", response.httpStatusCode)) { throw new ApiException<string>(response.httpStatusCode, "Invalid username supplied"); } if (isCodeInRange("404", response.httpStatusCode)) { throw new ApiException<string>(response.httpStatusCode, "User not found"); } // Work around for missing responses in specification, e.g. for petstore.yaml if (response.httpStatusCode >= 200 && response.httpStatusCode <= 299) { return; } let body = response.body || ""; throw new ApiException<string>(response.httpStatusCode, "Unknown API Status Code!\nBody: \"" + body + "\""); } /** * Unwraps the actual response sent by the server from the response context and deserializes the response content * to the expected objects * * @params response Response returned by the server for a request to getUserByName * @throws ApiException if the response code was not in [200, 299] */ public async getUserByName(response: ResponseContext): Promise<User > { const contentType = ObjectSerializer.normalizeMediaType(response.headers["content-type"]); if (isCodeInRange("200", response.httpStatusCode)) { const body: User = ObjectSerializer.deserialize( ObjectSerializer.parse(await response.body.text(), contentType), "User", "" ) as User; return body; } if (isCodeInRange("400", response.httpStatusCode)) { throw new ApiException<string>(response.httpStatusCode, "Invalid username supplied"); } if (isCodeInRange("404", response.httpStatusCode)) { throw new ApiException<string>(response.httpStatusCode, "User not found"); } // Work around for missing responses in specification, e.g. 
for petstore.yaml if (response.httpStatusCode >= 200 && response.httpStatusCode <= 299) { const body: User = ObjectSerializer.deserialize( ObjectSerializer.parse(await response.body.text(), contentType), "User", "" ) as User; return body; } let body = response.body || ""; throw new ApiException<string>(response.httpStatusCode, "Unknown API Status Code!\nBody: \"" + body + "\""); } /** * Unwraps the actual response sent by the server from the response context and deserializes the response content * to the expected objects * * @params response Response returned by the server for a request to loginUser * @throws ApiException if the response code was not in [200, 299] */ public async loginUser(response: ResponseContext): Promise<string > { const contentType = ObjectSerializer.normalizeMediaType(response.headers["content-type"]); if (isCodeInRange("200", response.httpStatusCode)) { const body: string = ObjectSerializer.deserialize( ObjectSerializer.parse(await response.body.text(), contentType), "string", "" ) as string; return body; } if (isCodeInRange("400", response.httpStatusCode)) { throw new ApiException<string>(response.httpStatusCode, "Invalid username/password supplied"); } // Work around for missing responses in specification, e.g. for petstore.yaml if (response.httpStatusCode >= 200 && response.httpStatusCode <= 299) { const body: string = ObjectSerializer.deserialize( ObjectSerializer.parse(await response.body.text(), contentType), "string", "" ) as string; return body; } let body = response.body || ""; throw new ApiException<string>(response.httpStatusCode, "Unknown API Status Code!\nBody: \"" + body + "\""); } /** * Unwraps the actual response sent by the server from the response context and deserializes the response content * to the expected objects * * @params response Response returned by the server for a request to logoutUser * @throws ApiException if the response code was not in [200, 299] */ public async logoutUser(response: ResponseContext): Promise< void> { const contentType = ObjectSerializer.normalizeMediaType(response.headers["content-type"]); if (isCodeInRange("0", response.httpStatusCode)) { throw new ApiException<string>(response.httpStatusCode, "successful operation"); } // Work around for missing responses in specification, e.g. for petstore.yaml if (response.httpStatusCode >= 200 && response.httpStatusCode <= 299) { return; } let body = response.body || ""; throw new ApiException<string>(response.httpStatusCode, "Unknown API Status Code!\nBody: \"" + body + "\""); } /** * Unwraps the actual response sent by the server from the response context and deserializes the response content * to the expected objects * * @params response Response returned by the server for a request to updateUser * @throws ApiException if the response code was not in [200, 299] */ public async updateUser(response: ResponseContext): Promise< void> { const contentType = ObjectSerializer.normalizeMediaType(response.headers["content-type"]); if (isCodeInRange("400", response.httpStatusCode)) { throw new ApiException<string>(response.httpStatusCode, "Invalid user supplied"); } if (isCodeInRange("404", response.httpStatusCode)) { throw new ApiException<string>(response.httpStatusCode, "User not found"); } // Work around for missing responses in specification, e.g. 
for petstore.yaml if (response.httpStatusCode >= 200 && response.httpStatusCode <= 299) { return; } let body = response.body || ""; throw new ApiException<string>(response.httpStatusCode, "Unknown API Status Code!\nBody: \"" + body + "\""); } }
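A minimal sketch of driving the factory/processor pair above by hand. The `send` method on the passed-in http library is an assumption named for illustration; everything else uses only the classes defined in this file:

async function createUserExample(
    config: Configuration,
    httpLibrary: { send(req: RequestContext): Promise<ResponseContext> },
    user: User
): Promise<void> {
    const factory = new UserApiRequestFactory(config);
    const processor = new UserApiResponseProcessor();
    const requestContext = await factory.createUser(user, config);
    const responseContext = await httpLibrary.send(requestContext);
    // Resolves to void on a 2xx status and throws ApiException otherwise.
    await processor.createUser(responseContext);
}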
jwt.js
const koajwt = require('koa-jwt2');
const jsonwebtoken = require('jsonwebtoken');
const utility = require('utility');

function __checkIfJWT(ctx) {
  return !ctx.ctxCaller && (!!ctx.get('authorization') || ctx.query.hasOwnProperty('eb-jwt'));
}

function __getToken(ctx) {
  // only valid for the top ctx
  if (ctx.ctxCaller) return null;
  // 1. check header
  let token;
  if (ctx.get('authorization')) {
    const parts = ctx.get('authorization').split(' ');
    if (parts.length === 2) {
      const scheme = parts[0];
      const credentials = parts[1];
      if (/^Bearer$/i.test(scheme)) {
        token = credentials;
      }
    }
  }
  if (token) return token;
  // 2. check query
  token = ctx.query['eb-jwt'];
  if (token) return token;
  // not found
  return null;
}

function __splitCookie(cookie) {
  const pos = cookie.indexOf('=');
  const name = cookie.substr(0, pos);
  const value = cookie.substr(pos + 1);
  return { name, value };
}

function __parseCookiesRequest(str) {
  const cookies = {};
  const cookiesArray = (str || '').split(';')
    .map(cookie => cookie.trim())
    .filter(cookie => !!cookie);
  for (const cookie of cookiesArray) {
    const { name, value } = __splitCookie(cookie);
    cookies[name] = value;
  }
  return cookies;
}

function __parseCookiesResponse(cookiesArray) {
  const cookies = {};
  for (const cookie of cookiesArray) {
    const { name, value } = __splitCookie(cookie.split(';')[0]);
    cookies[name] = value;
  }
  return cookies;
}

function __combineCookies(cookies) {
  const cookiesArray = [];
  for (const name in cookies) {
    cookiesArray.push(`${name}=${cookies[name]}`);
  }
  return cookiesArray.join('; ');
}

module.exports = (options, app) => {
  options.secret = options.secret || app.config.keys.split(',')[0];
  options.getToken = __getToken;
  const _koajwt = koajwt(options);
  return async function jwt(ctx, next) {
    await _koajwt(ctx, async () => {
      // cookies
      let cookiesJwt;
      const useJwt = __checkIfJWT(ctx);
      // set cookie
      if (useJwt) {
        // clear cookie forcibly
        ctx.request.headers.cookie = '';
        if (ctx.state.jwt) {
          // check exp (note: this compares against Date.now() in
          // milliseconds; a standard JWT `exp` claim is in seconds)
          const isValid = !ctx.state.jwt.exp || ctx.state.jwt.exp > Date.now();
          if (isValid) {
            // token
            const token = ctx.state.jwt.token;
            const res = ctx.cookies.keys.decrypt(utility.base64decode(token, true, 'buffer'));
            cookiesJwt = res ? res.value.toString() : undefined;
            if (cookiesJwt) {
              // set cookie
              ctx.request.headers.cookie = cookiesJwt;
            }
          }
        }
      }
      // next
      await next();
      // check cookie
      if (useJwt && ctx.response.get('set-cookie') && ctx.response.type === 'application/json') {
        // parse
        const cookies = cookiesJwt ? __parseCookiesRequest(cookiesJwt) : {};
        const cookiesNew = __parseCookiesResponse(ctx.response.get('set-cookie'));
        // assign
        Object.assign(cookies, cookiesNew);
        // combine
        const cookiesRes = __combineCookies(cookies);
        // jwt payload
const token = utility.base64encode(ctx.cookies.keys.encrypt(cookiesRes), true); const payload = { token }; // jwt const jwtEncode = jsonwebtoken.sign(payload, options.secret); if (!ctx.response.body) ctx.response.body = {}; ctx.response.body['eb-jwt'] = jwtEncode; // clear response header ctx.res.removeHeader('set-cookie'); } }); }; };
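A small sketch of the round trip this middleware enables, seen from the client; the endpoint paths are illustrative. The server folds its Set-Cookie headers into the signed `eb-jwt` field of the JSON body, and the client replays that value as a bearer token so the next request starts from the same cookie state:

async function roundTrip() {
  const login = await fetch('/api/login', { method: 'POST' });
  const body = await login.json();
  // Replay the signed cookie bundle on a subsequent request.
  await fetch('/api/profile', {
    headers: { authorization: 'Bearer ' + body['eb-jwt'] },
  });
}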
self_link_helpers.go
package google

import (
	"regexp"
	"strings"
)

// GetResourceNameFromSelfLink returns the final path component of a self
// link, i.e. the resource's name.
func GetResourceNameFromSelfLink(link string) string {
	parts := strings.Split(link, "/")
	return parts[len(parts)-1]
}

type LocationType int

const (
	Zonal LocationType = iota
	Regional
	Global
)

// GetRegionFromRegionSelfLink returns the region the given self link refers to.
func GetRegionFromRegionSelfLink(selfLink string) string {
	re := regexp.MustCompile("/compute/[a-zA-Z0-9]*/projects/[a-zA-Z0-9-]*/regions/([a-zA-Z0-9-]*)")
	switch {
	case re.MatchString(selfLink):
		if res := re.FindStringSubmatch(selfLink); len(res) == 2 && res[1] != ""
} return selfLink }
{ return res[1] }
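A small illustrative check of the two helpers above, written as an example test in the same package (the self link is made up but well formed):

package google

import "fmt"

func ExampleGetRegionFromRegionSelfLink() {
	link := "https://www.googleapis.com/compute/v1/projects/my-project/regions/us-central1/subnetworks/my-subnet"
	fmt.Println(GetRegionFromRegionSelfLink(link))
	fmt.Println(GetResourceNameFromSelfLink(link))
	// Output:
	// us-central1
	// my-subnet
}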
k8s_client.go
package repositories import ( "encoding/pem" "errors" "fmt" "strings" "code.cloudfoundry.org/cf-k8s-controllers/api/apierrors" "code.cloudfoundry.org/cf-k8s-controllers/api/authorization" "k8s.io/apimachinery/pkg/api/meta" "k8s.io/client-go/kubernetes/scheme" "k8s.io/client-go/rest" "sigs.k8s.io/controller-runtime/pkg/client" ) type UserK8sClientFactory interface { BuildClient(authorization.Info) (client.WithWatch, error) } type UnprivilegedClientFactory struct { config *rest.Config mapper meta.RESTMapper } func
(config *rest.Config, mapper meta.RESTMapper) UnprivilegedClientFactory {
	return UnprivilegedClientFactory{
		config: rest.AnonymousClientConfig(rest.CopyConfig(config)),
		mapper: mapper,
	}
}

func (f UnprivilegedClientFactory) BuildClient(authInfo authorization.Info) (client.WithWatch, error) {
	config := rest.CopyConfig(f.config)

	switch strings.ToLower(authInfo.Scheme()) {
	case authorization.BearerScheme:
		config.BearerToken = authInfo.Token

	case authorization.CertScheme:
		// CertData is expected to hold the client certificate PEM block
		// followed by the private key PEM block; decode them in turn.
		certBlock, rst := pem.Decode(authInfo.CertData)
		if certBlock == nil {
			return nil, fmt.Errorf("failed to decode cert PEM")
		}

		keyBlock, _ := pem.Decode(rst)
		if keyBlock == nil {
			return nil, fmt.Errorf("failed to decode key PEM")
		}

		config.CertData = pem.EncodeToMemory(certBlock)
		config.KeyData = pem.EncodeToMemory(keyBlock)

	default:
		return nil, apierrors.NewNotAuthenticatedError(errors.New("unsupported Authorization header scheme"))
	}

	userClient, err := client.NewWithWatch(config, client.Options{
		Scheme: scheme.Scheme,
		Mapper: f.mapper,
	})
	if err != nil {
		return nil, apierrors.FromK8sError(err, "")
	}

	return userClient, nil
}

func NewPrivilegedClientFactory(config *rest.Config, mapper meta.RESTMapper) PrivilegedClientFactory {
	return PrivilegedClientFactory{
		config: config,
		mapper: mapper,
	}
}

type PrivilegedClientFactory struct {
	config *rest.Config
	mapper meta.RESTMapper
}

func (f PrivilegedClientFactory) BuildClient(_ authorization.Info) (client.WithWatch, error) {
	return client.NewWithWatch(f.config, client.Options{
		Scheme: scheme.Scheme,
		Mapper: f.mapper,
	})
}
NewUnprivilegedClientFactory
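A minimal sketch of wiring both factories, assuming a rest.Config and RESTMapper obtained from the usual controller setup and an authorization.Info parsed from the incoming request:

func buildClients(cfg *rest.Config, mapper meta.RESTMapper, authInfo authorization.Info) (client.WithWatch, client.WithWatch, error) {
	// Acts with the caller's identity (bearer token or client cert).
	userClient, err := NewUnprivilegedClientFactory(cfg, mapper).BuildClient(authInfo)
	if err != nil {
		return nil, nil, err
	}
	// Acts with the controller's own identity; authInfo is ignored.
	privClient, err := NewPrivilegedClientFactory(cfg, mapper).BuildClient(authInfo)
	if err != nil {
		return nil, nil, err
	}
	return userClient, privClient, nil
}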
reconciler.go
/*
Copyright 2020 The Knative Authors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Code generated by injection-gen. DO NOT EDIT.

package configuration

import (
	context "context"
	"encoding/json"
	"reflect"

	zap "go.uber.org/zap"
	v1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/api/equality"
	errors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	sets "k8s.io/apimachinery/pkg/util/sets"
	cache "k8s.io/client-go/tools/cache"
	record "k8s.io/client-go/tools/record"
	controller "knative.dev/pkg/controller"
	logging "knative.dev/pkg/logging"
	reconciler "knative.dev/pkg/reconciler"
	v1alpha1 "knative.dev/serving/pkg/apis/serving/v1alpha1"
	versioned "knative.dev/serving/pkg/client/clientset/versioned"
	servingv1alpha1 "knative.dev/serving/pkg/client/listers/serving/v1alpha1"
)

// Interface defines the strongly typed interfaces to be implemented by a
// controller reconciling v1alpha1.Configuration.
type Interface interface {
	// ReconcileKind implements custom logic to reconcile v1alpha1.Configuration. Any changes
	// to the object's .Status or .Finalizers will be propagated to the stored
	// object. It is recommended that implementors do not call any update calls
	// for the Kind inside of ReconcileKind; it is the responsibility of the calling
	// controller to propagate those properties. The resource passed to ReconcileKind
	// will always have an empty deletion timestamp.
	ReconcileKind(ctx context.Context, o *v1alpha1.Configuration) reconciler.Event
}

// Finalizer defines the strongly typed interfaces to be implemented by a
// controller finalizing v1alpha1.Configuration.
type Finalizer interface {
	// FinalizeKind implements custom logic to finalize v1alpha1.Configuration. Any changes
	// to the object's .Status or .Finalizers will be ignored. Returning a nil or
	// Normal type reconciler.Event will allow the finalizer to be deleted on
	// the resource. The resource passed to FinalizeKind will always have a set
	// deletion timestamp.
	FinalizeKind(ctx context.Context, o *v1alpha1.Configuration) reconciler.Event
}

// reconcilerImpl implements controller.Reconciler for v1alpha1.Configuration resources.
type reconcilerImpl struct {
	// Client is used to write back status updates.
	Client versioned.Interface

	// Listers index properties about resources
	Lister servingv1alpha1.ConfigurationLister

	// Recorder is an event recorder for recording Event resources to the
	// Kubernetes API.
	Recorder record.EventRecorder

	// configStore allows for decorating a context with config maps.
	// +optional
	configStore reconciler.ConfigStore

	// reconciler is the implementation of the business logic of the resource.
	reconciler Interface
}

// Check that our Reconciler implements controller.Reconciler
var _ controller.Reconciler = (*reconcilerImpl)(nil)

func NewReconciler(ctx context.Context, logger *zap.SugaredLogger, client versioned.Interface, lister servingv1alpha1.ConfigurationLister, recorder record.EventRecorder, r Interface, options ...controller.Options) controller.Reconciler
// Reconcile implements controller.Reconciler
func (r *reconcilerImpl) Reconcile(ctx context.Context, key string) error {
	logger := logging.FromContext(ctx)

	// If configStore is set, attach the frozen configuration to the context.
	if r.configStore != nil {
		ctx = r.configStore.ToContext(ctx)
	}

	// Convert the namespace/name string into a distinct namespace and name
	namespace, name, err := cache.SplitMetaNamespaceKey(key)
	if err != nil {
		logger.Errorf("invalid resource key: %s", key)
		return nil
	}

	// Get the resource with this namespace/name.
	original, err := r.Lister.Configurations(namespace).Get(name)
	if errors.IsNotFound(err) {
		// The resource may no longer exist, in which case we stop processing.
		logger.Errorf("resource %q no longer exists", key)
		return nil
	} else if err != nil {
		return err
	}

	// Don't modify the informers copy.
	resource := original.DeepCopy()

	var reconcileEvent reconciler.Event
	if resource.GetDeletionTimestamp().IsZero() {
		// Append the target method to the logger.
		logger = logger.With(zap.String("targetMethod", "ReconcileKind"))

		// Set and update the finalizer on resource if r.reconciler
		// implements Finalizer.
		if resource, err = r.setFinalizerIfFinalizer(ctx, resource); err != nil {
			logger.Warnw("Failed to set finalizers", zap.Error(err))
		}

		// Reconcile this copy of the resource and then write back any status
		// updates regardless of whether the reconciliation errored out.
		reconcileEvent = r.reconciler.ReconcileKind(ctx, resource)
	} else if fin, ok := r.reconciler.(Finalizer); ok {
		// Append the target method to the logger.
		logger = logger.With(zap.String("targetMethod", "FinalizeKind"))

		// For finalizing reconcilers, if this resource is being marked for
		// deletion and was reconciled cleanly (nil or Normal event), remove
		// the finalizer.
		reconcileEvent = fin.FinalizeKind(ctx, resource)
		if resource, err = r.clearFinalizer(ctx, resource, reconcileEvent); err != nil {
			logger.Warnw("Failed to clear finalizers", zap.Error(err))
		}
	}

	// Synchronize the status.
	if equality.Semantic.DeepEqual(original.Status, resource.Status) {
		// If we didn't change anything then don't call updateStatus.
		// This is important because the copy we loaded from the injectionInformer's
		// cache may be stale and we don't want to overwrite a prior update
		// to status with this stale state.
	} else if err = r.updateStatus(original, resource); err != nil {
		logger.Warnw("Failed to update resource status", zap.Error(err))
		r.Recorder.Eventf(resource, v1.EventTypeWarning, "UpdateFailed",
			"Failed to update status for %q: %v", resource.Name, err)
		return err
	}

	// Report the reconciler event, if any.
	if reconcileEvent != nil {
		var event *reconciler.ReconcilerEvent
		if reconciler.EventAs(reconcileEvent, &event) {
			logger.Infow("returned an event", zap.Any("event", reconcileEvent))
			r.Recorder.Eventf(resource, event.EventType, event.Reason, event.Format, event.Args...)
			return nil
		} else {
			logger.Errorw("returned an error", zap.Error(reconcileEvent))
			r.Recorder.Event(resource, v1.EventTypeWarning, "InternalError", reconcileEvent.Error())
			return reconcileEvent
		}
	}

	return nil
}

func (r *reconcilerImpl) updateStatus(existing *v1alpha1.Configuration, desired *v1alpha1.Configuration) error {
	existing = existing.DeepCopy()
	return reconciler.RetryUpdateConflicts(func(attempts int) (err error) {
		// The first iteration tries to use the injectionInformer's state, subsequent attempts fetch the latest state via API.
if attempts > 0 { existing, err = r.Client.ServingV1alpha1().Configurations(desired.Namespace).Get(desired.Name, metav1.GetOptions{}) if err != nil { return err } } // If there's nothing to update, just return. if reflect.DeepEqual(existing.Status, desired.Status) { return nil } existing.Status = desired.Status _, err = r.Client.ServingV1alpha1().Configurations(existing.Namespace).UpdateStatus(existing) return err }) } // updateFinalizersFiltered will update the Finalizers of the resource. // TODO: this method could be generic and sync all finalizers. For now it only // updates defaultFinalizerName. func (r *reconcilerImpl) updateFinalizersFiltered(ctx context.Context, resource *v1alpha1.Configuration) (*v1alpha1.Configuration, error) { finalizerName := defaultFinalizerName actual, err := r.Lister.Configurations(resource.Namespace).Get(resource.Name) if err != nil { return resource, err } // Don't modify the informers copy. existing := actual.DeepCopy() var finalizers []string // If there's nothing to update, just return. existingFinalizers := sets.NewString(existing.Finalizers...) desiredFinalizers := sets.NewString(resource.Finalizers...) if desiredFinalizers.Has(finalizerName) { if existingFinalizers.Has(finalizerName) { // Nothing to do. return resource, nil } // Add the finalizer. finalizers = append(existing.Finalizers, finalizerName) } else { if !existingFinalizers.Has(finalizerName) { // Nothing to do. return resource, nil } // Remove the finalizer. existingFinalizers.Delete(finalizerName) finalizers = existingFinalizers.List() } mergePatch := map[string]interface{}{ "metadata": map[string]interface{}{ "finalizers": finalizers, "resourceVersion": existing.ResourceVersion, }, } patch, err := json.Marshal(mergePatch) if err != nil { return resource, err } resource, err = r.Client.ServingV1alpha1().Configurations(resource.Namespace).Patch(resource.Name, types.MergePatchType, patch) if err != nil { r.Recorder.Eventf(resource, v1.EventTypeWarning, "FinalizerUpdateFailed", "Failed to update finalizers for %q: %v", resource.Name, err) } else { r.Recorder.Eventf(resource, v1.EventTypeNormal, "FinalizerUpdate", "Updated %q finalizers", resource.GetName()) } return resource, err } func (r *reconcilerImpl) setFinalizerIfFinalizer(ctx context.Context, resource *v1alpha1.Configuration) (*v1alpha1.Configuration, error) { if _, ok := r.reconciler.(Finalizer); !ok { return resource, nil } finalizers := sets.NewString(resource.Finalizers...) // If this resource is not being deleted, mark the finalizer. if resource.GetDeletionTimestamp().IsZero() { finalizers.Insert(defaultFinalizerName) } resource.Finalizers = finalizers.List() // Synchronize the finalizers filtered by defaultFinalizerName. return r.updateFinalizersFiltered(ctx, resource) } func (r *reconcilerImpl) clearFinalizer(ctx context.Context, resource *v1alpha1.Configuration, reconcileEvent reconciler.Event) (*v1alpha1.Configuration, error) { if _, ok := r.reconciler.(Finalizer); !ok { return resource, nil } if resource.GetDeletionTimestamp().IsZero() { return resource, nil } finalizers := sets.NewString(resource.Finalizers...) if reconcileEvent != nil { var event *reconciler.ReconcilerEvent if reconciler.EventAs(reconcileEvent, &event) { if event.EventType == v1.EventTypeNormal { finalizers.Delete(defaultFinalizerName) } } } else { finalizers.Delete(defaultFinalizerName) } resource.Finalizers = finalizers.List() // Synchronize the finalizers filtered by defaultFinalizerName. return r.updateFinalizersFiltered(ctx, resource) }
{ // Check the options function input. It should be 0 or 1. if len(options) > 1 { logger.Fatalf("up to one options struct is supported, found %d", len(options)) } rec := &reconcilerImpl{ Client: client, Lister: lister, Recorder: recorder, reconciler: r, } for _, opts := range options { if opts.ConfigStore != nil { rec.configStore = opts.ConfigStore } } return rec }
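A minimal sketch of a concrete reconciler satisfying the generated Interface above; the status mutation shown is illustrative, and it is the generated reconcilerImpl (not this type) that persists the change:

type configurationReconciler struct{}

// Check that the business-logic type implements Interface.
var _ Interface = (*configurationReconciler)(nil)

func (c *configurationReconciler) ReconcileKind(ctx context.Context, o *v1alpha1.Configuration) reconciler.Event {
	// Mutate o.Status only; Reconcile() diffs it against the informer
	// copy and calls updateStatus when something actually changed.
	o.Status.ObservedGeneration = o.Generation
	return nil
}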
extern-types-pointer-cast.rs
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed
// except according to those terms. // run-pass // Test that pointers to extern types can be cast from/to usize, // despite being !Sized. #![feature(extern_types)] extern { type A; } struct Foo { x: u8, tail: A, } struct Bar<T: ?Sized> { x: u8, tail: T, } #[cfg(target_pointer_width = "32")] const MAGIC: usize = 0xdeadbeef; #[cfg(target_pointer_width = "64")] const MAGIC: usize = 0x12345678deadbeef; fn main() { assert_eq!((MAGIC as *const A) as usize, MAGIC); assert_eq!((MAGIC as *const Foo) as usize, MAGIC); assert_eq!((MAGIC as *const Bar<A>) as usize, MAGIC); assert_eq!((MAGIC as *const Bar<Bar<A>>) as usize, MAGIC); }
margin.rs
use crate::client::*; use crate::errors::*; use crate::rest_model::*; use serde_json::from_str; static SAPI_V1_MARGIN_TRANSFER: &str = "/sapi/v1/margin/transfer"; static SAPI_V1_MARGIN_LOAN: &str = "/sapi/v1/margin/loan"; static SAPI_V1_MARGIN_REPAY: &str = "/sapi/v1/margin/repay"; static SAPI_V1_MARGIN_ORDER: &str = "/sapi/v1/margin/order"; static SAPI_V1_MARGIN_ACCOUNT: &str = "/sapi/v1/margin/account"; static SAPI_V1_MARGIN_PAIR: &str = "/sapi/v1/margin/pair"; static SAPI_V1_MARGIN_ASSET: &str = "/sapi/v1/margin/asset"; static SAPI_V1_MARGIN_ALL_ASSETS: &str = "/sapi/v1/margin/allAssets"; static SAPI_V1_MARGIN_ALL_PAIRS: &str = "/sapi/v1/margin/allPairs"; static SAPI_V1_MARGIN_PRICE_INDEX: &str = "/sapi/v1/margin/priceIndex"; static SAPI_V1_MARGIN_INTEREST_HISTORY: &str = "/sapi/v1/margin/interestHistory"; static SAPI_V1_MARGIN_FORCED_LIQUIDATION_RECORD: &str = "/sapi/v1/margin/forcedLiquidationRec"; static SAPI_V1_MARGIN_OPEN_ORDERS: &str = "/sapi/v1/margin/openOrders"; static SAPI_V1_MARGIN_ALL_ORDERS: &str = "/sapi/v1/margin/allOrders"; static SAPI_V1_MARGIN_MY_TRADES: &str = "/sapi/v1/margin/myTrades"; static SAPI_V1_MARGIN_MAX_BORROWABLE: &str = "/sapi/v1/margin/maxBorrowable"; static SAPI_V1_MARGIN_MAX_TRANSFERABLE: &str = "/sapi/v1/margin/maxTransferable"; static SAPI_USER_DATA_STREAM: &str = "/sapi/v1/userDataStream"; #[derive(Clone)] pub struct Margin { pub client: Client, pub recv_window: u64, } impl Margin { /// Execute transfer between spot account and margin account. /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let transaction_id = tokio_test::block_on(margin.transfer("BTCUSDT", 0.001, MarginTransferType::FromMainToMargin)); /// assert!(transaction_id.is_ok(), "{:?}", transaction_id); /// ``` pub async fn transfer<S, F>(&self, symbol: S, qty: F, transfer_type: MarginTransferType) -> Result<TransactionId> where S: Into<String>, F: Into<f64>, { let transfer: Transfer = Transfer { asset: symbol.into(), amount: qty.into(), transfer_type, }; self.client .post_signed_p(SAPI_V1_MARGIN_TRANSFER, transfer, self.recv_window) .await } /// Apply for a loan. /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let transaction_id = tokio_test::block_on(margin.loan("BTCUSDT", 0.001)); /// assert!(transaction_id.is_ok(), "{:?}", transaction_id); /// ``` pub async fn loan<S, F>(&self, symbol: S, qty: F) -> Result<TransactionId> where S: Into<String>, F: Into<f64>,
/// Repay loan for margin account. /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let transaction_id = tokio_test::block_on(margin.repay("BTCUSDT", 0.001)); /// assert!(transaction_id.is_ok(), "{:?}", transaction_id); /// ``` pub async fn repay<S, F>(&self, symbol: S, qty: F) -> Result<TransactionId> where S: Into<String>, F: Into<f64>, { let loan: Loan = Loan { asset: symbol.into(), amount: qty.into(), }; self.client .post_signed_p(SAPI_V1_MARGIN_REPAY, loan, self.recv_window) .await } /// Post a new order for margin account. /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let margin_order = MarginOrder { /// symbol: "BTCUSDT".to_string(), /// side: OrderSide::Sell, /// order_type: OrderType::Limit, /// quantity: 0.001, /// price: 10.0, /// stop_price: 10.0, /// new_client_order_id: "my_id".to_string(), /// iceberg_qty: 10.0, /// new_order_resp_type: OrderResponse::Ack, /// time_in_force: TimeInForce::FOK /// }; /// let transaction_id = tokio_test::block_on(margin.trade(margin_order)); /// assert!(transaction_id.is_ok(), "{:?}", transaction_id); /// ``` pub async fn trade(&self, margin_order: MarginOrder) -> Result<TransactionId> { self.client .post_signed_p(SAPI_V1_MARGIN_ORDER, margin_order, self.recv_window) .await } /// Cancel an existing order /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let transaction_id = tokio_test::block_on(margin.cancel_trade("BTCUSDT", 1_u64, "my_id".to_string(), "my_next_id".to_string())); /// assert!(transaction_id.is_ok(), "{:?}", transaction_id); /// ``` pub async fn cancel_trade<S, F>( &self, symbol: S, order_id: F, orig_client_order_id: String, new_client_order_id: String, ) -> Result<TransactionId> where S: Into<String>, F: Into<u64>, { let margin_order_cancellation: MarginOrderCancellation = MarginOrderCancellation { symbol: symbol.into(), order_id: order_id.into(), orig_client_order_id, new_client_order_id, }; self.client .delete_signed_p(SAPI_V1_MARGIN_ORDER, margin_order_cancellation, self.recv_window) .await } /// Get existing loan records /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let loan_query = RecordsQuery { /// asset: "BTC".to_string(), /// tx_id: None, /// start_time: None, /// end_time: None, /// current: None, /// size: None, /// transfer_type: Some(TransferType::RollIn) /// }; /// let records = tokio_test::block_on(margin.loans(loan_query)); /// assert!(records.is_ok(), "{:?}", records); /// ``` pub async fn loans(&self, loan_query: RecordsQuery) -> Result<RecordsQueryResult<LoanState>> { self.client .post_signed_p(SAPI_V1_MARGIN_LOAN, loan_query, self.recv_window) .await } /// Get existing repay records history /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let records_query = RecordsQuery { /// asset: "BTC".to_string(), /// tx_id: None, /// start_time: None, /// end_time: None, /// current: None, /// size: None, /// transfer_type: Some(TransferType::RollIn) /// }; /// let records = tokio_test::block_on(margin.repays(records_query)); ///
assert!(records.is_ok(), "{:?}", records); /// ``` pub async fn repays(&self, repays_query: RecordsQuery) -> Result<RecordsQueryResult<RepayState>> { self.client .post_signed_p(SAPI_V1_MARGIN_REPAY, repays_query, self.recv_window) .await } /// Get margin account details /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let account_details = tokio_test::block_on(margin.details()); /// assert!(account_details.is_ok(), "{:?}", account_details); /// ``` pub async fn details(&self) -> Result<MarginAccountDetails> { let q: Option<PairQuery> = None; self.client.get_signed_p(SAPI_V1_MARGIN_ACCOUNT, q).await } /// Get asset details /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let asset_detail = tokio_test::block_on(margin.asset("BTC")); /// assert!(asset_detail.is_ok(), "{:?}", asset_detail); /// ``` pub async fn asset<S>(&self, asset: S) -> Result<AssetDetails> where S: Into<String>, { self.client .get_signed_p(SAPI_V1_MARGIN_ASSET, Some(AssetQuery { asset: asset.into() })) .await } /// Get margin pair market data /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let pair_details = tokio_test::block_on(margin.pair("BTCUSDT")); /// assert!(pair_details.is_ok(), "{:?}", pair_details); /// ``` pub async fn pair<S>(&self, symbol: S) -> Result<PairDetails> where S: Into<String>, { self.client .get_signed_p(SAPI_V1_MARGIN_PAIR, Some(PairQuery { symbol: symbol.into() })) .await } /// Get all assets details /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let all_assets = tokio_test::block_on(margin.all_assets()); /// assert!(all_assets.is_ok(), "{:?}", all_assets); /// ``` pub async fn all_assets(&self) -> Result<AllAssets> { let q: Option<PairQuery> = None; self.client.get_signed_p(SAPI_V1_MARGIN_ALL_ASSETS, q).await } /// Get all pair details /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let all_pairs = tokio_test::block_on(margin.all_pairs()); /// assert!(all_pairs.is_ok(), "{:?}", all_pairs); /// ``` pub async fn all_pairs(&self) -> Result<AllPairs> { let q: Option<PairQuery> = None; self.client.get_signed_p(SAPI_V1_MARGIN_ALL_PAIRS, q).await } /// Get price index /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let price_index = tokio_test::block_on(margin.price_index("BTCUSDT")); /// assert!(price_index.is_ok(), "{:?}", price_index); /// ``` pub async fn price_index<S>(&self, symbol: S) -> Result<PriceIndex> where S: Into<String>, { self.client .get_signed_p(SAPI_V1_MARGIN_PRICE_INDEX, Some(PairQuery { symbol: symbol.into() })) .await } /// Get transfer history /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let records_query = RecordsQuery { /// asset: "BTC".to_string(), /// tx_id: None, /// start_time: None, /// end_time: None, /// current: None, /// size: 
None, /// transfer_type: Some(TransferType::RollIn) /// }; /// let records = tokio_test::block_on(margin.transfers(records_query)); /// assert!(records.is_ok(), "{:?}", records); /// ``` pub async fn transfers(&self, transfers_query: RecordsQuery) -> Result<RecordsQueryResult<OrderState>> { self.client .get_signed_p(SAPI_V1_MARGIN_TRANSFER, Some(transfers_query)) .await } /// Get interest history /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let records_query = RecordsQuery { /// asset: "BTC".to_string(), /// tx_id: None, /// start_time: None, /// end_time: None, /// current: None, /// size: None, /// transfer_type: Some(TransferType::RollIn) /// }; /// let records = tokio_test::block_on(margin.interests(records_query)); /// assert!(records.is_ok(), "{:?}", records); /// ``` pub async fn interests(&self, interest_query: RecordsQuery) -> Result<RecordsQueryResult<InterestState>> { self.client .get_signed_p(SAPI_V1_MARGIN_INTEREST_HISTORY, Some(interest_query)) .await } /// Get forced liquidation history /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let records_query = RecordsQuery { /// asset: "BTC".to_string(), /// tx_id: None, /// start_time: None, /// end_time: None, /// current: None, /// size: None, /// transfer_type: Some(TransferType::RollIn) /// }; /// let records = tokio_test::block_on(margin.forced_liquidations(records_query)); /// assert!(records.is_ok(), "{:?}", records); /// ``` pub async fn forced_liquidations( &self, forced_liquidations_query: RecordsQuery, ) -> Result<RecordsQueryResult<ForcedLiquidationState>> { self.client .get_signed_p( SAPI_V1_MARGIN_FORCED_LIQUIDATION_RECORD, Some(forced_liquidations_query), ) .await } /// Get an existing order state /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let records_query = MarginOrderQuery { /// symbol: "BTCUSDT".to_string(), /// order_id: "1".to_string(), /// orig_client_order_id: "my_id".to_string(), /// }; /// let records = tokio_test::block_on(margin.order(records_query)); /// assert!(records.is_ok(), "{:?}", records); /// ``` pub async fn order(&self, margin_order: MarginOrderQuery) -> Result<MarginOrderState> { self.client.get_signed_p(SAPI_V1_MARGIN_ORDER, Some(margin_order)).await } /// Get open orders /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let order_state = tokio_test::block_on(margin.open_orders("BTCUSDT")); /// assert!(order_state.is_ok(), "{:?}", order_state); /// ``` pub async fn open_orders<S>(&self, symbol: S) -> Result<MarginOrderState> where S: Into<String>, { self.client .get_signed_p(SAPI_V1_MARGIN_OPEN_ORDERS, Some(PairQuery { symbol: symbol.into() })) .await } /// Get all orders /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let records_query = RecordsQuery { /// asset: "BTC".to_string(), /// tx_id: None, /// start_time: None, /// end_time: None, /// current: None, /// size: None, /// transfer_type: Some(TransferType::RollIn) /// }; /// let records = 
tokio_test::block_on(margin.orders(records_query)); /// assert!(records.is_ok(), "{:?}", records); /// ``` pub async fn orders(&self, all_orders_query: RecordsQuery) -> Result<RecordsQueryResult<OrderSumaryState>> { self.client .get_signed_p(SAPI_V1_MARGIN_ALL_ORDERS, Some(all_orders_query)) .await } /// Get all trades /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let records_query = RecordsQuery { /// asset: "BTC".to_string(), /// tx_id: None, /// start_time: None, /// end_time: None, /// current: None, /// size: None, /// transfer_type: Some(TransferType::RollIn) /// }; /// let records = tokio_test::block_on(margin.trades(records_query)); /// assert!(records.is_ok(), "{:?}", records); /// ``` pub async fn trades(&self, all_orders_query: RecordsQuery) -> Result<RecordsQueryResult<OwnTradesState>> { self.client .get_signed_p(SAPI_V1_MARGIN_MY_TRADES, Some(all_orders_query)) .await } /// Get max borrowable /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let max = tokio_test::block_on(margin.max_borrowable("BTC")); /// assert!(max.is_ok(), "{:?}", max); /// ``` pub async fn max_borrowable<S>(&self, asset: S) -> Result<MaxAmount> where S: Into<String>, { self.client .get_signed_p(SAPI_V1_MARGIN_MAX_BORROWABLE, Some(AssetQuery { asset: asset.into() })) .await } /// Get max transferable /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*, rest_model::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let max = tokio_test::block_on(margin.max_transferable("BTC")); /// assert!(max.is_ok(), "{:?}", max); /// ``` pub async fn max_transferable<S>(&self, asset: S) -> Result<MaxAmount> where S: Into<String>, { self.client .get_signed_p( SAPI_V1_MARGIN_MAX_TRANSFERABLE, Some(AssetQuery { asset: asset.into() }), ) .await } /// Start user data stream /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let start = tokio_test::block_on(margin.start()); /// assert!(start.is_ok(), "{:?}", start); /// assert!(start.unwrap().listen_key.len() > 0) /// ``` pub async fn start(&self) -> Result<UserDataStream> { let data = self.client.post(SAPI_USER_DATA_STREAM).await?; let user_data_stream: UserDataStream = from_str(data.as_str())?; Ok(user_data_stream) } /// Current open orders on a symbol /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let start = tokio_test::block_on(margin.start()); /// assert!(start.is_ok(), "{:?}", start); /// let keep_alive = tokio_test::block_on(margin.keep_alive(&start.unwrap().listen_key)); /// assert!(keep_alive.is_ok()) /// ``` pub async fn keep_alive(&self, listen_key: &str) -> Result<Success> { let data = self.client.put(SAPI_USER_DATA_STREAM, listen_key).await?; let success: Success = from_str(data.as_str())?; Ok(success) } /// Close the user stream /// # Examples /// ```rust,no_run /// use binance::{api::*, margin::*, config::*}; /// let margin: Margin = Binance::new_with_env(&Config::testnet()); /// let start = tokio_test::block_on(margin.start()); /// assert!(start.is_ok(), "{:?}", start); /// let close = tokio_test::block_on(margin.close(&start.unwrap().listen_key)); /// 
assert!(close.is_ok()) /// ``` pub async fn close(&self, listen_key: &str) -> Result<Success> { let data = self.client.delete(SAPI_USER_DATA_STREAM, listen_key).await?; let success: Success = from_str(data.as_str())?; Ok(success) } }
{ let loan: Loan = Loan { asset: symbol.into(), amount: qty.into(), }; self.client .post_signed_p(SAPI_V1_MARGIN_LOAN, loan, self.recv_window) .await }
test_simple.py
from pythonish_validator.common import Validator DATA_SAMPLE = { "hero": { "name": "R2-D2", "friends": [ { "name": "Luke Skywalker", "appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"], "friends": [ {"name": "Han Solo"}, {"name": "Leia Organa"}, {"name": "C-3PO"}, {"name": "R2-D2"} ] }, { "name": "Han Solo", "appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"], "friends": [ {"name": "Luke Skywalker"}, {"name": "Leia Organa"}, {"name": "R2-D2"} ] }, { "name": "Leia Organa", "appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"], "friends": [ {"name": "Luke Skywalker"}, {"name": "Han Solo"}, {"name": "C-3PO"}, {"name": "R2-D2"} ] } ] } } def test_valid():
def test_readme_example(): validator = Validator({ 'name': str, 'age': int, 'skills': [str] }) assert validator.is_valid({ 'name': 'Georgy', 'age': 29, 'skills': ['Python', 'Perl', 'C'] }) def test_non_iterable(): validator = Validator({ 'name': str, 'age': int, 'skills': [str] }) assert not validator.is_valid({ 'name': 'Georgy', 'age': 29, 'skills': None }) assert not validator.is_valid(None)
validator = Validator({ "hero": { "name": str, "friends": [ { "name": str, "appearsIn": [str], "friends": [ {"name": str} ] } ] } }) assert validator.is_valid(DATA_SAMPLE)
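The tests in this file exercise recursive, structure-driven validation. As a rough illustration only (a hypothetical helper, not pythonish_validator's actual implementation), the core idea can be sketched as:

def matches(schema, data):
    # dict schema: every key must exist and its value must match recursively
    if isinstance(schema, dict):
        return (isinstance(data, dict)
                and all(k in data and matches(v, data[k]) for k, v in schema.items()))
    # list schema: data must be a list whose items all match the element schema
    if isinstance(schema, list):
        return isinstance(data, list) and all(matches(schema[0], item) for item in data)
    # leaf schema: a plain type check
    return isinstance(data, schema)

assert matches({'name': str, 'skills': [str]}, {'name': 'Georgy', 'skills': ['Python']})
assert not matches({'skills': [str]}, {'skills': None})  # mirrors test_non_iterable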
v1.d.ts
/** * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { AxiosPromise } from 'axios'; import { Compute, JWT, OAuth2Client, UserRefreshClient } from 'google-auth-library'; import { BodyResponseCallback, GlobalOptions, GoogleConfigurable, MethodOptions } from 'googleapis-common'; export declare namespace playcustomapp_v1 { interface Options extends GlobalOptions { version: 'v1'; } interface StandardParameters { /** * Data format for the response. */ alt?: string; /** * Selector specifying which fields to include in a partial response. */ fields?: string; /** * API key. Your API key identifies your project and provides you with API * access, quota, and reports. Required unless you provide an OAuth 2.0 * token. */ key?: string; /** * OAuth 2.0 token for the current user. */ oauth_token?: string; /** * Returns response with indentations and line breaks. */ prettyPrint?: boolean; /** * An opaque string that represents a user for quota purposes. Must not * exceed 40 characters. */ quotaUser?: string; /** * Deprecated. Please use quotaUser instead. */ userIp?: string; } /** * Google Play Custom App Publishing API * * An API to publish custom Android apps. * * @example * const {google} = require('googleapis'); * const playcustomapp = google.playcustomapp('v1'); * * @namespace playcustomapp * @type {Function} * @version v1 * @variation v1 * @param {object=} options Options for Playcustomapp */ class Playcustomapp { accounts: Resource$Accounts; constructor(options: GlobalOptions, google?: GoogleConfigurable); } /** * This resource represents a custom app. */ interface Schema$CustomApp { /** * Default listing language in BCP 47 format. */ languageCode?: string; /** * Title for the Android app. */ title?: string; } class Resource$Accounts { customApps: Resource$Accounts$Customapps; constructor(); } class Resource$Accounts$Customapps { constructor(); /** * playcustomapp.accounts.customApps.create * @desc Create and publish a new custom app. * @alias playcustomapp.accounts.customApps.create * @memberOf! () * * @param {object} params Parameters for request * @param {string} params.account Developer account ID. * @param {object} params.resource Media resource metadata * @param {object} params.media Media object * @param {string} params.media.mimeType Media mime-type * @param {string|object} params.media.body Media body contents * @param {object} [options] Optionally override request options, such as `url`, `method`, and `encoding`. * @param {callback} callback The callback that handles the response. 
* @return {object} Request object */ create(params?: Params$Resource$Accounts$Customapps$Create, options?: MethodOptions): AxiosPromise<Schema$CustomApp>; create(params: Params$Resource$Accounts$Customapps$Create, options: MethodOptions | BodyResponseCallback<Schema$CustomApp>, callback: BodyResponseCallback<Schema$CustomApp>): void; create(params: Params$Resource$Accounts$Customapps$Create, callback: BodyResponseCallback<Schema$CustomApp>): void; create(callback: BodyResponseCallback<Schema$CustomApp>): void;
/** * Auth client or API Key for the request */ auth?: string | OAuth2Client | JWT | Compute | UserRefreshClient; /** * Developer account ID. */ account?: string; /** * Request body metadata */ requestBody?: Schema$CustomApp; /** * Media metadata */ media?: { /** * Media mime-type */ mediaType?: string; /** * Media body contents */ body?: any; }; } }
} interface Params$Resource$Accounts$Customapps$Create extends StandardParameters {
config_stateless_agw.py
#!/usr/bin/env python3 """ Copyright 2020 The Magma Authors. This source code is licensed under the BSD-style license found in the LICENSE file in the root directory of this source tree. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Script to trigger pre and post start commands for the Sctpd systemd unit """ import argparse import os import subprocess import sys import shlex import time from enum import Enum from magma.configuration.service_configs import ( load_override_config, load_service_config, save_override_config, ) return_codes = Enum( "return_codes", "STATELESS STATEFUL CORRUPT INVALID", start=0 ) STATELESS_SERVICE_CONFIGS = [ ("mme", "use_stateless", True), ("mobilityd", "persist_to_redis", True), ("pipelined", "clean_restart", False), ("pipelined", "redis_enabled", True), ("sessiond", "support_stateless", True), ] def check_stateless_service_config(service, config_name, config_value): service_config = load_service_config(service) if service_config.get(config_name) == config_value: print("STATELESS\t%s -> %s" % (service, config_name)) return return_codes.STATELESS print("STATEFUL\t%s -> %s" % (service, config_name)) return return_codes.STATEFUL def check_stateless_services():
def check_stateless_agw(): sys.exit(check_stateless_services().value) def clear_redis_state(): if os.getuid() != 0: print("Need to run as root to clear Redis state.") sys.exit(return_codes.INVALID.value) # stop MME, which in turn stops mobilityd, pipelined and sessiond subprocess.call("service magma@mme stop".split()) # delete all keys from Redis which capture service state for key_regex in [ "*_state", "IMSI*", "mobilityd:assigned_ip_blocks", "mobilityd:ip_states:*", "NO_VLAN:mobilityd_gw_info", "QosManager", "s1ap_imsi_map", ]: redis_cmd = ( "redis-cli -p 6380 KEYS '" + key_regex + "' | xargs redis-cli -p 6380 DEL" ) subprocess.call( shlex.split(redis_cmd), stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, ) def flushall_redis(): if os.getuid() != 0: print("Need to run as root to clear Redis state.") sys.exit(return_codes.INVALID.value) print("Flushing all content in Redis") subprocess.call("service magma@* stop".split()) subprocess.call("service magma@redis start".split()) subprocess.call("redis-cli -p 6380 flushall".split()) subprocess.call("service magma@redis stop".split()) def start_magmad(): if os.getuid() != 0: print("Need to run as root to start magmad.") sys.exit(return_codes.INVALID.value) subprocess.call("service magma@magmad start".split()) def restart_sctpd(): if os.getuid() != 0: print("Need to run as root to restart sctpd.") sys.exit(return_codes.INVALID.value) print("Restarting sctpd") subprocess.call("service sctpd restart".split()) # delay return after restarting so that Magma and OVS services come up time.sleep(30) def enable_stateless_agw(): if check_stateless_services() == return_codes.STATELESS: print("Nothing to enable, AGW is stateless") sys.exit(return_codes.STATELESS.value) for service, config, value in STATELESS_SERVICE_CONFIGS: cfg = load_override_config(service) or {} cfg[config] = value save_override_config(service, cfg) # restart Sctpd so that eNB connections are reset and local state cleared restart_sctpd() sys.exit(check_stateless_services().value) def disable_stateless_agw(): if check_stateless_services() == return_codes.STATEFUL: print("Nothing to disable, AGW is stateful") sys.exit(return_codes.STATEFUL.value) for service, config, value in STATELESS_SERVICE_CONFIGS: cfg = load_override_config(service) or {} # remove the stateless override cfg.pop(config, None) save_override_config(service, cfg) # restart Sctpd so that eNB connections are reset and local state cleared restart_sctpd() sys.exit(check_stateless_services().value) def sctpd_pre_start(): if check_stateless_services() == return_codes.STATEFUL: # switching from stateless to stateful print("AGW is stateful, nothing to be done") else: clear_redis_state() sys.exit(0) def sctpd_post_start(): subprocess.Popen("/bin/systemctl start magma@mme".split()) subprocess.Popen("/bin/systemctl start magma@pipelined".split()) subprocess.Popen("/bin/systemctl start magma@sessiond".split()) subprocess.Popen("/bin/systemctl start magma@mobilityd".split()) sys.exit(0) def clear_redis_and_restart(): clear_redis_state() sctpd_post_start() sys.exit(0) def flushall_redis_and_restart(): flushall_redis() start_magmad() restart_sctpd() sys.exit(0) STATELESS_FUNC_DICT = { "check": check_stateless_agw, "enable": enable_stateless_agw, "disable": disable_stateless_agw, "sctpd_pre": sctpd_pre_start, "sctpd_post": sctpd_post_start, "clear_redis": clear_redis_and_restart, "flushall_redis": flushall_redis_and_restart, } def main(): parser = argparse.ArgumentParser() parser.add_argument("command", choices=STATELESS_FUNC_DICT.keys()) args =
parser.parse_args() func = STATELESS_FUNC_DICT[args.command] func() if __name__ == "__main__": main()
num_stateful = 0 for service, config, value in STATELESS_SERVICE_CONFIGS: if ( check_stateless_service_config(service, config, value) == return_codes.STATEFUL ): num_stateful += 1 if num_stateful == 0: res = return_codes.STATELESS elif num_stateful == len(STATELESS_SERVICE_CONFIGS): res = return_codes.STATEFUL else: res = return_codes.CORRUPT print("Check returning", res) return res
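Usage of the script above is sub-command driven via STATELESS_FUNC_DICT; a minimal invocation sketch (the file path is hypothetical, most commands require root, and the exit status follows return_codes: 0=STATELESS, 1=STATEFUL, 2=CORRUPT, 3=INVALID):

import subprocess
import sys

# equivalent to `sudo python3 config_stateless_agw.py check` from a shell
rc = subprocess.call([sys.executable, "config_stateless_agw.py", "check"])
print("stateless check exit code:", rc)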
__init__.py
import logging import json import azure.functions as func import azure.durable_functions as df async def main(req: func.HttpRequest, starter: str) -> func.HttpResponse: client = df.DurableOrchestrationClient(starter) payload = json.loads(req.get_body().decode()) # Load JSON POST request data (json.loads returns a dict/list, not a str) instance_id = await client.start_new(req.route_params["functionName"], client_input=payload) logging.info(f"Started orchestration with ID = '{instance_id}'.")
return client.create_check_status_response(req, instance_id)
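The starter above only launches whatever orchestration is named in the route; a minimal sketch of a matching orchestrator (the function and activity names here are hypothetical):

import azure.durable_functions as df

def orchestrator_function(context: df.DurableOrchestrationContext):
    payload = context.get_input()  # the client_input forwarded by the starter
    result = yield context.call_activity("ProcessPayload", payload)
    return result

main = df.Orchestrator.create(orchestrator_function)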
button.py
from kivy.uix.button import Button from kivy.properties import StringProperty, BooleanProperty, NumericProperty, ObjectProperty from kivy.graphics import Color, Rectangle, RoundedRectangle, Ellipse from kivy.lang import Builder Builder.load_string(''' <FlatButton>: background_normal: '' background_color: [0,0,0,0] text_size: self.size valign: 'middle' halign: 'center' markup: True ''') class FlatButton(Button): """A normal ::class `kivy.uix.button.Button` with all the visual representations removed; this button basically just looks like a label, but of course, unlike a label, it's clickable. Since this inherits from a normal Button, it supports all of its properties. Usage ----------
from ukivy.button import FlatButton ... btn = FlatButton(text='myButton') some_widget.add_widget(btn) ... """ pass class RoundedButton(FlatButton): """A FlatButton drawn over a rounded-rectangle background (RoundedRectangle expects its radius as a list, and the subclass must be defined after FlatButton).""" radius = NumericProperty(10) def update_back(self): with self.canvas.before: self.color = Color(rgba=self.background_color) self.rect = RoundedRectangle( pos=self.pos, size=self.size, radius=[self.radius]) def on_radius(self, _, value): """When the radius is set/changed, this function is called to update the radius of the button on the canvas Parameters ---------- _ : widget This is usually the instance calling the function, we don't care about this value : number The value of the radius property Returns ------- None """ self.rect.radius = [value]
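A hedged usage sketch for RoundedButton; the explicit update_back call and the pos/size bindings are assumptions, since the class itself never binds them:

btn = RoundedButton(text='rounded', radius=20)
btn.background_color = [0.2, 0.6, 0.9, 1]  # the kv rule above makes it transparent by default
btn.update_back()  # draw the rounded background once
# keep the background glued to the widget as it moves/resizes
btn.bind(pos=lambda *_: setattr(btn.rect, 'pos', btn.pos),
         size=lambda *_: setattr(btn.rect, 'size', btn.size))
some_widget.add_widget(btn)  # some_widget is any Kivy layout (hypothetical)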
models.py
from django.db import models # Create your models here. """ Genre model This model is used to store information about the book category — for example whether it is fiction or non-fiction, romance or military history, etc. The model has a single CharField field (name), which is used to describe the genre (this is limited to 200 characters and has some help_text). At the end of the model we declare a __str__() method, which simply returns the name of the genre defined by a particular record. No verbose name has been defined, so the field will be called Name in forms. """ class Genre(models.Model): """Model representing a book genre (e.g. Science Fiction, Non Fiction).""" name = models.CharField(max_length=200, help_text="Enter a book genre (e.g. Science Fiction, French Poetry etc.)") class Meta: ordering = ['name'] permissions = ( ('can_create_genre', 'Create genre'), ('can_update_genre', 'Update genre'), ('can_delete_genre', 'Delete genre'), ) def get_absolute_url(self): """Returns the url to access a particular genre instance.""" return reverse('genre-detail', args=[str(self.id)]) def __str__(self): """String for representing the Model object (in Admin site etc.)""" return self.name """ Language model Imagine a local benefactor donates a number of new books written in another language (say, Farsi). The challenge is to work out how these would be best represented in our library website, and then to add them to the models. Some things to consider: 1. Should "language" be associated with a Book, BookInstance, or some other object? 2. Should the different languages be represented using a model, a free text field, or a hard-coded selection list? """ class Language(models.Model): """Model representing a Language (e.g. English, French, Japanese, etc.)""" name = models.CharField(max_length=200, help_text="Enter the book's natural language (e.g. English, French, Japanese etc.)") class Meta: ordering = ['name'] permissions = ( ('can_create_language', 'Create language'), ('can_update_language', 'Update language'), ('can_delete_language', 'Delete language'), ) def get_absolute_url(self): """Returns the url to access a particular language instance.""" return reverse('language-detail', args=[str(self.id)]) def __str__(self): """String for representing the Model object (in Admin site etc.)""" return self.name """ Book model The book model represents all information about an available book in a general sense, but not a particular physical "instance" or "copy" available for loan. The model uses a CharField to represent the book's title and isbn (note how the isbn specifies its label as "ISBN" using the first unnamed parameter because the default label would otherwise be "Isbn"). The model uses TextField for the summary, because this text may need to be quite long. The genre is a ManyToManyField, so that a book can have multiple genres and a genre can have many books. The author is declared as ForeignKey, so each book will only have one author, but an author may have many books (in practice a book might have multiple authors, but not in this implementation!) In both field types the related model class is declared as the first unnamed parameter using either the model class or a string containing the name of the related model. You must use the name of the model as a string if the associated class has not yet been defined in this file before it is referenced!
The other parameters of interest in the author field are null=True, which allows the database to store a Null value if no author is selected, and on_delete=models.SET_NULL, which will set the value of the author to Null if the associated author record is deleted. The model also defines __str__() , using the book's title field to represent a Book record. The final method, get_absolute_url() returns a URL that can be used to access a detail record for this model (for this to work we will have to define a URL mapping that has the name book-detail, and define an associated view and template). """ from django.urls import reverse # Used to generate URLs by reversing the URL patterns class Book(models.Model): """Model representing a book (but not a specific copy of a book).""" title = models.CharField(max_length=200) author = models.ForeignKey('Author', on_delete=models.SET_NULL, null=True) # Foreign Key used because book can only have one author, but authors can have multiple books # Author as a string rather than object because it hasn't been declared yet in the file. summary = models.TextField(max_length=3000, blank=True, help_text="Enter a brief description of the book") isbn = models.CharField('ISBN', max_length=13, blank=True, help_text='10 or 13 Characters <a href="https://www.isbn-international.org/content/what' '-isbn" target="_blank">ISBN number</a>') genre = models.ManyToManyField(Genre, help_text="Select a genre for this book") # ManyToManyField used because genre can contain many books. Books can cover many genres. # Genre class has already been defined so we can specify the object above. language = models.ForeignKey('Language', on_delete=models.SET_NULL, null=True) class Meta: ordering = ['title', 'author'] permissions = ( ("can_create_book", "Create book"), ("can_update_book", "Update book"), ("can_delete_book", "Delete book"), ) def display_genre(self): """Creates a string for the Genre. This is required to display genre in Admin.""" return ', '.join([genre.name for genre in self.genre.all()[:3]]) display_genre.short_description = 'Genre' def get_absolute_url(self): ""
def __str__(self): """String for representing the Model object.""" return self.title """ BookInstance model The BookInstance represents a specific copy of a book that someone might borrow, and includes information about whether the copy is available or on what date it is expected back, "imprint" or version details, and a unique id for the book in the library. The model uses 1. ForeignKey to identify the associated Book (each book can have many copies, but a copy can only have one Book). 2. CharField to represent the imprint (specific release) of the book. We additionally declare a few new types of field: 1. UUIDField is used for the id field to set it as the primary_key for this model. This type of field allocates a globally unique value for each instance (one for every book you can find in the library). 2. DateField is used for the due_back date (at which the book is expected to become available after being borrowed or in maintenance). This value can be blank or null (needed for when the book is available). The model metadata (Class Meta) uses this field to order records when they are returned in a query. 3. status is a CharField that defines a choice/selection list. As you can see, we define a tuple containing tuples of key-value pairs and pass it to the choices argument. The value in a key/value pair is a display value that a user can select, while the keys are the values that are actually saved if the option is selected. We've also set a default value of 'm' (maintenance) as books will initially be created unavailable before they are stocked on the shelves. The model __str__() represents the BookInstance object using a combination of its unique id and the associated Book's title. """ import uuid # Required for unique book instances from datetime import date from django.contrib.auth.models import User # Required to assign User as a borrower from django.db.models import F # Required to use query expressions class BookInstance(models.Model): """Model representing a specific copy of a book (i.e. that can be borrowed from the library).""" id = models.UUIDField(primary_key=True, default=uuid.uuid4, help_text="Unique ID for this particular book across whole library") book = models.ForeignKey('Book', on_delete=models.SET_NULL, null=True) imprint = models.CharField(max_length=200) due_back = models.DateField(null=True, blank=True, help_text='Enter the date in the form of yyyy-mm-dd') borrower = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, blank=True) @property def is_overdue(self): if self.due_back and date.today() > self.due_back: return True return False LOAN_STATUS = ( ('m', 'Maintenance'), ('o', 'On loan'), ('a', 'Available'), ('r', 'Reserved'), ) status = models.CharField(max_length=1, choices=LOAN_STATUS, blank=True, default='m', help_text='Book availability') class Meta: ordering = [F('due_back').asc(nulls_last=True)] permissions = ( ("can_mark_returned", "Set book as returned"), ("can_create_bookinstance", "Create bookinstance"), ("can_update_bookinstance", "Update bookinstance"), ("can_delete_bookinstance", "Delete bookinstance"), ) def __str__(self): """String for representing the Model object""" return '{0} ({1})'.format(self.id, self.book.title) """ Author model The model defines an author as having a first name, last name, date of birth, and (optional) date of death. It specifies that by default the __str__() returns the name in last name, firstname order.
The get_absolute_url() method reverses the author-detail URL mapping to get the URL for displaying an individual author. """ class Author(models.Model): """Model representing an author.""" first_name = models.CharField(max_length=100) # first name = given name = 名 last_name = models.CharField(max_length=100) # last name = family name = surname = 姓 date_of_birth = models.DateField(null=True, blank=True, help_text='Enter the date in the form of yyyy-mm-dd') date_of_death = models.DateField('Died', null=True, blank=True, help_text='Enter the date in the form of yyyy-mm-dd') class Meta: ordering = ['last_name', 'first_name'] permissions = ( ("can_create_author", "Create author"), ("can_update_author", "Update author"), ("can_delete_author", "Delete author"), ) def get_absolute_url(self): """Returns the url to access a particular author instance.""" return reverse('author-detail', args=[str(self.id)]) def __str__(self): """String for representing the Model object.""" return '{0} {1}'.format(self.first_name, self.last_name) """ Caution! You should re-run the database migrations every time you make changes in this file. """
"Returns the url to access a particular book instance.""" return reverse('book-detail', args=[str(self.id)])
node.rs
use crate::config::Config; use chrono::prelude::*; use k8s_openapi::api::coordination::v1::Lease; use k8s_openapi::api::core::v1::Node; use k8s_openapi::apimachinery::pkg::apis::meta::v1::Time; use kube::{ api::{Api, PatchParams, PostParams}, client::APIClient, }; use log::{debug, error, info}; /// Create a node /// /// This creates a Kubernetes Node that describes our Kubelet, failing with a log message /// if one already exists. If one does exist, we simply re-use it. You may call that /// hacky, but I call it... hacky. /// /// A node comes with a lease, and we maintain the lease to tell Kubernetes that the /// node remains alive and functional. Note that this will not work in /// versions of Kubernetes prior to 1.14. pub async fn create_node(client: &APIClient, config: Config, arch: &str) { let node_client: Api<Node> = Api::all(client.clone()); let node_name = config.node_name.clone(); let node = node_definition(config, arch); match node_client .create( &PostParams::default(), &serde_json::from_value(node) .expect("failed to deserialize node from node definition JSON"), ) .await { Ok(node) =>
Err(e) => { error!("Error creating node: {}", e); info!("Looking up node to see if it exists already"); match node_client.get(&node_name).await { Ok(node) => { let node_uid = node.metadata.unwrap_or_default().uid.unwrap_or_default(); create_lease(&node_uid, &node_name, &client).await } Err(e) => error!("Error fetching node after failed create: {}", e), } } }; } /// Update the timestamps on the Node object. /// /// This is how we report liveness to the upstream. /// /// We trap errors because... well... quite frankly there is nothing useful /// to do if the Kubernetes API is unavailable, and we can merrily continue /// doing our processing of the pod queue. pub async fn update_node(client: &APIClient, node_name: &str) { let node_client: Api<Node> = Api::all(client.clone()); // Get me a node let node_res = node_client.get(node_name).await; match node_res { Err(e) => { error!("Failed to get node: {:?}", e); } Ok(node) => { debug!("node update complete, beginning lease update"); let uid = node.metadata.unwrap_or_default().uid.unwrap_or_default(); update_lease(&uid, node_name, client).await; } } } /// Create a node lease /// /// This creates a new node lease and claims the node for a set /// period of time. Leases work by creating a new Lease object /// and then using an ownerReference to tie it to a particular node. /// /// As far as I can tell, leases ALWAYS go in the 'kube-node-lease' /// namespace, no exceptions. async fn create_lease(node_uid: &str, node_name: &str, client: &APIClient) { let leases: Api<Lease> = Api::namespaced(client.clone(), "kube-node-lease"); let lease = lease_definition(node_uid, node_name); let lease = serde_json::from_value(lease) .expect("failed to deserialize lease from lease definition JSON"); let resp = leases.create(&PostParams::default(), &lease).await; match resp { Ok(_) => debug!("Created lease"), Err(e) => error!("Failed to create lease: {}", e), } } /// Update the Kubernetes node lease, essentially requesting that we keep /// the lease for another period. /// /// TODO: Our patch is overzealous right now. We just need to update the /// timestamp. async fn update_lease(node_uid: &str, node_name: &str, client: &APIClient) { let leases: Api<Lease> = Api::namespaced(client.clone(), "kube-node-lease"); let lease = lease_definition(node_uid, node_name); let lease_data = serde_json::to_vec(&lease).expect("Lease should always be serializable to JSON"); let resp = leases .patch(node_name, &PatchParams::default(), lease_data) .await; match resp { Ok(_) => info!("Updated lease"), Err(e) => error!("Failed to update lease: {}", e), } } /// Define a new node that will handle WASM load. /// /// The most important part of this spec is the set of labels, which control /// how pods are scheduled on this node. It claims the wasm-wasi architecture, /// though perhaps this should be wasm32-wasi. I am not clear what to do with /// the OS field. I have seen 'emscripten' used for this field, but in our case /// the runtime is not emscripten, and besides... specifying which runtime we /// use seems like a misstep. Ideally, we'll be able to support multiple runtimes.
fn node_definition(config: Config, arch: &str) -> serde_json::Value { let ts = Time(Utc::now()); serde_json::json!({ "apiVersion": "v1", "kind": "Node", "metadata": { "name": config.node_name, "labels": { "beta.kubernetes.io/arch": arch, "beta.kubernetes.io/os": "linux", "kubernetes.io/arch": arch, "kubernetes.io/os": "linux", "kubernetes.io/hostname": config.hostname, "kubernetes.io/role": "agent", "type": "krustlet" }, "annotations": { "node.alpha.kubernetes.io/ttl": "0", "volumes.kubernetes.io/controller-managed-attach-detach": "true" } }, "spec": { "podCIDR": "10.244.0.0/24" }, "status": { "nodeInfo": { "architecture": "wasm-wasi", "bootID": "", "containerRuntimeVersion": "mvp", "kernelVersion": "", "kubeProxyVersion": "v1.17.0", "kubeletVersion": "v1.17.0", "machineID": "", "operatingSystem": "linux", "osImage": "", "systemUUID": "" }, "capacity": { "cpu": "4", "ephemeral-storage": "61255492Ki", "hugepages-1Gi": "0", "hugepages-2Mi": "0", "memory": "4032800Ki", "pods": "30" }, "allocatable": { "cpu": "4", "ephemeral-storage": "61255492Ki", "hugepages-1Gi": "0", "hugepages-2Mi": "0", "memory": "4032800Ki", "pods": "30" }, "conditions": [ { "type": "Ready", "status": "True", "lastHeartbeatTime": ts, "lastTransitionTime": ts, "reason": "KubeletReady", "message": "kubelet is ready", }, { "type": "OutOfDisk", "status": "False", "lastHeartbeatTime": ts, "lastTransitionTime": ts, "reason": "KubeletHasSufficientDisk", "message": "kubelet has sufficient disk space available", }, ], "addresses": [ { "type": "InternalIP", "address": config.node_ip }, { "type": "Hostname", "address": config.hostname } ], "daemonEndpoints": { "kubeletEndpoint": { "Port": config.server_config.port } } } }) } /// Define a new coordination.Lease object for Kubernetes /// /// The lease tells Kubernetes that we want to claim the node for a while /// longer. And then tells Kubernetes how long it should wait before /// expecting a new lease. fn lease_definition(node_uid: &str, node_name: &str) -> serde_json::Value { serde_json::json!( { "apiVersion": "coordination.k8s.io/v1", "kind": "Lease", "metadata": { "name": node_name, "ownerReferences": [ { "apiVersion": "v1", "kind": "Node", "name": node_name, "uid": node_uid } ] }, "spec": lease_spec_definition(node_name) } ) } /// Defines a new coordination lease for Kubernetes /// /// We set the lease times, the lease duration, and the node name. fn lease_spec_definition(node_name: &str) -> serde_json::Value { // Workaround for https://github.com/deislabs/krustlet/issues/5 // In the future, use LeaseSpec rather than a JSON value let now = Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Micros, true); serde_json::json!( { "holderIdentity": node_name, "acquireTime": now, "renewTime": now, "leaseDurationSeconds": 300 } ) }
{ info!("created node just fine"); let node_uid = node.metadata.unwrap_or_default().uid.unwrap_or_default(); create_lease(&node_uid, &node_name, &client).await }
arrayElementsProduct.py
#Question: https://python.web.id/blog/given-an-array-of-integers-cf/ def arrayElementsProduct(inputArray):
''' >>> inputArray = [1, 3, 2, 10] >>> arrayElementsProduct(inputArray) 60 >>> >>> inputArray = [2, 4, 10, 1] >>> arrayElementsProduct(inputArray) 80 >>> inputArray = [1, 1] >>> arrayElementsProduct(inputArray) 1 >>> '''
product = 1 for numb in inputArray: product *= numb return product
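On Python 3.8+ the same reduction is available in the standard library, so the loop can be written as a one-liner with math.prod:

from math import prod

def arrayElementsProduct(inputArray):
    return prod(inputArray)  # prod([]) == 1, matching the loop's initial value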
footprint.py
# -*- coding: utf-8 -*- """ @author: Chris Lucas """ import math import numpy as np from shapely.geometry import ( Polygon, MultiPolygon, LineString, MultiLineString, LinearRing ) from shapely import wkt from building_boundary import utils def line_orientations(lines): """ Computes the orientations of the lines. Parameters ---------- lines : list of (2x2) array The lines defined by the coordinates of two points. Returns ------- orientations : list of float The orientations of the lines in radians from 0 to pi (east to west counterclockwise) 0 to -pi (east to west clockwise) """ orientations = [] for l in lines: dx, dy = l[0] - l[1] orientation = math.atan2(dy, dx) if not any([np.isclose(orientation, o) for o in orientations]): orientations.append(orientation) return orientations def geometry_orientations(geom): """ Computes the orientations of the lines of a geometry (Polygon, MultiPolygon, LineString, MultiLineString, or LinearRing). Parameters ---------- geom : Polygon, MultiPolygon, LineString, MultiLineString, or LinearRing The geometry Returns ------- orientations : list of float The orientations of the lines of the geometry in radians from 0 to pi (east to west counterclockwise) 0 to -pi (east to west clockwise) """ orientations = [] if type(geom) == Polygon: lines = utils.create_pairs(geom.exterior.coords[:-1]) orientations = line_orientations(lines) elif type(geom) == MultiPolygon: for p in geom: lines = utils.create_pairs(p.exterior.coords[:-1]) orientations.extend(line_orientations(lines)) elif type(geom) == LineString: if geom.coords[0] == geom.coords[-1]: lines = utils.create_pairs(geom.coords[:-1]) else: lines = list(utils.create_pairs(geom.coords))[:-1] orientations = line_orientations(lines) elif type(geom) == MultiLineString: for l in geom: if l.coords[0] == l.coords[-1]: lines = utils.create_pairs(l.coords[:-1]) else: lines = list(utils.create_pairs(l.coords))[:-1] orientations.extend(line_orientations(lines)) elif type(geom) == LinearRing: lines = utils.create_pairs(geom.coords[:-1]) orientations = line_orientations(lines) else: raise TypeError('Invalid geometry type. Expects Polygon, ' 'MultiPolygon, LineString, MultiLineString, ' 'or LinearRing.') return orientations def compute_orientations(footprint_wkt):
""" Computes the orientations of the footprint. Parameters ---------- footprint_wkt : string The footprint geometry defined by a WKT string. Returns ------- orientations : list of float The orientations of the lines of the geometry in radians from 0 to pi (east to west counterclockwise) 0 to -pi (east to west clockwise) """ footprint_geom = wkt.loads(footprint_wkt) orientations = geometry_orientations(footprint_geom) return orientations
rlog.go
// Copyright © 2020 Uzhinskiy Boris <[email protected]> // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package rlog import ( "fmt" ) type Config struct { fname string maxsize int32 }
}
func init() { fmt.Println("RLOG - rotate log")
CodeEditor.spec.tsx
import { CodeEditor, CodeEditorProps } from './CodeEditor'; import TestUtil from '../../Util/TestUtil'; import SldStyleParser from 'geostyler-sld-parser'; import { StyleParser } from 'geostyler-style'; describe('CodeEditor', () => { let wrapper: any; let dummyStyle = TestUtil.getMarkStyle(); let onStyleChangeDummy: jest.Mock; let sldParser = new SldStyleParser(); const delay = 1337; beforeEach(() => { onStyleChangeDummy = jest.fn(); const props: CodeEditorProps = { style: dummyStyle, onStyleChange: onStyleChangeDummy, parsers: [ sldParser ], delay }; wrapper = TestUtil.shallowRenderComponent(CodeEditor, props); }); it('is defined', () => { expect(CodeEditor).toBeDefined(); }); it('renders correctly', () => { expect(wrapper).not.toBeUndefined(); }); describe('defaultParser', () => { it('sets the defaultParser if passed as prop', () => { const defaultParserProps: CodeEditorProps = { style: dummyStyle, onStyleChange: onStyleChangeDummy, parsers: [ sldParser ], defaultParser: sldParser, delay }; const defaultValueWrapper = TestUtil.shallowRenderComponent(CodeEditor, defaultParserProps); const activeParser: StyleParser = defaultValueWrapper.state('activeParser'); expect(activeParser.title).toEqual(SldStyleParser.title); }); }); describe('updateValueFromStyle', () => { it('sets the value to the geostyler-style if no activeparser is set', () => { const updateValueFromStyle = wrapper.instance().updateValueFromStyle; updateValueFromStyle(dummyStyle); const value = wrapper.state().value; expect(value).toEqual(JSON.stringify(dummyStyle, null, 2)); }); // TODO // it('sets the value as sld if active parsers is SLD Parser', async () => { // await new Promise((resolve) => { // const sldParser = new SldStyleParser(); // sldParser.writeStyle(dummyStyle) // .then(async (sld: string) => { // wrapper.setState({ // activeParser: SldStyleParser // }); // const updateValueFromStyle = wrapper.instance().updateValueFromStyle; // await updateValueFromStyle(dummyStyle); // const value = wrapper.state().value; // expect(value).toEqual(sld); // resolve(); // }); // }); // }); }); describe('valueFromStyleInput', () => { it('returns geostyler-style if no activeparser is set', async () => { const valueFromStyleInput = wrapper.instance().valueFromStyleInput; const value = await valueFromStyleInput(dummyStyle); expect(value).toEqual(JSON.stringify(dummyStyle, null, 2)); }); it('returns the value as sld if active parsers is SLD Parser', async () => { wrapper.setState({ activeParser: sldParser }); await new Promise((resolve) => { sldParser.writeStyle(dummyStyle) .then(async (sld: string) => { const valueFromStyleInput = wrapper.instance().valueFromStyleInput; const value = await valueFromStyleInput(dummyStyle); expect(value).toEqual(sld); resolve(); }); }); });
describe('getModeByParser', () => { it('returns "application/json" if activeParser is NOT "SLD Style Parser"', () => { const getModeByParser = wrapper.instance().getModeByParser; const value = getModeByParser(); expect(value).toEqual('application/json'); }); it('returns "application/xml" if activeParser is "SLD Style Parser"', () => { wrapper.setState({ activeParser: sldParser }); const getModeByParser = wrapper.instance().getModeByParser; const value = getModeByParser(); expect(value).toEqual('application/xml'); }); }); describe('styleFromValue', () => { it('returns geostyler-style if no activeparser is set', async () => { const styleFromValue = wrapper.instance().styleFromValue; const value = await styleFromValue(JSON.stringify(dummyStyle)); expect(value).toEqual(dummyStyle); }); it('returns geostyler-style if active parsers is SLD Parser and value is SLD string', async () => { wrapper.setState({ activeParser: sldParser }); await new Promise((resolve) => { sldParser.writeStyle(dummyStyle) .then(async (sld: string) => { const styleFromValue = wrapper.instance().styleFromValue; const value = await styleFromValue(sld); expect(value).toEqual(dummyStyle); resolve(); }); }); }); }); describe('onChange', () => { it('sets the passed value in the state', () => { const onChange = wrapper.instance().onChange; const value = JSON.stringify(dummyStyle); onChange(null, null, value); expect(wrapper.state().value).toBe(value); }); it('tries to parse the passed value', () => { const onChange = wrapper.instance().onChange; const value = JSON.stringify(dummyStyle); const styleFromValueDummy = wrapper.instance().styleFromValue = jest.fn(); onChange(null, null, value); expect(styleFromValueDummy).toBeCalledWith(value); }); // TODO // it('calls a passed onStyleChange method with the parsed style', async () => { // const onChange = wrapper.instance().onChange; // wrapper.setState({ // activeParser: SldStyleParser // }); // await new Promise((resolve) => { // const sldParser = new SldStyleParser(); // sldParser.writeStyle(dummyStyle) // .then(async (sld: string) => { // await onChange(null, null, sld); // expect(onStyleChangeDummy).toBeCalledWith(dummyStyle); // resolve(); // }); // }); // }); }); describe('onSelect', () => { it('sets select parser as active parser', () => { const onSelect = wrapper.instance().onSelect; onSelect(sldParser.title); expect(wrapper.state().activeParser).toEqual(sldParser); }); it('calls "updateValueFromStyle"', () => { const updateValueFromStyleDummy = wrapper.instance().updateValueFromStyle = jest.fn(); const onSelect = wrapper.instance().onSelect; onSelect(sldParser.title); expect(updateValueFromStyleDummy).toBeCalledWith(dummyStyle); }); }); describe('handleOnChange', () => { it('calls "onChange" after [props.delay] milliseconds', () => { jest.useFakeTimers(); const handleOnChange = wrapper.instance().handleOnChange; const onChangeDummy = wrapper.instance().onChange = jest.fn(); handleOnChange(null, null, dummyStyle); expect(setTimeout).toHaveBeenCalledTimes(1); expect(setTimeout).toHaveBeenLastCalledWith(expect.any(Function), delay); jest.runOnlyPendingTimers(); expect(onChangeDummy).toBeCalledWith(null, null, dummyStyle); jest.clearAllTimers(); }); }); describe('getParserOptions', () => { it('returns a Select.Option for every passed parser', () => { const getParserOptions = wrapper.instance().getParserOptions; const gots = getParserOptions(); gots.forEach((got: any, index: number) => { expect(got.type.name).toBe('Option'); }); }); }); // TODO // 
describe('onDownloadButtonClick', () => { // it('calls saveAs', () => { // const spy = jest.spyOn(fileSaver, 'saveAs'); // const onDownloadButtonClick = wrapper.instance().onDownloadButtonClick; // onDownloadButtonClick(); // expect(spy).toBeCalled(); // }); // }); });
});
param_server.py
import time, os, json import numpy as np import torch import torch.distributed as dist from torch.autograd import Variable def test_model(model, test_data, dev): correct, total = 0, 0 model.eval() with torch.no_grad(): for data, target in test_data: data, target = Variable(data).cuda(dev), Variable(target).cuda(dev) output = model(data) # get the index of the max log-probability _, predictions = output.max(1) total += predictions.size(0) correct += torch.sum(predictions == target.data).float() acc = correct / total return acc.item() def update_model(model, global_mu, size, cpu, gpu, args): # all_param = model.state_dict() # receive the parameter variance from workers for param in model.parameters(): tensor = torch.zeros_like(param.data, device=cpu) gather_list = [torch.zeros_like(param.data, device=cpu) for _ in range(size)] dist.gather(tensor=tensor, gather_list=gather_list, dst=0) param.data = torch.zeros_like(param.data, device=gpu) for w in range(size): # Suppose the model received from clients is well processed param.data = param.data + gather_list[w].clone().detach().to(gpu) # receive averaged K from workers avg_k_list = [torch.tensor(0.0) for _ in range(size)] dist.gather(tensor=torch.tensor(0.0), gather_list=avg_k_list, dst=0) avg_k = sum(avg_k_list) print('Averaged K:', avg_k) # send averaged K to workers avg_k_list = [avg_k if args.avg_k==-1 else torch.tensor(float(args.avg_k)) for _ in range(size)] dist.scatter(tensor=avg_k, scatter_list=avg_k_list) # receive the mu from clients for idx, param in enumerate(global_mu): tensor = torch.zeros_like(param.data, device=cpu) gather_list = [torch.zeros_like(param.data, device=cpu) for _ in range(size)] dist.gather(tensor=tensor, gather_list=gather_list, dst=0) global_mu[idx] = torch.zeros_like(param.data, device=gpu) for w in range(size): # Suppose the model received from clients is well processed global_mu[idx] = global_mu[idx] + gather_list[w].clone().detach().to(gpu) # send the parameters to workers for param in model.parameters(): tmp_p = param.clone().detach().to(cpu) scatter_p_list = [tmp_p for _ in range(size)] dist.scatter(tensor=tmp_p, scatter_list=scatter_p_list) if torch.sum(torch.isnan(tmp_p)) > 0: print("NaN occurs. Terminate. ") exit(-1) # send global_mu to workers for param in global_mu: tmp_p = param.clone().detach().to(cpu) scatter_p_list = [tmp_p for _ in range(size)] dist.scatter(tensor=tmp_p, scatter_list=scatter_p_list) # model.load_state_dict(all_param) def run(size, model, args, test_data, f_result, cpu, gpu): # Receive the weights from all clients temp_w = torch.tensor([0.0 for _ in range(args.num_workers+1)]) weights = [torch.tensor([0.0 for _ in range(args.num_workers+1)]) for _ in range(size)] dist.gather(tensor=temp_w, gather_list=weights, dst=0) weights = sum(weights) weights = weights / torch.sum(weights) print('weights:', weights) # send weights to clients weights_list = [weights.clone().detach().to(cpu) for _ in range(size)] dist.scatter(tensor=temp_w, scatter_list=weights_list) start = time.time() model = model.cuda(gpu) for p in model.parameters(): tmp_p = p.clone().detach().to(cpu) scatter_p_list = [tmp_p for _ in range(size)] # dist.scatter(tensor=tmp_p, scatter_list=scatter_p_list, group=group) dist.scatter(tensor=tmp_p, scatter_list=scatter_p_list) global_mu = [torch.zeros_like(param.data, device=gpu) for param in model.parameters()] print('Model has been sent to all nodes!
') print('Begin!') np.random.seed(42) for t in range(args.T): model.train() # send participants to all clients participants = np.random.choice(np.arange(len(weights)), size=args.num_part, replace=True, p=weights.numpy()) if args.partial else np.arange(len(weights)) print('Participants list:', list(participants)) participants = torch.tensor(participants).to(cpu) part_list = [participants for _ in range(size)] dist.scatter(tensor=participants, scatter_list=part_list) # receive the list of train loss from workers info_list = [torch.tensor(0.0) for _ in range(size)] # dist.gather(tensor=torch.tensor([0.0]), gather_list=info_list, group=group) dist.gather(tensor=torch.tensor(0.0), gather_list=info_list, dst=0) # info_list = np.concatenate([list(a) for a in info_list]) # train_loss = sum(info_list).item() / args.num_part if args.partial else sum(info_list * weights).item() train_loss = sum(info_list).item() # if args.partial: # update_model_partial(model, size, cpu, gpu, args.num_part) # else: # update_model_full(model, size, cpu, gpu, weights) update_model(model, global_mu, size, cpu, gpu, args) timestamp = time.time() - start test_acc = test_model(model, test_data, gpu) print("Epoch: {}\t\tLoss: {}\t\tAccuracy: {}".format(t, train_loss, test_acc)) f_result.write(str(t) + "\t" + str(timestamp) + "\t" + str(train_loss) + "\t" + str(test_acc) + "\n") f_result.flush() def
(rank, size, model, args, test_data, cpu, gpu, backend='mpi'): if backend == 'mpi': dist.init_process_group(backend) elif backend == 'gloo': os.environ['MASTER_ADDR'] = '127.0.0.1' os.environ['MASTER_PORT'] = '29500' dist.init_process_group(backend, rank=rank, world_size=size) if not os.path.exists(args.result): os.makedirs(args.result) result_file = os.path.join(args.result, '{}.txt'.format(len(os.listdir(args.result)))) f_result = open(result_file, 'w') f_result.write(json.dumps(vars(args)) + '\n') run(size, model, args, test_data, f_result, cpu, gpu)
init_processes
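The server loop in param_server.py above is built around paired dist.gather / dist.scatter calls: every worker pushes a tensor to rank 0, rank 0 aggregates, and the result is scattered back. A minimal sketch of that round trip, assuming a hypothetical single-process gloo group purely for illustration (the real script runs one process per node, typically over MPI, and the port below is made up):

# Sketch of the gather -> aggregate -> scatter round trip used by the
# parameter server above. Single-process group on the gloo backend is an
# assumption for illustration only; it lets the one process act as rank 0.
import os
import torch
import torch.distributed as dist

os.environ['MASTER_ADDR'] = '127.0.0.1'
os.environ['MASTER_PORT'] = '29501'  # hypothetical free port
dist.init_process_group('gloo', rank=0, world_size=1)

param = torch.ones(4)                    # stand-in for one model parameter
gather_list = [torch.zeros_like(param)]  # one slot per rank (world_size == 1)
dist.gather(tensor=param, gather_list=gather_list, dst=0)  # workers -> server

aggregated = sum(gather_list)            # server-side aggregation
out = torch.zeros_like(param)
dist.scatter(tensor=out, scatter_list=[aggregated])        # server -> workers
assert torch.equal(out, aggregated)

dist.destroy_process_group()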
filesystem.go
package sshttp import ( "fmt" "io" "net/http" "net/url" "os" "path/filepath" "sort" "github.com/pkg/sftp" "golang.org/x/crypto/ssh" ) // File implements http.File using remote files over SFTP, and is returned // by FileSystem's Open method. type File struct { // Embed for interface implementation *sftp.File // Client for use with File.Readdir sftpc *sftp.Client // Name of file in remote filesystem name string // Current file offset with File.Readdir offset int // EOF on next Readdir loop eofNext bool } // Readdir is used to implement http.File for remote files over SFTP. // It behaves in the same manner as os.File.Readdir: // https://godoc.org/os#File.Readdir. func (f *File) Readdir(count int) ([]os.FileInfo, error) { // Return and signal end of files if f.eofNext { return nil, io.EOF } // Gather other files in the same directory fis, err := f.sftpc.ReadDir(filepath.Dir(f.name)) if err != nil { return nil, err } sort.Sort(byBaseName(fis)) // If 0 or negative count is specified, return all files // and EOF next. if count <= 0 || len(fis) <= count { f.eofNext = true return fis, nil } // If files with offset is less than requested length, // return the remainder and EOF next. if len(fis)-f.offset <= count { f.eofNext = true return fis[f.offset:], nil } // If more files exist than requested, return requested // number and add to offset out := make([]os.FileInfo, count) copy(out, fis[f.offset:f.offset+count]) f.offset += count return out, nil } // FileSystem implements http.FileSystem for remote files over SFTP. type FileSystem struct { pair *clientPair path string } // NewFileSystem creates a new FileSystem which can access remote files over // SFTP. The resulting FileSystem can be used by net/http to provide access // to remote files over SFTP, as if they were local. The host parameter // specifies the URI to dial and access, and the configuration parameter is // used to configure the underlying SSH connection. // // A host must be a complete URI, including a protocol segment. For example, // sftp://127.0.0.1:22/home/foo dials 127.0.0.1 on port 22, and accesses the // /home/foo directory on the host. func
(host string, config *ssh.ClientConfig) (*FileSystem, error) { // Ensure valid URI with proper protocol u, err := url.Parse(host) if err != nil { return nil, err } if u.Scheme != Protocol { return nil, fmt.Errorf("invalid URL scheme: %s", u.Scheme) } // Create clientPair with SSH and SFTP clients pair, err := dialSSHSFTP(u.Host, config) if err != nil { return nil, err } return &FileSystem{ pair: pair, path: u.Path, }, nil } // Open attempts to access a file under the directory specified in NewFileSystem, // and attempts to return a http.File for use with net/http. func (fs *FileSystem) Open(name string) (http.File, error) { // Check for the requested file in the remote filesystem fpath := filepath.Join(fs.path, name) f, err := fs.pair.sftpc.Open(fpath) if err != nil { return nil, err } // Create output file file := &File{ File: f, sftpc: fs.pair.sftpc, name: fs.path, } // Check for a directory instead of a file, which requires // a slightly different name with a trailing slash stat, err := f.Stat() if err != nil { return nil, err } if stat.IsDir() { file.name = fpath + "/" } return file, nil } // Close closes open SFTP and SSH connections for this FileSystem. func (fs *FileSystem) Close() error { var sErr stickyError sErr.Set(fs.pair.sftpc.Close()) sErr.Set(fs.pair.sshc.Close()) return sErr.Get() } // byBaseName implements sort.Interface to sort []os.FileInfo. type byBaseName []os.FileInfo func (b byBaseName) Len() int { return len(b) } func (b byBaseName) Less(i int, j int) bool { return b[i].Name() < b[j].Name() } func (b byBaseName) Swap(i int, j int) { b[i], b[j] = b[j], b[i] }
NewFileSystem
constraints.rs
use crate::*; use syn::ext::IdentExt; use syn::parse::{Error as ParseError, Parse, ParseStream, Result as ParseResult}; use syn::punctuated::Punctuated; use syn::spanned::Spanned; use syn::token::Comma; use syn::{bracketed, Expr, Ident, LitStr, Token}; pub fn parse( f: &syn::Field, f_ty: Option<&Ty>, has_instruction_api: bool, ) -> ParseResult<(ConstraintGroup, ConstraintGroup)> { let mut constraints = ConstraintGroupBuilder::new(f_ty); for attr in f.attrs.iter().filter(is_account) { for c in attr.parse_args_with(Punctuated::<ConstraintToken, Comma>::parse_terminated)? { constraints.add(c)?; } } let account_constraints = constraints.build()?; let mut constraints = ConstraintGroupBuilder::new(f_ty); for attr in f.attrs.iter().filter(is_instruction) { if !has_instruction_api { return Err(ParseError::new( attr.span(), "an instruction api must be declared", )); } for c in attr.parse_args_with(Punctuated::<ConstraintToken, Comma>::parse_terminated)? { constraints.add(c)?; } } let instruction_constraints = constraints.build()?; Ok((account_constraints, instruction_constraints)) } pub fn is_account(attr: &&syn::Attribute) -> bool { attr.path .get_ident() .map_or(false, |ident| ident == "account") } pub fn is_instruction(attr: &&syn::Attribute) -> bool { attr.path .get_ident() .map_or(false, |ident| ident == "instruction") } // Parses a single constraint from a parse stream for `#[account(<STREAM>)]`. pub fn parse_token(stream: ParseStream) -> ParseResult<ConstraintToken> { let is_lit = stream.peek(LitStr); if is_lit { let lit: LitStr = stream.parse()?; let c = ConstraintToken::Literal(Context::new(lit.span(), ConstraintLiteral { lit })); return Ok(c); } let ident = stream.call(Ident::parse_any)?; let kw = ident.to_string(); let c = match kw.as_str() { "init" => ConstraintToken::Init(Context::new(ident.span(), ConstraintInit {})), "zero" => ConstraintToken::Zeroed(Context::new(ident.span(), ConstraintZeroed {})), "mut" => ConstraintToken::Mut(Context::new(ident.span(), ConstraintMut {})), "signer" => ConstraintToken::Signer(Context::new(ident.span(), ConstraintSigner {})), "executable" => { ConstraintToken::Executable(Context::new(ident.span(), ConstraintExecutable {})) } "mint" => { stream.parse::<Token![:]>()?; stream.parse::<Token![:]>()?; let kw = stream.call(Ident::parse_any)?.to_string(); stream.parse::<Token![=]>()?; let span = ident .span() .join(stream.span()) .unwrap_or_else(|| ident.span()); match kw.as_str() { "authority" => ConstraintToken::MintAuthority(Context::new( span, ConstraintMintAuthority { mint_auth: stream.parse()?, }, )), "decimals" => ConstraintToken::MintDecimals(Context::new( span, ConstraintMintDecimals { decimals: stream.parse()?, }, )), _ => return Err(ParseError::new(ident.span(), "Invalid attribute")), } } "token" => { stream.parse::<Token![:]>()?; stream.parse::<Token![:]>()?; let kw = stream.call(Ident::parse_any)?.to_string(); stream.parse::<Token![=]>()?; let span = ident .span() .join(stream.span()) .unwrap_or_else(|| ident.span()); match kw.as_str() { "mint" => ConstraintToken::TokenMint(Context::new( span, ConstraintTokenMint { mint: stream.parse()?, }, )), "authority" => ConstraintToken::TokenAuthority(Context::new( span, ConstraintTokenAuthority { auth: stream.parse()?, }, )), _ => return Err(ParseError::new(ident.span(), "Invalid attribute")), } } "bump" => { let bump = { if stream.peek(Token![=]) { stream.parse::<Token![=]>()?; Some(stream.parse()?) 
} else { None } }; ConstraintToken::Bump(Context::new(ident.span(), ConstraintTokenBump { bump })) } _ => { stream.parse::<Token![=]>()?; let span = ident .span() .join(stream.span()) .unwrap_or_else(|| ident.span()); match kw.as_str() { // Deprecated since 0.11 "belongs_to" => { return Err(ParseError::new( ident.span(), "belongs_to is deprecated, please use has_one", )) } "has_one" => ConstraintToken::HasOne(Context::new( span, ConstraintHasOne { join_target: stream.parse()?, }, )), "owner" => ConstraintToken::Owner(Context::new( span, ConstraintOwner { owner_target: stream.parse()?, }, )), "rent_exempt" => ConstraintToken::RentExempt(Context::new( span, match stream.parse::<Ident>()?.to_string().as_str() { "skip" => ConstraintRentExempt::Skip, "enforce" => ConstraintRentExempt::Enforce, _ => { return Err(ParseError::new( span, "rent_exempt must be either skip or enforce", )) } }, )), "state" => ConstraintToken::State(Context::new( span, ConstraintState { program_target: stream.parse()?, }, )), "payer" => ConstraintToken::Payer(Context::new( span, ConstraintPayer { target: stream.parse()?, }, )), "space" => ConstraintToken::Space(Context::new( span, ConstraintSpace { space: stream.parse()?, }, )), "seeds" => { let seeds; let bracket = bracketed!(seeds in stream); ConstraintToken::Seeds(Context::new( span.join(bracket.span).unwrap_or(span), ConstraintSeeds { seeds: seeds.parse_terminated(Expr::parse)?, }, )) } "constraint" => ConstraintToken::Raw(Context::new( span, ConstraintRaw { raw: stream.parse()?, }, )), "close" => ConstraintToken::Close(Context::new( span, ConstraintClose { sol_dest: stream.parse()?, }, )), "address" => ConstraintToken::Address(Context::new( span, ConstraintAddress { address: stream.parse()?, }, )), _ => return Err(ParseError::new(ident.span(), "Invalid attribute")), } } }; Ok(c) } #[derive(Default)] pub struct ConstraintGroupBuilder<'ty> { pub f_ty: Option<&'ty Ty>, pub init: Option<Context<ConstraintInit>>, pub zeroed: Option<Context<ConstraintZeroed>>, pub mutable: Option<Context<ConstraintMut>>, pub signer: Option<Context<ConstraintSigner>>, pub has_one: Vec<Context<ConstraintHasOne>>, pub literal: Vec<Context<ConstraintLiteral>>, pub raw: Vec<Context<ConstraintRaw>>, pub owner: Option<Context<ConstraintOwner>>, pub rent_exempt: Option<Context<ConstraintRentExempt>>, pub seeds: Option<Context<ConstraintSeeds>>, pub executable: Option<Context<ConstraintExecutable>>, pub state: Option<Context<ConstraintState>>, pub payer: Option<Context<ConstraintPayer>>, pub space: Option<Context<ConstraintSpace>>, pub close: Option<Context<ConstraintClose>>, pub address: Option<Context<ConstraintAddress>>, pub token_mint: Option<Context<ConstraintTokenMint>>, pub token_authority: Option<Context<ConstraintTokenAuthority>>, pub mint_authority: Option<Context<ConstraintMintAuthority>>, pub mint_decimals: Option<Context<ConstraintMintDecimals>>, pub bump: Option<Context<ConstraintTokenBump>>, } impl<'ty> ConstraintGroupBuilder<'ty> { pub fn new(f_ty: Option<&'ty Ty>) -> Self { Self { f_ty, init: None, zeroed: None, mutable: None, signer: None, has_one: Vec::new(), literal: Vec::new(), raw: Vec::new(), owner: None, rent_exempt: None, seeds: None, executable: None, state: None, payer: None, space: None, close: None, address: None, token_mint: None, token_authority: None, mint_authority: None, mint_decimals: None, bump: None, } } pub fn build(mut self) -> ParseResult<ConstraintGroup> { // Init. 
if let Some(i) = &self.init { match self.mutable { Some(m) => { return Err(ParseError::new( m.span(), "mut cannot be provided with init", )) } None => self .mutable .replace(Context::new(i.span(), ConstraintMut {})), }; // Rent exempt if not explicitly skipped. if self.rent_exempt.is_none() { self.rent_exempt .replace(Context::new(i.span(), ConstraintRentExempt::Enforce)); } if self.payer.is_none() { return Err(ParseError::new( i.span(), "payer must be provided when initializing an account", )); } // When initializing a non-PDA account, the account being // initialized must sign to invoke the system program's create // account instruction. if self.signer.is_none() && self.seeds.is_none() { self.signer .replace(Context::new(i.span(), ConstraintSigner {})); } } // Zero. if let Some(z) = &self.zeroed { match self.mutable { Some(m) => { return Err(ParseError::new( m.span(), "mut cannot be provided with zeroed", )) } None => self .mutable .replace(Context::new(z.span(), ConstraintMut {})), }; // Rent exempt if not explicitly skipped. if self.rent_exempt.is_none() { self.rent_exempt .replace(Context::new(z.span(), ConstraintRentExempt::Enforce)); } } // Seeds. if let Some(i) = &self.seeds { if self.init.is_some() && self.payer.is_none() { return Err(ParseError::new( i.span(), "payer must be provided when creating a program derived address", )); } if self.bump.is_none() { return Err(ParseError::new( i.span(), "bump must be provided with seeds", )); } } // Token. if let Some(token_mint) = &self.token_mint { if self.token_authority.is_none() { return Err(ParseError::new( token_mint.span(), "token authority must be provided if token mint is", )); } if self.init.is_none() { return Err(ParseError::new( token_mint.span(), "init is required for a pda token", )); } } if let Some(token_authority) = &self.token_authority { if self.token_mint.is_none() { return Err(ParseError::new( token_authority.span(), "token authority must be provided if token mint is", )); } } // Mint. if let Some(mint_decimals) = &self.mint_decimals { if self.mint_authority.is_none() { return Err(ParseError::new( mint_decimals.span(), "mint authority must be provided if mint decimals is", )); } } if let Some(mint_authority) = &self.mint_authority { if self.mint_decimals.is_none() { return Err(ParseError::new( mint_authority.span(), "mint decimals must be provided if mint authority is", )); } } // SPL Space. if self.init.is_some() && self.seeds.is_some() && self.token_mint.is_some() && (self.mint_authority.is_some() || self.token_authority.is_some()) && self.space.is_some() { return Err(ParseError::new( self.space.as_ref().unwrap().span(), "space is not required for initializing an spl account", )); } let ConstraintGroupBuilder { f_ty: _, init, zeroed, mutable, signer, has_one, literal, raw, owner, rent_exempt, seeds, executable, state, payer, space, close, address, token_mint, token_authority, mint_authority, mint_decimals, bump, } = self; // Converts Option<Context<T>> -> Option<T>. macro_rules! into_inner { ($opt:ident) => { $opt.map(|c| c.into_inner()) }; ($opt:expr) => { $opt.map(|c| c.into_inner()) }; } // Converts Vec<Context<T>> - Vec<T>. macro_rules! 
into_inner_vec { ($opt:ident) => { $opt.into_iter().map(|c| c.into_inner()).collect() }; } let (owner, pda_owner) = { if seeds.is_some() { (None, owner.map(|o| o.owner_target.clone())) } else { (owner, None) } }; let seeds = seeds.map(|c| ConstraintSeedsGroup { is_init: init.is_some(), seeds: c.seeds.clone(), bump: into_inner!(bump) .map(|b| b.bump) .expect("bump must be provided with seeds"), }); Ok(ConstraintGroup { init: init.as_ref().map(|_| Ok(ConstraintInitGroup { seeds: seeds.clone(), payer: into_inner!(payer.clone()).map(|a| a.target), space: space.clone().map(|s| s.space.clone()), kind: if let Some(tm) = &token_mint { InitKind::Token { mint: tm.clone().into_inner().mint, owner: match &token_authority { Some(a) => a.clone().into_inner().auth, None => return Err(ParseError::new( tm.span(), "authority must be provided to initialize a token program derived address" )), }, } } else if let Some(d) = &mint_decimals { InitKind::Mint { decimals: d.clone().into_inner().decimals, owner: match &mint_authority { Some(a) => a.clone().into_inner().mint_auth, None => return Err(ParseError::new( d.span(), "authority must be provided to initialize a mint program derived address" )) } } } else { InitKind::Program { owner: pda_owner.clone(), } }, })).transpose()?, zeroed: into_inner!(zeroed), mutable: into_inner!(mutable), signer: into_inner!(signer), has_one: into_inner_vec!(has_one), literal: into_inner_vec!(literal), raw: into_inner_vec!(raw), owner: into_inner!(owner), rent_exempt: into_inner!(rent_exempt), executable: into_inner!(executable), state: into_inner!(state), close: into_inner!(close), address: into_inner!(address), seeds, }) } pub fn add(&mut self, c: ConstraintToken) -> ParseResult<()>
fn add_init(&mut self, c: Context<ConstraintInit>) -> ParseResult<()> { if self.init.is_some() { return Err(ParseError::new(c.span(), "init already provided")); } if self.zeroed.is_some() { return Err(ParseError::new(c.span(), "zeroed already provided")); } self.init.replace(c); Ok(()) } fn add_zeroed(&mut self, c: Context<ConstraintZeroed>) -> ParseResult<()> { if self.zeroed.is_some() { return Err(ParseError::new(c.span(), "zeroed already provided")); } if self.init.is_some() { return Err(ParseError::new(c.span(), "init already provided")); } self.zeroed.replace(c); Ok(()) } fn add_close(&mut self, c: Context<ConstraintClose>) -> ParseResult<()> { if !matches!(self.f_ty, Some(Ty::ProgramAccount(_))) && !matches!(self.f_ty, Some(Ty::Loader(_))) { return Err(ParseError::new( c.span(), "close must be on a ProgramAccount", )); } if self.mutable.is_none() { return Err(ParseError::new( c.span(), "mut must be provided before close", )); } if self.close.is_some() { return Err(ParseError::new(c.span(), "close already provided")); } self.close.replace(c); Ok(()) } fn add_address(&mut self, c: Context<ConstraintAddress>) -> ParseResult<()> { if self.address.is_some() { return Err(ParseError::new(c.span(), "address already provided")); } self.address.replace(c); Ok(()) } fn add_token_mint(&mut self, c: Context<ConstraintTokenMint>) -> ParseResult<()> { if self.token_mint.is_some() { return Err(ParseError::new(c.span(), "token mint already provided")); } if self.init.is_none() { return Err(ParseError::new( c.span(), "init must be provided before token", )); } self.token_mint.replace(c); Ok(()) } fn add_bump(&mut self, c: Context<ConstraintTokenBump>) -> ParseResult<()> { if self.bump.is_some() { return Err(ParseError::new(c.span(), "bump already provided")); } if self.seeds.is_none() { return Err(ParseError::new( c.span(), "seeds must be provided before bump", )); } self.bump.replace(c); Ok(()) } fn add_token_authority(&mut self, c: Context<ConstraintTokenAuthority>) -> ParseResult<()> { if self.token_authority.is_some() { return Err(ParseError::new( c.span(), "token authority already provided", )); } if self.init.is_none() { return Err(ParseError::new( c.span(), "init must be provided before token authority", )); } self.token_authority.replace(c); Ok(()) } fn add_mint_authority(&mut self, c: Context<ConstraintMintAuthority>) -> ParseResult<()> { if self.mint_authority.is_some() { return Err(ParseError::new(c.span(), "mint authority already provided")); } if self.init.is_none() { return Err(ParseError::new( c.span(), "init must be provided before mint authority", )); } self.mint_authority.replace(c); Ok(()) } fn add_mint_decimals(&mut self, c: Context<ConstraintMintDecimals>) -> ParseResult<()> { if self.mint_decimals.is_some() { return Err(ParseError::new(c.span(), "mint decimals already provided")); } if self.init.is_none() { return Err(ParseError::new( c.span(), "init must be provided before mint decimals", )); } self.mint_decimals.replace(c); Ok(()) } fn add_mut(&mut self, c: Context<ConstraintMut>) -> ParseResult<()> { if self.mutable.is_some() { return Err(ParseError::new(c.span(), "mut already provided")); } self.mutable.replace(c); Ok(()) } fn add_signer(&mut self, c: Context<ConstraintSigner>) -> ParseResult<()> { if self.signer.is_some() { return Err(ParseError::new(c.span(), "signer already provided")); } self.signer.replace(c); Ok(()) } fn add_has_one(&mut self, c: Context<ConstraintHasOne>) -> ParseResult<()> { if self .has_one .iter() .filter(|item| item.join_target == c.join_target) 
.count() > 0 { return Err(ParseError::new(c.span(), "has_one target already provided")); } self.has_one.push(c); Ok(()) } fn add_literal(&mut self, c: Context<ConstraintLiteral>) -> ParseResult<()> { self.literal.push(c); Ok(()) } fn add_raw(&mut self, c: Context<ConstraintRaw>) -> ParseResult<()> { self.raw.push(c); Ok(()) } fn add_owner(&mut self, c: Context<ConstraintOwner>) -> ParseResult<()> { if self.owner.is_some() { return Err(ParseError::new(c.span(), "owner already provided")); } self.owner.replace(c); Ok(()) } fn add_rent_exempt(&mut self, c: Context<ConstraintRentExempt>) -> ParseResult<()> { if self.rent_exempt.is_some() { return Err(ParseError::new(c.span(), "rent already provided")); } self.rent_exempt.replace(c); Ok(()) } fn add_seeds(&mut self, c: Context<ConstraintSeeds>) -> ParseResult<()> { if self.seeds.is_some() { return Err(ParseError::new(c.span(), "seeds already provided")); } self.seeds.replace(c); Ok(()) } fn add_executable(&mut self, c: Context<ConstraintExecutable>) -> ParseResult<()> { if self.executable.is_some() { return Err(ParseError::new(c.span(), "executable already provided")); } self.executable.replace(c); Ok(()) } fn add_state(&mut self, c: Context<ConstraintState>) -> ParseResult<()> { if self.state.is_some() { return Err(ParseError::new(c.span(), "state already provided")); } self.state.replace(c); Ok(()) } fn add_payer(&mut self, c: Context<ConstraintPayer>) -> ParseResult<()> { if self.init.is_none() { return Err(ParseError::new( c.span(), "init must be provided before payer", )); } if self.payer.is_some() { return Err(ParseError::new(c.span(), "payer already provided")); } self.payer.replace(c); Ok(()) } fn add_space(&mut self, c: Context<ConstraintSpace>) -> ParseResult<()> { if self.init.is_none() { return Err(ParseError::new( c.span(), "init must be provided before space", )); } if self.space.is_some() { return Err(ParseError::new(c.span(), "space already provided")); } self.space.replace(c); Ok(()) } }
{ match c { ConstraintToken::Init(c) => self.add_init(c), ConstraintToken::Zeroed(c) => self.add_zeroed(c), ConstraintToken::Mut(c) => self.add_mut(c), ConstraintToken::Signer(c) => self.add_signer(c), ConstraintToken::HasOne(c) => self.add_has_one(c), ConstraintToken::Literal(c) => self.add_literal(c), ConstraintToken::Raw(c) => self.add_raw(c), ConstraintToken::Owner(c) => self.add_owner(c), ConstraintToken::RentExempt(c) => self.add_rent_exempt(c), ConstraintToken::Seeds(c) => self.add_seeds(c), ConstraintToken::Executable(c) => self.add_executable(c), ConstraintToken::State(c) => self.add_state(c), ConstraintToken::Payer(c) => self.add_payer(c), ConstraintToken::Space(c) => self.add_space(c), ConstraintToken::Close(c) => self.add_close(c), ConstraintToken::Address(c) => self.add_address(c), ConstraintToken::TokenAuthority(c) => self.add_token_authority(c), ConstraintToken::TokenMint(c) => self.add_token_mint(c), ConstraintToken::MintAuthority(c) => self.add_mint_authority(c), ConstraintToken::MintDecimals(c) => self.add_mint_decimals(c), ConstraintToken::Bump(c) => self.add_bump(c), } }
vue.config.js
'use strict' const path = require('path') const defaultSettings = require('./src/settings.js') function
(dir) { return path.join(__dirname, dir) } const name = defaultSettings.title || '管理系统' // page title const port = process.env.port || process.env.npm_config_port || 80 // port // vue.config.js configuration notes // official vue.config.js reference: https://cli.vuejs.org/zh/config/#css-loaderoptions // only part of the options are listed here; see the docs for the full configuration module.exports = { // URL the app is deployed under in production and development. // By default, Vue CLI assumes your app is deployed at the root of a domain, // e.g. https://www.ruoyi.vip/. If the app is deployed under a sub-path, use this option to specify it. For example, if the app is deployed at https://www.ruoyi.vip/admin/, set baseUrl to /admin/. publicPath: process.env.NODE_ENV === "production" ? "/" : "/", // name of the output directory for npm run build or yarn build (must match the production path in baseUrl; default dist) outputDir: 'dist', // where generated static assets (js, css, img, fonts) are placed (after the project is built, static assets go into this folder) assetsDir: 'static', // whether to run eslint checks on save; valid values: true | false | 'error' lintOnSave: process.env.NODE_ENV === 'development', // if you don't need production source maps, set this to false to speed up production builds. productionSourceMap: false, // webpack-dev-server options devServer: { host: '0.0.0.0', port: port, proxy: { // detail: https://cli.vuejs.org/config/#devserver-proxy [process.env.VUE_APP_BASE_API]: { target: `http://localhost:8080`, changeOrigin: true, pathRewrite: { ['^' + process.env.VUE_APP_BASE_API]: '' } } }, disableHostCheck: true }, configureWebpack: { name: name, resolve: { alias: { '@': resolve('src') } } }, chainWebpack(config) { config.plugins.delete('preload') // TODO: need test config.plugins.delete('prefetch') // TODO: need test // set svg-sprite-loader config.module .rule('svg') .exclude.add(resolve('src/assets/icons')) .end() config.module .rule('icons') .test(/\.svg$/) .include.add(resolve('src/assets/icons')) .end() .use('svg-sprite-loader') .loader('svg-sprite-loader') .options({ symbolId: 'icon-[name]' }) .end() // set preserveWhitespace config.module .rule('vue') .use('vue-loader') .loader('vue-loader') .tap(options => { options.compilerOptions.preserveWhitespace = true return options }) .end() config .when(process.env.NODE_ENV !== 'development', config => { config .plugin('ScriptExtHtmlWebpackPlugin') .after('html') .use('script-ext-html-webpack-plugin', [{ // `runtime` must be the same as the runtimeChunk name. default is `runtime` inline: /runtime\..*\.js$/ }]) .end() config .optimization.splitChunks({ chunks: 'all', cacheGroups: { libs: { name: 'chunk-libs', test: /[\\/]node_modules[\\/]/, priority: 10, chunks: 'initial' // only package third parties that are initially dependent }, elementUI: { name: 'chunk-elementUI', // split elementUI into a single package priority: 20, // the weight needs to be larger than libs and app or it will be packaged into libs or app test: /[\\/]node_modules[\\/]_?element-ui(.*)/ // in order to adapt to cnpm }, commons: { name: 'chunk-commons', test: resolve('src/components'), // can customize your rules minChunks: 3, // minimum common number priority: 5, reuseExistingChunk: true } } }) config.optimization.runtimeChunk('single'), { from: path.resolve(__dirname, './public/robots.txt'),//anti-crawler robots file to:'./',//copied to the root directory } } ) } }
resolve
csvWriter.py
import csv def
(write_out_path, name, headers, rows_to_write): """ Purpose ------- Writes out a csv file of row data with an optional header. If you don't want a header row, pass None to headers. Parameters ---------- :param write_out_path: The write directory :type write_out_path: str :param name: The file name :type name: str :param headers: The headers for the columns you want to write, or None for no header row :type headers: list :param rows_to_write: A list of rows to write; each row's column values should be elements of an inner list :type rows_to_write: list :return: Nothing, just writes the file into the specified directory under the specified name :rtype: None """ if type(rows_to_write[0]) != list: rows_to_write = [[row] for row in rows_to_write] with open(f"{write_out_path}/{name}.csv", "w", newline="", encoding="utf-8") as csv_file: csv_writer = csv.writer(csv_file) if headers: csv_writer.writerow(headers) for row in rows_to_write: csv_writer.writerow(row)
write_csv
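A short usage sketch for write_csv above; the import path and the "." / "people" / "ids" arguments are hypothetical:

# Example calls for write_csv above, assuming this module is importable
# as csvWriter and the current directory is writable.
from csvWriter import write_csv

# Writes ./people.csv with one header row and two data rows.
write_csv(".", "people", ["name", "age"], [["alice", 30], ["bob", 25]])

# Bare (non-list) elements are wrapped into one-column rows before writing,
# so this produces ./ids.csv with rows 1, 2, 3 under the "id" header.
write_csv(".", "ids", ["id"], [1, 2, 3])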
process.go
// +build darwin freebsd linux windows package process import ( "fmt" "runtime" "github.com/elastic/beats/libbeat/common" "github.com/elastic/beats/libbeat/logp" "github.com/elastic/beats/metricbeat/mb" "github.com/elastic/beats/metricbeat/mb/parse" "github.com/elastic/beats/metricbeat/module/system" "github.com/elastic/gosigar/cgroup" "github.com/pkg/errors" ) var debugf = logp.MakeDebug("system-process") func init() { if err := mb.Registry.AddMetricSet("system", "process", New, parse.EmptyHostParser); err != nil { panic(err) } } // MetricSet that fetches process metrics. type MetricSet struct { mb.BaseMetricSet stats *ProcStats cgroup *cgroup.Reader cacheCmdLine bool } // New creates and returns a new MetricSet. func New(base mb.BaseMetricSet) (mb.MetricSet, error)
// Fetch fetches metrics for all processes. It iterates over each PID and // collects process metadata, CPU metrics, and memory metrics. func (m *MetricSet) Fetch() ([]common.MapStr, error) { procs, err := m.stats.GetProcStats() if err != nil { return nil, errors.Wrap(err, "process stats") } if m.cgroup != nil { for _, proc := range procs { pid, ok := proc["pid"].(int) if !ok { debugf("error converting pid to int for proc %+v", proc) continue } stats, err := m.cgroup.GetStatsForProcess(pid) if err != nil { debugf("error getting cgroups stats for pid=%d, %v", pid, err) continue } if statsMap := cgroupStatsToMap(stats); statsMap != nil { proc["cgroup"] = statsMap } } } return procs, err }
{ config := struct { Procs []string `config:"processes"` Cgroups *bool `config:"process.cgroups.enabled"` EnvWhitelist []string `config:"process.env.whitelist"` CPUTicks bool `config:"cpu_ticks"` CacheCmdLine bool `config:"process.cmdline.cache.enabled"` }{ Procs: []string{".*"}, // collect all processes by default CacheCmdLine: true, } if err := base.Module().UnpackConfig(&config); err != nil { return nil, err } m := &MetricSet{ BaseMetricSet: base, stats: &ProcStats{ Procs: config.Procs, EnvWhitelist: config.EnvWhitelist, CpuTicks: config.CPUTicks, CacheCmdLine: config.CacheCmdLine, }, } err := m.stats.InitProcStats() if err != nil { return nil, err } if runtime.GOOS == "linux" { systemModule, ok := base.Module().(*system.Module) if !ok { return nil, fmt.Errorf("unexpected module type") } if config.Cgroups == nil || *config.Cgroups { debugf("process cgroup data collection is enabled, using hostfs='%v'", systemModule.HostFS) m.cgroup, err = cgroup.NewReader(systemModule.HostFS, true) if err != nil { if err == cgroup.ErrCgroupsMissing { logp.Warn("cgroup data collection will be disabled: %v", err) } else { return nil, errors.Wrap(err, "error initializing cgroup reader") } } } } return m, nil }
upload.po.js
// No need for this now, update next time const uploadElements = { chooseUploadListing: '//*[@id="ngf-label-upload-button-listing"]/input[@id="ngf-upload-button-listing"]', uploadButton: '.btn.btn-ai-success', listingUploadText: '//chpl-upload/div/div/chpl-upload-listings/div/div[2]/div', uploadSuccessfulText: '//*[@id="main-content"]/div/ui-view/chpl-upload/div/div/chpl-upload-listings/div/div[2]/div', }; const path = require('path'); class
{ constructor () { } get chooseUploadListingButton () { return $(uploadElements.chooseUploadListing); } get uploadButton () { return $(uploadElements.uploadButton); } get uploadSuccessfulText () { return $(uploadElements.uploadSuccessfulText); } get listingUploadText () { return $(uploadElements.listingUploadText); } waitForSuccessfulUpload (fileName) { browser.waitUntil( () => this.uploadSuccessfulText.getText().includes(fileName)); } uploadListing (uploadfilePath) { const filePath = path.join(__dirname, uploadfilePath); this.chooseUploadListingButton.addValue(browser.uploadFile(filePath)); this.uploadButton.waitAndClick(); browser.waitUntil( () => this.listingUploadText.isDisplayed()); } } export default UploadPage;
UploadPage
document.rs
// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/gtk-rs/gir-files) // DO NOT EDIT use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct Document(Interface<ffi::AtkDocument, ffi::AtkDocumentIface>); match fn { get_type => || ffi::atk_document_get_type(), } } pub const NONE_DOCUMENT: Option<&Document> = None; pub trait DocumentExt: 'static { #[doc(alias = "atk_document_get_attribute_value")] fn get_attribute_value(&self, attribute_name: &str) -> Option<glib::GString>; //#[doc(alias = "atk_document_get_attributes")] //fn get_attributes(&self) -> /*Ignored*/Option<AttributeSet>; #[doc(alias = "atk_document_get_current_page_number")] fn get_current_page_number(&self) -> i32; //#[doc(alias = "atk_document_get_document")] //fn get_document(&self) -> /*Unimplemented*/Option<Fundamental: Pointer>; #[doc(alias = "atk_document_get_document_type")] fn get_document_type(&self) -> Option<glib::GString>; #[doc(alias = "atk_document_get_page_count")] fn get_page_count(&self) -> i32; #[doc(alias = "atk_document_set_attribute_value")] fn set_attribute_value(&self, attribute_name: &str, attribute_value: &str) -> bool; fn connect_load_complete<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_load_stopped<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_page_changed<F: Fn(&Self, i32) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_reload<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<Document>> DocumentExt for O { fn get_attribute_value(&self, attribute_name: &str) -> Option<glib::GString> { unsafe { from_glib_none(ffi::atk_document_get_attribute_value( self.as_ref().to_glib_none().0, attribute_name.to_glib_none().0, )) } } //fn get_attributes(&self) -> /*Ignored*/Option<AttributeSet> { // unsafe { TODO: call ffi:atk_document_get_attributes() } //} fn get_current_page_number(&self) -> i32 { unsafe { ffi::atk_document_get_current_page_number(self.as_ref().to_glib_none().0) } } //fn get_document(&self) -> /*Unimplemented*/Option<Fundamental: Pointer> { // unsafe { TODO: call ffi:atk_document_get_document() } //} fn get_document_type(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::atk_document_get_document_type( self.as_ref().to_glib_none().0, )) } } fn get_page_count(&self) -> i32 { unsafe { ffi::atk_document_get_page_count(self.as_ref().to_glib_none().0) } } fn set_attribute_value(&self, attribute_name: &str, attribute_value: &str) -> bool { unsafe { from_glib(ffi::atk_document_set_attribute_value( self.as_ref().to_glib_none().0, attribute_name.to_glib_none().0, attribute_value.to_glib_none().0, )) } } fn connect_load_complete<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn load_complete_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::AtkDocument, f: glib::ffi::gpointer, ) where P: IsA<Document>, { let f: &F = &*(f as *const F); f(&Document::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"load-complete\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( load_complete_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_load_stopped<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn load_stopped_trampoline<P, F: Fn(&P) + 
'static>( this: *mut ffi::AtkDocument, f: glib::ffi::gpointer, ) where P: IsA<Document>,
unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"load-stopped\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( load_stopped_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_page_changed<F: Fn(&Self, i32) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn page_changed_trampoline<P, F: Fn(&P, i32) + 'static>( this: *mut ffi::AtkDocument, page_number: libc::c_int, f: glib::ffi::gpointer, ) where P: IsA<Document>, { let f: &F = &*(f as *const F); f( &Document::from_glib_borrow(this).unsafe_cast_ref(), page_number, ) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"page-changed\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( page_changed_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_reload<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn reload_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::AtkDocument, f: glib::ffi::gpointer, ) where P: IsA<Document>, { let f: &F = &*(f as *const F); f(&Document::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"reload\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( reload_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } } impl fmt::Display for Document { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("Document") } }
{ let f: &F = &*(f as *const F); f(&Document::from_glib_borrow(this).unsafe_cast_ref()) }
14-closure.rs
#![allow(warnings)] // NOT RECOMMENDED fn
() { let add = |x: i32, y: i32| -> i32 { x + y }; let sub = |x, y| x-y; dbg!(add(1, 2)); dbg!(sub(1, 2)); let name = String::from("Proful"); let hi = || println!("Hi, {}", name); //let hi = move || println!("Hi, {}", name); hi(); dbg!(&name); let mut name = String::from("Proful"); let mut hello = || { name.push_str(" Sadangi"); println!("Hello, {}", name); }; hello(); dbg!(&name); //- Fn: the closure captures by reference (&T) //- FnMut: the closure captures by mutable reference (&mut T) //- FnOnce: the closure captures by value (T) let hi = || println!("Hi"); fn hi_there() { println!("Hi there"); } fn call_me(f: impl Fn()) { f(); } call_me(hi); call_me(hi_there); dbg!(void()); diverges(); } fn diverges() -> ! { panic!("This function never returns!"); } fn void() -> (){ println!("This function returns nothing"); () }
main
string_token.rs
use crate::{token, FormatElement, FormatResult, Formatter, ToFormatElement}; use rslint_parser::ast::String as JsString; impl ToFormatElement for JsString { fn to_format_element(&self, _formatter: &Formatter) -> FormatResult<FormatElement> { let mut content = self.to_string(); // uses single quotes if content.starts_with('\'') { content.replace_range(0..1, "\""); content.replace_range(content.len() - 1..content.len(), "\"");
Ok(token(content.as_str())) } }
}
constants.rs
// Game engine name. pub(crate) static GAME_ENGINE_FULL_NAME: &str = "冷泉院桐香 v2.50a"; pub(crate) static GAME_ENGINE_NAME: &str = "ReizeiinTohka"; pub(crate) static NKTS_CONFIG_ENV: &str = "TOHKA_CONFIG"; pub(crate) static NKTS_CONFIG_DEFAULT_PATH: &str = "ReizeiinTohka.json"; // nukitashi uses 1600x900 as a global resolution pub(crate) const GAME_WINDOW_WIDTH: u32 = 1600; pub(crate) const GAME_WINDOW_HEIGHT: u32 = 900; pub(crate) const TOTAL_LAYERS: i32 = 25;
// font pub(crate) static FONT_PATH: &str = "NUKITASHI_D.WAR/ROUNDED-X-MGENPLUS-1M.TTF"; pub(crate) static LRU_CACHE_CAPACITY: usize = 20;
__init__.py
import asyncio import importlib import logging import pkgutil from abc import ABC, abstractmethod from collections import OrderedDict from types import FunctionType def get_package_modules(package): package_modules = [] for importer, module_name, is_package in pkgutil.iter_modules(package.__path__): full_module_name = f'{package.__name__}.{module_name}' subpackage_object = importlib.import_module(full_module_name) if is_package:
package_modules.append(subpackage_object) return package_modules class _AbstractManager(dict): def __init__(self, server): self.server = server self.logger = logging.getLogger('mystic') super().__init__() @abstractmethod async def setup(self, module): """Setup manager class""" @abstractmethod async def load(self, module): """Loads entries from module""" class ITable(ABC): """ All table game logic classes must implement this interface. """ @abstractmethod def make_move(self, *args): """Tells logic a move has been made.""" @abstractmethod def is_valid_move(self, *args): """Returns true if the move is valid.""" @abstractmethod def get_string(self): """Returns string representation of the game.""" class IWaddle(ABC): """ All waddle game logic classes must implement this interface. """ @property @abstractmethod def room_id(self): """External ID of waddle game room.""" def __init__(self, waddle): self.penguins = list(waddle.penguins) self.seats = waddle.seats async def start(self): room_id = type(self).room_id for penguin in self.penguins: penguin.waddle = self await penguin.join_room(penguin.server.rooms[room_id]) async def remove_penguin(self, p): self.penguins[self.penguins.index(p)] = None p.waddle = None async def send_xt(self, *data, f=None): for penguin in filter(f, self.penguins): if penguin is not None: await penguin.send_xt(*data) def get_seat_id(self, p): return self.penguins.index(p) class PenguinStringCompiler(OrderedDict): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def __setitem__(self, key, compiler_method): assert type(compiler_method) == FunctionType super().__setitem__(key, compiler_method) async def compile(self, p): compiler_method_results = [] for compiler_method in self.values(): if asyncio.iscoroutinefunction(compiler_method): compiler_method_result = await compiler_method(p) else: compiler_method_result = compiler_method(p) compiler_method_results.append(str(compiler_method_result)) compiler_result = '|'.join(compiler_method_results) return compiler_result @classmethod def attribute_by_name(cls, attribute_name): async def attribute_method(p): return getattr(p, attribute_name) or 0 return attribute_method @classmethod def custom_attribute_by_name(cls, attribute_name): async def attribute_method(p): return p.get_custom_attribute(attribute_name, '') return attribute_method @classmethod def setup_default_builder(cls, string_builder): string_builder.update({ 'ID': PenguinStringCompiler.attribute_by_name('id'), 'Nickname': PenguinStringCompiler.attribute_by_name('nickname'), 'Approval': PenguinStringCompiler.attribute_by_name('approval'), 'Color': PenguinStringCompiler.attribute_by_name('color'), 'Head': PenguinStringCompiler.attribute_by_name('head'), 'Face': PenguinStringCompiler.attribute_by_name('face'), 'Neck': PenguinStringCompiler.attribute_by_name('neck'), 'Body': PenguinStringCompiler.attribute_by_name('body'), 'Hand': PenguinStringCompiler.attribute_by_name('hand'), 'Feet': PenguinStringCompiler.attribute_by_name('feet'), 'Flag': PenguinStringCompiler.attribute_by_name('flag'), 'Photo': PenguinStringCompiler.attribute_by_name('photo'), 'X': PenguinStringCompiler.attribute_by_name('x'), 'Y': PenguinStringCompiler.attribute_by_name('y'), 'Frame': PenguinStringCompiler.attribute_by_name('frame'), 'Member': PenguinStringCompiler.attribute_by_name('member'), 'MemberDays': PenguinStringCompiler.attribute_by_name('membership_days_total'), 'Avatar': PenguinStringCompiler.attribute_by_name('avatar'), 'PenguinState': 
PenguinStringCompiler.attribute_by_name('penguin_state'), 'PartyState': PenguinStringCompiler.attribute_by_name('party_state'), 'PuffleState': PenguinStringCompiler.attribute_by_name('puffle_state') }) @classmethod def setup_anonymous_default_builder(cls, string_builder): string_builder.update({ 'ID': PenguinStringCompiler.attribute_by_name('id'), 'Nickname': PenguinStringCompiler.attribute_by_name('nickname'), 'Approval': PenguinStringCompiler.attribute_by_name('approval'), 'Color': PenguinStringCompiler.attribute_by_name('color'), 'Head': PenguinStringCompiler.attribute_by_name('head'), 'Face': PenguinStringCompiler.attribute_by_name('face'), 'Neck': PenguinStringCompiler.attribute_by_name('neck'), 'Body': PenguinStringCompiler.attribute_by_name('body'), 'Hand': PenguinStringCompiler.attribute_by_name('hand'), 'Feet': PenguinStringCompiler.attribute_by_name('feet'), 'Flag': PenguinStringCompiler.attribute_by_name('flag'), 'Photo': PenguinStringCompiler.attribute_by_name('photo') })
sub_package_modules = get_package_modules(subpackage_object) package_modules = package_modules + sub_package_modules
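PenguinStringCompiler above is just an OrderedDict whose values are (possibly async) functions; compile() evaluates each one against a penguin in insertion order and joins the string results with '|'. A small illustration of that pattern, where a SimpleNamespace stands in for the real penguin object (all values hypothetical, and PenguinStringCompiler from the module above is assumed to be in scope):

# Illustration of the compile pattern above: registered methods run in
# insertion order, coroutines are awaited, and results are joined with '|'.
import asyncio
from types import SimpleNamespace

compiler = PenguinStringCompiler()
compiler.update({
    'ID': PenguinStringCompiler.attribute_by_name('id'),
    'Nickname': PenguinStringCompiler.attribute_by_name('nickname'),
})

# SimpleNamespace is a hypothetical stand-in for the real penguin object.
penguin = SimpleNamespace(id=101, nickname='Rockhopper')
print(asyncio.run(compiler.compile(penguin)))  # -> 101|Rockhopper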
qualify_path.rs
use std::iter; use hir::AsName; use ide_db::helpers::mod_path_to_ast; use ide_db::RootDatabase; use syntax::{ ast, ast::{make, ArgListOwner}, AstNode, }; use test_utils::mark; use crate::{ assist_context::{AssistContext, Assists}, utils::import_assets::{ImportAssets, ImportCandidate}, AssistId, AssistKind, GroupLabel, }; // Assist: qualify_path // // If the name is unresolved, provides all possible qualified paths for it. // // ``` // fn main() { // let map = HashMap$0::new(); // } // # pub mod std { pub mod collections { pub struct HashMap { } } } // ``` // -> // ``` // fn main() { // let map = std::collections::HashMap::new(); // } // # pub mod std { pub mod collections { pub struct HashMap { } } } // ``` pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { let import_assets = if let Some(path_under_caret) = ctx.find_node_at_offset_with_descend::<ast::Path>() { ImportAssets::for_regular_path(path_under_caret, &ctx.sema) } else if let Some(method_under_caret) = ctx.find_node_at_offset_with_descend::<ast::MethodCallExpr>() { ImportAssets::for_method_call(method_under_caret, &ctx.sema) } else { None }?; let proposed_imports = import_assets.search_for_relative_paths(&ctx.sema); if proposed_imports.is_empty() { return None; } let candidate = import_assets.import_candidate(); let range = ctx.sema.original_range(import_assets.syntax_under_caret()).range; let qualify_candidate = match candidate { ImportCandidate::QualifierStart(_) => { mark::hit!(qualify_path_qualifier_start); let path = ast::Path::cast(import_assets.syntax_under_caret().clone())?; let (prev_segment, segment) = (path.qualifier()?.segment()?, path.segment()?); QualifyCandidate::QualifierStart(segment, prev_segment.generic_arg_list()) } ImportCandidate::UnqualifiedName(_) => { mark::hit!(qualify_path_unqualified_name); let path = ast::Path::cast(import_assets.syntax_under_caret().clone())?; let generics = path.segment()?.generic_arg_list(); QualifyCandidate::UnqualifiedName(generics) } ImportCandidate::TraitAssocItem(_) => { mark::hit!(qualify_path_trait_assoc_item); let path = ast::Path::cast(import_assets.syntax_under_caret().clone())?; let (qualifier, segment) = (path.qualifier()?, path.segment()?); QualifyCandidate::TraitAssocItem(qualifier, segment) } ImportCandidate::TraitMethod(_) => { mark::hit!(qualify_path_trait_method); let mcall_expr = ast::MethodCallExpr::cast(import_assets.syntax_under_caret().clone())?; QualifyCandidate::TraitMethod(ctx.sema.db, mcall_expr) } }; let group_label = group_label(candidate); for (import, item) in proposed_imports { acc.add_group( &group_label, AssistId("qualify_path", AssistKind::QuickFix), label(candidate, &import), range, |builder| { qualify_candidate.qualify( |replace_with: String| builder.replace(range, replace_with), import, item, ) }, ); } Some(()) } enum QualifyCandidate<'db> { QualifierStart(ast::PathSegment, Option<ast::GenericArgList>), UnqualifiedName(Option<ast::GenericArgList>), TraitAssocItem(ast::Path, ast::PathSegment), TraitMethod(&'db RootDatabase, ast::MethodCallExpr), } impl QualifyCandidate<'_> { fn qualify(&self, mut replacer: impl FnMut(String), import: hir::ModPath, item: hir::ItemInNs) { let import = mod_path_to_ast(&import); match self { QualifyCandidate::QualifierStart(segment, generics) => { let generics = generics.as_ref().map_or_else(String::new, ToString::to_string); replacer(format!("{}{}::{}", import, generics, segment)); } QualifyCandidate::UnqualifiedName(generics) => { let generics = 
generics.as_ref().map_or_else(String::new, ToString::to_string); replacer(format!("{}{}", import.to_string(), generics)); } QualifyCandidate::TraitAssocItem(qualifier, segment) => { replacer(format!("<{} as {}>::{}", qualifier, import, segment)); } &QualifyCandidate::TraitMethod(db, ref mcall_expr) => { Self::qualify_trait_method(db, mcall_expr, replacer, import, item); } } } fn qualify_trait_method( db: &RootDatabase, mcall_expr: &ast::MethodCallExpr, mut replacer: impl FnMut(String), import: ast::Path, item: hir::ItemInNs, ) -> Option<()> { let receiver = mcall_expr.receiver()?; let trait_method_name = mcall_expr.name_ref()?; let generics = mcall_expr.generic_arg_list().as_ref().map_or_else(String::new, ToString::to_string); let arg_list = mcall_expr.arg_list().map(|arg_list| arg_list.args()); let trait_ = item_as_trait(item)?; let method = find_trait_method(db, trait_, &trait_method_name)?; if let Some(self_access) = method.self_param(db).map(|sp| sp.access(db)) { let receiver = match self_access { hir::Access::Shared => make::expr_ref(receiver, false), hir::Access::Exclusive => make::expr_ref(receiver, true), hir::Access::Owned => receiver, }; replacer(format!( "{}::{}{}{}", import, trait_method_name, generics, match arg_list { Some(args) => make::arg_list(iter::once(receiver).chain(args)), None => make::arg_list(iter::once(receiver)), } )); } Some(()) } } fn find_trait_method( db: &RootDatabase, trait_: hir::Trait, trait_method_name: &ast::NameRef, ) -> Option<hir::Function> { if let Some(hir::AssocItem::Function(method)) = trait_.items(db).into_iter().find(|item: &hir::AssocItem| { item.name(db).map(|name| name == trait_method_name.as_name()).unwrap_or(false) }) { Some(method) } else { None } } fn item_as_trait(item: hir::ItemInNs) -> Option<hir::Trait> { if let hir::ModuleDef::Trait(trait_) = hir::ModuleDef::from(item.as_module_def_id()?) { Some(trait_) } else { None } } fn group_label(candidate: &ImportCandidate) -> GroupLabel { let name = match candidate { ImportCandidate::UnqualifiedName(it) | ImportCandidate::QualifierStart(it) => &it.name, ImportCandidate::TraitAssocItem(it) | ImportCandidate::TraitMethod(it) => &it.name, }; GroupLabel(format!("Qualify {}", name)) } fn label(candidate: &ImportCandidate, import: &hir::ModPath) -> String { match candidate { ImportCandidate::UnqualifiedName(_) => format!("Qualify as `{}`", &import), ImportCandidate::QualifierStart(_) => format!("Qualify with `{}`", &import), ImportCandidate::TraitAssocItem(_) => format!("Qualify `{}`", &import), ImportCandidate::TraitMethod(_) => format!("Qualify with cast as `{}`", &import), } } #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; use super::*; #[test] fn applicable_when_found_an_import_partial() { mark::check!(qualify_path_unqualified_name); check_assist( qualify_path, r" mod std { pub mod fmt { pub struct Formatter; } } use std::fmt; $0Formatter ", r" mod std { pub mod fmt { pub struct Formatter; } } use std::fmt; fmt::Formatter ", ); } #[test] fn applicable_when_found_an_import() { check_assist( qualify_path, r" $0PubStruct pub mod PubMod { pub struct PubStruct; } ", r" PubMod::PubStruct pub mod PubMod { pub struct PubStruct; } ", ); } #[test] fn applicable_in_macros() { check_assist( qualify_path, r" macro_rules! foo { ($i:ident) => { fn foo(a: $i) {} } } foo!(Pub$0Struct); pub mod PubMod { pub struct PubStruct; } ", r" macro_rules! 
foo { ($i:ident) => { fn foo(a: $i) {} } } foo!(PubMod::PubStruct); pub mod PubMod { pub struct PubStruct; } ", ); } #[test] fn applicable_when_found_multiple_imports() { check_assist( qualify_path, r" PubSt$0ruct pub mod PubMod1 { pub struct PubStruct; } pub mod PubMod2 { pub struct PubStruct; } pub mod PubMod3 { pub struct PubStruct; } ", r" PubMod3::PubStruct pub mod PubMod1 { pub struct PubStruct; } pub mod PubMod2 { pub struct PubStruct; } pub mod PubMod3 { pub struct PubStruct; } ", ); } #[test] fn not_applicable_for_already_imported_types() { check_assist_not_applicable( qualify_path, r" use PubMod::PubStruct; PubStruct$0 pub mod PubMod { pub struct PubStruct; } ", ); } #[test] fn not_applicable_for_types_with_private_paths() { check_assist_not_applicable( qualify_path, r" PrivateStruct$0 pub mod PubMod { struct PrivateStruct; } ", ); } #[test] fn not_applicable_when_no_imports_found() { check_assist_not_applicable( qualify_path, " PubStruct$0", ); } #[test] fn not_applicable_in_import_statements() { check_assist_not_applicable( qualify_path, r" use PubStruct$0; pub mod PubMod { pub struct PubStruct; }", ); } #[test] fn qualify_function() { check_assist( qualify_path, r" test_function$0 pub mod PubMod { pub fn test_function() {}; } ", r" PubMod::test_function pub mod PubMod { pub fn test_function() {}; } ", ); } #[test] fn qualify_macro() { check_assist( qualify_path, r" //- /lib.rs crate:crate_with_macro #[macro_export] macro_rules! foo { () => () } //- /main.rs crate:main deps:crate_with_macro fn main() { foo$0 } ", r" fn main() { crate_with_macro::foo } ", ); } #[test] fn qualify_path_target() { check_assist_target( qualify_path, r" struct AssistInfo { group_label: Option<$0GroupLabel>, } mod m { pub struct GroupLabel; } ", "GroupLabel", ) } #[test] fn not_applicable_when_path_start_is_imported() { check_assist_not_applicable( qualify_path, r" pub mod mod1 { pub mod mod2 { pub mod mod3 { pub struct TestStruct; } } } use mod1::mod2; fn main() { mod2::mod3::TestStruct$0 } ", ); } #[test] fn not_applicable_for_imported_function() { check_assist_not_applicable( qualify_path, r" pub mod test_mod { pub fn test_function() {} } use test_mod::test_function; fn main() { test_function$0 } ", ); } #[test] fn associated_struct_function() { check_assist( qualify_path, r" mod test_mod { pub struct TestStruct {} impl TestStruct { pub fn test_function() {}
} fn main() { TestStruct::test_function$0 } ", r" mod test_mod { pub struct TestStruct {} impl TestStruct { pub fn test_function() {} } } fn main() { test_mod::TestStruct::test_function } ", ); } #[test] fn associated_struct_const() { mark::check!(qualify_path_qualifier_start); check_assist( qualify_path, r" mod test_mod { pub struct TestStruct {} impl TestStruct { const TEST_CONST: u8 = 42; } } fn main() { TestStruct::TEST_CONST$0 } ", r" mod test_mod { pub struct TestStruct {} impl TestStruct { const TEST_CONST: u8 = 42; } } fn main() { test_mod::TestStruct::TEST_CONST } ", ); } #[test] fn associated_trait_function() { check_assist( qualify_path, r" mod test_mod { pub trait TestTrait { fn test_function(); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_function() {} } } fn main() { test_mod::TestStruct::test_function$0 } ", r" mod test_mod { pub trait TestTrait { fn test_function(); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_function() {} } } fn main() { <test_mod::TestStruct as test_mod::TestTrait>::test_function } ", ); } #[test] fn not_applicable_for_imported_trait_for_function() { check_assist_not_applicable( qualify_path, r" mod test_mod { pub trait TestTrait { fn test_function(); } pub trait TestTrait2 { fn test_function(); } pub enum TestEnum { One, Two, } impl TestTrait2 for TestEnum { fn test_function() {} } impl TestTrait for TestEnum { fn test_function() {} } } use test_mod::TestTrait2; fn main() { test_mod::TestEnum::test_function$0; } ", ) } #[test] fn associated_trait_const() { mark::check!(qualify_path_trait_assoc_item); check_assist( qualify_path, r" mod test_mod { pub trait TestTrait { const TEST_CONST: u8; } pub struct TestStruct {} impl TestTrait for TestStruct { const TEST_CONST: u8 = 42; } } fn main() { test_mod::TestStruct::TEST_CONST$0 } ", r" mod test_mod { pub trait TestTrait { const TEST_CONST: u8; } pub struct TestStruct {} impl TestTrait for TestStruct { const TEST_CONST: u8 = 42; } } fn main() { <test_mod::TestStruct as test_mod::TestTrait>::TEST_CONST } ", ); } #[test] fn not_applicable_for_imported_trait_for_const() { check_assist_not_applicable( qualify_path, r" mod test_mod { pub trait TestTrait { const TEST_CONST: u8; } pub trait TestTrait2 { const TEST_CONST: f64; } pub enum TestEnum { One, Two, } impl TestTrait2 for TestEnum { const TEST_CONST: f64 = 42.0; } impl TestTrait for TestEnum { const TEST_CONST: u8 = 42; } } use test_mod::TestTrait2; fn main() { test_mod::TestEnum::TEST_CONST$0; } ", ) } #[test] fn trait_method() { mark::check!(qualify_path_trait_method); check_assist( qualify_path, r" mod test_mod { pub trait TestTrait { fn test_method(&self); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_method(&self) {} } } fn main() { let test_struct = test_mod::TestStruct {}; test_struct.test_meth$0od() } ", r" mod test_mod { pub trait TestTrait { fn test_method(&self); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_method(&self) {} } } fn main() { let test_struct = test_mod::TestStruct {}; test_mod::TestTrait::test_method(&test_struct) } ", ); } #[test] fn trait_method_multi_params() { check_assist( qualify_path, r" mod test_mod { pub trait TestTrait { fn test_method(&self, test: i32); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_method(&self, test: i32) {} } } fn main() { let test_struct = test_mod::TestStruct {}; test_struct.test_meth$0od(42) } ", r" mod test_mod { pub trait TestTrait { fn test_method(&self, test: i32); } pub struct TestStruct {} 
impl TestTrait for TestStruct { fn test_method(&self, test: i32) {} } } fn main() { let test_struct = test_mod::TestStruct {}; test_mod::TestTrait::test_method(&test_struct, 42) } ", ); } #[test] fn trait_method_consume() { check_assist( qualify_path, r" mod test_mod { pub trait TestTrait { fn test_method(self); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_method(self) {} } } fn main() { let test_struct = test_mod::TestStruct {}; test_struct.test_meth$0od() } ", r" mod test_mod { pub trait TestTrait { fn test_method(self); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_method(self) {} } } fn main() { let test_struct = test_mod::TestStruct {}; test_mod::TestTrait::test_method(test_struct) } ", ); } #[test] fn trait_method_cross_crate() { check_assist( qualify_path, r" //- /main.rs crate:main deps:dep fn main() { let test_struct = dep::test_mod::TestStruct {}; test_struct.test_meth$0od() } //- /dep.rs crate:dep pub mod test_mod { pub trait TestTrait { fn test_method(&self); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_method(&self) {} } } ", r" fn main() { let test_struct = dep::test_mod::TestStruct {}; dep::test_mod::TestTrait::test_method(&test_struct) } ", ); } #[test] fn assoc_fn_cross_crate() { check_assist( qualify_path, r" //- /main.rs crate:main deps:dep fn main() { dep::test_mod::TestStruct::test_func$0tion } //- /dep.rs crate:dep pub mod test_mod { pub trait TestTrait { fn test_function(); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_function() {} } } ", r" fn main() { <dep::test_mod::TestStruct as dep::test_mod::TestTrait>::test_function } ", ); } #[test] fn assoc_const_cross_crate() { check_assist( qualify_path, r" //- /main.rs crate:main deps:dep fn main() { dep::test_mod::TestStruct::CONST$0 } //- /dep.rs crate:dep pub mod test_mod { pub trait TestTrait { const CONST: bool; } pub struct TestStruct {} impl TestTrait for TestStruct { const CONST: bool = true; } } ", r" fn main() { <dep::test_mod::TestStruct as dep::test_mod::TestTrait>::CONST } ", ); } #[test] fn assoc_fn_as_method_cross_crate() { check_assist_not_applicable( qualify_path, r" //- /main.rs crate:main deps:dep fn main() { let test_struct = dep::test_mod::TestStruct {}; test_struct.test_func$0tion() } //- /dep.rs crate:dep pub mod test_mod { pub trait TestTrait { fn test_function(); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_function() {} } } ", ); } #[test] fn private_trait_cross_crate() { check_assist_not_applicable( qualify_path, r" //- /main.rs crate:main deps:dep fn main() { let test_struct = dep::test_mod::TestStruct {}; test_struct.test_meth$0od() } //- /dep.rs crate:dep pub mod test_mod { trait TestTrait { fn test_method(&self); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_method(&self) {} } } ", ); } #[test] fn not_applicable_for_imported_trait_for_method() { check_assist_not_applicable( qualify_path, r" mod test_mod { pub trait TestTrait { fn test_method(&self); } pub trait TestTrait2 { fn test_method(&self); } pub enum TestEnum { One, Two, } impl TestTrait2 for TestEnum { fn test_method(&self) {} } impl TestTrait for TestEnum { fn test_method(&self) {} } } use test_mod::TestTrait2; fn main() { let one = test_mod::TestEnum::One; one.test$0_method(); } ", ) } #[test] fn dep_import() { check_assist( qualify_path, r" //- /lib.rs crate:dep pub struct Struct; //- /main.rs crate:main deps:dep fn main() { Struct$0 } ", r" fn main() { dep::Struct } ", ); } #[test] fn whole_segment() { 
// Tests that only imports whose last segment matches the identifier get suggested. check_assist( qualify_path, r" //- /lib.rs crate:dep pub mod fmt { pub trait Display {} } pub fn panic_fmt() {} //- /main.rs crate:main deps:dep struct S; impl f$0mt::Display for S {} ", r" struct S; impl dep::fmt::Display for S {} ", ); } #[test] fn macro_generated() { // Tests that macro-generated items are suggested from external crates. check_assist( qualify_path, r" //- /lib.rs crate:dep macro_rules! mac { () => { pub struct Cheese; }; } mac!(); //- /main.rs crate:main deps:dep fn main() { Cheese$0; } ", r" fn main() { dep::Cheese; } ", ); } #[test] fn casing() { // Tests that differently cased names don't interfere and we only suggest the matching one. check_assist( qualify_path, r" //- /lib.rs crate:dep pub struct FMT; pub struct fmt; //- /main.rs crate:main deps:dep fn main() { FMT$0; } ", r" fn main() { dep::FMT; } ", ); } #[test] fn keep_generic_annotations() { check_assist( qualify_path, r" //- /lib.rs crate:dep pub mod generic { pub struct Thing<'a, T>(&'a T); } //- /main.rs crate:main deps:dep fn foo() -> Thin$0g<'static, ()> {} fn main() {} ", r" fn foo() -> dep::generic::Thing<'static, ()> {} fn main() {} ", ); } #[test] fn keep_generic_annotations_leading_colon() { check_assist( qualify_path, r" //- /lib.rs crate:dep pub mod generic { pub struct Thing<'a, T>(&'a T); } //- /main.rs crate:main deps:dep fn foo() -> Thin$0g::<'static, ()> {} fn main() {} ", r" fn foo() -> dep::generic::Thing::<'static, ()> {} fn main() {} ", ); } #[test] fn associated_struct_const_generic() { check_assist( qualify_path, r" mod test_mod { pub struct TestStruct<T> {} impl<T> TestStruct<T> { const TEST_CONST: u8 = 42; } } fn main() { TestStruct::<()>::TEST_CONST$0 } ", r" mod test_mod { pub struct TestStruct<T> {} impl<T> TestStruct<T> { const TEST_CONST: u8 = 42; } } fn main() { test_mod::TestStruct::<()>::TEST_CONST } ", ); } #[test] fn associated_trait_const_generic() { check_assist( qualify_path, r" mod test_mod { pub trait TestTrait { const TEST_CONST: u8; } pub struct TestStruct<T> {} impl<T> TestTrait for TestStruct<T> { const TEST_CONST: u8 = 42; } } fn main() { test_mod::TestStruct::<()>::TEST_CONST$0 } ", r" mod test_mod { pub trait TestTrait { const TEST_CONST: u8; } pub struct TestStruct<T> {} impl<T> TestTrait for TestStruct<T> { const TEST_CONST: u8 = 42; } } fn main() { <test_mod::TestStruct::<()> as test_mod::TestTrait>::TEST_CONST } ", ); } #[test] fn trait_method_generic() { check_assist( qualify_path, r" mod test_mod { pub trait TestTrait { fn test_method<T>(&self); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_method<T>(&self) {} } } fn main() { let test_struct = test_mod::TestStruct {}; test_struct.test_meth$0od::<()>() } ", r" mod test_mod { pub trait TestTrait { fn test_method<T>(&self); } pub struct TestStruct {} impl TestTrait for TestStruct { fn test_method<T>(&self) {} } } fn main() { let test_struct = test_mod::TestStruct {}; test_mod::TestTrait::test_method::<()>(&test_struct) } ", ); } }
}
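// Editorial sketch (not part of the rust-analyzer fixtures above): why the
// assist emits `<Type as Trait>::method(receiver)` instead of the shorter
// `Type::method(receiver)`. When several traits in scope declare a method
// with the same name, only the fully-qualified form is unambiguous. All
// names below are made up for illustration.
trait Loud { fn speak(&self) -> &'static str; }
trait Quiet { fn speak(&self) -> &'static str; }
struct Dog;
impl Loud for Dog { fn speak(&self) -> &'static str { "WOOF" } }
impl Quiet for Dog { fn speak(&self) -> &'static str { "woof" } }

fn main() {
    let dog = Dog;
    // `dog.speak()` would fail with "multiple applicable items in scope";
    // the qualified paths resolve cleanly:
    assert_eq!(<Dog as Loud>::speak(&dog), "WOOF");
    assert_eq!(<Dog as Quiet>::speak(&dog), "woof");
}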
thread.rs
use crate::cmp; use crate::ffi::CStr; use crate::io; use crate::mem; use crate::ptr; use crate::sys::os; use crate::time::Duration; use crate::sys_common::thread::*; #[cfg(not(target_os = "l4re"))] pub const DEFAULT_MIN_STACK_SIZE: usize = 2 * 1024 * 1024; #[cfg(target_os = "l4re")] pub const DEFAULT_MIN_STACK_SIZE: usize = 1024 * 1024; pub struct Thread { id: libc::pthread_t, } // Some platforms may have pthread_t as a pointer in which case we still want // a thread to be Send/Sync unsafe impl Send for Thread {} unsafe impl Sync for Thread {} // The pthread_attr_setstacksize symbol doesn't exist in the emscripten libc, // so we have to not link to it to satisfy emcc's ERROR_ON_UNDEFINED_SYMBOLS. #[cfg(not(target_os = "emscripten"))] unsafe fn pthread_attr_setstacksize(attr: *mut libc::pthread_attr_t, stack_size: libc::size_t) -> libc::c_int { libc::pthread_attr_setstacksize(attr, stack_size) } #[cfg(target_os = "emscripten")] unsafe fn pthread_attr_setstacksize(_attr: *mut libc::pthread_attr_t, _stack_size: libc::size_t) -> libc::c_int { panic!() } impl Thread { // unsafe: see thread::Builder::spawn_unchecked for safety requirements pub unsafe fn new(stack: usize, p: Box<dyn FnOnce()>) -> io::Result<Thread> { let p = box p; let mut native: libc::pthread_t = mem::zeroed(); let mut attr: libc::pthread_attr_t = mem::zeroed(); assert_eq!(libc::pthread_attr_init(&mut attr), 0); let stack_size = cmp::max(stack, min_stack_size(&attr)); match pthread_attr_setstacksize(&mut attr, stack_size) { 0 => {} n => { assert_eq!(n, libc::EINVAL); // EINVAL means |stack_size| is either too small or not a // multiple of the system page size. Because it's definitely // >= PTHREAD_STACK_MIN, it must be an alignment issue. // Round up to the nearest page and try again. let page_size = os::page_size(); let stack_size = (stack_size + page_size - 1) & (-(page_size as isize - 1) as usize - 1); assert_eq!(libc::pthread_attr_setstacksize(&mut attr, stack_size), 0); } }; let ret = libc::pthread_create(&mut native, &attr, thread_start, &*p as *const _ as *mut _); assert_eq!(libc::pthread_attr_destroy(&mut attr), 0); return if ret != 0 { Err(io::Error::from_raw_os_error(ret)) } else { mem::forget(p); // ownership passed to pthread_create Ok(Thread { id: native }) }; extern fn thread_start(main: *mut libc::c_void) -> *mut libc::c_void { unsafe { start_thread(main as *mut u8); } ptr::null_mut() } } pub fn yield_now() { let ret = unsafe { libc::sched_yield() }; debug_assert_eq!(ret, 0); } #[cfg(any(target_os = "linux", target_os = "android"))] pub fn set_name(name: &CStr) { const PR_SET_NAME: libc::c_int = 15; // pthread wrapper only appeared in glibc 2.12, so we use syscall // directly. 
unsafe { libc::prctl(PR_SET_NAME, name.as_ptr() as libc::c_ulong, 0, 0, 0); } } #[cfg(any(target_os = "freebsd", target_os = "dragonfly", target_os = "bitrig", target_os = "openbsd"))] pub fn set_name(name: &CStr) { unsafe { libc::pthread_set_name_np(libc::pthread_self(), name.as_ptr()); } } #[cfg(any(target_os = "macos", target_os = "ios"))] pub fn set_name(name: &CStr) { unsafe { libc::pthread_setname_np(name.as_ptr()); } } #[cfg(target_os = "netbsd")] pub fn set_name(name: &CStr) { use crate::ffi::CString; let cname = CString::new(&b"%s"[..]).unwrap(); unsafe { libc::pthread_setname_np(libc::pthread_self(), cname.as_ptr(), name.as_ptr() as *mut libc::c_void); } } #[cfg(any(target_env = "newlib", target_os = "solaris", target_os = "haiku", target_os = "l4re", target_os = "emscripten", target_os = "hermit"))] pub fn set_name(_name: &CStr) { // Newlib, Illumos, Haiku, and Emscripten have no way to set a thread name. } #[cfg(target_os = "fuchsia")] pub fn set_name(_name: &CStr) { // FIXME: determine whether Fuchsia has a way to set a thread name. } pub fn sleep(dur: Duration) { let mut secs = dur.as_secs(); let mut nsecs = dur.subsec_nanos() as _; // If we're awoken with a signal then the return value will be -1 and // nanosleep will fill in `ts` with the remaining time. unsafe { while secs > 0 || nsecs > 0 { let mut ts = libc::timespec { tv_sec: cmp::min(libc::time_t::max_value() as u64, secs) as libc::time_t, tv_nsec: nsecs, }; secs -= ts.tv_sec as u64; if libc::nanosleep(&ts, &mut ts) == -1 { assert_eq!(os::errno(), libc::EINTR); secs += ts.tv_sec as u64; nsecs = ts.tv_nsec; } else { nsecs = 0; } } } } pub fn join(self) { unsafe { let ret = libc::pthread_join(self.id, ptr::null_mut()); mem::forget(self); assert!(ret == 0, "failed to join thread: {}", io::Error::from_raw_os_error(ret)); } } pub fn id(&self) -> libc::pthread_t
pub fn into_id(self) -> libc::pthread_t { let id = self.id; mem::forget(self); id } } impl Drop for Thread { fn drop(&mut self) { let ret = unsafe { libc::pthread_detach(self.id) }; debug_assert_eq!(ret, 0); } } #[cfg(all(not(all(target_os = "linux", not(target_env = "musl"))), not(target_os = "freebsd"), not(target_os = "macos"), not(target_os = "bitrig"), not(all(target_os = "netbsd", not(target_vendor = "rumprun"))), not(target_os = "openbsd"), not(target_os = "solaris")))] #[cfg_attr(test, allow(dead_code))] pub mod guard { use crate::ops::Range; pub type Guard = Range<usize>; pub unsafe fn current() -> Option<Guard> { None } pub unsafe fn init() -> Option<Guard> { None } } #[cfg(any(all(target_os = "linux", not(target_env = "musl")), target_os = "freebsd", target_os = "macos", target_os = "bitrig", all(target_os = "netbsd", not(target_vendor = "rumprun")), target_os = "openbsd", target_os = "solaris"))] #[cfg_attr(test, allow(dead_code))] pub mod guard { use libc::{mmap, mprotect}; use libc::{PROT_NONE, PROT_READ, PROT_WRITE, MAP_PRIVATE, MAP_ANON, MAP_FAILED, MAP_FIXED}; use crate::ops::Range; use crate::sys::os; // This is initialized in init() and only read from after static mut PAGE_SIZE: usize = 0; pub type Guard = Range<usize>; #[cfg(target_os = "solaris")] unsafe fn get_stack_start() -> Option<*mut libc::c_void> { let mut current_stack: libc::stack_t = crate::mem::zeroed(); assert_eq!(libc::stack_getbounds(&mut current_stack), 0); Some(current_stack.ss_sp) } #[cfg(target_os = "macos")] unsafe fn get_stack_start() -> Option<*mut libc::c_void> { let stackaddr = libc::pthread_get_stackaddr_np(libc::pthread_self()) as usize - libc::pthread_get_stacksize_np(libc::pthread_self()); Some(stackaddr as *mut libc::c_void) } #[cfg(any(target_os = "openbsd", target_os = "bitrig"))] unsafe fn get_stack_start() -> Option<*mut libc::c_void> { let mut current_stack: libc::stack_t = crate::mem::zeroed(); assert_eq!(libc::pthread_stackseg_np(libc::pthread_self(), &mut current_stack), 0); let extra = if cfg!(target_os = "bitrig") {3} else {1} * PAGE_SIZE; let stackaddr = if libc::pthread_main_np() == 1 { // main thread current_stack.ss_sp as usize - current_stack.ss_size + extra } else { // new thread current_stack.ss_sp as usize - current_stack.ss_size }; Some(stackaddr as *mut libc::c_void) } #[cfg(any(target_os = "android", target_os = "freebsd", target_os = "linux", target_os = "netbsd", target_os = "l4re"))] unsafe fn get_stack_start() -> Option<*mut libc::c_void> { let mut ret = None; let mut attr: libc::pthread_attr_t = crate::mem::zeroed(); assert_eq!(libc::pthread_attr_init(&mut attr), 0); #[cfg(target_os = "freebsd")] let e = libc::pthread_attr_get_np(libc::pthread_self(), &mut attr); #[cfg(not(target_os = "freebsd"))] let e = libc::pthread_getattr_np(libc::pthread_self(), &mut attr); if e == 0 { let mut stackaddr = crate::ptr::null_mut(); let mut stacksize = 0; assert_eq!(libc::pthread_attr_getstack(&attr, &mut stackaddr, &mut stacksize), 0); ret = Some(stackaddr); } assert_eq!(libc::pthread_attr_destroy(&mut attr), 0); ret } // Precondition: PAGE_SIZE is initialized. unsafe fn get_stack_start_aligned() -> Option<*mut libc::c_void> { assert!(PAGE_SIZE != 0); let stackaddr = get_stack_start()?; // Ensure stackaddr is page aligned! A parent process might // have reset RLIMIT_STACK to be non-page aligned. 
The // pthread_attr_getstack() reports the usable stack area // stackaddr < stackaddr + stacksize, so if stackaddr is not // page-aligned, calculate the fix such that stackaddr < // new_page_aligned_stackaddr < stackaddr + stacksize let remainder = (stackaddr as usize) % PAGE_SIZE; Some(if remainder == 0 { stackaddr } else { ((stackaddr as usize) + PAGE_SIZE - remainder) as *mut libc::c_void }) } pub unsafe fn init() -> Option<Guard> { PAGE_SIZE = os::page_size(); let stackaddr = get_stack_start_aligned()?; if cfg!(target_os = "linux") { // Linux doesn't allocate the whole stack right away, and // the kernel has its own stack-guard mechanism to fault // when growing too close to an existing mapping. If we map // our own guard, then the kernel starts enforcing a rather // large gap above that, rendering much of the possible // stack space useless. See #43052. // // Instead, we'll just note where we expect rlimit to start // faulting, so our handler can report "stack overflow", and // trust that the kernel's own stack guard will work. let stackaddr = stackaddr as usize; Some(stackaddr - PAGE_SIZE..stackaddr) } else { // Reallocate the last page of the stack. // This ensures SIGBUS will be raised on // stack overflow. // Systems which enforce strict PAX MPROTECT do not allow // to mprotect() a mapping with less restrictive permissions // than the initial mmap() used, so we mmap() here with // read/write permissions and only then mprotect() it to // no permissions at all. See issue #50313. let result = mmap(stackaddr, PAGE_SIZE, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANON | MAP_FIXED, -1, 0); if result != stackaddr || result == MAP_FAILED { panic!("failed to allocate a guard page"); } let result = mprotect(stackaddr, PAGE_SIZE, PROT_NONE); if result != 0 { panic!("failed to protect the guard page"); } let guardaddr = stackaddr as usize; let offset = if cfg!(target_os = "freebsd") { 2 } else { 1 }; Some(guardaddr..guardaddr + offset * PAGE_SIZE) } } #[cfg(any(target_os = "macos", target_os = "bitrig", target_os = "openbsd", target_os = "solaris"))] pub unsafe fn current() -> Option<Guard> { let stackaddr = get_stack_start()? as usize; Some(stackaddr - PAGE_SIZE..stackaddr) } #[cfg(any(target_os = "android", target_os = "freebsd", target_os = "linux", target_os = "netbsd", target_os = "l4re"))] pub unsafe fn current() -> Option<Guard> { let mut ret = None; let mut attr: libc::pthread_attr_t = crate::mem::zeroed(); assert_eq!(libc::pthread_attr_init(&mut attr), 0); #[cfg(target_os = "freebsd")] let e = libc::pthread_attr_get_np(libc::pthread_self(), &mut attr); #[cfg(not(target_os = "freebsd"))] let e = libc::pthread_getattr_np(libc::pthread_self(), &mut attr); if e == 0 { let mut guardsize = 0; assert_eq!(libc::pthread_attr_getguardsize(&attr, &mut guardsize), 0); if guardsize == 0 { panic!("there is no guard page"); } let mut stackaddr = crate::ptr::null_mut(); let mut size = 0; assert_eq!(libc::pthread_attr_getstack(&attr, &mut stackaddr, &mut size), 0); let stackaddr = stackaddr as usize; ret = if cfg!(target_os = "freebsd") { // FIXME does freebsd really fault *below* the guard addr? let guardaddr = stackaddr - guardsize; Some(guardaddr - PAGE_SIZE..guardaddr) } else if cfg!(target_os = "netbsd") { Some(stackaddr - guardsize..stackaddr) } else if cfg!(all(target_os = "linux", target_env = "gnu")) { // glibc used to include the guard area within the stack, as noted in the BUGS // section of `man pthread_attr_getguardsize`. 
This has been corrected starting // with glibc 2.27, and in some distro backports, so the guard is now placed at the // end (below) the stack. There's no easy way for us to know which we have at // runtime, so we'll just match any fault in the range right above or below the // stack base to call that fault a stack overflow. Some(stackaddr - guardsize..stackaddr + guardsize) } else { Some(stackaddr..stackaddr + guardsize) }; } assert_eq!(libc::pthread_attr_destroy(&mut attr), 0); ret } } // glibc >= 2.15 has a __pthread_get_minstack() function that returns // PTHREAD_STACK_MIN plus however many bytes are needed for thread-local // storage. We need that information to avoid blowing up when a small stack // is created in an application with big thread-local storage requirements. // See #6233 for rationale and details. #[cfg(target_os = "linux")] #[allow(deprecated)] fn min_stack_size(attr: *const libc::pthread_attr_t) -> usize { weak!(fn __pthread_get_minstack(*const libc::pthread_attr_t) -> libc::size_t); match __pthread_get_minstack.get() { None => libc::PTHREAD_STACK_MIN, Some(f) => unsafe { f(attr) }, } } // No point in looking up __pthread_get_minstack() on non-glibc // platforms. #[cfg(all(not(target_os = "linux"), not(target_os = "netbsd")))] fn min_stack_size(_: *const libc::pthread_attr_t) -> usize { libc::PTHREAD_STACK_MIN } #[cfg(target_os = "netbsd")] fn min_stack_size(_: *const libc::pthread_attr_t) -> usize { 2048 // just a guess }
{ self.id }
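// Editorial note on the stack-size rounding in `Thread::new` above: the
// expression `(stack_size + page_size - 1) & (-(page_size as isize - 1) as usize - 1)`
// is the usual power-of-two round-up `(x + p - 1) & !(p - 1)` in disguise,
// since `-(p - 1) as usize - 1` wraps around to `!(p - 1)`. A minimal sketch,
// assuming (as mmap guarantees) that the page size is a power of two:
fn round_up_to_page(stack_size: usize, page_size: usize) -> usize {
    debug_assert!(page_size.is_power_of_two());
    (stack_size + page_size - 1) & !(page_size - 1)
}

#[test]
fn round_up_matches_the_wrapping_spelling() {
    let p: usize = 4096;
    for &x in &[1usize, p - 1, p, p + 1, 3 * p] {
        // the spelling used in Thread::new above
        let wrapping = (x + p - 1) & (-(p as isize - 1) as usize - 1);
        assert_eq!(round_up_to_page(x, p), wrapping);
    }
}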
module_inclusion.rs
mod utils; mod included_not { use crate::utils; use log::log_enabled; use stderrlog::StdErrLog; #[test] fn including_module_with_substring_name()
} mod included { mod b { use crate::utils; use log::log_enabled; use stderrlog::StdErrLog; #[test] fn super_and_submodule_included() { utils::init(); let mut logger = StdErrLog::new(); logger.module("module_inclusion::included"); logger.module("module_inclusion::included::a"); logger.verbosity(10); utils::set_logger(logger); assert!(log_enabled!(log::Level::Error)); } #[test] fn sub_and_supermodule_included() { utils::init(); let mut logger = StdErrLog::new(); logger.module("module_inclusion::included::a"); logger.module("module_inclusion::included"); logger.verbosity(10); utils::set_logger(logger); assert!(log_enabled!(log::Level::Error)); } } }
{ utils::init(); let mut logger = StdErrLog::new(); logger.module("module_inclusion::included"); logger.verbosity(10); utils::set_logger(logger); assert!(!log_enabled!(log::Level::Error)); }
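// Editorial sketch (module names reused from the tests above; the builder
// calls are stderrlog's API exactly as exercised by those tests). The module
// filter is a path-prefix match at `::` boundaries, which is what
// `including_module_with_substring_name` verifies: "module_inclusion::included_not"
// merely *contains* the string "included"; it is not a submodule of
// "module_inclusion::included", so nothing is logged from it.
fn enable_logging_for_included_only() {
    let mut logger = stderrlog::StdErrLog::new();
    logger.module("module_inclusion::included"); // also enables its submodules
    logger.verbosity(10); // allow every log level through
    // In a real binary this logger would now be installed as the global
    // logger, as the tests' `utils::set_logger` helper does.
}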
app.rs
use std::{
    fmt,
    io::Read,
    sync::{Arc, Mutex, PoisonError},
};

use glium::{
    SwapBuffersError,
    glutin::{
        Event,
        dpi::{LogicalPosition, LogicalSize}
    },
};
use webrender::{PipelineInfo, api::{HitTestFlags, DevicePixel}};
use euclid::TypedSize2D;
#[cfg(feature = "image_loading")]
use image::ImageError;
#[cfg(feature = "logging")]
use log::LevelFilter;
#[cfg(feature = "image_loading")]
use images::ImageType;
use {
    error::{FontError, ClipboardError},
    window::{Window, WindowId},
    css_parser::{FontId, PixelValue, LetterSpacing},
    text_cache::TextId,
    dom::UpdateScreen,
    window::FakeWindow,
    css::{FakeCss, ParsedCss},
    app_resources::AppResources,
    app_state::AppState,
    traits::Layout,
    ui_state::UiState,
    ui_description::UiDescription,
    daemon::Daemon,
};

/// Graphical application that maintains some kind of application state
pub struct App<T: Layout> {
    /// The graphical windows, indexed by ID
    windows: Vec<Window<T>>,
    /// The global application state
    pub app_state: AppState<T>,
}

/// Error returned by the `.run()` function
///
/// If the `.run()` function would panic, that would need `T` to
/// implement `Debug`, which is not necessary if we just return an error.
pub enum RuntimeError<T: Layout> {
    // Could not swap the display (drawing error)
    GlSwapError(SwapBuffersError),
    ArcUnlockError,
    MutexPoisonError(PoisonError<T>),
}

impl<T: Layout> From<PoisonError<T>> for RuntimeError<T> {
    fn from(e: PoisonError<T>) -> Self {
        RuntimeError::MutexPoisonError(e)
    }
}

impl<T: Layout> From<SwapBuffersError> for RuntimeError<T> {
    fn from(e: SwapBuffersError) -> Self {
        RuntimeError::GlSwapError(e)
    }
}

impl<T: Layout> fmt::Debug for RuntimeError<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // NOTE: `write!(f, "{:?}", self)` here would recurse into this very
        // impl and overflow the stack; format each variant explicitly instead.
        use self::RuntimeError::*;
        match self {
            GlSwapError(e) => write!(f, "GlSwapError({:?})", e),
            ArcUnlockError => write!(f, "ArcUnlockError"),
            MutexPoisonError(_) => write!(f, "MutexPoisonError"),
        }
    }
}

pub(crate) struct FrameEventInfo {
pub(crate) should_redraw_window: bool, pub(crate) should_swap_window: bool, pub(crate) should_hittest: bool, pub(crate) cur_cursor_pos: LogicalPosition, pub(crate) new_window_size: Option<LogicalSize>, pub(crate) new_dpi_factor: Option<f64>, pub(crate) is_resize_event: bool, } impl Default for FrameEventInfo { fn default() -> Self { Self { should_redraw_window: false, should_swap_window: false, should_hittest: false, cur_cursor_pos: LogicalPosition::new(0.0, 0.0), new_window_size: None, new_dpi_factor: None, is_resize_event: false, } } } /// Configuration for optional features, such as whether to enable logging or panic hooks #[derive(Debug, Clone)] #[cfg_attr(not(feature = "logging"), derive(Copy))] pub struct AppConfig { /// If enabled, logs error and info messages. /// /// Default is `Some(LevelFilter::Error)` to log all errors by default #[cfg(feature = "logging")] pub enable_logging: Option<LevelFilter>, /// Path to the output log if the logger is enabled #[cfg(feature = "logging")] pub log_file_path: Option<String>, /// If the app crashes / panics, a window with a message box pops up /// Additionally, the error + backtrace gets logged to the output /// file (if logging is enabled). #[cfg(feature = "logging")] pub enable_visual_panic_hook: bool, } impl Default for AppConfig { fn default() -> Self { Self { #[cfg(feature = "logging")] enable_logging: Some(LevelFilter::Error), #[cfg(feature = "logging")] log_file_path: None, #[cfg(feature = "logging")] enable_visual_panic_hook: true, } } } impl<T: Layout> App<T> { #[allow(unused_variables)] /// Create a new, empty application. This does not open any windows. pub fn new(initial_data: T, config: AppConfig) -> Self { #[cfg(feature = "logging")] { if let Some(log_level) = config.enable_logging { ::logging::set_up_logging(config.log_file_path, log_level); } if config.enable_visual_panic_hook { ::logging::set_up_panic_hooks(); } } Self { windows: Vec::new(), app_state: AppState::new(initial_data), } } /// Spawn a new window on the screen. Note that this should only be used to /// create extra windows, the default window will be the window submitted to /// the `.run` method. pub fn push_window(&mut self, window: Window<T>) { use default_callbacks::DefaultCallbackSystem; // TODO: push_window doesn't work dynamically! self.app_state.windows.push(FakeWindow { state: window.state.clone(), css: FakeCss::default(), default_callbacks: DefaultCallbackSystem::new(), read_only_window: window.display.clone(), }); self.windows.push(window); } /// Start the rendering loop for the currently open windows /// This is the "main app loop", "main game loop" or whatever you want to call it. /// Usually this is the last function you call in your `main()` function, since exiting /// it means that the user has closed all windows and wants to close the app. /// /// When all windows are closed, this function returns the internal data again. /// This is useful for ex. CLI application that run procedurally, but then want to /// open a window temporarily, to ask for user input in a "nicer" way than a pure /// CLI-way. /// /// This way you can do this: /// /// ```no_run,ignore /// let app = App::new(MyData { username: None, password: None }); /// app.create_window(WindowCreateOptions::default(), Css::native()); /// /// // pop open a window that asks the user for his username and password... /// let MyData { username, password } = app.run(); /// /// // continue the rest of the program here... 
/// println!("username: {:?}, password: {:?}", username, password); /// ``` pub fn run(mut self, window: Window<T>) -> Result<T, RuntimeError<T>> { self.push_window(window); self.run_inner()?; let unique_arc = Arc::try_unwrap(self.app_state.data).map_err(|_| RuntimeError::ArcUnlockError)?; unique_arc.into_inner().map_err(|e| e.into()) } fn run_inner(&mut self) -> Result<(), RuntimeError<T>> { use std::{thread, time::{Duration, Instant}}; let mut ui_state_cache = Self::initialize_ui_state(&self.windows, &mut self.app_state); let mut ui_description_cache = vec![UiDescription::default(); self.windows.len()]; let mut force_redraw_cache = vec![1_usize; self.windows.len()]; let mut parsed_css_cache: Vec<Option<ParsedCss>> = vec![None; self.windows.len()]; let mut awakened_task = vec![false; self.windows.len()]; #[cfg(debug_assertions)] let mut last_css_reload = Instant::now(); while !self.windows.is_empty() { let time_start = Instant::now(); let mut closed_windows = Vec::<usize>::new(); 'window_loop: for (idx, ref mut window) in self.windows.iter_mut().enumerate() { let window_id = WindowId { id: idx }; let mut frame_event_info = FrameEventInfo::default(); let mut events = Vec::new(); window.events_loop.poll_events(|e| events.push(e)); for event in &events { if preprocess_event(event, &mut frame_event_info, awakened_task[idx]) == WindowCloseEvent::AboutToClose { closed_windows.push(idx); continue 'window_loop; } window.state.update_mouse_cursor_position(event); window.state.update_keyboard_modifiers(event); window.state.update_keyboard_pressed_chars(event); } if frame_event_info.should_hittest { for event in &events { do_hit_test_and_call_callbacks( event, window, window_id, &mut frame_event_info, &ui_state_cache, &mut self.app_state); } } if frame_event_info.should_swap_window || frame_event_info.is_resize_event { window.display.swap_buffers()?; if let Some(i) = force_redraw_cache.get_mut(idx) { if *i > 0 { *i -= 1 }; if *i == 0 { clean_up_unused_opengl_textures(window.renderer.as_mut().unwrap().flush_pipeline_info()); } } } if frame_event_info.is_resize_event || frame_event_info.should_redraw_window { // This is a hack because during a resize event, winit eats the "awakened" // event. So what we do is that we call the layout-and-render again, to // trigger a second "awakened" event. So when the window is resized, the // layout function is called twice (the first event will be eaten by winit) // // This is a reported bug and should be fixed somewhere in July force_redraw_cache[idx] = 2; } // TODO: use this! 
let should_redraw_animations = window.run_all_animations(); // Update the window state that we got from the frame event (updates window dimensions and DPI) window.update_from_external_window_state(&mut frame_event_info); // Update the window state every frame that was set by the user window.update_from_user_window_state(self.app_state.windows[idx].state.clone()); // Reset the scroll amount to 0 (for the next frame) window.clear_scroll_state(); if frame_event_info.should_redraw_window || force_redraw_cache[idx] > 0 { #[cfg(not(debug_assertions))] { if parsed_css_cache[idx].is_none() { parsed_css_cache[idx] = Some(ParsedCss::from_css(&window.css)); } } #[cfg(debug_assertions)] { if window.css.hot_reload_path.is_some() { parsed_css_cache[idx] = Some(ParsedCss::from_css(&window.css)); } } let parsed_css = parsed_css_cache[idx].as_ref().unwrap(); // Call the Layout::layout() fn, get the DOM let window_id = WindowId { id: idx }; ui_state_cache[idx] = UiState::from_app_state(&mut self.app_state, window_id); // Style the DOM ui_description_cache[idx] = UiDescription::from_dom( &ui_state_cache[idx], &parsed_css, &window.css.dynamic_css_overrides ); // Send webrender the size and buffer of the display Self::update_display(&window); // render the window (webrender will send an Awakened event when the frame is done) let arc_mutex_t_clone = self.app_state.data.clone(); render( arc_mutex_t_clone, true, /* has_window_size_changed */ &ui_description_cache[idx], &ui_state_cache[idx], &parsed_css, &mut *window, &mut self.app_state.windows[idx], &mut self.app_state.resources); awakened_task[idx] = false; } } #[cfg(debug_assertions)] { for (window_idx, window) in self.windows.iter_mut().enumerate() { // Hot-reload CSS if necessary if window.css.hot_reload_path.is_some() && Instant::now() - last_css_reload > Duration::from_millis(500) { window.css.reload_css(); window.css.needs_relayout = true; last_css_reload = Instant::now(); window.events_loop.create_proxy().wakeup().unwrap_or(()); awakened_task[window_idx] = true; } } } // Close windows if necessary closed_windows.into_iter().for_each(|closed_window_id| { ui_state_cache.remove(closed_window_id); ui_description_cache.remove(closed_window_id); force_redraw_cache.remove(closed_window_id); parsed_css_cache.remove(closed_window_id); self.windows.remove(closed_window_id); }); let should_redraw_daemons = self.app_state.run_all_daemons(); let should_redraw_tasks = self.app_state.clean_up_finished_tasks(); if [should_redraw_daemons, should_redraw_tasks].into_iter().any(|e| *e == UpdateScreen::Redraw) { self.windows.iter().for_each(|w| w.events_loop.create_proxy().wakeup().unwrap_or(())); awakened_task = vec![true; self.windows.len()]; } else { // Wait until 16ms have passed let diff = time_start.elapsed(); const FRAME_TIME: Duration = Duration::from_millis(16); if diff < FRAME_TIME { thread::sleep(FRAME_TIME - diff); } } } Ok(()) } fn update_display(window: &Window<T>) { use webrender::api::{Transaction, DeviceUintRect, DeviceUintPoint}; use euclid::TypedSize2D; let mut txn = Transaction::new(); let physical_fb_dimensions = window.state.size.dimensions.to_physical(window.state.size.hidpi_factor); let framebuffer_size = TypedSize2D::new(physical_fb_dimensions.width as u32, physical_fb_dimensions.height as u32); let bounds = DeviceUintRect::new(DeviceUintPoint::new(0, 0), framebuffer_size); txn.set_window_parameters(framebuffer_size, bounds, window.state.size.hidpi_factor as f32); window.internal.api.send_transaction(window.internal.document_id, txn); } fn 
    initialize_ui_state(windows: &[Window<T>], app_state: &mut AppState<T>)
        -> Vec<UiState<T>>
    {
        windows.iter().enumerate().map(|(idx, _window)| {
            let window_id = WindowId { id: idx };
            UiState::from_app_state(app_state, window_id)
        }).collect()
    }

    /// Add an image to the internal resources. Only available with
    /// `--feature="image_loading"` (on by default)
    ///
    /// ## Returns
    ///
    /// - `Ok(Some(()))` if an image with the same ID already exists.
    /// - `Ok(None)` if the image was added, but didn't exist previously.
    /// - `Err(e)` if the image couldn't be decoded
    #[cfg(feature = "image_loading")]
    pub fn add_image<S: Into<String>, R: Read>(&mut self, id: S, data: &mut R, image_type: ImageType)
        -> Result<Option<()>, ImageError>
    {
        self.app_state.add_image(id, data, image_type)
    }

    /// Removes an image from the internal app resources.
    /// Returns `Some` if the image existed and was removed.
    /// If the given ID doesn't exist, this function does nothing and returns `None`.
    pub fn delete_image<S: AsRef<str>>(&mut self, id: S) -> Option<()> {
        self.app_state.delete_image(id)
    }

    /// Checks if an image is currently registered and ready-to-use
    pub fn has_image<S: AsRef<str>>(&mut self, id: S) -> bool {
        self.app_state.has_image(id)
    }

    /// Add a font (TTF or OTF) as a resource, identified by ID
    ///
    /// ## Returns
    ///
    /// - `Ok(Some(()))` if a font with the same ID already exists.
    /// - `Ok(None)` if the font was added, but didn't exist previously.
    /// - `Err(e)` if the font couldn't be decoded
    pub fn add_font<R: Read>(&mut self, id: FontId, data: &mut R) -> Result<Option<()>, FontError> {
        self.app_state.add_font(id, data)
    }

    /// Checks if a font is currently registered and ready-to-use
    pub fn has_font(&mut self, id: &FontId) -> bool {
        self.app_state.has_font(id)
    }

    /// Deletes a font from the internal app resources.
    ///
    /// ## Arguments
    ///
    /// - `id`: The stringified ID of the font to remove, e.g. `"Helvetica-Bold"`.
    ///
    /// ## Returns
    ///
    /// - `Some(())` if the font existed and was successfully removed
    /// - `None` if the given ID doesn't exist. In that case, the function does
    ///   nothing.
    ///
    /// Wrapper function for [`AppState::delete_font`]. After this function has been
    /// called, you can be sure that the renderer doesn't know about your font anymore.
    /// This also means that the font needs to be re-parsed if you want to add it again.
    /// Use with care.
    ///
    /// ## Example
    ///
    #[cfg_attr(feature = "no-opengl-tests", doc = " ```no_run")]
    #[cfg_attr(not(feature = "no-opengl-tests"), doc = " ```")]
    /// # use azul::prelude::*;
    /// # const TEST_FONT: &[u8] = include_bytes!("../assets/fonts/weblysleekuil.ttf");
    /// #
    /// # struct MyAppData { }
    /// #
    /// # impl Layout for MyAppData {
    /// #     fn layout(&self, _window_id: WindowInfo<MyAppData>) -> Dom<MyAppData> {
    /// #         Dom::new(NodeType::Div)
    /// #     }
    /// # }
    /// #
    /// # fn main() {
    /// let mut app = App::new(MyAppData { }, AppConfig::default());
    /// app.add_font(FontId::ExternalFont("Webly Sleeky UI".into()), &mut TEST_FONT).unwrap();
    /// app.delete_font(&FontId::ExternalFont("Webly Sleeky UI".into()));
    /// // NOTE: The font isn't immediately removed, only in the next draw call
    /// app.mock_render_frame();
    /// assert!(!app.has_font(&FontId::ExternalFont("Webly Sleeky UI".into())));
    /// # }
    /// ```
    ///
    /// [`AppState::delete_font`]: ../app_state/struct.AppState.html#method.delete_font
    pub fn delete_font(&mut self, id: &FontId) -> Option<()> {
        self.app_state.delete_font(id)
    }

    /// Create a daemon. Does nothing if a daemon with the function pointer location already exists.
    ///
    /// If the daemon was inserted, returns true, otherwise false
    pub fn add_daemon(&mut self, daemon: Daemon<T>) -> bool {
        self.app_state.add_daemon(daemon)
    }

    pub fn add_text_uncached<S: Into<String>>(&mut self, text: S) -> TextId {
        self.app_state.add_text_uncached(text)
    }

    pub fn add_text_cached<S: Into<String>>(&mut self, text: S, font_id: &FontId, font_size: PixelValue, letter_spacing: Option<LetterSpacing>) -> TextId {
        self.app_state.add_text_cached(text, font_id, font_size, letter_spacing)
    }

    pub fn delete_text(&mut self, id: TextId) {
        self.app_state.delete_text(id);
    }

    pub fn clear_all_texts(&mut self) {
        self.app_state.clear_all_texts();
    }

    /// Get the contents of the system clipboard as a string
    pub fn get_clipboard_string(&mut self) -> Result<String, ClipboardError> {
        self.app_state.get_clipboard_string()
    }

    /// Set the contents of the system clipboard as a string
    pub fn set_clipboard_string(&mut self, contents: String) -> Result<(), ClipboardError> {
        self.app_state.set_clipboard_string(contents)
    }

    /// Mock rendering function, for creating a hidden window and rendering one frame.
    /// Used in unit tests. You **have** to enable software rendering, otherwise,
    /// this function won't work in a headless environment.
    ///
    /// **NOTE**: In a headless environment, such as Travis, you have to use XVFB to
    /// create a fake X11 server. XVFB also has a bug where it loads with the default of
    /// 8-bit greyscale color (see [here]). In order to fix that, you have to run:
    ///
    /// `xvfb-run --server-args "-screen 0 1920x1080x24" cargo test --features "doc-test"`
    ///
    /// [here]: https://unix.stackexchange.com/questions/104914/
    ///
    #[cfg(any(feature = "doc-test"))]
    pub fn mock_render_frame(&mut self) {
        use prelude::*;
        let hidden_create_options = WindowCreateOptions {
            state: WindowState { is_visible: false, .. Default::default() },
            // force software renderer (OSMesa)
            renderer_type: RendererType::Software,
            .. Default::default()
        };
        self.push_window(Window::new(hidden_create_options, Css::native()).unwrap());
        // TODO: do_first_redraw shouldn't exist, need to find a better way to update the resources
        // This will make App::delete_font doc-test fail if run without `no-opengl-tests`.
        //
        // let ui_state_cache = Self::initialize_ui_state(&self.windows, &self.app_state);
        // Self::do_first_redraw(&mut self.windows, &mut self.app_state, &ui_state_cache);
    }

    /// See `AppState::add_custom_task`.
    pub fn add_custom_task<U: Send + 'static>(
        &mut self,
        data: &Arc<Mutex<U>>,
        callback: fn(Arc<Mutex<U>>, Arc<()>),
        after_completion_deamons: &[Daemon<T>])
    {
        self.app_state.add_custom_task(data, callback, after_completion_deamons);
    }
}

impl<T: Layout + Send + 'static> App<T> {
    /// See `AppState::add_task`.
    pub fn add_task(
        &mut self,
        callback: fn(Arc<Mutex<T>>, Arc<()>),
        after_completion_callbacks: &[Daemon<T>])
    {
        self.app_state.add_task(callback, after_completion_callbacks);
    }
}

#[derive(Debug, Copy, Clone, PartialEq)]
enum WindowCloseEvent {
    AboutToClose,
    NoCloseEvent,
}

/// Pre-filters any events that are not handled by the framework yet, since it would be wasteful
/// to process them. Modifies the `frame_event_info`
///
/// `awakened_task` is a special parameter that should be set to true if the `Task`
/// system fired a `WindowEvent::Awakened`.
fn preprocess_event(event: &Event, frame_event_info: &mut FrameEventInfo, awakened_task: bool) -> WindowCloseEvent {
    use glium::glutin::WindowEvent;
    match event {
        Event::WindowEvent { event, ..
} => { match event { WindowEvent::MouseInput { .. } => { frame_event_info.should_hittest = true; }, WindowEvent::CursorMoved { position, .. } => { frame_event_info.should_hittest = true; frame_event_info.cur_cursor_pos = *position; }, WindowEvent::Resized(wh) => { frame_event_info.new_window_size = Some(*wh); frame_event_info.is_resize_event = true; frame_event_info.should_redraw_window = true; }, WindowEvent::Refresh => { frame_event_info.should_redraw_window = true; }, WindowEvent::HiDpiFactorChanged(dpi) => { frame_event_info.new_dpi_factor = Some(*dpi); frame_event_info.should_redraw_window = true; }, WindowEvent::MouseWheel { .. } => { frame_event_info.should_hittest = true; }, WindowEvent::CloseRequested => { return WindowCloseEvent::AboutToClose; }, WindowEvent::KeyboardInput { .. } => { frame_event_info.should_hittest = true; } _ => { }, } }, Event::Awakened => { frame_event_info.should_swap_window = true; if awakened_task { frame_event_info.should_redraw_window = true; } }, _ => { }, } WindowCloseEvent::NoCloseEvent } fn do_hit_test_and_call_callbacks<T: Layout>( event: &Event, window: &mut Window<T>, window_id: WindowId, info: &mut FrameEventInfo, ui_state_cache: &[UiState<T>], app_state: &mut AppState<T>) { use dom::UpdateScreen; use webrender::api::WorldPoint; use window::WindowEvent; use dom::Callback; use window_state::{KeyboardState, MouseState}; let cursor_location = match window.state.mouse_state.cursor_pos { Some(pos) => WorldPoint::new(pos.x as f32, pos.y as f32), None => return, }; let hit_test_results = window.internal.api.hit_test( window.internal.document_id, Some(window.internal.pipeline_id), cursor_location, HitTestFlags::FIND_ALL); let mut should_update_screen = UpdateScreen::DontRedraw; let callbacks_filter_list = window.state.determine_callbacks(event); // TODO: this should be refactored - currently very stateful and error-prone! app_state.windows[window_id.id].set_keyboard_state(&window.state.keyboard_state); app_state.windows[window_id.id].set_mouse_state(&window.state.mouse_state); // Run all default callbacks - **before** the user-defined callbacks are run! // TODO: duplicated code! { use app_state::AppStateNoData; let mut lock = app_state.data.lock().unwrap(); for (item, callback_id_list) in hit_test_results.items.iter().filter_map(|item| ui_state_cache[window_id.id].tag_ids_to_default_callbacks // <- NOTE: tag_ids_to_default_callbacks .get(&item.tag.0) .and_then(|callback_id_list| Some((item, callback_id_list))) ) { use dom::On; let window_event = WindowEvent { window: window_id.id, hit_dom_node: ui_state_cache[window_id.id].tag_ids_to_node_ids[&item.tag.0], cursor_relative_to_item: (item.point_in_viewport.x, item.point_in_viewport.y), cursor_in_viewport: (item.point_in_viewport.x, item.point_in_viewport.y), }; // Invoke On::MouseOver callback - TODO: duplicated code (due to borrowing issues)! 
if let Some(callback_id) = callback_id_list.get(&On::MouseOver) { let app_state_no_data = AppStateNoData { windows: &app_state.windows, resources: &mut app_state.resources, }; // safe unwrap, we have added the callback previously if app_state.windows[window_id.id].default_callbacks.run_callback( &mut *lock, callback_id, app_state_no_data, window_event ) == UpdateScreen::Redraw { should_update_screen = UpdateScreen::Redraw; } } for callback_id in callbacks_filter_list.iter().filter_map(|on| callback_id_list.get(on)) { let app_state_no_data = AppStateNoData { windows: &app_state.windows, resources: &mut app_state.resources, }; // safe unwrap, we have added the callback previously if app_state.windows[window_id.id].default_callbacks.run_callback( &mut *lock, callback_id, app_state_no_data, window_event ) == UpdateScreen::Redraw { should_update_screen = UpdateScreen::Redraw; } } } } // unlock AppState mutex // For all hit items, lookup the callback and call it for (item, callback_list) in hit_test_results.items.iter().filter_map(|item| ui_state_cache[window_id.id].tag_ids_to_callbacks .get(&item.tag.0) .and_then(|callback_list| Some((item, callback_list))) ) { use dom::On; let window_event = WindowEvent { window: window_id.id, hit_dom_node: ui_state_cache[window_id.id].tag_ids_to_node_ids[&item.tag.0], cursor_relative_to_item: (item.point_in_viewport.x, item.point_in_viewport.y), cursor_in_viewport: (item.point_in_viewport.x, item.point_in_viewport.y), }; let mut invoke_callback = |&Callback(callback_func)| { if (callback_func)(app_state, window_event) == UpdateScreen::Redraw { should_update_screen = UpdateScreen::Redraw; } }; // Invoke On::MouseOver callback if let Some(callback_id) = callback_list.get(&On::MouseOver) { invoke_callback(callback_id); } // Invoke user-defined callback if necessary for callback_id in callbacks_filter_list.iter().filter_map(|on| callback_list.get(on)) { invoke_callback(callback_id); } } app_state.windows[window_id.id].set_keyboard_state(&KeyboardState::default()); app_state.windows[window_id.id].set_mouse_state(&MouseState::default()); if should_update_screen == UpdateScreen::Redraw { info.should_redraw_window = true; // TODO: THIS IS PROBABLY THE WRONG PLACE TO DO THIS!!! 
// Copy the current fake CSS changes to the real CSS, then clear the fake CSS again // TODO: .clone() and .clear() can be one operation window.css.dynamic_css_overrides = app_state.windows[window_id.id].css.dynamic_css_overrides.clone(); // clear the dynamic CSS overrides app_state.windows[window_id.id].css.clear(); app_state.windows[window_id.id].default_callbacks.clear(); } } fn render<T: Layout>( app_data: Arc<Mutex<T>>, has_window_size_changed: bool, ui_description: &UiDescription<T>, ui_state: &UiState<T>, parsed_css: &ParsedCss, window: &mut Window<T>, fake_window: &mut FakeWindow<T>, app_resources: &mut AppResources) { use webrender::api::*; use display_list::DisplayList; use euclid::TypedSize2D; use std::u32; let display_list = DisplayList::new_from_ui_description(ui_description, ui_state); let builder = display_list.into_display_list_builder( app_data, window.internal.pipeline_id, window.internal.epoch, has_window_size_changed, &window.internal.api, &parsed_css, &window.state.size, &mut *fake_window, &mut window.ui_solver, &mut *app_resources); // NOTE: Display list has to be rebuilt every frame, otherwise, the epochs get out of sync window.internal.last_display_list_builder = builder.finalize().2; let mut txn = Transaction::new(); let LogicalSize { width, height } = window.state.size.dimensions; let layout_size = TypedSize2D::new(width as f32, height as f32); let framebuffer_size_physical = window.state.size.dimensions.to_physical(window.state.size.hidpi_factor); let framebuffer_size = TypedSize2D::new(framebuffer_size_physical.width as u32, framebuffer_size_physical.height as u32); txn.set_display_list( window.internal.epoch, None, layout_size, (window.internal.pipeline_id, layout_size, window.internal.last_display_list_builder.clone()), true, ); // We don't want the epoch to increase to u32::MAX, since u32::MAX represents // an invalid epoch, which could confuse webrender window.internal.epoch = Epoch(if window.internal.epoch.0 == (u32::MAX - 1) { 0 } else { window.internal.epoch.0 + 1 }); txn.set_root_pipeline(window.internal.pipeline_id); txn.generate_frame(); window.internal.api.send_transaction(window.internal.document_id, txn); window.renderer.as_mut().unwrap().update(); render_inner(window, framebuffer_size); } fn clean_up_unused_opengl_textures(pipeline_info: PipelineInfo) { use compositor::ACTIVE_GL_TEXTURES; // TODO: currently active epochs can be empty, why? // // I mean, while the renderer is rendering, there can never be "no epochs" active, // at least one epoch must always be active. if pipeline_info.epochs.is_empty() { return; } // TODO: pipeline_info.epochs does not contain all active epochs, // at best it contains the lowest in-use epoch. I.e. if `Epoch(43)` // is listed, you can remove all textures from Epochs **lower than 43** // BUT NOT EPOCHS HIGHER THAN 43. // // This means that "all active epochs" (in the documentation) is misleading // since it doesn't actually list all active epochs, otherwise it'd list Epoch(43), // Epoch(44), Epoch(45), which are currently active. 
let oldest_to_remove_epoch = pipeline_info.epochs.values().min().unwrap(); let mut active_textures_lock = ACTIVE_GL_TEXTURES.lock().unwrap(); // Retain all OpenGL textures from epochs higher than the lowest epoch // // TODO: Handle overflow of Epochs correctly (low priority) active_textures_lock.retain(|key, _| key > oldest_to_remove_epoch); } // See: https://github.com/servo/webrender/pull/2880 // webrender doesn't reset the active shader back to what it was, but rather sets it // to zero, which glium doesn't know about, so on the next frame it tries to draw with shader 0 fn render_inner<T: Layout>(window: &mut Window<T>, framebuffer_size: TypedSize2D<u32, DevicePixel>) { use gleam::gl; use window::get_gl_context; // use glium::glutin::GlContext; // unsafe { window.display.gl_window().make_current().unwrap(); } let mut current_program = [0_i32]; unsafe { get_gl_context(&window.display).unwrap().get_integer_v(gl::CURRENT_PROGRAM, &mut current_program) }; window.renderer.as_mut().unwrap().render(framebuffer_size).unwrap(); get_gl_context(&window.display).unwrap().use_program(current_program[0] as u32); }
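// Editorial sketch of the epoch bump performed in `render` above: per the
// comment in that function, webrender treats `Epoch(u32::MAX)` as the
// invalid-epoch sentinel, so the counter wraps to 0 one step early and can
// never reach it.
fn next_epoch(current: u32) -> u32 {
    if current == u32::MAX - 1 { 0 } else { current + 1 }
}

#[test]
fn epoch_counter_skips_the_invalid_sentinel() {
    assert_eq!(next_epoch(0), 1);
    assert_eq!(next_epoch(u32::MAX - 2), u32::MAX - 1);
    assert_eq!(next_epoch(u32::MAX - 1), 0); // wraps before hitting u32::MAX
}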
upload.go
package services import ( "net/http" "os" "path/filepath" "regexp" "sort" "strconv" "strings" rthttpclient "github.com/cobalt77/jfrog-client-go/artifactory/httpclient" "github.com/cobalt77/jfrog-client-go/artifactory/services/fspatterns" "github.com/cobalt77/jfrog-client-go/artifactory/services/utils" "github.com/cobalt77/jfrog-client-go/auth" clientutils "github.com/cobalt77/jfrog-client-go/utils" "github.com/cobalt77/jfrog-client-go/utils/errorutils" ioutils "github.com/cobalt77/jfrog-client-go/utils/io" "github.com/cobalt77/jfrog-client-go/utils/io/fileutils" "github.com/cobalt77/jfrog-client-go/utils/io/fileutils/checksum" "github.com/cobalt77/jfrog-client-go/utils/io/httputils" "github.com/cobalt77/jfrog-client-go/utils/log" "github.com/jfrog/gofrog/parallel" ) type UploadService struct { client *rthttpclient.ArtifactoryHttpClient Progress ioutils.Progress ArtDetails auth.ServiceDetails DryRun bool Threads int } func NewUploadService(client *rthttpclient.ArtifactoryHttpClient) *UploadService { return &UploadService{client: client} } func (us *UploadService) SetThreads(threads int) { us.Threads = threads } func (us *UploadService) GetJfrogHttpClient() *rthttpclient.ArtifactoryHttpClient { return us.client } func (us *UploadService) SetServiceDetails(artDetails auth.ServiceDetails) { us.ArtDetails = artDetails } func (us *UploadService) SetDryRun(isDryRun bool) { us.DryRun = isDryRun } func (us *UploadService) UploadFiles(uploadParams ...UploadParams) (artifactsFileInfo []utils.FileInfo, totalUploaded, totalFailed int, err error) { // Uploading threads are using this struct to report upload results. uploadSummary := *utils.NewUploadResult(us.Threads) producerConsumer := parallel.NewRunner(us.Threads, 100, false) errorsQueue := clientutils.NewErrorsQueue(1) us.prepareUploadTasks(producerConsumer, errorsQueue, uploadSummary, uploadParams...) return us.performUploadTasks(producerConsumer, &uploadSummary, errorsQueue) } func (us *UploadService) prepareUploadTasks(producer parallel.Runner, errorsQueue *clientutils.ErrorsQueue, uploadSummary utils.UploadResult, uploadParamsSlice ...UploadParams) { go func() { defer producer.Done() // Iterate over file-spec groups and produce upload tasks. // When encountering an error, log and move to next group. vcsCache := clientutils.NewVcsDetals() for _, uploadParams := range uploadParamsSlice { artifactHandlerFunc := us.createArtifactHandlerFunc(&uploadSummary, uploadParams) err := collectFilesForUpload(uploadParams, producer, artifactHandlerFunc, errorsQueue, vcsCache) if err != nil { log.Error(err) errorsQueue.AddError(err) } } }() } func (us *UploadService) performUploadTasks(consumer parallel.Runner, uploadSummary *utils.UploadResult, errorsQueue *clientutils.ErrorsQueue) (artifactsFileInfo []utils.FileInfo, totalUploaded, totalFailed int, err error) { // Blocking until consuming is finished. 
consumer.Run() err = errorsQueue.GetError() totalUploaded = utils.SumIntArray(uploadSummary.SuccessCount) totalUploadAttempted := utils.SumIntArray(uploadSummary.TotalCount) log.Debug("Uploaded", strconv.Itoa(totalUploaded), "artifacts.") totalFailed = totalUploadAttempted - totalUploaded if totalFailed > 0 { log.Error("Failed uploading", strconv.Itoa(totalFailed), "artifacts.") } artifactsFileInfo = utils.FlattenFileInfoArray(uploadSummary.FileInfo) return } func addProps(oldProps, additionalProps string) string { if len(oldProps) > 0 && !strings.HasSuffix(oldProps, ";") && len(additionalProps) > 0 { oldProps += ";" } return oldProps + additionalProps } func addSymlinkProps(artifact clientutils.Artifact, uploadParams UploadParams) (string, error)
func collectFilesForUpload(uploadParams UploadParams, producer parallel.Runner, artifactHandlerFunc artifactContext, errorsQueue *clientutils.ErrorsQueue, vcsCache *clientutils.VcsCache) error {
	if !strings.Contains(uploadParams.GetTarget(), "/") {
		uploadParams.SetTarget(uploadParams.GetTarget() + "/")
	}
	uploadParams.SetPattern(clientutils.ReplaceTildeWithUserHome(uploadParams.GetPattern()))
	// Save the indexes of parentheses in the pattern which have a corresponding placeholder.
	rootPath, err := fspatterns.GetRootPath(uploadParams.GetPattern(), uploadParams.GetTarget(), uploadParams.IsRegexp(), uploadParams.IsSymlink())
	if err != nil {
		return err
	}

	isDir, err := fileutils.IsDirExists(rootPath, uploadParams.IsSymlink())
	if err != nil {
		return err
	}

	// If the path is a single file (or a symlink while preserving symlinks) upload it and return
	if !isDir || (fileutils.IsPathSymlink(rootPath) && uploadParams.IsSymlink()) {
		artifact, err := fspatterns.GetSingleFileToUpload(rootPath, uploadParams.GetTarget(), uploadParams.IsFlat(), uploadParams.IsSymlink())
		if err != nil {
			return err
		}
		props, err := addSymlinkProps(artifact, uploadParams)
		if err != nil {
			return err
		}
		if uploadParams.IsAddVcsProps() {
			vcsProps, err := getVcsProps(artifact.LocalPath, vcsCache)
			if err != nil {
				return err
			}
			uploadParams.BuildProps += vcsProps
		}
		uploadData := UploadData{Artifact: artifact, Props: props, BuildProps: uploadParams.BuildProps}
		task := artifactHandlerFunc(uploadData)
		producer.AddTaskWithError(task, errorsQueue.AddError)
		return err
	}
	uploadParams.SetPattern(clientutils.PrepareLocalPathForUpload(uploadParams.GetPattern(), uploadParams.IsRegexp()))
	err = collectPatternMatchingFiles(uploadParams, rootPath, producer, artifactHandlerFunc, errorsQueue, vcsCache)
	return err
}

func collectPatternMatchingFiles(uploadParams UploadParams, rootPath string, producer parallel.Runner, artifactHandlerFunc artifactContext, errorsQueue *clientutils.ErrorsQueue, vcsCache *clientutils.VcsCache) error {
	excludePathPattern := fspatterns.PrepareExcludePathPattern(uploadParams)
	patternRegex, err := regexp.Compile(uploadParams.GetPattern())
	if errorutils.CheckError(err) != nil {
		return err
	}

	paths, err := fspatterns.GetPaths(rootPath, uploadParams.IsRecursive(), uploadParams.IsIncludeDirs(), uploadParams.IsSymlink())
	if err != nil {
		return err
	}
	// Longest paths first
	sort.Sort(sort.Reverse(sort.StringSlice(paths)))
	// 'foldersPaths' is a subset of the 'paths' array. foldersPaths is in use only when we need to upload folders with flat=true.
	// 'foldersPaths' will contain only the directories paths which are in the 'paths' array.
	var foldersPaths []string
	for index, path := range paths {
		matches, isDir, isSymlinkFlow, err := fspatterns.PrepareAndFilterPaths(path, excludePathPattern, uploadParams.IsSymlink(), uploadParams.IsIncludeDirs(), patternRegex)
		if err != nil {
			return err
		}
		if matches != nil && len(matches) > 0 {
			target := uploadParams.GetTarget()
			tempPaths := paths
			tempIndex := index
			// In case we need to upload directories with flat=true, we want to avoid the creation of unnecessary paths in Artifactory.
			// To achieve this, we need to take into consideration the directories which had already been uploaded, ignoring all file paths.
// When flat=false we take into consideration folder paths which were created implicitly by file upload if uploadParams.IsFlat() && uploadParams.IsIncludeDirs() && isDir { foldersPaths = append(foldersPaths, path) tempPaths = foldersPaths tempIndex = len(foldersPaths) - 1 } taskData := &uploadTaskData{target: target, path: path, isDir: isDir, isSymlinkFlow: isSymlinkFlow, paths: tempPaths, groups: matches, index: tempIndex, size: len(matches), uploadParams: uploadParams, producer: producer, artifactHandlerFunc: artifactHandlerFunc, errorsQueue: errorsQueue, } createUploadTask(taskData, vcsCache) } } return nil } type uploadTaskData struct { target string path string isDir bool isSymlinkFlow bool paths []string groups []string index int size int uploadParams UploadParams producer parallel.Runner artifactHandlerFunc artifactContext errorsQueue *clientutils.ErrorsQueue } func createUploadTask(taskData *uploadTaskData, vcsCache *clientutils.VcsCache) error { for i := 1; i < taskData.size; i++ { group := strings.Replace(taskData.groups[i], "\\", "/", -1) taskData.target = strings.Replace(taskData.target, "{"+strconv.Itoa(i)+"}", group, -1) } var task parallel.TaskFunc // Get symlink target (returns empty string if regular file) - Used in upload name / symlinks properties symlinkPath, err := fspatterns.GetFileSymlinkPath(taskData.path) if err != nil { return err } // If preserving symlinks or symlink target is empty, use root path name for upload (symlink itself / regular file) if taskData.uploadParams.IsSymlink() || symlinkPath == "" { taskData.target = getUploadTarget(taskData.path, taskData.target, taskData.uploadParams.IsFlat()) } else { taskData.target = getUploadTarget(symlinkPath, taskData.target, taskData.uploadParams.IsFlat()) } artifact := clientutils.Artifact{LocalPath: taskData.path, TargetPath: taskData.target, Symlink: symlinkPath} props, e := addSymlinkProps(artifact, taskData.uploadParams) if e != nil { return e } if taskData.uploadParams.IsAddVcsProps() { vcsProps, err := getVcsProps(taskData.path, vcsCache) if err != nil { return err } taskData.uploadParams.BuildProps += vcsProps } uploadData := UploadData{Artifact: artifact, Props: props, BuildProps: taskData.uploadParams.BuildProps} if taskData.isDir && taskData.uploadParams.IsIncludeDirs() && !taskData.isSymlinkFlow { if taskData.path != "." && (taskData.index == 0 || !utils.IsSubPath(taskData.paths, taskData.index, fileutils.GetFileSeparator())) { uploadData.IsDir = true } else { return nil } } task = taskData.artifactHandlerFunc(uploadData) taskData.producer.AddTaskWithError(task, taskData.errorsQueue.AddError) return nil } // Construct the target path while taking `flat` flag into account. 
func getUploadTarget(rootPath, target string, isFlat bool) string { if strings.HasSuffix(target, "/") { if isFlat { fileName, _ := fileutils.GetFileAndDirFromPath(rootPath) target += fileName } else { target += clientutils.TrimPath(rootPath) } } return target } func addPropsToTargetPath(targetPath, props, buildProps, debConfig string) (string, error) { propsStr := strings.Join([]string{props, getDebianProps(debConfig)}, ";") properties, err := utils.ParseProperties(propsStr, utils.SplitCommas) if err != nil { return "", err } buildProperties, err := utils.ParseProperties(buildProps, utils.JoinCommas) if err != nil { return "", err } return strings.Join([]string{targetPath, properties.ToEncodedString(), buildProperties.ToEncodedString()}, ";"), nil } func prepareUploadData(localPath, baseTargetPath, props, buildProps string, uploadParams UploadParams, logMsgPrefix string) (fileInfo os.FileInfo, targetPath string, err error) { targetPath, err = addPropsToTargetPath(baseTargetPath, props, buildProps, uploadParams.GetDebian()) if errorutils.CheckError(err) != nil { return } log.Info(logMsgPrefix+"Uploading artifact:", localPath) fileInfo, err = os.Lstat(localPath) errorutils.CheckError(err) return } // Uploads the file in the specified local path to the specified target path. // Returns true if the file was successfully uploaded. func (us *UploadService) uploadFile(localPath, targetPath, pathInArtifactory, props, buildProps string, uploadParams UploadParams, logMsgPrefix string) (utils.FileInfo, bool, error) { fileInfo, targetPathWithProps, err := prepareUploadData(localPath, targetPath, props, buildProps, uploadParams, logMsgPrefix) if err != nil { return utils.FileInfo{}, false, err } var checksumDeployed = false var resp *http.Response var details *fileutils.FileDetails var body []byte httpClientsDetails := us.ArtDetails.CreateHttpClientDetails() if errorutils.CheckError(err) != nil { return utils.FileInfo{}, false, err } if uploadParams.IsSymlink() && fileutils.IsFileSymlink(fileInfo) { resp, details, body, err = us.uploadSymlink(targetPathWithProps, logMsgPrefix, httpClientsDetails, uploadParams) } else { resp, details, body, checksumDeployed, err = us.doUpload(localPath, targetPathWithProps, logMsgPrefix, httpClientsDetails, fileInfo, uploadParams) } if err != nil { return utils.FileInfo{}, false, err } logUploadResponse(logMsgPrefix, resp, body, checksumDeployed, us.DryRun) artifact := createBuildArtifactItem(details, localPath, targetPath, pathInArtifactory) return artifact, us.DryRun || checksumDeployed || resp.StatusCode == http.StatusCreated || resp.StatusCode == http.StatusOK, nil } func (us *UploadService) uploadSymlink(targetPath, logMsgPrefix string, httpClientsDetails httputils.HttpClientDetails, uploadParams UploadParams) (resp *http.Response, details *fileutils.FileDetails, body []byte, err error) { details, err = fspatterns.CreateSymlinkFileDetails() if err != nil { return } resp, body, err = utils.UploadFile("", targetPath, logMsgPrefix, &us.ArtDetails, details, httpClientsDetails, us.client, uploadParams.GetRetries(), nil) return } func (us *UploadService) doUpload(localPath, targetPath, logMsgPrefix string, httpClientsDetails httputils.HttpClientDetails, fileInfo os.FileInfo, uploadParams UploadParams) (*http.Response, *fileutils.FileDetails, []byte, bool, error) { var details *fileutils.FileDetails var checksumDeployed bool var resp *http.Response var body []byte var err error addExplodeHeader(&httpClientsDetails, uploadParams.IsExplodeArchive()) if fileInfo.Size() >= 
uploadParams.MinChecksumDeploy && !uploadParams.IsExplodeArchive() { resp, details, body, err = us.tryChecksumDeploy(localPath, targetPath, httpClientsDetails, us.client) if err != nil { return resp, details, body, checksumDeployed, err } checksumDeployed = !us.DryRun && (resp.StatusCode == http.StatusCreated || resp.StatusCode == http.StatusOK) } if !us.DryRun && !checksumDeployed { var body []byte resp, body, err = utils.UploadFile(localPath, targetPath, logMsgPrefix, &us.ArtDetails, details, httpClientsDetails, us.client, uploadParams.Retries, us.Progress) if err != nil { return resp, details, body, checksumDeployed, err } } if details == nil { details, err = fileutils.GetFileDetails(localPath) } return resp, details, body, checksumDeployed, err } func logUploadResponse(logMsgPrefix string, resp *http.Response, body []byte, checksumDeployed, isDryRun bool) { if resp != nil && resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusOK { log.Error(logMsgPrefix + "Artifactory response: " + resp.Status + "\n" + clientutils.IndentJson(body)) return } if !isDryRun { var strChecksumDeployed string if checksumDeployed { strChecksumDeployed = " (Checksum deploy)" } else { strChecksumDeployed = "" } log.Debug(logMsgPrefix, "Artifactory response:", resp.Status, strChecksumDeployed) } } func createBuildArtifactItem(details *fileutils.FileDetails, localPath, targetPath, pathInArtifactory string) utils.FileInfo { return utils.FileInfo{ LocalPath: localPath, ArtifactoryPath: targetPath, InternalArtifactoryPath: pathInArtifactory, FileHashes: &utils.FileHashes{ Sha256: details.Checksum.Sha256, Sha1: details.Checksum.Sha1, Md5: details.Checksum.Md5, }, } } func addExplodeHeader(httpClientsDetails *httputils.HttpClientDetails, isExplode bool) { if isExplode { utils.AddHeader("X-Explode-Archive", "true", &httpClientsDetails.Headers) } } func (us *UploadService) tryChecksumDeploy(filePath, targetPath string, httpClientsDetails httputils.HttpClientDetails, client *rthttpclient.ArtifactoryHttpClient) (resp *http.Response, details *fileutils.FileDetails, body []byte, err error) { if us.DryRun { return } details, err = fileutils.GetFileDetails(filePath) if err != nil { return } requestClientDetails := httpClientsDetails.Clone() utils.AddHeader("X-Checksum-Deploy", "true", &requestClientDetails.Headers) utils.AddChecksumHeaders(requestClientDetails.Headers, details) utils.AddAuthHeaders(requestClientDetails.Headers, us.ArtDetails) resp, body, err = client.SendPut(targetPath, nil, requestClientDetails) return } func getDebianProps(debianPropsStr string) string { if debianPropsStr == "" { return "" } result := "" debProps := clientutils.SplitWithEscape(debianPropsStr, '/') for k, v := range []string{"deb.distribution", "deb.component", "deb.architecture"} { debProp := strings.Join([]string{v, debProps[k]}, "=") result = strings.Join([]string{result, debProp}, ";") } return result } type UploadParams struct { *utils.ArtifactoryCommonParams Deb string BuildProps string Symlink bool ExplodeArchive bool Flat bool AddVcsProps bool Retries int MinChecksumDeploy int64 } func (up *UploadParams) IsFlat() bool { return up.Flat } func (up *UploadParams) IsSymlink() bool { return up.Symlink } func (up *UploadParams) IsAddVcsProps() bool { return up.AddVcsProps } func (up *UploadParams) IsExplodeArchive() bool { return up.ExplodeArchive } func (up *UploadParams) GetDebian() string { return up.Deb } func (up *UploadParams) GetRetries() int { return up.Retries } type UploadData struct { Artifact 
clientutils.Artifact Props string BuildProps string IsDir bool } type artifactContext func(UploadData) parallel.TaskFunc func (us *UploadService) createArtifactHandlerFunc(uploadResult *utils.UploadResult, uploadParams UploadParams) artifactContext { return func(artifact UploadData) parallel.TaskFunc { return func(threadId int) (e error) { if artifact.IsDir { us.createFolderInArtifactory(artifact) return } var uploaded bool var target string var artifactFileInfo utils.FileInfo uploadResult.TotalCount[threadId]++ logMsgPrefix := clientutils.GetLogMsgPrefix(threadId, us.DryRun) target, e = utils.BuildArtifactoryUrl(us.ArtDetails.GetUrl(), artifact.Artifact.TargetPath, make(map[string]string)) if e != nil { return } artifactFileInfo, uploaded, e = us.uploadFile(artifact.Artifact.LocalPath, target, artifact.Artifact.TargetPath, artifact.Props, artifact.BuildProps, uploadParams, logMsgPrefix) if e != nil { return } if uploaded { uploadResult.SuccessCount[threadId]++ uploadResult.FileInfo[threadId] = append(uploadResult.FileInfo[threadId], artifactFileInfo) } return } } } func (us *UploadService) createFolderInArtifactory(artifact UploadData) error { url, err := utils.BuildArtifactoryUrl(us.ArtDetails.GetUrl(), artifact.Artifact.TargetPath, make(map[string]string)) url = clientutils.AddTrailingSlashIfNeeded(url) if err != nil { return err } content := make([]byte, 0) httpClientsDetails := us.ArtDetails.CreateHttpClientDetails() resp, body, err := us.client.SendPut(url, content, &httpClientsDetails) if err != nil { log.Debug(resp) return err } logUploadResponse("Uploaded directory:", resp, body, false, us.DryRun) return err } func NewUploadParams() UploadParams { return UploadParams{ArtifactoryCommonParams: &utils.ArtifactoryCommonParams{}, MinChecksumDeploy: 10240} } func getVcsProps(path string, vcsCache *clientutils.VcsCache) (string, error) { path, err := filepath.Abs(path) if err != nil { return "", errorutils.CheckError(err) } props := "" revision, url, err := vcsCache.GetVcsDetails(filepath.Dir(path)) if err != nil { return "", errorutils.CheckError(err) } if revision != "" { props += ";vcs.revision=" + revision } if url != "" { props += ";vcs.url=" + url } return props, nil }
{ artifactProps := "" artifactSymlink := artifact.Symlink if uploadParams.IsSymlink() && len(artifactSymlink) > 0 { sha1Property := "" fileInfo, err := os.Stat(artifact.LocalPath) if err != nil { // If error occurred, but not due to nonexistence of Symlink target -> return empty if !os.IsNotExist(err) { return "", err } // If Symlink target exists -> get SHA1 if isn't a directory } else if !fileInfo.IsDir() { file, err := os.Open(artifact.LocalPath) if err != nil { return "", errorutils.CheckError(err) } defer file.Close() checksumInfo, err := checksum.Calc(file, checksum.SHA1) if err != nil { return "", err } sha1 := checksumInfo[checksum.SHA1] sha1Property = ";" + utils.SYMLINK_SHA1 + "=" + sha1 } artifactProps += utils.ARTIFACTORY_SYMLINK + "=" + artifactSymlink + sha1Property } props := uploadParams.GetProps() artifactProps = addProps(props, artifactProps) return artifactProps, nil }
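# A minimal Python sketch (illustration only) of the flat/non-flat rule that the
# Go function getUploadTarget above implements: when the target ends with "/",
# a flat upload appends just the file name, while a non-flat upload appends the
# whole local path. `lstrip("./")` is a rough stand-in for clientutils.TrimPath.
import os

def get_upload_target(root_path: str, target: str, is_flat: bool) -> str:
    if target.endswith("/"):
        if is_flat:
            target += os.path.basename(root_path)  # flat: file name only
        else:
            target += root_path.lstrip("./")       # non-flat: keep the local path
    return target

assert get_upload_target("dir/sub/a.txt", "repo/", True) == "repo/a.txt"
assert get_upload_target("dir/sub/a.txt", "repo/", False) == "repo/dir/sub/a.txt"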
users.spec.js
const db = require('../data/dbConfig');
const Users = require('./users-model');

describe('users router', function() {
  it('should run the tests', function() {
    expect(true).toBe(true);
  });
});

describe('users model', () => {
  beforeEach(async () => {
    await db('users').truncate();
  });

  describe('addUser function', () => {
    it('inserts users into the db', async () => {
      let userNumber = await db('users');
      expect(userNumber).toHaveLength(0);

      await Users.add({ username: 'test', password: 'test' });

      userNumber = await db('users');
      expect(userNumber).toHaveLength(1);
    })
  })
})
    });

    it('inserts the provided user into the db', async () => {
      let user = await Users.add({ username: 'test', password: 'test' });
      expect(user.username).toBe('test')
generate_spectogram.py
# %% import pandas as pd import librosa import librosa.display import os import numpy as np import joblib def scale_minmax(X, min=0.0, max=1.0): X_std = (X - X.min()) / (X.max() - X.min()) X_scaled = X_std * (max - min) + min return X_scaled def gen_melspect( file_path, output_name, sr=None, n_fft=2048, n_mels=128, win_length=None, hop_length=512, min_dur=8.0, output_length=251, image=False, dataset="iemocap", deltas=False, start=None, end=None, means=None, stds=None, ): y, sr = librosa.load(file_path, sr=sr) if means is not None: y = (y - means) / stds if start is not None: y = y[int(start * sr) : int(end * sr)] def pad(a, i): return a[0:i] if a.shape[0] > i else np.hstack((a, np.zeros(i - a.shape[0]))) def trim_pad_sample(x): samples = [] duration_s = x.shape[0] / float(sr) if duration_s < min_dur: samples.append(pad(x, int(sr * min_dur))) elif duration_s / min_dur > 2 or (duration_s / min_dur) % 1 > 0.65: pos = int(min_dur * sr) samples = [] samples.append(x[:pos]) x = x[pos:] dur_s = x.shape[0] / sr if dur_s / min_dur > 2 or (dur_s / min_dur) % 1 > 0.65: def append_sample(lst): temp = [] for item in lst: if len(item) > 1 and type(item) == list: temp.append(item) else: temp.append(item) return temp for item in append_sample(trim_pad_sample(x)): samples.append(item) else: x = x[: int(min_dur * float(sr))] samples.append(x) return samples if dataset == "iemocap": samples = trim_pad_sample(y) else: duration_s = y.shape[0] / float(sr) if duration_s > min_dur: y = y[: int(min_dur * sr)] samples = [y] k = 0 for item in samples: y = item res = librosa.feature.melspectrogram(
y, sr=sr, n_fft=n_fft, n_mels=n_mels, win_length=win_length, hop_length=hop_length, window="hamming", fmin=300, fmax=8000, ) res = librosa.power_to_db(res, np.max) if res.shape[1] > output_length: res = res[:, :output_length] # print(mfccs.shape) elif res.shape[1] < output_length: res = np.pad(res, ((0, 0), (0, output_length - res.shape[1])), "constant") if deltas: logmel_delta = librosa.feature.delta(res) deltadelta = librosa.feature.delta(res, order=2) if means is not None: res = librosa.util.normalize(res) logmel_delta = librosa.util.normalize(logmel_delta) deltadelta = librosa.util.normalize(deltadelta) res = np.stack([res, logmel_delta, deltadelta]) joblib.dump(res, output_name.format(k)) k += 1 # %% if __name__ == "__main__": n_mels = 128 # number of bins in spectrogram. Height of image # time_steps = 384 # number of time-steps. Width of image n_fft = 2048 hop_length = 512 # 1524 # number of samples per time-step in spectrogram win_length = 128 # n_fft512 min_dur = 8.0 dataset = "iemocap" grayscale = True mlst = [] if dataset == "iemocap": """ pd.Series(mlst).describe() count 2170.000000 mean 4.379649 std 3.415235 min 0.779937 25% 2.109938 50% 3.259937 75% 5.667500 max 34.138750 dtype: float64 """ # load audio. Using example from librosa print(os.getcwd()) source_path = "IEMOCAP_full_release.tar/IEMOCAP_full_release/Session{}/sentences/wav/" dest_path = "datasets/IEMOCAP/LOGMEL_DELTAS/" df = pd.read_csv("df_iemocap.csv") processed_files = [] for _, row in df.iterrows(): if row.name in processed_files: continue sess_path = source_path.format(row.wav_file[4]) folder = row.wav_file[:-5] source_file = os.path.join(sess_path, folder, row.wav_file + ".wav") if not os.path.exists(dest_path + folder): os.makedirs(dest_path + folder) # print('dest',dest_path + i) # print('source',file_path) sr = 16000 preemph_coef = 0.97 sample_rate = sr window_size = 0.025 window_stride = 0.01 num_mel_bins = 40 n_fft = 512 # int(sample_rate * window_size) win_length = int(sample_rate * window_size) # None# hop_length = int(sample_rate * window_stride) # 256# same_rows = df[df.wav_file == row.wav_file] init_start = 0.0 for _, i in same_rows.iterrows(): file_name = i.wav_file + "_" + str(i.name) out = dest_path + folder + "/" + file_name + "_{}.joblib" end = i.end_time - i.start_time + init_start gen_melspect( source_file, out, sr=sr, min_dur=3.0, output_length=300, dataset=dataset, n_fft=n_fft, win_length=win_length, hop_length=hop_length, n_mels=num_mel_bins, deltas=True, start=init_start, end=end, ) init_start = end processed_files.append(i.name)
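# A small self-contained sketch of the fixed-length step used in gen_melspect
# above: spectrograms longer than `output_length` frames are truncated and
# shorter ones are zero-padded on the right, so every sample ends up with the
# same time dimension. Toy shapes only; not the full pipeline.
import numpy as np

def fix_length(spec: np.ndarray, output_length: int) -> np.ndarray:
    if spec.shape[1] > output_length:
        return spec[:, :output_length]  # truncate extra frames
    return np.pad(spec, ((0, 0), (0, output_length - spec.shape[1])), "constant")

assert fix_length(np.ones((128, 300)), 251).shape == (128, 251)
assert fix_length(np.ones((128, 100)), 251).shape == (128, 251)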
TwitchAlert.py
#!/usr/bin/env python
"""
Koala Bot Base Cog code and additional base cog functions
Commented using reStructuredText (reST)
"""
# Futures

# Built-in/Generic Imports
import os
import time
import re
import aiohttp
import logging
from concurrent.futures import ThreadPoolExecutor

logging.basicConfig(filename='TwitchAlert.log')

# Own modules
import KoalaBot
from utils.KoalaColours import *
from utils.KoalaUtils import error_embed, is_channel_in_guild, extract_id
from utils import KoalaDBManager

# Libs
import discord
from discord.ext import commands, tasks
from dotenv import load_dotenv
import asyncio

# Constants
load_dotenv()
DEFAULT_MESSAGE = ""
TWITCH_ICON = "https://cdn3.iconfinder.com/data/icons/social-messaging-ui-color-shapes-2-free" \
              "/128/social-twitch-circle-512.png"
TWITCH_CLIENT_ID = os.environ.get('TWITCH_TOKEN')
TWITCH_SECRET = os.environ.get('TWITCH_SECRET')
TWITCH_USERNAME_REGEX = "^[a-z0-9][a-z0-9_]{3,24}$"
LOOP_CHECK_LIVE_DELAY = 1
TEAMS_LOOP_CHECK_LIVE_DELAY = 1
REFRESH_TEAMS_DELAY = 5

# Variables


def twitch_is_enabled(ctx):
    """
    A check used to verify the guild has enabled the TwitchAlert extension
    e.g. @commands.check(twitch_is_enabled)
    :param ctx: The context of the message
    :return: True if the TwitchAlert extension is enabled, False otherwise
    """
    try:
        result = KoalaBot.check_guild_has_ext(ctx, "TwitchAlert")
    except PermissionError:
        result = False
    return result


class TwitchAlert(commands.Cog):
    """
    A discord.py cog for alerting when someone goes live on twitch
    """

    def __init__(self, bot, database_manager=None):
        """
        Initialises local variables
        :param bot: The bot client for this cog
        """
        if not database_manager:
            database_manager = KoalaBot.database_manager
        self.bot = bot
        database_manager.create_base_tables()
        database_manager.insert_extension("TwitchAlert", 0, True, True)
        self.ta_database_manager = TwitchAlertDBManager(database_manager, bot)
        self.ta_database_manager.create_tables()
        self.loop_thread = None
        self.loop_team_thread = None
        self.running = False
        self.stop_loop = False

    @commands.command(name="twitchEditMsg", aliases=["edit_default_message"])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def edit_default_message(self, ctx, raw_channel_id, *default_live_message):
        """
        Edit the default message put in a Twitch Alert Notification
        :param ctx: The discord context of the command
        :param raw_channel_id: The channel ID where the twitch alert is being used
        :param default_live_message: The default live message of users within this Twitch Alert,
            leave empty for program default
        :return:
        """
        try:
            channel_id = extract_id(raw_channel_id)
        except TypeError:
            channel_id = ctx.message.channel.id
            default_live_message = (raw_channel_id,) + default_live_message

        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        # Assigning default message if provided
        if default_live_message is not None and default_live_message != (None,):
            default_message = " ".join(default_live_message)
            if len(default_message) > 1000:
                await ctx.send(embed=error_embed(
                    "custom_message is too long, try something with less than 1000 characters"))
                return
        else:
            default_message = None

        # Creates a new Twitch Alert with the used guild ID and default message if provided
        default_message = self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id, default_message,
                                                          replace=True)

        # Returns an embed with information altered
        new_embed = discord.Embed(title="Default Message Edited", colour=KOALA_GREEN,
                                  description=f"Guild: 
{ctx.message.guild.id}\n" f"Channel: {channel_id}\n" f"Default Message: {default_message}") await ctx.send(embed=new_embed) @commands.command(name="twitchViewMsg", aliases=["view_default_message"]) @commands.check(KoalaBot.is_admin) @commands.check(twitch_is_enabled) async def view_default_message(self, ctx, raw_channel_id=None): """ Shows the current default message for Twitch Alerts :param ctx: The discord context of the command :param raw_channel_id: The channel ID where the twitch alert is being used leave empty for program default :return: """ if raw_channel_id is None: channel_id = ctx.message.channel.id else: channel_id = extract_id(raw_channel_id) if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id): await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server.")) return # Creates a new Twitch Alert with the used guild ID and default message if provided default_message = self.ta_database_manager.get_default_message(channel_id)[0][0] # Returns an embed with information altered new_embed = discord.Embed(title="Default Message", colour=KOALA_GREEN, description=f"Guild: {ctx.message.guild.id}\n" f"Channel: {channel_id}\n" f"Default Message: {default_message}") # new_embed.set_footer(text=f"Twitch Alert ID: {new_id}") await ctx.send(embed=new_embed) @commands.command(name="twitchAdd", aliases=['add_user_to_twitch_alert']) @commands.check(KoalaBot.is_admin) @commands.check(twitch_is_enabled) async def add_user_to_twitch_alert(self, ctx, raw_channel_id, twitch_username=None, *custom_live_message): """ Add a Twitch user to a Twitch Alert :param ctx: The discord context of the command :param raw_channel_id: The channel ID where the twitch alert is being used :param twitch_username: The Twitch Username of the user being added (lowercase) :param custom_live_message: the custom live message for this user's alert :return: """ try: channel_id = extract_id(raw_channel_id) except TypeError: custom_live_message = (twitch_username,) + custom_live_message twitch_username = raw_channel_id channel_id = ctx.message.channel.id if twitch_username is None: raise discord.errors.InvalidArgument("twitch_username is a required argument that is missing.") elif not re.search(TWITCH_USERNAME_REGEX, twitch_username): raise discord.errors.InvalidArgument( "The given twitch_username is not a valid username (please use lowercase)") # Check the channel specified is in this guild if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id): await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server.")) return default_message = self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id) # Setting the custom message as required if custom_live_message is not None and custom_live_message != (None,): custom_message = " ".join(custom_live_message) default_message = custom_message if len(default_message) > 1000: await ctx.send(embed=error_embed( "custom_message is too long, try something with less than 1000 characters")) return else: custom_message = None self.ta_database_manager.add_user_to_ta(channel_id, twitch_username, custom_message, ctx.message.guild.id) # Response Message new_embed = discord.Embed(title="Added User to Twitch Alert", colour=KOALA_GREEN, description=f"Channel: {channel_id}\n" f"User: {twitch_username}\n" f"Message: {default_message}") await ctx.send(embed=new_embed) @commands.command(name="twitchRemove", aliases=['remove_user_from_twitch_alert']) @commands.check(KoalaBot.is_admin) 
@commands.check(twitch_is_enabled) async def remove_user_from_twitch_alert(self, ctx, raw_channel_id, twitch_username=None): """ Removes a user from a Twitch Alert :param ctx: the discord context :param raw_channel_id: The discord channel ID of the Twitch Alert :param twitch_username: The username of the user to be removed :return: """ try: channel_id = extract_id(raw_channel_id) except TypeError: twitch_username = raw_channel_id channel_id = ctx.message.channel.id if twitch_username is None: raise discord.errors.InvalidArgument("twitch_username is a required argument that is missing.") # Check the channel specified is in this guild if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id): await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server.")) return await self.ta_database_manager.remove_user_from_ta(channel_id, twitch_username) # Response Message new_embed = discord.Embed(title="Removed User from Twitch Alert", colour=KOALA_GREEN, description=f"Channel: {channel_id}\n" f"User: {twitch_username}") await ctx.send(embed=new_embed) @commands.command(name="twitchAddTeam", aliases=["add_team_to_twitch_alert"]) @commands.check(KoalaBot.is_admin) @commands.check(twitch_is_enabled) async def add_team_to_twitch_alert(self, ctx, raw_channel_id, team_name=None, *custom_live_message): """ Add a Twitch team to a Twitch Alert :param ctx: The discord context of the command :param raw_channel_id: The channel ID where the twitch alert is being used :param team_name: The Twitch team being added (lowercase) :param custom_live_message: the custom live message for this team's alert :return: """ try: channel_id = extract_id(raw_channel_id) except TypeError: custom_live_message = (team_name,) + custom_live_message team_name = raw_channel_id channel_id = ctx.message.channel.id if team_name is None: raise discord.errors.InvalidArgument("team_name is a required argument that is missing.") elif not re.search(TWITCH_USERNAME_REGEX, team_name): raise discord.errors.InvalidArgument( "The given team_name is not a valid twitch team name (please use lowercase)") # Check the channel specified is in this guild if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id): await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server.")) return self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id) # Setting the custom message as required if custom_live_message is not None and custom_live_message != (None,): default_message = " ".join(custom_live_message) if len(default_message) > 1000: await ctx.send(embed=error_embed( "custom_message is too long, try something with less than 1000 characters")) return else: default_message = DEFAULT_MESSAGE self.ta_database_manager.add_team_to_ta(channel_id, team_name, default_message, ctx.message.guild.id) # Response Message new_embed = discord.Embed(title="Added Team to Twitch Alert", colour=KOALA_GREEN, description=f"Channel: {channel_id}\n" f"Team: {team_name}\n" f"Message: {default_message}") # new_embed.set_footer(text=f"Twitch Alert ID: {channel_id}") await ctx.send(embed=new_embed) @commands.command(name="twitchRemoveTeam", aliases=["remove_team_from_twitch_alert"]) @commands.check(KoalaBot.is_admin) @commands.check(twitch_is_enabled) async def remove_team_from_twitch_alert(self, ctx, raw_channel_id, team_name=None): """ Removes a team from a Twitch Alert :param ctx: the discord context :param raw_channel_id: The discord channel ID of the Twitch Alert :param 
team_name: The Twitch team being added (lowercase) :return: """ try: channel_id = extract_id(raw_channel_id) except TypeError: team_name = raw_channel_id channel_id = ctx.message.channel.id if team_name is None: raise discord.errors.InvalidArgument("team_name is a required argument that is missing.") # Check the channel specified is in this guild if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id): await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server.")) return await self.ta_database_manager.remove_team_from_ta(channel_id, team_name) # Response Message new_embed = discord.Embed(title="Removed Team from Twitch Alert", colour=KOALA_GREEN, description=f"Channel: {channel_id}\n" f"Team: {team_name}") await ctx.send(embed=new_embed) @commands.command(name="twitchList", aliases=["list_twitch_alert"]) @commands.check(KoalaBot.is_admin) @commands.check(twitch_is_enabled) async def list_twitch_alert(self, ctx, raw_channel_id=None): """ Shows all current TwitchAlert users and teams in a channel :param ctx: :param raw_channel_id: :return: """ if raw_channel_id is None: channel_id = ctx.message.channel.id else: channel_id = extract_id(raw_channel_id) if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id): await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server.")) return embed = discord.Embed() embed.title = "Twitch Alerts" embed.colour = KOALA_GREEN embed.set_footer(text=f"Channel ID: {channel_id}") results = self.ta_database_manager.get_users_in_ta(channel_id) if results: users = "" for result in results: users += f"{result[0]}\n" embed.add_field(name=":bust_in_silhouette: Users", value=users) else: embed.add_field(name=":bust_in_silhouette: Users", value="None") results = self.ta_database_manager.get_teams_in_ta(channel_id) if results: teams = "" for result in results: teams += f"{result[0]}\n" embed.add_field(name=":busts_in_silhouette: Teams", value=teams) else: embed.add_field(name=":busts_in_silhouette: Teams", value="None") await ctx.send(embed=embed) @commands.Cog.listener() async def on_ready(self): """ When the bot is started up, the loop begins :return: """ if not self.running: self.start_loops() def start_loops(self): self.loop_update_teams.start() self.loop_check_team_live.start() self.loop_check_live.start() self.running = True def end_loops(self): self.loop_update_teams.cancel() self.loop_check_team_live.cancel() self.loop_check_live.cancel() self.running = False @tasks.loop(minutes=LOOP_CHECK_LIVE_DELAY) async def loop_check_live(self): """ A loop that continually checks the live status of users and sends alerts when online, removing them when offline :return: """ start = time.time() # logging.info("TwitchAlert: User Loop Started") sql_find_users = "SELECT twitch_username " \ "FROM UserInTwitchAlert " \ "JOIN TwitchAlerts TA on UserInTwitchAlert.channel_id = TA.channel_id " \ "JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \ "WHERE extension_id = 'TwitchAlert' OR extension_id = 'All') GE on TA.guild_id = GE.guild_id;" users = self.ta_database_manager.database_manager.db_execute_select(sql_find_users) usernames = [] for user in users: if not re.search(TWITCH_USERNAME_REGEX, user[0]): sql_remove_invalid_user = "DELETE FROM UserInTwitchAlert WHERE twitch_username = ?" 
self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_user, args=[user[0]]) else: usernames.append(user[0]) # user_streams = self.ta_database_manager.twitch_handler.get_streams_data(usernames) if not usernames: return user_streams = await self.ta_database_manager.twitch_handler.get_streams_data(usernames) if user_streams is None: return # Deals with online streams for streams_details in user_streams: try: if streams_details.get('type') == "live": current_username = str.lower(streams_details.get("user_name")) usernames.remove(current_username) sql_find_message_id = \ "SELECT UserInTwitchAlert.channel_id, message_id, custom_message, default_message " \ "FROM UserInTwitchAlert " \ "JOIN TwitchAlerts TA on UserInTwitchAlert.channel_id = TA.channel_id " \ "JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \ "WHERE extension_id = 'TwitchAlert' " \ " OR extension_id = 'All') GE on TA.guild_id = GE.guild_id " \ "WHERE twitch_username = ?;" results = self.ta_database_manager.database_manager.db_execute_select( sql_find_message_id, args=[current_username]) new_message_embed = None for result in results: channel_id = result[0] message_id = result[1] custom_message = result[2] channel_default_message = result[3] channel = self.bot.get_channel(id=channel_id) try: # If no Alert is posted if message_id is None: if new_message_embed is None: if custom_message is not None: message = custom_message else: message = channel_default_message new_message_embed = await self.create_alert_embed(streams_details, message) if new_message_embed is not None and channel is not None: new_message = await channel.send(embed=new_message_embed) sql_update_message_id = """ UPDATE UserInTwitchAlert SET message_id = ? WHERE channel_id = ? AND twitch_username = ?""" self.ta_database_manager.database_manager.db_execute_commit( sql_update_message_id, args=[new_message.id, result[0], current_username]) except discord.errors.Forbidden as err: logging.warning(f"TwitchAlert: {err} Name: {channel} ID: {channel.id}") sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?" 
self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_channel, args=[channel.id]) except Exception as err: logging.error(f"TwitchAlert: User Loop error {err}") # Deals with remaining offline streams await self.ta_database_manager.delete_all_offline_streams(False, usernames) time_diff = time.time() - start if time_diff > 5: logging.warning(f"TwitchAlert: User Loop Finished in > 5s | {time_diff}s") async def create_alert_embed(self, stream_data, message): """ Creates and sends an alert message :param stream_data: The twitch stream data to have in the message :param message: The custom message to be added as a description :return: The discord message id of the sent message """ user_details = await self.ta_database_manager.twitch_handler.get_user_data( stream_data.get("user_name")) game_details = await self.ta_database_manager.twitch_handler.get_game_data( stream_data.get("game_id")) return create_live_embed(stream_data, user_details, game_details, message) @tasks.loop(minutes=REFRESH_TEAMS_DELAY) async def loop_update_teams(self): start = time.time() # logging.info("TwitchAlert: Started Update Teams") await self.ta_database_manager.update_all_teams_members() time_diff = time.time() - start if time_diff > 5: logging.warning(f"TwitchAlert: Teams updated in > 5s | {time_diff}s") @tasks.loop(minutes=TEAMS_LOOP_CHECK_LIVE_DELAY) async def loop_check_team_live(self): """ A loop to repeatedly send messages if a member of a team is live, and remove it when they are not :return: """ start = time.time() # logging.info("TwitchAlert: Team Loop Started") sql_select_team_users = "SELECT twitch_username, twitch_team_name " \ "FROM UserInTwitchTeam " \ "JOIN TeamInTwitchAlert TITA " \ " ON UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id " \ "JOIN TwitchAlerts TA on TITA.channel_id = TA.channel_id " \ "JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \ "WHERE extension_id = 'TwitchAlert' " \ " OR extension_id = 'All') GE on TA.guild_id = GE.guild_id " users_and_teams = self.ta_database_manager.database_manager.db_execute_select(sql_select_team_users) usernames = [] for user in users_and_teams: if not re.search(TWITCH_USERNAME_REGEX, user[1]): sql_remove_invalid_user = "DELETE FROM TeamInTwitchAlert WHERE twitch_team_name = ?" 
self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_user, args=[user[1]]) else: usernames.append(user[0]) if not usernames: return streams_data = await self.ta_database_manager.twitch_handler.get_streams_data(usernames) if streams_data is None: return # Deals with online streams for stream_data in streams_data: try: if stream_data.get('type') == "live": current_username = str.lower(stream_data.get("user_name")) usernames.remove(current_username) sql_find_message_id = """ SELECT TITA.channel_id, UserInTwitchTeam.message_id, TITA.team_twitch_alert_id, custom_message, default_message FROM UserInTwitchTeam JOIN TeamInTwitchAlert TITA on UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id JOIN TwitchAlerts TA on TITA.channel_id = TA.channel_id JOIN (SELECT extension_id, guild_id FROM GuildExtensions WHERE extension_id = 'TwitchAlert' OR extension_id = 'All') GE ON TA.guild_id = GE.guild_id WHERE twitch_username = ?""" results = self.ta_database_manager.database_manager.db_execute_select( sql_find_message_id, args=[current_username]) new_message_embed = None for result in results: channel_id = result[0] message_id = result[1] team_twitch_alert_id = result[2] custom_message = result[3] channel_default_message = result[4] channel = self.bot.get_channel(id=channel_id) try: # If no Alert is posted if message_id is None: if new_message_embed is None: if custom_message is not None: message = custom_message else: message = channel_default_message new_message_embed = await self.create_alert_embed(stream_data, message) if new_message_embed is not None and channel is not None: new_message = await channel.send(embed=new_message_embed) sql_update_message_id = """ UPDATE UserInTwitchTeam SET message_id = ? WHERE team_twitch_alert_id = ? AND twitch_username = ?""" self.ta_database_manager.database_manager.db_execute_commit( sql_update_message_id, args=[new_message.id, team_twitch_alert_id, current_username]) except discord.errors.Forbidden as err: logging.warning(f"TwitchAlert: {err} Name: {channel} ID: {channel.id}") sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?" 
self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_channel, args=[channel.id]) except Exception as err: logging.error(f"TwitchAlert: Team Loop error {err}") # Deals with remaining offline streams await self.ta_database_manager.delete_all_offline_streams(True, usernames) time_diff = time.time() - start if time_diff > 5: logging.warning(f"TwitchAlert: Teams Loop Finished in > 5s | {time_diff}s") def create_live_embed(stream_info, user_info, game_info, message): """ Creates an embed for the go live announcement :param stream_info: The stream data from the Twitch API :param user_info: The user data for this streamer from the Twitch API :param game_info: The game data for this game from the Twitch API :param message: The custom message to be added as a description :return: The embed created """ embed = discord.Embed(colour=KOALA_GREEN) if message is not None and message != "": embed.description = message embed.set_author(name=stream_info.get("user_name") + " is now streaming!", icon_url=TWITCH_ICON) embed.title = "https://twitch.tv/" + str.lower(stream_info.get("user_name")) embed.add_field(name="Stream Title", value=stream_info.get("title")) if game_info is None: embed.add_field(name="Playing", value="No Category") else: embed.add_field(name="Playing", value=game_info.get("name")) embed.set_thumbnail(url=user_info.get("profile_image_url")) return embed class TwitchAPIHandler: """ A wrapper to interact with the twitch API """ def __init__(self, client_id: str, client_secret: str): self.client_id = client_id self.client_secret = client_secret self.params = {'client_id': self.client_id, 'client_secret': self.client_secret, 'grant_type': 'client_credentials'} self.token = {} @property def base_headers(self): return { 'Authorization': f'Bearer {self.token.get("access_token")}', 'Client-ID': self.client_id } async def get_new_twitch_oauth(self): """ Get a new OAuth2 token from twitch using client_id and client_secret :return: The new OAuth2 token """ async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(60)) as client: async with client.post('https://id.twitch.tv/oauth2/token', params=self.params) as response: if response.status > 399: logging.critical(f'TwitchAlert: Error {response.status} while getting Oauth token') self.token = {} response_json = await response.json() try: response_json['expires_in'] += time.time() except KeyError: # probably shouldn't need this, but catch just in case logging.warning('TwitchAlert: Failed to set token expiration time') self.token = response_json return self.token async def requests_get(self, url, headers=None, params=None): """ Gets a response from a curl get request to the given url using headers of this object :param headers: the Headers required for the request, will use self.headers by default :param url: The URL to send the request to :param params: The parameters of the request :return: The response of the request """ if self.token.get('expires_in', 0) <= time.time() + 1 or not self.token: await self.get_new_twitch_oauth() async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(60)) as client: async with client.get(url=url, headers=headers if headers else self.base_headers, params=params) as \ response: if response.status == 401: logging.info(f"TwitchAlert: {response.status}, getting new oauth and retrying") await self.get_new_twitch_oauth() return await self.requests_get(url, headers, params) elif response.status > 399: logging.warning(f'TwitchAlert: {response.status} while getting requesting URL:{url}') return 
await response.json() async def get_streams_data(self, usernames): """ Gets all stream information from a list of given usernames :param usernames: The list of usernames :return: The JSON data of the request """ url = 'https://api.twitch.tv/helix/streams?' next_hundred_users = usernames[:100] usernames = usernames[100:] result = (await self.requests_get(url + "user_login=" + "&user_login=".join(next_hundred_users))).get("data") while usernames: next_hundred_users = usernames[:100] usernames = usernames[100:] result += (await self.requests_get(url + "user_login=" + "&user_login=".join(next_hundred_users))).get( "data") return result async def get_user_data(self, username): """ Gets the user information of a given user :param username: The display twitch username of the user :return: The JSON information of the user's data """ url = 'https://api.twitch.tv/helix/users?login=' + username return (await self.requests_get(url)).get("data")[0] async def get_game_data(self, game_id): """ Gets the game information of a given game :param game_id: The twitch game ID of a game :return: The JSON information of the game's data """ if game_id != "": url = 'https://api.twitch.tv/helix/games?id=' + game_id game_data = await self.requests_get(url) return game_data.get("data")[0] else: return None async def get_team_users(self, team_id): """ Gets the users data about a given team :param team_id: The team name of the twitch team :return: the JSON information of the users """ url = 'https://api.twitch.tv/helix/teams?name=' + team_id return ( await self.requests_get(url)).get("data")[0].get("users") class TwitchAlertDBManager: """ A class for interacting with the Koala twitch database """ def __init__(self, database_manager: KoalaDBManager.KoalaDBManager, bot_client: discord.client): """ Initialises local variables :param database_manager: :param bot_client: """ self.database_manager = database_manager self.twitch_handler = TwitchAPIHandler(TWITCH_CLIENT_ID, TWITCH_SECRET) self.bot = bot_client def get_parent_database_manager(self): """ A getter for the database manager of this object :return: """ return self.database_manager def create_tables(self): """ Creates all the tables associated with the twitch alert extension :return: """ # TwitchAlerts sql_create_twitch_alerts_table = """ CREATE TABLE IF NOT EXISTS TwitchAlerts ( guild_id integer NOT NULL, channel_id integer NOT NULL, default_message text NOT NULL, PRIMARY KEY (guild_id, channel_id), CONSTRAINT fk_guild FOREIGN KEY (guild_id) REFERENCES GuildExtensions (guild_id) ON DELETE CASCADE );""" # UserInTwitchAlert sql_create_user_in_twitch_alert_table = """ CREATE TABLE IF NOT EXISTS UserInTwitchAlert ( channel_id integer NOT NULL, twitch_username text NOT NULL, custom_message text, message_id integer, PRIMARY KEY (channel_id, twitch_username), CONSTRAINT fk_channel FOREIGN KEY (channel_id) REFERENCES TwitchAlerts (channel_id) ON DELETE CASCADE );""" # TeamInTwitchAlert sql_create_team_in_twitch_alert_table = """ CREATE TABLE IF NOT EXISTS TeamInTwitchAlert ( team_twitch_alert_id integer PRIMARY KEY AUTOINCREMENT, channel_id integer NOT NULL, twitch_team_name text NOT NULL, custom_message text, CONSTRAINT fk_channel FOREIGN KEY (channel_id) REFERENCES TwitchAlerts (channel_id) ON DELETE CASCADE );""" # UserInTwitchTeam sql_create_user_in_twitch_team_table = """ CREATE TABLE IF NOT EXISTS UserInTwitchTeam ( team_twitch_alert_id text NOT NULL, twitch_username text NOT NULL, message_id integer, PRIMARY KEY (team_twitch_alert_id, twitch_username), CONSTRAINT 
fk_twitch_team_alert FOREIGN KEY (team_twitch_alert_id) REFERENCES TeamInTwitchAlert (team_twitch_alert_id) ON DELETE CASCADE );""" # Create Tables self.database_manager.db_execute_commit(sql_create_twitch_alerts_table) self.database_manager.db_execute_commit(sql_create_user_in_twitch_alert_table) self.database_manager.db_execute_commit(sql_create_team_in_twitch_alert_table) self.database_manager.db_execute_commit(sql_create_user_in_twitch_team_table) def new_ta(self, guild_id, channel_id, default_message=None, replace=False): """ Creates a new Twitch Alert and gives the ID associated with it :param guild_id: The discord guild ID where the Twitch Alert is located :param channel_id: The discord channel ID of the twitch Alert :param default_message: The default message of users in the Twitch Alert :param replace: True if the new ta should replace the current if exists :return: The new default_message """ sql_find_ta = "SELECT default_message FROM TwitchAlerts WHERE channel_id=?" message = self.database_manager.db_execute_select(sql_find_ta, args=[channel_id]) if message and not replace: return message[0][0] # Sets the default message if not provided if default_message is None: default_message = DEFAULT_MESSAGE # Insert new Twitch Alert to database if replace: sql_insert_twitch_alert = """ REPLACE INTO TwitchAlerts(guild_id, channel_id, default_message) VALUES(?,?,?) """ else: sql_insert_twitch_alert = """ INSERT INTO TwitchAlerts(guild_id, channel_id, default_message) VALUES(?,?,?) """ self.database_manager.db_execute_commit(sql_insert_twitch_alert, args=[guild_id, channel_id, default_message]) return default_message def get_default_message(self, channel_id): """ Get the set default message for the twitch alert :param channel_id: The discord channel ID of the twitch Alert :return: The current default_message """ sql_find_ta = "SELECT default_message FROM TwitchAlerts WHERE channel_id= ?" return self.database_manager.db_execute_select(sql_find_ta, args=[channel_id]) def add_user_to_ta(self, channel_id, twitch_username, custom_message, guild_id=None): """ Add a twitch user to a given Twitch Alert :param channel_id: The discord channel ID of the twitch Alert :param twitch_username: The Twitch username of the user to be added :param custom_message: The custom Message of the user's live notification. None = use default Twitch Alert message :param guild_id: The guild ID of the channel :return: :raises: KeyError if channel ID is not defined in TwitchAlerts and guild_id is not provided """ self.new_ta(guild_id, channel_id) if custom_message: sql_insert_user_twitch_alert = """ INSERT INTO UserInTwitchAlert(channel_id, twitch_username, custom_message) VALUES(?, ?, ?) """ self.database_manager.db_execute_commit( sql_insert_user_twitch_alert, args=[channel_id, str.lower(twitch_username), custom_message]) else: sql_insert_user_twitch_alert = """ INSERT INTO UserInTwitchAlert(channel_id, twitch_username) VALUES(?, ?) """ self.database_manager.db_execute_commit( sql_insert_user_twitch_alert, args=[channel_id, str.lower(twitch_username)]) async def remove_user_from_ta(self, channel_id, twitch_username): """ Removes a user from a given Twitch Alert :param channel_id: The discord channel ID of the twitch Alert :param twitch_username: The Twitch username of the user to be added :return: """ sql_get_message_id = "SELECT message_id " \ "FROM UserInTwitchAlert " \ "WHERE twitch_username = ? " \ "AND channel_id = ? 
" message_id = self.database_manager.db_execute_select(sql_get_message_id, args=[twitch_username, channel_id])[0][0] if message_id is not None: await self.delete_message(message_id, channel_id) sql_remove_entry = """DELETE FROM UserInTwitchAlert WHERE twitch_username = ? AND channel_id = ?""" self.database_manager.db_execute_commit(sql_remove_entry, args=[twitch_username, channel_id]) async def delete_message(self, message_id, channel_id): """ Deletes a given discord message :param message_id: discord message ID of the message to delete :param channel_id: discord channel ID which has the message :return: """ try: channel = self.bot.get_channel(int(channel_id)) if channel is None: logging.warning(f"TwitchAlert: Channel ID {channel_id} does not exist, removing from database") sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?" self.database_manager.db_execute_commit(sql_remove_invalid_channel, args=[channel_id]) return message = await channel.fetch_message(message_id) await message.delete() except discord.errors.NotFound as err: logging.warning(f"TwitchAlert: Message ID {message_id} does not exist, skipping \nError: {err}") except discord.errors.Forbidden as err: logging.warning(f"TwitchAlert: {err} Channel ID: {channel_id}") sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?" self.database_manager.db_execute_commit(sql_remove_invalid_channel, args=[channel_id]) def get_users_in_ta(self, channel_id): """ Returns all users in a given Twitch Alert :param channel_id: The channel ID of the Twitch Alert :return: The sql results of the users """ sql_get_users = "SELECT twitch_username FROM UserInTwitchAlert WHERE channel_id = ?" return self.database_manager.db_execute_select(sql_get_users, args=[channel_id]) def get_teams_in_ta(self, channel_id): """ Returns all teams in a given Twitch Alert :param channel_id: The channel ID of the Twitch Alert :return: The sql results of the teams """ sql_get_teams = "SELECT twitch_team_name FROM TeamInTwitchAlert WHERE channel_id = ?" return self.database_manager.db_execute_select(sql_get_teams, args=[channel_id]) def add_team_to_ta(self, channel_id, twitch_team, custom_message, guild_id=None): """ Add a twitch team to a given Twitch Alert :param channel_id: The discord channel ID of the twitch Alert :param twitch_team: The Twitch team to be added :param custom_message: The custom Message of the team's live notification. None = use default Twitch Alert message :param guild_id: The guild ID of the channel :return: :raises: KeyError if channel ID is not defined in TwitchAlerts and guild_id is not provided """ self.new_ta(guild_id, channel_id) if custom_message: sql_insert_team_twitch_alert = """ INSERT INTO TeamInTwitchAlert(channel_id, twitch_team_name, custom_message) VALUES(?, ?, ?) """ self.database_manager.db_execute_commit( sql_insert_team_twitch_alert, args=[channel_id, str.lower(twitch_team), custom_message]) else: sql_insert_team_twitch_alert = """ INSERT INTO TeamInTwitchAlert(channel_id, twitch_team_name) VALUES(?, ?) """ self.database_manager.db_execute_commit( sql_insert_team_twitch_alert, args=[channel_id, str.lower(twitch_team)]) async def remove_team_from_ta(self, channel_id, team_name): """ Removes a team from a given twitch alert :param channel_id: The channel ID of the Twitch Alert :param team_name: The team name of the team to be removed :return: """ sql_get_team_alert_id = "SELECT team_twitch_alert_id " \ "FROM TeamInTwitchAlert " \ "WHERE twitch_team_name = ? " \ " AND channel_id = ?" 
result = self.database_manager.db_execute_select(sql_get_team_alert_id, args=[team_name, channel_id]) if not result: raise AttributeError("Team name not found") team_alert_id = result[0][0] sql_get_message_id = """SELECT UserInTwitchTeam.message_id FROM UserInTwitchTeam WHERE team_twitch_alert_id = ?""" message_ids = self.database_manager.db_execute_select(sql_get_message_id, args=[team_alert_id]) if message_ids is not None: for message_id in message_ids: if message_id[0] is not None: await self.delete_message(message_id[0], channel_id) sql_remove_users = """DELETE FROM UserInTwitchTeam WHERE team_twitch_alert_id = ?""" sql_remove_team = """DELETE FROM TeamInTwitchAlert WHERE team_twitch_alert_id = ?""" self.database_manager.db_execute_commit(sql_remove_users, args=[team_alert_id]) self.database_manager.db_execute_commit(sql_remove_team, args=[team_alert_id]) async def update_team_members(self, twitch_team_id, team_name): """ Users in a team are updated to ensure they are assigned to the correct team :param twitch_team_id: the team twitch alert id :param team_name: the name of the team :return: """ if re.search(TWITCH_USERNAME_REGEX, team_name): users = await self.twitch_handler.get_team_users(team_name) for user in users: sql_add_user = """INSERT OR IGNORE INTO UserInTwitchTeam(team_twitch_alert_id, twitch_username) VALUES(?, ?)""" try: self.database_manager.db_execute_commit(sql_add_user, args=[twitch_team_id, user.get("user_login")], pass_errors=True) except KoalaDBManager.sqlite3.IntegrityError as err: logging.error(f"Twitch Alert: 1034: {err}") pass async def update_all_teams_members(self): """ Updates all teams with the current team members :return: """ sql_get_teams = """SELECT team_twitch_alert_id, twitch_team_name FROM TeamInTwitchAlert""" teams_info = self.database_manager.db_execute_select(sql_get_teams) for team_info in teams_info: await self.update_team_members(team_info[0], team_info[1]) async def delete_all_offline_streams(self, team: bool, usernames):
def setup(bot: KoalaBot) -> None:
    """
    Load this cog into the KoalaBot.
    :param bot: the bot client for KoalaBot
    """
    if TWITCH_SECRET is None or TWITCH_CLIENT_ID is None:
        logging.error("TwitchAlert not started. API keys not found in environment.")
        print("TwitchAlert not started. API keys not found in environment.")
        KoalaBot.database_manager.insert_extension("TwitchAlert", 0, False, False)
    else:
        bot.add_cog(TwitchAlert(bot))
        logging.info("TwitchAlert is ready.")
        print("TwitchAlert is ready.")
""" A method that deletes all currently offline streams :param team: True if the users are from teams, false if individuals :param usernames: The usernames of the team members :return: """ if team: sql_select_offline_streams_with_message_ids = f""" SELECT channel_id, message_id FROM UserInTwitchTeam JOIN TeamInTwitchAlert TITA on UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id WHERE message_id NOT NULL AND twitch_username in ({','.join(['?'] * len(usernames))})""" sql_update_offline_streams = f""" UPDATE UserInTwitchTeam SET message_id = NULL WHERE twitch_username in ({','.join(['?'] * len(usernames))})""" else: sql_select_offline_streams_with_message_ids = f""" SELECT channel_id, message_id FROM UserInTwitchAlert WHERE message_id NOT NULL AND twitch_username in ({','.join(['?'] * len(usernames))})""" sql_update_offline_streams = f""" UPDATE UserInTwitchAlert SET message_id = NULL WHERE twitch_username in ({','.join(['?'] * len(usernames))})""" results = self.database_manager.db_execute_select( sql_select_offline_streams_with_message_ids, usernames) for result in results: await self.delete_message(result[1], result[0]) self.database_manager.db_execute_commit(sql_update_offline_streams, usernames)
util.go
package internal

import "path/filepath"

func comparePathList(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for i, v := range a {
		if filepath.Clean(v) != filepath.Clean(b[i]) {
			return false
		}
	}
	return true
}

func compareStringList(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for i, v := range a {
		if v != b[i] {
			return false
		}
	}
	return true
}

func combineMaps(a, b map[string]string) map[string]string {
	result := make(map[string]string)
	for k, v := range a {
		result[k] = v
	}
	for k, v := range b {
		result[k] = v
	}
	return result
}

func Contains(slice []string, element string) bool {
	for _, x := range slice {
		if x == element
	}
	return false
}
{
			return true
		}
0004_auto_20181212_1442.py
# Generated by Django 2.0.9 on 2018-12-12 14:42

import datetime

from django.db import migrations, models
        ('leave', '0003_auto_20181212_1214'),
    ]

    operations = [
        migrations.AddField(
            model_name='request',
            name='perm',
            field=models.BooleanField(default=True, verbose_name='Permission Granted'),
        ),
        migrations.AlterField(
            model_name='request',
            name='time',
            field=models.TimeField(default=datetime.time(14, 42, 31, 612078), null=True, verbose_name='Time'),
        ),
    ]
class Migration(migrations.Migration):

    dependencies = [
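# Note the frozen default above: datetime.time(14, 42, 31, 612078) was
# evaluated once when makemigrations ran, so every new row gets that literal
# time. If the intent was "time of creation", the model field should take a
# callable instead. A hypothetical sketch (the actual `leave` app model is
# not shown here):
import datetime

frozen = datetime.datetime.now().time()  # evaluated once, like the migration above

def current_time():
    # evaluated on every save; module-level so Django can serialize it
    return datetime.datetime.now().time()

# e.g. on the model: time = models.TimeField(default=current_time, null=True, verbose_name='Time')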
example04_housing_validation.py
""" Copyright (c) 2021 Olivier Sprangers as part of Airlab Amsterdam Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. https://github.com/elephaint/pgbm/blob/main/LICENSE """ #%% Load packages import torch from pgbm import PGBM from sklearn.model_selection import train_test_split from sklearn.datasets import fetch_california_housing import matplotlib.pyplot as plt #%% Objective for pgbm def
(yhat, y, sample_weight=None):
    gradient = (yhat - y)
    hessian = torch.ones_like(yhat)
    return gradient, hessian

def rmseloss_metric(yhat, y, sample_weight=None):
    loss = (yhat - y).pow(2).mean().sqrt()
    return loss

#%% Load data
X, y = fetch_california_housing(return_X_y=True)

#%% Parameters
params = {'min_split_gain': 0,
          'min_data_in_leaf': 2,
          'max_leaves': 8,
          'max_bin': 64,
          'learning_rate': 0.1,
          'n_estimators': 2000,
          'verbose': 2,
          'early_stopping_rounds': 100,
          'feature_fraction': 1,
          'bagging_fraction': 1,
          'seed': 1,
          'reg_lambda': 1,
          'device': 'gpu',
          'gpu_device_id': 0,
          'derivatives': 'exact',
          'distribution': 'normal'}
n_forecasts = 1000
n_splits = 2
base_estimators = 2000

#%% Validation loop
rmse, crps = torch.zeros(n_splits), torch.zeros(n_splits)
for i in range(n_splits):
    print(f'Fold {i+1}/{n_splits}')
    # Split for model validation
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1, random_state=i)
    X_train_val, X_val, y_train_val, y_val = train_test_split(X_train, y_train, test_size=0.2, random_state=i)
    # Build datasets
    train_data = (X_train, y_train)
    train_val_data = (X_train_val, y_train_val)
    valid_data = (X_val, y_val)
    # Train to retrieve best iteration
    print('PGBM Validating on partial dataset...')
    params['n_estimators'] = base_estimators
    model = PGBM()
    model.train(train_val_data, objective=mseloss_objective, metric=rmseloss_metric, valid_set=valid_data,
                params=params)
    # Set iterations to best iteration
    params['n_estimators'] = model.best_iteration
    # Retrain on full set
    print('PGBM Training on full dataset...')
    model = PGBM()
    model.train(train_data, objective=mseloss_objective, metric=rmseloss_metric, params=params)
    # Predictions
    print('PGBM Prediction...')
    yhat_point = model.predict(X_test)
    yhat_dist = model.predict_dist(X_test, n_forecasts=n_forecasts)
    # Scoring
    rmse[i] = model.metric(yhat_point.cpu(), y_test)
    crps[i] = model.crps_ensemble(yhat_dist.cpu(), y_test).mean()
    # Print scores current fold
    print(f'RMSE Fold {i+1}, {rmse[i]:.2f}')
    print(f'CRPS Fold {i+1}, {crps[i]:.2f}')

# Print final scores
print(f'RMSE {rmse.mean():.2f}+-{rmse.std():.2f}')
print(f'CRPS {crps.mean():.2f}+-{crps.std():.2f}')

#%% Plot all samples
plt.plot(y_test, 'o', label='Actual')
plt.plot(yhat_point.cpu(), 'ko', label='Point prediction PGBM')
plt.plot(yhat_dist.cpu().max(dim=0).values, 'k--', label='Max bound PGBM')
plt.plot(yhat_dist.cpu().min(dim=0).values, 'k--', label='Min bound PGBM')
plt.legend()
mseloss_objective
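For reference, the objective above encodes the analytic derivatives of squared-error loss: for L = 0.5*(yhat - y)^2 the gradient is yhat - y and the hessian is identically 1, which is exactly what mseloss_objective returns. A quick autograd check (illustrative only, not part of the original example):
import torch

yhat = torch.tensor([2.0, -1.0], requires_grad=True)
y = torch.tensor([1.5, 0.0])
loss = 0.5 * (yhat - y).pow(2).sum()
loss.backward()
# The autograd gradient matches the closed form yhat - y.
assert torch.allclose(yhat.grad, (yhat - y).detach())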
lenet.py
from tensorflow.keras.models import Sequential from tensorflow.keras.layers import Conv2D from tensorflow.keras.layers import MaxPooling2D from tensorflow.keras.layers import Activation from tensorflow.keras.layers import Flatten from tensorflow.keras.layers import Dense from tensorflow.keras import backend as K class LeNet: @staticmethod def build(width, height, depth, classes, last_active="softmax"): # Initialize the model model = Sequential() input_shape = (height, width, depth) # If we are using 'channels-first', update the input shape if K.image_data_format() == 'channels_first': input_shape = (depth, height, width) # First set of CONV => RELU => POOL layers model.add(Conv2D(20, (5, 5), padding='same', input_shape=input_shape)) model.add(Activation('relu')) model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2))) # Second set of CONV => RELU => POOL layers
model.add(Activation('relu')) model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2))) # First (and only) set of FC => RELU layers model.add(Flatten()) model.add(Dense(500)) model.add(Activation('relu')) model.add(Dense(classes)) model.add(Activation(last_active)) # return the constructed network architecture return model
model.add(Conv2D(50, (5, 5), padding='same'))
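A usage sketch for the class above (the 28x28x1 input and 10 classes are assumptions, e.g. MNIST-like data; they are not part of the original file):
from tensorflow.keras.optimizers import SGD

# Build LeNet for 28x28 grayscale images with 10 output classes.
model = LeNet.build(width=28, height=28, depth=1, classes=10)
model.compile(loss='categorical_crossentropy', optimizer=SGD(learning_rate=0.01), metrics=['accuracy'])
model.summary()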
settings.py
import boto3 def
(name): if not name: raise ValueError("name not passed.") system_code = "spot-rec" name = f"/{system_code}/{name}" client = boto3.client('ssm') response = client.get_parameter(Name=name) return response['Parameter']['Value'] client_id = __get_parameter('shared/client_id') client_secret = __get_parameter('shared/client_secret') DYNAMODB_TABLE = __get_parameter('dynamodb') DYNAMODB_TABLE_HASH_KEY = __get_parameter('dynamodb_hash_key_name') DYNAMODB_TABLE_SORT_KEY = __get_parameter('dynamodb_sort_key_name') AUDIO_UPLOAD_BUCKET = __get_parameter('audio_bucket_name') FEATURE_COL = __get_parameter('feature_column_name') FEATURE_VECTOR_LENGTH = int(__get_parameter('feature_vector_length')) ANNOY_INDEX_COL = __get_parameter('annoy_index_col_name')
__get_parameter
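Each lookup above resolves to a name under the /spot-rec/ SSM prefix, so __get_parameter('shared/client_id') reads /spot-rec/shared/client_id. A no-AWS sketch of the name construction (values hypothetical); note that boto3's get_parameter defaults to WithDecryption=False, so SecureString values would come back encrypted:
# Illustration only: how the SSM parameter name is assembled.
system_code = "spot-rec"
name = "shared/client_id"
assert f"/{system_code}/{name}" == "/spot-rec/shared/client_id"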
tools.conf.ts
declare const L: any; import 'leaflet-draw/dist/leaflet.draw-src'; import { setPopupHoverMode } from './popupControl.conf'; import { GlobalMapService } from '../../../maps.service'; import { LeafletService } from './../leaflet-map.service'; export function drawPlugin(map: L.Map, mapService: LeafletService) { const drawnItems: L.FeatureGroup = L.featureGroup().addTo(map); const drawControl = initDrawControl(drawnItems); map.addControl(drawControl); let editMode = false; const editTool = new L.EditToolbar.Edit(map, { featureGroup: drawnItems }); // overrideGeometries(); map.on(L.Draw.Event.CREATED, (geometry: any) => { const layer = geometry.layer; // Delete last drawn layer before adding the new one. drawnItems.clearLayers(); drawnItems.addLayer(layer); // Forces edit mode on event drawn editTool.enable(); editMode = !editMode; layer.on('dblclick', (e) => { // Re-enable map zoom on dbl-click map.doubleClickZoom.enable(); })
// Toggle layer editing on click if (editMode) { editTool.disable(); editMode = !editMode; } else { editTool.enable(); editMode = !editMode; } }); }); map.on(L.Draw.Event.DRAWSTART, () => { // Disable editing if currently drawing if (editMode) { editTool.disable(); editMode = !editMode; } // Disable hover behavior setPopupHoverMode(map, mapService, false); }); // Register each edit event separately; chaining them with || would only register the first one. [L.Draw.Event.EDITSTART, L.Draw.Event.EDITRESIZE, L.Draw.Event.EDITMOVE].forEach((editEvent: string) => { map.on(editEvent, () => { // Disable hover behavior while editing setPopupHoverMode(map, mapService, false); }); }); map.on(L.Draw.Event.EDITSTOP, () => { // Enable hover behavior setPopupHoverMode(map, mapService, true); }); // map.on(L.Draw.Event.DRAWSTOP, (geometry: any) => { // // Stay in drawing mode after finishing drawing a geometry // if (geometry.layerType === 'rectangle') { // // new L.Draw.Rectangle(map, drawControl.options.rectangle).enable(); // } // if (geometry.layerType === 'circle') { // // new L.Draw.Circle(map, drawControl.options.circle).enable(); // } // }); } /** * Initialize the Draw Control Toolbar * @param drawnItems The FeatureGroup drawn items are sent in */ function initDrawControl(drawnItems: L.FeatureGroup): any { L.drawLocal.edit.handlers.edit.tooltip = { text: null, subtext: null }; return new L.Control.Draw({ edit: { featureGroup: drawnItems, edit: false, remove: false }, draw: { polygon: false, polyline: false, marker: false, circlemarker: false } }); } /** * Add methods to current Geometry formats from the library */ function overrideGeometries(): void { // Define contains() method for each geometry L.Rectangle.include({ contains: function (markers: L.Marker[]) { const markersContained: boolean[] = []; markers.forEach(marker => { markersContained.push(this.getBounds().contains(marker.getLatLng())); }) return markersContained; } }); L.Circle.include({ contains: function (markers: L.Marker[]) { const markersContained: boolean[] = []; markers.forEach(marker => { markersContained.push(this.getLatLng().distanceTo(marker.getLatLng()) < this.getRadius()); }) return markersContained; } }); } /** * Check which markers of a layer group fall inside the given geometry * @param layer The geometry whose contains() method is used * @param layerGroup The layer group holding the markers to test */ function checkMarkersContainedInGeometry(layer: any, layerGroup: L.LayerGroup): boolean[] { // Set an array containing all the markers const markers: L.Marker[] = GlobalMapService.jsonToArray(layerGroup.getLayers()); const result: boolean[] = layer.contains(markers); return result; }
layer.on('click', (e) => { // Prevent map dbl-click zoom map.doubleClickZoom.disable();
user.ts
import {EnumValideur} from "./dto"; export class User { id: number | undefined; email: string; firstName: string; lastName: string; } export class RoleDto { id: number; name: string; }
roleList: RoleDto[]; valideurState: EnumValideur; } export class UserModel extends UserExt { saving: boolean; }
export class UserExt extends User {
duplication.py
# ------------------------------------------------------------------------------ # # Author: # Armin Hasitzka ([email protected]) # # Licensed under the MIT license. # See LICENSE in the project root for license information.
# # ------------------------------------------------------------------------------ from utils.printer import Printer class TaskEliminateDuplicateReports(object): description = 'Eliminating duplicate reports ...' __tsan_data_race_max_stack_frames = 3 def __init__(self, bank): self.__bank = bank def setup(self, options): self.__printer = Printer(options) self.__duplicate_reports = [] self.__identifiers_funcs = { 'tsan': { 'data race': self.__tsan_data_race_identifiers, 'thread leak': self.__tsan_thread_leak_identifiers } } # TODO: split into separate lists for sanitizers and categories for better performance self.__known_identifiers = [] def process(self, report): if not self.__identifiers_funcs.get(report.sanitizer.name_short, {}).get(report.category_name): self.__printer.bailout('unable to analyse ' + str(report)) identifiers = self.__identifiers_funcs[report.sanitizer.name_short][report.category_name](report) if not identifiers: self.__printer.bailout('unable to extract identifiers from ' + str(report)) for identifier in identifiers: if identifier in self.__known_identifiers: self.__printer.task_info('removing ' + str(report)) self.__duplicate_reports.append(report) return self.__known_identifiers.extend(identifiers) def teardown(self): for report in self.__duplicate_reports: self.__bank.remove_report(report) def __tsan_data_race_identifiers(self, report): fragments = [] for stack in report.call_stacks: if 'tsan_data_race_type' in stack.special: fragment = [ stack.special.get('tsan_data_race_type'), stack.special.get('tsan_data_race_bytes') ] for i in range(min(len(stack.frames), self.__tsan_data_race_max_stack_frames)): fragment.extend([ stack.frames[i].src_file_rel_path, stack.frames[i].func_name, stack.frames[i].line_num, stack.frames[i].char_pos ]) fragments.append(':'.join(['?' if not f else str(f) for f in fragment])) if len(fragments) == 1: return fragments if len(fragments) == 2: # either way is fine! return [fragments[0] + ':' + fragments[1], fragments[1] + ':' + fragments[0]] def __tsan_thread_leak_identifiers(self, report): for stack in report.call_stacks: if stack.special.get('tsan_thread_leak_thread_name'): return [stack.special['tsan_thread_leak_thread_name']]
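To make the identifier scheme above concrete: a data-race fragment joins the race type, byte count, and up to __tsan_data_race_max_stack_frames frames with ':', mapping missing fields to '?'. A sketch with hypothetical frame data (not taken from a real report):
fragment_fields = ["write", 8, "src/io.c", "read_buf", 42, None]
fragment = ":".join("?" if not f else str(f) for f in fragment_fields)
# -> "write:8:src/io.c:read_buf:42:?"; when a race has two stacks, the two
# fragments are concatenated in both orders so either ordering is matched.
print(fragment)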
models.py
import tensorflow as tf import torch import yalp class BasicModel: def __init__(self, is_sequential=True): self.is_sequential = is_sequential self.backend = yalp.backend def dispatch(self): return self._model() def _model(self): if self.backend == 'tf': if self.is_sequential: # Simple sequential model; the layer stack depends on the configured data type model = tf.keras.Sequential() model.add(tf.keras.Input(shape=yalp.config.INPUT_SHAPE)) if yalp.config.DATATYPE == 'img': model.add(tf.keras.layers.Conv2D(32, 3, activation="relu")) model.add(tf.keras.layers.Conv2D(64, 3, activation="relu"))
#TODO if yalp.config.DATATYPE == 'tabular': model.add(tf.keras.layers.Dense(32)) model.add(tf.keras.layers.Dense(64)) model.add(tf.keras.layers.Dense(128)) if yalp.config.PROBLEM_TYPE == "classification": model.add(tf.keras.layers.Dense(yalp.config.NUM_CLASSES, activation='sigmoid')) elif yalp.config.PROBLEM_TYPE == "regression": model.add(tf.keras.layers.Dense(yalp.config.NUM_CLASSES)) return model
model.add(tf.keras.layers.MaxPooling2D(3)) model.add(tf.keras.layers.Flatten()) if yalp.config.DATATYPE == 'text':
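A dispatch sketch for the class above (assumes yalp.backend == 'tf' and a tabular binary-classification config; all config values here are placeholders, not from the original project):
import yalp

# Hypothetical configuration; the real yalp config mechanism may differ.
yalp.backend = 'tf'
yalp.config.DATATYPE = 'tabular'
yalp.config.INPUT_SHAPE = (16,)
yalp.config.PROBLEM_TYPE = 'classification'
yalp.config.NUM_CLASSES = 1

model = BasicModel(is_sequential=True).dispatch()
model.compile(optimizer='adam', loss='binary_crossentropy')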
__init__.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Logchecker tool for scanning log files against YETI Threat Intelligence Repository. By LIFARS This code is licensed under MIT license (see LICENSE for details) """ __version__ = "0.8" __author__ = "LIFARS LLC" __copyright__ = "Copyright (c) 2020,2021 LIFARS LLC" __credits__ = ["LIFARS LLC"] __license__ = "MIT" __maintainer__ = "LIFARS LLC" __status__ = "Production" import argparse import collections import configparser import csv import json import os import re import sys import Evtx.Evtx as evtx import pyeti Config = collections.namedtuple("Config", ["url", "key", "output"]) def is_valid_file(parser, arg): if not os.path.exists(arg): parser.error("The file %s does not exist!" % arg) else: return arg def main(): parser = argparse.ArgumentParser() parser.add_argument( "-c", "--config", help="Config file path. The config file should contain the URL of the YETI database," " the authorization key and the output format. If it is present, it overrides" " the --url, --key and --csv/--json options.", type=argparse.FileType("r"), ) parser.add_argument( "-f", "--file", help="[REQUIRED] Log file path.", type=lambda x: is_valid_file(parser, x), required=True, ) parser.add_argument( "-o", "--output", help="Output file path. If the file does not exist, a new file is created. " "If not specified, output is printed to STDOUT.", type=argparse.FileType("w+"), ) parser.add_argument( "-a", "--address", default=False, action="store_true", help="Search only for IP addresses. If none of the address, " "domain or hash flags is specified, it searches for all of them.", ) parser.add_argument( "-d", "--domain", default=False, action="store_true", help="Search only for domains. If none of the address, " "domain or hash flags is specified, it searches for all of them.", ) parser.add_argument( "-H", "--hash", default=False, action="store_true", help="Search only for hashes. If none of the address, " "domain or hash flags is specified, it searches for all of them.", ) parser.add_argument( "-A", "--all", default=False, action="store_true", help="Show all values in the logs. By default it shows only values " "which have a record in the database.", ) group = parser.add_mutually_exclusive_group() group.add_argument( "-C", "--csv", default=False, action="store_true", help="Output in CSV format. This is the default option.", ) group.add_argument( "-j", "--json", default=False, action="store_true", help="Output in JSON format. By default output is in CSV format.", ) parser.add_argument("-u", "--url", help="URL of the YETI instance.", type=str) parser.add_argument("-k", "--key", help="API key for YETI.", type=str) args = parser.parse_args() if not (args.config or args.url): parser.error( "Missing URL of YETI. Use --url URL or add a config file using --config CONFIG" ) url = args.url key = args.key csv = args.csv json = args.json if args.config: url, key, outf = parse_config_file(args.config) if outf.lower() == "json": json = True csv = False elif outf.lower() == "csv": json = False csv = True else: print("Unsupported output format. 
Using default", file=sys.stderr) json = False csv = True check_log_file( args.file, url, key, output=args.output, address=args.address, domain=args.domain, hash=args.hash, all=args.all, csv=csv, json=json, ) def parse_config_file(file): config = configparser.ConfigParser() config.read_file(file) url = config.get("DEFAULT", "url") key = config.get("DEFAULT", "api_key") output = config.get("DEFAULT", "output_format") return Config(url, key, output) def check_log_file(file, url, key, **kwargs): _, file_extension = os.path.splitext(file) print("reading file", file=sys.stderr) if file_extension == ".evtx": log = __read_evtx_file(file) else: log = __read_text_file(file) print("parsing file", file=sys.stderr) values = parse_log_file(log) print("looking in database", file=sys.stderr) results = [] a = kwargs.get("all", False) api = pyeti.YetiApi(url, api_key=key) for val, logs in values.items(): result = {"value": val} yeti = api.observable_search(value=val) if yeti: result["tags"] = yeti[0].get("tags", []) result["created"] = yeti[0].get("created", "") result["sources"] = yeti[0].get("sources", []) else: result["tags"] = [] result["created"] = "" result["sources"] = [] result["original_log"] = logs if yeti or a: results.append(result) print("writing results", file=sys.stderr) ret = kwargs.get("ret", False) if ret: return results output = kwargs.get("output", None) if not output: output = sys.stdout j = kwargs.get("json", False) if j: json.dump(results, output, indent=4, sort_keys=True) else: fields = ["value", "tags", "created", "sources", "original_log"] results = __flatten(map(__unpack_logs, map(__csv_row, results))) writer = csv.DictWriter(output, fieldnames=fields, quoting=csv.QUOTE_ALL) writer.writeheader() writer.writerows(results) outfh = kwargs.get("output", None) if outfh: outfh.close() print("finished", file=sys.stderr) def parse_log_file(log, **kwargs): addr_pattern = re.compile("(?:[0-9]{1,3}\.){3}[0-9]{1,3}") ipv6_pattern = re.compile( "(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|" "fe80:(?::[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]+|" "::(?:ffff(?::0{1,4})?:)?" 
"(?:(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])\.){3}" "(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])|" "(?:[0-9a-fA-F]{1,4}:){1,4}:" "(?:(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])\.){3}" "(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])|" ":(?:(?::[0-9a-fA-F]{1,4}){1,7}|:)|" "[0-9a-fA-F]{1,4}:(?:(?::[0-9a-fA-F]{1,4}){1,6})|" "(?:[0-9a-fA-F]{1,4}:){1,2}(?::[0-9a-fA-F]{1,4}){1,5}|" "(?:[0-9a-fA-F]{1,4}:){1,3}(?::[0-9a-fA-F]{1,4}){1,4}|" "(?:[0-9a-fA-F]{1,4}:){1,4}(?::[0-9a-fA-F]{1,4}){1,3}|" "(?:[0-9a-fA-F]{1,4}:){1,5}(?::[0-9a-fA-F]{1,4}){1,2}|" "(?:[0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|" "(?:[0-9a-fA-F]{1,4}:){1,7}:" ) domain_pattern = re.compile("(?:[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?\.)+[a-z]{2,6}") hash_pattern = re.compile("[0-9a-fA-F]{64}|[0-9a-fA-F]{40}|[0-9a-fA-F]{32}") a = kwargs.get("address", False) d = kwargs.get("domain", False) h = kwargs.get("hash", False) flags = a or d or h values = {} for line in log: if (not flags) or a: addr = addr_pattern.findall(line) for match in addr: values.setdefault(match, []).append(line) addr = ipv6_pattern.findall(line) for match in addr: values.setdefault(match.lower(), []).append(line) if (not flags) or d: dom = domain_pattern.findall(line) for match in dom: values.setdefault(match.lower(), []).append(line) if (not flags) or h: ha = hash_pattern.findall(line) for match in ha: values.setdefault(match.lower(), []).append(line) values.pop("schemas.microsoft.com", None) return values def __read_evtx_file(file): with evtx.Evtx(file) as f: log = list(map(evtx.Record.xml, f.records())) return log def __read_text_file(file): with open(file) as f: log = f.read().splitlines() return log def __dict_to_string(d): return " ".join(["{}:{}".format(key, val) for key, val in d.items()]) def __list_to_string(li):
def __csv_row(d): d["tags"] = __list_to_string([__dict_to_string(tag) for tag in d["tags"]]) d["sources"] = __list_to_string(d["sources"]) return d def __unpack_logs(d): result = [] for log in d["original_log"]: new = d.copy() new["original_log"] = log result.append(new) return result def __flatten(li): return [item for sublist in li for item in sublist] if __name__ == "__main__": main()
return " ".join(li)
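For the --config option above, parse_config_file expects an INI file with url, api_key and output_format keys in the DEFAULT section; a hypothetical example (values are placeholders):
[DEFAULT]
url = https://yeti.example.org
api_key = 0123456789abcdef
output_format = csv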
variablescope.rs
//! A scope is something that contains variable values. use crate::css::{CssString, Selectors, Value}; use crate::output::Format; use crate::sass::{Expose, Function, Item, Mixin, Name, UseAs}; use crate::Error; use lazy_static::lazy_static; use std::collections::BTreeMap; use std::ops::Deref; use std::sync::{Arc, Mutex}; /// A static or dynamic scope reference. /// /// This dereferences to a [Scope]. #[derive(Clone)] pub enum ScopeRef { /// The builtin scopes in rsass are static. Builtin(&'static Scope), /// All other scopes are dynamic. This uses [Arc] reference counting. Dynamic(Arc<Scope>), } impl ScopeRef { /// Create a new global scope. /// /// A "global" scope is just a scope that has no parent. /// There will be multiple global scopes existing during the /// evaluation of a single sass file. pub fn new_global(format: Format) -> Self { Self::dynamic(Scope::new_global(format)) } /// Create a new subscope of a given parent. pub fn sub(parent: ScopeRef) -> Self { Self::dynamic(Scope::sub(parent)) } /// Create a new subscope of a given parent with selectors. pub fn sub_selectors(parent: ScopeRef, selectors: Selectors) -> Self { Self::dynamic(Scope::sub_selectors(parent, selectors)) } fn dynamic(scope: Scope) -> Self { ScopeRef::Dynamic(Arc::new(scope)) } /// Check if `a` and `b` reference the same scope. pub fn is_same(a: &Self, b: &Self) -> bool { match (a, b) { (ScopeRef::Builtin(a), ScopeRef::Builtin(b)) => { std::ptr::eq(a, b) } (ScopeRef::Dynamic(ref a), ScopeRef::Dynamic(ref b)) => { Arc::ptr_eq(a, b) } _ => false, } } /// Evaluate a body of items in this scope. pub fn eval_body(self, body: &[Item]) -> Result<Option<Value>, Error> where Self: Sized, { for b in body { let result = match *b { Item::IfStatement(ref cond, ref do_if, ref do_else) => { if cond.evaluate(self.clone())?.is_true() { self.clone().eval_body(do_if)? } else { self.clone().eval_body(do_else)? } } Item::Each(ref names, ref values, ref body) => { let s = self.clone(); for value in values.evaluate(s.clone())?.iter_items() { s.define_multi(names, &value); if let Some(r) = s.clone().eval_body(body)? { return Ok(Some(r)); } } None } Item::For { ref name, ref from, ref to, inclusive, ref body, } => { let range = crate::value::ValueRange::new( from.evaluate(self.clone())?, to.evaluate(self.clone())?, inclusive, )?; let s = self.clone(); for value in range { s.define(name.clone(), &value); if let Some(r) = s.clone().eval_body(body)? { return Ok(Some(r)); } } None } Item::VariableDeclaration { ref name, ref val, default, global, } => { let val = val.evaluate(self.clone())?; self.set_variable(name.into(), val, default, global); None } Item::Return(ref v) => { Some(v.do_evaluate(self.clone(), true)?) } Item::While(ref cond, ref body) => { let scope = ScopeRef::sub(self.clone()); while cond.evaluate(scope.clone())?.is_true() { if let Some(r) = scope.clone().eval_body(body)? { return Ok(Some(r)); } } None } Item::Warn(ref value) => { eprintln!( "WARNING: {}", value .evaluate(self.clone())? .format(self.get_format()) ); None } Item::Error(ref value, ref pos) => { return Err(Error::AtError( value .evaluate(self)? .format(Format::introspect()) .to_string(), pos.clone(), )); } Item::None => None, Item::Comment(..) 
=> None, ref x => { return Err(Error::S(format!( "Not implemented in function: {:?}", x ))) } }; if let Some(result) = result { return Ok(Some(result)); } } Ok(None) } fn with_forwarded(self) -> Self { if let Some(forwarded) = self.opt_forward() { let merged = ScopeRef::new_global(self.get_format()); merged.expose_star(&forwarded); merged.expose_star(&self); merged } else { self } } fn expose(self, filter: &Expose) -> Self { if filter == &Expose::All { self } else { let result = ScopeRef::new_global(self.get_format()); for (name, function) in &*self.functions.lock().unwrap() { if filter.allow_fun(name) { result.define_function(name.clone(), function.clone()); } } for (name, m) in &*self.mixins.lock().unwrap() { if filter.allow_fun(name) { result.define_mixin(name.clone(), m.clone()); } } for (name, value) in &*self.variables.lock().unwrap() { if filter.allow_var(name) { result.define(name.clone(), value); } } result } } } impl Deref for ScopeRef { type Target = Scope; fn deref(&self) -> &Scope { match self { ScopeRef::Builtin(m) => m, ScopeRef::Dynamic(m) => m, } } } /// Variables, functions and mixins are defined in a `Scope`. /// /// A scope can be a local scope, e.g. in a function, or the global scope. /// All non-global scopes have a parent. /// The global scope is global to a sass document, multiple different /// global scopes may exists in the same rust-language process. /// /// Scopes are often accessed through a [`ScopeRef`]. pub struct Scope { parent: Option<ScopeRef>, modules: Mutex<BTreeMap<String, ScopeRef>>, variables: Mutex<BTreeMap<Name, Value>>, mixins: Mutex<BTreeMap<Name, Mixin>>, functions: Mutex<BTreeMap<Name, Function>>, selectors: Option<Selectors>, forward: Mutex<Option<ScopeRef>>, format: Format, } impl<'a> Scope { /// Create a new global scope. /// /// A "global" scope is just a scope that have no parent. /// There will be multiple global scopes existing during the /// evaluation of a single sass file. pub fn new_global(format: Format) -> Self { Scope { parent: None, modules: Mutex::new(BTreeMap::new()), variables: Mutex::new(BTreeMap::new()), mixins: Mutex::new(BTreeMap::new()), functions: Mutex::new(BTreeMap::new()), selectors: None, forward: Default::default(), format, } } /// Create a scope for a built-in module. pub fn builtin_module(name: &'static str) -> Self { let s = Scope::new_global(Default::default()); s.set_variable( Name::from_static("@scope_name@"), name.into(), false, false, ); s } pub(crate) fn get_name(&self) -> String { match self.get_or_none(&Name::from_static("@scope_name@")) { Some(Value::Literal(s)) => s.value().into(), _ => "".into(), } } /// Create a new subscope of a given parent. pub fn sub(parent: ScopeRef) -> Self { let format = parent.get_format(); Scope { parent: Some(parent), modules: Mutex::new(BTreeMap::new()), variables: Mutex::new(BTreeMap::new()), mixins: Mutex::new(BTreeMap::new()), functions: Mutex::new(BTreeMap::new()), selectors: None, forward: Default::default(), format, } } /// Create a new subscope of a given parent with selectors. pub fn sub_selectors(parent: ScopeRef, selectors: Selectors) -> Self { let format = parent.get_format(); Scope { parent: Some(parent), modules: Mutex::new(BTreeMap::new()), variables: Mutex::new(BTreeMap::new()), mixins: Mutex::new(BTreeMap::new()), functions: Mutex::new(BTreeMap::new()), selectors: Some(selectors), forward: Default::default(), format, } } /// Define a module in the scope. /// /// This is used by the `@use` statement. 
pub fn define_module(&self, name: String, module: ScopeRef) { self.modules.lock().unwrap().insert(name, module); } /// Get a module. /// /// This is used when refering to a function or variable with /// namespace.name notation. pub fn get_module(&self, name: &str) -> Option<ScopeRef> { self.modules .lock() .unwrap() .get(name) .cloned() .or_else(|| self.parent.as_ref().and_then(|p| p.get_module(name))) } /// Get the format used in this scope. pub fn get_format(&self) -> Format { self.format } /// Define a none-default, non-global variable. pub fn define(&self, name: Name, val: &Value) { self.set_variable(name, val.clone(), false, false) } /// Define a variable with a value. /// /// The `$` sign is not included in `name`. pub fn set_variable( &self, name: Name, val: Value, default: bool, global: bool, ) { if default && !matches!(self.get_or_none(&name), Some(Value::Null) | None) { return; } if global { self.define_global(name, val); } else { self.variables.lock().unwrap().insert(name, val); } } /// Define a variable in the global scope that is an ultimate /// parent of this scope. pub fn define_global(&self, name: Name, val: Value) { if let Some(ref parent) = self.parent
else { self.variables.lock().unwrap().insert(name, val); } } /// Define multiple names from a value that is a list. /// Special case: in names is a single name, value is used directly. pub fn define_multi(&self, names: &[Name], value: &Value) { if names.len() == 1 { self.define(names[0].clone(), value); } else { let values = value.clone().iter_items(); if values.len() > names.len() { panic!( "Expected {} values, but got {}", names.len(), values.len(), ) } else { let mut values = values.iter(); for name in names { self.define( name.clone(), values.next().unwrap_or(&Value::Null), ) } } } } /// Get the Value for a variable. pub fn get_or_none(&self, name: &Name) -> Option<Value> { if let Some((modulename, name)) = name.split_module() { if let Some(module) = self.get_module(&modulename) { return module.get_or_none(&name); } } self.variables .lock() .unwrap() .get(name) .cloned() .or_else(|| { self.parent.as_ref().and_then(|p| p.get_or_none(name)) }) } /// Get the value for a variable (or an error). pub fn get(&self, name: &str) -> Result<Value, Error> { match self.get_or_none(&name.into()) { Some(value) => Ok(value), None => Err(Error::undefined_variable(name)), } } /// Copy a set of local variables to a temporary holder pub fn store_local_values( &self, names: &[Name], ) -> Vec<(Name, Option<Value>)> { let vars = self.variables.lock().unwrap(); names .iter() .map(|name| (name.clone(), vars.get(name).cloned())) .collect() } /// Restore a set of local variables from a temporary holder pub fn restore_local_values(&self, data: Vec<(Name, Option<Value>)>) { let mut vars = self.variables.lock().unwrap(); for (name, value) in data { if let Some(value) = value { vars.insert(name, value); } else { vars.remove(&name); } } } /// Get the global Value for a variable. pub fn get_global_or_none(&self, name: &Name) -> Option<Value> { if let Some(ref parent) = self.parent { parent.get_global_or_none(name) } else { self.get_or_none(name) } } /// Get a mixin by name. /// /// Returns the formal args and the body of the mixin. pub fn get_mixin(&self, name: &Name) -> Option<Mixin> { if let Some((modulename, name)) = name.split_module() { self.get_module(&modulename) .and_then(|m| m.get_mixin(&name)) } else { self.mixins.lock().unwrap().get(name).cloned().or_else(|| { self.parent.as_ref().and_then(|p| p.get_mixin(name)) }) } } /// Define a mixin. pub fn define_mixin(&self, name: Name, mixin: Mixin) { self.mixins.lock().unwrap().insert(name, mixin); } /// Define a function. pub fn define_function(&self, name: Name, func: Function) { self.functions.lock().unwrap().insert(name, func); } /// Get a function by name. pub fn get_function( &self, name: &Name, ) -> Result<Option<Function>, Error> { if let Some((modulename, name)) = name.split_module() { if let Some(module) = self.get_module(&modulename) { if let Some(f) = module.get_function(&name)? { Ok(Some(f)) } else { Err(Error::error("Undefined function.")) } } else { return Err(Error::error(format!( "There is no module with the namespace {:?}.", modulename ))); } } else { let f = self.functions.lock().unwrap().get(name).cloned(); if let Some(f) = f { Ok(Some(f)) } else if let Some(ref parent) = self.parent { parent.get_function(name) } else { Ok(None) } } } /// Only for exposing builtin functions; will panic on unknown. pub(crate) fn get_lfunction(&self, name: &Name) -> Function { self.functions.lock().unwrap().get(name).unwrap().clone() } /// Get the selectors active for this scope. pub fn get_selectors(&self) -> &Selectors { lazy_static! 
{ static ref ROOT: Selectors = Selectors::root(); } self.selectors.as_ref().unwrap_or_else(|| { self.parent .as_ref() .map(|p| p.get_selectors()) .unwrap_or_else(|| &ROOT) }) } pub(crate) fn do_use( &self, module: ScopeRef, name: &str, as_n: &UseAs, expose: &Expose, ) -> Result<(), Error> { let module = module.with_forwarded(); match as_n { UseAs::KeepName => { let name = name .rfind(|c| c == ':' || c == '/') .map(|i| &name[i + 1..]) .unwrap_or(name); self.define_module(name.into(), module.expose(expose)); } UseAs::Star => { self.expose_star(&module.expose(expose)); } UseAs::Name(name) => { self.define_module(name.clone(), module.expose(expose)); } UseAs::Prefix(prefix) => { // Note: functions are filtered with allow_fun and variables with // allow_var, matching the expose() method above. for (name, function) in &*module.functions.lock().unwrap() { let name = format!("{}{}", prefix, name).into(); if expose.allow_fun(&name) { self.define_function(name, function.clone()); } } for (name, value) in &*module.variables.lock().unwrap() { let name = format!("{}{}", prefix, name).into(); if expose.allow_var(&name) { self.define(name, value); } } for (name, m) in &*module.mixins.lock().unwrap() { let name = format!("{}{}", prefix, name).into(); if expose.allow_fun(&name) { self.define_mixin(name, m.clone()); } } } } Ok(()) } pub(crate) fn expose_star(&self, other: &Scope) { for (name, function) in &*other.functions.lock().unwrap() { self.define_function(name.clone(), function.clone()); } for (name, value) in &*other.variables.lock().unwrap() { self.define(name.clone(), value); } for (name, m) in &*other.mixins.lock().unwrap() { self.define_mixin(name.clone(), m.clone()); } } /// Get the functions of this scope as a `Value::Map`. pub fn functions_map(&self) -> Value { use crate::css::ValueMap; use crate::value::Quotes; let mut result = ValueMap::new(); for (name, value) in &*self.functions.lock().unwrap() { let name = name.to_string(); result.insert( CssString::new(name.clone(), Quotes::Double).into(), Value::Function(name, Some(value.clone())), ); } Value::Map(result) } /// Get the variables of this scope as a `Value::Map`. pub fn variables_map(&self) -> Value { use crate::css::ValueMap; use crate::value::Quotes; let mut result = ValueMap::new(); for (name, value) in &*self.variables.lock().unwrap() { if name != &Name::from_static("@scope_name@") { result.insert( CssString::new(name.to_string(), Quotes::Double).into(), value.clone(), ); } } Value::Map(result) } /// Get the forward scope for this scope. /// /// Create a new one if necessary. pub fn forward(&self) -> ScopeRef { self.forward .lock() .unwrap() .get_or_insert_with(|| ScopeRef::new_global(self.get_format())) .clone() } /// Get the forward scope for this scope, if one exists. pub fn opt_forward(&self) -> Option<ScopeRef> { self.forward.lock().unwrap().clone() } } #[cfg(test)] pub mod test { macro_rules! 
assert_expr { ($context:expr, $input:expr, $expected:expr) => {{ assert_eq!( do_evaluate_or_error($context, $input) .unwrap_or_else(|e| panic!("{}", e)), $expected ) }}; ($input:expr, $expected:expr) => {{ assert_expr!(&[], $input, $expected) }}; } #[test] fn variable_value() { assert_expr!(&[("red", "#f02a42")], b"$red;", "#f02a42") } #[test] fn undefined_variable() { assert_eq!( "Undefined variable: \"$x\"", format!("{}", do_evaluate_or_error(&[], b"$x;").err().unwrap()) ) } #[test] fn partial_variable_value() { assert_expr!( &[("red", "#f02a42")], b"solid 1px $red;", "solid 1px #f02a42" ) } #[test] fn simple_arithmetic() { assert_expr!(b"3 + 3;", "6") } #[test] fn simple_arithmetic_2() { assert_expr!(b"2 + 3 * 4;", "14") } #[test] fn simple_arithmetic_3() { assert_expr!(&[("four", "4")], b"2 + 3 * $four;", "14") } // The following tests about division are from // http://sass-lang.com/documentation/file.SASS_REFERENCE.html, // Section "Division and /" #[test] fn div_slash_1() { assert_expr!(b"10px/8px;", "10px/8px") } #[test] fn div_slash_2() { assert_expr!(&[("width", "1000px")], b"$width/2;", "500px") } #[test] fn div_slash_4() { assert_expr!(b"(500px/2);", "250px") } #[test] fn div_slash_5() { assert_expr!(b"5px + 8px/2px;", "9px") } #[test] fn div_slash_6() { assert_expr!(b"(italic bold 10px/8px);", "italic bold 10px/8px") } #[test] fn negative_in_arithmetic() { assert_expr!(&[("m", "20")], b"1000px + $m * -2;", "960px") } #[test] fn double_div_1() { assert_expr!(b"15/3/5;", "15/3/5") } #[test] fn double_div_2() { assert_expr!(b"15 / 3 / 5;", "15/3/5") } #[test] fn double_div_3() { assert_expr!(b"(15 / 3 / 5);", "1") } #[test] fn long_div_and_mul_sequence() { assert_expr!(b"(3 / 2 / 2 / 2 * 32 / 2 / 2);", "3") } #[test] fn double_div_4() { assert_expr!(b"(15 / 3) / 5;", "1"); } #[test] fn double_div_5() { assert_expr!(&[("five", "5")], b"15 / 3 / $five;", "1") } #[test] fn sum_w_unit() { assert_expr!(b"3px + 3px + 3px;", "9px") } #[test] fn multi_multi() { assert_expr!( &[("stuff", "1 2 3")], b"1 2 3, $stuff 4 5 (6, 7 8 9);", "1 2 3, 1 2 3 4 5 6, 7 8 9" ) } #[test] fn url_keeps_parens() { assert_expr!( b"black url(starfield.png) repeat;", "black url(starfield.png) repeat" ) } #[test] fn color_unchanged_1() { assert_expr!(b"#AbC;", "#AbC") } #[test] fn color_unchanged_2() { assert_expr!(b"#AAbbCC;", "#AAbbCC") } #[test] fn color_add_each_component() { assert_expr!(b"#AbC + 1;", "#abbccd") } #[test] fn color_add_each_component_overflow() { assert_expr!(b"#00f + 1;", "#0101ff") } #[test] fn color_add_components() { assert_expr!(b"#AbC + #001;", "#aabbdd") } #[test] fn color_add_components_overflow() { assert_expr!(b"#1000ff + #001;", "#1000ff") } #[test] fn color_add_components_to_named_overflow() { assert_expr!(b"#0000ff + #001;", "blue") } #[test] fn color_add_components_to_named() { assert_expr!(b"#00f + #0f0 + #f00;", "white") } #[test] fn color_simple_rgba() { assert_expr!(b"rgba(1,2,3,.6);", "rgba(1, 2, 3, 0.6)") } #[test] fn color_add_to_rgba() { assert_expr!(b"rgba(0, 0, 0, 1) + #111;", "#111111") } #[test] fn color_subtract() { assert_expr!(b"#fff - 1;", "#fefefe") } #[test] fn color_subtract_underflow() { assert_expr!(b"#000 - 1;", "black") } #[test] fn color_subtract_components() { assert_expr!(b"#fff - #ff8;", "#000077") // Or should it be #007? 
} #[test] fn color_subtract_components_underflow() { assert_expr!(b"#000001 - #001;", "black") } #[test] fn color_division() { assert_expr!(b"(#101010 / 7);", "#020202") } #[test] fn color_add_rgb_1() { assert_expr!(b"rgb(10,10,10) + #010001;", "#0b0a0b") } #[test] fn color_add_rgb_2() { assert_expr!(b"#010000 + rgb(255, 255, 255);", "white") } #[test] fn color_named_args() { assert_expr!(b"rgb($blue: 3, $red: 1, $green: 2);", "#010203") } #[test] fn color_mixed_args() { assert_expr!(b"rgb(1, $blue: 3, $green: 2);", "#010203") } #[test] fn color_mixed_with_alpha_1() { assert_expr!( b"mix(rgba(255, 0, 0, 0.5), #00f);", "rgba(64, 0, 191, 0.75)" ) } #[test] fn color_mixed_with_alpha_2() { assert_expr!( b"mix(#00f, rgba(255, 0, 0, 0.5));", "rgba(64, 0, 191, 0.75)" ) } #[test] fn value_multiple_dashes() { assert_expr!(b"foo-bar-baz 17%;", "foo-bar-baz 17%") } #[test] fn color_arithemtic_by_name() { assert_expr!(b"red + blue;", "fuchsia") } #[test] fn function_if() { assert_expr!(b"if(true, foo, bar);", "foo") } #[test] fn function_if_false() { assert_expr!(b"if(false, foo, bar);", "bar") } #[test] fn function_if_named() { assert_expr!( b"if($if_true: hey, $if_false: ho, $condition: true);", "hey" ) } #[test] fn function_if_named_dash() { assert_expr!( b"if($if-true: hey, $if-false: ho, $condition: true);", "hey" ) } #[test] fn quoted_string() { assert_expr!(b"\"foobar\";", "\"foobar\"") } #[test] fn unquote_string() { assert_expr!(b"unquote(\"foo bar\");", "foo bar") } #[test] fn equal_true() { assert_expr!(b"17 == 10 + 7;", "true") } #[test] fn equal_false() { assert_expr!(b"17 == 10 + 8;", "false") } #[test] fn not_equal_true() { assert_expr!(b"17 != 10 + 8;", "true") } #[test] fn not_equal_false() { assert_expr!(b"18 != 10 + 8;", "false") } #[test] fn simple_boolean() { assert_expr!(b"3 >= 2 and 1 < 10;", "true") } pub fn do_evaluate( s: &[(&'static str, &str)], expression: &[u8], ) -> String { match do_evaluate_or_error(s, expression) { Ok(v) => v, Err(e) => panic!("{}", e), } } pub fn do_evaluate_or_error( s: &[(&'static str, &str)], expression: &[u8], ) -> Result<String, crate::Error> { use super::ScopeRef; use crate::parser::value::value_expression; use crate::parser::{code_span, ParseError}; use crate::sass::Name; use nom::bytes::complete::tag; use nom::sequence::terminated; let f = Default::default(); let scope = ScopeRef::new_global(f); for &(name, val) in s { let val = value_expression(code_span(val.as_bytes())); scope.define( Name::from_static(name), &ParseError::check(val)?.evaluate(scope.clone())?, ); } let expr = terminated(value_expression, tag(";"))(code_span(expression)); Ok(ParseError::check(expr)? .evaluate(scope)? .format(f) .to_string()) } }
{ parent.define_global(name, val); }
aui-sortable-layout-coverage.js
if (typeof __coverage__ === 'undefined') { __coverage__ = {}; } if (!__coverage__['build/aui-sortable-layout/aui-sortable-layout.js']) {
__cov_Bkabxj96f5yfp8GqViY5TQ.s['1']++;YUI.add('aui-sortable-layout',function(A,NAME){__cov_Bkabxj96f5yfp8GqViY5TQ.f['1']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['2']++;var Lang=A.Lang,isBoolean=Lang.isBoolean,isFunction=Lang.isFunction,isObject=Lang.isObject,isString=Lang.isString,isValue=Lang.isValue,toInt=Lang.toInt,ceil=Math.ceil,DDM=A.DD.DDM,PLACEHOLDER_MARGIN_BOTTOM=0,PLACEHOLDER_MARGIN_TOP=0,PLACEHOLDER_TARGET_MARGIN_BOTTOM=0,PLACEHOLDER_TARGET_MARGIN_TOP=0,isNodeList=function(v){__cov_Bkabxj96f5yfp8GqViY5TQ.f['2']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['3']++;return v instanceof A.NodeList;},concat=function(){__cov_Bkabxj96f5yfp8GqViY5TQ.f['3']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['4']++;return Array.prototype.slice.call(arguments).join(' ');},nodeListSetter=function(val){__cov_Bkabxj96f5yfp8GqViY5TQ.f['4']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['5']++;return isNodeList(val)?(__cov_Bkabxj96f5yfp8GqViY5TQ.b['1'][0]++,val):(__cov_Bkabxj96f5yfp8GqViY5TQ.b['1'][1]++,A.all(val));},getNumStyle=function(elem,styleName){__cov_Bkabxj96f5yfp8GqViY5TQ.f['5']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['6']++;return toInt(elem.getStyle(styleName));},getCN=A.getClassName,CSS_DRAG_INDICATOR=getCN('sortable-layout','drag','indicator'),CSS_DRAG_INDICATOR_ICON=getCN('sortable-layout','drag','indicator','icon'),CSS_DRAG_INDICATOR_ICON_LEFT=getCN('sortable-layout','drag','indicator','icon','left'),CSS_DRAG_INDICATOR_ICON_RIGHT=getCN('sortable-layout','drag','indicator','icon','right'),CSS_DRAG_TARGET_INDICATOR=getCN('sortable-layout','drag','target','indicator'),CSS_ICON=getCN('icon'),CSS_ICON_CIRCLE_TRIANGLE_L=getCN('icon','circle','triangle','l'),CSS_ICON_CIRCLE_TRIANGLE_R=getCN('icon','circle','triangle','r'),TPL_PLACEHOLDER='<div class="'+CSS_DRAG_INDICATOR+'">'+'<div class="'+concat(CSS_DRAG_INDICATOR_ICON,CSS_DRAG_INDICATOR_ICON_LEFT,CSS_ICON,CSS_ICON_CIRCLE_TRIANGLE_R)+'"></div>'+'<div class="'+concat(CSS_DRAG_INDICATOR_ICON,CSS_DRAG_INDICATOR_ICON_RIGHT,CSS_ICON,CSS_ICON_CIRCLE_TRIANGLE_L)+'"></div>'+'<div>';__cov_Bkabxj96f5yfp8GqViY5TQ.s['7']++;var SortableLayout=A.Component.create({NAME:'sortable-layout',ATTRS:{delegateConfig:{value:null,setter:function(val){__cov_Bkabxj96f5yfp8GqViY5TQ.f['6']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['8']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['9']++;var config=A.merge({bubbleTargets:instance,dragConfig:{},nodes:instance.get('dragNodes'),target:true},val);__cov_Bkabxj96f5yfp8GqViY5TQ.s['10']++;A.mix(config.dragConfig,{groups:instance.get('groups'),startCentered:true});__cov_Bkabxj96f5yfp8GqViY5TQ.s['11']++;return config;},validator:isObject},proxyNode:{setter:function(val){__cov_Bkabxj96f5yfp8GqViY5TQ.f['7']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['12']++;return isString(val)?(__cov_Bkabxj96f5yfp8GqViY5TQ.b['2'][0]++,A.Node.create(val)):(__cov_Bkabxj96f5yfp8GqViY5TQ.b['2'][1]++,val);}},dragNodes:{validator:isString},dropContainer:{value:function(dropNode){__cov_Bkabxj96f5yfp8GqViY5TQ.f['8']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['13']++;return dropNode;},validator:isFunction},dropNodes:{setter:'_setDropNodes'},groups:{value:['sortable-layout']},lazyStart:{value:false,validator:isBoolean},placeholder:{value:TPL_PLACEHOLDER,setter:function(val){__cov_Bkabxj96f5yfp8GqViY5TQ.f['9']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['14']++;var 
placeholder=isString(val)?(__cov_Bkabxj96f5yfp8GqViY5TQ.b['3'][0]++,A.Node.create(val)):(__cov_Bkabxj96f5yfp8GqViY5TQ.b['3'][1]++,val);__cov_Bkabxj96f5yfp8GqViY5TQ.s['15']++;if(!placeholder.inDoc()){__cov_Bkabxj96f5yfp8GqViY5TQ.b['4'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['16']++;A.getBody().prepend(placeholder.hide());}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['4'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['17']++;PLACEHOLDER_MARGIN_BOTTOM=getNumStyle(placeholder,'marginBottom');__cov_Bkabxj96f5yfp8GqViY5TQ.s['18']++;PLACEHOLDER_MARGIN_TOP=getNumStyle(placeholder,'marginTop');__cov_Bkabxj96f5yfp8GqViY5TQ.s['19']++;placeholder.addClass(CSS_DRAG_TARGET_INDICATOR);__cov_Bkabxj96f5yfp8GqViY5TQ.s['20']++;PLACEHOLDER_TARGET_MARGIN_BOTTOM=getNumStyle(placeholder,'marginBottom');__cov_Bkabxj96f5yfp8GqViY5TQ.s['21']++;PLACEHOLDER_TARGET_MARGIN_TOP=getNumStyle(placeholder,'marginTop');__cov_Bkabxj96f5yfp8GqViY5TQ.s['22']++;return placeholder;}},proxy:{value:null,setter:function(val){__cov_Bkabxj96f5yfp8GqViY5TQ.f['10']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['23']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['24']++;var defaults={moveOnEnd:false,positionProxy:false};__cov_Bkabxj96f5yfp8GqViY5TQ.s['25']++;if(instance.get('proxyNode')){__cov_Bkabxj96f5yfp8GqViY5TQ.b['5'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['26']++;defaults.borderStyle=null;}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['5'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['27']++;return A.merge(defaults,(__cov_Bkabxj96f5yfp8GqViY5TQ.b['6'][0]++,val)||(__cov_Bkabxj96f5yfp8GqViY5TQ.b['6'][1]++,{}));}}},EXTENDS:A.Base,prototype:{initializer:function(){__cov_Bkabxj96f5yfp8GqViY5TQ.f['11']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['28']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['29']++;instance.bindUI();},bindUI:function(){__cov_Bkabxj96f5yfp8GqViY5TQ.f['12']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['30']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['31']++;instance.publish('placeholderAlign',{defaultFn:instance._defPlaceholderAlign,queuable:false,emitFacade:true,bubbles:true});__cov_Bkabxj96f5yfp8GqViY5TQ.s['32']++;instance._bindDDEvents();__cov_Bkabxj96f5yfp8GqViY5TQ.s['33']++;instance._bindDropZones();},addDropNode:function(node,config){__cov_Bkabxj96f5yfp8GqViY5TQ.f['13']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['34']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['35']++;node=A.one(node);__cov_Bkabxj96f5yfp8GqViY5TQ.s['36']++;if(!DDM.getDrop(node)){__cov_Bkabxj96f5yfp8GqViY5TQ.b['7'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['37']++;instance.addDropTarget(new A.DD.Drop(A.merge({bubbleTargets:instance,groups:instance.get('groups'),node:node},config)));}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['7'][1]++;}},addDropTarget:function(drop){__cov_Bkabxj96f5yfp8GqViY5TQ.f['14']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['38']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['39']++;drop.addToGroup(instance.get('groups'));},alignPlaceholder:function(region,isTarget){__cov_Bkabxj96f5yfp8GqViY5TQ.f['15']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['40']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['41']++;var 
placeholder=instance.get('placeholder');__cov_Bkabxj96f5yfp8GqViY5TQ.s['42']++;if(!instance.lazyEvents){__cov_Bkabxj96f5yfp8GqViY5TQ.b['8'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['43']++;placeholder.show();}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['8'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['44']++;instance._syncPlaceholderSize();__cov_Bkabxj96f5yfp8GqViY5TQ.s['45']++;placeholder.setXY(instance.getPlaceholderXY(region,isTarget));},calculateDirections:function(drag){__cov_Bkabxj96f5yfp8GqViY5TQ.f['16']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['46']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['47']++;var lastY=instance.lastY;__cov_Bkabxj96f5yfp8GqViY5TQ.s['48']++;var lastX=instance.lastX;__cov_Bkabxj96f5yfp8GqViY5TQ.s['49']++;var x=drag.lastXY[0];__cov_Bkabxj96f5yfp8GqViY5TQ.s['50']++;var y=drag.lastXY[1];__cov_Bkabxj96f5yfp8GqViY5TQ.s['51']++;if(x!=lastX){__cov_Bkabxj96f5yfp8GqViY5TQ.b['9'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['52']++;instance.XDirection=x<lastX?(__cov_Bkabxj96f5yfp8GqViY5TQ.b['10'][0]++,'left'):(__cov_Bkabxj96f5yfp8GqViY5TQ.b['10'][1]++,'right');}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['9'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['53']++;if(y!=lastY){__cov_Bkabxj96f5yfp8GqViY5TQ.b['11'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['54']++;instance.YDirection=y<lastY?(__cov_Bkabxj96f5yfp8GqViY5TQ.b['12'][0]++,'up'):(__cov_Bkabxj96f5yfp8GqViY5TQ.b['12'][1]++,'down');}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['11'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['55']++;instance.lastX=x;__cov_Bkabxj96f5yfp8GqViY5TQ.s['56']++;instance.lastY=y;},calculateQuadrant:function(drag,drop){__cov_Bkabxj96f5yfp8GqViY5TQ.f['17']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['57']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['58']++;var quadrant=1;__cov_Bkabxj96f5yfp8GqViY5TQ.s['59']++;var region=drop.get('node').get('region');__cov_Bkabxj96f5yfp8GqViY5TQ.s['60']++;var mouseXY=drag.mouseXY;__cov_Bkabxj96f5yfp8GqViY5TQ.s['61']++;var mouseX=mouseXY[0];__cov_Bkabxj96f5yfp8GqViY5TQ.s['62']++;var mouseY=mouseXY[1];__cov_Bkabxj96f5yfp8GqViY5TQ.s['63']++;var top=region.top;__cov_Bkabxj96f5yfp8GqViY5TQ.s['64']++;var left=region.left;__cov_Bkabxj96f5yfp8GqViY5TQ.s['65']++;var vCenter=top+(region.bottom-top)/2;__cov_Bkabxj96f5yfp8GqViY5TQ.s['66']++;var hCenter=left+(region.right-left)/2;__cov_Bkabxj96f5yfp8GqViY5TQ.s['67']++;if(mouseY<vCenter){__cov_Bkabxj96f5yfp8GqViY5TQ.b['13'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['68']++;quadrant=mouseX>hCenter?(__cov_Bkabxj96f5yfp8GqViY5TQ.b['14'][0]++,1):(__cov_Bkabxj96f5yfp8GqViY5TQ.b['14'][1]++,2);}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['13'][1]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['69']++;quadrant=mouseX<hCenter?(__cov_Bkabxj96f5yfp8GqViY5TQ.b['15'][0]++,3):(__cov_Bkabxj96f5yfp8GqViY5TQ.b['15'][1]++,4);}__cov_Bkabxj96f5yfp8GqViY5TQ.s['70']++;instance.quadrant=quadrant;__cov_Bkabxj96f5yfp8GqViY5TQ.s['71']++;return quadrant;},getPlaceholderXY:function(region,isTarget){__cov_Bkabxj96f5yfp8GqViY5TQ.f['18']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['72']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['73']++;var placeholder=instance.get('placeholder');__cov_Bkabxj96f5yfp8GqViY5TQ.s['74']++;var marginBottom=PLACEHOLDER_MARGIN_BOTTOM;__cov_Bkabxj96f5yfp8GqViY5TQ.s['75']++;var 
marginTop=PLACEHOLDER_MARGIN_TOP;__cov_Bkabxj96f5yfp8GqViY5TQ.s['76']++;if(isTarget){__cov_Bkabxj96f5yfp8GqViY5TQ.b['16'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['77']++;marginBottom=PLACEHOLDER_TARGET_MARGIN_BOTTOM;__cov_Bkabxj96f5yfp8GqViY5TQ.s['78']++;marginTop=PLACEHOLDER_TARGET_MARGIN_TOP;}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['16'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['79']++;placeholder.toggleClass(CSS_DRAG_TARGET_INDICATOR,isTarget);__cov_Bkabxj96f5yfp8GqViY5TQ.s['80']++;var regionBottom=ceil(region.bottom);__cov_Bkabxj96f5yfp8GqViY5TQ.s['81']++;var regionLeft=ceil(region.left);__cov_Bkabxj96f5yfp8GqViY5TQ.s['82']++;var regionTop=ceil(region.top);__cov_Bkabxj96f5yfp8GqViY5TQ.s['83']++;var x=regionLeft;__cov_Bkabxj96f5yfp8GqViY5TQ.s['84']++;var y=instance.quadrant<3?(__cov_Bkabxj96f5yfp8GqViY5TQ.b['17'][0]++,regionTop-(placeholder.get('offsetHeight')+marginBottom)):(__cov_Bkabxj96f5yfp8GqViY5TQ.b['17'][1]++,regionBottom+marginTop);__cov_Bkabxj96f5yfp8GqViY5TQ.s['85']++;return[x,y];},removeDropTarget:function(drop){__cov_Bkabxj96f5yfp8GqViY5TQ.f['19']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['86']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['87']++;drop.removeFromGroup(instance.get('groups'));},_alignCondition:function(){__cov_Bkabxj96f5yfp8GqViY5TQ.f['20']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['88']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['89']++;var activeDrag=DDM.activeDrag;__cov_Bkabxj96f5yfp8GqViY5TQ.s['90']++;var activeDrop=instance.activeDrop;__cov_Bkabxj96f5yfp8GqViY5TQ.s['91']++;if((__cov_Bkabxj96f5yfp8GqViY5TQ.b['19'][0]++,activeDrag)&&(__cov_Bkabxj96f5yfp8GqViY5TQ.b['19'][1]++,activeDrop)){__cov_Bkabxj96f5yfp8GqViY5TQ.b['18'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['92']++;var dragNode=activeDrag.get('node');__cov_Bkabxj96f5yfp8GqViY5TQ.s['93']++;var dropNode=activeDrop.get('node');__cov_Bkabxj96f5yfp8GqViY5TQ.s['94']++;return!dragNode.contains(dropNode);}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['18'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['95']++;return true;},_bindDDEvents:function(){__cov_Bkabxj96f5yfp8GqViY5TQ.f['21']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['96']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['97']++;var delegateConfig=instance.get('delegateConfig');__cov_Bkabxj96f5yfp8GqViY5TQ.s['98']++;var proxy=instance.get('proxy');__cov_Bkabxj96f5yfp8GqViY5TQ.s['99']++;instance.delegate=new A.DD.Delegate(delegateConfig);__cov_Bkabxj96f5yfp8GqViY5TQ.s['100']++;instance.delegate.dd.plug(A.Plugin.DDProxy,proxy);__cov_Bkabxj96f5yfp8GqViY5TQ.s['101']++;instance.on('drag:end',A.bind(instance._onDragEnd,instance));__cov_Bkabxj96f5yfp8GqViY5TQ.s['102']++;instance.on('drag:enter',A.bind(instance._onDragEnter,instance));__cov_Bkabxj96f5yfp8GqViY5TQ.s['103']++;instance.on('drag:exit',A.bind(instance._onDragExit,instance));__cov_Bkabxj96f5yfp8GqViY5TQ.s['104']++;instance.on('drag:over',A.bind(instance._onDragOver,instance));__cov_Bkabxj96f5yfp8GqViY5TQ.s['105']++;instance.on('drag:start',A.bind(instance._onDragStart,instance));__cov_Bkabxj96f5yfp8GqViY5TQ.s['106']++;instance.after('drag:start',A.bind(instance._afterDragStart,instance));__cov_Bkabxj96f5yfp8GqViY5TQ.s['107']++;instance.on('quadrantEnter',instance._syncPlaceholderUI);__cov_Bkabxj96f5yfp8GqViY5TQ.s['108']++;instance.on('quadrantExit',instance._syncPlaceholderUI);},_bindDropZones:function(){__cov_Bkabxj96f5yfp8GqViY5TQ.f['22']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['109']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['110']++;var 
dropNodes=instance.get('dropNodes');__cov_Bkabxj96f5yfp8GqViY5TQ.s['111']++;if(dropNodes){__cov_Bkabxj96f5yfp8GqViY5TQ.b['20'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['112']++;dropNodes.each(function(node,i){__cov_Bkabxj96f5yfp8GqViY5TQ.f['23']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['113']++;instance.addDropNode(node);});}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['20'][1]++;}},_defPlaceholderAlign:function(event){__cov_Bkabxj96f5yfp8GqViY5TQ.f['24']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['114']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['115']++;var activeDrop=instance.activeDrop;__cov_Bkabxj96f5yfp8GqViY5TQ.s['116']++;var placeholder=instance.get('placeholder');__cov_Bkabxj96f5yfp8GqViY5TQ.s['117']++;if((__cov_Bkabxj96f5yfp8GqViY5TQ.b['22'][0]++,activeDrop)&&(__cov_Bkabxj96f5yfp8GqViY5TQ.b['22'][1]++,placeholder)){__cov_Bkabxj96f5yfp8GqViY5TQ.b['21'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['118']++;var node=activeDrop.get('node');__cov_Bkabxj96f5yfp8GqViY5TQ.s['119']++;var isTarget=!!node.drop;__cov_Bkabxj96f5yfp8GqViY5TQ.s['120']++;instance.lastAlignDrop=activeDrop;__cov_Bkabxj96f5yfp8GqViY5TQ.s['121']++;instance.alignPlaceholder(activeDrop.get('node').get('region'),isTarget);}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['21'][1]++;}},_evOutput:function(){__cov_Bkabxj96f5yfp8GqViY5TQ.f['25']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['122']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['123']++;return{drag:DDM.activeDrag,drop:instance.activeDrop,quadrant:instance.quadrant,XDirection:instance.XDirection,YDirection:instance.YDirection};},_fireQuadrantEvents:function(){__cov_Bkabxj96f5yfp8GqViY5TQ.f['26']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['124']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['125']++;var evOutput=instance._evOutput();__cov_Bkabxj96f5yfp8GqViY5TQ.s['126']++;var lastQuadrant=instance.lastQuadrant;__cov_Bkabxj96f5yfp8GqViY5TQ.s['127']++;var quadrant=instance.quadrant;__cov_Bkabxj96f5yfp8GqViY5TQ.s['128']++;if(quadrant!=lastQuadrant){__cov_Bkabxj96f5yfp8GqViY5TQ.b['23'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['129']++;if(lastQuadrant){__cov_Bkabxj96f5yfp8GqViY5TQ.b['24'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['130']++;instance.fire('quadrantExit',A.merge({lastDrag:instance.lastDrag,lastDrop:instance.lastDrop,lastQuadrant:instance.lastQuadrant,lastXDirection:instance.lastXDirection,lastYDirection:instance.lastYDirection},evOutput));}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['24'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['131']++;instance.fire('quadrantEnter',evOutput);}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['23'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['132']++;instance.fire('quadrantOver',evOutput);__cov_Bkabxj96f5yfp8GqViY5TQ.s['133']++;instance.lastDrag=DDM.activeDrag;__cov_Bkabxj96f5yfp8GqViY5TQ.s['134']++;instance.lastDrop=instance.activeDrop;__cov_Bkabxj96f5yfp8GqViY5TQ.s['135']++;instance.lastQuadrant=quadrant;__cov_Bkabxj96f5yfp8GqViY5TQ.s['136']++;instance.lastXDirection=instance.XDirection;__cov_Bkabxj96f5yfp8GqViY5TQ.s['137']++;instance.lastYDirection=instance.YDirection;},_getAppendNode:function(){__cov_Bkabxj96f5yfp8GqViY5TQ.f['27']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['138']++;return DDM.activeDrag.get('node');},_positionNode:function(event){__cov_Bkabxj96f5yfp8GqViY5TQ.f['28']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['139']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['140']++;var 
activeDrop=(__cov_Bkabxj96f5yfp8GqViY5TQ.b['25'][0]++,instance.lastAlignDrop)||(__cov_Bkabxj96f5yfp8GqViY5TQ.b['25'][1]++,instance.activeDrop);__cov_Bkabxj96f5yfp8GqViY5TQ.s['141']++;if(activeDrop){__cov_Bkabxj96f5yfp8GqViY5TQ.b['26'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['142']++;var dragNode=instance._getAppendNode();__cov_Bkabxj96f5yfp8GqViY5TQ.s['143']++;var dropNode=activeDrop.get('node');__cov_Bkabxj96f5yfp8GqViY5TQ.s['144']++;var isTarget=isValue(dropNode.drop);__cov_Bkabxj96f5yfp8GqViY5TQ.s['145']++;var topQuadrants=instance.quadrant<3;__cov_Bkabxj96f5yfp8GqViY5TQ.s['146']++;if(instance._alignCondition()){__cov_Bkabxj96f5yfp8GqViY5TQ.b['27'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['147']++;if(isTarget){__cov_Bkabxj96f5yfp8GqViY5TQ.b['28'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['148']++;dropNode[topQuadrants?(__cov_Bkabxj96f5yfp8GqViY5TQ.b['29'][0]++,'placeBefore'):(__cov_Bkabxj96f5yfp8GqViY5TQ.b['29'][1]++,'placeAfter')](dragNode);}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['28'][1]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['149']++;var dropContainer=instance.get('dropContainer').apply(instance,[dropNode]);__cov_Bkabxj96f5yfp8GqViY5TQ.s['150']++;dropContainer[topQuadrants?(__cov_Bkabxj96f5yfp8GqViY5TQ.b['30'][0]++,'prepend'):(__cov_Bkabxj96f5yfp8GqViY5TQ.b['30'][1]++,'append')](dragNode);}}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['27'][1]++;}}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['26'][1]++;}},_syncPlaceholderUI:function(event){__cov_Bkabxj96f5yfp8GqViY5TQ.f['29']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['151']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['152']++;if(instance._alignCondition()){__cov_Bkabxj96f5yfp8GqViY5TQ.b['31'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['153']++;instance.fire('placeholderAlign',{drop:instance.activeDrop,originalEvent:event});}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['31'][1]++;}},_syncPlaceholderSize:function(){__cov_Bkabxj96f5yfp8GqViY5TQ.f['30']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['154']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['155']++;var node=instance.activeDrop.get('node');__cov_Bkabxj96f5yfp8GqViY5TQ.s['156']++;var placeholder=instance.get('placeholder');__cov_Bkabxj96f5yfp8GqViY5TQ.s['157']++;if(placeholder){__cov_Bkabxj96f5yfp8GqViY5TQ.b['32'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['158']++;placeholder.set('offsetWidth',node.get('offsetWidth'));}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['32'][1]++;}},_syncProxyNodeUI:function(event){__cov_Bkabxj96f5yfp8GqViY5TQ.f['31']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['159']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['160']++;var dragNode=DDM.activeDrag.get('dragNode');__cov_Bkabxj96f5yfp8GqViY5TQ.s['161']++;var proxyNode=instance.get('proxyNode');__cov_Bkabxj96f5yfp8GqViY5TQ.s['162']++;if((__cov_Bkabxj96f5yfp8GqViY5TQ.b['34'][0]++,proxyNode)&&(__cov_Bkabxj96f5yfp8GqViY5TQ.b['34'][1]++,!proxyNode.compareTo(dragNode))){__cov_Bkabxj96f5yfp8GqViY5TQ.b['33'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['163']++;dragNode.append(proxyNode);__cov_Bkabxj96f5yfp8GqViY5TQ.s['164']++;instance._syncProxyNodeSize();}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['33'][1]++;}},_syncProxyNodeSize:function(){__cov_Bkabxj96f5yfp8GqViY5TQ.f['32']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['165']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['166']++;var node=DDM.activeDrag.get('node');__cov_Bkabxj96f5yfp8GqViY5TQ.s['167']++;var 
proxyNode=instance.get('proxyNode');__cov_Bkabxj96f5yfp8GqViY5TQ.s['168']++;if((__cov_Bkabxj96f5yfp8GqViY5TQ.b['36'][0]++,node)&&(__cov_Bkabxj96f5yfp8GqViY5TQ.b['36'][1]++,proxyNode)){__cov_Bkabxj96f5yfp8GqViY5TQ.b['35'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['169']++;proxyNode.set('offsetHeight',node.get('offsetHeight'));__cov_Bkabxj96f5yfp8GqViY5TQ.s['170']++;proxyNode.set('offsetWidth',node.get('offsetWidth'));}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['35'][1]++;}},_afterDragStart:function(event){__cov_Bkabxj96f5yfp8GqViY5TQ.f['33']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['171']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['172']++;if(instance.get('proxy')){__cov_Bkabxj96f5yfp8GqViY5TQ.b['37'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['173']++;instance._syncProxyNodeUI(event);}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['37'][1]++;}},_onDragEnd:function(event){__cov_Bkabxj96f5yfp8GqViY5TQ.f['34']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['174']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['175']++;var placeholder=instance.get('placeholder');__cov_Bkabxj96f5yfp8GqViY5TQ.s['176']++;var proxyNode=instance.get('proxyNode');__cov_Bkabxj96f5yfp8GqViY5TQ.s['177']++;if(!instance.lazyEvents){__cov_Bkabxj96f5yfp8GqViY5TQ.b['38'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['178']++;instance._positionNode(event);}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['38'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['179']++;if(proxyNode){__cov_Bkabxj96f5yfp8GqViY5TQ.b['39'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['180']++;proxyNode.remove();}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['39'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['181']++;if(placeholder){__cov_Bkabxj96f5yfp8GqViY5TQ.b['40'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['182']++;placeholder.hide();}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['40'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['183']++;instance.lastQuadrant=null;__cov_Bkabxj96f5yfp8GqViY5TQ.s['184']++;instance.lastXDirection=null;__cov_Bkabxj96f5yfp8GqViY5TQ.s['185']++;instance.lastYDirection=null;},_onDragEnter:function(event){__cov_Bkabxj96f5yfp8GqViY5TQ.f['35']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['186']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['187']++;instance.activeDrop=DDM.activeDrop;__cov_Bkabxj96f5yfp8GqViY5TQ.s['188']++;if((__cov_Bkabxj96f5yfp8GqViY5TQ.b['42'][0]++,instance.lazyEvents)&&(__cov_Bkabxj96f5yfp8GqViY5TQ.b['42'][1]++,instance.lastActiveDrop)){__cov_Bkabxj96f5yfp8GqViY5TQ.b['41'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['189']++;instance.lazyEvents=false;__cov_Bkabxj96f5yfp8GqViY5TQ.s['190']++;instance._syncPlaceholderUI(event);}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['41'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['191']++;if(!instance.lastActiveDrop){__cov_Bkabxj96f5yfp8GqViY5TQ.b['43'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['192']++;instance.lastActiveDrop=DDM.activeDrop;}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['43'][1]++;}},_onDragExit:function(event){__cov_Bkabxj96f5yfp8GqViY5TQ.f['36']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['193']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['194']++;instance._syncPlaceholderUI(event);__cov_Bkabxj96f5yfp8GqViY5TQ.s['195']++;instance.activeDrop=DDM.activeDrop;__cov_Bkabxj96f5yfp8GqViY5TQ.s['196']++;instance.lastActiveDrop=DDM.activeDrop;},_onDragOver:function(event){__cov_Bkabxj96f5yfp8GqViY5TQ.f['37']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['197']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['198']++;var 
drag=event.drag;__cov_Bkabxj96f5yfp8GqViY5TQ.s['199']++;if(instance.activeDrop==DDM.activeDrop){__cov_Bkabxj96f5yfp8GqViY5TQ.b['44'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['200']++;instance.calculateDirections(drag);__cov_Bkabxj96f5yfp8GqViY5TQ.s['201']++;instance.calculateQuadrant(drag,instance.activeDrop);__cov_Bkabxj96f5yfp8GqViY5TQ.s['202']++;instance._fireQuadrantEvents();}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['44'][1]++;}},_onDragStart:function(event){__cov_Bkabxj96f5yfp8GqViY5TQ.f['38']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['203']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['204']++;if(instance.get('lazyStart')){__cov_Bkabxj96f5yfp8GqViY5TQ.b['45'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['205']++;instance.lazyEvents=true;}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['45'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['206']++;instance.lastActiveDrop=null;__cov_Bkabxj96f5yfp8GqViY5TQ.s['207']++;instance.activeDrop=DDM.activeDrop;},_setDropNodes:function(val){__cov_Bkabxj96f5yfp8GqViY5TQ.f['39']++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['208']++;var instance=this;__cov_Bkabxj96f5yfp8GqViY5TQ.s['209']++;if(isFunction(val)){__cov_Bkabxj96f5yfp8GqViY5TQ.b['46'][0]++;__cov_Bkabxj96f5yfp8GqViY5TQ.s['210']++;val=val.call(instance);}else{__cov_Bkabxj96f5yfp8GqViY5TQ.b['46'][1]++;}__cov_Bkabxj96f5yfp8GqViY5TQ.s['211']++;return nodeListSetter(val);}}});__cov_Bkabxj96f5yfp8GqViY5TQ.s['212']++;A.SortableLayout=SortableLayout;},'2.5.0',{'requires':['dd-delegate','dd-drag','dd-drop','dd-proxy','aui-node','aui-component'],'skinnable':true});
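// Note on the generated code above (explanatory comment, not part of the
// original build): the coverage instrumenter threads one counter object per
// file through every expression. __cov_X.s[n]++ records that statement n ran,
// __cov_X.b[n][k]++ records that branch n took its k-th arm, and
// __cov_X.f[n]++ records that function n was invoked. The mapping from those
// ids back to source locations is the __coverage__ entry registered below.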
__coverage__['build/aui-sortable-layout/aui-sortable-layout.js'] = {"path":"build/aui-sortable-layout/aui-sortable-layout.js","s":{"1":0,"2":0,"3":0,"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0,"38":0,"39":0,"40":0,"41":0,"42":0,"43":0,"44":0,"45":0,"46":0,"47":0,"48":0,"49":0,"50":0,"51":0,"52":0,"53":0,"54":0,"55":0,"56":0,"57":0,"58":0,"59":0,"60":0,"61":0,"62":0,"63":0,"64":0,"65":0,"66":0,"67":0,"68":0,"69":0,"70":0,"71":0,"72":0,"73":0,"74":0,"75":0,"76":0,"77":0,"78":0,"79":0,"80":0,"81":0,"82":0,"83":0,"84":0,"85":0,"86":0,"87":0,"88":0,"89":0,"90":0,"91":0,"92":0,"93":0,"94":0,"95":0,"96":0,"97":0,"98":0,"99":0,"100":0,"101":0,"102":0,"103":0,"104":0,"105":0,"106":0,"107":0,"108":0,"109":0,"110":0,"111":0,"112":0,"113":0,"114":0,"115":0,"116":0,"117":0,"118":0,"119":0,"120":0,"121":0,"122":0,"123":0,"124":0,"125":0,"126":0,"127":0,"128":0,"129":0,"130":0,"131":0,"132":0,"133":0,"134":0,"135":0,"136":0,"137":0,"138":0,"139":0,"140":0,"141":0,"142":0,"143":0,"144":0,"145":0,"146":0,"147":0,"148":0,"149":0,"150":0,"151":0,"152":0,"153":0,"154":0,"155":0,"156":0,"157":0,"158":0,"159":0,"160":0,"161":0,"162":0,"163":0,"164":0,"165":0,"166":0,"167":0,"168":0,"169":0,"170":0,"171":0,"172":0,"173":0,"174":0,"175":0,"176":0,"177":0,"178":0,"179":0,"180":0,"181":0,"182":0,"183":0,"184":0,"185":0,"186":0,"187":0,"188":0,"189":0,"190":0,"191":0,"192":0,"193":0,"194":0,"195":0,"196":0,"197":0,"198":0,"199":0,"200":0,"201":0,"202":0,"203":0,"204":0,"205":0,"206":0,"207":0,"208":0,"209":0,"210":0,"211":0,"212":0},"b":{"1":[0,0],"2":[0,0],"3":[0,0],"4":[0,0],"5":[0,0],"6":[0,0],"7":[0,0],"8":[0,0],"9":[0,0],"10":[0,0],"11":[0,0],"12":[0,0],"13":[0,0],"14":[0,0],"15":[0,0],"16":[0,0],"17":[0,0],"18":[0,0],"19":[0,0],"20":[0,0],"21":[0,0],"22":[0,0],"23":[0,0],"24":[0,0],"25":[0,0],"26":[0,0],"27":[0,0],"28":[0,0],"29":[0,0],"30":[0,0],"31":[0,0],"32":[0,0],"33":[0,0],"34":[0,0],"35":[0,0],"36":[0,0],"37":[0,0],"38":[0,0],"39":[0,0],"40":[0,0],"41":[0,0],"42":[0,0],"43":[0,0],"44":[0,0],"45":[0,0],"46":[0,0]},"f":{"1":0,"2":0,"3":0,"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0,"38":0,"39":0},"fnMap":{"1":{"name":"(anonymous_1)","line":1,"loc":{"start":{"line":1,"column":31},"end":{"line":1,"column":50}}},"2":{"name":"(anonymous_2)","line":28,"loc":{"start":{"line":28,"column":17},"end":{"line":28,"column":29}}},"3":{"name":"(anonymous_3)","line":32,"loc":{"start":{"line":32,"column":13},"end":{"line":32,"column":24}}},"4":{"name":"(anonymous_4)","line":36,"loc":{"start":{"line":36,"column":21},"end":{"line":36,"column":35}}},"5":{"name":"(anonymous_5)","line":40,"loc":{"start":{"line":40,"column":18},"end":{"line":40,"column":44}}},"6":{"name":"(anonymous_6)","line":105,"loc":{"start":{"line":105,"column":20},"end":{"line":105,"column":34}}},"7":{"name":"(anonymous_7)","line":133,"loc":{"start":{"line":133,"column":20},"end":{"line":133,"column":34}}},"8":{"name":"(anonymous_8)","line":155,"loc":{"start":{"line":155,"column":19},"end":{"line":155,"column":38}}},"9":{"name":"(anonymous_9)","line":200,"loc":{"start":{"line":200,"column":20},"end":{"line":200,"column":34}}},"10":{"name":"(anonymous_10)","line":229,"loc":{"start":{"line"
:229,"column":20},"end":{"line":229,"column":34}}},"11":{"name":"(anonymous_11)","line":265,"loc":{"start":{"line":265,"column":21},"end":{"line":265,"column":32}}},"12":{"name":"(anonymous_12)","line":277,"loc":{"start":{"line":277,"column":16},"end":{"line":277,"column":27}}},"13":{"name":"(anonymous_13)","line":300,"loc":{"start":{"line":300,"column":21},"end":{"line":300,"column":44}}},"14":{"name":"(anonymous_14)","line":329,"loc":{"start":{"line":329,"column":23},"end":{"line":329,"column":38}}},"15":{"name":"(anonymous_15)","line":344,"loc":{"start":{"line":344,"column":26},"end":{"line":344,"column":53}}},"16":{"name":"(anonymous_16)","line":366,"loc":{"start":{"line":366,"column":29},"end":{"line":366,"column":44}}},"17":{"name":"(anonymous_17)","line":398,"loc":{"start":{"line":398,"column":27},"end":{"line":398,"column":48}}},"18":{"name":"(anonymous_18)","line":434,"loc":{"start":{"line":434,"column":26},"end":{"line":434,"column":53}}},"19":{"name":"(anonymous_19)","line":471,"loc":{"start":{"line":471,"column":26},"end":{"line":471,"column":41}}},"20":{"name":"(anonymous_20)","line":486,"loc":{"start":{"line":486,"column":25},"end":{"line":486,"column":36}}},"21":{"name":"(anonymous_21)","line":508,"loc":{"start":{"line":508,"column":23},"end":{"line":508,"column":34}}},"22":{"name":"(anonymous_22)","line":536,"loc":{"start":{"line":536,"column":24},"end":{"line":536,"column":35}}},"23":{"name":"(anonymous_23)","line":541,"loc":{"start":{"line":541,"column":31},"end":{"line":541,"column":49}}},"24":{"name":"(anonymous_24)","line":554,"loc":{"start":{"line":554,"column":30},"end":{"line":554,"column":46}}},"25":{"name":"(anonymous_25)","line":584,"loc":{"start":{"line":584,"column":19},"end":{"line":584,"column":30}}},"26":{"name":"(anonymous_26)","line":602,"loc":{"start":{"line":602,"column":29},"end":{"line":602,"column":40}}},"27":{"name":"(anonymous_27)","line":649,"loc":{"start":{"line":649,"column":24},"end":{"line":649,"column":35}}},"28":{"name":"(anonymous_28)","line":660,"loc":{"start":{"line":660,"column":23},"end":{"line":660,"column":39}}},"29":{"name":"(anonymous_29)","line":699,"loc":{"start":{"line":699,"column":28},"end":{"line":699,"column":44}}},"30":{"name":"(anonymous_30)","line":717,"loc":{"start":{"line":717,"column":30},"end":{"line":717,"column":41}}},"31":{"name":"(anonymous_31)","line":738,"loc":{"start":{"line":738,"column":26},"end":{"line":738,"column":42}}},"32":{"name":"(anonymous_32)","line":756,"loc":{"start":{"line":756,"column":28},"end":{"line":756,"column":39}}},"33":{"name":"(anonymous_33)","line":781,"loc":{"start":{"line":781,"column":25},"end":{"line":781,"column":41}}},"34":{"name":"(anonymous_34)","line":796,"loc":{"start":{"line":796,"column":20},"end":{"line":796,"column":36}}},"35":{"name":"(anonymous_35)","line":827,"loc":{"start":{"line":827,"column":22},"end":{"line":827,"column":38}}},"36":{"name":"(anonymous_36)","line":855,"loc":{"start":{"line":855,"column":21},"end":{"line":855,"column":37}}},"37":{"name":"(anonymous_37)","line":872,"loc":{"start":{"line":872,"column":21},"end":{"line":872,"column":37}}},"38":{"name":"(anonymous_38)","line":893,"loc":{"start":{"line":893,"column":22},"end":{"line":893,"column":38}}},"39":{"name":"(anonymous_39)","line":913,"loc":{"start":{"line":913,"column":23},"end":{"line":913,"column":37}}}},"statementMap":{"1":{"start":{"line":1,"column":0},"end":{"line":938,"column":3}},"2":{"start":{"line":9,"column":0},"end":{"line":60,"column":16}},"3":{"start":{"line":29,"column":8},"end":{"line"
:29,"column":41}},"4":{"start":{"line":33,"column":8},"end":{"line":33,"column":63}},"5":{"start":{"line":37,"column":8},"end":{"line":37,"column":50}},"6":{"start":{"line":41,"column":8},"end":{"line":41,"column":47}},"7":{"start":{"line":76,"column":0},"end":{"line":923,"column":3}},"8":{"start":{"line":106,"column":16},"end":{"line":106,"column":36}},"9":{"start":{"line":108,"column":16},"end":{"line":115,"column":18}},"10":{"start":{"line":117,"column":16},"end":{"line":120,"column":19}},"11":{"start":{"line":122,"column":16},"end":{"line":122,"column":30}},"12":{"start":{"line":134,"column":16},"end":{"line":134,"column":64}},"13":{"start":{"line":156,"column":16},"end":{"line":156,"column":32}},"14":{"start":{"line":201,"column":16},"end":{"line":201,"column":75}},"15":{"start":{"line":203,"column":16},"end":{"line":207,"column":17}},"16":{"start":{"line":204,"column":20},"end":{"line":206,"column":22}},"17":{"start":{"line":209,"column":16},"end":{"line":209,"column":85}},"18":{"start":{"line":210,"column":16},"end":{"line":210,"column":79}},"19":{"start":{"line":212,"column":16},"end":{"line":212,"column":64}},"20":{"start":{"line":214,"column":16},"end":{"line":214,"column":92}},"21":{"start":{"line":215,"column":16},"end":{"line":215,"column":86}},"22":{"start":{"line":217,"column":16},"end":{"line":217,"column":35}},"23":{"start":{"line":230,"column":16},"end":{"line":230,"column":36}},"24":{"start":{"line":232,"column":16},"end":{"line":235,"column":18}},"25":{"start":{"line":238,"column":16},"end":{"line":240,"column":17}},"26":{"start":{"line":239,"column":20},"end":{"line":239,"column":48}},"27":{"start":{"line":242,"column":16},"end":{"line":242,"column":52}},"28":{"start":{"line":266,"column":12},"end":{"line":266,"column":32}},"29":{"start":{"line":268,"column":12},"end":{"line":268,"column":30}},"30":{"start":{"line":278,"column":12},"end":{"line":278,"column":32}},"31":{"start":{"line":281,"column":12},"end":{"line":286,"column":15}},"32":{"start":{"line":288,"column":12},"end":{"line":288,"column":37}},"33":{"start":{"line":289,"column":12},"end":{"line":289,"column":38}},"34":{"start":{"line":301,"column":12},"end":{"line":301,"column":32}},"35":{"start":{"line":303,"column":12},"end":{"line":303,"column":31}},"36":{"start":{"line":305,"column":12},"end":{"line":320,"column":13}},"37":{"start":{"line":306,"column":16},"end":{"line":319,"column":18}},"38":{"start":{"line":330,"column":12},"end":{"line":330,"column":32}},"39":{"start":{"line":332,"column":12},"end":{"line":334,"column":14}},"40":{"start":{"line":345,"column":12},"end":{"line":345,"column":32}},"41":{"start":{"line":346,"column":12},"end":{"line":346,"column":58}},"42":{"start":{"line":348,"column":12},"end":{"line":350,"column":13}},"43":{"start":{"line":349,"column":16},"end":{"line":349,"column":35}},"44":{"start":{"line":353,"column":12},"end":{"line":353,"column":44}},"45":{"start":{"line":355,"column":12},"end":{"line":357,"column":14}},"46":{"start":{"line":367,"column":12},"end":{"line":367,"column":32}},"47":{"start":{"line":368,"column":12},"end":{"line":368,"column":39}},"48":{"start":{"line":369,"column":12},"end":{"line":369,"column":39}},"49":{"start":{"line":371,"column":12},"end":{"line":371,"column":35}},"50":{"start":{"line":372,"column":12},"end":{"line":372,"column":35}},"51":{"start":{"line":375,"column":12},"end":{"line":378,"column":13}},"52":{"start":{"line":377,"column":16},"end":{"line":377,"column":69}},"53":{"start":{"line":381,"column":12},"end":{"line":384,"column":13}},"54":
{"start":{"line":383,"column":16},"end":{"line":383,"column":66}},"55":{"start":{"line":386,"column":12},"end":{"line":386,"column":31}},"56":{"start":{"line":387,"column":12},"end":{"line":387,"column":31}},"57":{"start":{"line":399,"column":12},"end":{"line":399,"column":32}},"58":{"start":{"line":400,"column":12},"end":{"line":400,"column":29}},"59":{"start":{"line":401,"column":12},"end":{"line":401,"column":56}},"60":{"start":{"line":402,"column":12},"end":{"line":402,"column":39}},"61":{"start":{"line":403,"column":12},"end":{"line":403,"column":36}},"62":{"start":{"line":404,"column":12},"end":{"line":404,"column":36}},"63":{"start":{"line":406,"column":12},"end":{"line":406,"column":33}},"64":{"start":{"line":407,"column":12},"end":{"line":407,"column":35}},"65":{"start":{"line":410,"column":12},"end":{"line":410,"column":58}},"66":{"start":{"line":412,"column":12},"end":{"line":412,"column":59}},"67":{"start":{"line":414,"column":12},"end":{"line":419,"column":13}},"68":{"start":{"line":415,"column":16},"end":{"line":415,"column":54}},"69":{"start":{"line":418,"column":16},"end":{"line":418,"column":54}},"70":{"start":{"line":421,"column":12},"end":{"line":421,"column":41}},"71":{"start":{"line":423,"column":12},"end":{"line":423,"column":28}},"72":{"start":{"line":435,"column":12},"end":{"line":435,"column":32}},"73":{"start":{"line":436,"column":12},"end":{"line":436,"column":58}},"74":{"start":{"line":437,"column":12},"end":{"line":437,"column":57}},"75":{"start":{"line":438,"column":12},"end":{"line":438,"column":51}},"76":{"start":{"line":440,"column":12},"end":{"line":445,"column":13}},"77":{"start":{"line":443,"column":16},"end":{"line":443,"column":64}},"78":{"start":{"line":444,"column":16},"end":{"line":444,"column":58}},"79":{"start":{"line":449,"column":12},"end":{"line":449,"column":73}},"80":{"start":{"line":451,"column":12},"end":{"line":451,"column":51}},"81":{"start":{"line":452,"column":12},"end":{"line":452,"column":47}},"82":{"start":{"line":453,"column":12},"end":{"line":453,"column":45}},"83":{"start":{"line":455,"column":12},"end":{"line":455,"column":31}},"84":{"start":{"line":459,"column":12},"end":{"line":460,"column":108}},"85":{"start":{"line":462,"column":12},"end":{"line":462,"column":26}},"86":{"start":{"line":472,"column":12},"end":{"line":472,"column":32}},"87":{"start":{"line":474,"column":12},"end":{"line":476,"column":14}},"88":{"start":{"line":487,"column":12},"end":{"line":487,"column":32}},"89":{"start":{"line":488,"column":12},"end":{"line":488,"column":44}},"90":{"start":{"line":489,"column":12},"end":{"line":489,"column":49}},"91":{"start":{"line":491,"column":12},"end":{"line":496,"column":13}},"92":{"start":{"line":492,"column":16},"end":{"line":492,"column":54}},"93":{"start":{"line":493,"column":16},"end":{"line":493,"column":54}},"94":{"start":{"line":495,"column":16},"end":{"line":495,"column":52}},"95":{"start":{"line":498,"column":12},"end":{"line":498,"column":24}},"96":{"start":{"line":509,"column":12},"end":{"line":509,"column":32}},"97":{"start":{"line":510,"column":12},"end":{"line":510,"column":64}},"98":{"start":{"line":511,"column":12},"end":{"line":511,"column":46}},"99":{"start":{"line":514,"column":12},"end":{"line":514,"column":66}},"100":{"start":{"line":517,"column":12},"end":{"line":517,"column":63}},"101":{"start":{"line":519,"column":12},"end":{"line":519,"column":75}},"102":{"start":{"line":520,"column":12},"end":{"line":520,"column":79}},"103":{"start":{"line":521,"column":12},"end":{"line":521,"column":77}},"104"
:{"start":{"line":522,"column":12},"end":{"line":522,"column":77}},"105":{"start":{"line":523,"column":12},"end":{"line":523,"column":79}},"106":{"start":{"line":524,"column":12},"end":{"line":524,"column":85}},"107":{"start":{"line":526,"column":12},"end":{"line":526,"column":70}},"108":{"start":{"line":527,"column":12},"end":{"line":527,"column":69}},"109":{"start":{"line":537,"column":12},"end":{"line":537,"column":32}},"110":{"start":{"line":538,"column":12},"end":{"line":538,"column":54}},"111":{"start":{"line":540,"column":12},"end":{"line":544,"column":13}},"112":{"start":{"line":541,"column":16},"end":{"line":543,"column":19}},"113":{"start":{"line":542,"column":20},"end":{"line":542,"column":47}},"114":{"start":{"line":555,"column":12},"end":{"line":555,"column":32}},"115":{"start":{"line":556,"column":12},"end":{"line":556,"column":49}},"116":{"start":{"line":557,"column":12},"end":{"line":557,"column":58}},"117":{"start":{"line":559,"column":12},"end":{"line":573,"column":13}},"118":{"start":{"line":560,"column":16},"end":{"line":560,"column":50}},"119":{"start":{"line":565,"column":16},"end":{"line":565,"column":44}},"120":{"start":{"line":567,"column":16},"end":{"line":567,"column":52}},"121":{"start":{"line":569,"column":16},"end":{"line":572,"column":18}},"122":{"start":{"line":585,"column":12},"end":{"line":585,"column":32}},"123":{"start":{"line":587,"column":12},"end":{"line":593,"column":14}},"124":{"start":{"line":603,"column":12},"end":{"line":603,"column":32}},"125":{"start":{"line":604,"column":12},"end":{"line":604,"column":48}},"126":{"start":{"line":605,"column":12},"end":{"line":605,"column":53}},"127":{"start":{"line":606,"column":12},"end":{"line":606,"column":45}},"128":{"start":{"line":608,"column":12},"end":{"line":628,"column":13}},"129":{"start":{"line":610,"column":16},"end":{"line":624,"column":17}},"130":{"start":{"line":612,"column":20},"end":{"line":623,"column":22}},"131":{"start":{"line":627,"column":16},"end":{"line":627,"column":57}},"132":{"start":{"line":632,"column":12},"end":{"line":632,"column":52}},"133":{"start":{"line":635,"column":12},"end":{"line":635,"column":47}},"134":{"start":{"line":636,"column":12},"end":{"line":636,"column":52}},"135":{"start":{"line":637,"column":12},"end":{"line":637,"column":45}},"136":{"start":{"line":638,"column":12},"end":{"line":638,"column":58}},"137":{"start":{"line":639,"column":12},"end":{"line":639,"column":58}},"138":{"start":{"line":650,"column":12},"end":{"line":650,"column":46}},"139":{"start":{"line":661,"column":12},"end":{"line":661,"column":32}},"140":{"start":{"line":662,"column":12},"end":{"line":662,"column":75}},"141":{"start":{"line":664,"column":12},"end":{"line":689,"column":13}},"142":{"start":{"line":665,"column":16},"end":{"line":665,"column":57}},"143":{"start":{"line":666,"column":16},"end":{"line":666,"column":54}},"144":{"start":{"line":673,"column":16},"end":{"line":673,"column":54}},"145":{"start":{"line":674,"column":16},"end":{"line":674,"column":59}},"146":{"start":{"line":676,"column":16},"end":{"line":688,"column":17}},"147":{"start":{"line":677,"column":20},"end":{"line":687,"column":21}},"148":{"start":{"line":678,"column":24},"end":{"line":678,"column":88}},"149":{"start":{"line":684,"column":24},"end":{"line":684,"column":102}},"150":{"start":{"line":686,"column":24},"end":{"line":686,"column":85}},"151":{"start":{"line":700,"column":12},"end":{"line":700,"column":32}},"152":{"start":{"line":702,"column":12},"end":{"line":708,"column":13}},"153":{"start":{"line":704,"col
umn":16},"end":{"line":707,"column":19}},"154":{"start":{"line":718,"column":12},"end":{"line":718,"column":32}},"155":{"start":{"line":719,"column":12},"end":{"line":719,"column":55}},"156":{"start":{"line":721,"column":12},"end":{"line":721,"column":58}},"157":{"start":{"line":723,"column":12},"end":{"line":728,"column":13}},"158":{"start":{"line":724,"column":16},"end":{"line":727,"column":18}},"159":{"start":{"line":739,"column":12},"end":{"line":739,"column":32}},"160":{"start":{"line":740,"column":12},"end":{"line":740,"column":58}},"161":{"start":{"line":741,"column":12},"end":{"line":741,"column":54}},"162":{"start":{"line":743,"column":12},"end":{"line":747,"column":13}},"163":{"start":{"line":744,"column":16},"end":{"line":744,"column":43}},"164":{"start":{"line":746,"column":16},"end":{"line":746,"column":46}},"165":{"start":{"line":757,"column":12},"end":{"line":757,"column":32}},"166":{"start":{"line":758,"column":12},"end":{"line":758,"column":50}},"167":{"start":{"line":759,"column":12},"end":{"line":759,"column":54}},"168":{"start":{"line":761,"column":12},"end":{"line":771,"column":13}},"169":{"start":{"line":762,"column":16},"end":{"line":765,"column":18}},"170":{"start":{"line":767,"column":16},"end":{"line":770,"column":18}},"171":{"start":{"line":782,"column":12},"end":{"line":782,"column":32}},"172":{"start":{"line":784,"column":12},"end":{"line":786,"column":13}},"173":{"start":{"line":785,"column":16},"end":{"line":785,"column":49}},"174":{"start":{"line":797,"column":12},"end":{"line":797,"column":32}},"175":{"start":{"line":798,"column":12},"end":{"line":798,"column":58}},"176":{"start":{"line":799,"column":12},"end":{"line":799,"column":54}},"177":{"start":{"line":801,"column":12},"end":{"line":803,"column":13}},"178":{"start":{"line":802,"column":16},"end":{"line":802,"column":46}},"179":{"start":{"line":805,"column":12},"end":{"line":807,"column":13}},"180":{"start":{"line":806,"column":16},"end":{"line":806,"column":35}},"181":{"start":{"line":809,"column":12},"end":{"line":811,"column":13}},"182":{"start":{"line":810,"column":16},"end":{"line":810,"column":35}},"183":{"start":{"line":814,"column":12},"end":{"line":814,"column":41}},"184":{"start":{"line":815,"column":12},"end":{"line":815,"column":43}},"185":{"start":{"line":816,"column":12},"end":{"line":816,"column":43}},"186":{"start":{"line":828,"column":12},"end":{"line":828,"column":32}},"187":{"start":{"line":830,"column":12},"end":{"line":830,"column":49}},"188":{"start":{"line":835,"column":12},"end":{"line":839,"column":13}},"189":{"start":{"line":836,"column":16},"end":{"line":836,"column":44}},"190":{"start":{"line":838,"column":16},"end":{"line":838,"column":51}},"191":{"start":{"line":843,"column":12},"end":{"line":845,"column":13}},"192":{"start":{"line":844,"column":16},"end":{"line":844,"column":57}},"193":{"start":{"line":856,"column":12},"end":{"line":856,"column":32}},"194":{"start":{"line":858,"column":12},"end":{"line":858,"column":47}},"195":{"start":{"line":860,"column":12},"end":{"line":860,"column":49}},"196":{"start":{"line":862,"column":12},"end":{"line":862,"column":53}},"197":{"start":{"line":873,"column":12},"end":{"line":873,"column":32}},"198":{"start":{"line":874,"column":12},"end":{"line":874,"column":34}},"199":{"start":{"line":877,"column":12},"end":{"line":883,"column":13}},"200":{"start":{"line":878,"column":16},"end":{"line":878,"column":51}},"201":{"start":{"line":880,"column":16},"end":{"line":880,"column":70}},"202":{"start":{"line":882,"column":16},"end":{"line":882,
"column":47}},"203":{"start":{"line":894,"column":12},"end":{"line":894,"column":32}},"204":{"start":{"line":896,"column":12},"end":{"line":898,"column":13}},"205":{"start":{"line":897,"column":16},"end":{"line":897,"column":43}},"206":{"start":{"line":900,"column":12},"end":{"line":900,"column":43}},"207":{"start":{"line":902,"column":12},"end":{"line":902,"column":49}},"208":{"start":{"line":914,"column":12},"end":{"line":914,"column":32}},"209":{"start":{"line":916,"column":12},"end":{"line":918,"column":13}},"210":{"start":{"line":917,"column":16},"end":{"line":917,"column":41}},"211":{"start":{"line":920,"column":12},"end":{"line":920,"column":39}},"212":{"start":{"line":925,"column":0},"end":{"line":925,"column":34}}},"branchMap":{"1":{"line":37,"type":"cond-expr","locations":[{"start":{"line":37,"column":33},"end":{"line":37,"column":36}},{"start":{"line":37,"column":39},"end":{"line":37,"column":49}}]},"2":{"line":134,"type":"cond-expr","locations":[{"start":{"line":134,"column":39},"end":{"line":134,"column":57}},{"start":{"line":134,"column":60},"end":{"line":134,"column":63}}]},"3":{"line":201,"type":"cond-expr","locations":[{"start":{"line":201,"column":50},"end":{"line":201,"column":68}},{"start":{"line":201,"column":71},"end":{"line":201,"column":74}}]},"4":{"line":203,"type":"if","locations":[{"start":{"line":203,"column":16},"end":{"line":203,"column":16}},{"start":{"line":203,"column":16},"end":{"line":203,"column":16}}]},"5":{"line":238,"type":"if","locations":[{"start":{"line":238,"column":16},"end":{"line":238,"column":16}},{"start":{"line":238,"column":16},"end":{"line":238,"column":16}}]},"6":{"line":242,"type":"binary-expr","locations":[{"start":{"line":242,"column":41},"end":{"line":242,"column":44}},{"start":{"line":242,"column":48},"end":{"line":242,"column":50}}]},"7":{"line":305,"type":"if","locations":[{"start":{"line":305,"column":12},"end":{"line":305,"column":12}},{"start":{"line":305,"column":12},"end":{"line":305,"column":12}}]},"8":{"line":348,"type":"if","locations":[{"start":{"line":348,"column":12},"end":{"line":348,"column":12}},{"start":{"line":348,"column":12},"end":{"line":348,"column":12}}]},"9":{"line":375,"type":"if","locations":[{"start":{"line":375,"column":12},"end":{"line":375,"column":12}},{"start":{"line":375,"column":12},"end":{"line":375,"column":12}}]},"10":{"line":377,"type":"cond-expr","locations":[{"start":{"line":377,"column":52},"end":{"line":377,"column":58}},{"start":{"line":377,"column":61},"end":{"line":377,"column":68}}]},"11":{"line":381,"type":"if","locations":[{"start":{"line":381,"column":12},"end":{"line":381,"column":12}},{"start":{"line":381,"column":12},"end":{"line":381,"column":12}}]},"12":{"line":383,"type":"cond-expr","locations":[{"start":{"line":383,"column":52},"end":{"line":383,"column":56}},{"start":{"line":383,"column":59},"end":{"line":383,"column":65}}]},"13":{"line":414,"type":"if","locations":[{"start":{"line":414,"column":12},"end":{"line":414,"column":12}},{"start":{"line":414,"column":12},"end":{"line":414,"column":12}}]},"14":{"line":415,"type":"cond-expr","locations":[{"start":{"line":415,"column":48},"end":{"line":415,"column":49}},{"start":{"line":415,"column":52},"end":{"line":415,"column":53}}]},"15":{"line":418,"type":"cond-expr","locations":[{"start":{"line":418,"column":48},"end":{"line":418,"column":49}},{"start":{"line":418,"column":52},"end":{"line":418,"column":53}}]},"16":{"line":440,"type":"if","locations":[{"start":{"line":440,"column":12},"end":{"line":440,"column":12}},{"start":{"line"
:440,"column":12},"end":{"line":440,"column":12}}]},"17":{"line":459,"type":"cond-expr","locations":[{"start":{"line":460,"column":17},"end":{"line":460,"column":77}},{"start":{"line":460,"column":82},"end":{"line":460,"column":106}}]},"18":{"line":491,"type":"if","locations":[{"start":{"line":491,"column":12},"end":{"line":491,"column":12}},{"start":{"line":491,"column":12},"end":{"line":491,"column":12}}]},"19":{"line":491,"type":"binary-expr","locations":[{"start":{"line":491,"column":16},"end":{"line":491,"column":26}},{"start":{"line":491,"column":30},"end":{"line":491,"column":40}}]},"20":{"line":540,"type":"if","locations":[{"start":{"line":540,"column":12},"end":{"line":540,"column":12}},{"start":{"line":540,"column":12},"end":{"line":540,"column":12}}]},"21":{"line":559,"type":"if","locations":[{"start":{"line":559,"column":12},"end":{"line":559,"column":12}},{"start":{"line":559,"column":12},"end":{"line":559,"column":12}}]},"22":{"line":559,"type":"binary-expr","locations":[{"start":{"line":559,"column":16},"end":{"line":559,"column":26}},{"start":{"line":559,"column":30},"end":{"line":559,"column":41}}]},"23":{"line":608,"type":"if","locations":[{"start":{"line":608,"column":12},"end":{"line":608,"column":12}},{"start":{"line":608,"column":12},"end":{"line":608,"column":12}}]},"24":{"line":610,"type":"if","locations":[{"start":{"line":610,"column":16},"end":{"line":610,"column":16}},{"start":{"line":610,"column":16},"end":{"line":610,"column":16}}]},"25":{"line":662,"type":"binary-expr","locations":[{"start":{"line":662,"column":29},"end":{"line":662,"column":51}},{"start":{"line":662,"column":55},"end":{"line":662,"column":74}}]},"26":{"line":664,"type":"if","locations":[{"start":{"line":664,"column":12},"end":{"line":664,"column":12}},{"start":{"line":664,"column":12},"end":{"line":664,"column":12}}]},"27":{"line":676,"type":"if","locations":[{"start":{"line":676,"column":16},"end":{"line":676,"column":16}},{"start":{"line":676,"column":16},"end":{"line":676,"column":16}}]},"28":{"line":677,"type":"if","locations":[{"start":{"line":677,"column":20},"end":{"line":677,"column":20}},{"start":{"line":677,"column":20},"end":{"line":677,"column":20}}]},"29":{"line":678,"type":"cond-expr","locations":[{"start":{"line":678,"column":48},"end":{"line":678,"column":61}},{"start":{"line":678,"column":64},"end":{"line":678,"column":76}}]},"30":{"line":686,"type":"cond-expr","locations":[{"start":{"line":686,"column":53},"end":{"line":686,"column":62}},{"start":{"line":686,"column":65},"end":{"line":686,"column":73}}]},"31":{"line":702,"type":"if","locations":[{"start":{"line":702,"column":12},"end":{"line":702,"column":12}},{"start":{"line":702,"column":12},"end":{"line":702,"column":12}}]},"32":{"line":723,"type":"if","locations":[{"start":{"line":723,"column":12},"end":{"line":723,"column":12}},{"start":{"line":723,"column":12},"end":{"line":723,"column":12}}]},"33":{"line":743,"type":"if","locations":[{"start":{"line":743,"column":12},"end":{"line":743,"column":12}},{"start":{"line":743,"column":12},"end":{"line":743,"column":12}}]},"34":{"line":743,"type":"binary-expr","locations":[{"start":{"line":743,"column":16},"end":{"line":743,"column":25}},{"start":{"line":743,"column":29},"end":{"line":743,"column":59}}]},"35":{"line":761,"type":"if","locations":[{"start":{"line":761,"column":12},"end":{"line":761,"column":12}},{"start":{"line":761,"column":12},"end":{"line":761,"column":12}}]},"36":{"line":761,"type":"binary-expr","locations":[{"start":{"line":761,"column":16},"end":{"line":76
1,"column":20}},{"start":{"line":761,"column":24},"end":{"line":761,"column":33}}]},"37":{"line":784,"type":"if","locations":[{"start":{"line":784,"column":12},"end":{"line":784,"column":12}},{"start":{"line":784,"column":12},"end":{"line":784,"column":12}}]},"38":{"line":801,"type":"if","locations":[{"start":{"line":801,"column":12},"end":{"line":801,"column":12}},{"start":{"line":801,"column":12},"end":{"line":801,"column":12}}]},"39":{"line":805,"type":"if","locations":[{"start":{"line":805,"column":12},"end":{"line":805,"column":12}},{"start":{"line":805,"column":12},"end":{"line":805,"column":12}}]},"40":{"line":809,"type":"if","locations":[{"start":{"line":809,"column":12},"end":{"line":809,"column":12}},{"start":{"line":809,"column":12},"end":{"line":809,"column":12}}]},"41":{"line":835,"type":"if","locations":[{"start":{"line":835,"column":12},"end":{"line":835,"column":12}},{"start":{"line":835,"column":12},"end":{"line":835,"column":12}}]},"42":{"line":835,"type":"binary-expr","locations":[{"start":{"line":835,"column":16},"end":{"line":835,"column":35}},{"start":{"line":835,"column":39},"end":{"line":835,"column":62}}]},"43":{"line":843,"type":"if","locations":[{"start":{"line":843,"column":12},"end":{"line":843,"column":12}},{"start":{"line":843,"column":12},"end":{"line":843,"column":12}}]},"44":{"line":877,"type":"if","locations":[{"start":{"line":877,"column":12},"end":{"line":877,"column":12}},{"start":{"line":877,"column":12},"end":{"line":877,"column":12}}]},"45":{"line":896,"type":"if","locations":[{"start":{"line":896,"column":12},"end":{"line":896,"column":12}},{"start":{"line":896,"column":12},"end":{"line":896,"column":12}}]},"46":{"line":916,"type":"if","locations":[{"start":{"line":916,"column":12},"end":{"line":916,"column":12}},{"start":{"line":916,"column":12},"end":{"line":916,"column":12}}]}},"code":["(function () { YUI.add('aui-sortable-layout', function (A, NAME) {","","/**"," * The SortableLayout Utility"," *"," * @module aui-sortable-layout"," */","","var Lang = A.Lang,"," isBoolean = Lang.isBoolean,"," isFunction = Lang.isFunction,"," isObject = Lang.isObject,"," isString = Lang.isString,"," isValue = Lang.isValue,",""," toInt = Lang.toInt,",""," ceil = Math.ceil,",""," DDM = A.DD.DDM,",""," // caching these values for performance"," PLACEHOLDER_MARGIN_BOTTOM = 0,"," PLACEHOLDER_MARGIN_TOP = 0,"," PLACEHOLDER_TARGET_MARGIN_BOTTOM = 0,"," PLACEHOLDER_TARGET_MARGIN_TOP = 0,",""," isNodeList = function(v) {"," return (v instanceof A.NodeList);"," },",""," concat = function() {"," return Array.prototype.slice.call(arguments).join(' ');"," },",""," nodeListSetter = function(val) {"," return isNodeList(val) ? 
val : A.all(val);"," },",""," getNumStyle = function(elem, styleName) {"," return toInt(elem.getStyle(styleName));"," },",""," getCN = A.getClassName,",""," CSS_DRAG_INDICATOR = getCN('sortable-layout', 'drag', 'indicator'),"," CSS_DRAG_INDICATOR_ICON = getCN('sortable-layout', 'drag', 'indicator', 'icon'),"," CSS_DRAG_INDICATOR_ICON_LEFT = getCN('sortable-layout', 'drag', 'indicator', 'icon', 'left'),"," CSS_DRAG_INDICATOR_ICON_RIGHT = getCN('sortable-layout', 'drag', 'indicator', 'icon', 'right'),"," CSS_DRAG_TARGET_INDICATOR = getCN('sortable-layout', 'drag', 'target', 'indicator'),"," CSS_ICON = getCN('icon'),"," CSS_ICON_CIRCLE_TRIANGLE_L = getCN('icon', 'circle', 'triangle', 'l'),"," CSS_ICON_CIRCLE_TRIANGLE_R = getCN('icon', 'circle', 'triangle', 'r'),",""," TPL_PLACEHOLDER = '<div class=\"' + CSS_DRAG_INDICATOR + '\">' +"," '<div class=\"' + concat(CSS_DRAG_INDICATOR_ICON, CSS_DRAG_INDICATOR_ICON_LEFT, CSS_ICON,"," CSS_ICON_CIRCLE_TRIANGLE_R) + '\"></div>' +"," '<div class=\"' + concat(CSS_DRAG_INDICATOR_ICON, CSS_DRAG_INDICATOR_ICON_RIGHT, CSS_ICON,"," CSS_ICON_CIRCLE_TRIANGLE_L) + '\"></div>' +"," '<div>';","","/**"," * A base class for SortableLayout, providing:"," *"," * - Widget Lifecycle (initializer, renderUI, bindUI, syncUI, destructor)"," * - DragDrop utility for drag lists, portal layouts (portlets)"," *"," * Check the [live demo](http://alloyui.com/examples/sortable-layout/)."," *"," * @class A.SortableLayout"," * @extends Base"," * @param {Object} config Object literal specifying widget configuration"," * properties."," * @constructor"," */","var SortableLayout = A.Component.create({"," /**"," * Static property provides a string to identify the class."," *"," * @property NAME"," * @type String"," * @static"," */"," NAME: 'sortable-layout',",""," /**"," * Static property used to define the default attribute"," * configuration for the `A.SortableLayout`."," *"," * @property ATTRS"," * @type Object"," * @static"," */"," ATTRS: {",""," /**"," * Configuration object for delegate."," *"," * @attribute delegateConfig"," * @default null"," * @type Object"," */"," delegateConfig: {"," value: null,"," setter: function(val) {"," var instance = this;",""," var config = A.merge({"," bubbleTargets: instance,"," dragConfig: {},"," nodes: instance.get('dragNodes'),"," target: true"," },"," val"," );",""," A.mix(config.dragConfig, {"," groups: instance.get('groups'),"," startCentered: true"," });",""," return config;"," },"," validator: isObject"," },",""," /**"," * Proxy drag node used instead of dragging the original node."," *"," * @attribute proxyNode"," */"," proxyNode: {"," setter: function(val) {"," return isString(val) ? 
A.Node.create(val) : val;"," }"," },",""," /**"," * The CSS class name used to define which nodes are draggable."," *"," * @attribute dragNodes"," * @type String"," */"," dragNodes: {"," validator: isString"," },",""," /**"," * The container which serves to host dropped elements."," *"," * @attribute dropContainer"," * @type Function"," */"," dropContainer: {"," value: function(dropNode) {"," return dropNode;"," },"," validator: isFunction"," },",""," /**"," * The CSS class name used to define which nodes serve as container to"," * be dropped."," *"," * @attribute dropNodes"," */"," dropNodes: {"," setter: '_setDropNodes'"," },",""," /**"," * List of elements to add this sortable layout into."," *"," * @attribute groups"," * @type Array"," */"," groups: {"," value: ['sortable-layout']"," },",""," /**"," * Specifies if the start should be delayed."," *"," * @attribute lazyStart"," * @default false"," * @type Boolean"," */"," lazyStart: {"," value: false,"," validator: isBoolean"," },",""," /**"," * Simulates the position of the dragged element."," *"," * @attribute placeholder"," */"," placeholder: {"," value: TPL_PLACEHOLDER,"," setter: function(val) {"," var placeholder = isString(val) ? A.Node.create(val) : val;",""," if (!placeholder.inDoc()) {"," A.getBody().prepend("," placeholder.hide()"," );"," }",""," PLACEHOLDER_MARGIN_BOTTOM = getNumStyle(placeholder, 'marginBottom');"," PLACEHOLDER_MARGIN_TOP = getNumStyle(placeholder, 'marginTop');",""," placeholder.addClass(CSS_DRAG_TARGET_INDICATOR);",""," PLACEHOLDER_TARGET_MARGIN_BOTTOM = getNumStyle(placeholder, 'marginBottom');"," PLACEHOLDER_TARGET_MARGIN_TOP = getNumStyle(placeholder, 'marginTop');",""," return placeholder;"," }"," },",""," /**"," * Proxy element to be used when dragging."," *"," * @attribute proxy"," * @default null"," */"," proxy: {"," value: null,"," setter: function(val) {"," var instance = this;",""," var defaults = {"," moveOnEnd: false,"," positionProxy: false"," };",""," // if proxyNode is set remove the border from the default proxy"," if (instance.get('proxyNode')) {"," defaults.borderStyle = null;"," }",""," return A.merge(defaults, val || {});"," }"," }"," },",""," /**"," * Static property used to define which component it extends."," *"," * @property EXTENDS"," * @type Object"," * @static"," */"," EXTENDS: A.Base,",""," prototype: {",""," /**"," * Construction logic executed during `A.SortableLayout` instantiation."," * Lifecycle."," *"," * @method initializer"," * @protected"," */"," initializer: function() {"," var instance = this;",""," instance.bindUI();"," },",""," /**"," * Bind the events on the `A.SortableLayout` UI. Lifecycle."," *"," * @method bindUI"," * @protected"," */"," bindUI: function() {"," var instance = this;",""," // publishing placeholderAlign event"," instance.publish('placeholderAlign', {"," defaultFn: instance._defPlaceholderAlign,"," queuable: false,"," emitFacade: true,"," bubbles: true"," });",""," instance._bindDDEvents();"," instance._bindDropZones();"," },",""," /**"," * Checks if the `Node` isn't a drop node. 
If not, creates a new Drop"," * instance and adds to drop target group."," *"," * @method addDropNode"," * @param node"," * @param config"," */"," addDropNode: function(node, config) {"," var instance = this;",""," node = A.one(node);",""," if (!DDM.getDrop(node)) {"," instance.addDropTarget("," // Do not use DropPlugin to create the DropZones on"," // this component, the \".drop\" namespace is used to check"," // for the DD.Delegate target nodes"," new A.DD.Drop("," A.merge({"," bubbleTargets: instance,"," groups: instance.get('groups'),"," node: node"," },"," config"," )"," )"," );"," }"," },",""," /**"," * Adds a Drop instance to a group."," *"," * @method addDropTarget"," * @param drop"," */"," addDropTarget: function(drop) {"," var instance = this;",""," drop.addToGroup("," instance.get('groups')"," );"," },",""," /**"," * Sync placeholder size and set its X and Y positions."," *"," * @method alignPlaceholder"," * @param region"," * @param isTarget"," */"," alignPlaceholder: function(region, isTarget) {"," var instance = this;"," var placeholder = instance.get('placeholder');",""," if (!instance.lazyEvents) {"," placeholder.show();"," }",""," // sync placeholder size"," instance._syncPlaceholderSize();",""," placeholder.setXY("," instance.getPlaceholderXY(region, isTarget)"," );"," },",""," /**"," * Calculates drag's X and Y directions."," *"," * @method calculateDirections"," * @param drag"," */"," calculateDirections: function(drag) {"," var instance = this;"," var lastY = instance.lastY;"," var lastX = instance.lastX;",""," var x = drag.lastXY[0];"," var y = drag.lastXY[1];",""," // if the x change"," if (x != lastX) {"," // set the drag direction"," instance.XDirection = (x < lastX) ? 'left' : 'right';"," }",""," // if the y change"," if (y != lastY) {"," // set the drag direction"," instance.YDirection = (y < lastY) ? 'up' : 'down';"," }",""," instance.lastX = x;"," instance.lastY = y;"," },",""," /**"," * Calculates quadrant position."," *"," * @method calculateQuadrant"," * @param drag"," * @param drop"," * @return {Number}"," */"," calculateQuadrant: function(drag, drop) {"," var instance = this;"," var quadrant = 1;"," var region = drop.get('node').get('region');"," var mouseXY = drag.mouseXY;"," var mouseX = mouseXY[0];"," var mouseY = mouseXY[1];",""," var top = region.top;"," var left = region.left;",""," // (region.bottom - top) finds the height of the region"," var vCenter = top + (region.bottom - top) / 2;"," // (region.right - left) finds the width of the region"," var hCenter = left + (region.right - left) / 2;",""," if (mouseY < vCenter) {"," quadrant = (mouseX > hCenter) ? 1 : 2;"," }"," else {"," quadrant = (mouseX < hCenter) ? 
3 : 4;"," }",""," instance.quadrant = quadrant;",""," return quadrant;"," },",""," /**"," * Gets placeholder X and Y positions."," *"," * @method getPlaceholderXY"," * @param region"," * @param isTarget"," * @return {Array}"," */"," getPlaceholderXY: function(region, isTarget) {"," var instance = this;"," var placeholder = instance.get('placeholder');"," var marginBottom = PLACEHOLDER_MARGIN_BOTTOM;"," var marginTop = PLACEHOLDER_MARGIN_TOP;",""," if (isTarget) {"," // update the margin values in case of the target placeholder"," // has a different margin"," marginBottom = PLACEHOLDER_TARGET_MARGIN_BOTTOM;"," marginTop = PLACEHOLDER_TARGET_MARGIN_TOP;"," }",""," // update the className of the placeholder when interact with target"," // (drag/drop) elements"," placeholder.toggleClass(CSS_DRAG_TARGET_INDICATOR, isTarget);",""," var regionBottom = ceil(region.bottom);"," var regionLeft = ceil(region.left);"," var regionTop = ceil(region.top);",""," var x = regionLeft;",""," // 1 and 2 quadrants are the top quadrants, so align to the"," // region.top when quadrant < 3"," var y = (instance.quadrant < 3) ?"," (regionTop - (placeholder.get('offsetHeight') + marginBottom)) : (regionBottom + marginTop);",""," return [x, y];"," },",""," /**"," * Removes a Drop instance from group."," *"," * @method removeDropTarget"," * @param drop"," */"," removeDropTarget: function(drop) {"," var instance = this;",""," drop.removeFromGroup("," instance.get('groups')"," );"," },",""," /**"," * Checks if active drag and active drop satisfies the align condition."," *"," * @method _alignCondition"," * @protected"," * @return {Boolean}"," */"," _alignCondition: function() {"," var instance = this;"," var activeDrag = DDM.activeDrag;"," var activeDrop = instance.activeDrop;",""," if (activeDrag && activeDrop) {"," var dragNode = activeDrag.get('node');"," var dropNode = activeDrop.get('node');",""," return !dragNode.contains(dropNode);"," }",""," return true;"," },",""," /**"," * Creates `DD.Delegate` instance, plugs it to the `DDProxy`, and binds"," * Drag and Drop events."," *"," * @method _bindDDEvents"," * @protected"," */"," _bindDDEvents: function() {"," var instance = this;"," var delegateConfig = instance.get('delegateConfig');"," var proxy = instance.get('proxy');",""," // creating DD.Delegate instance"," instance.delegate = new A.DD.Delegate(delegateConfig);",""," // plugging the DDProxy"," instance.delegate.dd.plug(A.Plugin.DDProxy, proxy);",""," instance.on('drag:end', A.bind(instance._onDragEnd, instance));"," instance.on('drag:enter', A.bind(instance._onDragEnter, instance));"," instance.on('drag:exit', A.bind(instance._onDragExit, instance));"," instance.on('drag:over', A.bind(instance._onDragOver, instance));"," instance.on('drag:start', A.bind(instance._onDragStart, instance));"," instance.after('drag:start', A.bind(instance._afterDragStart, instance));",""," instance.on('quadrantEnter', instance._syncPlaceholderUI);"," instance.on('quadrantExit', instance._syncPlaceholderUI);"," },",""," /**"," * Bind drop zones."," *"," * @method _bindDropZones"," * @protected"," */"," _bindDropZones: function() {"," var instance = this;"," var dropNodes = instance.get('dropNodes');",""," if (dropNodes) {"," dropNodes.each(function(node, i) {"," instance.addDropNode(node);"," });"," }"," },",""," /**"," * Defines `placeholder` alignment."," *"," * @method _defPlaceholderAlign"," * @param event"," * @protected"," */"," _defPlaceholderAlign: function(event) {"," var instance = this;"," var activeDrop = 
instance.activeDrop;"," var placeholder = instance.get('placeholder');",""," if (activeDrop && placeholder) {"," var node = activeDrop.get('node');"," // DD.Delegate use the Drop Plugin on its \"target\" items. Using"," // Drop Plugin a \"node.drop\" namespace is created. Using the"," // .drop namespace to detect when the node is also a \"target\""," // DD.Delegate node"," var isTarget = !! node.drop;",""," instance.lastAlignDrop = activeDrop;",""," instance.alignPlaceholder("," activeDrop.get('node').get('region'),"," isTarget"," );"," }"," },",""," /**"," * Gets a collection formed by `drag`, `drop`, `quadrant`, `XDirection`,"," * and `YDirection` instances."," *"," * @method _evOutput"," * @protected"," * @return {Object}"," */"," _evOutput: function() {"," var instance = this;",""," return {"," drag: DDM.activeDrag,"," drop: instance.activeDrop,"," quadrant: instance.quadrant,"," XDirection: instance.XDirection,"," YDirection: instance.YDirection"," };"," },",""," /**"," * Fire quadrant events and updates \"last\" informations."," *"," * @method _fireQuadrantEvents"," * @protected"," */"," _fireQuadrantEvents: function() {"," var instance = this;"," var evOutput = instance._evOutput();"," var lastQuadrant = instance.lastQuadrant;"," var quadrant = instance.quadrant;",""," if (quadrant != lastQuadrant) {"," // only trigger exit if it has previously entered in any quadrant"," if (lastQuadrant) {"," // merging event with the \"last\" information"," instance.fire("," 'quadrantExit',"," A.merge({"," lastDrag: instance.lastDrag,"," lastDrop: instance.lastDrop,"," lastQuadrant: instance.lastQuadrant,"," lastXDirection: instance.lastXDirection,"," lastYDirection: instance.lastYDirection"," },"," evOutput"," )"," );"," }",""," // firing EV_QUADRANT_ENTER event"," instance.fire('quadrantEnter', evOutput);"," }",""," // firing EV_QUADRANT_OVER, align event fires like the drag over"," // without bubbling for performance reasons"," instance.fire('quadrantOver', evOutput);",""," // updating \"last\" information"," instance.lastDrag = DDM.activeDrag;"," instance.lastDrop = instance.activeDrop;"," instance.lastQuadrant = quadrant;"," instance.lastXDirection = instance.XDirection;"," instance.lastYDirection = instance.YDirection;"," },",""," /**"," * Gets node from the currently active draggable object."," *"," * @method _getAppendNode"," * @protected"," * @return {Node}"," */"," _getAppendNode: function() {"," return DDM.activeDrag.get('node');"," },",""," /**"," * Sets the position of drag/drop nodes."," *"," * @method _positionNode"," * @param event"," * @protected"," */"," _positionNode: function(event) {"," var instance = this;"," var activeDrop = instance.lastAlignDrop || instance.activeDrop;",""," if (activeDrop) {"," var dragNode = instance._getAppendNode();"," var dropNode = activeDrop.get('node');",""," // detects if the activeDrop is a dd target (portlet) or a drop"," // area only (column) DD.Delegate use the Drop Plugin on its"," // \"target\" items. Using Drop Plugin a \"node.drop\" namespace is"," // created. Using the .drop namespace to detect when the node is"," // also a \"target\" DD.Delegate node"," var isTarget = isValue(dropNode.drop);"," var topQuadrants = (instance.quadrant < 3);",""," if (instance._alignCondition()) {"," if (isTarget) {"," dropNode[topQuadrants ? 
'placeBefore' : 'placeAfter'](dragNode);"," }"," // interacting with the columns (drop areas only)"," else {"," // find the dropContainer of the dropNode, the default"," // DROP_CONTAINER function returns the dropNode"," var dropContainer = instance.get('dropContainer').apply(instance, [dropNode]);",""," dropContainer[topQuadrants ? 'prepend' : 'append'](dragNode);"," }"," }"," }"," },",""," /**"," * Sync `placeholder` attribute in the UI."," *"," * @method _syncPlaceholderUI"," * @param event"," * @protected"," */"," _syncPlaceholderUI: function(event) {"," var instance = this;",""," if (instance._alignCondition()) {"," // firing placeholderAlign event"," instance.fire('placeholderAlign', {"," drop: instance.activeDrop,"," originalEvent: event"," });"," }"," },",""," /**"," * Sync `placeholder` node size."," *"," * @method _syncPlaceholderSize"," * @protected"," */"," _syncPlaceholderSize: function() {"," var instance = this;"," var node = instance.activeDrop.get('node');",""," var placeholder = instance.get('placeholder');",""," if (placeholder) {"," placeholder.set("," 'offsetWidth',"," node.get('offsetWidth')"," );"," }"," },",""," /**"," * Sync `proxyNode` attribute in the UI."," *"," * @method _syncProxyNodeUI"," * @param event"," * @protected"," */"," _syncProxyNodeUI: function(event) {"," var instance = this;"," var dragNode = DDM.activeDrag.get('dragNode');"," var proxyNode = instance.get('proxyNode');",""," if (proxyNode && !proxyNode.compareTo(dragNode)) {"," dragNode.append(proxyNode);",""," instance._syncProxyNodeSize();"," }"," },",""," /**"," * Sync `proxyNode` height and width."," *"," * @method _syncProxyNodeSize"," * @protected"," */"," _syncProxyNodeSize: function() {"," var instance = this;"," var node = DDM.activeDrag.get('node');"," var proxyNode = instance.get('proxyNode');",""," if (node && proxyNode) {"," proxyNode.set("," 'offsetHeight',"," node.get('offsetHeight')"," );",""," proxyNode.set("," 'offsetWidth',"," node.get('offsetWidth')"," );"," }"," },",""," /**"," * Triggers after drag event starts."," *"," * @method _afterDragStart"," * @param event"," * @protected"," */"," _afterDragStart: function(event) {"," var instance = this;",""," if (instance.get('proxy')) {"," instance._syncProxyNodeUI(event);"," }"," },",""," /**"," * Triggers when the drag event ends."," *"," * @method _onDragEnd"," * @param event"," * @protected"," */"," _onDragEnd: function(event) {"," var instance = this;"," var placeholder = instance.get('placeholder');"," var proxyNode = instance.get('proxyNode');",""," if (!instance.lazyEvents) {"," instance._positionNode(event);"," }",""," if (proxyNode) {"," proxyNode.remove();"," }",""," if (placeholder) {"," placeholder.hide();"," }",""," // reset the last information"," instance.lastQuadrant = null;"," instance.lastXDirection = null;"," instance.lastYDirection = null;"," },",""," /**"," * Triggers when the dragged object first interacts with another"," * targettable drag and drop object."," *"," * @method _onDragEnter"," * @param event"," * @protected"," */"," _onDragEnter: function(event) {"," var instance = this;",""," instance.activeDrop = DDM.activeDrop;",""," // check if lazyEvents is true and if there is a lastActiveDrop the"," // checking for lastActiveDrop prevents fire the _syncPlaceholderUI"," // when quadrant* events fires"," if (instance.lazyEvents && instance.lastActiveDrop) {"," instance.lazyEvents = false;",""," instance._syncPlaceholderUI(event);"," }",""," // lastActiveDrop is always updated by the drag exit, but if 
there"," // is no lastActiveDrop update it on drag enter update it"," if (!instance.lastActiveDrop) {"," instance.lastActiveDrop = DDM.activeDrop;"," }"," },",""," /**"," * Triggers when the drag event exits."," *"," * @method _onDragExit"," * @param event"," * @protected"," */"," _onDragExit: function(event) {"," var instance = this;",""," instance._syncPlaceholderUI(event);",""," instance.activeDrop = DDM.activeDrop;",""," instance.lastActiveDrop = DDM.activeDrop;"," },",""," /**"," * Triggers when an element is being dragged over a valid drop target."," *"," * @method _onDragOver"," * @param event"," * @protected"," */"," _onDragOver: function(event) {"," var instance = this;"," var drag = event.drag;",""," // prevent drag over bubbling, filtering the top most element"," if (instance.activeDrop == DDM.activeDrop) {"," instance.calculateDirections(drag);",""," instance.calculateQuadrant(drag, instance.activeDrop);",""," instance._fireQuadrantEvents();"," }"," },",""," /**"," * Triggers when the drag event starts."," *"," * @method _onDragStart"," * @param event"," * @protected"," */"," _onDragStart: function(event) {"," var instance = this;",""," if (instance.get('lazyStart')) {"," instance.lazyEvents = true;"," }",""," instance.lastActiveDrop = null;",""," instance.activeDrop = DDM.activeDrop;"," },",""," /**"," * Sets group of drop nodes."," *"," * @method _setDropNodes"," * @param val"," * @protected"," * @return {NodeList}"," */"," _setDropNodes: function(val) {"," var instance = this;",""," if (isFunction(val)) {"," val = val.call(instance);"," }",""," return nodeListSetter(val);"," }"," }","});","","A.SortableLayout = SortableLayout;","","","}, '2.5.0', {"," \"requires\": ["," \"dd-delegate\","," \"dd-drag\","," \"dd-drop\","," \"dd-proxy\","," \"aui-node\","," \"aui-component\""," ],"," \"skinnable\": true","});","","}());"]}; } var __cov_Bkabxj96f5yfp8GqViY5TQ = __coverage__['build/aui-sortable-layout/aui-sortable-layout.js'];
test_cpplibhub.py
import pytest from click.testing import CliRunner from cpplibhub.cli import main @pytest.fixture(scope="module") def runner(): return CliRunner() def
(runner): # assert main([]) == 0 # run without click result = runner.invoke(main) # result = runner.invoke(main, ['--name', 'Amy']) assert result.exit_code == 0 # assert result.output == 'Hello Amy!\n' # TODO: test more command line options and args
test_main
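# A sketch for the TODO above, assuming the CLI gains a --name option that
# greets the given name (the commented-out lines in test_main only hint at
# this; the option is hypothetical and not confirmed by cpplibhub itself).
def test_main_with_name(runner):
    result = runner.invoke(main, ['--name', 'Amy'])
    assert result.exit_code == 0
    assert 'Amy' in result.output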
s3store_test.go
// Copyright (c) 2018-present Mattermost, Inc. All Rights Reserved. // See License.txt for license information. package filesstore import ( "testing" "github.com/blastbao/mattermost-server/model" ) func TestCheckMandatoryS3Fields(t *testing.T)
{
	cfg := model.FileSettings{}

	err := CheckMandatoryS3Fields(&cfg)
	if err == nil || err.Message != "api.admin.test_s3.missing_s3_bucket" {
		t.Fatal("should have failed with missing s3 bucket")
	}

	cfg.AmazonS3Bucket = model.NewString("test-mm")
	err = CheckMandatoryS3Fields(&cfg)
	if err != nil {
		t.Fatal("should not have failed")
	}

	cfg.AmazonS3Endpoint = model.NewString("")
	err = CheckMandatoryS3Fields(&cfg)
	if err != nil || *cfg.AmazonS3Endpoint != "s3.amazonaws.com" {
		t.Fatal("should not have failed because it should set the endpoint to the default")
	}
}
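// A table-driven restatement of the cases above -- a sketch only, relying
// solely on the behaviour already exercised in TestCheckMandatoryS3Fields:
// the bucket is mandatory, everything else may be left unset.
func TestCheckMandatoryS3FieldsTable(t *testing.T) {
	cases := []struct {
		name    string
		bucket  *string
		wantErr bool
	}{
		{name: "missing bucket", bucket: nil, wantErr: true},
		{name: "bucket set", bucket: model.NewString("test-mm"), wantErr: false},
	}
	for _, tc := range cases {
		cfg := model.FileSettings{AmazonS3Bucket: tc.bucket}
		if err := CheckMandatoryS3Fields(&cfg); (err != nil) != tc.wantErr {
			t.Fatalf("%s: unexpected result: %v", tc.name, err)
		}
	}
}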
TestShellWithoutPipefail.py
# pylint: disable=preferred-module  # FIXME: remove once migrated per GH-725
import unittest

from ansiblelint.rules import RulesCollection
from ansiblelint.rules.ShellWithoutPipefail import ShellWithoutPipefail
from ansiblelint.testing import RunFromText

FAIL_TASKS = '''
---
- hosts: localhost
  become: no
  tasks:
    - name: pipeline without pipefail
      shell: false | cat

    - name: pipeline with or and pipe, no pipefail
      shell: false || true | cat

    - shell: |
        df | grep '/dev'
'''

SUCCESS_TASKS = '''
---
- hosts: localhost
  become: no
  tasks:
    - name: pipeline with pipefail
      shell: set -o pipefail && false | cat

    - name: pipeline with pipefail, multi-line
      shell: |
        set -o pipefail
        false | cat

    - name: pipeline with pipefail, complex set
      shell: |
        set -e -x -o pipefail
        false | cat

    - name: pipeline with pipefail, complex set
      shell: |
        set -ex -o pipefail
        false | cat

    - name: pipeline with pipefail, complex set
      shell: |
        set -eo pipefail
        false | cat

    - name: pipeline without pipefail, ignoring errors
      shell: false | cat
      ignore_errors: true

    - name: non-pipeline without pipefail
      shell: "true"

    - name: command without pipefail
      command: "true"

    - name: shell with or
      shell: false || true

    - shell: |
        set -o pipefail
        df | grep '/dev'

    - name: should not fail due to ignore_errors being true
      shell: false | cat
      ignore_errors: true
'''


class TestShellWithoutPipeFail(unittest.TestCase):
    collection = RulesCollection()
    collection.register(ShellWithoutPipefail())

    def setUp(self):
        self.runner = RunFromText(self.collection)

    def test_fail(self):
        results = self.runner.run_playbook(FAIL_TASKS)
        self.assertEqual(3, len(results))

    def test_success(self):
results = self.runner.run_playbook(SUCCESS_TASKS) self.assertEqual(0, len(results))
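# A small ad-hoc usage sketch (same RunFromText API as setUp above): lint a
# one-off playbook string against the registered rule. The playbook text is
# illustrative; the single expected match mirrors the FAIL_TASKS cases, where
# each unguarded pipeline yields one result.
def lint_snippet():
    runner = RunFromText(TestShellWithoutPipeFail.collection)
    results = runner.run_playbook(
        "---\n"
        "- hosts: localhost\n"
        "  tasks:\n"
        "    - name: unsafe pipeline\n"
        "      shell: false | cat\n"
    )
    return len(results)  # should be 1: pipeline without pipefail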
models.py
from django.db import models from django.contrib.auth.models import User from django.db.models.signals import post_save from django.dispatch import receiver class Profile(models.Model): user = models.OneToOneField(User, on_delete=models.CASCADE) full_name = models.CharField(max_length=100) email = models.EmailField(max_length=150) phone = models.CharField(max_length=10) def
(self): return self.user.username @receiver(post_save, sender=User) def update_profile_signal(sender, instance, created, **kwargs): if created: Profile.objects.create(user=instance) instance.profile.save()
__str__
caaa.005.001.10.xsd.go
// Code generated by download. DO NOT EDIT. package iso20022_caaa_005_001_10 import ( "bytes" "encoding/base64" "encoding/xml" "time" ) type AcceptorCancellationRequest10 struct { Envt CardPaymentEnvironment77 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Envt"` Cntxt CardPaymentContext28 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Cntxt"` Tx CardPaymentTransaction108 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tx"` } type AcceptorCancellationRequestV10 struct { Hdr Header59 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Hdr"` CxlReq AcceptorCancellationRequest10 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CxlReq"` SctyTrlr ContentInformationType27 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SctyTrlr,omitempty"` } type Acquirer10 struct { Id GenericIdentification177 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id,omitempty"` ParamsVrsn Max256Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ParamsVrsn,omitempty"` } // Must match the pattern [A-Z]{3,3} type ActiveCurrencyCode string // May be one of ADDR, PBOX, HOME, BIZZ, MLTO, DLVY type AddressType2Code string type AddressVerification1 struct { AdrDgts Max5NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AdrDgts,omitempty"` PstlCdDgts Max5NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PstlCdDgts,omitempty"` } // May be one of HS25, HS38, HS51, HS01 type Algorithm11Code string // May be one of HS25, HS38, HS51, HS01, SH31, SH32, SH33, SH35, SHK1, SHK2 type Algorithm16Code string // May be one of MACC, MCCS, CMA1, MCC1, CMA9, CMA5, CMA2, CM31, CM32, CM33, MCS3, CCA1, CCA2, CCA3 type Algorithm17Code string // May be one of EA2C, E3DC, DKP9, UKPT, UKA2, EA9C, EA5C, DA12, DA19, DA25, N108, EA5R, EA9R, EA2R, E3DR, E36C, E36R, SD5C, UKA1, UKA3 type Algorithm24Code string // May be one of ERS2, ERS1, RPSS, ERS3, ED32, ED33, ED35, ED23, ED25, ES22, ES32, ES33, ES35, ES23, ES25, ED22 type Algorithm25Code string // May be one of ERSA, RSAO type Algorithm7Code string // May be one of MGF1 type Algorithm8Code string type AlgorithmIdentification12 struct { Algo Algorithm8Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Algo"` Param Parameter5 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Param,omitempty"` } type AlgorithmIdentification18 struct { Algo Algorithm8Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Algo"` Param Parameter9 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Param,omitempty"` } type AlgorithmIdentification19 struct { Algo Algorithm7Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Algo"` Param Parameter10 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Param,omitempty"` } type AlgorithmIdentification21 struct { Algo Algorithm16Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Algo"` } type AlgorithmIdentification22 struct { Algo Algorithm17Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Algo"` Param Parameter7 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Param,omitempty"` } type AlgorithmIdentification29 struct { Algo Algorithm24Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Algo"` Param Parameter12 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Param,omitempty"` } type AlgorithmIdentification30 struct { Algo Algorithm25Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Algo"` Param Parameter15 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Param,omitempty"` } // May be one of MONE, POIN type AmountUnit1Code string // Must match the 
pattern [A-Z0-9]{4,4}[A-Z]{2,2}[A-Z0-9]{2,2}([A-Z0-9]{3,3}){0,1} type AnyBICDec2014Identifier string // May be one of ATTD, SATT, UATT type AttendanceContext1Code string // May be one of CNAT, LATT, OATT, OUAT, CATT type AttributeType1Code string type AuthenticatedData7 struct { Vrsn float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Vrsn,omitempty"` Rcpt []Recipient10Choice `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Rcpt"` MACAlgo AlgorithmIdentification22 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MACAlgo"` NcpsltdCntt EncapsulatedContent3 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NcpsltdCntt"` MAC Max140Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MAC"` } // May be one of ICCD, AGNT, MERC, ACQR, ISSR, TRML type AuthenticationEntity2Code string // May be one of NPIN, PPSG, PSWD, SCRT, SCNL, SNCT, CPSG, ADDB, BIOM, CDHI, CRYP, CSCV, PSVE, CSEC, ADDS, MANU, FPIN, TOKP type AuthenticationMethod6Code string // May be one of TOKA, ADDB, BYPS, BIOM, CDHI, CRYP, CSCV, MANU, MERC, MOBL, FPIN, NPIN, OTHR, PPSG, PSVE, PSWD, TOKP, SCRT, SCNL, CSEC, SNCT, ADDS, CPSG, TOKN, UKNW type AuthenticationMethod8Code string // May be one of DENY, MRCH, CARD, AUTH, CRPT, UCRP type AuthenticationResult1Code string // Must match the pattern [a-zA-Z0-9]{1,30} type BBANIdentifier string // May be one of LNGT, NUL8, NULG, NULL, RAND type BytePadding1Code string // May be one of TAGC, PHYS, BRCD, MGST, CICC, DFLE, CTLS, ECTL, CDFL type CardDataReading5Code string // May be one of TAGC, PHYS, BRCD, MGST, CICC, DFLE, CTLS, ECTL, CDFL, SICC, UNKW, QRCD, OPTC type CardDataReading8Code string type CardDirectDebit2 struct { DbtrId Debtor4 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DbtrId,omitempty"` CdtrId Creditor4 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CdtrId"` MndtRltdInf MandateRelatedInformation13 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MndtRltdInf"` } // May be one of FFLB, SFLB, NFLB type CardFallback1Code string // May be one of ACCT, BARC, ISO2, PHON, CPAN, PRIV, UUID type CardIdentificationType1Code string type CardPaymentContext28 struct { PmtCntxt PaymentContext27 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PmtCntxt,omitempty"` SaleCntxt SaleContext4 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SaleCntxt,omitempty"` DrctDbtCntxt CardDirectDebit2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DrctDbtCntxt,omitempty"` } type CardPaymentEnvironment77 struct { Acqrr Acquirer10 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Acqrr,omitempty"` Mrchnt Organisation41 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Mrchnt,omitempty"` POI PointOfInteraction11 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 POI,omitempty"` Card PaymentCard31 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Card,omitempty"` Chck Check1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Chck,omitempty"` StordValAcct []StoredValueAccount2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 StordValAcct,omitempty"` LltyAcct []LoyaltyAccount2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 LltyAcct,omitempty"` CstmrDvc CustomerDevice3 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CstmrDvc,omitempty"` Wllt CustomerDevice3 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Wllt,omitempty"` PmtTkn Token1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PmtTkn,omitempty"` MrchntTkn MerchantToken1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MrchntTkn,omitempty"` Crdhldr Cardholder17 
`xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Crdhldr,omitempty"` PrtctdCrdhldrData ContentInformationType28 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrtctdCrdhldrData,omitempty"` SaleEnvt RetailerSaleEnvironment2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SaleEnvt,omitempty"` } // May be one of BALC, CACT, CRDP, CAFH, CAVR, CSHW, CSHD, DEFR, LOAD, ORCR, PINC, QUCH, RFND, RESA, VALC, UNLD, CAFT, CAFL, CIDD type CardPaymentServiceType12Code string // May be one of IRES, URES, PRES, ARES, FREC, RREC type CardPaymentServiceType3Code string // May be one of AGGR, DCCV, GRTT, LOYT, NRES, PUCO, RECP, SOAF, VCAU, INSI, INSA, CSHB, INST, NRFD type CardPaymentServiceType9Code string type CardPaymentTransaction106 struct { SaleRefId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SaleRefId,omitempty"` TxId TransactionIdentifier1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TxId"` POIId GenericIdentification32 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 POIId,omitempty"` InitrTxId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 InitrTxId,omitempty"` RcptTxId Max140Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RcptTxId,omitempty"` TxTp CardPaymentServiceType12Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TxTp"` AddtlSvc []CardPaymentServiceType9Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlSvc,omitempty"` SvcAttr CardPaymentServiceType3Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SvcAttr,omitempty"` CardDataNtryMd CardDataReading8Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardDataNtryMd,omitempty"` TxRslt CardPaymentTransactionResult4 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TxRslt,omitempty"` } type CardPaymentTransaction108 struct { TxCaptr bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TxCaptr,omitempty"` MrchntCtgyCd Min3Max4Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MrchntCtgyCd"` CstmrCnsnt []bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CstmrCnsnt,omitempty"` CardPrgrmmPropsd []Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardPrgrmmPropsd,omitempty"` CardPrgrmmApld Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardPrgrmmApld,omitempty"` SaleRefId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SaleRefId,omitempty"` TxId TransactionIdentifier1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TxId"` OrgnlTx CardPaymentTransaction106 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 OrgnlTx"` InitrTxId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 InitrTxId,omitempty"` RcptTxId Max140Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RcptTxId,omitempty"` RcncltnId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RcncltnId,omitempty"` TxDtls CardPaymentTransactionDetails34 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TxDtls"` AddtlTxData []Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlTxData,omitempty"` } type CardPaymentTransactionDetails34 struct { Ccy ActiveCurrencyCode `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Ccy,omitempty"` TtlAmt float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TtlAmt"` VldtyDt ISODate `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 VldtyDt,omitempty"` ICCRltdData Max10000Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ICCRltdData,omitempty"` } type CardPaymentTransactionResult4 struct { AuthstnNtty 
GenericIdentification90 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AuthstnNtty,omitempty"` RspnToAuthstn ResponseType10 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RspnToAuthstn"` AuthstnCd Min6Max8Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AuthstnCd,omitempty"` } // May be one of COMM, CONS type CardProductType1Code string type Cardholder17 struct { Id PersonIdentification15 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id,omitempty"` Nm Max45Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Nm,omitempty"` Lang string `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Lang,omitempty"` BllgAdr PostalAddress22 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 BllgAdr,omitempty"` ShppgAdr PostalAddress22 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ShppgAdr,omitempty"` TripNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TripNb,omitempty"` Vhcl Vehicle1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Vhcl,omitempty"` Authntcn []CardholderAuthentication14 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Authntcn,omitempty"` TxVrfctnRslt []TransactionVerificationResult4 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TxVrfctnRslt,omitempty"` PrsnlData Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrsnlData,omitempty"` MobData []MobileData3 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MobData,omitempty"` } type CardholderAuthentication14 struct { AuthntcnMtd AuthenticationMethod8Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AuthntcnMtd,omitempty"` AuthntcnXmptn Exemption1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AuthntcnXmptn,omitempty"` AuthntcnVal Max5000Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AuthntcnVal,omitempty"` PrtctdAuthntcnVal ContentInformationType28 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrtctdAuthntcnVal,omitempty"` CrdhldrOnLinePIN OnLinePIN8 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CrdhldrOnLinePIN,omitempty"` CrdhldrId PersonIdentification15 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CrdhldrId,omitempty"` AdrVrfctn AddressVerification1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AdrVrfctn,omitempty"` AuthntcnTp Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AuthntcnTp,omitempty"` AuthntcnLvl Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AuthntcnLvl,omitempty"` AuthntcnRslt AuthenticationResult1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AuthntcnRslt,omitempty"` AuthntcnAddtlInf ExternallyDefinedData2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AuthntcnAddtlInf,omitempty"` } // May be one of APKI, CHDT, MNSG, MNVR, FBIG, FBIO, FDSG, FCPN, FEPN, NPIN, PKIS, SCEC, NBIO, NOVF, OTHR type CardholderVerificationCapability4Code string type CashAccountIdentification7Choice struct { IBAN IBAN2007Identifier `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 IBAN,omitempty"` BBAN BBANIdentifier `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 BBAN,omitempty"` UPIC UPICIdentifier `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 UPIC,omitempty"` DmstAcct SimpleIdentificationInformation4 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DmstAcct,omitempty"` } type CertificateIssuer1 struct { RltvDstngshdNm []RelativeDistinguishedName1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RltvDstngshdNm"` } type Check1 struct { BkId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 BkId,omitempty"` AcctNb 
Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AcctNb,omitempty"` ChckNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ChckNb,omitempty"` ChckCardNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ChckCardNb,omitempty"` ChckTrckData2 TrackData2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ChckTrckData2,omitempty"` ChckTp CheckType1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ChckTp,omitempty"` Ctry Max3Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Ctry,omitempty"` } // May be one of BANK, BUSI, GOVC, PAYR, PERS type CheckType1Code string type CommunicationAddress9 struct { PstlAdr PostalAddress22 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PstlAdr,omitempty"` Email Max256Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Email,omitempty"` URLAdr Max256Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 URLAdr,omitempty"` Phne PhoneNumber `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Phne,omitempty"` CstmrSvc PhoneNumber `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CstmrSvc,omitempty"` AddtlCtctInf Max256Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlCtctInf,omitempty"` } type CommunicationCharacteristics5 struct { ComTp POICommunicationType2Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ComTp"` RmotPty []PartyType7Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RmotPty"` Actv bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Actv"` Params NetworkParameters7 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Params,omitempty"` PhysIntrfc PhysicalInterfaceParameter1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PhysIntrfc,omitempty"` } type ContentInformationType26 struct { CnttTp ContentType2Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CnttTp"` EnvlpdData EnvelopedData8 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 EnvlpdData,omitempty"` AuthntcdData AuthenticatedData7 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AuthntcdData,omitempty"` SgndData SignedData6 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SgndData,omitempty"` DgstdData DigestedData5 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DgstdData,omitempty"` } type ContentInformationType27 struct { CnttTp ContentType2Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CnttTp"` AuthntcdData AuthenticatedData7 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AuthntcdData"` } type ContentInformationType28 struct { CnttTp ContentType2Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CnttTp"` EnvlpdData EnvelopedData8 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 EnvlpdData"` } // May be one of DATA, SIGN, EVLP, DGST, AUTH type ContentType2Code string // Must match the pattern [A-Z]{2,2} type CountryCode string type Creditor4 struct { Cdtr PartyIdentification178Choice `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Cdtr"` RegnId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RegnId,omitempty"` } type CryptographicKey15 struct { Id Max350Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id"` AddtlId Max35Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlId,omitempty"` Nm Max256Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Nm,omitempty"` SctyPrfl Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SctyPrfl,omitempty"` ItmNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ItmNb,omitempty"` Vrsn Max256Text 
`xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Vrsn"` Tp CryptographicKeyType3Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tp,omitempty"` Fctn []KeyUsage1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Fctn,omitempty"` ActvtnDt ISODateTime `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ActvtnDt,omitempty"` DeactvtnDt ISODateTime `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DeactvtnDt,omitempty"` KeyVal ContentInformationType26 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 KeyVal,omitempty"` KeyChckVal Max35Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 KeyChckVal,omitempty"` AddtlMgmtInf []GenericInformation1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlMgmtInf,omitempty"` } // May be one of AES2, EDE3, DKP9, AES9, AES5, EDE4 type CryptographicKeyType3Code string type CustomerDevice3 struct { Id Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id,omitempty"` Tp Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tp,omitempty"` Prvdr Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Prvdr,omitempty"` } type DateAndPlaceOfBirth1 struct { BirthDt ISODate `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 BirthDt"` PrvcOfBirth Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrvcOfBirth,omitempty"` CityOfBirth Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CityOfBirth"` CtryOfBirth CountryCode `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CtryOfBirth"` } type Debtor4 struct { Dbtr PartyIdentification178Choice `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Dbtr,omitempty"` AcctId CashAccountIdentification7Choice `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AcctId,omitempty"` } type DigestedData5 struct { Vrsn float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Vrsn,omitempty"` DgstAlgo AlgorithmIdentification21 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DgstAlgo"` NcpsltdCntt EncapsulatedContent3 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NcpsltdCntt"` Dgst Max140Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Dgst"` } type DisplayCapabilities4 struct { Dstn []UserInterface4Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Dstn"` AvlblFrmt []OutputFormat1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AvlblFrmt,omitempty"` NbOfLines float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NbOfLines,omitempty"` LineWidth float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 LineWidth,omitempty"` AvlblLang []string `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AvlblLang,omitempty"` } type Document struct { AccptrCxlReq AcceptorCancellationRequestV10 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AccptrCxlReq"` } type EncapsulatedContent3 struct { CnttTp ContentType2Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CnttTp"` Cntt Max100KBinary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Cntt,omitempty"` } type EncryptedContent6 struct { CnttTp ContentType2Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CnttTp"` CnttNcrptnAlgo AlgorithmIdentification29 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CnttNcrptnAlgo,omitempty"` NcrptdData Max100KBinary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NcrptdData"` } // May be one of TR31, TR34, I238 type EncryptionFormat2Code string type EnvelopedData8 struct { Vrsn float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Vrsn,omitempty"` OrgtrInf 
OriginatorInformation1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 OrgtrInf,omitempty"` Rcpt []Recipient10Choice `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Rcpt"` NcrptdCntt EncryptedContent6 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NcrptdCntt,omitempty"` } // Must match the pattern [a-zA-Z0-9]{3} type Exact3AlphaNumericText string // Must match the pattern [0-9]{3} type Exact3NumericText string // Must match the pattern [0-9]{4} type Exact4NumericText string // May be one of LOWA, MINT, RECP, SCPE, SCAD, TRAE, PKGE, TMBE type Exemption1Code string type ExternallyDefinedData2 struct { Id Max1025Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id"` Val Max100KBinary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Val,omitempty"` PrtctdVal ContentInformationType26 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrtctdVal,omitempty"` Tp Max1025Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tp,omitempty"` } type GenericIdentification176 struct { Id Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id"` Tp PartyType33Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tp,omitempty"` Issr PartyType33Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Issr,omitempty"` Ctry Min2Max3AlphaText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Ctry,omitempty"` ShrtNm Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ShrtNm,omitempty"` } type GenericIdentification177 struct { Id Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id"` Tp PartyType33Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tp,omitempty"` Issr PartyType33Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Issr,omitempty"` Ctry Min2Max3AlphaText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Ctry,omitempty"` ShrtNm Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ShrtNm,omitempty"` RmotAccs NetworkParameters7 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RmotAccs,omitempty"` Glctn Geolocation1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Glctn,omitempty"` } type GenericIdentification32 struct { Id Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id"` Tp PartyType3Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tp,omitempty"` Issr PartyType4Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Issr,omitempty"` ShrtNm Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ShrtNm,omitempty"` } type GenericIdentification36 struct { Id Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id"` Issr Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Issr"` SchmeNm Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SchmeNm,omitempty"` } type GenericIdentification4 struct { Id Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id"` IdTp Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 IdTp"` } type GenericIdentification48 struct { Id Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id"` Vrsn Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Vrsn"` Issr Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Issr"` } type GenericIdentification90 struct { Id Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id,omitempty"` Tp PartyType14Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tp"` Issr PartyType4Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Issr,omitempty"` Ctry Min2Max3AlphaText 
`xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Ctry,omitempty"` ShrtNm Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ShrtNm,omitempty"` } type GenericInformation1 struct { Nm Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Nm"` Val Max140Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Val,omitempty"` } type Geolocation1 struct { GeogcCordints GeolocationGeographicCoordinates1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 GeogcCordints,omitempty"` UTMCordints GeolocationUTMCoordinates1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 UTMCordints,omitempty"` } type GeolocationGeographicCoordinates1 struct { Lat Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Lat"` Long Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Long"` } type GeolocationUTMCoordinates1 struct { UTMZone Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 UTMZone"` UTMEstwrd Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 UTMEstwrd"` UTMNrthwrd Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 UTMNrthwrd"` } type Header59 struct { MsgFctn MessageFunction42Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MsgFctn"` PrtcolVrsn Max6Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrtcolVrsn"` XchgId float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 XchgId"` ReTrnsmssnCntr Max3NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ReTrnsmssnCntr,omitempty"` CreDtTm ISODateTime `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CreDtTm"` InitgPty GenericIdentification176 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 InitgPty"` RcptPty GenericIdentification177 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RcptPty,omitempty"` Tracblt []Traceability8 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tracblt,omitempty"` } // Must match the pattern [A-Z]{2,2}[0-9]{2,2}[a-zA-Z0-9]{1,30} type IBAN2007Identifier string // Must match the pattern [0-9]{3,3} type ISO3NumericCountryCode string type ISODate time.Time func (t *ISODate) UnmarshalText(text []byte) error { return (*xsdDate)(t).UnmarshalText(text) } func (t ISODate) MarshalText() ([]byte, error) { return xsdDate(t).MarshalText() } type ISODateTime time.Time func (t *ISODateTime) UnmarshalText(text []byte) error { return (*xsdDateTime)(t).UnmarshalText(text) } func (t ISODateTime) MarshalText() ([]byte, error) { return xsdDateTime(t).MarshalText() } type IssuerAndSerialNumber2 struct { Issr CertificateIssuer1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Issr"` SrlNb Max500Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SrlNb"` } type KEK7 struct { Vrsn float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Vrsn,omitempty"` KEKId KEKIdentifier2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 KEKId"` KeyNcrptnAlgo AlgorithmIdentification29 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 KeyNcrptnAlgo"` NcrptdKey Max500Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NcrptdKey"` } type KEKIdentifier2 struct { KeyId Max140Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 KeyId"` KeyVrsn Max140Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 KeyVrsn"` SeqNb float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SeqNb,omitempty"` DerivtnId Min5Max16Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DerivtnId,omitempty"` } type KeyTransport7 struct { Vrsn float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 
Vrsn,omitempty"` RcptId Recipient9Choice `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RcptId"` KeyNcrptnAlgo AlgorithmIdentification19 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 KeyNcrptnAlgo"` NcrptdKey Max5000Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NcrptdKey"` } // May be one of ENCR, DCPT, DENC, DDEC, TRNI, TRNX, MACG, MACV, SIGG, SUGV, PINE, PIND, PINV, KEYG, KEYI, KEYX, KEYD type KeyUsage1Code string // May be one of INDR, IPMP, MPOI, MPMP, MSLE, SSLE, VNDG type LocationCategory3Code string // May be one of ABRD, NMDC, FIXD, VIRT type LocationCategory4Code string type LoyaltyAccount2 struct { LltyId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 LltyId,omitempty"` NtryMd CardDataReading8Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NtryMd,omitempty"` IdTp CardIdentificationType1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 IdTp,omitempty"` Brnd Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Brnd,omitempty"` Prvdr Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Prvdr,omitempty"` OwnrNm Max45Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 OwnrNm,omitempty"` Unit AmountUnit1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Unit,omitempty"` Ccy ActiveCurrencyCode `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Ccy,omitempty"` Bal float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Bal,omitempty"` } // May be one of ALLO, DENY, PRCS, PROP, REQU type LoyaltyHandling1Code string type MandateRelatedInformation13 struct { MndtId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MndtId"` DtOfSgntr ISODate `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DtOfSgntr,omitempty"` MndtImg Max2MBBinary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MndtImg,omitempty"` } type Max10000Binary []byte func (t *Max10000Binary) UnmarshalText(text []byte) error { return (*xsdBase64Binary)(t).UnmarshalText(text) } func (t Max10000Binary) MarshalText() ([]byte, error) { return xsdBase64Binary(t).MarshalText() } type Max100KBinary []byte func (t *Max100KBinary) UnmarshalText(text []byte) error { return (*xsdBase64Binary)(t).UnmarshalText(text) } func (t Max100KBinary) MarshalText() ([]byte, error) { return xsdBase64Binary(t).MarshalText() } // May be no more than 1025 items long type Max1025Text string // May be no more than 104 items long type Max104Text string type Max10KBinary []byte func (t *Max10KBinary) UnmarshalText(text []byte) error { return (*xsdBase64Binary)(t).UnmarshalText(text) } func (t Max10KBinary) MarshalText() ([]byte, error) { return xsdBase64Binary(t).MarshalText() } // May be no more than 10 items long type Max10Text string // Must match the pattern [0-9]{1,11} type Max11NumericText string type Max140Binary []byte func (t *Max140Binary) UnmarshalText(text []byte) error { return (*xsdBase64Binary)(t).UnmarshalText(text) } func (t Max140Binary) MarshalText() ([]byte, error) { return xsdBase64Binary(t).MarshalText() } // May be no more than 140 items long type Max140Text string // Must match the pattern [0-9]{1,15} type Max15NumericText string // May be no more than 16 items long type Max16Text string // Must match the pattern [0-9]{1,19} type Max19NumericText string // May be no more than 256 items long type Max256Text string type Max2KBinary []byte func (t *Max2KBinary) UnmarshalText(text []byte) error { return (*xsdBase64Binary)(t).UnmarshalText(text) } func (t Max2KBinary) MarshalText() ([]byte, error) { return 
xsdBase64Binary(t).MarshalText() } type Max2MBBinary []byte func (t *Max2MBBinary) UnmarshalText(text []byte) error { return (*xsdBase64Binary)(t).UnmarshalText(text) } func (t Max2MBBinary) MarshalText() ([]byte, error) { return xsdBase64Binary(t).MarshalText() } // Must match the pattern [0-9]{1,2} type Max2NumericText string type Max3000Binary []byte func (t *Max3000Binary) UnmarshalText(text []byte) error { return (*xsdBase64Binary)(t).UnmarshalText(text) } func (t Max3000Binary) MarshalText() ([]byte, error) { return xsdBase64Binary(t).MarshalText() } // May be no more than 30 items long type Max30Text string // May be no more than 350 items long type Max350Text string type Max35Binary []byte func (t *Max35Binary) UnmarshalText(text []byte) error { return (*xsdBase64Binary)(t).UnmarshalText(text) } func (t Max35Binary) MarshalText() ([]byte, error) { return xsdBase64Binary(t).MarshalText() } // Must match the pattern [0-9]{1,35} type Max35NumericText string // May be no more than 35 items long type Max35Text string // May be no more than 37 items long type Max37Text string // Must match the pattern [0-9]{1,3} type Max3NumericText string // May be no more than 3 items long type Max3Text string // May be no more than 45 items long type Max45Text string type Max5000Binary []byte func (t *Max5000Binary) UnmarshalText(text []byte) error { return (*xsdBase64Binary)(t).UnmarshalText(text) } func (t Max5000Binary) MarshalText() ([]byte, error) { return xsdBase64Binary(t).MarshalText() } type Max500Binary []byte func (t *Max500Binary) UnmarshalText(text []byte) error { return (*xsdBase64Binary)(t).UnmarshalText(text) } func (t Max500Binary) MarshalText() ([]byte, error) { return xsdBase64Binary(t).MarshalText() } // May be no more than 500 items long type Max500Text string // Must match the pattern [0-9]{1,5} type Max5NumericText string // May be no more than 6 items long type Max6Text string // May be no more than 70 items long type Max70Text string // May be no more than 76 items long type Max76Text string type MemoryCharacteristics1 struct { Id Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id"` TtlSz float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TtlSz"` FreeSz float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 FreeSz"` Unit MemoryUnit1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Unit"` } // May be one of BYTE, EXAB, GIGA, KILO, MEGA, PETA, TERA type MemoryUnit1Code string type MerchantToken1 struct { Tkn Min8Max28NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tkn,omitempty"` TknXpryDt Max10Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TknXpryDt,omitempty"` TknChrtc []Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TknChrtc,omitempty"` TknRqstr PaymentTokenIdentifiers1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TknRqstr,omitempty"` TknAssrncLvl float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TknAssrncLvl,omitempty"` TknAssrncData Max500Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TknAssrncData,omitempty"` TknAssrncMtd Max2NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TknAssrncMtd,omitempty"` TknInittdInd bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TknInittdInd,omitempty"` } // May be one of AUTQ, AUTP, CCAV, CCAK, CCAQ, CCAP, CMPV, CMPK, DCAV, DCRR, DCCQ, DCCP, DGNP, DGNQ, FAUQ, FAUP, FCMV, FCMK, FRVA, FRVR, RCLQ, RCLP, RVRA, RVRR, CDDQ, CDDK, CDDR, CDDP, TRNR, TRNA, NFRQ, NFRP, TRPQ, TRPP type MessageFunction42Code string 
// Must match the pattern [a-zA-Z]{2,3} type Min2Max3AlphaText string // Must match the pattern [0-9]{2,3} type Min2Max3NumericText string // May be no more than 4 items long type Min3Max4Text string type Min5Max16Binary []byte func (t *Min5Max16Binary) UnmarshalText(text []byte) error { return (*xsdBase64Binary)(t).UnmarshalText(text) } func (t Min5Max16Binary) MarshalText() ([]byte, error) { return xsdBase64Binary(t).MarshalText() } // May be no more than 8 items long type Min6Max8Text string // Must match the pattern [0-9]{8,28} type Min8Max28NumericText string type MobileData3 struct { MobCtryCd Min2Max3AlphaText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MobCtryCd,omitempty"` MobNtwkCd Min2Max3NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MobNtwkCd,omitempty"` MobMskdMSISDN Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MobMskdMSISDN,omitempty"` Glctn Geolocation1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Glctn,omitempty"` SnstvMobData SensitiveMobileData1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SnstvMobData,omitempty"` PrtctdMobData ContentInformationType28 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrtctdMobData,omitempty"` } type NameAndAddress6 struct { Nm Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Nm"` Adr PostalAddress2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Adr"` } type NetworkParameters7 struct { Adr []NetworkParameters9 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Adr"` UsrNm Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 UsrNm,omitempty"` AccsCd Max35Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AccsCd,omitempty"` SvrCert []Max10KBinary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SvrCert,omitempty"` SvrCertIdr []Max140Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SvrCertIdr,omitempty"` ClntCert []Max10KBinary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ClntCert,omitempty"` SctyPrfl Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SctyPrfl,omitempty"` } type NetworkParameters9 struct { NtwkTp NetworkType1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NtwkTp"` AdrVal Max500Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AdrVal"` } // May be one of IPNW, PSTN type NetworkType1Code string // May be one of OFLN, ONLN, SMON type OnLineCapability1Code string type OnLinePIN8 struct { NcrptdPINBlck ContentInformationType28 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NcrptdPINBlck"` PINFrmt PINFormat3Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PINFrmt"` AddtlInpt Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlInpt,omitempty"` } type Organisation26 struct { CmonNm Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CmonNm"` Adr Max140Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Adr,omitempty"` CtryCd ISO3NumericCountryCode `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CtryCd"` MrchntCtgyCd Min3Max4Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MrchntCtgyCd"` RegdIdr Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RegdIdr"` } type Organisation41 struct { Id GenericIdentification32 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id,omitempty"` CmonNm Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CmonNm,omitempty"` LctnCtgy LocationCategory4Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 LctnCtgy,omitempty"` LctnAndCtct CommunicationAddress9 
`xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 LctnAndCtct,omitempty"` SchmeData Max140Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SchmeData,omitempty"` } type OriginatorInformation1 struct { Cert []Max5000Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Cert,omitempty"` } // May be one of MREF, TEXT, HTML type OutputFormat1Code string // May be one of ISO0, ISO1, ISO2, ISO3, ISO4, ISO5 type PINFormat3Code string // May be one of BLTH, ETHR, GPRS, GSMF, PSTN, RS23, USBD, USBH, WIFI, WT2G, WT3G, WT4G, WT5G type POICommunicationType2Code string // May be one of APPL, CERT, EVAL type POIComponentAssessment1Code string // May be one of WAIT, OUTD, OPER, DACT type POIComponentStatus1Code string // May be one of AQPP, APPR, TLPR, SCPR, SERV, TERM, DVCE, SECM, APLI, EMVK, EMVO, MDWR, DRVR, OPST, MRPR, CRTF, TMSP, SACP, SAPR, LOGF, MDFL, SOFT, CONF, RPFL type POIComponentType6Code string type PackageType2 struct { PackgId GenericIdentification176 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PackgId,omitempty"` PackgLngth float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PackgLngth,omitempty"` OffsetStart float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 OffsetStart,omitempty"` OffsetEnd float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 OffsetEnd,omitempty"` PackgBlck []ExternallyDefinedData2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PackgBlck,omitempty"` } type Parameter10 struct { NcrptnFrmt EncryptionFormat2Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NcrptnFrmt,omitempty"` DgstAlgo Algorithm16Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DgstAlgo,omitempty"` MskGnrtrAlgo AlgorithmIdentification18 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MskGnrtrAlgo,omitempty"` } type Parameter12 struct { NcrptnFrmt EncryptionFormat2Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NcrptnFrmt,omitempty"` InitlstnVctr Max500Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 InitlstnVctr,omitempty"` BPddg BytePadding1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 BPddg,omitempty"` } type Parameter15 struct { DgstAlgo Algorithm16Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DgstAlgo,omitempty"` MskGnrtrAlgo AlgorithmIdentification12 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MskGnrtrAlgo,omitempty"` SaltLngth float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SaltLngth,omitempty"` TrlrFld float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TrlrFld,omitempty"` OIDCrvNm Max140Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 OIDCrvNm,omitempty"` } type Parameter5 struct { DgstAlgo Algorithm11Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DgstAlgo,omitempty"` } type Parameter7 struct { InitlstnVctr Max500Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 InitlstnVctr,omitempty"` BPddg BytePadding1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 BPddg,omitempty"` } type Parameter9 struct { DgstAlgo Algorithm16Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DgstAlgo,omitempty"` } type PartyIdentification178Choice struct { AnyBIC AnyBICDec2014Identifier `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AnyBIC,omitempty"` PrtryId GenericIdentification36 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrtryId,omitempty"` NmAndAdr NameAndAddress6 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NmAndAdr,omitempty"` } // May be one of OPOI, MERC, ACCP, ITAG, ACQR, CISS, DLIS, ICCA 
type PartyType14Code string // May be one of OPOI, MERC, ACCP, ITAG, ACQR, CISS, DLIS, MTMG, TAXH, TMGT type PartyType33Code string // May be one of OPOI, MERC, ACCP, ITAG, ACQR, CISS, DLIS type PartyType3Code string // May be one of MERC, ACCP, ITAG, ACQR, CISS, TAXH type PartyType4Code string // May be one of ACQR, ITAG, PCPT, TMGT, SALE type PartyType7Code string type PaymentCard31 struct { PrtctdCardData ContentInformationType28 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrtctdCardData,omitempty"` PrvtCardData Max100KBinary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrvtCardData,omitempty"` PlainCardData PlainCardData15 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PlainCardData,omitempty"` PmtAcctRef Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PmtAcctRef,omitempty"` MskdPAN Max30Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MskdPAN,omitempty"` IssrBIN Max15NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 IssrBIN,omitempty"` CardCtryCd Max3Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardCtryCd,omitempty"` CardCcyCd Exact3AlphaNumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardCcyCd,omitempty"` CardPdctPrfl Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardPdctPrfl,omitempty"` CardBrnd Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardBrnd,omitempty"` CardPdctTp CardProductType1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardPdctTp,omitempty"` CardPdctSubTp Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardPdctSubTp,omitempty"` IntrnlCard bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 IntrnlCard,omitempty"` AllwdPdct []Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AllwdPdct,omitempty"` SvcOptn Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SvcOptn,omitempty"` AddtlCardData Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlCardData,omitempty"` } type PaymentContext27 struct { CardPres bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardPres,omitempty"` CrdhldrPres bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CrdhldrPres,omitempty"` OnLineCntxt bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 OnLineCntxt,omitempty"` AttndncCntxt AttendanceContext1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AttndncCntxt,omitempty"` TxEnvt TransactionEnvironment1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TxEnvt,omitempty"` TxChanl TransactionChannel5Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TxChanl,omitempty"` AttndntMsgCpbl bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AttndntMsgCpbl,omitempty"` AttndntLang string `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AttndntLang,omitempty"` CardDataNtryMd CardDataReading8Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardDataNtryMd,omitempty"` FllbckInd CardFallback1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 FllbckInd,omitempty"` SpprtdOptn []SupportedPaymentOption1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SpprtdOptn,omitempty"` } type PaymentTokenIdentifiers1 struct { PrvdrId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrvdrId"` RqstrId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RqstrId"` } type PersonIdentification15 struct { DrvrLicNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DrvrLicNb,omitempty"` DrvrLicLctn Max35Text 
`xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DrvrLicLctn,omitempty"` DrvrLicNm Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DrvrLicNm,omitempty"` DrvrId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DrvrId,omitempty"` CstmrNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CstmrNb,omitempty"` SclSctyNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SclSctyNb,omitempty"` AlnRegnNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AlnRegnNb,omitempty"` PsptNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PsptNb,omitempty"` TaxIdNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TaxIdNb,omitempty"` IdntyCardNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 IdntyCardNb,omitempty"` MplyrIdNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MplyrIdNb,omitempty"` MplyeeIdNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MplyeeIdNb,omitempty"` JobNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 JobNb,omitempty"` Dept Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Dept,omitempty"` EmailAdr Max256Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 EmailAdr,omitempty"` DtAndPlcOfBirth DateAndPlaceOfBirth1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DtAndPlcOfBirth,omitempty"` Othr []GenericIdentification4 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Othr,omitempty"` } // Must match the pattern \+[0-9]{1,3}-[0-9()+\-]{1,30} type PhoneNumber string type PhysicalInterfaceParameter1 struct { IntrfcNm Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 IntrfcNm"` IntrfcTp POICommunicationType2Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 IntrfcTp,omitempty"` UsrNm Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 UsrNm,omitempty"` AccsCd Max35Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AccsCd,omitempty"` SctyPrfl Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SctyPrfl,omitempty"` AddtlParams Max2KBinary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlParams,omitempty"` } type PlainCardData15 struct { PAN Min8Max28NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PAN"` CardSeqNb Min2Max3NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardSeqNb,omitempty"` FctvDt Max10Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 FctvDt,omitempty"` XpryDt Max10Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 XpryDt"` SvcCd Exact3NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SvcCd,omitempty"` Trck1 Max76Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Trck1,omitempty"` Trck2 Max37Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Trck2,omitempty"` Trck3 Max104Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Trck3,omitempty"` CrdhldrNm Max45Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CrdhldrNm,omitempty"` } type PlainCardData17 struct { PAN Min8Max28NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PAN,omitempty"` Trck1 Max76Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Trck1,omitempty"` Trck2 Max37Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Trck2,omitempty"` Trck3 Max104Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Trck3,omitempty"` AddtlCardData []Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlCardData,omitempty"` NtryMd CardDataReading5Code 
`xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NtryMd,omitempty"` } type PointOfInteraction11 struct { Id GenericIdentification177 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id"` SysNm Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SysNm,omitempty"` GrpId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 GrpId,omitempty"` Cpblties PointOfInteractionCapabilities9 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Cpblties,omitempty"` TmZone Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TmZone,omitempty"` TermnlIntgtn LocationCategory3Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TermnlIntgtn,omitempty"` Cmpnt []PointOfInteractionComponent11 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Cmpnt,omitempty"` } type PointOfInteractionCapabilities9 struct { CardRdngCpblties []CardDataReading8Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardRdngCpblties,omitempty"` CrdhldrVrfctnCpblties []CardholderVerificationCapability4Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CrdhldrVrfctnCpblties,omitempty"` PINLngthCpblties float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PINLngthCpblties,omitempty"` ApprvlCdLngth float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ApprvlCdLngth,omitempty"` MxScrptLngth float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MxScrptLngth,omitempty"` CardCaptrCpbl bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CardCaptrCpbl,omitempty"` OnLineCpblties OnLineCapability1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 OnLineCpblties,omitempty"` MsgCpblties []DisplayCapabilities4 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MsgCpblties,omitempty"` } type PointOfInteractionComponent11 struct { Tp POIComponentType6Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tp"` SubTpInf Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SubTpInf,omitempty"` Id PointOfInteractionComponentIdentification2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id"` Sts PointOfInteractionComponentStatus3 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Sts,omitempty"` StdCmplc []GenericIdentification48 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 StdCmplc,omitempty"` Chrtcs PointOfInteractionComponentCharacteristics7 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Chrtcs,omitempty"` Assmnt []PointOfInteractionComponentAssessment1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Assmnt,omitempty"` Packg []PackageType2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Packg,omitempty"` } type PointOfInteractionComponentAssessment1 struct { Tp POIComponentAssessment1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tp"` Assgnr []Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Assgnr"` DlvryDt ISODateTime `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DlvryDt,omitempty"` XprtnDt ISODateTime `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 XprtnDt,omitempty"` Nb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Nb"` } type PointOfInteractionComponentCharacteristics7 struct { Mmry []MemoryCharacteristics1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Mmry,omitempty"` Com []CommunicationCharacteristics5 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Com,omitempty"` SctyAccsMdls float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SctyAccsMdls,omitempty"` SbcbrIdntyMdls float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 
SbcbrIdntyMdls,omitempty"` SctyElmt []CryptographicKey15 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SctyElmt,omitempty"` } type PointOfInteractionComponentIdentification2 struct { ItmNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ItmNb,omitempty"` PrvdrId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrvdrId,omitempty"` Id Max256Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id,omitempty"` SrlNb Max256Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SrlNb,omitempty"` } type PointOfInteractionComponentStatus3 struct { VrsnNb Max256Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 VrsnNb,omitempty"` Sts POIComponentStatus1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Sts,omitempty"` XpryDt ISODate `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 XpryDt,omitempty"` } type PostalAddress2 struct { StrtNm Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 StrtNm,omitempty"` PstCdId Max16Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PstCdId"` TwnNm Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TwnNm"` CtrySubDvsn Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CtrySubDvsn,omitempty"` Ctry CountryCode `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Ctry"` } type PostalAddress22 struct { AdrTp AddressType2Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AdrTp,omitempty"` Dept Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Dept,omitempty"` SubDept Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SubDept,omitempty"` AdrLine []Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AdrLine,omitempty"` StrtNm Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 StrtNm,omitempty"` BldgNb Max16Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 BldgNb,omitempty"` PstCd Max16Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PstCd,omitempty"` TwnNm Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TwnNm,omitempty"` CtrySubDvsn []Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CtrySubDvsn,omitempty"` CtryCd Min2Max3AlphaText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CtryCd,omitempty"` } type Recipient10Choice struct { KeyTrnsprt KeyTransport7 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 KeyTrnsprt,omitempty"` KEK KEK7 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 KEK,omitempty"` KeyIdr KEKIdentifier2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 KeyIdr,omitempty"` } type Recipient9Choice struct { IssrAndSrlNb IssuerAndSerialNumber2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 IssrAndSrlNb,omitempty"` KeyIdr KEKIdentifier2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 KeyIdr,omitempty"` } type RelativeDistinguishedName1 struct { AttrTp AttributeType1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AttrTp"` AttrVal Max140Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AttrVal"` } // May be one of APPR, DECL, PART, SUSP, TECH type Response9Code string type ResponseType10 struct { Rspn Response9Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Rspn"` RspnRsn Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RspnRsn,omitempty"` AddtlRspnInf Max140Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlRspnInf,omitempty"` } type RetailerSaleEnvironment2 struct { SaleCpblties []SaleCapabilities1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SaleCpblties,omitempty"` 
Ccy ActiveCurrencyCode `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Ccy,omitempty"` MinAmtToDlvr float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MinAmtToDlvr,omitempty"` MaxCshBckAmt float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MaxCshBckAmt,omitempty"` MinSpltAmt float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MinSpltAmt,omitempty"` DbtPrefrdFlg bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DbtPrefrdFlg,omitempty"` LltyHdlg LoyaltyHandling1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 LltyHdlg,omitempty"` } // May be one of CHDI, CHER, CHIN, CHST, CUDI, CUAS, CUER, CUIN, POIR, PRDC, PRRP, PRVC type SaleCapabilities1Code string type SaleContext4 struct { SaleId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SaleId,omitempty"` SaleRefNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SaleRefNb,omitempty"` SaleRcncltnId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SaleRcncltnId,omitempty"` CshrId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CshrId,omitempty"` CshrLang []string `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CshrLang,omitempty"` ShftNb Max2NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ShftNb,omitempty"` CstmrOrdrReqFlg bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 CstmrOrdrReqFlg,omitempty"` PurchsOrdrNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PurchsOrdrNb,omitempty"` InvcNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 InvcNb,omitempty"` DlvryNoteNb Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DlvryNoteNb,omitempty"` SpnsrdMrchnt []Organisation26 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SpnsrdMrchnt,omitempty"` SpltPmt bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SpltPmt,omitempty"` RmngAmt float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RmngAmt,omitempty"` ForceOnlnFlg bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ForceOnlnFlg,omitempty"` ReuseCardDataFlg bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 ReuseCardDataFlg,omitempty"` AllwdNtryMd []CardDataReading8Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AllwdNtryMd,omitempty"` SaleTknScp SaleTokenScope1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SaleTknScp,omitempty"` AddtlSaleData Max70Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlSaleData,omitempty"` } // May be one of MULT, SNGL type SaleTokenScope1Code string type SensitiveMobileData1 struct { MSISDN Max35NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MSISDN"` IMSI Max35NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 IMSI,omitempty"` IMEI Max35NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 IMEI,omitempty"` } type SignedData6 struct { Vrsn float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Vrsn,omitempty"` DgstAlgo []AlgorithmIdentification21 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DgstAlgo,omitempty"` NcpsltdCntt EncapsulatedContent3 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NcpsltdCntt,omitempty"` Cert []Max5000Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Cert,omitempty"` Sgnr []Signer5 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Sgnr,omitempty"` } type Signer5 struct { Vrsn float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Vrsn,omitempty"` SgnrId Recipient9Choice `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 
SgnrId,omitempty"` DgstAlgo AlgorithmIdentification21 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DgstAlgo"` SgndAttrbts []GenericInformation1 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SgndAttrbts,omitempty"` SgntrAlgo AlgorithmIdentification30 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 SgntrAlgo"` Sgntr Max3000Binary `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Sgntr"` } type SimpleIdentificationInformation4 struct { Id Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id"` } type StoredValueAccount2 struct { AcctTp StoredValueAccountType1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AcctTp,omitempty"` IdTp CardIdentificationType1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 IdTp,omitempty"` Id Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Id,omitempty"` Brnd Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Brnd,omitempty"` Prvdr Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Prvdr,omitempty"` OwnrNm Max45Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 OwnrNm,omitempty"` XpryDt Max10Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 XpryDt,omitempty"` NtryMd CardDataReading8Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NtryMd,omitempty"` Ccy ActiveCurrencyCode `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Ccy,omitempty"` Bal float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Bal,omitempty"` } // May be one of BNKA, CWVC, CPYA, ELMY, GIFT, GCER, MLVC, OLVC, MERC, OTHR, PHON, CARD, TRVL type StoredValueAccountType1Code string // May be one of PART, MSRV type SupportedPaymentOption1Code string type Token1 struct { PmtTkn Max19NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PmtTkn,omitempty"` TknXpryDt Exact4NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TknXpryDt,omitempty"` TknRqstrId Max11NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TknRqstrId,omitempty"` TknAssrncData Max140Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TknAssrncData,omitempty"` TknAssrncMtd Max2NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TknAssrncMtd,omitempty"` TknInittdInd bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TknInittdInd,omitempty"` } type Traceability8 struct { RlayId GenericIdentification177 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RlayId"` PrtcolNm Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrtcolNm,omitempty"` PrtcolVrsn Max6Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 PrtcolVrsn,omitempty"` TracDtTmIn ISODateTime `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TracDtTmIn"` TracDtTmOut ISODateTime `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TracDtTmOut"` } type TrackData2 struct { TrckNb float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TrckNb,omitempty"` TrckFrmt TrackFormat1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TrckFrmt,omitempty"` TrckVal Max140Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TrckVal"` } // May be one of AAMV, CMC7, E13B, ISOF, JIS1, JIS2 type TrackFormat1Code string // May be one of MAIL, TLPH, ECOM, TVPY, SECM, MOBL, MPOS type TransactionChannel5Code string // May be one of MERC, PRIV, PUBL type TransactionEnvironment1Code string type TransactionIdentifier1 struct { TxDtTm ISODateTime `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TxDtTm"` TxRef Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TxRef"` } 
type TransactionVerificationResult4 struct { Mtd AuthenticationMethod6Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Mtd"` VrfctnNtty AuthenticationEntity2Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 VrfctnNtty,omitempty"` Rslt Verification1Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Rslt,omitempty"` AddtlRslt Max500Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlRslt,omitempty"` } // Must match the pattern [0-9]{8,17} type UPICIdentifier string // May be one of CDSP, CRCP, MDSP, MRCP, CRDO type UserInterface4Code string type Vehicle1 struct { VhclNb Max35NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 VhclNb,omitempty"` TrlrNb Max35NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TrlrNb,omitempty"` VhclTag Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 VhclTag,omitempty"` VhclTagNtryMd CardDataReading5Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 VhclTagNtryMd,omitempty"` UnitNb Max35NumericText `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 UnitNb,omitempty"` RplcmntCar bool `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RplcmntCar,omitempty"` Odmtr float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Odmtr,omitempty"` Hbmtr float64 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Hbmtr,omitempty"` TrlrHrs Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 TrlrHrs,omitempty"` RefrHrs Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 RefrHrs,omitempty"` MntncId Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 MntncId,omitempty"` DrvrOrVhclCard PlainCardData17 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 DrvrOrVhclCard,omitempty"` AddtlVhclData []Vehicle2 `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 AddtlVhclData,omitempty"` } type Vehicle2 struct { Tp Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Tp,omitempty"` NtryMd CardDataReading5Code `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 NtryMd,omitempty"` Data Max35Text `xml:"urn:iso:std:iso:20022:tech:xsd:caaa.005.001.10 Data"` } // May be one of FAIL, MISS, NOVF, PART, SUCC, ERRR type Verification1Code string type xsdBase64Binary []byte func (b *xsdBase64Binary) UnmarshalText(text []byte) (err error) { *b, err = base64.StdEncoding.DecodeString(string(text)) return } func (b xsdBase64Binary) MarshalText() ([]byte, error) { var buf bytes.Buffer enc := base64.NewEncoder(base64.StdEncoding, &buf) enc.Write([]byte(b)) enc.Close() return buf.Bytes(), nil } type xsdDate time.Time func (t *xsdDate) UnmarshalText(text []byte) error { return _unmarshalTime(text, (*time.Time)(t), "2006-01-02") } func (t xsdDate) MarshalText() ([]byte, error) { return _marshalTime((time.Time)(t), "2006-01-02") } func (t xsdDate) MarshalXML(e *xml.Encoder, start xml.StartElement) error { if (time.Time)(t).IsZero() { return nil } m, err := t.MarshalText() if err != nil
return e.EncodeElement(m, start) } func (t xsdDate) MarshalXMLAttr(name xml.Name) (xml.Attr, error) { if (time.Time)(t).IsZero() { return xml.Attr{}, nil } m, err := t.MarshalText() return xml.Attr{Name: name, Value: string(m)}, err } func _unmarshalTime(text []byte, t *time.Time, format string) (err error) { s := string(bytes.TrimSpace(text)) *t, err = time.Parse(format, s) if _, ok := err.(*time.ParseError); ok { *t, err = time.Parse(format+"Z07:00", s) } return err } func _marshalTime(t time.Time, format string) ([]byte, error) { return []byte(t.Format(format + "Z07:00")), nil } type xsdDateTime time.Time func (t *xsdDateTime) UnmarshalText(text []byte) error { return _unmarshalTime(text, (*time.Time)(t), "2006-01-02T15:04:05.999999999") } func (t xsdDateTime) MarshalText() ([]byte, error) { return _marshalTime((time.Time)(t), "2006-01-02T15:04:05.999999999") } func (t xsdDateTime) MarshalXML(e *xml.Encoder, start xml.StartElement) error { if (time.Time)(t).IsZero() { return nil } m, err := t.MarshalText() if err != nil { return err } return e.EncodeElement(m, start) } func (t xsdDateTime) MarshalXMLAttr(name xml.Name) (xml.Attr, error) { if (time.Time)(t).IsZero() { return xml.Attr{}, nil } m, err := t.MarshalText() return xml.Attr{Name: name, Value: string(m)}, err }
{ return err }
iroha.py
#!/usr/bin/env python3 # # Copyright Soramitsu Co., Ltd. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 # from . import ed25519 as ed25519_sha3 import nacl.signing as ed25519_sha2 import nacl.exceptions import hashlib import binascii import grpc import time import re import os from . import commands_pb2 from . import endpoint_pb2 from . import endpoint_pb2_grpc from . import primitive_pb2 from . import queries_pb2 from . import transaction_pb2 class IrohaCrypto(object): """ Collection of general crypto-related functions """ @staticmethod def derive_public_key(private_key): """ Calculate the public key from a private key :param private_key: hex encoded private key :return: hex encoded public key """ if isinstance(private_key, (str, bytes)): # default, legacy secret = binascii.unhexlify(private_key) public_key = ed25519_sha3.publickey_unsafe(secret) hex_public_key = binascii.hexlify(public_key) return hex_public_key elif isinstance(private_key, ed25519_sha2.SigningKey): return 'ed0120' + binascii.hexlify(private_key.verify_key._key).decode("utf-8") @staticmethod def get_payload_to_be_signed(proto): """ :param proto: proto transaction or query :return: bytes representation of what has to be signed """ if hasattr(proto, 'payload'): return proto.payload.SerializeToString() # signing of meta is implemented for block streaming queries, # because they do not have a payload in their schema elif hasattr(proto, 'meta'): return proto.meta.SerializeToString() raise RuntimeError('Unknown message type.') @staticmethod def hash(proto_with_payload): """ Calculates the hash of the payload of a proto message :param proto_with_payload: proto transaction or query :return: bytes representation of hash """ obj = IrohaCrypto.get_payload_to_be_signed(proto_with_payload) hash = hashlib.sha3_256(obj).digest() return hash @staticmethod def _signature(message, private_key): """ Calculate the signature for a given message and private key :param message: proto that has payload message inside :param private_key: hex string with private key :return: a proto Signature message """ public_key = IrohaCrypto.derive_public_key(private_key) if isinstance(private_key, (str, bytes)): # default, legacy message_hash = IrohaCrypto.hash(message) sk = binascii.unhexlify(private_key) pk = binascii.unhexlify(public_key) signature_bytes = ed25519_sha3.signature_unsafe( message_hash, sk, pk) elif isinstance(private_key, ed25519_sha2.SigningKey): signature_bytes = private_key.sign( IrohaCrypto.get_payload_to_be_signed(message)).signature else: raise RuntimeError('Unsupported private key type.') signature = primitive_pb2.Signature() signature.public_key = public_key signature.signature = binascii.hexlify(signature_bytes) return signature @staticmethod def sign_transaction(transaction, *private_keys): """ Add the specified signatures to a transaction. Source transaction will be modified :param transaction: the transaction to be signed :param private_keys: hex strings of private keys to sign the transaction :return: the modified transaction """ assert len(private_keys), 'At least one private key has to be passed' signatures = [] for private_key in private_keys: signature = IrohaCrypto._signature(transaction, private_key) signatures.append(signature) transaction.signatures.extend(signatures) return transaction @staticmethod def sign_query(query, private_key): """ Add a signature to a query. 
Source query will be modified :param query: the query to be signed :param private_key: hex string of private key to sign the query :return: the modified query """ signature = IrohaCrypto._signature(query, private_key) query.signature.CopyFrom(signature) return query @staticmethod def is_sha2_signature_valid(message, signature): """ Verify sha2 signature validity. :param signature: the signature to be checked :param message: message to check the signature against :return: bool, whether the signature is valid for the message """ parse_message = IrohaCrypto.get_payload_to_be_signed(message) signature_bytes = binascii.unhexlify(signature.signature) public_key = ed25519_sha2.VerifyKey(binascii.unhexlify(signature.public_key)[3:]) try: public_key.verify(parse_message, signature_bytes) return True except nacl.exceptions.BadSignatureError: return False @staticmethod def is_signature_valid(message, signature): """ Verify sha3 signature validity. To check a sha2 signature, use the "is_sha2_signature_valid" method instead :param signature: the signature to be checked :param message: message to check the signature against :return: bool, whether the signature is valid for the message """ message_hash = IrohaCrypto.hash(message) try: signature_bytes = binascii.unhexlify(signature.signature) public_key = binascii.unhexlify(signature.public_key) ed25519_sha3.checkvalid(signature_bytes, message_hash, public_key) return True except (ed25519_sha3.SignatureMismatch, ValueError): return False @staticmethod def reduced_hash(transaction): """ Calculates the hash of the reduced payload of a transaction :param transaction: transaction to be processed :return: hex representation of hash """ bytes = transaction.payload.reduced_payload.SerializeToString() hash = hashlib.sha3_256(bytes).digest() hex_hash = binascii.hexlify(hash) return hex_hash @staticmethod def private_key(): """ Generates a new random ed25519/sha3 private key :return: hex representation of private key """ return binascii.b2a_hex(os.urandom(32)) class Iroha(object): """ Collection of factory methods for transaction and query creation """ def __init__(self, creator_account=None): self.creator_account = creator_account @staticmethod def _camel_case_to_snake_case(camel_case_string): """Transforms a CamelCase string into snake_case""" tmp = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', camel_case_string) return re.sub('([a-z0-9])([A-Z])', r'\1_\2', tmp).lower() @staticmethod def now(): """Current timestamp in milliseconds""" return int(round(time.time() * 1000)) def transaction(self, commands, quorum=1, creator_account=None, created_time=None):
@staticmethod def command(name, **kwargs): """ Creates a protobuf command to be inserted into a transaction :param name: CamelCased name of command :param kwargs: command arguments as they are defined in the schema :return: a proto command Usage example: cmd = Iroha.command('CreateDomain', domain_id='test', default_role='user') """ command_wrapper = commands_pb2.Command() field_name = Iroha._camel_case_to_snake_case(name) internal_command = getattr(command_wrapper, field_name) for key, value in kwargs.items(): if 'permissions' == key: permissions_attr = getattr(internal_command, key) permissions_attr.extend(value) continue if 'peer' == key: peer_attr = getattr(internal_command, key) peer_attr.CopyFrom(value) continue setattr(internal_command, key, value) return command_wrapper def query(self, name, counter=1, creator_account=None, created_time=None, page_size=None, first_tx_hash=None, **kwargs): """ Creates a protobuf query with a specified set of entities :param name: CamelCased name of query to be executed :param counter: query counter, should be incremented for each new query :param creator_account: account id of query creator :param created_time: query creation timestamp in milliseconds :param page_size: a non-zero positive number, size of result rowset for queries with pagination :param first_tx_hash: optional hash of a transaction that will be the beginning of the next page :param kwargs: query arguments as they are defined in the schema :return: a proto query """ assert creator_account or self.creator_account, \ "No account name specified as query creator id" pagination_meta = None if not created_time: created_time = self.now() if not creator_account: creator_account = self.creator_account if page_size or first_tx_hash: pagination_meta = queries_pb2.TxPaginationMeta() if page_size: pagination_meta.page_size = page_size if first_tx_hash: pagination_meta.first_tx_hash = first_tx_hash meta = queries_pb2.QueryPayloadMeta() meta.created_time = created_time meta.creator_account_id = creator_account meta.query_counter = counter query_wrapper = queries_pb2.Query() query_wrapper.payload.meta.CopyFrom(meta) field_name = Iroha._camel_case_to_snake_case(name) internal_query = getattr(query_wrapper.payload, field_name) for key, value in kwargs.items(): if 'tx_hashes' == key: hashes_attr = getattr(internal_query, key) hashes_attr.extend(value) continue setattr(internal_query, key, value) if pagination_meta: pagination_meta_attr = getattr(internal_query, 'pagination_meta') pagination_meta_attr.CopyFrom(pagination_meta) if not len(kwargs): message = getattr(queries_pb2, name)() internal_query.CopyFrom(message) return query_wrapper def blocks_query(self, counter=1, creator_account=None, created_time=None): """ Creates a protobuf query for a blocks stream :param counter: query counter, should be incremented for each new query :param creator_account: account id of query creator :param created_time: query creation timestamp in milliseconds :return: a proto blocks query """ if not created_time: created_time = self.now() if not creator_account: creator_account = self.creator_account meta = queries_pb2.QueryPayloadMeta() meta.created_time = created_time meta.creator_account_id = creator_account meta.query_counter = counter query_wrapper = queries_pb2.BlocksQuery() query_wrapper.meta.CopyFrom(meta) return query_wrapper @staticmethod def batch(transactions, atomic=True): """ Tie transactions together into a single batch. All of them will have a common batch meta. 
:param transactions: list of transactions to be tied into a batch :param atomic: boolean, the batch type: ATOMIC if true, ORDERED otherwise :return: nothing, source transactions will be modified """ meta_ref = transaction_pb2.Transaction.Payload.BatchMeta batch_type = meta_ref.ATOMIC if atomic else meta_ref.ORDERED reduced_hashes = [] for transaction in transactions: reduced_hash = IrohaCrypto.reduced_hash(transaction) reduced_hashes.append(reduced_hash) meta = meta_ref() meta.type = batch_type meta.reduced_hashes.extend(reduced_hashes) for transaction in transactions: transaction.payload.batch.CopyFrom(meta) class IrohaGrpc(object): """ Possible implementation of a gRPC transport to Iroha """ def __init__(self, address=None, timeout=None, secure=False, *, max_message_length=None): """ Create an Iroha gRPC client :param address: Iroha Torii address with port, example "127.0.0.1:50051" :param timeout: timeout for network I/O operations in seconds :param secure: enable a gRPC SSL channel :param max_message_length: maximum gRPC message length in bytes """ self._address = address if address else '127.0.0.1:50051' channel_kwargs = {} if max_message_length is not None: channel_kwargs['options'] = [ ('grpc.max_send_message_length', max_message_length), ('grpc.max_receive_message_length', max_message_length)] if secure: self._channel = grpc.secure_channel(self._address, grpc.ssl_channel_credentials(), **channel_kwargs) else: self._channel = grpc.insecure_channel(self._address, **channel_kwargs) self._timeout = timeout self._command_service_stub = endpoint_pb2_grpc.CommandService_v1Stub( self._channel) self._query_service_stub = endpoint_pb2_grpc.QueryService_v1Stub( self._channel) def send_tx(self, transaction, timeout=None): """ Send a transaction to Iroha :param transaction: protobuf Transaction :param timeout: timeout for network I/O operations in seconds :return: None :raise: grpc.RpcError with .code() available in case of any error """ if not timeout: timeout = self._timeout self._command_service_stub.Torii(transaction, timeout=timeout) def send_txs(self, transactions, timeout=None): """ Send a series of transactions to Iroha at once. Useful for submitting batches of transactions. 
:param transactions: list of protobuf transactions to be sent :param timeout: timeout for network I/O operations in seconds :return: None :raise: grpc.RpcError with .code() available in case of any error """ if not timeout: timeout = self._timeout tx_list = endpoint_pb2.TxList() tx_list.transactions.extend(transactions) self._command_service_stub.ListTorii(tx_list, timeout=timeout) def send_query(self, query, timeout=None): """ Send a query to Iroha :param query: protobuf Query :param timeout: timeout for network I/O operations in seconds :return: a protobuf response to the query :raise: grpc.RpcError with .code() available in case of any error """ if not timeout: timeout = self._timeout response = self._query_service_stub.Find(query, timeout=timeout) return response def send_blocks_stream_query(self, query, timeout=None): """ Send a blocks stream query to Iroha :param query: protobuf BlocksQuery :param timeout: timeout for network I/O operations in seconds :return: an iterable over a stream of blocks :raise: grpc.RpcError with .code() available in case of any error """ if not timeout: timeout = self._timeout response = self._query_service_stub.FetchCommits( query, timeout=timeout) for block in response: yield block def tx_status(self, transaction, timeout=None): """ Request the status of a transaction :param transaction: the transaction whose status is requested :param timeout: timeout for network I/O operations in seconds :return: a tuple with the symbolic status description, integral status code, and error code (will be 0 if no error occurred) :raise: grpc.RpcError with .code() available in case of any error """ if not timeout: timeout = self._timeout request = endpoint_pb2.TxStatusRequest() request.tx_hash = binascii.hexlify(IrohaCrypto.hash(transaction)) response = self._command_service_stub.Status(request, timeout=timeout) return self._parse_tx_status(response) def tx_status_stream(self, transaction, timeout=None): """ Generator of transaction statuses from the status stream :param transaction: the transaction whose status is requested :param timeout: timeout for network I/O operations in seconds :return: an iterable over a series of tuples with symbolic status description, integral status code, and error code (will be 0 if no error occurred) :raise: grpc.RpcError with .code() available in case of any error """ tx_hash = IrohaCrypto.hash(transaction) yield from self.tx_hash_status_stream(tx_hash, timeout) def tx_hash_status_stream(self, transaction_hash: "str or bytes", timeout=None): """ Generator of transaction statuses from the status stream :param transaction_hash: the hash of the transaction whose status is requested :param timeout: timeout for network I/O operations in seconds :return: an iterable over a series of tuples with symbolic status description, integral status code, and error code (will be 0 if no error occurred) :raise: grpc.RpcError with .code() available in case of any error """ if not timeout: timeout = self._timeout request = endpoint_pb2.TxStatusRequest() if isinstance(transaction_hash, bytes): request.tx_hash = binascii.hexlify(transaction_hash) else: request.tx_hash = transaction_hash.encode('utf-8') response = self._command_service_stub.StatusStream( request, timeout=timeout) for status in response: status_name, status_code, error_code = self._parse_tx_status( status) yield status_name, status_code, error_code @staticmethod def _parse_tx_status(response): """ Parse protocol.ToriiResponse into a tuple :param response: response to be parsed 
:return: a tuple with the symbolic status description, integral status code, and error code (will be 0 if no error occurred) """ status_name = endpoint_pb2.TxStatus.Name(response.tx_status) status_code = response.tx_status error_code = response.error_code return status_name, status_code, error_code
""" Creates a protobuf transaction with specified set of entities :param commands: list of commands generated via command factory method :param quorum: required number of signatures, 1 is default :param creator_account: id of transaction creator account :param created_time: transaction creation timestamp in milliseconds :return: a proto transaction """ assert creator_account or self.creator_account, \ "No account name specified as transaction creator id" if not created_time: created_time = self.now() if not creator_account: creator_account = self.creator_account tx = transaction_pb2.Transaction() core_payload = tx.payload.reduced_payload # setting transaction contents core_payload.quorum = quorum core_payload.created_time = created_time core_payload.creator_account_id = creator_account core_payload.commands.extend(commands) return tx
redirect.js
module.exports = { "redirect": [ /**************************************************** * POTENTIALLY TEMPORARY REDIRECTS ****************************************************/ /**************************************************** * From Studio (potentially temporary until the support of old bundles stops, mapped) ****************************************************/ { from: "/refguide/web-modeler/domain-models-association-properties-wm", to: "/studio/domain-models-association-properties" }, { from: "/refguide/web-modeler", to: "/studio/" }, { from: "/refguide/web-modeler/microflows-wm", to: "/studio/microflows" }, { from: "/refguide/web-modeler/microflows-expressions-wm", to: "/studio/microflows-expressions" }, { from: "/refguide/web-modeler/app-settings-wm", to: "/studio/settings-widget-overview" }, { from: "/howto/sap/use-sap-odata-model-creator", to: "/partners/sap/use-sap-odata-model-creator" }, { from: "/refguide/siemens/mindsphere-module-details", to: "/partners/siemens/mindsphere-module-details" }, /**************************************************** * PERMANENT REDIRECTS ****************************************************/ /**************************************************** * Documents (permanent, unmapped) ****************************************************/ { from: "/docs/Overview", to: "/" }, { from: "/docs/", to: "/" }, /**************************************************** * Studio Pro Guide (permanent, mapped) ****************************************************/ { from: "/refguide/mindsphere/mindsphere-module-details", to: "/partners/siemens/mindsphere-module-details" }, /**************************************************** * Reference Guide version 7 (permanent, unmapped) ****************************************************/ { from: "/refguide/moving-from-6-to-7", to: "/refguide7/moving-from-6-to-7" }, /**************************************************** * How-to's (permanent, unmapped) ****************************************************/ { from: "/howtogeneral/bestpractices/ux-best-practices", to: "/howto/front-end/ux-best-practices" }, { from: "/howtogeneral/bestpractices/best-practices-security-and-improvements-for-mendix-applications", to: "/howto/security/best-practices-security" }, { from: "/howtogeneral/bestpractices/best-practices-for-app-performance-in-mendix-7", to: "/howto/general/community-best-practices-for-app-performance" }, { from: "/howto/ux/configuring-your-theme", to: "/howto/front-end/configuring-your-theme" }, /**************************************************** * How-to's version 7 (permanent, unmapped) ****************************************************/ { from: "/howto7/ux/configuring-your-theme", to: "/howto7/front-end/configuring-your-theme" }, /**************************************************** * Studio Guide (permanent, unmapped) ****************************************************/ { from: "/howto/tutorials/", to: "/studio/general" }, { from: "/howto/tutorials/mendix-tutorials", to: "/studio/general" }, /**************************************************** * Developer Portal Guide (permanent, unmapped) ****************************************************/ { from: "/howtogeneral/mendixcloud/trends", to: "/developerportal/operate/trends" }, { from: "/developerportal/operate/mendix-cloud-status", to: "/developerportal/deploy/mendix-cloud-status" }, { from: "/community/tools/the-mendix-job-board", to: "/developerportal/community-tools/mendix-job-board" }, { from: "/community/tools/the-mendix-mvp-program", to: 
"/developerportal/community-tools/mendix-mvp-program" }, { from: "/mendixcloud/deploying-to-the-cloud", to: "/developerportal/deploy/mendix-cloud-deploy" }, { from: "/mendixcloud/maintenance-windows", to: "/developerportal/deploy/maintenance-windows" }, { from: "/developerportal/howto/migrating-to-v4", to: "/developerportal/deploy/migrating-to-v4" }, { from: "/deployment/mendixcloud/how-to-deploy-a-mendix-app-on-azure", to: "/developerportal/deploy/azure-deploy" }, { from: "/mendixcloud/how-to-link-app-to-node", to: "/developerportal/deploy/licensing-apps" }, { from: "/howtogeneral/support/", to: "/developerportal/support/" }, { from: "/community/app-store/", to: "/developerportal/app-store/" }, { from: "/community/app-store/app-store-overview", to: "/developerportal/app-store/app-store-overview" }, { from: "/deployment/mendixcloud/sending-email", to: "/developerportal/deploy/sending-email" }, /**************************************************** * From the Developer Portal (permanent, mapped) ****************************************************/ { from: "/developerportal/settings/technical-contact", to: "/developerportal/company-app-roles/technical-contact" }, { from: "/developerportal/general/technical-contact", to: "/developerportal/company-app-roles/technical-contact" }, { from: "/deployment/mendixcloud/certificates", to: "/developerportal/deploy/certificates" }, { from: "/refguide/certificates", to: "/developerportal/deploy/certificates" }, { from: "/mendixcloud/monitoring-application-health", to: "/developerportal/operate/monitoring-application-health" }, { from: "/developerportal/howto/deploying-to-the-cloud", to: "/developerportal/deploy/mendix-cloud-deploy" }, { from: "/deployment/on-premises/deploy-mendix-on-microsoft-windows", to: "/developerportal/deploy/deploy-mendix-on-microsoft-windows" }, { from: "/deployment/on-premises", to: "/developerportal/deploy/on-premises-design" }, { from: "/developerportal/community-tools/the-mendix-job-board", to: "/developerportal/community-tools/mendix-job-board" }, { from: "/refguide/publish-packages-to-mobile-stores", to: "/howto/mobile/publishing-a-mendix-hybrid-mobile-app-in-mobile-app-stores" }, { from: "/refguide/team-server", to: "/developerportal/develop/team-server" }, { from: "/developerportal/deploy/integrate-with-mendix-sso", to: "/developerportal/deploy/mendix-sso" }, /**************************************************** * From the App Store (permanent, mapped) ****************************************************/ { from: "/community/app-store/use-app-store-content-in-the-modeler", to: "/developerportal/app-store/app-store-content" }, { from: "/developerportal/app-store/use-app-store-content-in-the-modeler", to: "/developerportal/app-store/app-store-content" }, { from: "/mendixcloud/java-in-the-cloud", to: "/developerportal/deploy/java-in-the-cloud" }, { from: "/mendixcloud/security-constraints-in-the-mendix-cloud", to: "/developerportal/deploy/java-in-the-cloud" }, { from: "/howto50/Contributing+to+a+GitHub+repository", to: "/howto/collaboration-requirements-management/contribute-to-a-github-repository" }, { from: "/howto/collaboration-project-management/contribute-to-a-github-repository", to: "/howto/collaboration-requirements-management/contribute-to-a-github-repository" }, { from: "/mendixcloud/Integrate+your+app+with+Mendix+SSO", to: "/developerportal/deploy/integrate-with-mendix-sso" }, { from: "/howto/ux/create-a-custom-theme-with-the-mendix-ui-framework", to: "/howto/front-end/atlas-ui" }, { from: 
"/howto/front-end/create-a-custom-theme-with-the-mendix-ui-framework", to: "/howto/front-end/atlas-ui" }, { from: "/releasenotes/desktop-modeler/", to: "/releasenotes/studio-pro/" }, { from: "/releasenotes/desktop-modeler/8.0", to: "/releasenotes/studio-pro/8.0" }, { from: "/releasenotes/desktop-modeler/7.23", to: "/releasenotes/studio-pro/7.23" }, { from: "/releasenotes/desktop-modeler/7.22", to: "/releasenotes/studio-pro/7.22" }, { from: "/releasenotes/desktop-modeler/7.21", to: "/releasenotes/studio-pro/7.21" }, { from: "/releasenotes/desktop-modeler/7.20", to: "/releasenotes/studio-pro/7.20" }, { from: "/releasenotes/desktop-modeler/7.19", to: "/releasenotes/studio-pro/7.19" }, { from: "/releasenotes/desktop-modeler/7.18", to: "/releasenotes/studio-pro/7.18" }, { from: "/releasenotes/desktop-modeler/7.17", to: "/releasenotes/studio-pro/7.17" }, { from: "/releasenotes/desktop-modeler/7.16", to: "/releasenotes/studio-pro/7.16" }, { from: "/releasenotes/desktop-modeler/7.15", to: "/releasenotes/studio-pro/7.15" }, { from: "/releasenotes/desktop-modeler/7.14", to: "/releasenotes/studio-pro/7.14" }, { from: "/releasenotes/desktop-modeler/7.13", to: "/releasenotes/studio-pro/7.13" }, { from: "/releasenotes/desktop-modeler/7.12", to: "/releasenotes/studio-pro/7.12" }, { from: "/releasenotes/desktop-modeler/7.11", to: "/releasenotes/studio-pro/7.11" }, { from: "/releasenotes/desktop-modeler/7.10", to: "/releasenotes/studio-pro/7.10" }, { from: "/releasenotes/desktop-modeler/7.9", to: "/releasenotes/studio-pro/7.9" }, { from: "/releasenotes/desktop-modeler/7.8", to: "/releasenotes/studio-pro/7.8" }, { from: "/releasenotes/desktop-modeler/7.7", to: "/releasenotes/studio-pro/7.7" }, { from: "/releasenotes/desktop-modeler/7.6", to: "/releasenotes/studio-pro/7.6" }, { from: "/releasenotes/desktop-modeler/7.5", to: "/releasenotes/studio-pro/7.5" }, { from: "/releasenotes/desktop-modeler/7.4", to: "/releasenotes/studio-pro/7.4" }, { from: "/releasenotes/desktop-modeler/7.3", to: "/releasenotes/studio-pro/7.3" }, { from: "/releasenotes/desktop-modeler/7.2", to: "/releasenotes/studio-pro/7.2" }, { from: "/releasenotes/desktop-modeler/7.1", to: "/releasenotes/studio-pro/7.1" }, { from: "/releasenotes/desktop-modeler/7.0", to: "/releasenotes/studio-pro/7.0" }, { from: "/releasenotes/desktop-modeler/6.10", to: "/releasenotes/studio-pro/6.10" }, { from: "/releasenotes/desktop-modeler/6.9", to: "/releasenotes/studio-pro/6.9" }, { from: "/releasenotes/desktop-modeler/6.8", to: "/releasenotes/studio-pro/6.8" }, { from: "/releasenotes/desktop-modeler/6.7", to: "/releasenotes/studio-pro/6.7" }, { from: "/releasenotes/desktop-modeler/6.6", to: "/releasenotes/studio-pro/6.6" }, { from: "/releasenotes/desktop-modeler/6.5", to: "/releasenotes/studio-pro/6.5" }, { from: "/releasenotes/desktop-modeler/6.4", to: "/releasenotes/studio-pro/6.4" }, { from: "/releasenotes/desktop-modeler/6.3", to: "/releasenotes/studio-pro/6.3" }, { from: "/releasenotes/desktop-modeler/6.2", to: "/releasenotes/studio-pro/6.2" }, { from: "/releasenotes/desktop-modeler/6.1", to: "/releasenotes/studio-pro/6.1" }, { from: "/releasenotes/desktop-modeler/6.0", to: "/releasenotes/studio-pro/6.0" }, /**************************************************** * From the Support Portal (permanent, mapped) ****************************************************/ { from: "/community/app-store-content-support", to: "/developerportal/app-store/app-store-content-support" }, { from: "/mendixcloud/custom-domains", to: "/developerportal/deploy/custom-domains" }, { 
from: "/developerportal/howto/how-to-link-app-to-node", to: "/developerportal/deploy/licensing-apps" }, { from: "/developerportal/howto/how-to-link-a-different-app-to-a-node", to: "/developerportal/deploy/licensing-apps" }, { from: "/developerportal/support/new-app-request-template", to: "/developerportal/support/new-app-node-request-template" }, { from: "/developerportal/support/export-a-project-package", to: "/refguide/export-project-package-dialog" }, { from: "/developerportal/support/change-affected-apps", to: "/developerportal/support/prepare-your-project" }, /**************************************************** * From Studio Pro version 8 (permanent as backups, mapped) ****************************************************/ { from: "/refguide8/modeler", to: "/refguide/modeling" }, { from: "/refguide8/Modeler", to: "/refguide/modeling" }, { from: "/refguide8/desktop-modeler", to: "/refguide/modeling" }, { from: "/refguide8/desktop-modeler-overview", to: "/refguide/studio-pro-overview" }, { from: "/refguide8/download-from-team-server-dialog", to: "/refguide/download-from-version-control-dialog" }, { from: "/refguide8/open-project-dialog", to: "/refguide/open-app-dialog" }, { from: "/refguide8/upload-to-team-server-dialog", to: "/refguide/upload-to-version-control-dialog" }, { from: "/refguide8/microflow-expressions", to: "/refguide/expressions" }, { from: "/deployment/cloud-foundry/", to: "/developerportal/deploy/cloud-foundry-deploy" }, { from: "/howto8/solving-load-and-import-errors", to: "/howto/monitoring-troubleshooting/solving-load-and-import-errors" }, { from: "/refguide8/drop-down-widget", to: "/refguide/drop-down" }, { from: "/refguide8/horizontal-split-pane", to: "/refguide/scroll-container" }, { from: "/refguide8/vertical-split-pane", to: "/refguide/scroll-container" }, { from: "/refguide8/Select++Elements", to: "/refguide/select--elements" }, { from: "/refguide8/Developing+Hybrid+Mobile+Apps", to: "/refguide/developing-hybrid-mobile-apps" }, { from: "/refguide8/enumeration-values", to: "/refguide/enumerations" }, { from: "/refguide8/inheritance-split", to: "/refguide/object-type-decision" }, { from: "/refguide8/exclusive-split", to: "/refguide/decision" }, { from: "/howto/deploying-a-mendix-app-to-cloud-foundry", to: "/developerportal/deploy/cloud-foundry-deploy" }, { from: "/refguide8/menu-item", to: "/refguide/menu" }, { from: "/refguide8/Show+Page", to: "/refguide/show-page" }, { from: "/refguide8/Validation+Feedback", to: "/refguide/validation-feedback" }, { from: "/refguide8/Show+Message", to: "/refguide/show-message" }, { from: "/refguide8/Show+Home+Page", to: "/refguide/show-home-page" }, { from: "/refguide8/Download+File", to: "/refguide/download-file" }, { from: "/refguide8/Close+Form", to: "/refguide/close-page" }, { from: "/refguide8/Row+(document+template)", to: "/refguide/row-document-template" }, { from: "/refguide8/Table+(document+template", to: "/refguide/table-document-template" }, { from: "/refguide8/Cell+(document+template)", to: "/refguide/cell-document-template" }, { from: "/refguide8/Static+Image+(document+template)", to: "/refguide/static-image-document-template" }, { from: "/refguide8/Title+(document+template)", to: "/refguide/title-document-template" }, { from: "/refguide8/Static+label+(document+template)", to: "/refguide/static-label-document-template" }, { from: "/refguide8/Page+Break+(document+template)", to: "/refguide/page-break-document-template" }, { from: "/refguide8/Line+Break+(document+template)", to: "/refguide/line-break-document-template" 
}, { from: "/refguide8/Header+(document+template)", to: "/refguide/header-document-template" }, { from: "/refguide8/Footer+(document+template)", to: "/refguide/footer-document-template" }, { from: "/refguide8/Dynamic+label+(document+template)", to: "/refguide/dynamic-label-document-template" }, { from: "/refguide8/Template+Grid+(document+template)", to: "/refguide/template-grid-document-template" }, { from: "/refguide8/Data+Grid+(document+template)", to: "/refguide/data-grid-document-template" }, { from: "/refguide8/Sort+Bar", to: "/refguide/sort-bar" }, { from: "/refguide8/Columns+(document+template)", to: "/refguide/columns-document-template" }, { from: "/refguide8/Dynamic+Image+(document+template)", to: "/refguide/dynamic-image-document-template" }, { from: "/refguide8/Data+View+(document+template)", to: "/refguide/data-view-document-template" }, { from: "/refguide8/Document+Templates", to: "/refguide/document-templates" }, { from: "/refguide8/tab-page", to: "/refguide/tab-container" }, { from: "/refguide8/action-button", to: "/refguide/button-properties" }, { from: "/refguide8/drop-down-button", to: "/refguide/button-properties" }, { from: "/refguide8/image-property", to: "/refguide/button-properties" }, { from: "/refguide8/grid-action-button", to: "/refguide/control-bar" }, { from: "/refguide8/remove-button", to: "/refguide/control-bar" }, { from: "/refguide8/select-button", to: "/refguide/control-bar" }, { from: "/refguide8/add-button", to: "/refguide/control-bar" }, { from: "/refguide8/deselect-all-button", to: "/refguide/control-bar" }, { from: "/refguide8/export-to-csv-button", to: "/refguide/control-bar" }, { from: "/refguide8/export-to-excel-button", to: "/refguide/control-bar" }, { from: "/refguide8/grid-new-button", to: "/refguide/control-bar" }, { from: "/refguide8/search-button", to: "/refguide/control-bar" }, { from: "/refguide8/select-all-button", to: "/refguide/control-bar" }, { from: "/refguide8/comparison-search-field", to: "/refguide/search-bar" }, { from: "/refguide8/drop-down-search-field", to: "/refguide/search-bar" }, { from: "/refguide8/range-search-field", to: "/refguide/search-bar" }, { from: "/refguide8/opening-pages", to: "/refguide/pages" }, { from: "/refguide8/starting-microflows", to: "/refguide/on-click-event" }, { from: "/refguide8/app-settings-dialog", to: "/refguide/new-project" }, /**************************************************** * From Desktop Modeler version 7 (permanent, mapped) ****************************************************/ { from: "/refguide7/modeler", to: "/refguide7/desktop-modeler-overview" }, { from: "/refguide7/Modeler", to: "/refguide7/desktop-modeler-overview" }, { from: "/refguide7/desktop-webmodeler", to: "/refguide7/collaborative-development" }, { from: "/web-modeler/general-sync-webmodeler-desktopmodeler-wm", to: "/refguide7/collaborative-development" }, { from: "/howto/web-modeler/syncing-webmodeler-desktop", to: "/refguide7/collaborative-development" }, { from: "/refguide7/sync-webmodeler-desktopmodeler", to: "/refguide7/collaborative-development" }, { from: "/refguide7/download-from-team-server-dialog", to: "/refguide7/download-from-version-control-dialog" }, { from: "/refguide7/open-project-dialog", to: "/refguide7/open-app-dialog" }, { from: "/refguide7/upload-to-team-server-dialog", to: "/refguide7/upload-to-version-control-dialog" }, { from: "/refguide7/microflow-expressions", to: "/refguide7/expressions" }, { from: "/deployment/cloud-foundry/", to: "/developerportal/deploy/cloud-foundry-deploy" }, { from: 
"/howto7/solving-load-and-import-errors", to: "/howto7/monitoring-troubleshooting/solving-load-and-import-errors" }, { from: "/refguide7/drop-down-widget", to: "/refguide7/drop_down" }, { from: "/refguide7/horizontal-split-pane", to: "/refguide7/scroll-container" }, { from: "/refguide7/vertical-split-pane", to: "/refguide7/scroll-container" }, { from: "/refguide7/Select++Elements", to: "/refguide7/select--elements" }, { from: "/refguide7/Developing+Hybrid+Mobile+Apps", to: "/refguide7/developing-hybrid-mobile-apps" }, { from: "/howto7/deploying-a-mendix-app-to-cloud-foundry", to: "/developerportal/deploy/cloud-foundry-deploy" }, { from: "/refguide7/Show+Page", to: "/refguide7/show-page" }, { from: "/refguide7/Validation+Feedback", to: "/refguide7/validation-feedback" }, { from: "/refguide7/Show+Message", to: "/refguide7/show-message" }, { from: "/refguide7/Show+Home+Page", to: "/refguide7/show-home-page" }, { from: "/refguide7/Download+File", to: "/refguide7/download-file" }, { from: "/refguide7/Close+Form", to: "/refguide7/close-page" }, { from: "/refguide7/Row+(document+template)", to: "/refguide7/row-document-template" }, { from: "/refguide7/Table+(document+template", to: "/refguide7/table-document-template" }, { from: "/refguide7/Cell+(document+template)", to: "/refguide7/cell-document-template" }, { from: "/refguide7/Static+Image+(document+template)", to: "/refguide7/static-image-document-template" }, { from: "/refguide7/Title+(document+template)", to: "/refguide7/title-document-template" }, { from: "/refguide7/Static+label+(document+template)", to: "/refguide7/static-label-document-template" }, { from: "/refguide7/Page+Break+(document+template)", to: "/refguide7/page-break-document-template" }, { from: "/refguide7/Line+Break+(document+template)", to: "/refguide7/line-break-document-template" }, { from: "/refguide7/Header+(document+template)", to: "/refguide7/header-document-template" }, { from: "/refguide7/Footer+(document+template)", to: "/refguide7/footer-document-template" }, { from: "/refguide7/Dynamic+label+(document+template)", to: "/refguide7/dynamic-label-document-template" }, { from: "/refguide7/Template+Grid+(document+template)", to: "/refguide7/template-grid-document-template" }, { from: "/refguide7/Data+Grid+(document+template)", to: "/refguide7/data-grid-document-template" }, { from: "/refguide7/Sort+Bar", to: "/refguide7/sort-bar" }, { from: "/refguide7/Columns+(document+template)", to: "/refguide7/columns-document-template" }, { from: "/refguide7/Dynamic+Image+(document+template)", to: "/refguide7/dynamic-image-document-template" }, { from: "/refguide7/Data+View+(document+template)", to: "/refguide7/data-view-document-template" }, { from: "/refguide7/Document+Templates", to: "/refguide7/document-templates" }, /**************************************************** * From Desktop Modeler version 6 (permanent) ****************************************************/ { from: "/refguide6/Reference+Guide+6", to: "/refguide6/" }, { from: "/refguide6/Modeler", to: "/refguide6/modeler" }, { from: "/refguide6/TreeNavigation", to: "/refguide6/" }, { from: "/refguide6/Access+Rules", to: "/refguide6/access-rules" }, { from: "/refguide6/Action+Button", to: "/refguide6/action-button" }, { from: "/refguide6/Action+Call+Activities", to: "/refguide6/action-call-activities" }, { from: "/refguide6/Actions", to: "/refguide6/actions" }, { from: "/refguide6/Activities", to: "/refguide6/activities" }, { from: "/refguide6/Add+button", to: "/refguide6/add-button" }, { from: 
"/refguide6/Add+date+function+calls", to: "/refguide6/add-date-function-calls" }, { from: "/refguide6/Administrator", to: "/refguide6/administrator" }, { from: "/refguide6/Aggregate+List", to: "/refguide6/aggregate-list" }, { from: "/refguide6/Annotation", to: "/refguide6/annotation" }, { from: "/refguide6/Annotation+flow", to: "/refguide6/annotation-flow" }, { from: "/refguide6/Annotations", to: "/refguide6/annotations" }, { from: "/refguide6/Anonymous+Users", to: "/refguide6/anonymous-users" }, { from: "/refguide6/App+Platform", to: "/refguide6/app-platform" }, { from: "/refguide6/App+Settings+Dialog", to: "/refguide6/app-settings-dialog" }, { from: "/refguide6/Arithmetic+expressions", to: "/refguide6/arithmetic-expressions" }, { from: "/refguide6/Association+Source", to: "/refguide6/association-source" }, { from: "/refguide6/Associations", to: "/refguide6/associations" }, { from: "/refguide6/Attributes", to: "/refguide6/attributes" }, { from: "/refguide6/Back+button", to: "/refguide6/back-button" }, { from: "/refguide6/Basic+Reports", to: "/refguide6/basic-reports" }, { from: "/refguide6/Between+date+function+calls", to: "/refguide6/between-date-function-calls" }, { from: "/refguide6/Boolean+expressions", to: "/refguide6/boolean-expressions" }, { from: "/refguide6/Branch+Line+Manager+Dialog", to: "/refguide6/branch-line-manager-dialog" }, { from: "/refguide6/Break+Event", to: "/refguide6/break-event" }, { from: "/refguide6/Button+Widgets", to: "/refguide6/button-widgets" }, { from: "/refguide6/Call+Rest+Action", to: "/refguide6/call-rest-action" }, { from: "/refguide6/Call+Web+Service", to: "/refguide6/call-web-service" }, { from: "/refguide6/Call+Web+Service+Action", to: "/refguide6/call-web-service-action" }, { from: "/refguide6/Cancel+button", to: "/refguide6/cancel-button" }, { from: "/refguide6/Cast+Object", to: "/refguide6/cast-object" }, { from: "/refguide6/Cell+Document+Template", to: "/refguide6/cell-document-template" }, { from: "/refguide6/certificates", to: "/developerportal/deploy/certificates" }, { from: "/refguide6/Change+List", to: "/refguide6/change-list" }, { from: "/refguide6/Change+Object", to: "/refguide6/change-object" }, { from: "/refguide6/Change+Variable", to: "/refguide6/change-variable" }, { from: "/refguide6/Check+box", to: "/refguide6/check-box" }, { from: "/refguide6/Client+Activities", to: "/refguide6/client-activities" }, { from: "/refguide6/Close+Form", to: "/refguide6/close-form" }, { from: "/refguide6/Close+page+button", to: "/refguide6/close-page-button" }, { from: "/refguide6/Clustered+Mendix+Runtime", to: "/refguide6/clustered-mendix-runtime" }, { from: "/refguide6/Clustered+Mendix+Business+Server", to: "/refguide6/clustered-mendix-runtime" }, { from: "/refguide6/Columns", to: "/refguide6/columns" }, { from: "/refguide6/Columns+Document+Template", to: "/refguide6/columns-document-template" }, { from: "/refguide6/Commit+Dialog", to: "/refguide6/commit-dialog" }, { from: "/refguide6/Commit+Object(s)", to: "/refguide6/committing-objects" }, { from: "/refguide6/Committing+Objects", to: "/refguide6/committing-objects" }, { from: "/refguide6/Common+Widget+Properties", to: "/refguide6/common-widget-properties" }, { from: "/refguide6/Common+Widgets", to: "/refguide6/common-widgets" }, { from: "/refguide6/Comparison+Search+Field", to: "/refguide6/comparison-search-field" }, { from: "/refguide6/Conditions", to: "/refguide6/conditions" }, { from: "/refguide6/Configuration", to: "/refguide6/configuration" }, { from: 
"/refguide6/Configuring+Hybrid+Mobile+Apps+To+Run+Offline", to: "/refguide6/configuring-hybrid-mobile-apps-to-run-offline" }, { from: "/refguide6/Constants", to: "/refguide6/constants" }, { from: "/refguide6/Consumed+App+Services", to: "/refguide6/consumed-app-services" }, { from: "/refguide6/Consumed+REST+Services", to: "/refguide6/consumed-rest-services" }, { from: "/refguide6/Consumed+web+service", to: "/refguide6/consumed-web-service" }, { from: "/refguide6/Consumed+Web+Services", to: "/refguide6/consumed-web-services" }, { from: "/refguide6/Container", to: "/refguide6/container" }, { from: "/refguide6/Container+Widgets", to: "/refguide6/container-widgets" }, { from: "/refguide6/Context+Mechanism", to: "/refguide6/context-mechanism" }, { from: "/refguide6/Continue+Event", to: "/refguide6/continue-event" }, { from: "/refguide6/Control+Bar", to: "/refguide6/control-bar" }, { from: "/refguide6/Create+Branch+Line+Dialog", to: "/refguide6/create-branch-line-dialog" }, { from: "/refguide6/Create+Deployment+Package+Dialog", to: "/refguide6/create-deployment-package-dialog" }, { from: "/refguide6/Create+List", to: "/refguide6/create-list" }, { from: "/refguide6/Create+Object", to: "/refguide6/create-object" }, { from: "/refguide6/Create+Variable", to: "/refguide6/create-variable" }, { from: "/refguide6/Custom+Settings", to: "/refguide6/custom-settings" }, { from: "/refguide6/Customizing+Hybrid+Mobile+Apps", to: "/refguide6/customizing-hybrid-mobile-apps" }, { from: "/refguide6/Customizing+PhoneGap+Build+packages", to: "/refguide6/customizing-phonegap-build-packages" }, { from: "/refguide6/Data+grid", to: "/refguide6/data-grid" }, { from: "/refguide6/Data+Grid+Document+Template", to: "/refguide6/data-grid-document-template" }, { from: "/refguide6/Data+Sets", to: "/refguide6/data-sets" }, { from: "/refguide6/Data+Sources", to: "/refguide6/data-sources" }, { from: "/refguide6/Data+Storage", to: "/refguide6/data-storage" }, { from: "/refguide6/Data+Types", to: "/refguide6/data-types" }, { from: "/refguide6/Data+view", to: "/refguide6/data-view" }, { from: "/refguide6/Data+view+action+button", to: "/refguide6/data-view-action-button" }, { from: "/refguide6/Data+view+cancel+button", to: "/refguide6/data-view-cancel-button" }, { from: "/refguide6/Data+view+close+button", to: "/refguide6/data-view-close-button" }, { from: "/refguide6/Data+view+control+bar", to: "/refguide6/data-view-control-bar" }, { from: "/refguide6/Data+View+Document+Template", to: "/refguide6/data-view-document-template" }, { from: "/refguide6/Data+view+save+button", to: "/refguide6/data-view-save-button" }, { from: "/refguide6/Data+Widgets", to: "/refguide6/data-widgets" }, { from: "/refguide6/Database+Source", to: "/refguide6/database-source" }, { from: "/refguide6/Date+and+Time+Handling+in+3.0", to: "/refguide6/date-and-time-handling-in-3.0" }, { from: "/refguide6/Date+creation", to: "/refguide6/date-creation" }, { from: "/refguide6/Date+picker", to: "/refguide6/date-picker" }, { from: "/refguide6/Date+Range+Field", to: "/refguide6/date-range-field" }, { from: "/refguide6/Date+Range+Selector", to: "/refguide6/date-range-selector" }, { from: "/refguide6/DateTime+handling+FAQ", to: "/refguide6/datetime-handling-faq" }, { from: "/refguide6/DB2", to: "/refguide6/db2" }, { from: "/refguide6/Delete+button", to: "/refguide6/delete-button" }, { from: "/refguide6/Delete+Object(s)", to: "/refguide6/deleting-objects" }, { from: "/refguide6/Deleting+Objects", to: "/refguide6/deleting-objects" }, { from: "/refguide6/Demo+Users", to: 
"/refguide6/demo-users" }, { from: "/refguide6/Deploy+To+The+Cloud+Dialog", to: "/refguide6/deploy-to-the-cloud-dialog" }, { from: "/refguide6/Deselect+all+button", to: "/refguide6/deselect-all-button" }, { from: "/refguide6/Desktop+profile", to: "/refguide6/desktop-profile" }, { from: "/refguide6/Developing+Hybrid+Mobile+Apps", to: "/refguide6/developing-hybrid-mobile-apps" }, { from: "/refguide6/Dialogs", to: "/refguide6/dialogs" }, { from: "/refguide6/Document+Generation+Activities", to: "/refguide6/document-generation-activities" }, { from: "/refguide6/Document+Template", to: "/refguide6/document-template" }, { from: "/refguide6/Document+Templates", to: "/refguide6/document-templates" }, { from: "/refguide6/Domain+Model", to: "/refguide6/domain-model" }, { from: "/refguide6/Download+File", to: "/refguide6/download-file" }, { from: "/refguide6/Download+From+Team+Server+Dialog", to: "/refguide6/download-from-team-server-dialog" }, { from: "~\/refguide6\/Drop-down", to: "/refguide6/drop_down", "exact": true }, { from: "/refguide6/Drop+Down+Widget", to: "/refguide6/drop_down" }, { from: "/refguide6/Drop+Down Widget", to: "/refguide6/drop_down" }, { from: "/refguide6/Drop+Down", to: "/refguide6/drop-down" }, { from: "/refguide6/Drop+down+button", to: "/refguide6/drop-down-button" }, { from: "/refguide6/Drop+Down+Search+Field", to: "/refguide6/drop-down-search-field" }, { from: "/refguide6/Dynamic+Image+Document+Template", to: "/refguide6/dynamic-image-document-template" }, { from: "/refguide6/Dynamic+Label+Document+Template", to: "/refguide6/dynamic-label-document-template" }, { from: "/refguide6/Edit+button", to: "/refguide6/edit-button" }, { from: "/refguide6/Edit+Cloud+Foundry+Settings+Dialog", to: "/refguide6/edit-cloud-foundry-settings-dialog" }, { from: "/refguide6/End+Event", to: "/refguide6/end-event" }, { from: "/refguide6/Entities", to: "/refguide6/entities" }, { from: "/refguide6/Entity+Path+Source", to: "/refguide6/entity-path-source" }, { from: "/refguide6/Enumeration+Values", to: "/refguide6/enumeration-values" }, { from: "/refguide6/Enumerations", to: "/refguide6/enumerations" }, { from: "/refguide6/Enumerations+in+microflow+expressions", to: "/refguide6/enumerations-in-microflow-expressions" }, { from: "/refguide6/Error+Event", to: "/refguide6/error-event" }, { from: "/refguide6/Event+Handlers", to: "/refguide6/event-handlers" }, { from: "/refguide6/Exclusive+Split", to: "/refguide6/exclusive-split" }, { from: "/refguide6/Export+Mapping+Action", to: "/refguide6/export-mapping-action" }, { from: "/refguide6/Export+Mappings", to: "/refguide6/export-mappings" }, { from: "/refguide6/Export+to+CSV+button", to: "/refguide6/export-to-csv-button" }, { from: "/refguide6/Export+to+excel+button", to: "/refguide6/export-to-excel-button" }, { from: "/refguide6/Export+XML", to: "/refguide6/export-xml" }, { from: "/refguide6/File+manager", to: "/refguide6/file-manager" }, { from: "/refguide6/File+Widgets", to: "/refguide6/file-widgets" }, { from: "/refguide6/Footer+Document+Template", to: "/refguide6/footer-document-template" }, { from: "/refguide6/Garbage+collection", to: "/refguide6/garbage-collection" }, { from: "/refguide6/General", to: "/refguide6/general" }, { from: "/refguide6/Generate+Document", to: "/refguide6/generate-document" }, { from: "/refguide6/Getting+the+Mendix+Developer+App", to: "/refguide6/getting-the-mendix-developer-app" }, { from: "/refguide6/Grid+action+button", to: "/refguide6/grid-action-button" }, { from: "/refguide6/Grid+microflow+button", to: 
"/refguide6/grid-microflow-button" }, { from: "/refguide6/Grid+New+Button", to: "/refguide6/grid-new-button" }, { from: "/refguide6/Group+box", to: "/refguide6/group-box" }, { from: "/refguide6/Section", to: "/refguide6/group-box" }, { from: "/refguide6/Header", to: "/refguide6/header" }, { from: "/refguide6/Header+Document+Template", to: "/refguide6/header-document-template" }, { from: "/refguide6/History+Dialog", to: "/refguide6/history-dialog" }, { from: "/refguide6/Horizontal+Split+Pane", to: "/refguide6/horizontal-split-pane" }, { from: "/refguide6/If+expressions", to: "/refguide6/if-expressions" }, { from: "/refguide6/Image", to: "/refguide6/image" }, { from: "/refguide6/Image+uploader", to: "/refguide6/image-uploader" }, { from: "/refguide6/Image+viewer", to: "/refguide6/image-viewer" }, { from: "/refguide6/Images+refguide", to: "/refguide6/images" }, { from: "/refguide6/Images", to: "/refguide6/images" }, { from: "/refguide6/Image+Property", to: "/refguide6/image-property" }, { from: "/refguide6/Import+Mapping+Action", to: "/refguide6/import-mapping-action" }, { from: "/refguide6/Import+Mappings", to: "/refguide6/import-mappings" }, { from: "/refguide6/Import+XML", to: "/refguide6/import-xml" }, { from: "/refguide6/Inheritance+Split", to: "/refguide6/inheritance-split" }, { from: "/refguide6/Indexes", to: "/refguide6/indexes" }, { from: "/refguide6/Input+reference+set+selector", to: "/refguide6/input-reference-set-selector" }, { from: "/refguide6/Input+Widgets", to: "/refguide6/input-widgets" }, { from: "/refguide6/Integration", to: "/refguide6/integration" }, { from: "/refguide6/Integration+Activities", to: "/refguide6/integration-activities" }, { from: "/refguide6/ISession+API+Usage", to: "/refguide6/isession-api-usage" }, { from: "/refguide6/Java+Action+Call", to: "/refguide6/java-action-call" }, { from: "/refguide6/Java+Actions", to: "/refguide6/java-actions" }, { from: "/refguide6/Java+Memory+Usage+With+Mendix", to: "/refguide6/java-memory-usage-with-mendix" }, { from: "/refguide6/Java+Programming", to: "/refguide6/java-programming" }, { from: "/refguide6/JSON+Structures", to: "/refguide6/json-structures" }, { from: "/refguide6/Keep+alive+mechanism+for+Persistent+Sessions", to: "/refguide6/keep-alive-mechanism-for-persistent-sessions" }, { from: "/refguide6/Label", to: "/refguide6/label" }, { from: "/refguide6/Layout", to: "/refguide6/layout" }, { from: "/refguide6/Layouts", to: "/refguide6/layout" }, { from: "/refguide6/Layout+grid", to: "/refguide6/layout-grid" }, { from: "/refguide6/Layout+Widgets", to: "/refguide6/layout-widgets" }, { from: "/refguide6/Line+Break+Document+Template", to: "/refguide6/line-break-document-template" }, { from: "/refguide6/Link+button", to: "/refguide6/link-button" }, { from: "/refguide6/List+Activities", to: "/refguide6/list-activities" }, { from: "/refguide6/List+Operation", to: "/refguide6/list-operation" }, { from: "/refguide6/List+view", to: "/refguide6/list-view" }, { from: "/refguide6/Listen+To+Grid+Source", to: "/refguide6/listen-to-grid-source" }, { from: "/refguide6/Log+Message", to: "/refguide6/log-message" }, { from: "/refguide6/Logging", to: "/refguide6/logging" }, { from: "/refguide6/Logging+Activities", to: "/refguide6/logging-activities" }, { from: "/refguide6/Loop", to: "/refguide6/loop" }, { from: "/refguide6/Managing+App+Signing+Keys", to: "/refguide6/managing-app-signing-keys" }, { from: "/refguide6/Map+Automatically", to: "/refguide6/map-automatically" }, { from: "/refguide6/Mapping+Documents", to: 
"/refguide6/mapping-documents" }, { from: "/refguide6/Mathematical+function+calls", to: "/refguide6/mathematical-function-calls" }, { from: "/refguide6/Menu", to: "/refguide6/menu" }, { from: "/refguide6/Menu+Bar", to: "/refguide6/menu-bar" }, { from: "/refguide6/Menu+Item", to: "/refguide6/menu-item" }, { from: "/refguide6/Menu+Widgets", to: "/refguide6/menu-widgets" }, { from: "/refguide6/Merge", to: "/refguide6/merge" }, { from: "/refguide6/Merge+Dialog", to: "/refguide6/merge-dialog" }, { from: "/refguide6/Microflow", to: "/refguide6/microflow" }, { from: "/refguide6/Microflow+Activities", to: "/refguide6/microflow-activities" }, { from: "/refguide6/Microflow+Call", to: "/refguide6/microflow-call" }, { from: "/refguide6/Microflow+Element+Common+Properties", to: "/refguide6/microflow-element-common-properties" }, { from: "/refguide6/Microflow+Expressions", to: "/refguide6/microflow-expressions" }, { from: "/refguide6/Microflow+Source", to: "/refguide6/microflow-source" }, { from: "/refguide6/Microflows", to: "/refguide6/microflows" }, { from: "/refguide6/Mobile", to: "/refguide6/mobile" }, { from: "/refguide6/Model+Share", to: "/refguide6/model-share" }, { from: "/refguide6/Modeler", to: "/refguide6/modeler" }, { from: "/refguide6/Module+Role", to: "/refguide6/module-role" }, { from: "/refguide6/Module+Security", to: "/refguide6/module-security" }, { from: "/refguide6/Module+Status", to: "/refguide6/module-status" }, { from: "/refguide6/Modules", to: "/refguide6/modules" }, { from: "/refguide6/Monitoring+-+Mendix+Runtime", to: "/refguide6/monitoring-mendix-runtime" }, { from: "/refguide6/Monitoring+-+Mendix+Business+Server", to: "/refguide6/monitoring-mendix-runtime" }, { from: "/refguide6/Monitoring+-+What+to+monitor", to: "/refguide6/monitoring-what-to-monitor" }, { from: "/refguide6/Moving+from+5+to+6", to: "/refguide6/moving-from-5-to-6" }, { from: "/refguide6/MySQL", to: "/refguide6/mysql" }, { from: "/refguide6/Navigation", to: "/refguide6/navigation" }, { from: "/refguide6/Navigation+list", to: "/refguide6/navigation-list" }, { from: "/refguide6/Navigation+Tree", to: "/refguide6/navigation-tree" }, { from: "/refguide6/New+button", to: "/refguide6/new-button" }, { from: "/refguide6/NULL+Ordering+Behavior", to: "/refguide6/null-ordering-behavior" }, { from: "/refguide6/Numeric+formatting", to: "/refguide6/numeric-formatting" }, { from: "/refguide6/Object+Activities", to: "/refguide6/object-activities" }, { from: "/refguide6/OData+Query+Options", to: "/refguide6/odata-query-options" }, { from: "/refguide6/OData+Representation", to: "/refguide6/odata-representation" }, { from: "/refguide6/Offline", to: "/refguide6/offline" }, { from: "/refguide6/Offline+device+profile", to: "/refguide6/offline-device-profile" }, { from: "/refguide6/On+Click+Event", to: "/refguide6/on-click-event" }, { from: "/refguide6/Open+Project+Dialog", to: "/refguide6/open-project-dialog" }, { from: "/refguide6/Opening+Pages", to: "/refguide6/opening-pages" }, { from: "/refguide6/Operations", to: "/refguide6/operations" }, { from: "/refguide6/OQL", to: "/refguide6/oql" }, { from: "/refguide6/OQL+Aggregation", to: "/refguide6/oql-aggregation" }, { from: "/refguide6/OQL+Case+Expression", to: "/refguide6/oql-case-expression" }, { from: "/refguide6/OQL+CAST", to: "/refguide6/oql-cast" }, { from: "/refguide6/OQL+COALESCE", to: "/refguide6/oql-coalesce" }, { from: "/refguide6/OQL+DATEDIFF", to: "/refguide6/oql-datediff" }, { from: "/refguide6/OQL+DATEPART", to: "/refguide6/oql-datepart" }, { from: 
"/refguide6/OQL+Expressions", to: "/refguide6/oql-expressions" }, { from: "/refguide6/OQL+From+Clause", to: "/refguide6/oql-from-clause" }, { from: "/refguide6/OQL+FULL+OUTER+JOIN", to: "/refguide6/oql-full-outer-join" }, { from: "/refguide6/OQL+Functions", to: "/refguide6/oql-functions" }, { from: "/refguide6/OQL+Group+by+Clause", to: "/refguide6/oql-group-by-clause" }, { from: "/refguide6/OQL+INNER+JOIN", to: "/refguide6/oql-inner-join" }, { from: "/refguide6/OQL+LEFT+OUTER+JOIN", to: "/refguide6/oql-left-outer-join" }, { from: "/refguide6/OQL+LENGTH", to: "/refguide6/oql-length" }, { from: "/refguide6/OQL+Limit+Clause", to: "/refguide6/oql-limit-clause" }, { from: "/refguide6/OQL+Operators", to: "/refguide6/oql-operators" }, { from: "/refguide6/OQL+Order+by+Clause", to: "/refguide6/oql-order-by-clause" }, { from: "/refguide6/OQL+Parameters", to: "/refguide6/oql-parameters" }, { from: "/refguide6/OQL+RANGEBEGIN", to: "/refguide6/oql-rangebegin" }, { from: "/refguide6/OQL+RANGEEND", to: "/refguide6/oql-rangeend" }, { from: "/refguide6/OQL+RIGHT+OUTER+JOIN", to: "/refguide6/oql-right-outer-join" }, { from: "/refguide6/OQL+ROUND", to: "/refguide6/oql-round" }, { from: "/refguide6/OQL+Select+Clause", to: "/refguide6/oql-select-clause" }, { from: "/refguide6/OQL+Where+Clause", to: "/refguide6/oql-where-clause" }, { from: "/refguide6/Oracle", to: "/refguide6/oracle" }, { from: "/refguide6/Packaging+Hybrid+Mobile+Apps", to: "/refguide6/packaging-hybrid-mobile-apps" }, { from: "/refguide6/Page", to: "/refguide6/page" }, { from: "/refguide6/Page+Break+Document+Template", to: "/refguide6/page-break-document-template" }, { from: "/refguide6/Page+Concepts", to: "/refguide6/page-concepts" }, { from: "/refguide6/Page+Templates", to: "/refguide6/page-templates" }, { from: "/refguide6/Page+title", to: "/refguide6/page-title" }, { from: "/refguide6/Pages", to: "/refguide6/pages" }, { from: "/refguide6/Parameter", to: "/refguide6/parameter" }, { from: "/refguide6/Parse+and+format+date+function+calls", to: "/refguide6/parse-and-format-date-function-calls" }, { from: "/refguide6/Parse+and+format+decimal+function+calls", to: "/refguide6/parse-and-format-decimal-function-calls" }, { from: "/refguide6/Parse+and+format+float+function+calls", to: "/refguide6/parse-and-format-float-function-calls" }, { from: "/refguide6/Parse+integer", to: "/refguide6/parse-integer" }, { from: "/refguide6/Password+Policy", to: "/refguide6/password-policy" }, { from: "/refguide6/Persistability", to: "/refguide6/persistability" }, { from: "/refguide6/Phone+profile", to: "/refguide6/phone-profile" }, { from: "/refguide6/Placeholder", to: "/refguide6/placeholder" }, { from: "/refguide6/Preferences+Dialog", to: "/refguide6/preferences-dialog" }, { from: "/refguide6/Proactive+Maintenance", to: "/refguide6/proactive-maintenance" }, { from: "/refguide6/Project", to: "/refguide6/project" }, { from: "/refguide6/Project+Security", to: "/refguide6/project-security" }, { from: "/refguide6/Project+Settings", to: "/refguide6/project-settings" }, { from: "/refguide6/Publish+Packages+To+Mobile+Stores", to: "/refguide6/publish-packages-to-mobile-stores" }, { from: "/refguide6/Published+App+Service", to: "/refguide6/published-app-service" }, { from: "/refguide6/Published+App+Services", to: "/refguide6/published-app-services" }, { from: "/refguide6/Published+OData+resource", to: "/refguide6/published-odata-resource" }, { from: "/refguide6/Published+OData+Services", to: "/refguide6/published-odata-services" }, { from: 
"/refguide6/Published+web+service", to: "/refguide6/published-web-service" }, { from: "/refguide6/Published+Web+Services", to: "/refguide6/published-web-services" }, { from: "/refguide6/Radio+buttons", to: "/refguide6/radio-buttons" }, { from: "/refguide6/Range+Search+Field", to: "/refguide6/range-search-field" }, { from: "/refguide6/Reference+selector", to: "/refguide6/reference-selector" }, { from: "/refguide6/Reference+set+selector", to: "/refguide6/reference-set-selector" }, { from: "/refguide6/Regular+Expressions", to: "/refguide6/regular-expressions" }, { from: "/refguide6/Relational+expressions", to: "/refguide6/relational-expressions" },
{ from: "/refguide6/Remove+button", to: "/refguide6/remove-button" }, { from: "/refguide6/Removed+APIs", to: "/refguide6/removed-apis" }, { from: "/refguide6/Report+Button", to: "/refguide6/report-button" }, { from: "/refguide6/Report+Chart", to: "/refguide6/report-chart" }, { from: "/refguide6/Report+Date+Parameter", to: "/refguide6/report-date-parameter" }, { from: "/refguide6/Report+Grid", to: "/refguide6/report-grid" }, { from: "/refguide6/Report+Pane", to: "/refguide6/report-pane" }, { from: "/refguide6/Report+Parameter", to: "/refguide6/report-parameter" }, { from: "/refguide6/Report+Widgets", to: "/refguide6/report-widgets" }, { from: "/refguide6/Reporting", to: "/refguide6/report-widgets" }, { from: "/refguide6/Retrieve", to: "/refguide6/retrieve" }, { from: "/refguide6/Review+log+files+-+MS+IIS+Server", to: "/refguide6/review-log-files-ms-iis-server" }, { from: "/refguide6/Review+log+files+-+MS+SQL+Server", to: "/refguide6/review-log-files-ms-sql-server" }, { from: "/refguide6/Rollback+Object", to: "/refguide6/rollback-object" }, { from: "/refguide6/Row+Document+Template", to: "/refguide6/row-document-template" }, { from: "/refguide6/Rules", to: "/refguide6/rules" }, { from: "/refguide6/Runtime", to: "/refguide6/runtime" }, { from: "/refguide6/Save+button", to: "/refguide6/save-button" }, { from: "/refguide6/Scheduled+Events", to: "/refguide6/scheduled-events" }, { from: "/refguide6/Scroll+Container", to: "/refguide6/scroll-container" }, { from: "/refguide6/Scroll+Container+Region", to: "/refguide6/scroll-container-region" }, { from: "/refguide6/Search+Bar", to: "/refguide6/search-bar" }, { from: "/refguide6/Search+button", to: "/refguide6/search-button" }, { from: "/refguide6/Security", to: "/refguide6/security" }, { from: "/refguide6/Select++Elements", to: "/refguide6/select--elements" }, { from: "/refguide6/Select+all+button", to: "/refguide6/select-all-button" }, { from: "/refguide6/Select+app+service", to: "/refguide6/select-app-service" }, { from: "/refguide6/Select+button", to: "/refguide6/select-button" }, { from: "/refguide6/Sequence+Flow", to: "/refguide6/sequence-flow" }, { from: "/refguide6/Settings", to: "/refguide6/settings" }, { from: "/refguide6/Show+Home+Page", to: "/refguide6/show-home-page" }, { from: "/refguide6/Show+Message", to: "/refguide6/show-message" }, { from: "/refguide6/Show+Page", to: "/refguide6/show-page" }, { from: "/refguide6/Sidebar+toggle+button", to: "/refguide6/sidebar-toggle-button" }, { from: "/refguide6/Sign+In+Dialog", to: "/refguide6/sign-in-dialog" }, { from: "/refguide6/Sign+out+button", to: "/refguide6/sign-out-button" }, { from: "/refguide6/Simple+Menu+Bar", to: "/refguide6/simple-menu-bar" }, { from: "/refguide6/Snippet", to: "/refguide6/snippet" }, { from: "/refguide6/Snippet+Call", to: "/refguide6/snippet-call" }, { from: "/refguide6/Sort+Bar", to: "/refguide6/sort-bar" }, { from: "/refguide6/Special+checks", to: "/refguide6/special-checks" }, { from: "/refguide6/Start+Event", to: "/refguide6/start-event" }, { from: "/refguide6/Starting+Microflows", to: "/refguide6/starting-microflows" }, { from: "/refguide6/Static+Image+Document+Template", to: "/refguide6/static-image-document-template" }, { from: "/refguide6/Static+Label+Document+Template", to: "/refguide6/static-label-document-template" }, { from: "/refguide6/String+function+calls", to: "/refguide6/string-function-calls" }, { from: "/refguide6/Style", to: "/refguide6/style" }, { from: "/refguide6/Sync+button", to: "/refguide6/sync-button" }, { from: 
"/refguide6/System+Requirements", to: "/refguide6/system-requirements" }, { from: "/refguide6/System+Texts", to: "/refguide6/system-texts" }, { from: "/refguide6/Tab+container", to: "/refguide6/tab-container" }, { from: "/refguide6/Tab+page", to: "/refguide6/tab-page" }, { from: "/refguide6/Table", to: "/refguide6/table" }, { from: "/refguide6/Table+cell", to: "/refguide6/table-cell" }, { from: "/refguide6/Table+Document+Template", to: "/refguide6/table-document-template" }, { from: "/refguide6/Table+row", to: "/refguide6/table-row" }, { from: "/refguide6/Tablet+profile", to: "/refguide6/tablet-profile" }, { from: "/refguide6/Team+Server", to: "/refguide6/team-server" }, { from: "/refguide6/Team+Server+FAQ", to: "/refguide6/team-server-faq" }, { from: "/refguide6/Template+grid", to: "/refguide6/template-grid" }, { from: "/refguide6/Template+Grid+Document+Template", to: "/refguide6/template-grid-document-template" }, { from: "/refguide6/Text", to: "/refguide6/text" }, { from: "/refguide6/Text+area", to: "/refguide6/text-area" }, { from: "/refguide6/Text+box", to: "/refguide6/text-box" }, { from: "/refguide6/Third+Party+Licenses", to: "/refguide6/third-party-licenses" }, { from: "/refguide6/Title+Document+Template", to: "/refguide6/title-document-template" }, { from: "/refguide6/To+float", to: "/refguide6/to-float" }, { from: "/refguide6/To+string", to: "/refguide6/to-string" }, { from: "/refguide6/Transient+Objects+Garbage+Collecting", to: "/refguide6/transient-objects-garbage-collecting" }, { from: "/refguide6/Translatable+Texts", to: "/refguide6/translatable-texts" }, { from: "/refguide6/Trim+to+date", to: "/refguide6/trim-to-date" }, { from: "/refguide6/Troubleshooting", to: "/refguide6/troubleshooting" }, { from: "/refguide6/Unary+expressions", to: "/refguide6/unary-expressions" }, { from: "/refguide6/Upload+To+Team+Server+Dialog", to: "/refguide6/upload-to-team-server-dialog" }, { from: "/refguide6/User+Roles", to: "/refguide6/user-roles" }, { from: "/refguide6/User+Role", to: "/refguide6/user-roles" }, { from: "/refguide6/Using+a+proxy+to+call+a+webservice", to: "/refguide6/using-a-proxy-to-call-a-webservice" }, { from: "/refguide6/Using+Eclipse", to: "/refguide6/using-eclipse" }, { from: "/refguide6/Validation+Feedback", to: "/refguide6/validation-feedback" }, { from: "/refguide6/Validation+Rules", to: "/refguide6/validation-rules" }, { from: "/refguide6/Variable+Activities", to: "/refguide6/variable-activities" }, { from: "/refguide6/Version+Control", to: "/refguide6/version-control" }, { from: "/refguide6/Version+Control+Concepts", to: "/refguide6/version-control-concepts" }, { from: "/refguide6/Version+Control+Scenarios", to: "/refguide6/version-control-scenarios" }, { from: "/refguide6/version-downgrade-prevention", to: "/developerportal/deploy/version-downgrade-prevention" }, { from: "/refguide6/Vertical+Split+Pane", to: "/refguide6/vertical-split-pane" }, { from: "/refguide6/XML+Inheritance+and+Choice", to: "/refguide6/xml-inheritance-and-choice" }, { from: "/refguide6/XML+Reference+Guide", to: "/refguide6/xml-reference-guide" }, { from: "/refguide6/XML+Schema+Support", to: "/refguide6/xml-schema-support" }, { from: "/refguide6/XML+Schemas", to: "/refguide6/xml-schemas" }, { from: "/refguide6/XPath", to: "/refguide6/xpath" }, { from: "/refguide6/XPath+avg", to: "/refguide6/xpath-avg" }, { from: "/refguide6/XPath+Constraint+Functions", to: "/refguide6/xpath-constraint-functions" }, { from: "/refguide6/XPath+Constraints", to: "/refguide6/xpath-constraints" }, { from: 
"/refguide6/XPath+contains", to: "/refguide6/xpath-contains" }, { from: "/refguide6/XPath+count", to: "/refguide6/xpath-count" }, { from: "/refguide6/XPath+day+from+dateTime", to: "/refguide6/xpath-day-from-datetime" }, { from: "/refguide6/XPath+day+of+year+from+dateTime", to: "/refguide6/xpath-day-of-year-from-datetime" }, { from: "/refguide6/XPath+ends+with", to: "/refguide6/xpath-ends-with" }, { from: "/refguide6/XPath+Expressions", to: "/refguide6/xpath-expressions" }, { from: "/refguide6/XPath+false", to: "/refguide6/xpath-false" }, { from: "/refguide6/XPath+hours+from+dateTime", to: "/refguide6/xpath-hours-from-datetime" }, { from: "/refguide6/XPath+id", to: "/refguide6/xpath-id" }, { from: "/refguide6/XPath+Keywords+and+System+Variables", to: "/refguide6/xpath-keywords-and-system-variables" }, { from: "/refguide6/XPath+length", to: "/refguide6/xpath-length" }, { from: "/refguide6/XPath+max", to: "/refguide6/xpath-max" }, { from: "/refguide6/XPath+min", to: "/refguide6/xpath-min" }, { from: "/refguide6/XPath+minutes+from+dateTime", to: "/refguide6/xpath-minutes-from-datetime" }, { from: "/refguide6/XPath+month+from+dateTime", to: "/refguide6/xpath-month-from-datetime" }, { from: "/refguide6/XPath+not", to: "/refguide6/xpath-not" }, { from: "/refguide6/XPath+Operators", to: "/refguide6/xpath-operators" }, { from: "/refguide6/XPath+quarter+from+dateTime", to: "/refguide6/xpath-quarter-from-datetime" }, { from: "/refguide6/XPath+Query+Functions", to: "/refguide6/xpath-query-functions" }, { from: "/refguide6/XPath+seconds+from+dateTime", to: "/refguide6/xpath-seconds-from-datetime" }, { from: "/refguide6/XPath+Source", to: "/refguide6/xpath-source" }, { from: "/refguide6/XPath+starts+with", to: "/refguide6/xpath-starts-with" }, { from: "/refguide6/XPath+string+length", to: "/refguide6/xpath-string-length" }, { from: "/refguide6/XPath+sum", to: "/refguide6/xpath-sum" }, { from: "/refguide6/XPath+Tokens", to: "/refguide6/xpath-tokens" }, { from: "/refguide6/XPath+true", to: "/refguide6/xpath-true" }, { from: "/refguide6/XPath+week+from+dateTime", to: "/refguide6/xpath-week-from-datetime" }, { from: "/refguide6/XPath+weekday+from+dateTime", to: "/refguide6/xpath-weekday-from-datetime" }, { from: "/refguide6/XPath+year+from+dateTime", to: "/refguide6/xpath-year-from-datetime" }, { from: "/howto6/Solving+Load+and+Import+Errors", to: "/howto6/solving-load-and-import-errors" }, /**************************************************** * Add-on Guides (permanent, mapped) ****************************************************/ { from: "/addons/apm-addon/", to: "/addons/apd-addon/" }, { from: "/apm/installation-guide", to: "/addons/apd-addon/ig-two" }, { from: "/apm/reference-guide/rg-2/reference-guide-2", to: "/addons/apd-addon/rg-two-apm" }, { from: "~*\\\/aqm\\\/", to: "/addons/aqm-addon/index", exact: true }, { from: "~*\\\/apm\\\/", to: "/addons/apd-addon/index", exact: true }, { from: "~*\\\/ats\\\/", to: "/addons/ats-addon/index", exact: true }, ] }
{
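# --- A minimal sketch (not part of the redirect file above) of how a table of
# from/to rules like the one above could be applied at request time. Assumed:
# a leading "~" marks a regex rule and plain entries match literally;
# resolve_redirect and the two sample rules here are hypothetical.
import re

RULES = [
    {"from": "/refguide6/XPath+true", "to": "/refguide6/xpath-true"},
    {"from": r"~*\/apm\/", "to": "/addons/apd-addon/index", "exact": True},
]

def resolve_redirect(path):
    for rule in RULES:
        source = rule["from"]
        if source.startswith("~"):
            # Regex rule: drop the "~" marker (and a leading glob "*"), then search.
            if re.search(source[1:].lstrip("*"), path):
                return rule["to"]
        elif path == source:
            # Literal rule: the legacy path must match exactly.
            return rule["to"]
    return None  # no rule matched; serve the request unchanged

# resolve_redirect("/refguide6/XPath+true")   -> "/refguide6/xpath-true"
# resolve_redirect("/apm/installation-guide") -> "/addons/apd-addon/index"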
test_pysnooper.py
# Copyright 2019 Ram Rachum and collaborators. # This program is distributed under the MIT license. import io import textwrap import threading import types import sys from pysnooper.utils import truncate from python_toolbox import sys_tools, temp_file_tools import pytest import pysnooper from pysnooper.variables import needs_parentheses from .utils import (assert_output, assert_sample_output, VariableEntry, CallEntry, LineEntry, ReturnEntry, OpcodeEntry, ReturnValueEntry, ExceptionEntry) def test_string_io(): string_io = io.StringIO() @pysnooper.snoop(string_io) def my_function(foo): x = 7 y = 8 return y + x result = my_function('baba') assert result == 15 output = string_io.getvalue() assert_output( output, ( VariableEntry('foo', value_regex="u?'baba'"), CallEntry('def my_function(foo):'), LineEntry('x = 7'), VariableEntry('x', '7'), LineEntry('y = 8'), VariableEntry('y', '8'), LineEntry('return y + x'), ReturnEntry('return y + x'), ReturnValueEntry('15'), ) ) def test_thread_info(): @pysnooper.snoop(thread_info=True) def my_function(foo): x = 7 y = 8 return y + x with sys_tools.OutputCapturer(stdout=False, stderr=True) as output_capturer: result = my_function('baba') assert result == 15 output = output_capturer.string_io.getvalue() assert_output( output, ( VariableEntry('foo', value_regex="u?'baba'"), CallEntry('def my_function(foo):'), LineEntry('x = 7'), VariableEntry('x', '7'), LineEntry('y = 8'), VariableEntry('y', '8'), LineEntry('return y + x'), ReturnEntry('return y + x'), ReturnValueEntry('15'), ) ) def test_multi_thread_info(): @pysnooper.snoop(thread_info=True) def my_function(foo): x = 7 y = 8 return y + x with sys_tools.OutputCapturer(stdout=False, stderr=True) as output_capturer: my_function('baba') t1 = threading.Thread(target=my_function, name="test123",args=['bubu']) t1.start() t1.join() t1 = threading.Thread(target=my_function, name="bibi",args=['bibi']) t1.start() t1.join() output = output_capturer.string_io.getvalue() calls = [line for line in output.split("\n") if "call" in line] main_thread = calls[0] assert len(main_thread) == len(calls[1]) assert len(main_thread) == len(calls[2]) main_thread_call_str = main_thread.find("call") assert main_thread_call_str == calls[1].find("call") assert main_thread_call_str == calls[2].find("call") thread_info_regex = '([0-9]+-{name}+[ ]+)' assert_output( output, ( VariableEntry('foo', value_regex="u?'baba'"), CallEntry('def my_function(foo):', thread_info_regex=thread_info_regex.format( name="MainThread")), LineEntry('x = 7', thread_info_regex=thread_info_regex.format( name="MainThread")), VariableEntry('x', '7'), LineEntry('y = 8', thread_info_regex=thread_info_regex.format( name="MainThread")), VariableEntry('y', '8'), LineEntry('return y + x', thread_info_regex=thread_info_regex.format( name="MainThread")), ReturnEntry('return y + x'), ReturnValueEntry('15'), VariableEntry('foo', value_regex="u?'bubu'"), CallEntry('def my_function(foo):', thread_info_regex=thread_info_regex.format( name="test123")), LineEntry('x = 7', thread_info_regex=thread_info_regex.format( name="test123")), VariableEntry('x', '7'), LineEntry('y = 8', thread_info_regex=thread_info_regex.format( name="test123")), VariableEntry('y', '8'), LineEntry('return y + x', thread_info_regex=thread_info_regex.format( name="test123")), ReturnEntry('return y + x'), ReturnValueEntry('15'), VariableEntry('foo', value_regex="u?'bibi'"), CallEntry('def my_function(foo):', thread_info_regex=thread_info_regex.format(name='bibi')), LineEntry('x = 7', 
thread_info_regex=thread_info_regex.format(name='bibi')), VariableEntry('x', '7'), LineEntry('y = 8', thread_info_regex=thread_info_regex.format(name='bibi')), VariableEntry('y', '8'), LineEntry('return y + x', thread_info_regex=thread_info_regex.format(name='bibi')), ReturnEntry('return y + x'), ReturnValueEntry('15'), ) ) def test_callable(): string_io = io.StringIO() def write(msg): string_io.write(msg) @pysnooper.snoop(write) def my_function(foo): x = 7 y = 8 return y + x result = my_function('baba') assert result == 15 output = string_io.getvalue() assert_output( output, ( VariableEntry('foo', value_regex="u?'baba'"), CallEntry('def my_function(foo):'), LineEntry('x = 7'), VariableEntry('x', '7'), LineEntry('y = 8'), VariableEntry('y', '8'), LineEntry('return y + x'), ReturnEntry('return y + x'), ReturnValueEntry('15'), ) ) def test_watch(): class Foo(object): def __init__(self): self.x = 2 def square(self): self.x **= 2 @pysnooper.snoop(watch=( 'foo.x', 'io.__name__', 'len(foo.__dict__["x"] * "abc")', )) def my_function(): foo = Foo() for i in range(2): foo.square() with sys_tools.OutputCapturer(stdout=False, stderr=True) as output_capturer: result = my_function() assert result is None output = output_capturer.string_io.getvalue() assert_output( output, ( VariableEntry('Foo'), VariableEntry('io.__name__', "'io'"), CallEntry('def my_function():'), LineEntry('foo = Foo()'), VariableEntry('foo'), VariableEntry('foo.x', '2'), VariableEntry('len(foo.__dict__["x"] * "abc")', '6'), LineEntry(), VariableEntry('i', '0'), LineEntry(), VariableEntry('foo.x', '4'), VariableEntry('len(foo.__dict__["x"] * "abc")', '12'), LineEntry(), VariableEntry('i', '1'), LineEntry(), VariableEntry('foo.x', '16'), VariableEntry('len(foo.__dict__["x"] * "abc")', '48'), LineEntry(), ReturnEntry(), ReturnValueEntry('None') ) ) def test_watch_explode(): class Foo: def __init__(self, x, y): self.x = x self.y = y @pysnooper.snoop(watch_explode=('_d', '_point', 'lst + []')) def my_function(): _d = {'a': 1, 'b': 2, 'c': 'ignore'} _point = Foo(x=3, y=4) lst = [7, 8, 9] lst.append(10) with sys_tools.OutputCapturer(stdout=False, stderr=True) as output_capturer: result = my_function() assert result is None output = output_capturer.string_io.getvalue() assert_output( output, ( VariableEntry('Foo'), CallEntry('def my_function():'), LineEntry(), VariableEntry('_d'), VariableEntry("_d['a']", '1'), VariableEntry("_d['b']", '2'), VariableEntry("_d['c']", "'ignore'"), LineEntry(), VariableEntry('_point'), VariableEntry('_point.x', '3'), VariableEntry('_point.y', '4'), LineEntry(), VariableEntry('lst'), VariableEntry('(lst + [])[0]', '7'), VariableEntry('(lst + [])[1]', '8'), VariableEntry('(lst + [])[2]', '9'), VariableEntry('lst + []'), LineEntry(), VariableEntry('lst'), VariableEntry('(lst + [])[3]', '10'), VariableEntry('lst + []'), ReturnEntry(), ReturnValueEntry('None') ) ) def test_variables_classes(): class WithSlots(object): __slots__ = ('x', 'y') def __init__(self): self.x = 3 self.y = 4 @pysnooper.snoop(watch=( pysnooper.Keys('_d', exclude='c'), pysnooper.Attrs('_d'), # doesn't have attributes pysnooper.Attrs('_s'), pysnooper.Indices('_lst')[-3:], )) def my_function(): _d = {'a': 1, 'b': 2, 'c': 'ignore'} _s = WithSlots() _lst = list(range(1000)) with sys_tools.OutputCapturer(stdout=False, stderr=True) as output_capturer: result = my_function() assert result is None output = output_capturer.string_io.getvalue() assert_output( output, ( VariableEntry('WithSlots'), CallEntry('def my_function():'), LineEntry(), 
VariableEntry('_d'), VariableEntry("_d['a']", '1'), VariableEntry("_d['b']", '2'), LineEntry(), VariableEntry('_s'), VariableEntry('_s.x', '3'), VariableEntry('_s.y', '4'), LineEntry(), VariableEntry('_lst'), VariableEntry('_lst[997]', '997'), VariableEntry('_lst[998]', '998'), VariableEntry('_lst[999]', '999'), ReturnEntry(), ReturnValueEntry('None') ) ) def test_single_watch_no_comma(): class Foo(object): def __init__(self): self.x = 2 def square(self): self.x **= 2 @pysnooper.snoop(watch='foo') def my_function(): foo = Foo() for i in range(2): foo.square() with sys_tools.OutputCapturer(stdout=False, stderr=True) as output_capturer: result = my_function() assert result is None output = output_capturer.string_io.getvalue() assert_output( output, ( VariableEntry('Foo'), CallEntry('def my_function():'), LineEntry('foo = Foo()'), VariableEntry('foo'), LineEntry(), VariableEntry('i', '0'), LineEntry(), LineEntry(), VariableEntry('i', '1'), LineEntry(), LineEntry(), ReturnEntry(), ReturnValueEntry('None') ) ) def test_long_variable(): @pysnooper.snoop() def my_function(): foo = list(range(1000)) return foo with sys_tools.OutputCapturer(stdout=False, stderr=True) as output_capturer: result = my_function() assert result == list(range(1000)) output = output_capturer.string_io.getvalue() regex = r'^\[0, 1, 2, .*\.\.\..*, 997, 998, 999\]$' assert_output( output, ( CallEntry('def my_function():'), LineEntry('foo = list(range(1000))'), VariableEntry('foo', value_regex=regex), LineEntry(), ReturnEntry(), ReturnValueEntry(value_regex=regex) ) ) def test_repr_exception(): class Bad(object): def __repr__(self): 1 / 0 @pysnooper.snoop() def my_function(): bad = Bad() with sys_tools.OutputCapturer(stdout=False, stderr=True) as output_capturer: result = my_function() assert result is None output = output_capturer.string_io.getvalue() assert_output( output, ( VariableEntry('Bad'), CallEntry('def my_function():'), LineEntry('bad = Bad()'), VariableEntry('bad', value='REPR FAILED'), ReturnEntry(), ReturnValueEntry('None') ) ) def test_depth(): string_io = io.StringIO() def f4(x4): result4 = x4 * 2 return result4 def f3(x3): result3 = f4(x3) return result3 def f2(x2): result2 = f3(x2) return result2 @pysnooper.snoop(string_io, depth=3) def f1(x1): result1 = f2(x1) return result1 result = f1(10) assert result == 20 output = string_io.getvalue() assert_output( output, ( VariableEntry(), VariableEntry(), CallEntry('def f1(x1):'), LineEntry(), VariableEntry(), VariableEntry(), CallEntry('def f2(x2):'), LineEntry(), VariableEntry(), VariableEntry(), CallEntry('def f3(x3):'), LineEntry(), VariableEntry(), LineEntry(), ReturnEntry(), ReturnValueEntry('20'), VariableEntry(), LineEntry(), ReturnEntry(), ReturnValueEntry('20'), VariableEntry(), LineEntry(), ReturnEntry(), ReturnValueEntry('20'), ) ) def test_method_and_prefix(): class Baz(object): def __init__(self): self.x = 2 @pysnooper.snoop(watch=('self.x',), prefix='ZZZ') def square(self): foo = 7 self.x **= 2 return self baz = Baz() with sys_tools.OutputCapturer(stdout=False, stderr=True) as output_capturer: result = baz.square() assert result is baz assert result.x == 4 output = output_capturer.string_io.getvalue() assert_output( output, ( VariableEntry('self', prefix='ZZZ'), VariableEntry('self.x', '2', prefix='ZZZ'), CallEntry('def square(self):', prefix='ZZZ'), LineEntry('foo = 7', prefix='ZZZ'), VariableEntry('foo', '7', prefix='ZZZ'), LineEntry('self.x **= 2', prefix='ZZZ'), VariableEntry('self.x', '4', prefix='ZZZ'), LineEntry(prefix='ZZZ'), 
ReturnEntry(prefix='ZZZ'), ReturnValueEntry(prefix='ZZZ'), ), prefix='ZZZ' ) def test_file_output(): with temp_file_tools.create_temp_folder(prefix='pysnooper') as folder: path = folder / 'foo.log' @pysnooper.snoop(path) def my_function(_foo): x = 7 y = 8 return y + x result = my_function('baba') assert result == 15 with path.open() as output_file: output = output_file.read() assert_output( output, ( VariableEntry('_foo', value_regex="u?'baba'"), CallEntry('def my_function(_foo):'), LineEntry('x = 7'), VariableEntry('x', '7'), LineEntry('y = 8'), VariableEntry('y', '8'), LineEntry('return y + x'), ReturnEntry('return y + x'), ReturnValueEntry('15'), ) ) def test_confusing_decorator_lines(): string_io = io.StringIO() def empty_decorator(function): return function @empty_decorator @pysnooper.snoop(string_io, depth=2) # Multi-line decorator for extra confusion! @empty_decorator @empty_decorator def my_function(foo): x = lambda bar: 7 y = 8 return y + x(foo) result = my_function('baba') assert result == 15 output = string_io.getvalue() assert_output( output, ( VariableEntry('foo', value_regex="u?'baba'"), CallEntry('def my_function(foo):'), LineEntry(), VariableEntry(), LineEntry(), VariableEntry(), LineEntry(), # inside lambda VariableEntry('bar', value_regex="u?'baba'"), CallEntry('x = lambda bar: 7'), LineEntry(), ReturnEntry(), ReturnValueEntry('7'), # back in my_function ReturnEntry(), ReturnValueEntry('15'), ) ) def test_lambda(): string_io = io.StringIO() my_function = pysnooper.snoop(string_io)(lambda x: x ** 2) result = my_function(7) assert result == 49 output = string_io.getvalue() assert_output( output, ( VariableEntry('x', '7'), CallEntry(source_regex='^my_function = pysnooper.*'), LineEntry(source_regex='^my_function = pysnooper.*'), ReturnEntry(source_regex='^my_function = pysnooper.*'), ReturnValueEntry('49'), ) ) def test_unavailable_source(): with temp_file_tools.create_temp_folder(prefix='pysnooper') as folder, \ sys_tools.TempSysPathAdder(str(folder)): module_name = 'iaerojajsijf' python_file_path = folder / ('%s.py' % (module_name,)) content = textwrap.dedent(u''' import pysnooper @pysnooper.snoop() def f(x): return x ''') with python_file_path.open('w') as python_file: python_file.write(content) module = __import__(module_name) python_file_path.unlink() with sys_tools.OutputCapturer(stdout=False, stderr=True) as output_capturer: result = getattr(module, 'f')(7) assert result == 7 output = output_capturer.output assert_output( output, ( VariableEntry(stage='starting'), CallEntry('SOURCE IS UNAVAILABLE'), LineEntry('SOURCE IS UNAVAILABLE'), ReturnEntry('SOURCE IS UNAVAILABLE'), ReturnValueEntry('7'), ) ) def test_no_overwrite_by_default(): with temp_file_tools.create_temp_folder(prefix='pysnooper') as folder: path = folder / 'foo.log' with path.open('w') as output_file: output_file.write(u'lala') @pysnooper.snoop(str(path)) def my_function(foo): x = 7 y = 8 return y + x result = my_function('baba') assert result == 15 with path.open() as output_file: output = output_file.read() assert output.startswith('lala') shortened_output = output[4:] assert_output( shortened_output, ( VariableEntry('foo', value_regex="u?'baba'"), CallEntry('def my_function(foo):'), LineEntry('x = 7'), VariableEntry('x', '7'), LineEntry('y = 8'), VariableEntry('y', '8'), LineEntry('return y + x'), ReturnEntry('return y + x'), ReturnValueEntry('15'), ) ) def test_overwrite(): with temp_file_tools.create_temp_folder(prefix='pysnooper') as folder: path = folder / 'foo.log' with path.open('w') as 
output_file: output_file.write(u'lala') @pysnooper.snoop(str(path), overwrite=True) def my_function(foo): x = 7 y = 8 return y + x result = my_function('baba') result = my_function('baba') assert result == 15 with path.open() as output_file: output = output_file.read() assert 'lala' not in output assert_output( output, ( VariableEntry('foo', value_regex="u?'baba'"), CallEntry('def my_function(foo):'), LineEntry('x = 7'), VariableEntry('x', '7'), LineEntry('y = 8'), VariableEntry('y', '8'), LineEntry('return y + x'), ReturnEntry('return y + x'), ReturnValueEntry('15'), VariableEntry('foo', value_regex="u?'baba'"), CallEntry('def my_function(foo):'), LineEntry('x = 7'), VariableEntry('x', '7'), LineEntry('y = 8'), VariableEntry('y', '8'), LineEntry('return y + x'), ReturnEntry('return y + x'), ReturnValueEntry('15'), ) ) def test_error_in_overwrite_argument(): with temp_file_tools.create_temp_folder(prefix='pysnooper') as folder: with pytest.raises(Exception, match='can only be used when writing'): @pysnooper.snoop(overwrite=True) def my_function(foo): x = 7 y = 8 return y + x def test_needs_parentheses(): assert not needs_parentheses('x') assert not needs_parentheses('x.y') assert not needs_parentheses('x.y.z') assert not needs_parentheses('x.y.z[0]') assert not needs_parentheses('x.y.z[0]()') assert not needs_parentheses('x.y.z[0]()(3, 4 * 5)') assert not needs_parentheses('foo(x)') assert not needs_parentheses('foo(x+y)') assert not needs_parentheses('(x+y)') assert not needs_parentheses('[x+1 for x in ()]') assert needs_parentheses('x + y') assert needs_parentheses('x * y') assert needs_parentheses('x and y') assert needs_parentheses('x if z else y') def test_with_block(): # Testing that a single Tracer can handle many mixed uses snoop = pysnooper.snoop() def foo(x): if x == 0: bar1(x) qux() return with snoop: # There should be line entries for these three lines, # no line entries for anything else in this function, # but calls to all bar functions should be traced foo(x - 1) bar2(x) qux() int(4) bar3(9) return x @snoop def
(_x): qux() @snoop def bar2(_x): qux() @snoop def bar3(_x): qux() def qux(): return 9 # not traced, mustn't show up with sys_tools.OutputCapturer(stdout=False, stderr=True) as output_capturer: result = foo(2) assert result == 2 output = output_capturer.string_io.getvalue() assert_output( output, ( # In first with VariableEntry('x', '2'), VariableEntry('bar1'), VariableEntry('bar2'), VariableEntry('bar3'), VariableEntry('foo'), VariableEntry('qux'), VariableEntry('snoop'), LineEntry('foo(x - 1)'), # In with in recursive call VariableEntry('x', '1'), VariableEntry('bar1'), VariableEntry('bar2'), VariableEntry('bar3'), VariableEntry('foo'), VariableEntry('qux'), VariableEntry('snoop'), LineEntry('foo(x - 1)'), # Call to bar1 from if block outside with VariableEntry('_x', '0'), VariableEntry('qux'), CallEntry('def bar1(_x):'), LineEntry('qux()'), ReturnEntry('qux()'), ReturnValueEntry('None'), # In with in recursive call LineEntry('bar2(x)'), # Call to bar2 from within with VariableEntry('_x', '1'), VariableEntry('qux'), CallEntry('def bar2(_x):'), LineEntry('qux()'), ReturnEntry('qux()'), ReturnValueEntry('None'), # In with in recursive call LineEntry('qux()'), # Call to bar3 from after with VariableEntry('_x', '9'), VariableEntry('qux'), CallEntry('def bar3(_x):'), LineEntry('qux()'), ReturnEntry('qux()'), ReturnValueEntry('None'), # -- Similar to previous few sections, # -- but from first call to foo # In with in first call LineEntry('bar2(x)'), # Call to bar2 from within with VariableEntry('_x', '2'), VariableEntry('qux'), CallEntry('def bar2(_x):'), LineEntry('qux()'), ReturnEntry('qux()'), ReturnValueEntry('None'), # In with in first call LineEntry('qux()'), # Call to bar3 from after with VariableEntry('_x', '9'), VariableEntry('qux'), CallEntry('def bar3(_x):'), LineEntry('qux()'), ReturnEntry('qux()'), ReturnValueEntry('None'), ), ) def test_with_block_depth(): string_io = io.StringIO() def f4(x4): result4 = x4 * 2 return result4 def f3(x3): result3 = f4(x3) return result3 def f2(x2): result2 = f3(x2) return result2 def f1(x1): str(3) with pysnooper.snoop(string_io, depth=3): result1 = f2(x1) return result1 result = f1(10) assert result == 20 output = string_io.getvalue() assert_output( output, ( VariableEntry(), VariableEntry(), VariableEntry(), LineEntry('result1 = f2(x1)'), VariableEntry(), VariableEntry(), CallEntry('def f2(x2):'), LineEntry(), VariableEntry(), VariableEntry(), CallEntry('def f3(x3):'), LineEntry(), VariableEntry(), LineEntry(), ReturnEntry(), ReturnValueEntry('20'), VariableEntry(), LineEntry(), ReturnEntry(), ReturnValueEntry('20'), ) ) def test_cellvars(): string_io = io.StringIO() def f2(a): def f3(a): x = 0 x += 1 def f4(a): y = x return 42 return f4(a) return f3(a) def f1(a): with pysnooper.snoop(string_io, depth=4): result1 = f2(a) return result1 result = f1(42) assert result == 42 output = string_io.getvalue() assert_output( output, ( VariableEntry(), VariableEntry(), VariableEntry(), LineEntry('result1 = f2(a)'), VariableEntry(), CallEntry('def f2(a):'), LineEntry(), VariableEntry(), LineEntry(), VariableEntry("a"), CallEntry('def f3(a):'), LineEntry(), VariableEntry("x"), LineEntry(), VariableEntry("x"), LineEntry(), VariableEntry(), LineEntry(), VariableEntry(), VariableEntry("x"), CallEntry('def f4(a):'), LineEntry(), VariableEntry(), LineEntry(), ReturnEntry(), ReturnValueEntry(), ReturnEntry(), ReturnValueEntry(), ReturnEntry(), ReturnValueEntry(), ) ) def test_var_order(): string_io = io.StringIO() def f(one, two, three, four): five = None six = None 
seven = None five, six, seven = 5, 6, 7 with pysnooper.snoop(string_io, depth=2): result = f(1, 2, 3, 4) output = string_io.getvalue() assert_output( output, ( VariableEntry(), VariableEntry(), LineEntry('result = f(1, 2, 3, 4)'), VariableEntry("one", "1"), VariableEntry("two", "2"), VariableEntry("three", "3"), VariableEntry("four", "4"), CallEntry('def f(one, two, three, four):'), LineEntry(), VariableEntry("five"), LineEntry(), VariableEntry("six"), LineEntry(), VariableEntry("seven"), LineEntry(), VariableEntry("five", "5"), VariableEntry("six", "6"), VariableEntry("seven", "7"), ReturnEntry(), ReturnValueEntry(), ) ) def test_truncate(): max_length = 20 for i in range(max_length * 2): string = i * 'a' truncated = truncate(string, max_length) if len(string) <= max_length: assert string == truncated else: assert truncated == 'aaaaaaaa...aaaaaaaaa' assert len(truncated) == max_length def test_indentation(): from .samples import indentation, recursion assert_sample_output(indentation) assert_sample_output(recursion) def test_exception(): from .samples import exception assert_sample_output(exception) def test_generator(): string_io = io.StringIO() original_tracer = sys.gettrace() original_tracer_active = lambda: (sys.gettrace() is original_tracer) @pysnooper.snoop(string_io) def f(x1): assert not original_tracer_active() x2 = (yield x1) assert not original_tracer_active() x3 = 'foo' assert not original_tracer_active() x4 = (yield 2) assert not original_tracer_active() return assert original_tracer_active() generator = f(0) assert original_tracer_active() first_item = next(generator) assert original_tracer_active() assert first_item == 0 second_item = generator.send('blabla') assert original_tracer_active() assert second_item == 2 with pytest.raises(StopIteration) as exc_info: generator.send('looloo') assert original_tracer_active() output = string_io.getvalue() assert_output( output, ( VariableEntry('x1', '0'), VariableEntry(), CallEntry(), LineEntry(), VariableEntry(), VariableEntry(), LineEntry(), ReturnEntry(), ReturnValueEntry('0'), # Pause and resume: VariableEntry('x1', '0'), VariableEntry(), VariableEntry(), VariableEntry(), CallEntry(), VariableEntry('x2', "'blabla'"), LineEntry(), LineEntry(), VariableEntry('x3', "'foo'"), LineEntry(), LineEntry(), ReturnEntry(), ReturnValueEntry('2'), # Pause and resume: VariableEntry('x1', '0'), VariableEntry(), VariableEntry(), VariableEntry(), VariableEntry(), VariableEntry(), CallEntry(), VariableEntry('x4', "'looloo'"), LineEntry(), LineEntry(), ReturnEntry(), ReturnValueEntry(None), ) ) def test_custom_repr(): string_io = io.StringIO() def large(l): return isinstance(l, list) and len(l) > 5 def print_list_size(l): return 'list(size={})'.format(len(l)) def print_dict(d): return 'dict(keys={})'.format(sorted(list(d.keys()))) def evil_condition(x): return large(x) or isinstance(x, dict) @pysnooper.snoop(string_io, custom_repr=( (large, print_list_size), (dict, print_dict), (evil_condition, lambda x: 'I am evil'))) def sum_to_x(x): l = list(range(x)) a = {'1': 1, '2': 2} return sum(l) result = sum_to_x(10000) output = string_io.getvalue() assert_output( output, ( VariableEntry('x', '10000'), CallEntry(), LineEntry(), VariableEntry('l', 'list(size=10000)'), LineEntry(), VariableEntry('a', "dict(keys=['1', '2'])"), LineEntry(), ReturnEntry(), ReturnValueEntry('49995000'), ) )
bar1
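# --- A minimal usage sketch of the pattern the tests above exercise: pass a
# stream to pysnooper.snoop and read the trace back. Only the snoop(output,
# watch=...) signature already used in the tests is assumed; add_seven is a
# hypothetical example function, not part of the test suite.
import io
import pysnooper

trace = io.StringIO()

@pysnooper.snoop(trace, watch=('x',))
def add_seven(value):
    x = 7             # traced: a VariableEntry for x appears in the output
    return value + x  # traced: ReturnEntry / ReturnValueEntry lines follow

assert add_seven(8) == 15
print(trace.getvalue())  # e.g. "New var:....... x = 7" followed by the return value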
set_rf.py
import numbers from warnings import catch_warnings, simplefilter, warn import threading from abc import ABCMeta, abstractmethod import numpy as np from scipy.sparse import issparse from scipy.sparse import hstack as sparse_hstack from joblib import Parallel, delayed from sklearn.base import ClassifierMixin, RegressorMixin, MultiOutputMixin from sklearn.metrics import r2_score from sklearn.preprocessing import OneHotEncoder from sklearn.tree import (DecisionTreeClassifier, DecisionTreeRegressor, ExtraTreeClassifier, ExtraTreeRegressor) from sklearn.tree._tree import DTYPE, DOUBLE from sklearn.utils import check_random_state, check_array, compute_sample_weight from sklearn.exceptions import DataConversionWarning from sklearn.ensemble._base import BaseEnsemble, _partition_estimators from sklearn.utils.fixes import _joblib_parallel_args from sklearn.utils.multiclass import check_classification_targets from sklearn.utils.validation import check_is_fitted, _check_sample_weight from sklearn.utils.validation import _deprecate_positional_args from settree.set_tree import SetTree from settree.set_data import OPERATIONS __all__ = ["SetRandomForestClassifier", "SetRandomForestRegressor"] MAX_INT = np.iinfo(np.int32).max def _get_n_samples_bootstrap(n_samples, max_samples): """ Get the number of samples in a bootstrap sample. Parameters ---------- n_samples : int Number of samples in the dataset. max_samples : int or float The maximum number of samples to draw from the total available: - if float, this indicates a fraction of the total and should be in the interval `(0, 1)`; - if int, this indicates the exact number of samples; - if None, this indicates the total number of samples. Returns ------- n_samples_bootstrap : int The total number of samples to draw for the bootstrap sample. 
""" if max_samples is None: return n_samples if isinstance(max_samples, numbers.Integral): if not (1 <= max_samples <= n_samples): msg = "`max_samples` must be in range 1 to {} but got value {}" raise ValueError(msg.format(n_samples, max_samples)) return max_samples if isinstance(max_samples, numbers.Real): if not (0 < max_samples < 1): msg = "`max_samples` must be in range (0, 1) but got value {}" raise ValueError(msg.format(max_samples)) return int(round(n_samples * max_samples)) msg = "`max_samples` should be int or float, but got type '{}'" raise TypeError(msg.format(type(max_samples))) def _generate_sample_indices(random_state, n_samples, n_samples_bootstrap): """ Private function used by the _parallel_build_trees function.""" random_instance = check_random_state(random_state) sample_indices = random_instance.randint(0, n_samples, n_samples_bootstrap) return sample_indices def _generate_unsampled_indices(random_state, n_samples, n_samples_bootstrap): """ Private function used by the forest._set_oob_score function.""" sample_indices = _generate_sample_indices(random_state, n_samples, n_samples_bootstrap) sample_counts = np.bincount(sample_indices, minlength=n_samples) unsampled_mask = sample_counts == 0 indices_range = np.arange(n_samples) unsampled_indices = indices_range[unsampled_mask] return unsampled_indices def _parallel_build_trees(tree, forest, X_set, y, sample_weight, tree_idx, n_trees, verbose=0, class_weight=None, n_samples_bootstrap=None): """ Private function used to fit a single tree in parallel.""" if verbose > 1: print("building tree %d of %d" % (tree_idx + 1, n_trees)) if forest.bootstrap: n_samples = X_set.shape[0] if sample_weight is None: curr_sample_weight = np.ones((n_samples,), dtype=np.float64) else: curr_sample_weight = sample_weight.copy() indices = _generate_sample_indices(tree.random_state, n_samples, n_samples_bootstrap) X_subset = X_set.get_subset(indices) y_subset = y.take(indices) sample_weights_subset = None if sample_weight is None else curr_sample_weight.take(indices) # todo: currently not supporting those options # sample_counts = np.bincount(indices, minlength=n_samples) # curr_sample_weight *= sample_counts # # if class_weight == 'subsample': # with catch_warnings(): # simplefilter('ignore', DeprecationWarning) # curr_sample_weight *= compute_sample_weight('auto', y, # indices=indices) # elif class_weight == 'balanced_subsample': # curr_sample_weight *= compute_sample_weight('balanced', y, # indices=indices) tree.fit(X_subset, y_subset, sample_weights_subset) else: tree.fit(X_set, y, sample_weight) return tree class BaseForest(MultiOutputMixin, BaseEnsemble, metaclass=ABCMeta): """ Base class for forests of trees. Warning: This class should not be used directly. Use derived classes instead. """ @abstractmethod def __init__(self, base_estimator, n_estimators=100, *,
estimator_params=tuple(), bootstrap=False, oob_score=False, n_jobs=None, random_state=None, verbose=0, warm_start=False, class_weight=None, max_samples=None): super().__init__( base_estimator=base_estimator, n_estimators=n_estimators, estimator_params=estimator_params) self.bootstrap = bootstrap self.oob_score = oob_score self.n_jobs = n_jobs self.random_state = random_state self.verbose = verbose self.warm_start = warm_start self.class_weight = class_weight self.max_samples = max_samples def apply(self, X_set): """ Apply trees in the forest to X, return leaf indices. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csr_matrix``. Returns ------- X_leaves : ndarray of shape (n_samples, n_estimators) For each datapoint x in X and for each tree in the forest, return the index of the leaf x ends up in. """ #X = self._validate_X_predict(X) results = Parallel(n_jobs=self.n_jobs, verbose=self.verbose, **_joblib_parallel_args(prefer="threads"))( delayed(tree.apply)(X_set) for tree in self.estimators_) return np.array(results).T def decision_path(self, X_set): # todo currently not working """ Return the decision path in the forest. .. versionadded:: 0.18 Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csr_matrix``. Returns ------- indicator : sparse matrix of shape (n_samples, n_nodes) Return a node indicator matrix where non zero elements indicates that the samples goes through the nodes. The matrix is of CSR format. n_nodes_ptr : ndarray of shape (n_estimators + 1,) The columns from indicator[n_nodes_ptr[i]:n_nodes_ptr[i+1]] gives the indicator value for the i-th estimator. """ #X = self._validate_X_predict(X) indicators = Parallel(n_jobs=self.n_jobs, verbose=self.verbose, **_joblib_parallel_args(prefer='threads'))( delayed(tree.decision_path)(X_set) for tree in self.estimators_) n_nodes = [0] n_nodes.extend([i.shape[1] for i in indicators]) n_nodes_ptr = np.array(n_nodes).cumsum() return sparse_hstack(indicators).tocsr(), n_nodes_ptr def fit(self, X_set, y, sample_weight=None): # Validate or convert input data if issparse(y): raise ValueError( "sparse multilabel-indicator for y is not supported." ) # X, y = self._validate_data(X, y, multi_output=True, # accept_sparse="csc", dtype=DTYPE) if sample_weight is not None: sample_weight = _check_sample_weight(sample_weight, X_set) # Remap output self.n_features_ = X_set.shape[1] y = np.atleast_1d(y) if y.ndim == 2 and y.shape[1] == 1: warn("A column-vector y was passed when a 1d array was" " expected. Please change the shape of y to " "(n_samples,), for example using ravel().", DataConversionWarning, stacklevel=2) if y.ndim == 1: # reshape is necessary to preserve the data contiguity against vs # [:, np.newaxis] that does not. 
y = np.reshape(y, (-1, 1)) self.n_outputs_ = y.shape[1] y, expanded_class_weight = self._validate_y_class_weight(y) # todo: the default was to cast y into float - keep it with it's current dtype #if getattr(y, "dtype", None) != DOUBLE or not y.flags.contiguous: # y = np.ascontiguousarray(y, dtype=DOUBLE) if expanded_class_weight is not None: if sample_weight is not None: sample_weight = sample_weight * expanded_class_weight else: sample_weight = expanded_class_weight # Get bootstrap sample size n_samples_bootstrap = _get_n_samples_bootstrap( n_samples=X_set.shape[0], max_samples=self.max_samples ) # Check parameters self._validate_estimator() if not self.bootstrap and self.oob_score: raise ValueError("Out of bag estimation only available" " if bootstrap=True") random_state = check_random_state(self.random_state) if not self.warm_start or not hasattr(self, "estimators_"): # Free allocated memory, if any self.estimators_ = [] n_more_estimators = self.n_estimators - len(self.estimators_) if n_more_estimators < 0: raise ValueError('n_estimators=%d must be larger or equal to ' 'len(estimators_)=%d when warm_start==True' % (self.n_estimators, len(self.estimators_))) elif n_more_estimators == 0: warn("Warm-start fitting without increasing n_estimators does not " "fit new trees.") else: if self.warm_start and len(self.estimators_) > 0: # We draw from the random state to get the random state we # would have got if we hadn't used a warm_start. random_state.randint(MAX_INT, size=len(self.estimators_)) trees = [self._make_estimator(append=False, random_state=random_state) for i in range(n_more_estimators)] # Parallel loop: we prefer the threading backend as the Cython code # for fitting the trees is internally releasing the Python GIL # making threading more efficient than multiprocessing in # that case. However, for joblib 0.12+ we respect any # parallel_backend contexts set at a higher level, # since correctness does not rely on using threads. trees = Parallel(n_jobs=self.n_jobs, verbose=self.verbose, **_joblib_parallel_args(prefer='threads'))( delayed(_parallel_build_trees)( t, self, X_set, y, sample_weight, i, len(trees), verbose=self.verbose, class_weight=self.class_weight, n_samples_bootstrap=n_samples_bootstrap) for i, t in enumerate(trees)) # Collect newly grown trees self.estimators_.extend(trees) if self.oob_score: self._set_oob_score(X_set, y) # Decapsulate classes_ attributes if hasattr(self, "classes_") and self.n_outputs_ == 1: self.n_classes_ = self.n_classes_[0] self.classes_ = self.classes_[0] return self @abstractmethod def _set_oob_score(self, X_set, y): """ Calculate out of bag predictions and score.""" def _validate_y_class_weight(self, y): # Default implementation return y, None def _validate_X_predict(self, X): """ Validate X whenever one tries to predict, apply, predict_proba.""" check_is_fitted(self) return self.estimators_[0]._validate_X_predict(X, check_input=True) @property def feature_importances_(self): """ The impurity-based feature importances. The higher, the more important the feature. The importance of a feature is computed as the (normalized) total reduction of the criterion brought by that feature. It is also known as the Gini importance. Warning: impurity-based feature importances can be misleading for high cardinality features (many unique values). See :func:`sklearn.inspection.permutation_importance` as an alternative. 
Returns ------- feature_importances_ : ndarray of shape (n_features,) The values of this array sum to 1, unless all trees are single node trees consisting of only the root node, in which case it will be an array of zeros. """ check_is_fitted(self) all_importances = Parallel(n_jobs=self.n_jobs, **_joblib_parallel_args(prefer='threads'))( delayed(getattr)(tree, 'feature_importances_') for tree in self.estimators_ if tree.tree_.node_count > 1) if not all_importances: return np.zeros(self.n_features_, dtype=np.float64) all_importances = np.mean(all_importances, axis=0, dtype=np.float64) return all_importances / np.sum(all_importances) def _accumulate_prediction(predict, X_set, out, lock): """ This is a utility function for joblib's Parallel. It can't go locally in ForestClassifier or ForestRegressor, because joblib complains that it cannot pickle it when placed there. """ prediction = predict(X_set) with lock: if len(out) == 1: out[0] += prediction else: for i in range(len(out)): out[i] += prediction[i] class SetForestClassifier(ClassifierMixin, BaseForest, metaclass=ABCMeta): """ Base class for forest of trees-based classifiers. Warning: This class should not be used directly. Use derived classes instead. """ @abstractmethod def __init__(self, base_estimator, n_estimators=100, *, estimator_params=tuple(), bootstrap=False, oob_score=False, n_jobs=None, random_state=None, verbose=0, warm_start=False, class_weight=None, max_samples=None): super().__init__( base_estimator, n_estimators=n_estimators, estimator_params=estimator_params, bootstrap=bootstrap, oob_score=oob_score, n_jobs=n_jobs, random_state=random_state, verbose=verbose, warm_start=warm_start, class_weight=class_weight, max_samples=max_samples) def _set_oob_score(self, X_set, y): """ Compute out-of-bag score.""" #X = check_array(X, dtype=DTYPE, accept_sparse='csr') n_classes_ = self.n_classes_ n_samples = y.shape[0] oob_decision_function = [] oob_score = 0.0 predictions = [np.zeros((n_samples, n_classes_[k])) for k in range(self.n_outputs_)] n_samples_bootstrap = _get_n_samples_bootstrap( n_samples, self.max_samples ) for estimator in self.estimators_: unsampled_indices = _generate_unsampled_indices( estimator.random_state, n_samples, n_samples_bootstrap) X_subsample = X_set.get_subset(unsampled_indices) p_estimator = estimator.predict_proba(X_subsample) if self.n_outputs_ == 1: p_estimator = [p_estimator] for k in range(self.n_outputs_): predictions[k][unsampled_indices, :] += p_estimator[k] for k in range(self.n_outputs_): if (predictions[k].sum(axis=1) == 0).any(): warn("Some inputs do not have OOB scores. 
" "This probably means too few trees were used " "to compute any reliable oob estimates.") decision = (predictions[k] / predictions[k].sum(axis=1)[:, np.newaxis]) oob_decision_function.append(decision) oob_score += np.mean(y[:, k] == np.argmax(predictions[k], axis=1), axis=0) if self.n_outputs_ == 1: self.oob_decision_function_ = oob_decision_function[0] else: self.oob_decision_function_ = oob_decision_function self.oob_score_ = oob_score / self.n_outputs_ def _validate_y_class_weight(self, y): check_classification_targets(y) y = np.copy(y) expanded_class_weight = None if self.class_weight is not None: y_original = np.copy(y) self.classes_ = [] self.n_classes_ = [] y_store_unique_indices = np.zeros(y.shape, dtype=np.int) for k in range(self.n_outputs_): classes_k, y_store_unique_indices[:, k] = \ np.unique(y[:, k], return_inverse=True) self.classes_.append(classes_k) self.n_classes_.append(classes_k.shape[0]) y = y_store_unique_indices if self.class_weight is not None: valid_presets = ('balanced', 'balanced_subsample') if isinstance(self.class_weight, str): if self.class_weight not in valid_presets: raise ValueError('Valid presets for class_weight include ' '"balanced" and "balanced_subsample".' 'Given "%s".' % self.class_weight) if self.warm_start: warn('class_weight presets "balanced" or ' '"balanced_subsample" are ' 'not recommended for warm_start if the fitted data ' 'differs from the full dataset. In order to use ' '"balanced" weights, use compute_class_weight ' '("balanced", classes, y). In place of y you can use ' 'a large enough sample of the full training set ' 'target to properly estimate the class frequency ' 'distributions. Pass the resulting weights as the ' 'class_weight parameter.') if (self.class_weight != 'balanced_subsample' or not self.bootstrap): if self.class_weight == "balanced_subsample": class_weight = "balanced" else: class_weight = self.class_weight expanded_class_weight = compute_sample_weight(class_weight, y_original) return y, expanded_class_weight def predict(self, X_set): """ Predict class for X. The predicted class of an input sample is a vote by the trees in the forest, weighted by their probability estimates. That is, the predicted class is the one with highest mean probability estimate across the trees. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csr_matrix``. Returns ------- y : ndarray of shape (n_samples,) or (n_samples, n_outputs) The predicted classes. """ proba = self.predict_proba(X_set) if self.n_outputs_ == 1: return self.classes_.take(np.argmax(proba, axis=1), axis=0) else: n_samples = proba[0].shape[0] # all dtypes should be the same, so just take the first class_type = self.classes_[0].dtype predictions = np.empty((n_samples, self.n_outputs_), dtype=class_type) for k in range(self.n_outputs_): predictions[:, k] = self.classes_[k].take(np.argmax(proba[k], axis=1), axis=0) return predictions def predict_proba(self, X_set): """ Predict class probabilities for X. The predicted class probabilities of an input sample are computed as the mean predicted class probabilities of the trees in the forest. The class probability of a single tree is the fraction of samples of the same class in a leaf. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. 
Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csr_matrix``. Returns ------- p : ndarray of shape (n_samples, n_classes), or a list of n_outputs such arrays if n_outputs > 1. The class probabilities of the input samples. The order of the classes corresponds to that in the attribute :term:`classes_`. """ check_is_fitted(self) # Check data # X = self._validate_X_predict(X) # Assign chunk of trees to jobs n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs) # avoid storing the output of every estimator by summing them here all_proba = [np.zeros((X_set.shape[0], j), dtype=np.float64) for j in np.atleast_1d(self.n_classes_)] lock = threading.Lock() Parallel(n_jobs=n_jobs, verbose=self.verbose, **_joblib_parallel_args(require="sharedmem"))( delayed(_accumulate_prediction)(e.predict_proba, X_set, all_proba, lock) for e in self.estimators_) for proba in all_proba: proba /= len(self.estimators_) if len(all_proba) == 1: return all_proba[0] else: return all_proba def predict_log_proba(self, X_set): """ Predict class log-probabilities for X. The predicted class log-probabilities of an input sample is computed as the log of the mean predicted class probabilities of the trees in the forest. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csr_matrix``. Returns ------- p : ndarray of shape (n_samples, n_classes), or a list of n_outputs such arrays if n_outputs > 1. The class probabilities of the input samples. The order of the classes corresponds to that in the attribute :term:`classes_`. """ proba = self.predict_proba(X_set) if self.n_outputs_ == 1: return np.log(proba) else: for k in range(self.n_outputs_): proba[k] = np.log(proba[k]) return proba class SetForestRegressor(RegressorMixin, BaseForest, metaclass=ABCMeta): """ Base class for forest of trees-based regressors. Warning: This class should not be used directly. Use derived classes instead. """ @abstractmethod def __init__(self, base_estimator, n_estimators=100, *, estimator_params=tuple(), bootstrap=False, oob_score=False, n_jobs=None, random_state=None, verbose=0, warm_start=False, max_samples=None): super().__init__( base_estimator, n_estimators=n_estimators, estimator_params=estimator_params, bootstrap=bootstrap, oob_score=oob_score, n_jobs=n_jobs, random_state=random_state, verbose=verbose, warm_start=warm_start, max_samples=max_samples) def predict(self, X_set): """ Predict regression target for X. The predicted regression target of an input sample is computed as the mean predicted regression targets of the trees in the forest. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) The input samples. Internally, its dtype will be converted to ``dtype=np.float32``. If a sparse matrix is provided, it will be converted into a sparse ``csr_matrix``. Returns ------- y : ndarray of shape (n_samples,) or (n_samples, n_outputs) The predicted values. 
""" check_is_fitted(self) # Check data # X = self._validate_X_predict(X) # Assign chunk of trees to jobs n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs) # avoid storing the output of every estimator by summing them here if self.n_outputs_ > 1: y_hat = np.zeros((X_set.shape[0], self.n_outputs_), dtype=np.float64) else: y_hat = np.zeros((X_set.shape[0]), dtype=np.float64) # Parallel loop lock = threading.Lock() Parallel(n_jobs=n_jobs, verbose=self.verbose, **_joblib_parallel_args(require="sharedmem"))( delayed(_accumulate_prediction)(e.predict, X_set, [y_hat], lock) for e in self.estimators_) y_hat /= len(self.estimators_) return y_hat def _set_oob_score(self, X_set, y): """ Compute out-of-bag scores.""" # X = check_array(X, dtype=DTYPE, accept_sparse='csr') n_samples = y.shape[0] predictions = np.zeros((n_samples, self.n_outputs_)) n_predictions = np.zeros((n_samples, self.n_outputs_)) n_samples_bootstrap = _get_n_samples_bootstrap( n_samples, self.max_samples ) for estimator in self.estimators_: unsampled_indices = _generate_unsampled_indices( estimator.random_state, n_samples, n_samples_bootstrap) X_subset = X_set.get_subset(unsampled_indices) p_estimator = estimator.predict(X_subset) if self.n_outputs_ == 1: p_estimator = p_estimator[:, np.newaxis] predictions[unsampled_indices, :] += p_estimator n_predictions[unsampled_indices, :] += 1 if (n_predictions == 0).any(): warn("Some inputs do not have OOB scores. " "This probably means too few trees were used " "to compute any reliable oob estimates.") n_predictions[n_predictions == 0] = 1 predictions /= n_predictions self.oob_prediction_ = predictions if self.n_outputs_ == 1: self.oob_prediction_ = \ self.oob_prediction_.reshape((n_samples, )) self.oob_score_ = 0.0 for k in range(self.n_outputs_): self.oob_score_ += r2_score(y[:, k], predictions[:, k]) self.oob_score_ /= self.n_outputs_ def _compute_partial_dependence_recursion(self, grid, target_features): """Fast partial dependence computation. Parameters ---------- grid : ndarray of shape (n_samples, n_target_features) The grid points on which the partial dependence should be evaluated. target_features : ndarray of shape (n_target_features) The set of target features for which the partial dependence should be evaluated. Returns ------- averaged_predictions : ndarray of shape (n_samples,) The value of the partial dependence function on each grid point. """ grid = np.asarray(grid, dtype=DTYPE, order='C') averaged_predictions = np.zeros(shape=grid.shape[0], dtype=np.float64, order='C') for tree in self.estimators_: # Note: we don't sum in parallel because the GIL isn't released in # the fast method. 
tree.tree_.compute_partial_dependence( grid, target_features, averaged_predictions) # Average over the forest averaged_predictions /= len(self.estimators_) return averaged_predictions class SetRandomForestClassifier(SetForestClassifier): @_deprecate_positional_args def __init__(self, n_estimators=100, *, criterion="gini", max_depth=None, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0., max_features="auto", max_leaf_nodes=None, min_impurity_decrease=0., min_impurity_split=None, splitter='xgboost', operations=OPERATIONS, use_attention_set=True, attention_set_limit=1, bootstrap=True, oob_score=False, n_jobs=None, random_state=None, verbose=0, warm_start=False, class_weight=None, ccp_alpha=0.0, max_samples=None): super().__init__( base_estimator=SetTree(), n_estimators=n_estimators, estimator_params=tuple(SetTree().get_params()), bootstrap=bootstrap, oob_score=oob_score, n_jobs=n_jobs, random_state=random_state, verbose=verbose, warm_start=warm_start, class_weight=class_weight, max_samples=max_samples) self.criterion = criterion self.max_depth = max_depth self.min_samples_split = min_samples_split self.min_samples_leaf = min_samples_leaf self.min_weight_fraction_leaf = min_weight_fraction_leaf self.max_features = max_features self.max_leaf_nodes = max_leaf_nodes self.min_impurity_decrease = min_impurity_decrease self.min_impurity_split = min_impurity_split self.operations = operations self.splitter = splitter self.use_attention_set = use_attention_set self.attention_set_limit = attention_set_limit self.classifier = True self.ccp_alpha = ccp_alpha class SetRandomForestRegressor(SetForestRegressor): @_deprecate_positional_args def __init__(self, n_estimators=100, *, criterion="mse", max_depth=None, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0., max_features="auto", max_leaf_nodes=None, min_impurity_decrease=0., min_impurity_split=None, splitter='xgboost', operations=OPERATIONS, use_attention_set=True, attention_set_limit=1, bootstrap=True, oob_score=False, n_jobs=None, random_state=None, verbose=0, warm_start=False, ccp_alpha=0.0, max_samples=None): super().__init__( base_estimator=SetTree(), n_estimators=n_estimators, estimator_params=tuple(SetTree().get_params()), bootstrap=bootstrap, oob_score=oob_score, n_jobs=n_jobs, random_state=random_state, verbose=verbose, warm_start=warm_start, max_samples=max_samples) self.criterion = criterion self.max_depth = max_depth self.min_samples_split = min_samples_split self.min_samples_leaf = min_samples_leaf self.min_weight_fraction_leaf = min_weight_fraction_leaf self.max_features = max_features self.max_leaf_nodes = max_leaf_nodes self.min_impurity_decrease = min_impurity_decrease self.min_impurity_split = min_impurity_split self.operations = operations self.splitter = splitter self.use_attention_set = use_attention_set self.attention_set_limit = attention_set_limit self.classifier = False self.ccp_alpha = ccp_alpha
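A minimal numpy sketch of the soft-voting scheme that predict_proba implements above: each tree contributes one probability matrix, the matrices are accumulated into a single shared buffer (what _accumulate_prediction does under its lock), and the sum is divided by the number of trees. The toy tree_probas values below are made up for illustration; only the accumulate-then-normalize logic mirrors the source.

import numpy as np

# Three hypothetical trees voting on four samples and two classes.
tree_probas = [
    np.array([[0.9, 0.1], [0.2, 0.8], [0.5, 0.5], [0.7, 0.3]]),
    np.array([[0.8, 0.2], [0.4, 0.6], [0.6, 0.4], [0.9, 0.1]]),
    np.array([[1.0, 0.0], [0.3, 0.7], [0.4, 0.6], [0.6, 0.4]]),
]

# Sum into one buffer instead of stacking, as _accumulate_prediction does.
out = np.zeros_like(tree_probas[0])
for proba in tree_probas:
    out += proba
out /= len(tree_probas)        # mean probability across the trees
print(out)                     # averaged class probabilities
print(np.argmax(out, axis=1))  # predicted class index per sample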
Container.Styled.js
import styled from 'styled-components'
export const Container = styled.div` width: 1000px; max-width: 100%; padding: 0 20px; margin: 0 auto; `
borrowck-assign-to-andmut-in-borrowed-loc.rs
// Test that assignments to an `&mut` pointer which is found in a // borrowed (but otherwise non-aliasable) location are illegal. struct S<'a> { pointer: &'a mut isize } fn
<'a>(p: &'a mut S<'a>) -> S<'a> { S { pointer: &mut *p.pointer } } fn main() { let mut x = 1; { let mut y = S { pointer: &mut x }; let z = copy_borrowed_ptr(&mut y); *y.pointer += 1; //~^ ERROR cannot use `*y.pointer` //~| ERROR cannot assign to `*y.pointer` *z.pointer += 1; } }
copy_borrowed_ptr
shared.go
// Package pan extracts download links for files shared via Baidu Netdisk package pan import ( "bytes" "fmt" "github.com/iikira/BaiduPCS-Go/requester" "github.com/json-iterator/go" "net/http" "net/url" "path" "strings" ) // SharedInfo holds the information of a Baidu Netdisk share page type SharedInfo struct { SharedURL string HTTPS bool UK int64 `json:"uk"` // Baidu Netdisk user id ShareID int64 `json:"shareid"` // share id RootSharePath string `json:"rootSharePath"` // shared directory, relative to the sharer's netdisk root Timestamp int64 // unix timestamp Sign []byte // signature Client *requester.HTTPClient } // NewSharedInfo parses the information of a Baidu Netdisk share page, // sharedURL is the share link func NewSharedInfo(sharedURL string) (si *SharedInfo) { return &SharedInfo{ SharedURL: sharedURL, } } func (si *SharedInfo) inited() bool { return si.UK != 0 && si.ShareID != 0 && si.RootSharePath != "" } func (si *SharedInfo) lazyInit() { if si.Client == nil { si.Client = requester.NewHTTPClient() } } // SetHTTPS sets whether to use https func (si *SharedInfo) SetHTTPS(https bool) { si.HTTPS = https } func (si *SharedInfo) getScheme() string { if si.HTTPS { return "https" } else { return "http" } } // Auth verifies the extraction code, // passwd is the extraction code, leave empty if there is none func (si *SharedInfo) Auth(passwd string) error { if si.SharedURL == "" { return ErrSharedInfoNotSetSharedURL } si.lazyInit() // do not follow redirects automatically si.Client.CheckRedirect = func(req *http.Request, via []*http.Request) error { return http.ErrUseLastResponse } resp, err := si.Client.Req("GET", si.SharedURL, nil, nil) if resp != nil { defer resp.Body.Close() } if err != nil { return err } switch resp.StatusCode / 100 { case 3: // an extraction code must be entered locURL, err := resp.Location() if err != nil { return fmt.Errorf("checking extraction code, failed to extract Location, %s", err) } // verify the extraction code body, err := si.Client.Fetch("POST", si.getScheme()+"://pan.baidu.com/share/verify?"+locURL.RawQuery, map[string]string{ "pwd": passwd, "vcode": "", "vcode_str": "", }, map[string]string{ "Content-Type": "application/x-www-form-urlencoded", "Referer": "https://pan.baidu.com/", }) if err != nil { return fmt.Errorf("network error while verifying the extraction code, %s", err) } jsonData := &RemoteErrInfo{} err = jsoniter.Unmarshal(body, jsonData) if err != nil { return fmt.Errorf("verifying extraction code, failed to parse json data, %s", err) } switch jsonData.ErrNo { case 0: // code is correct break default: return fmt.Errorf("error while verifying the extraction code, %s", jsonData) } case 4, 5: return fmt.Errorf(resp.Status) } return nil } // InitInfo fetches UK, ShareID and RootSharePath; if there is an extraction code, it must be verified first func (si *SharedInfo) InitInfo() error { si.lazyInit() // must identify as a mobile browser, otherwise the data may not be captured si.Client.SetUserAgent("Mozilla/5.0 (Linux; Android 7.0; HUAWEI NXT-AL10 Build/HUAWEINXT-AL10) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.137 Mobile Safari/537.36") body, err := si.Client.Fetch("GET", si.SharedURL, nil, nil) if err != nil { return err } rawYunData := YunDataExp.FindSubmatch(body) if len(rawYunData) < 2 { // check whether an extraction code is required; the Chinese literal matches the page's prompt text and must stay as-is if bytes.Contains(body, []byte("请输入提取密码")) { return fmt.Errorf("an extraction code is required") } return fmt.Errorf("failed to parse share page data") } err = jsoniter.Unmarshal(rawYunData[1], si) if err != nil { return fmt.Errorf("share page, failed to parse json data, %s", err) } if si.UK == 0 || si.ShareID == 0 { return fmt.Errorf("share page, failed to parse json data, shareid or uk not found") } return nil } // FileDirectory holds the information of a file or directory type FileDirectory struct { FsID int64 `json:"fs_id"` // fs_id Path string `json:"path"` // path Filename string `json:"server_filename"` // file or directory name Ctime int64 `json:"server_ctime"` // creation time Mtime int64 `json:"server_mtime"` // modification time MD5 string `json:"md5"` // md5 value Size int64 `json:"size"` // file size (0 for directories) Isdir int `json:"isdir"` // whether it is a directory Dlink string `json:"dlink"` // direct download link } // fileDirectoryString is FileDirectory with every field typed as string type fileDirectoryString struct { FsID string `json:"fs_id"` // 
fs_id Path string `json:"path"` // path Filename string `json:"server_filename"` // file or directory name Ctime string `json:"server_ctime"` // creation time Mtime string `json:"server_mtime"` // modification time MD5 string `json:"md5"` // md5 value Size string `json:"size"` // file size (0 for directories) Isdir string `json:"isdir"` // whether it is a directory Dlink string `json:"dlink"` // download link } func (fdss *fileDirectoryString) convert() *FileDirectory { return &FileDirectory{ FsID: MustParseInt64(fdss.FsID), Path: fdss.Path, Filename: fdss.Filename, Ctime: MustParseInt64(fdss.Ctime), Mtime: MustParseInt64(fdss.Mtime), MD5: fdss.MD5, Size: MustParseInt64(fdss.Size), Isdir: MustParseInt(fdss.Isdir), Dlink: fdss.Dlink, } } // List fetches the file list; subDir is a directory relative to the shared directory func (si *SharedInfo) List(subDir string) (fds []*FileDirectory, err error) { if !si.inited() { return nil, ErrSharedInfoNotInit } si.lazyInit() si.signature() var ( isRoot = 0 escapedDir string ) cleanedSubDir := path.Clean(subDir) if cleanedSubDir == "." || cleanedSubDir == "/" { isRoot = 1 } else { dir := path.Clean(si.RootSharePath + "/" + subDir) escapedDir = url.PathEscape(dir) } listURL := fmt.Sprintf( si.getScheme()+"://pan.baidu.com/share/list?shareid=%d&uk=%d&root=%d&dir=%s&sign=%x&timestamp=%d&devuid=&clienttype=1&channel=android_7.0&version=8.2.0", si.ShareID, si.UK, isRoot, escapedDir, si.Sign, si.Timestamp, ) body, err := si.Client.Fetch("GET", listURL, nil, nil) if err != nil { return nil, fmt.Errorf("network error while fetching the file list, %s", err) } va
!= 0 { // root directory jsonData := struct { *RemoteErrInfo List []*fileDirectoryString `json:"list"` }{ RemoteErrInfo: errInfo, } err = jsoniter.Unmarshal(body, &jsonData) if err == nil { fds = make([]*FileDirectory, len(jsonData.List)) for k, info := range jsonData.List { fds[k] = info.convert() } } } else { jsonData := struct { *RemoteErrInfo List []*FileDirectory `json:"list"` }{ RemoteErrInfo: errInfo, } err = jsoniter.Unmarshal(body, &jsonData) if err == nil { fds = jsonData.List } } if err != nil { return nil, fmt.Errorf("fetching file list, failed to parse json data, %s", err) } if errInfo.ErrNo != 0 { return nil, errInfo } return fds, nil } // Meta fetches the metadata of a file or directory; filePath is relative to the shared directory func (si *SharedInfo) Meta(filePath string) (fd *FileDirectory, err error) { cleanedPath := path.Clean(filePath) dir, fileName := path.Split(cleanedPath) dirInfo, err := si.List(dir) if err != nil { return nil, err } for k := range dirInfo { if strings.Compare(dirInfo[k].Filename, fileName) == 0 { return dirInfo[k], nil } } return nil, fmt.Errorf("no file matched the given path") }
r errInfo = &RemoteErrInfo{} if isRoot
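For reference, the share/list request that List assembles above can be sketched in Python. The parameter set is copied from the fmt.Sprintf format string in the source; the placeholder values and the quote() escaping (an approximation of Go's url.PathEscape) are assumptions for illustration only.

from urllib.parse import quote

# Placeholder values; in the Go code these come from the parsed share page.
shareid, uk, timestamp = 123456, 987654, 1500000000
sign = bytes.fromhex("deadbeef")                      # si.Sign, formatted with %x in Go
escaped_dir = quote("/share_root/some/dir", safe="")  # roughly url.PathEscape

list_url = (
    "https://pan.baidu.com/share/list"
    f"?shareid={shareid}&uk={uk}&root=0&dir={escaped_dir}"
    f"&sign={sign.hex()}&timestamp={timestamp}"
    "&devuid=&clienttype=1&channel=android_7.0&version=8.2.0"
)
print(list_url)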
test_diff.py
""" @brief test log(time=200s) """ import unittest from pyquickhelper.loghelper import fLOG from pyquickhelper.pycode import ExtTestCase from pymyinstall.packaged import small_set class TestDifference(ExtTestCase): def test_diff(self):
def test_diff2(self): fLOG( __file__, self._testMethodName, OutputPrint=__name__ == "__main__") res = small_set() count = {} for mod in res: count[mod.name] = 1 self.assertIn("coverage", count) if __name__ == "__main__": unittest.main()
fLOG( __file__, self._testMethodName, OutputPrint=__name__ == "__main__") name = set(_.name for _ in small_set()) keep = [] for mod in small_set(): if mod.name not in name: keep.append(mod) self.assertGreater(len(keep), 0) for mod in keep: if mod.mname is None: fLOG( "ModuleInstall('{0}', '{1}'),".format(mod.name, mod.kind)) else: fLOG("ModuleInstall('{0}', '{1}', mname='{2}'),".format( mod.name, mod.kind, mod.mname))
main.py
# -*- coding: utf-8 -*- # filename: main.py import web
'/wx', 'Handle', ) if __name__ == '__main__': app = web.application(urls, globals()) app.run()
from handle import Handle urls = (
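handle.py itself is not shown in this file; web.py resolves the string 'Handle' to a class whose GET/POST methods serve the /wx route. A hypothetical stand-in (not the project's real handler, which presumably implements WeChat server verification) that makes the app runnable:

# handle.py -- hypothetical stand-in; the real module is not shown here.
import web

class Handle(object):
    def GET(self):
        # web.py dispatches GET /wx to this method.
        params = web.input()                    # query-string parameters
        return params.get('echostr', 'hello')  # echo a probe value if present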
recursion.py
#!/usr/bin/env python # -*- coding: utf-8 -*- ''' Copyright 2020, Yutong Xie, UIUC. Using recursion to validate BST ''' # Definition for a binary tree node. # class TreeNode(object): # def __init__(self, val=0, left=None, right=None): # self.val = val # self.left = left # self.right = right class Solution(object): def isValidBST(self, root): """ :type root: TreeNode :rtype: bool """ def helper(node, lower = float('-inf'), upper = float('inf')): if not node: return True val = node.val if val <= lower or val >= upper: return False if not helper(node.left, lower, val):
if not helper(node.right, val, upper) : return False return True return helper(root)
return False
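A quick check of the validator above, with TreeNode redefined from the commented header so the snippet is self-contained:

class TreeNode(object):
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right

# Valid BST: 2 with children 1 and 3.
good = TreeNode(2, TreeNode(1), TreeNode(3))
# Invalid: 3 sits in the right subtree of 5 but is smaller than 5.
bad = TreeNode(5, TreeNode(1), TreeNode(4, TreeNode(3), TreeNode(6)))

s = Solution()
print(s.isValidBST(good))  # True
print(s.isValidBST(bad))   # False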
__init__.py
from .scraping import evaluate_comment_reply_pair
]
__all__ = [ "evaluate_comment_reply_pair",
Hero.js
const Hero = () => { return ( <section> <div className="p-24 bg-cover bg-center" style={{ backgroundImage: `url(${bgImage})` }}></div> <div className="max-w-5xl mx-auto"> <div className="flex flex-col sm:flex-row items-center justify-center mt-12 md:mt-16"> <div className="text-center"> <h1 className="font-black text-4xl sm:text-4xl md:text-5xl italic leading-tight px-4">Inspire creators with your ideas</h1> <p className="max-w-xl mt-4 mx-auto text-xl px-4">Creative ideas can be shared in the form of challenges on GoSocial. Budding creators take these as self-challenges to learn new things.</p> <div className="mt-10"> <div className="flex flex-row items-center justify-center"> <svg width="20" height="34" xmlns="http://www.w3.org/2000/svg" xmlnsXlink="http://www.w3.org/1999/xlink"> <defs> <path d="M.893 17.214l16.941-6.613a2.143 2.143 0 000-3.917L.893.071v32.143" id="a"/> </defs> <use stroke="#264BBB" strokeWidth="1.5" fill="#264BBB" strokeLinecap="round" strokeLinejoin="round" href="#a" transform="translate(0 1)" fillRule="evenodd"/> </svg> <p className="text-lg md:text-xl uppercase ml-4">35k Submissions in challenges</p> </div> </div> </div> </div> </div> </section> ); }; export default Hero;
import React from 'react'; import bgImage from './img/hero-bg.jpg'
sp_stats.go
package main import (//"os" "fmt" //"time" //"strings" //"strconv" //"flag" "math" "reflect" //"encoding/json" //"io/ioutil" )
} type stat struct { Name string Value int Count int Avg int Min int Max int } func (s stat) String() string { return fmt.Sprintf("%d from %d points ; %d<%d<%d.", s.Value, s.Count, s.Min, s.Avg, s.Max) } /** Do the Stats process: aggregate the stored points and print per-name statistics @param app current app status @param options currently unused for stats */ func stats_work(app App_Data, options string) { stats_handleFlags() data := readData(app.file_name) uniq_sprints := make(map[string]bool) uniq_names := make(map[string]bool) count_by_name := make(map[string]int) stats := make(map[string]stat) for _, v := range data.Points { if !uniq_sprints[v.Sprint] { uniq_sprints[v.Sprint] = true } if !uniq_names[v.Name] { uniq_names[v.Name] = true } count_by_name[v.Name] += v.Value if val, exists := stats[v.Name]; exists { // update the running aggregate val.Value += v.Value val.Count += 1 val.Avg = val.Value / val.Count val.Min = int(math.Min(float64(val.Min), float64(v.Value))) val.Max = int(math.Max(float64(val.Max), float64(v.Value))) stats[v.Name] = val } else { // first point seen for this name s := stat{} s.Name = v.Name s.Value = v.Value s.Count = 1 s.Avg = s.Value s.Min = s.Value s.Max = s.Value stats[v.Name] = s } } keys := reflect.ValueOf(uniq_sprints).MapKeys() names := reflect.ValueOf(uniq_names).MapKeys() /* | Start | Stop | ------------------ s1| 42 | 32 | s2| 24 | 23 | ------------------ avg 33 | 26.5 */ fmt.Printf("%s -> %s\n", keys, names) for _, v := range stats { fmt.Printf("stats for %s: %s\n", v.Name, v) } }
func stats_handleFlags() {
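The update branch in stats_work keeps a running value/count/avg/min/max per name; the same fold, sketched in Python with made-up sample points:

points = [("Start", 42), ("Stop", 32), ("Start", 24), ("Stop", 23)]

stats = {}
for name, value in points:
    s = stats.setdefault(name, {"value": 0, "count": 0, "min": value, "max": value})
    s["value"] += value
    s["count"] += 1
    s["min"] = min(s["min"], value)
    s["max"] = max(s["max"], value)
    s["avg"] = s["value"] // s["count"]  # integer average, as in the Go code

for name, s in stats.items():
    # Mirrors stat.String(): "<sum> from <count> points ; <min><avg><max>."
    print("stats for %s: %d from %d points ; %d<%d<%d."
          % (name, s["value"], s["count"], s["min"], s["avg"], s["max"]))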
other_intr_en_m4.rs
#[doc = "Register `OTHER_INTR_EN_M4` reader"] pub struct R(crate::R<OTHER_INTR_EN_M4_SPEC>); impl core::ops::Deref for R { type Target = crate::R<OTHER_INTR_EN_M4_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<OTHER_INTR_EN_M4_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<OTHER_INTR_EN_M4_SPEC>) -> Self { R(reader) } } #[doc = "Register `OTHER_INTR_EN_M4` writer"] pub struct W(crate::W<OTHER_INTR_EN_M4_SPEC>); impl core::ops::Deref for W { type Target = crate::W<OTHER_INTR_EN_M4_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<OTHER_INTR_EN_M4_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<OTHER_INTR_EN_M4_SPEC>) -> Self { W(writer) } } #[doc = "M4 SRAM (access during low power) interrupt enable for M4\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum M4_SRAM_INTR_EN_M4_A { #[doc = "0: Disable the interrupt for the power domain"] ENABLE = 0, #[doc = "1: Enable the interrupt for the power domain"] DISABLE = 1, } impl From<M4_SRAM_INTR_EN_M4_A> for bool { #[inline(always)] fn from(variant: M4_SRAM_INTR_EN_M4_A) -> Self { variant as u8 != 0 } } #[doc = "Field `M4_SRAM_INTR_EN_M4` reader - M4 SRAM (access during low power) interrupt enable for M4"] pub struct M4_SRAM_INTR_EN_M4_R(crate::FieldReader<bool, M4_SRAM_INTR_EN_M4_A>); impl M4_SRAM_INTR_EN_M4_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { M4_SRAM_INTR_EN_M4_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> M4_SRAM_INTR_EN_M4_A { match self.bits { false => M4_SRAM_INTR_EN_M4_A::ENABLE, true => M4_SRAM_INTR_EN_M4_A::DISABLE, } } #[doc = "Checks if the value of the field is `ENABLE`"] #[inline(always)] pub fn is_enable(&self) -> bool { **self == M4_SRAM_INTR_EN_M4_A::ENABLE } #[doc = "Checks if the value of the field is `DISABLE`"] #[inline(always)] pub fn is_disable(&self) -> bool { **self == M4_SRAM_INTR_EN_M4_A::DISABLE } } impl core::ops::Deref for M4_SRAM_INTR_EN_M4_R { type Target = crate::FieldReader<bool, M4_SRAM_INTR_EN_M4_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `M4_SRAM_INTR_EN_M4` writer - M4 SRAM (access during low power) interrupt enable for M4"] pub struct M4_SRAM_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> M4_SRAM_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: M4_SRAM_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(M4_SRAM_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(M4_SRAM_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01); self.w } } #[doc = "UART interrupt enable for M4"] pub type UART_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `UART_INTR_EN_M4` reader - UART interrupt enable for M4"] pub type UART_INTR_EN_M4_R = 
M4_SRAM_INTR_EN_M4_R; #[doc = "Field `UART_INTR_EN_M4` writer - UART interrupt enable for M4"] pub struct UART_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> UART_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: UART_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(UART_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(UART_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1); self.w } } #[doc = "Timer interrupt enable for M4"] pub type TIMER_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `TIMER_INTR_EN_M4` reader - Timer interrupt enable for M4"] pub type TIMER_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `TIMER_INTR_EN_M4` writer - Timer interrupt enable for M4"] pub struct TIMER_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> TIMER_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: TIMER_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(TIMER_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(TIMER_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u32 & 0x01) << 2); self.w } } #[doc = "WDOG M4 interrupt enable for M4"] pub type WDOG_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `WDOG_INTR_EN_M4` reader - WDOG M4 interrupt enable for M4"] pub type WDOG_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `WDOG_INTR_EN_M4` writer - WDOG M4 interrupt enable for M4"] pub struct WDOG_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> WDOG_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: WDOG_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(WDOG_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(WDOG_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | ((value as u32 & 0x01) << 3); self.w } } #[doc = "WDOG M4 Reset interrupt enable for M4"] pub type 
WDOG_RST_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `WDOG_RST_EN_M4` reader - WDOG M4 Reset interrupt enable for M4"] pub type WDOG_RST_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `WDOG_RST_EN_M4` writer - WDOG M4 Reset interrupt enable for M4"] pub struct WDOG_RST_EN_M4_W<'a> { w: &'a mut W, } impl<'a> WDOG_RST_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: WDOG_RST_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(WDOG_RST_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(WDOG_RST_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn
(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | ((value as u32 & 0x01) << 4); self.w } } #[doc = "bus timeout interrupt enable for M4"] pub type TIMEOUT_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `TIMEOUT_INTR_EN_M4` reader - bus timeout interrupt enable for M4"] pub type TIMEOUT_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `TIMEOUT_INTR_EN_M4` writer - bus timeout interrupt enable for M4"] pub struct TIMEOUT_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> TIMEOUT_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: TIMEOUT_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(TIMEOUT_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(TIMEOUT_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | ((value as u32 & 0x01) << 5); self.w } } #[doc = "M4 FPU interrupt enable for M4"] pub type FPU_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `FPU_INTR_EN_M4` reader - M4 FPU interrupt enable for M4"] pub type FPU_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `FPU_INTR_EN_M4` writer - M4 FPU interrupt enable for M4"] pub struct FPU_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> FPU_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: FPU_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(FPU_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(FPU_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | ((value as u32 & 0x01) << 6); self.w } } #[doc = "Packet FIFO Bank interrupt enable for M4"] pub type PKFB_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `PKFB_INTR_EN_M4` reader - Packet FIFO Bank interrupt enable for M4"] pub type PKFB_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `PKFB_INTR_EN_M4` writer - Packet FIFO Bank interrupt enable for M4"] pub struct PKFB_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> PKFB_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PKFB_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(PKFB_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { 
self.variant(PKFB_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u32 & 0x01) << 7); self.w } } #[doc = "SPI Master interrupt enable for M4"] pub type SPI_MS_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `SPI_MS_INTR_EN_M4` reader - SPI Master interrupt enable for M4"] pub type SPI_MS_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `SPI_MS_INTR_EN_M4` writer - SPI Master interrupt enable for M4"] pub struct SPI_MS_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> SPI_MS_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: SPI_MS_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(SPI_MS_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(SPI_MS_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 10)) | ((value as u32 & 0x01) << 10); self.w } } #[doc = "Config DMA interrupt enable for M4"] pub type CFG_DMA_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `CFG_DMA_INTR_EN_M4` reader - Config DMA interrupt enable for M4"] pub type CFG_DMA_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `CFG_DMA_INTR_EN_M4` writer - Config DMA interrupt enable for M4"] pub struct CFG_DMA_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> CFG_DMA_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: CFG_DMA_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(CFG_DMA_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(CFG_DMA_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | ((value as u32 & 0x01) << 11); self.w } } #[doc = "PMU Timer interrupt enable for M4"] pub type PMU_TMR_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `PMU_TMR_INTR_EN_M4` reader - PMU Timer interrupt enable for M4"] pub type PMU_TMR_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `PMU_TMR_INTR_EN_M4` writer - PMU Timer interrupt enable for M4"] pub struct PMU_TMR_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> PMU_TMR_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: PMU_TMR_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the 
interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(PMU_TMR_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(PMU_TMR_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 12)) | ((value as u32 & 0x01) << 12); self.w } } #[doc = "ADC interrupt enable for M4"] pub type ADC_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `ADC_INTR_EN_M4` reader - ADC interrupt enable for M4"] pub type ADC_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `ADC_INTR_EN_M4` writer - ADC interrupt enable for M4"] pub struct ADC_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> ADC_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: ADC_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(ADC_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(ADC_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 13)) | ((value as u32 & 0x01) << 13); self.w } } #[doc = "RTC interrupt enable for M4"] pub type RTC_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `RTC_INTR_EN_M4` reader - RTC interrupt enable for M4"] pub type RTC_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `RTC_INTR_EN_M4` writer - RTC interrupt enable for M4"] pub struct RTC_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> RTC_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: RTC_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(RTC_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(RTC_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 14)) | ((value as u32 & 0x01) << 14); self.w } } #[doc = "Reset interrupt enable for M4"] pub type RST_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `RST_INTR_EN_M4` reader - Reset interrupt enable for M4"] pub type RST_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `RST_INTR_EN_M4` writer - Reset interrupt enable for M4"] pub struct RST_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> RST_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn 
variant(self, variant: RST_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(RST_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(RST_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 15)) | ((value as u32 & 0x01) << 15); self.w } } #[doc = "FFE0 other interrupts enable for M4"] pub type FFE0_INTR_OTHERS_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `FFE0_INTR_OTHERS_EN_M4` reader - FFE0 other interrupts enable for M4"] pub type FFE0_INTR_OTHERS_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `FFE0_INTR_OTHERS_EN_M4` writer - FFE0 other interrupts enable for M4"] pub struct FFE0_INTR_OTHERS_EN_M4_W<'a> { w: &'a mut W, } impl<'a> FFE0_INTR_OTHERS_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: FFE0_INTR_OTHERS_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(FFE0_INTR_OTHERS_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(FFE0_INTR_OTHERS_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | ((value as u32 & 0x01) << 16); self.w } } #[doc = "AP Boot interrupt enable for M4"] pub type APBOOT_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `APBOOT_INTR_EN_M4` reader - AP Boot interrupt enable for M4"] pub type APBOOT_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `APBOOT_INTR_EN_M4` writer - AP Boot interrupt enable for M4"] pub struct APBOOT_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> APBOOT_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: APBOOT_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(APBOOT_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(APBOOT_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 18)) | ((value as u32 & 0x01) << 18); self.w } } #[doc = "Absence of LDO30 power good interrupt enable for M4"] pub type LDO30_PG_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `LDO30_PG_INTR_EN_M4` reader - Absence of LDO30 power good interrupt enable for 
M4"] pub type LDO30_PG_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `LDO30_PG_INTR_EN_M4` writer - Absence of LDO30 power good interrupt enable for M4"] pub struct LDO30_PG_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> LDO30_PG_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LDO30_PG_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(LDO30_PG_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(LDO30_PG_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 19)) | ((value as u32 & 0x01) << 19); self.w } } #[doc = "Absence of LDO50 power good interrupt enable for M4"] pub type LDO50_PG_INTR_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `LDO50_PG_INTR_EN_M4` reader - Absence of LDO50 power good interrupt enable for M4"] pub type LDO50_PG_INTR_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `LDO50_PG_INTR_EN_M4` writer - Absence of LDO50 power good interrupt enable for M4"] pub struct LDO50_PG_INTR_EN_M4_W<'a> { w: &'a mut W, } impl<'a> LDO50_PG_INTR_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LDO50_PG_INTR_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(LDO50_PG_INTR_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(LDO50_PG_INTR_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 20)) | ((value as u32 & 0x01) << 20); self.w } } #[doc = "LPSD Voice detected interrupt enable for M4"] pub type LPSD_VOICE_DET_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `LPSD_VOICE_DET_EN_M4` reader - LPSD Voice detected interrupt enable for M4"] pub type LPSD_VOICE_DET_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `LPSD_VOICE_DET_EN_M4` writer - LPSD Voice detected interrupt enable for M4"] pub struct LPSD_VOICE_DET_EN_M4_W<'a> { w: &'a mut W, } impl<'a> LPSD_VOICE_DET_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LPSD_VOICE_DET_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(LPSD_VOICE_DET_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(LPSD_VOICE_DET_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { 
self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 22)) | ((value as u32 & 0x01) << 22); self.w } } #[doc = "Digital Mic Voice detected interrupt enable for M4"] pub type DMIC_VOICE_DET_EN_M4_A = M4_SRAM_INTR_EN_M4_A; #[doc = "Field `DMIC_VOICE_DET_EN_M4` reader - Digital Mic Voice detected interrupt enable for M4"] pub type DMIC_VOICE_DET_EN_M4_R = M4_SRAM_INTR_EN_M4_R; #[doc = "Field `DMIC_VOICE_DET_EN_M4` writer - Digital Mic Voice detected interrupt enable for M4"] pub struct DMIC_VOICE_DET_EN_M4_W<'a> { w: &'a mut W, } impl<'a> DMIC_VOICE_DET_EN_M4_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: DMIC_VOICE_DET_EN_M4_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disable the interrupt for the power domain"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(DMIC_VOICE_DET_EN_M4_A::ENABLE) } #[doc = "Enable the interrupt for the power domain"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(DMIC_VOICE_DET_EN_M4_A::DISABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 23)) | ((value as u32 & 0x01) << 23); self.w } } impl R { #[doc = "Bit 0 - M4 SRAM (access during low power) interrupt enable for M4"] #[inline(always)] pub fn m4_sram_intr_en_m4(&self) -> M4_SRAM_INTR_EN_M4_R { M4_SRAM_INTR_EN_M4_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - UART interrupt enable for M4"] #[inline(always)] pub fn uart_intr_en_m4(&self) -> UART_INTR_EN_M4_R { UART_INTR_EN_M4_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - Timer interrupt enable for M4"] #[inline(always)] pub fn timer_intr_en_m4(&self) -> TIMER_INTR_EN_M4_R { TIMER_INTR_EN_M4_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - WDOG M4 interrupt enable for M4"] #[inline(always)] pub fn wdog_intr_en_m4(&self) -> WDOG_INTR_EN_M4_R { WDOG_INTR_EN_M4_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - WDOG M4 Reset interrupt enable for M4"] #[inline(always)] pub fn wdog_rst_en_m4(&self) -> WDOG_RST_EN_M4_R { WDOG_RST_EN_M4_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 5 - bus timeout interrupt enable for M4"] #[inline(always)] pub fn timeout_intr_en_m4(&self) -> TIMEOUT_INTR_EN_M4_R { TIMEOUT_INTR_EN_M4_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bit 6 - M4 FPU interrupt enable for M4"] #[inline(always)] pub fn fpu_intr_en_m4(&self) -> FPU_INTR_EN_M4_R { FPU_INTR_EN_M4_R::new(((self.bits >> 6) & 0x01) != 0) } #[doc = "Bit 7 - Packet FIFO Bank interrupt enable for M4"] #[inline(always)] pub fn pkfb_intr_en_m4(&self) -> PKFB_INTR_EN_M4_R { PKFB_INTR_EN_M4_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 10 - SPI Master interrupt enable for M4"] #[inline(always)] pub fn spi_ms_intr_en_m4(&self) -> SPI_MS_INTR_EN_M4_R { SPI_MS_INTR_EN_M4_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bit 11 - Config DMA interrupt enable for M4"] #[inline(always)] pub fn cfg_dma_intr_en_m4(&self) -> CFG_DMA_INTR_EN_M4_R { CFG_DMA_INTR_EN_M4_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bit 12 - PMU Timer interrupt enable for M4"] #[inline(always)] pub fn pmu_tmr_intr_en_m4(&self) -> PMU_TMR_INTR_EN_M4_R { 
PMU_TMR_INTR_EN_M4_R::new(((self.bits >> 12) & 0x01) != 0) } #[doc = "Bit 13 - ADC interrupt enable for M4"] #[inline(always)] pub fn adc_intr_en_m4(&self) -> ADC_INTR_EN_M4_R { ADC_INTR_EN_M4_R::new(((self.bits >> 13) & 0x01) != 0) } #[doc = "Bit 14 - RTC interrupt enable for M4"] #[inline(always)] pub fn rtc_intr_en_m4(&self) -> RTC_INTR_EN_M4_R { RTC_INTR_EN_M4_R::new(((self.bits >> 14) & 0x01) != 0) } #[doc = "Bit 15 - Reset interrupt enable for M4"] #[inline(always)] pub fn rst_intr_en_m4(&self) -> RST_INTR_EN_M4_R { RST_INTR_EN_M4_R::new(((self.bits >> 15) & 0x01) != 0) } #[doc = "Bit 16 - FFE0 other interrupts enable for M4"] #[inline(always)] pub fn ffe0_intr_others_en_m4(&self) -> FFE0_INTR_OTHERS_EN_M4_R { FFE0_INTR_OTHERS_EN_M4_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 18 - AP Boot interrupt enable for M4"] #[inline(always)] pub fn apboot_intr_en_m4(&self) -> APBOOT_INTR_EN_M4_R { APBOOT_INTR_EN_M4_R::new(((self.bits >> 18) & 0x01) != 0) } #[doc = "Bit 19 - Absence of LDO30 power good interrupt enable for M4"] #[inline(always)] pub fn ldo30_pg_intr_en_m4(&self) -> LDO30_PG_INTR_EN_M4_R { LDO30_PG_INTR_EN_M4_R::new(((self.bits >> 19) & 0x01) != 0) } #[doc = "Bit 20 - Absence of LDO50 power good interrupt enable for M4"] #[inline(always)] pub fn ldo50_pg_intr_en_m4(&self) -> LDO50_PG_INTR_EN_M4_R { LDO50_PG_INTR_EN_M4_R::new(((self.bits >> 20) & 0x01) != 0) } #[doc = "Bit 22 - LPSD Voice detected interrupt enable for M4"] #[inline(always)] pub fn lpsd_voice_det_en_m4(&self) -> LPSD_VOICE_DET_EN_M4_R { LPSD_VOICE_DET_EN_M4_R::new(((self.bits >> 22) & 0x01) != 0) } #[doc = "Bit 23 - Digital Mic Voice detected interrupt enable for M4"] #[inline(always)] pub fn dmic_voice_det_en_m4(&self) -> DMIC_VOICE_DET_EN_M4_R { DMIC_VOICE_DET_EN_M4_R::new(((self.bits >> 23) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - M4 SRAM (access during low power) interrupt enable for M4"] #[inline(always)] pub fn m4_sram_intr_en_m4(&mut self) -> M4_SRAM_INTR_EN_M4_W { M4_SRAM_INTR_EN_M4_W { w: self } } #[doc = "Bit 1 - UART interrupt enable for M4"] #[inline(always)] pub fn uart_intr_en_m4(&mut self) -> UART_INTR_EN_M4_W { UART_INTR_EN_M4_W { w: self } } #[doc = "Bit 2 - Timer interrupt enable for M4"] #[inline(always)] pub fn timer_intr_en_m4(&mut self) -> TIMER_INTR_EN_M4_W { TIMER_INTR_EN_M4_W { w: self } } #[doc = "Bit 3 - WDOG M4 interrupt enable for M4"] #[inline(always)] pub fn wdog_intr_en_m4(&mut self) -> WDOG_INTR_EN_M4_W { WDOG_INTR_EN_M4_W { w: self } } #[doc = "Bit 4 - WDOG M4 Reset interrupt enable for M4"] #[inline(always)] pub fn wdog_rst_en_m4(&mut self) -> WDOG_RST_EN_M4_W { WDOG_RST_EN_M4_W { w: self } } #[doc = "Bit 5 - bus timeout interrupt enable for M4"] #[inline(always)] pub fn timeout_intr_en_m4(&mut self) -> TIMEOUT_INTR_EN_M4_W { TIMEOUT_INTR_EN_M4_W { w: self } } #[doc = "Bit 6 - M4 FPU interrupt enable for M4"] #[inline(always)] pub fn fpu_intr_en_m4(&mut self) -> FPU_INTR_EN_M4_W { FPU_INTR_EN_M4_W { w: self } } #[doc = "Bit 7 - Packet FIFO Bank interrupt enable for M4"] #[inline(always)] pub fn pkfb_intr_en_m4(&mut self) -> PKFB_INTR_EN_M4_W { PKFB_INTR_EN_M4_W { w: self } } #[doc = "Bit 10 - SPI Master interrupt enable for M4"] #[inline(always)] pub fn spi_ms_intr_en_m4(&mut self) -> SPI_MS_INTR_EN_M4_W { SPI_MS_INTR_EN_M4_W { w: self } } #[doc = "Bit 11 - Config DMA interrupt enable for M4"] #[inline(always)] pub fn cfg_dma_intr_en_m4(&mut self) -> CFG_DMA_INTR_EN_M4_W { CFG_DMA_INTR_EN_M4_W { w: self } } #[doc = "Bit 12 - PMU Timer interrupt enable for M4"] 
#[inline(always)] pub fn pmu_tmr_intr_en_m4(&mut self) -> PMU_TMR_INTR_EN_M4_W { PMU_TMR_INTR_EN_M4_W { w: self } } #[doc = "Bit 13 - ADC interrupt enable for M4"] #[inline(always)] pub fn adc_intr_en_m4(&mut self) -> ADC_INTR_EN_M4_W { ADC_INTR_EN_M4_W { w: self } } #[doc = "Bit 14 - RTC interrupt enable for M4"] #[inline(always)] pub fn rtc_intr_en_m4(&mut self) -> RTC_INTR_EN_M4_W { RTC_INTR_EN_M4_W { w: self } } #[doc = "Bit 15 - Reset interrupt enable for M4"] #[inline(always)] pub fn rst_intr_en_m4(&mut self) -> RST_INTR_EN_M4_W { RST_INTR_EN_M4_W { w: self } } #[doc = "Bit 16 - FFE0 other interrupts enable for M4"] #[inline(always)] pub fn ffe0_intr_others_en_m4(&mut self) -> FFE0_INTR_OTHERS_EN_M4_W { FFE0_INTR_OTHERS_EN_M4_W { w: self } } #[doc = "Bit 18 - AP Boot interrupt enable for M4"] #[inline(always)] pub fn apboot_intr_en_m4(&mut self) -> APBOOT_INTR_EN_M4_W { APBOOT_INTR_EN_M4_W { w: self } } #[doc = "Bit 19 - Absence of LDO30 power good interrupt enable for M4"] #[inline(always)] pub fn ldo30_pg_intr_en_m4(&mut self) -> LDO30_PG_INTR_EN_M4_W { LDO30_PG_INTR_EN_M4_W { w: self } } #[doc = "Bit 20 - Absence of LDO50 power good interrupt enable for M4"] #[inline(always)] pub fn ldo50_pg_intr_en_m4(&mut self) -> LDO50_PG_INTR_EN_M4_W { LDO50_PG_INTR_EN_M4_W { w: self } } #[doc = "Bit 22 - LPSD Voice detected interrupt enable for M4"] #[inline(always)] pub fn lpsd_voice_det_en_m4(&mut self) -> LPSD_VOICE_DET_EN_M4_W { LPSD_VOICE_DET_EN_M4_W { w: self } } #[doc = "Bit 23 - Digital Mic Voice detected interrupt enable for M4"] #[inline(always)] pub fn dmic_voice_det_en_m4(&mut self) -> DMIC_VOICE_DET_EN_M4_W { DMIC_VOICE_DET_EN_M4_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Various interrupt enable for M4\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [other_intr_en_m4](index.html) module"] pub struct OTHER_INTR_EN_M4_SPEC; impl crate::RegisterSpec for OTHER_INTR_EN_M4_SPEC { type Ux = u32; } #[doc = "`read()` method returns [other_intr_en_m4::R](R) reader structure"] impl crate::Readable for OTHER_INTR_EN_M4_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [other_intr_en_m4::W](W) writer structure"] impl crate::Writable for OTHER_INTR_EN_M4_SPEC { type Writer = W; } #[doc = "`reset()` method sets OTHER_INTR_EN_M4 to value 0"] impl crate::Resettable for OTHER_INTR_EN_M4_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
clear_bit
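Every writer above performs the same read-modify-write on one bit of the 32-bit register value: mask the bit out, then OR in the new value at its position. The bit arithmetic, sketched in Python with the SPI master enable (bit 10) as the example:

def write_bit(bits: int, pos: int, value: bool) -> int:
    # Same expression as the generated writers, e.g. for bit 10:
    #   (bits & !(0x01 << 10)) | ((value as u32 & 0x01) << 10)
    # Python's ~ needs an explicit 32-bit mask where Rust's ! on u32 does not.
    return ((bits & ~(1 << pos)) & 0xFFFF_FFFF) | ((int(value) & 1) << pos)

reg = 0x0000_0000
reg = write_bit(reg, 10, True)   # spi_ms_intr_en_m4().set_bit()
assert reg == 0x0000_0400
reg = write_bit(reg, 10, False)  # spi_ms_intr_en_m4().clear_bit()
assert reg == 0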
day9.go
package day9 import ( "errors" "fmt" "github.com/lluiscampos/advent-of-code-2020/util" ) type XmasSequence struct { pre int nums []int
err := fmt.Errorf("pre %d >= len(nums) %d", x.pre, len(x.nums)) return -1, -1, err } for i, num := range x.nums[x.pre:] { valid := false for j, n := range x.nums[i : x.pre+i] { for _, m := range x.nums[i+j+1 : x.pre+i] { // fmt.Printf("loop3 n %d m %d num %d\n", n, m, num) if n+m == num { valid = true break } } if valid { break } } if !valid { return i, num, nil } } return -1, -1, errors.New("full sequence was valid") } func SolvePart1() { nums, err := util.ParseFileInts("day9.input") if err != nil { fmt.Println(err) return } seq := XmasSequence{ 50, nums, } _, n, err := seq.FirstInvalid() if err != nil { fmt.Println(err) return } fmt.Println(n) } func (x *XmasSequence) FindWeakness() (int, error) { idx, invalid, err := x.FirstInvalid() if err != nil { return -1, err } end := idx + x.pre // FirstInvalid returns an index relative to nums[pre:], so the invalid number sits at nums[end] for i := range x.nums[:end] { sum := 0 min := 999999999999999999 max := 0 for _, n := range x.nums[i:end] { sum += n if n < min { min = n } if n > max { max = n } if sum == invalid { return min + max, nil } if sum > invalid { break } } } return -1, errors.New("could not find the weakness") } func SolvePart2() { nums, err := util.ParseFileInts("day9.input") if err != nil { fmt.Println(err) return } seq := XmasSequence{ 50, nums, } n, err := seq.FindWeakness() if err != nil { fmt.Println(err) return } fmt.Println(n) }
} func (x *XmasSequence) FirstInvalid() (int, int, error) { if len(x.nums) <= x.pre {
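The validity test in FirstInvalid is a two-sum over the preceding pre numbers; the same scan, compactly in Python (the preamble length of 5 and the data come from the puzzle's worked example, not from this repository's input):

from itertools import combinations

def first_invalid(nums, pre):
    """Return (index, value) of the first number that is not the sum of
    two of the preceding `pre` numbers."""
    for i in range(pre, len(nums)):
        window = nums[i - pre:i]
        if not any(a + b == nums[i] for a, b in combinations(window, 2)):
            return i, nums[i]
    raise ValueError("full sequence was valid")

sample = [35, 20, 15, 25, 47, 40, 62, 55, 65, 95, 102, 117,
          150, 182, 127, 219, 299, 277, 309, 576]
print(first_invalid(sample, 5))  # (14, 127)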