file_name: string, length 3 to 137
prefix: string, length 0 to 918k
suffix: string, length 0 to 962k
middle: string, length 0 to 812k
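The four columns above describe each source file split into a prefix, a middle, and a suffix; concatenating prefix + middle + suffix should give back the full file text. As a minimal loading sketch (the dataset path and the use of the Hugging Face datasets library are assumptions for illustration, not part of this dump):

# Sketch only: reassemble one row of a prefix/suffix/middle dataset into a full file.
# "example/fim-code" is a hypothetical dataset path.
from datasets import load_dataset

ds = load_dataset("example/fim-code", split="train")
row = ds[0]
full_text = row["prefix"] + row["middle"] + row["suffix"]
print(row["file_name"], len(full_text))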
event_bench_test.go
/* Copyright © Max Mazurov (fox.cpp) 2018 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package zrpc import ( "testing" "gotest.tools/assert" ) // Point of reference for comparison. Sending a pile of zeros // over a socket in one direction. //func BenchmarkZeroMQReference(b *testing.B) { // client, err := zmq4.NewSocket(zmq4.DEALER) // assert.NilError(b, err) // serv, err := zmq4.NewSocket(zmq4.ROUTER) // assert.NilError(b, err) // // defer serv.Close() // defer client.Close() // // endpoint := fmt.Sprintf("inproc://bench-%s", randomString(32)) // assert.NilError(b, serv.Bind(endpoint)) // assert.NilError(b, client.Connect(endpoint)) // // msg := make([]byte, 128) // // for i := 0; i < b.N; i++ { // _, err := client.SendBytes(msg, 0) // assert.NilError(b, err) // _, err = serv.RecvMessageBytes(0) // assert.NilError(b, err) // } //} // Simple ping-pong benchmark over a single channel. // // Pretty far from being realistic but still gives a // lot of useful information. func BenchmarkSingleChannelPingPong(b *testing.B) { client, server := setupSockPair(b) defer server.Close() defer client.Close() ok := make(chan bool) var servCh *channel server.newChannel = func(ch *channel) { servCh = ch ok <- true } ev := &event{Name: "TESTING", Args: 12345} clientCh, err := client.OpenSendEvent(ev) assert.NilError(b, err) <-ok for i := 0; i < b.N; i++ { // Ping. assert.NilError(b, clientCh.SendEvent(ev)) _, err := servCh.RecvEvent() assert.NilError(b, err) // Pong. assert.NilError(b, servCh.SendEvent(ev)) _, err = clientCh.RecvEvent() assert.NilError(b, err) } } // Similar to BenchmarkSingleChannelPingPong but no second event is sent and messages are // simply discarded at the server side. func BenchmarkSingleChannelFlood(b *testing.B) {
func BenchmarkSingleChannelPipe(b *testing.B) { client, server := setupSockPair(b) defer server.Close() defer client.Close() ok := make(chan bool) var servCh *channel server.newChannel = func(ch *channel) { servCh = ch ok <- true } ev := &event{Name: "TESTING", Args: 12345} clientCh, err := client.OpenSendEvent(ev) assert.NilError(b, err) <-ok for i := 0; i < b.N; i++ { assert.NilError(b, clientCh.SendEvent(ev)) _, err := servCh.RecvEvent() assert.NilError(b, err) } } // Creating a channel to send one message and then discarding it. // This benchmark is much closer to a real usage pattern. func BenchmarkChannelClusterfuck(b *testing.B) { client, server := setupSockPair(b) defer server.Close() defer client.Close() ok := make(chan bool) var servCh *channel server.newChannel = func(ch *channel) { servCh = ch ok <- true } for i := 0; i < b.N; i++ { ev := &event{Name: "TESTING", Args: 12345} clientCh, err := client.OpenSendEvent(ev) assert.NilError(b, err) <-ok _, err = servCh.RecvEvent() assert.NilError(b, err) assert.NilError(b, servCh.SendEvent(ev)) _, err = clientCh.RecvEvent() assert.NilError(b, err) server.CloseChannel(servCh.id) client.CloseChannel(clientCh.id) } }
client, server := setupSockPair(b) defer server.Close() defer client.Close() ok := make(chan bool) var servCh *channel server.newChannel = func(ch *channel) { servCh = ch ok <- true } ev := &event{Name: "TESTING", Args: 12345} clientCh, err := client.OpenSendEvent(ev) assert.NilError(b, err) <-ok // Hackish method, redirect queue into void. go func() { for range servCh.queue { // do nothing } }() for i := 0; i < b.N; i++ { assert.NilError(b, clientCh.SendEvent(ev)) } }
delivery.rs
use crate::actor::*; use std::time::Duration; #[test] fn basic_test() { enum State { One, Two, Three, } struct MyTestActor { state: State, num: usize, }; impl Actor for MyTestActor { type Msg = usize; fn receive(&mut self, msg: usize, ctx: &mut ActorContext<usize>) { match self.state { State::One => { self.num += msg; if self.num == 3 { ctx.schedule_periodic_delivery( "test", Duration::from_millis(200), Duration::from_millis(200), || 111, ); self.state = State::Two; } } State::Two => { assert_eq!(msg, 111); self.num += msg; if self.num == 336 { // 3 in initial state, 3 in final state ctx.cancel_delivery("test"); let actor_ref = ctx.actor_ref().clone(); ctx.schedule_thunk(Duration::from_millis(500), move || actor_ref.tell(999)); self.state = State::Three; } } State::Three => { assert_eq!(msg, 999); ctx.stop(); } } } fn receive_signal(&mut self, signal: Signal, context: &mut ActorContext<usize>)
} assert!(ActorSystem::new() .spawn(MyTestActor { state: State::One, num: 0 }) .is_ok()); }
{ if let Signal::Started = signal { context.schedule_periodic_delivery( "test", Duration::from_millis(200), Duration::from_millis(200), || 1, ); } }
ToolbarDropdown.tsx
/** * @author Timur Kuzhagaliyev <[email protected]> * @copyright 2020 * @license MIT */ import Menu from '@material-ui/core/Menu'; import React, { useCallback, useMemo } from 'react'; import { FileActionGroup } from '../../types/action-menus.types'; import { important, makeGlobalChonkyStyles } from '../../util/styles';
import { ToolbarButton } from './ToolbarButton'; import { SmartToolbarDropdownButton } from './ToolbarDropdownButton'; export type ToolbarDropdownProps = FileActionGroup; export const ToolbarDropdown: React.FC<ToolbarDropdownProps> = React.memo((props) => { const { name, fileActionIds } = props; const [anchor, setAnchor] = React.useState<null | HTMLElement>(null); const handleClick = useCallback( (event: React.MouseEvent<HTMLButtonElement>) => setAnchor(event.currentTarget), [setAnchor] ); const handleClose = useCallback(() => setAnchor(null), [setAnchor]); const menuItemComponents = useMemo( () => fileActionIds.map((id) => ( <SmartToolbarDropdownButton key={`menu-item-${id}`} fileActionId={id} onClickFollowUp={handleClose} /> )), [fileActionIds, handleClose] ); const classes = useStyles(); return ( <> <ToolbarButton text={name} onClick={handleClick} dropdown={true} /> <Menu autoFocus keepMounted elevation={2} anchorEl={anchor} onClose={handleClose} open={Boolean(anchor)} transitionDuration={150} classes={{ list: classes.dropdownList }} > {menuItemComponents} </Menu> </> ); }); const useStyles = makeGlobalChonkyStyles((theme) => ({ dropdownList: { paddingBottom: important(0), paddingTop: important(0), }, }));
test_clarifai_extractors.py
from os.path import join from ...utils import get_test_data_path from pliers.extractors import ClarifaiAPIExtractor from pliers.stimuli import ImageStim
@pytest.mark.skipif("'CLARIFAI_API_KEY' not in os.environ") def test_clarifai_api_extractor(): image_dir = join(get_test_data_path(), 'image') stim = ImageStim(join(image_dir, 'apple.jpg')) result = ClarifaiAPIExtractor().transform(stim).to_df() assert result['apple'][0] > 0.5 assert result.ix[:, 5][0] > 0.0 result = ClarifaiAPIExtractor(max_concepts=5).transform(stim).to_df() assert result.shape == (1, 9) result = ClarifaiAPIExtractor( min_value=0.9).transform(stim).to_df(object_id=False) assert all(np.isnan(d) or d > 0.9 for d in result.values[0, 3:]) concepts = ['cat', 'dog'] result = ClarifaiAPIExtractor(select_concepts=concepts).transform(stim) result = result.to_df() assert result.shape == (1, 6) assert 'cat' in result.columns and 'dog' in result.columns @pytest.mark.skipif("'CLARIFAI_API_KEY' not in os.environ") def test_clarifai_api_extractor_batch(): image_dir = join(get_test_data_path(), 'image') stim = ImageStim(join(image_dir, 'apple.jpg')) stim2 = ImageStim(join(image_dir, 'obama.jpg')) ext = ClarifaiAPIExtractor() results = ext.transform([stim, stim2]) results = merge_results(results) assert results['ClarifaiAPIExtractor#apple'][0] > 0.5 or \ results['ClarifaiAPIExtractor#apple'][1] > 0.5 # This takes too long to execute # video = VideoStim(join(get_test_data_path(), 'video', 'small.mp4')) # results = ExtractorResult.merge_stims(ext.transform(video)) # assert 'Lego' in results.columns and 'robot' in results.columns
from pliers.extractors.base import merge_results import numpy as np import pytest
amx_test.go
package amx import ( "encoding/json" "fmt" "regexp" "testing" "github.com/mxmCherry/openrtb/v15/openrtb2" "github.com/aclrys/prebid-server/adapters" "github.com/aclrys/prebid-server/config" "github.com/aclrys/prebid-server/openrtb_ext" "github.com/stretchr/testify/assert" "github.com/aclrys/prebid-server/adapters/adapterstest" ) const ( amxTestEndpoint = "http://pbs-dev.amxrtb.com/auction/openrtb" sampleVastADM = "<?xml version=\"1.0\" encoding=\"UTF-8\" ?><VAST version=\"2.0\"><Ad id=\"128a6.44d74.46b3\"><InLine><Error><![CDATA[http://example.net/hbx/verr?e=]]></Error><Impression><![CDATA[http://example.net/hbx/vimp?lid=test&aid=testapp]]></Impression><Creatives><Creative sequence=\"1\"><Linear><Duration>00:00:15</Duration><TrackingEvents><Tracking event=\"firstQuartile\"><![CDATA[https://example.com?event=first_quartile]]></Tracking></TrackingEvents><VideoClicks><ClickThrough><![CDATA[http://example.com]]></ClickThrough></VideoClicks><MediaFiles><MediaFile delivery=\"progressive\" width=\"16\" height=\"9\" type=\"video/mp4\" bitrate=\"800\"><![CDATA[https://example.com/media.mp4]]></MediaFile></MediaFiles></Linear></Creative></Creatives></InLine></Ad></VAST>" sampleDisplayADM = "<img src='https://example.com/300x250.png' height='250' width='300'/>" ) func TestJsonSamples(t *testing.T) { bidder, buildErr := Builder(openrtb_ext.BidderAMX, config.Adapter{ Endpoint: amxTestEndpoint}) if buildErr != nil { t.Fatalf("Builder returned unexpected error %v", buildErr) } adapterstest.RunJSONBidderTest(t, "amxtest", bidder) } func TestEndpointMalformed(t *testing.T) { _, buildErr := Builder(openrtb_ext.BidderAMX, config.Adapter{ Endpoint: " http://leading.space.is.invalid"}) assert.Error(t, buildErr) } func TestEndpointQueryStringMalformed(t *testing.T) { _, buildErr := Builder(openrtb_ext.BidderAMX, config.Adapter{ Endpoint: "http://invalid.query.from.go.docs/page?%gh&%ij"}) assert.Error(t, buildErr) } func TestMakeRequestsTagID(t *testing.T) { var w, h int = 300, 250 var width, height int64 = int64(w), int64(h) bidder, buildErr := Builder(openrtb_ext.BidderAMX, config.Adapter{ Endpoint: amxTestEndpoint}) if buildErr != nil { t.Fatalf("Builder returned unexpected error %v", buildErr) } type testCase struct { tagID string extAdUnitID string expectedTagID string blankNil bool } tests := []testCase{ {tagID: "tag-id", extAdUnitID: "ext.adUnitID", expectedTagID: "ext.adUnitID", blankNil: false}, {tagID: "tag-id", extAdUnitID: "", expectedTagID: "tag-id", blankNil: false}, {tagID: "tag-id", extAdUnitID: "", expectedTagID: "tag-id", blankNil: true}, {tagID: "", extAdUnitID: "", expectedTagID: "", blankNil: true}, {tagID: "", extAdUnitID: "", expectedTagID: "", blankNil: false}, {tagID: "", extAdUnitID: "ext.adUnitID", expectedTagID: "ext.adUnitID", blankNil: true}, {tagID: "", extAdUnitID: "ext.adUnitID", expectedTagID: "ext.adUnitID", blankNil: false}, } for _, tc := range tests { imp1 := openrtb2.Imp{ ID: "sample_imp_1", Banner: &openrtb2.Banner{ W: &width, H: &height, Format: []openrtb2.Format{ {W: 300, H: 250}, }, }} if tc.extAdUnitID != "" || !tc.blankNil { imp1.Ext = json.RawMessage( fmt.Sprintf(`{"bidder":{"adUnitId":"%s"}}`, tc.extAdUnitID)) } if tc.tagID != "" || !tc.blankNil { imp1.TagID = tc.tagID } inputRequest := openrtb2.BidRequest{ User: &openrtb2.User{}, Imp: []openrtb2.Imp{imp1}, Site: &openrtb2.Site{}, } actualAdapterRequests, err := bidder.MakeRequests(&inputRequest, &adapters.ExtraRequestInfo{}) assert.Len(t, actualAdapterRequests, 1) assert.Empty(t, err) var body openrtb2.BidRequest 
assert.Nil(t, json.Unmarshal(actualAdapterRequests[0].Body, &body)) assert.Equal(t, tc.expectedTagID, body.Imp[0].TagID) } } func TestMakeRequestsPublisherId(t *testing.T) { var w, h int = 300, 250 var width, height int64 = int64(w), int64(h) bidder, buildErr := Builder(openrtb_ext.BidderAMX, config.Adapter{ Endpoint: amxTestEndpoint}) if buildErr != nil { t.Fatalf("Builder returned unexpected error %v", buildErr) } type testCase struct { publisherID string extTagID string expectedPublisherID string blankNil bool } tests := []testCase{ {publisherID: "publisher.id", extTagID: "ext.tagId", expectedPublisherID: "ext.tagId", blankNil: false}, {publisherID: "publisher.id", extTagID: "", expectedPublisherID: "publisher.id", blankNil: false}, {publisherID: "", extTagID: "ext.tagId", expectedPublisherID: "ext.tagId", blankNil: false}, {publisherID: "", extTagID: "ext.tagId", expectedPublisherID: "ext.tagId", blankNil: true}, {publisherID: "publisher.id", extTagID: "", expectedPublisherID: "publisher.id", blankNil: false}, {publisherID: "publisher.id", extTagID: "", expectedPublisherID: "publisher.id", blankNil: true}, } for _, tc := range tests { imp1 := openrtb2.Imp{ ID: "sample_imp_1", Banner: &openrtb2.Banner{ W: &width, H: &height, Format: []openrtb2.Format{ {W: 300, H: 250}, }, }} if tc.extTagID != "" || !tc.blankNil { imp1.Ext = json.RawMessage( fmt.Sprintf(`{"bidder":{"tagId":"%s"}}`, tc.extTagID)) } inputRequest := openrtb2.BidRequest{ User: &openrtb2.User{ID: "example_user_id"}, Imp: []openrtb2.Imp{imp1}, Site: &openrtb2.Site{}, ID: "1234", } if tc.publisherID != "" || !tc.blankNil { inputRequest.Site.Publisher = &openrtb2.Publisher{ ID: tc.publisherID, } } actualAdapterRequests, err := bidder.MakeRequests(&inputRequest, &adapters.ExtraRequestInfo{}) assert.Len(t, actualAdapterRequests, 1) assert.Empty(t, err) var body openrtb2.BidRequest assert.Nil(t, json.Unmarshal(actualAdapterRequests[0].Body, &body)) assert.Equal(t, tc.expectedPublisherID, body.Site.Publisher.ID) } } var vastImpressionRXP = regexp.MustCompile(`<Impression><!\[CDATA\[[^\]]*\]\]></Impression>`) func countImpressionPixels(vast string) int
func TestVideoImpInsertion(t *testing.T) { markup := interpolateImpressions(openrtb2.Bid{ AdM: sampleVastADM, NURL: "https://example2.com/nurl", }, amxBidExt{Himp: []string{"https://example.com/pixel.png"}}) assert.Contains(t, markup, "example2.com/nurl") assert.Contains(t, markup, "example.com/pixel.png") assert.Equal(t, 3, countImpressionPixels(markup), "should have 3 Impression pixels") // make sure that a blank NURL won't result in a blank impression tag markup = interpolateImpressions(openrtb2.Bid{ AdM: sampleVastADM, NURL: "", }, amxBidExt{}) assert.Equal(t, 1, countImpressionPixels(markup), "should have 1 impression pixels") // we should also ignore blank ext.Himp pixels markup = interpolateImpressions(openrtb2.Bid{ AdM: sampleVastADM, NURL: "https://example-nurl.com/nurl", }, amxBidExt{Himp: []string{"", "", ""}}) assert.Equal(t, 2, countImpressionPixels(markup), "should have 2 impression pixels") } func TestNoDisplayImpInsertion(t *testing.T) { data := interpolateImpressions(openrtb2.Bid{ AdM: sampleDisplayADM, NURL: "https://example2.com/nurl", }, amxBidExt{Himp: []string{"https://example.com/pixel.png"}}) assert.NotContains(t, data, "example2.com/nurl") assert.NotContains(t, data, "example.com/pixel.png") }
{ matches := vastImpressionRXP.FindAllIndex([]byte(vast), -1) return len(matches) }
urls.py
urlpatterns = [ path('', views.contact_page, name='contact'), ]
from django.urls import path from . import views
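The URL configuration above routes the empty path to views.contact_page. That view is not part of this file; purely as a hypothetical illustration of what it might look like (the template name is made up):

# Hypothetical views.py counterpart for the route above.
from django.shortcuts import render

def contact_page(request):
    # Render a static contact page template.
    return render(request, 'contact.html')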
git.rs
pub use github::Github; pub use gitlab::Gitlab; mod github { use crate::config::Flavor; use crate::error::RepositoryError; use crate::network::request_async; use crate::repository::{Backend, ReleaseChannel, RemotePackage, RepositoryMetadata}; use async_trait::async_trait; use chrono::{DateTime, Utc}; use isahc::http::Uri; use isahc::ResponseExt; use serde::Deserialize; use std::collections::HashMap; #[derive(Debug, Clone)] pub struct
{ pub url: Uri, pub flavor: Flavor, } #[async_trait] impl Backend for Github { async fn get_metadata(&self) -> Result<RepositoryMetadata, RepositoryError> { let mut path = self.url.path().split('/'); // Get rid of leading slash path.next(); let author = path.next().ok_or(RepositoryError::GitMissingAuthor { url: self.url.to_string(), })?; let repo = path.next().ok_or(RepositoryError::GitMissingRepo { url: self.url.to_string(), })?; let url = format!("https://api.github.com/repos/{}/{}/releases", author, repo); let mut resp = request_async(&url, vec![], None).await?; let releases: Vec<Release> = resp .json() .map_err(|_| RepositoryError::GitMissingRelease { url: url.clone() })?; let stable_release = releases.iter().find(|r| !r.prerelease); let beta_release = releases.iter().find(|r| r.prerelease); if stable_release.is_none() && beta_release.is_none() { return Err(RepositoryError::GitMissingRelease { url: url.clone() }); } let mut remote_packages = HashMap::new(); if let Some(release) = stable_release { set_remote_package( self.flavor, &url, &mut remote_packages, ReleaseChannel::Stable, release, )?; } if let Some(release) = beta_release { set_remote_package( self.flavor, &url, &mut remote_packages, ReleaseChannel::Beta, release, )?; } // URL passed in by user might not be the correct letter casing. Let's // use the url from the API response instead to ensure the title we // use for the addon has the correct letter casing. let title = { let release = if stable_release.is_some() { stable_release.unwrap() } else { beta_release.unwrap() }; let html_url = release.html_url.parse::<Uri>()?; let mut path = html_url.path().split('/'); path.next(); path.next(); path.next() .ok_or(RepositoryError::GitMissingRepo { url: self.url.to_string(), })? .to_string() }; let metadata = RepositoryMetadata { website_url: Some(self.url.to_string()), changelog_url: Some(format!("{}/releases", self.url)), remote_packages, title: Some(title), ..Default::default() }; Ok(metadata) } } fn set_remote_package( flavor: Flavor, url: &str, remote_packages: &mut HashMap<ReleaseChannel, RemotePackage>, release_channel: ReleaseChannel, release: &Release, ) -> Result<(), RepositoryError> { let num_non_classic = release .assets .iter() .filter(|a| a.name.ends_with("zip")) .filter(|a| !a.name.to_lowercase().contains("classic")) .count(); let num_classic = release .assets .iter() .filter(|a| a.name.ends_with("zip")) .filter(|a| a.name.to_lowercase().contains("classic")) .count(); if flavor.base_flavor() == Flavor::Retail && num_non_classic > 1 || flavor.base_flavor() == Flavor::Classic && num_classic == 0 && num_non_classic > 1 { return Err(RepositoryError::GitIndeterminableZip { count: num_non_classic, url: url.to_string(), }); } else if flavor.base_flavor() == Flavor::Classic && num_classic > 1 { return Err(RepositoryError::GitIndeterminableZipClassic { count: num_classic, url: url.to_string(), }); } let asset = release .assets .iter() .find(|a| { if flavor.base_flavor() == Flavor::Retail { a.name.ends_with("zip") && !a.name.to_lowercase().contains("classic") } else if num_classic > 0 { a.name.ends_with("zip") && a.name.to_lowercase().contains("classic") } else { a.name.ends_with("zip") } }) .ok_or(RepositoryError::GitNoZip { url: url.to_string(), })?; let version = release.tag_name.clone(); let download_url = asset.browser_download_url.clone(); let date_time = Some(release.published_at); let remote_package = RemotePackage { version, download_url, date_time, file_id: None, modules: vec![], }; remote_packages.insert(release_channel, 
remote_package); Ok(()) } #[derive(Debug, Deserialize, Clone)] pub struct Release { pub tag_name: String, pub published_at: DateTime<Utc>, pub prerelease: bool, pub html_url: String, pub body: String, pub assets: Vec<ReleaseAsset>, } #[derive(Debug, Deserialize, Clone)] pub struct ReleaseAsset { pub name: String, pub browser_download_url: String, } } mod gitlab { use crate::config::Flavor; use crate::error::RepositoryError; use crate::network::request_async; use crate::repository::{Backend, ReleaseChannel, RemotePackage, RepositoryMetadata}; use async_trait::async_trait; use chrono::{DateTime, Utc}; use isahc::http::Uri; use isahc::ResponseExt; use serde::Deserialize; use std::collections::HashMap; #[derive(Debug, Clone)] pub struct Gitlab { pub url: Uri, pub flavor: Flavor, } #[async_trait] impl Backend for Gitlab { async fn get_metadata(&self) -> Result<RepositoryMetadata, RepositoryError> { let mut path = self.url.path().split('/'); // Get rid of leading slash path.next(); let author = path.next().ok_or(RepositoryError::GitMissingAuthor { url: self.url.to_string(), })?; let repo = path.next().ok_or(RepositoryError::GitMissingRepo { url: self.url.to_string(), })?; let url = format!( "https://gitlab.com/api/v4/projects/{}%2F{}/releases", author, repo ); let mut resp = request_async(&url, vec![], None).await?; let releases: Vec<Release> = resp .json() .map_err(|_| RepositoryError::GitMissingRelease { url: url.clone() })?; let release = releases .get(0) .ok_or(RepositoryError::GitMissingRelease { url: url.clone() })?; let version = release.tag_name.clone(); let num_non_classic = release .assets .links .iter() .filter(|a| a.name.ends_with("zip")) .filter(|a| !a.name.to_lowercase().contains("classic")) .count(); let num_classic = release .assets .links .iter() .filter(|a| a.name.ends_with("zip")) .filter(|a| a.name.to_lowercase().contains("classic")) .count(); if self.flavor.base_flavor() == Flavor::Retail && num_non_classic > 1 || self.flavor.base_flavor() == Flavor::Classic && num_classic == 0 && num_non_classic > 1 { return Err(RepositoryError::GitIndeterminableZip { count: num_non_classic, url: url.clone(), }); } else if self.flavor.base_flavor() == Flavor::Classic && num_classic > 1 { return Err(RepositoryError::GitIndeterminableZipClassic { count: num_classic, url, }); } let asset = release .assets .links .iter() .find(|a| { if self.flavor.base_flavor() == Flavor::Retail { a.name.ends_with("zip") && !a.name.to_lowercase().contains("classic") } else if num_classic > 0 { a.name.ends_with("zip") && a.name.to_lowercase().contains("classic") } else { a.name.ends_with("zip") } }) .ok_or(RepositoryError::GitNoZip { url })?; let download_url = asset.url.clone(); let date_time = Some(release.released_at); let mut remote_packages = HashMap::new(); let remote_package = RemotePackage { version: version.clone(), download_url, date_time, file_id: None, modules: vec![], }; remote_packages.insert(ReleaseChannel::Stable, remote_package); let metadata = RepositoryMetadata { website_url: Some(self.url.to_string()), changelog_url: Some(format!("{}/-/tags/{}", self.url, version)), remote_packages, title: Some(repo.to_string()), ..Default::default() }; Ok(metadata) } } #[derive(Debug, Deserialize, Clone)] pub struct Release { pub tag_name: String, pub description: String, pub released_at: DateTime<Utc>, pub assets: ReleaseAssets, pub upcoming_release: bool, pub tag_path: String, } #[derive(Debug, Deserialize, Clone)] pub struct ReleaseAssets { pub count: u8, pub links: Vec<ReleaseLink>, } #[derive(Debug, 
Deserialize, Clone)] pub struct ReleaseLink { pub name: String, pub url: String, } }
Github
util.rs
use crate::fmt; use crate::io::prelude::*; use crate::sys::stdio::panic_output;
let _ = out.write_fmt(args); } } // Other platforms should use the appropriate platform-specific mechanism for // aborting the process. If no platform-specific mechanism is available, // crate::intrinsics::abort() may be used instead. The above implementations cover // all targets currently supported by libstd. pub fn abort(args: fmt::Arguments<'_>) -> ! { dumb_print(format_args!("fatal runtime error: {}\n", args)); unsafe { crate::sys::abort_internal(); } } #[allow(dead_code)] // stack overflow detection not enabled on all platforms pub unsafe fn report_overflow() { dumb_print(format_args!("\nthread '{}' has overflowed its stack\n", thread::current().name().unwrap_or("<unknown>"))); }
use crate::thread; pub fn dumb_print(args: fmt::Arguments<'_>) { if let Some(mut out) = panic_output() {
tee.rs
// * This file is part of the uutils coreutils package. // * // * (c) Aleksander Bielawski <[email protected]> // * // * For the full copyright and license information, please view the LICENSE // * file that was distributed with this source code. extern crate getopts; extern crate uucore; use std::fs::OpenOptions; use std::io::{copy, sink, stdin, stdout, Error, ErrorKind, Read, Result, Write}; use std::path::{Path, PathBuf}; static NAME: &str = "tee"; static VERSION: &str = env!("CARGO_PKG_VERSION"); pub fn uumain(args: impl uucore::Args) -> i32 { let args = args.collect_str(); match options(&args).and_then(exec) { Ok(_) => 0, Err(_) => 1, } } #[allow(dead_code)] struct Options { program: String, append: bool, ignore_interrupts: bool, print_and_exit: Option<String>, files: Vec<String>, } fn options(args: &[String]) -> Result<Options> { let mut opts = getopts::Options::new(); opts.optflag("a", "append", "append to the given FILEs, do not overwrite"); opts.optflag("i", "ignore-interrupts", "ignore interrupt signals"); opts.optflag("h", "help", "display this help and exit"); opts.optflag("V", "version", "output version information and exit"); opts.parse(&args[1..]) .map_err(|e| Error::new(ErrorKind::Other, format!("{}", e))) .map(|m| { let version = format!("{} {}", NAME, VERSION); let arguments = "[OPTION]... [FILE]..."; let brief = "Copy standard input to each FILE, and also to standard output."; let comment = "If a FILE is -, it refers to a file named - ."; let help = format!( "{}\n\nUsage:\n {} {}\n\n{}\n{}", version, NAME, arguments, opts.usage(brief), comment ); let names: Vec<String> = m.free.clone().into_iter().collect(); let to_print = if m.opt_present("help") { Some(help) } else if m.opt_present("version") { Some(version) } else { None }; Options { program: NAME.to_owned(), append: m.opt_present("append"), ignore_interrupts: m.opt_present("ignore-interrupts"), print_and_exit: to_print, files: names, } }) .map_err(|message| warn(format!("{}", message).as_ref())) } fn exec(options: Options) -> Result<()> { match options.print_and_exit { Some(text) => { println!("{}", text); Ok(()) } None => tee(options), } } fn tee(options: Options) -> Result<()> { let mut writers: Vec<Box<dyn Write>> = options .files .clone() .into_iter() .map(|file| open(file, options.append)) .collect(); writers.push(Box::new(stdout())); let output = &mut MultiWriter { writers }; let input = &mut NamedReader { inner: Box::new(stdin()) as Box<dyn Read>, }; if copy(input, output).is_err() || output.flush().is_err() { Err(Error::new(ErrorKind::Other, "")) } else { Ok(()) } } fn open(name: String, append: bool) -> Box<dyn Write> { let path = PathBuf::from(name); let inner: Box<dyn Write> = { let mut options = OpenOptions::new(); let mode = if append { options.append(true) } else { options.truncate(true) }; match mode.write(true).create(true).open(path.as_path()) { Ok(file) => Box::new(file), Err(_) => Box::new(sink()), } }; Box::new(NamedWriter { inner, path }) as Box<dyn Write> } struct MultiWriter { writers: Vec<Box<dyn Write>>, } impl Write for MultiWriter { fn write(&mut self, buf: &[u8]) -> Result<usize> { for writer in &mut self.writers { writer.write_all(buf)?; } Ok(buf.len()) } fn flush(&mut self) -> Result<()> { for writer in &mut self.writers { writer.flush()?; } Ok(()) } } struct NamedWriter { inner: Box<dyn Write>, path: PathBuf, } impl Write for NamedWriter { fn write(&mut self, buf: &[u8]) -> Result<usize> { match self.inner.write(buf) { Err(f) =>
okay => okay, } } fn flush(&mut self) -> Result<()> { match self.inner.flush() { Err(f) => { self.inner = Box::new(sink()) as Box<dyn Write>; warn(format!("{}: {}", self.path.display(), f.to_string()).as_ref()); Err(f) } okay => okay, } } } struct NamedReader { inner: Box<dyn Read>, } impl Read for NamedReader { fn read(&mut self, buf: &mut [u8]) -> Result<usize> { match self.inner.read(buf) { Err(f) => { warn(format!("{}: {}", Path::new("stdin").display(), f.to_string()).as_ref()); Err(f) } okay => okay, } } } fn warn(message: &str) -> Error { eprintln!("{}: {}", NAME, message); Error::new(ErrorKind::Other, format!("{}: {}", NAME, message)) }
{ self.inner = Box::new(sink()) as Box<dyn Write>; warn(format!("{}: {}", self.path.display(), f.to_string()).as_ref()); Err(f) }
states_test.go
/* Copyright SecureKey Technologies Inc. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package outofband import ( "errors" "testing" "github.com/google/uuid" "github.com/stretchr/testify/require" "github.com/hyperledger/aries-framework-go/pkg/didcomm/common/service" "github.com/hyperledger/aries-framework-go/pkg/didcomm/protocol/decorator" "github.com/hyperledger/aries-framework-go/pkg/didcomm/protocol/didexchange" mockdidexchange "github.com/hyperledger/aries-framework-go/pkg/mock/didcomm/protocol/didexchange" mockservice "github.com/hyperledger/aries-framework-go/pkg/mock/didcomm/service" "github.com/hyperledger/aries-framework-go/pkg/store/connection" ) func TestStateFromName(t *testing.T) { t.Run("valid state names", func(t *testing.T) { states := []state{ &stateInitial{}, &statePrepareResponse{}, &stateAwaitResponse{}, &stateDone{}, } for _, expected := range states { actual, err := stateFromName(expected.Name()) require.NoError(t, err) require.Equal(t, expected, actual) } }) t.Run("invalid state name", func(t *testing.T) { _, err := stateFromName("invalid") require.Error(t, err) }) } func TestStateInitial_Execute(t *testing.T) { t.Run("handles inbound invitation", func(t *testing.T) { s := &stateInitial{} next, finish, halt, err := s.Execute(&context{Inbound: true}, nil) require.NoError(t, err) require.IsType(t, &statePrepareResponse{}, next) require.NotNil(t, finish) require.False(t, halt) }) } func TestStateAwaitResponse_Execute(t *testing.T) { t.Run("error if not an inbound message", func(t *testing.T) { s := &stateAwaitResponse{} _, _, _, err := s.Execute(&context{}, nil) require.Error(t, err) require.Contains(t, err.Error(), "cannot execute") }) t.Run("handshake-reuse", func(t *testing.T) { t.Run("error if cannot fetch connection ID", func(t *testing.T) { expected := errors.New("test") ctx := &context{ Inbound: true, Action: Action{Msg: service.NewDIDCommMsgMap(&HandshakeReuse{Type: HandshakeReuseMsgType})}, } deps := &dependencies{ connections: &mockConnRecorder{ getConnIDByDIDsErr: expected, }, } s := &stateAwaitResponse{} _, _, _, err := s.Execute(ctx, deps) require.ErrorIs(t, err, expected) }) t.Run("error if cannot fetch connection record", func(t *testing.T) { expected := errors.New("test") ctx := &context{ Inbound: true, Action: Action{Msg: service.NewDIDCommMsgMap(&HandshakeReuse{Type: HandshakeReuseMsgType})}, } deps := &dependencies{ connections: &mockConnRecorder{ getConnRecordErr: expected, }, } s := &stateAwaitResponse{} _, _, _, err := s.Execute(ctx, deps) require.ErrorIs(t, err, expected) }) t.Run("error if connection is not in state 'completed'", func(t *testing.T) { ctx := &context{ Inbound: true, Action: Action{Msg: service.NewDIDCommMsgMap(&HandshakeReuse{Type: HandshakeReuseMsgType})}, } deps := &dependencies{ connections: &mockConnRecorder{ getConnRecordVal: &connection.Record{State: "initial"}, }, }
require.Contains(t, err.Error(), "unexpected state for connection") }) }) } func TestStatePrepareResponse_Execute(t *testing.T) { t.Run("new connection", func(t *testing.T) { t.Run("error while saving attachment handling state", func(t *testing.T) { expected := errors.New("test") ctx := &context{Invitation: &Invitation{ Requests: []*decorator.Attachment{{ ID: uuid.New().String(), Data: decorator.AttachmentData{ JSON: map[string]interface{}{}, }, }}, }} deps := &dependencies{ connections: nil, didSvc: &mockdidexchange.MockDIDExchangeSvc{}, saveAttchStateFunc: func(*attachmentHandlingState) error { return expected }, } s := &statePrepareResponse{} _, _, _, err := s.Execute(ctx, deps) require.ErrorIs(t, err, expected) }) }) t.Run("connection reuse", func(t *testing.T) { t.Run("advances to next state and sends handshake-reuse", func(t *testing.T) { savedAttachmentState := false ctx := &context{ Invitation: &Invitation{ Services: []interface{}{theirDID}, Requests: []*decorator.Attachment{{ ID: uuid.New().String(), Data: decorator.AttachmentData{ JSON: map[string]interface{}{}, }, }}, }, ReuseConnection: theirDID, } deps := &dependencies{ connections: &mockConnRecorder{queryConnRecordsVal: []*connection.Record{{ TheirDID: theirDID, State: didexchange.StateIDCompleted, }}}, saveAttchStateFunc: func(*attachmentHandlingState) error { savedAttachmentState = true return nil }, } s := &statePrepareResponse{} next, finish, halt, err := s.Execute(ctx, deps) require.NoError(t, err) require.IsType(t, &stateAwaitResponse{}, next) require.True(t, halt) sent := false messenger := &mockservice.MockMessenger{ ReplyToMsgFunc: func(_ service.DIDCommMsgMap, out service.DIDCommMsgMap, _ string, _ string) error { require.Equal(t, HandshakeReuseMsgType, out.Type()) sent = true return nil }, } err = finish(messenger) require.NoError(t, err) require.True(t, savedAttachmentState) require.True(t, sent) }) t.Run("error if cannot query connection records", func(t *testing.T) { expected := errors.New("test") ctx := &context{ Inbound: true, ReuseAnyConnection: true, } deps := &dependencies{ connections: &mockConnRecorder{queryConnRecordsErr: expected}, } s := &statePrepareResponse{} _, _, _, err := s.Execute(ctx, deps) require.ErrorIs(t, err, expected) }) t.Run("error if cannot find matching connection record", func(t *testing.T) { ctx := &context{ Inbound: true, ReuseAnyConnection: true, Invitation: &Invitation{ Services: []interface{}{theirDID}, }, } deps := &dependencies{ connections: &mockConnRecorder{}, } s := &statePrepareResponse{} _, _, _, err := s.Execute(ctx, deps) require.Error(t, err) require.Contains(t, err.Error(), "no existing connection record found for the invitation") }) t.Run("error when saving attachment handling state", func(t *testing.T) { expected := errors.New("test") ctx := &context{ Inbound: true, ReuseConnection: theirDID, Invitation: &Invitation{ Services: []interface{}{theirDID}, Requests: []*decorator.Attachment{{ ID: uuid.New().String(), Data: decorator.AttachmentData{ JSON: map[string]interface{}{}, }, }}, }, } deps := &dependencies{ connections: &mockConnRecorder{queryConnRecordsVal: []*connection.Record{{ TheirDID: theirDID, State: didexchange.StateIDCompleted, }}}, saveAttchStateFunc: func(*attachmentHandlingState) error { return expected }, } s := &statePrepareResponse{} _, _, _, err := s.Execute(ctx, deps) require.ErrorIs(t, err, expected) }) }) } type mockConnRecorder struct { saveInvErr error getConnRecordVal *connection.Record getConnRecordErr error getConnIDByDIDsVal string 
getConnIDByDIDsErr error queryConnRecordsVal []*connection.Record queryConnRecordsErr error } func (m *mockConnRecorder) SaveInvitation(string, interface{}) error { return m.saveInvErr } func (m *mockConnRecorder) GetConnectionRecord(string) (*connection.Record, error) { return m.getConnRecordVal, m.getConnRecordErr } func (m *mockConnRecorder) GetConnectionIDByDIDs(string, string) (string, error) { return m.getConnIDByDIDsVal, m.getConnIDByDIDsErr } func (m *mockConnRecorder) QueryConnectionRecords() ([]*connection.Record, error) { return m.queryConnRecordsVal, m.queryConnRecordsErr }
s := &stateAwaitResponse{} _, _, _, err := s.Execute(ctx, deps) require.Error(t, err)
0004_scheduledemailaction.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-12-16 08:54 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class
(migrations.Migration): initial = True dependencies = [ ('workflow', '0013_auto_20171209_0809'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('action', '0008_auto_20171209_1808'), ('scheduler', '0003_auto_20171216_1944'), ] operations = [ migrations.CreateModel( name='ScheduledEmailAction', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('type', models.CharField(max_length=256)), ('created', models.DateTimeField(auto_now_add=True)), ('execute', models.DateTimeField(null=True)), ('status', models.IntegerField(choices=[(0, 'pending'), (1, 'running'), (2, 'done')], verbose_name='Execution Status')), ('subject', models.CharField(blank=True, default='', max_length=2048, verbose_name='Email subject')), ('send_confirmation', models.BooleanField(default=False, verbose_name='Send you a confirmation email')), ('track_read', models.BooleanField(default=False, verbose_name='Track if emails are read?')), ('add_column', models.BooleanField(default=False, verbose_name='Add a column with the number of email reads tracked')), ('action', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='scheduled_actions', to='action.Action')), ('email_column', models.ForeignKey(db_index=False, on_delete=django.db.models.deletion.CASCADE, to='workflow.Column', verbose_name='Column containing the email address')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ('workflow', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='scheduled_actions', to='workflow.Workflow')), ], options={ 'abstract': False, }, ), ]
Migration
get_blocked_domains.py
# coding: utf-8 """ SendinBlue API SendinBlue provide a RESTFul API that can be used with any languages. With this API, you will be able to : - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | | 406 | Error. Not Acceptable | # noqa: E501 OpenAPI spec version: 3.0.0 Contact: [email protected] Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 import six class GetBlockedDomains(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'domains': 'list[str]' } attribute_map = { 'domains': 'domains' } def __init__(self, domains=None): # noqa: E501 """GetBlockedDomains - a model defined in Swagger""" # noqa: E501 self._domains = None self.discriminator = None self.domains = domains @property def domains(self): """Gets the domains of this GetBlockedDomains. # noqa: E501 List of all blocked domains # noqa: E501 :return: The domains of this GetBlockedDomains. # noqa: E501 :rtype: list[str] """ return self._domains @domains.setter def domains(self, domains): """Sets the domains of this GetBlockedDomains. List of all blocked domains # noqa: E501 :param domains: The domains of this GetBlockedDomains. # noqa: E501 :type: list[str] """ if domains is None: raise ValueError("Invalid value for `domains`, must not be `None`") # noqa: E501
self._domains = domains def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(GetBlockedDomains, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, GetBlockedDomains): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
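A short usage sketch for the swagger-generated model above (illustrative only; the domain values are made up):

# Construct the generated model and serialize it.
blocked = GetBlockedDomains(domains=['example.com', 'spam.invalid'])
print(blocked.to_dict())  # {'domains': ['example.com', 'spam.invalid']}
print(blocked.to_str())   # pretty-printed form via pprint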
hex.rs
// Copyright 2018 The Grin Developers // Copyright 2019 The Libercoin Developers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /// Implements hex-encoding from bytes to string and decoding of strings /// to bytes. Given that rustc-serialize is deprecated and serde doesn't /// provide easy hex encoding, hex is a bit in limbo right now in Rust- /// land. It's simple enough that we can just have our own. use std::fmt::Write; use std::num; /// Encode the provided bytes into a hex string pub fn to_hex(bytes: Vec<u8>) -> String { let mut s = String::new(); for byte in bytes { write!(&mut s, "{:02x}", byte).expect("Unable to write"); } s } /// Decode a hex string into bytes. pub fn from_hex(hex_str: String) -> Result<Vec<u8>, num::ParseIntError> { if hex_str.len() % 2 == 1 { // TODO: other way to instantiate a ParseIntError? let err = ("QQQ").parse::<u64>(); if let Err(e) = err { return Err(e); } } let hex_trim = if &hex_str[..2] == "0x" { hex_str[2..].to_owned() } else { hex_str.clone() }; split_n(&hex_trim.trim()[..], 2)
} fn split_n(s: &str, n: usize) -> Vec<&str> { (0..(s.len() - n + 1) / 2 + 1) .map(|i| &s[2 * i..2 * i + n]) .collect() } #[cfg(test)] mod test { use super::*; #[test] fn test_to_hex() { assert_eq!(to_hex(vec![0, 0, 0, 0]), "00000000"); assert_eq!(to_hex(vec![10, 11, 12, 13]), "0a0b0c0d"); assert_eq!(to_hex(vec![0, 0, 0, 255]), "000000ff"); } #[test] fn test_from_hex() { assert_eq!(from_hex("00000000".to_string()).unwrap(), vec![0, 0, 0, 0]); assert_eq!( from_hex("0a0b0c0d".to_string()).unwrap(), vec![10, 11, 12, 13] ); assert_eq!( from_hex("000000ff".to_string()).unwrap(), vec![0, 0, 0, 255] ); } }
.iter() .map(|b| u8::from_str_radix(b, 16)) .collect::<Result<Vec<u8>, _>>()
test_tooltips.py
import time import types import pytest import teek from teek.extras import tooltips def run_event_loop(for_how_long): # this is dumb start = time.time() while time.time() < start + for_how_long: teek.update() @pytest.mark.slow def
(): window = teek.Window() assert not hasattr(window, '_tooltip_manager') tooltips.set_tooltip(window, None) assert not hasattr(window, '_tooltip_manager') tooltips.set_tooltip(window, 'Boo') assert window._tooltip_manager.text == 'Boo' tooltips.set_tooltip(window, None) assert window._tooltip_manager.text is None tooltips.set_tooltip(window, 'lol') assert window._tooltip_manager.text == 'lol' N = types.SimpleNamespace # because pep8 line length assert not window._tooltip_manager.got_mouse window._tooltip_manager.enter(N(widget=window, rootx=123, rooty=456)) assert window._tooltip_manager.got_mouse assert window._tooltip_manager.mousex == 123 assert window._tooltip_manager.mousey == 456 window._tooltip_manager.motion(N(rootx=789, rooty=101112)) assert window._tooltip_manager.got_mouse assert window._tooltip_manager.mousex == 789 assert window._tooltip_manager.mousey == 101112 run_event_loop(1.1) assert window._tooltip_manager.tipwindow is not None assert window._tooltip_manager.got_mouse window._tooltip_manager.leave(N(widget=window)) assert not window._tooltip_manager.got_mouse assert window._tooltip_manager.tipwindow is None # what happens if the window gets destroyed before it's supposed to show? window._tooltip_manager.enter(N(widget=window, rootx=1, rooty=2)) window._tooltip_manager.leave(N(widget=window)) assert window._tooltip_manager.tipwindow is None run_event_loop(1.1) assert window._tooltip_manager.tipwindow is None
test_set_tooltip
xgboost_model.py
import pandas as pd import numpy as np import xgboost # reading the training data hotel_data = pd.read_csv('cleaned_train.csv') X = hotel_data.drop(columns=['n_clicks', 'hotel_id']) # let's also add the new feature avg_saving_cash X['avg_saving_cash'] = X['avg_price'] * X['avg_saving_percent'] y = hotel_data['n_clicks'] # let's create the training DMatrix for xgboost dtrain = xgboost.DMatrix(X, label=y) params = {'max_depth': 6, 'min_child_weight': 3, 'eta': .1, 'subsample': 1, 'colsample_bytree': 0.7,
num_boost_round = 999 print('Training phase has started') # training the best model on the optimized hyper-parameters. best_model = xgboost.train( params, dtrain, num_boost_round=num_boost_round, ) print('Saving the model as best_model.model') best_model.save_model("best_model.model") print('Reading test data') # reading test data X_test = pd.read_csv('cleaned_test.csv') # mirror the avg_saving_cash feature engineered for training, otherwise the booster's feature names will not match (assumes the test set has the same raw columns) X_test['avg_saving_cash'] = X_test['avg_price'] * X_test['avg_saving_percent'] dtest = xgboost.DMatrix(X_test.drop(columns=['hotel_id'])) predicted_y = best_model.predict(dtest) X_test['n_clicks'] = predicted_y # clamping all negative predictions to 0 X_test['n_clicks'] = np.where(X_test['n_clicks'] < 0, 0, X_test['n_clicks']) final_result = X_test[['hotel_id', 'n_clicks']] print('Saving the prediction as predictions.csv') # saving the result final_result.to_csv('predictions.csv')
'objective': 'reg:squarederror', 'eval_metric': "rmse"}
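The training script above hard-codes num_boost_round = 999 and a fixed parameter set. One common way to pick such values, shown here only as an illustrative sketch (cross-validation is not part of the original script), is xgboost's built-in cv with early stopping:

# Illustrative only: estimate a good num_boost_round from 5-fold CV with early stopping.
# Assumes params and dtrain are defined exactly as in the script above.
cv_results = xgboost.cv(
    params,
    dtrain,
    num_boost_round=999,
    nfold=5,
    metrics='rmse',
    early_stopping_rounds=10,
    seed=42,
)
# Each row of cv_results is one boosting round kept after early stopping.
print('suggested num_boost_round:', len(cv_results))
print('cv rmse:', cv_results['test-rmse-mean'].min())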
dataframe_output.py
import json from typing import Sequence from bentoml.adapters.json_output import JsonOutput from bentoml.types import InferenceError, InferenceResult, InferenceTask from bentoml.utils.dataframe_util import PANDAS_DATAFRAME_TO_JSON_ORIENT_OPTIONS def
(result, pandas_dataframe_orient="records"): import pandas as pd assert ( pandas_dataframe_orient in PANDAS_DATAFRAME_TO_JSON_ORIENT_OPTIONS ), f"unknown pandas dataframe orient '{pandas_dataframe_orient}'" if isinstance(result, pd.DataFrame): return result.to_json(orient=pandas_dataframe_orient) if isinstance(result, pd.Series): return pd.DataFrame(result).to_json(orient=pandas_dataframe_orient) return json.dumps(result) class DataframeOutput(JsonOutput): """ Converts result of user defined API function into specific output. Args: cors (str): The value of the Access-Control-Allow-Origin header set in the AWS Lambda response object. Default is "*". If set to None, the header will not be set. """ BATCH_MODE_SUPPORTED = True def __init__(self, output_orient='records', **kwargs): super().__init__(**kwargs) self.output_orient = output_orient assert self.output_orient in PANDAS_DATAFRAME_TO_JSON_ORIENT_OPTIONS, ( f"Invalid 'output_orient'='{self.orient}', valid options are " f"{PANDAS_DATAFRAME_TO_JSON_ORIENT_OPTIONS}" ) @property def config(self): base_config = super(DataframeOutput, self).config return dict(base_config, output_orient=self.output_orient) @property def pip_dependencies(self): """ :return: List of PyPI package names required by this OutputAdapter """ return ['pandas'] def pack_user_func_return_value( self, return_result, tasks: Sequence[InferenceTask] ) -> Sequence[InferenceResult[str]]: rv = [] i = 0 for task in tasks: if task.batch is None: result = return_result[i : i + 1] i += 1 else: result = return_result[i : i + task.batch] i += task.batch try: result = df_to_json(result, self.output_orient) rv.append(InferenceResult(http_status=200, data=result)) except Exception as e: # pylint: disable=broad-except rv.append(InferenceError(err_msg=str(e), http_status=500)) return rv
df_to_json
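A brief usage sketch for the df_to_json helper defined above (the sample data is made up):

# Serialize a small DataFrame and a Series with the helper above.
import pandas as pd

df = pd.DataFrame({'id': [1, 2], 'score': [0.9, 0.1]})
print(df_to_json(df))                    # default 'records' orient
print(df_to_json(pd.Series([1, 2, 3])))  # a Series is wrapped into a DataFrame first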
recipe-521914.py
from __future__ import with_statement from contextlib import contextmanager @contextmanager def Switch(): D = {} class _P(Exception): pass def _mkCase(var): class _PP(_P): V = var def __repr__(self): return str(self.V) D[var]=_PP return _PP def switch(var): if D.has_key(var): raise D[var]() raise _mkCase(var)() def case(var): if D.has_key(var): return D[var] return _mkCase(var) def default(): return _P yield switch, case, default if __name__=="__main__": def
(): with Switch() as (switch, case, default): try: switch(55) except case(1): print 1 except case(6): print 6 except case(5): print 5 except default(): print 'default..' def test2(): with Switch() as (switch, case, default): try:switch('hola') except case(1): print 1 except case('holaS'): print 'holaS' except case('hola'): print 'hola' except default(): print 'default..' test1() test2()
test1
space.rs
// Copyright 2021 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::fmt; use std::marker::PhantomData; use common_datavalues::prelude::*; use common_datavalues::DataTypeAndNullable; use common_exception::ErrorCode; use common_exception::Result; use crate::scalars::function_factory::FunctionDescription; use crate::scalars::function_factory::FunctionFeatures; use crate::scalars::Function; pub type SpaceFunction = SpaceGenFunction<SpaceGen>; pub trait SpaceGenOperator: Send + Sync + Clone + Default + 'static { fn apply<'a>(&'a mut self, c: &u64, _: &mut [u8]) -> usize; fn apply_char(&self, c: &u64) -> Vec<u8>; } #[derive(Clone, Default)] pub struct SpaceGen {} impl SpaceGenOperator for SpaceGen { #[inline] fn apply<'a>(&'a mut self, c: &u64, buffer: &mut [u8]) -> usize { let len = *c as usize; let buffer = &mut buffer[0..len]; buffer.copy_from_slice(vec![32; len].as_slice()); len } #[inline] fn apply_char(&self, c: &u64) -> Vec<u8> { vec![32; *c as usize] } } #[derive(Clone)] pub struct SpaceGenFunction<T> { display_name: String, _marker: PhantomData<T>, } impl<T: SpaceGenOperator> SpaceGenFunction<T> { pub fn try_create(display_name: &str) -> Result<Box<dyn Function>> { Ok(Box::new(Self { display_name: display_name.to_string(), _marker: PhantomData, })) } pub fn desc() -> FunctionDescription { FunctionDescription::creator(Box::new(Self::try_create)) .features(FunctionFeatures::default().deterministic().num_arguments(1)) } } impl<T: SpaceGenOperator> Function for SpaceGenFunction<T> { fn name(&self) -> &str { &*self.display_name } fn return_type(&self, args: &[DataTypeAndNullable]) -> Result<DataTypeAndNullable> { if !args[0].is_unsigned_integer() && !args[0].is_string() && !args[0].is_null() { return Err(ErrorCode::IllegalDataType(format!( "Expected unsigned integer or null, but got {}", args[0] ))); } let nullable = args.iter().any(|arg| arg.is_nullable()); let dt = DataType::String; Ok(DataTypeAndNullable::create(&dt, nullable)) } fn eval(&self, columns: &DataColumnsWithField, input_rows: usize) -> Result<DataColumn> { let mut op = T::default(); let r_column: DataColumn = match columns[0].column().cast_with_type(&DataType::UInt64)? { DataColumn::Constant(DataValue::UInt64(c), _) => { if let Some(c) = c { DataColumn::Constant(DataValue::String(Some(op.apply_char(&c))), input_rows) } else { DataColumn::Constant(DataValue::Null, input_rows) } } DataColumn::Array(c_series) => transform_from_primitive_with_no_null( c_series.u64()?, |x| *x as usize, |x, buffer| op.apply(x, buffer), ) .into(), _ => DataColumn::Constant(DataValue::Null, input_rows),
} } impl<F> fmt::Display for SpaceGenFunction<F> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.display_name) } }
}; Ok(r_column)
skip_test.go
// Copyright 2019 Drone.IO Inc. All rights reserved. // Use of this source code is governed by the Drone Non-Commercial License // that can be found in the LICENSE file. // +build !oss package trigger import ( "testing" "github.com/drone/drone-yaml/yaml" "github.com/mlclmj/drone/core" ) func Test_skipBranch(t *testing.T) { tests := []struct { config string branch string want bool }{ { config: "kind: pipeline\ntrigger: { }", branch: "master", want: false, }, { config: "kind: pipeline\ntrigger: { branch: [ master ] }", branch: "master", want: false, }, { config: "kind: pipeline\ntrigger: { branch: [ master ] }", branch: "develop", want: true, }, } for i, test := range tests { manifest, err := yaml.ParseString(test.config) if err != nil { t.Error(err) } pipeline := manifest.Resources[0].(*yaml.Pipeline) got, want := skipBranch(pipeline, test.branch), test.want if got != want { t.Errorf("Want test %d to return %v", i, want) } } } func Test_skipEvent(t *testing.T) { tests := []struct { config string event string want bool }{ { config: "kind: pipeline\ntrigger: { }", event: "push", want: false, }, { config: "kind: pipeline\ntrigger: { event: [ push ] }", event: "push", want: false, }, { config: "kind: pipeline\ntrigger: { event: [ push ] }", event: "pull_request", want: true, }, } for i, test := range tests { manifest, err := yaml.ParseString(test.config) if err != nil { t.Error(err) } pipeline := manifest.Resources[0].(*yaml.Pipeline) got, want := skipEvent(pipeline, test.event), test.want if got != want
} } // func Test_skipPath(t *testing.T) { // tests := []struct { // config string // paths []string // want bool // }{ // { // config: "trigger: { }", // paths: []string{}, // want: false, // }, // { // config: "trigger: { }", // paths: []string{"README.md"}, // want: false, // }, // { // config: "trigger: { paths: foo/* }", // paths: []string{"foo/README"}, // want: false, // }, // { // config: "trigger: { paths: foo/* }", // paths: []string{"bar/README"}, // want: true, // }, // // if empty changeset, never skip the pipeline // { // config: "trigger: { paths: foo/* }", // paths: []string{}, // want: false, // }, // // if max changeset, never skip the pipeline // { // config: "trigger: { paths: foo/* }", // paths: make([]string, 400), // want: false, // }, // } // for i, test := range tests { // document, err := config.ParseString(test.config) // if err != nil { // t.Error(err) // } // got, want := skipPaths(document, test.paths), test.want // if got != want { // t.Errorf("Want test %d to return %v", i, want) // } // } // } func Test_skipMessage(t *testing.T) { tests := []struct { event string message string title string want bool }{ { event: "push", message: "update readme", want: false, }, // skip when message contains [CI SKIP] { event: "push", message: "update readme [CI SKIP]", want: true, }, { event: "pull_request", message: "update readme [CI SKIP]", want: true, }, // skip when title contains [CI SKIP] { event: "push", title: "update readme [CI SKIP]", want: true, }, { event: "pull_request", title: "update readme [CI SKIP]", want: true, }, // ignore [CI SKIP] when event is tag { event: "tag", message: "update readme [CI SKIP]", want: false, }, { event: "tag", title: "update readme [CI SKIP]", want: false, }, { event: "cron", title: "update readme [CI SKIP]", want: false, }, { event: "cron", title: "update readme [CI SKIP]", want: false, }, { event: "custom", title: "update readme [CI SKIP]", want: false, }, { event: "custom", title: "update readme [CI SKIP]", want: false, }, } for _, test := range tests { hook := &core.Hook{ Message: test.message, Title: test.title, Event: test.event, } got, want := skipMessage(hook), test.want if got != want { t.Errorf("Want { event: %q, message: %q, title: %q } to return %v", test.event, test.message, test.title, want) } } } func Test_skipMessageEval(t *testing.T) { tests := []struct { eval string want bool }{ {"update readme", false}, // test [CI SKIP] {"foo [ci skip] bar", true}, {"foo [CI SKIP] bar", true}, {"foo [CI Skip] bar", true}, {"foo [CI SKIP]", true}, // test [SKIP CI] {"foo [skip ci] bar", true}, {"foo [SKIP CI] bar", true}, {"foo [Skip CI] bar", true}, {"foo [SKIP CI]", true}, // test ***NO_CI*** {"foo ***NO_CI*** bar", true}, {"foo ***NO_CI*** bar", true}, {"foo ***NO_CI*** bar", true}, {"foo ***NO_CI***", true}, } for _, test := range tests { got, want := skipMessageEval(test.eval), test.want if got != want { t.Errorf("Want %q to return %v, got %v", test.eval, want, got) } } }
{ t.Errorf("Want test %d to return %v", i, want) }
storage.py
''' * Use rocksdb as cardano-sl did. * Store each epoch in seperate db. 'b/' + hash -> block data 'u/' + hash -> undo data g -> hash of genesis block of epoch. * Main database: * 'c/tip' -> hash * 'b/' + hash -> BlockHeader * 'e/fl/' + hash -> hash of next block. * 'ut/t/' + txIn -> TxOut * 's/' + stake holder id * 's/ftssum' * 'a/' + addr -> 1 # address discovery. Sync ---- * get headers from storage current tip to network tip. * download blocks and save to db. ''' import os import cbor import rocksdb from .block import DecodedBlock, DecodedBlockHeader from . import config def iter_prefix(db, prefix): it = db.iteritems() it.seek(prefix) for k, v in it: if not k.startswith(prefix): break yield k, v def remove_prefix(db, prefix): batch = rocksdb.WriteBatch() for k, _ in iter_prefix(db, prefix): batch.delete(k) db.write(batch) class Storage(object): def __init__(self, root_path, readonly=False): print('create storage at', root_path) if not os.path.exists(root_path): os.makedirs(root_path) self._root_path = root_path opt = rocksdb.Options(create_if_missing=True) self.db = rocksdb.DB(os.path.join(self._root_path, 'db'), opt, readonly) self._tip = None # cache current tip header in memory. # cache recent used epoch db. self._current_epoch_db = None self._current_epoch = None def epoch_db_path(self, epoch): return os.path.join(self._root_path, 'epoch%d' % epoch) def open_epoch_db(self, epoch, readonly=False): if epoch != self._current_epoch: self._current_epoch = epoch self._current_epoch_db = rocksdb.DB( self.epoch_db_path(epoch), rocksdb.Options(create_if_missing=True), readonly ) return self._current_epoch_db def load_tip(self): h = self.db.get(b'c/tip') if h: return self.blockheader(h) def tip(self): if not self._tip: self._tip = self.load_tip() return self._tip def set_tip(self, hdr, batch=None): self._tip = hdr (batch or self.db).put(b'c/tip', hdr.hash()) def blockheader(self, h): buf = self.db.get(b'b/' + h) if buf: return DecodedBlockHeader.from_raw(buf, h) def raw_block(self, hdr): db = self.open_epoch_db(hdr.slot()[0], readonly=True) buf = db.get(b'b/' + hdr.hash()) if buf: return buf def block(self, hdr): raw = self.raw_block(hdr) if raw: return DecodedBlock.from_raw(raw) def undos(self, hdr): db = self.open_epoch_db(hdr.slot()[0], readonly=True) buf = db.get(b'u/' + hdr.hash()) if buf: return cbor.loads(buf) def genesis_block(self, epoch): db = self.open_epoch_db(epoch, readonly=True) h = db.get(b'g') assert h, 'epoch not exist: %d' % epoch return DecodedBlock.from_raw(db.get(h)) def blocks_rev(self, start_hash=None): 'Iterate blocks backwardly.' current_hash = start_hash or self.tip().hash() current_epoch = self.blockheader(current_hash).slot()[0] current_epoch_db = self.open_epoch_db(current_epoch, readonly=True) while True: raw = current_epoch_db.get(b'b/' + current_hash) if not raw: # try decrease epoch id. current_epoch -= 1 if current_epoch < 0: break current_epoch_db = self.open_epoch_db(current_epoch, readonly=True) continue blk = DecodedBlock(cbor.loads(raw), raw) yield blk current_hash = blk.header().prev_header() def blocks(self, start_hash=None): 'Iterate blocks forwardly.' 
if start_hash: current_epoch, _ = DecodedBlockHeader( cbor.loads(self.db.get(b'b/' + start_hash)) ).slot() else: start_hash = config.GENESIS_BLOCK_HASH current_epoch = 0 current_epoch_db = self.open_epoch_db(current_epoch, readonly=True) current_hash = start_hash raw = current_epoch_db.get(b'b/' + current_hash) yield DecodedBlock(cbor.loads(raw), raw) while True: current_hash = self.db.get(b'e/fl/' + current_hash) if not current_hash: return raw = current_epoch_db.get(b'b/' + current_hash) if raw: yield DecodedBlock(cbor.loads(raw), raw) continue # try increase epoch number. current_epoch += 1 current_epoch_db = self.open_epoch_db(current_epoch, readonly=True) if not current_epoch_db: return raw = current_epoch_db.get(b'b/' + current_hash) if not raw: return yield DecodedBlock(cbor.loads(raw), raw) def blockheaders_rev(self, start=None): 'Iterate block header backwardly.' current_hash = start or self.tip().hash() while True: raw = self.db.get(b'b/' + current_hash) if not raw: break hdr = DecodedBlockHeader(cbor.loads(raw), raw) yield hdr current_hash = hdr.prev_header() def blockheaders(self, start=None): current_hash = start or config.GENESIS_BLOCK_HASH while True: raw = self.db.get(b'b/' + current_hash) yield DecodedBlockHeader.from_raw(raw, current_hash) current_hash = self.db.get(b'e/fl/' + current_hash) if not current_hash: break def iter_header_hash(self, start=None): current_hash = start or config.GENESIS_BLOCK_HASH while True: yield current_hash current_hash = self.db.get(b'e/fl/' + current_hash) if not current_hash: break def blockheaders_noorder(self): 'Iterate block header in rocksdb order, fastest.' return map( lambda t: DecodedBlockHeader.from_raw(t[1], t[0][2:]), iter_prefix(self.db, b'b/') ) def append_block(self, block): hdr = block.header() batch = rocksdb.WriteBatch() # check prev_hash tip = self.tip() if tip: assert hdr.prev_header() == tip.hash(), 'invalid block.' 
h = hdr.hash() batch.put(b'b/' + h, hdr.raw()) batch.put(b'e/fl/' + hdr.prev_header(), h) undos = None if not block.is_genesis(): undos = self._get_block_undos(block) self.utxo_apply_block(block, batch) for tx in block.transactions(): for out in tx.outputs(): batch.put(b'a/' + out.addr, b'') self.set_tip(hdr, batch) self.db.write(batch) # write body epoch, _ = hdr.slot() db = self.open_epoch_db(epoch, readonly=False) batch = rocksdb.WriteBatch() if hdr.is_genesis(): assert not db.get(b'g') batch.put(b'g', h) else: batch.put(b'u/' + h, cbor.dumps(undos)) batch.put(b'b/' + h, block.raw()) db.write(batch) return undos def _get_block_undos(self, block): return [[self.get_output(txin) for txin in tx.inputs()] for tx in block.transactions()] def utxo_apply_block(self, block, batch): txins, utxo = block.utxos() for txin in txins: batch.delete(b'ut/t/' + cbor.dumps(txin)) for txin, txout in utxo.items(): batch.put(b'ut/t/' + cbor.dumps(txin), cbor.dumps(txout)) def iter_utxo(self): from .wallet import TxIn, TxOut prefix = b'ut/t/' for k, v in iter_prefix(self.db, prefix): yield TxIn(*cbor.loads(k[len(prefix):])), TxOut(*cbor.loads(v)) def iter_addresses(self): it = self.db.iterkeys() it.seek(b'a/') for k in it: if not k.startswith(b'a/'): break yield k[2:] def get_output(self, txin): from .wallet import TxOut data = self.db.get(b'ut/t/' + cbor.dumps(txin)) if data: return TxOut(*cbor.loads(data)) def hash_range(store, hstart, hstop, depth_limit): if hstart == hstop: assert depth_limit > 0 yield hstart return start = store.blockheader(hstart) stop = store.blockheader(hstop) assert start and stop assert stop.diffculty() > start.diffculty() assert stop.diffculty() - start.diffculty() < depth_limit for h in store.iter_header_hash(start): yield h if h == stop: break def fetch_raw_blocks(store, hstart, hstop): ''' ''' for h in hash_range(store, hstart, hstop, config.CHAIN['block']['recoveryHeadersMessage']): yield store.raw_block(store.blockheader(h)) def stream_raw_blocks(store, hstart):
for h in store.iter_header_hash(hstart): yield store.raw_block(store.blockheader(h))
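The module docstring above fixes the on-disk key scheme ('c/tip', 'b/' + hash, 'e/fl/' + hash, 'ut/t/' + txin, 'a/' + addr) that the Storage methods wrap. A minimal read-only usage sketch follows; the './test_db' path and the `storage` import name are assumptions for illustration, and it presupposes a database that has already been synced.

# A minimal sketch, not part of storage.py: path and import name are assumed.
from storage import Storage

store = Storage('./test_db', readonly=True)

# 'c/tip' holds the hash of the current tip; tip() resolves it to a header.
tip = store.tip()
if tip is not None:
    print('tip slot:', tip.slot())

# Walk headers backwards through the 'b/' + hash records.
for i, hdr in enumerate(store.blockheaders_rev()):
    print(hdr.hash().hex())
    if i >= 4:
        break

# Enumerate the UTxO set kept under the 'ut/t/' prefix.
for txin, txout in store.iter_utxo():
    print(txin, txout)
    break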
GradleSettings.ts
import path from 'path'
import fsExtra from 'fs-extra'
import { addTextOneLineAfter, replaceTextLineByLineAtPath } from '../common/commonLanguage'

export class GradleSettings {
  ProjectRootPath: string
  SettingsGradlePath: string

  constructor(ProjectRootPath: string) {
    this.ProjectRootPath = path.resolve(ProjectRootPath)
    this.SettingsGradlePath = path.join(this.ProjectRootPath, 'settings.gradle')
  }

  renameSettingGradleInclude(from: string, to: string): Error | null {
    if (!fsExtra.existsSync(this.SettingsGradlePath)) {
      return new Error(`settings.gradle does not exist: ${this.SettingsGradlePath}`)
    }
    const fromText = `include ':${from}'`
  }

  addGradleModuleInclude(addModule: string): Error | null {
    if (!fsExtra.existsSync(this.SettingsGradlePath)) {
      return new Error(`settings.gradle does not exist: ${this.SettingsGradlePath}`)
    }
    addTextOneLineAfter(
      this.SettingsGradlePath,
      /^include .*/,
      `include ':${addModule}'`
    )
    return null
  }
}
const toText = `include ':${to}'` replaceTextLineByLineAtPath(this.SettingsGradlePath, fromText, toText) return null
pyeval.py
#!/usr/bin/env python3 from __future__ import (unicode_literals, absolute_import, print_function, division) from functools import lru_cache from itertools import count, islice from signal import signal, SIGPIPE, SIG_DFL signal(SIGPIPE, SIG_DFL) import argparse import collections import collections.abc import contextlib import importlib import inspect import json import pydoc import sys, re, io import os, site if 'VIRTUAL_ENV' in os.environ: # derived from activate_this.py from the virtualenv package base = os.environ['VIRTUAL_ENV'] os.environ['PATH'] = os.pathsep.join([os.path.join(base, 'bin')] + os.environ['PATH'].split(os.pathsep)) prev_length = len(sys.path) site.addsitedir(os.path.realpath(site._get_path(base))) sys.path[:] = sys.path[prev_length:] + sys.path[0:prev_length] sys.prefix = base sys.path.insert(0, '') cache = lambda function: lru_cache(maxsize=128, typed=True)(function) try: from pythonpy.__version__ import __version__ except (ImportError, ValueError, SystemError): __version__ = '0.5.3' pyversion = sys.version.split(' ')[0] version_string = f'''Pythonpy {__version__} Python {pyversion}''' module_aliases = { 'mp' : 'matplotlib', 'np' : 'numpy', 'pd' : 'pandas', 'tf' : 'tensorflow', 'xa' : 'xarray' } ModuleAlias = collections.namedtuple('ModuleAlias', ('shorthand', 'modname')) IOHandles = collections.namedtuple('IOHandles', ('out', 'err')) aliases = { re.compile(rf"^{key}") : ModuleAlias(shorthand=key, modname=value) \ for key, value \ in module_aliases.items() } def iterlen(iterable): """ iterlen(iterable) → Return the number of items in “iterable.” This will consume iterables without a “__len__()” method – be careful! """ # Stolen from “more-itertools”: http://bit.ly/2LUZqCx try: return len(iterable) except TypeError as exc: if 'has no len' in str(exc): counter = count() collections.deque(zip(iterable, counter), maxlen=0) return next(counter) raise @cache def import_matches(query, prefix=''): for raw_module_name in frozenset( re.findall( rf"({prefix}[a-zA-Z_][a-zA-Z0-9_]*)\.?", query)): module_name = raw_module_name # Only de-alias module names at the top level, # and at most de-alias once: if prefix == '': for rgx, alias in aliases.items(): if rgx.match(module_name): module_name = rgx.sub(alias.modname, module_name) break try: module = importlib.import_module(module_name) except (ModuleNotFoundError, ImportError): pass else: globals()[raw_module_name] = module if module_name != raw_module_name: globals()[module_name] = module yield module yield from import_matches(query, prefix=rf"{module_name}\.") def lazy_imports(*args): query = ' '.join(x for x in args if x) yield from import_matches(query) def current_list(input): return current_list.rgx.split(input) current_list.rgx = re.compile(r'[^a-zA-Z0-9_\.]') def inspect_source(instance): try: return ''.join(inspect.getsourcelines(instance)[0]) except: return help(instance) parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, add_help=False) group = parser.add_argument_group("Options") parser.add_argument('expression', nargs='?', default='None', help="e.g. 
“py '2 ** 32'”") group.add_argument('-x', dest='lines_of_stdin', action='store_const', const=True, default=False, help='treat each row of stdin as “x”') group.add_argument('-fx', dest='filter_result', action='store_const', const=True, default=False, help=argparse.SUPPRESS) group.add_argument('-l', dest='list_of_stdin', action='store_const', const=True, default=False, help='treat list of stdin as “l”') group.add_argument('--ji', '--json_input', dest='json_input', action='store_const', const=True, default=False, help=argparse.SUPPRESS) group.add_argument('--jo', '--json_output', dest='json_output', action='store_const', const=True, default=False, help=argparse.SUPPRESS) group.add_argument('--si', '--split_input', dest='input_delimiter', help=argparse.SUPPRESS) group.add_argument('--so', '--split_output', dest='output_delimiter', help=argparse.SUPPRESS) group.add_argument('-p', '--pager', dest='pager', action='store_const', const=True, default=False, help=argparse.SUPPRESS) group.add_argument('-c', dest='pre_cmd', help='run code before expression') group.add_argument('-C', dest='post_cmd', help='run code after expression') group.add_argument('--i', '--ignore_exceptions', dest='ignore_exceptions', action='store_const', const=True, default=False, help=argparse.SUPPRESS) group.add_argument('-v', '--verbose', dest='verbosity', action='count', default=0, help='set the verbosity level (default=0)') group.add_argument('-V', '--version', action='version', version=version_string, help='show the current version and exit') group.add_argument('-h', '--help', action='help', help="show this help message and exit")
return exc @contextlib.contextmanager def redirect(args): """ Redirect “stdout” and “stderr” at the same time """ out, err = io.StringIO(), io.StringIO() with contextlib.ExitStack() as ctx: ctx.enter_context(contextlib.redirect_stdout(out)) ctx.enter_context(contextlib.redirect_stderr(err)) iohandles = IOHandles(out=out, err=err) try: yield iohandles except SystemExit as exc: raise exit("[ERROR] in cluval execution:", str(exc)) except BaseException as exc: import traceback pyheader = 'pythonpy/pyeval.py' exprheader = 'File "<string>"' foundexpr = False lines = traceback.format_exception(*sys.exc_info()) for line in lines: if pyheader in line: continue iohandles.err.write(line) if not foundexpr and line.lstrip().startswith(exprheader) and not isinstance(exc, SyntaxError): iohandles.err.write(' {}\n'.format(args.expression)) foundexpr = True raise exit(iohandles.err.getvalue()) def safe_eval(code, x): try: return eval(code) except: return None def pyeval(argv=None): """ Evaluate a Python expression from a set of CLI arguments. """ args = parser.parse_args(argv or sys.argv[1:]) with redirect(args) as iohandles: if sum([args.list_of_stdin, args.lines_of_stdin, args.filter_result]) > 1: raise exit('Pythonpy accepts at most one of [-x, -l] flags\n') if args.json_input: def loads(string): try: return json.loads(string.rstrip()) except BaseException as exc: if args.ignore_exceptions: pass else: raise exc stdin = (loads(x) for x in sys.stdin) elif args.input_delimiter: stdin = (re.split(args.input_delimiter, x.rstrip()) for x in sys.stdin) else: stdin = (x.rstrip() for x in sys.stdin) if args.expression: args.expression = args.expression.replace("`", "'") if args.expression.endswith('…'): args.expression = args.expression[:-1] args.pager = True if args.expression.startswith('?') or args.expression.endswith('?'): final_atom = current_list(args.expression.rstrip('?'))[-1] first_atom = current_list(args.expression.lstrip('?'))[0] if args.expression.startswith('??'): args.expression = f"inspect_source({first_atom})" elif args.expression.endswith('??'): args.expression = f"inspect_source({final_atom})" elif args.expression.startswith('?'): args.expression = f'inspect.getdoc({first_atom})' else: args.expression = f'inspect.getdoc({final_atom})' args.pager = True if args.lines_of_stdin: stdin = islice(stdin, 1) if args.expression.startswith('help('): args.pager = True if args.pre_cmd: args.pre_cmd = args.pre_cmd.replace("`", "'") if args.post_cmd: args.post_cmd = args.post_cmd.replace("`", "'") # DO THE IMPORTS: modules = tuple(lazy_imports(args.expression, args.pre_cmd, args.post_cmd)) if args.pre_cmd: exec(args.pre_cmd) if args.lines_of_stdin: if args.ignore_exceptions: result = (safe_eval(args.expression, x) for x in stdin) else: result = (eval(args.expression) for x in stdin) elif args.filter_result: if args.ignore_exceptions: result = (x for x in stdin if safe_eval(args.expression, x)) else: result = (x for x in stdin if eval(args.expression)) elif args.list_of_stdin: locals()['l'] = list(stdin) result = eval(args.expression) else: result = eval(args.expression) def prepare(output): if output is None: return None elif args.json_output: return json.dumps(output) elif args.output_delimiter: return args.output_delimiter.join(output) else: return output if isinstance(result, collections.abc.Iterable) and not \ isinstance(result, (str, bytes)): for x in result: formatted = prepare(x) if formatted is not None: iohandles.out.write(f"{formatted}\n") else: formatted = prepare(result) if formatted is not None: 
iohandles.out.write(f"{formatted}\n") if args.post_cmd: exec(args.post_cmd) sys.stdout.flush() # Extract rerouted «stdout» value: out = iohandles.out.getvalue() # Return extracted «stdout» and whether or not to page: return out, args.pager def main(): out, pager = pyeval() if pager: pydoc.pager(out) else: print(out, end='') if __name__ == '__main__': main()
def exit(*args, **kwargs): """ Craft and return (without raising!) a SystemExit exception """ exc = SystemExit("\n\t".join(args))
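Since pyeval() is callable directly (main() is only a thin wrapper that pages or prints the captured output), a short sketch of programmatic use follows; it assumes the module is importable as `pyeval`, and the expressions are arbitrary examples rather than part of the tool.

# A minimal sketch, assuming `from pyeval import pyeval` resolves.
from pyeval import pyeval

# Plain expression: evaluated, then written to the captured stdout.
out, pager = pyeval(['3 * 7'])
print(out, end='')        # -> 21

# Module names appearing in the expression are imported lazily,
# so no explicit `import math` is needed here.
out, pager = pyeval(['math.pi'])
print(out, end='')        # -> 3.141592653589793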
inner.rs
use std::cmp::{max, min}; use std::fmt; use std::future::Future; use std::sync::{Arc, Weak}; use std::time::{Duration, Instant}; use futures_channel::oneshot; use futures_util::stream::{FuturesUnordered, StreamExt}; use futures_util::TryFutureExt; use tokio::spawn; use tokio::time::{interval_at, sleep, timeout, Interval}; use crate::api::{Builder, ManageConnection, PooledConnection, RunError}; use crate::internals::{Approval, ApprovalIter, Conn, SharedPool, State}; pub(crate) struct PoolInner<M> where M: ManageConnection + Send, { inner: Arc<SharedPool<M>>, } impl<M> PoolInner<M> where M: ManageConnection + Send, { pub(crate) fn new(builder: Builder<M>, manager: M) -> Self { let inner = Arc::new(SharedPool::new(builder, manager));
let start = Instant::now() + shared.statics.reaper_rate; let interval = interval_at(start.into(), shared.statics.reaper_rate); schedule_reaping(interval, s); } } Self { inner } } pub(crate) async fn start_connections(&self) -> Result<(), M::Error> { let wanted = self.inner.internals.lock().wanted(&self.inner.statics); let mut stream = self.replenish_idle_connections(wanted); while let Some(result) = stream.next().await { result? } Ok(()) } pub(crate) fn spawn_start_connections(&self) { let mut locked = self.inner.internals.lock(); self.spawn_replenishing_approvals(locked.wanted(&self.inner.statics)); } fn spawn_replenishing_approvals(&self, approvals: ApprovalIter) { if approvals.len() == 0 { return; } let this = self.clone(); spawn(async move { let mut stream = this.replenish_idle_connections(approvals); while let Some(result) = stream.next().await { match result { Ok(()) => {} Err(e) => this.inner.statics.error_sink.sink(e), } } }); } fn replenish_idle_connections( &self, approvals: ApprovalIter, ) -> FuturesUnordered<impl Future<Output = Result<(), M::Error>>> { let stream = FuturesUnordered::new(); for approval in approvals { let this = self.clone(); stream.push(async move { this.add_connection(approval).await }); } stream } pub(crate) async fn get(&self) -> Result<PooledConnection<'_, M>, RunError<M::Error>> { self.make_pooled(|this, conn| PooledConnection::new(this, conn)) .await } pub(crate) async fn get_owned( &self, ) -> Result<PooledConnection<'static, M>, RunError<M::Error>> { self.make_pooled(|this, conn| { let pool = PoolInner { inner: Arc::clone(&this.inner), }; PooledConnection::new_owned(pool, conn) }) .await } pub(crate) async fn make_pooled<'a, 'b, F>( &'a self, make_pooled_conn: F, ) -> Result<PooledConnection<'b, M>, RunError<M::Error>> where F: Fn(&'a Self, Conn<M::Connection>) -> PooledConnection<'b, M>, { loop { let mut conn = { let mut locked = self.inner.internals.lock(); match locked.pop(&self.inner.statics) { Some((conn, approvals)) => { self.spawn_replenishing_approvals(approvals); make_pooled_conn(self, conn) } None => break, } }; if !self.inner.statics.test_on_check_out { return Ok(conn); } match self.inner.manager.is_valid(&mut conn).await { Ok(()) => return Ok(conn), Err(e) => { self.inner.statics.error_sink.sink(e); conn.drop_invalid(); continue; } } } let (tx, rx) = oneshot::channel(); { let mut locked = self.inner.internals.lock(); let approvals = locked.push_waiter(tx, &self.inner.statics); self.spawn_replenishing_approvals(approvals); }; match timeout(self.inner.statics.connection_timeout, rx).await { Ok(Ok(mut guard)) => Ok(make_pooled_conn(self, guard.extract())), _ => Err(RunError::TimedOut), } } pub(crate) async fn connect(&self) -> Result<M::Connection, M::Error> { let mut conn = self.inner.manager.connect().await?; self.on_acquire_connection(&mut conn).await?; Ok(conn) } /// Return connection back in to the pool pub(crate) fn put_back(&self, conn: Option<Conn<M::Connection>>) { let conn = conn.and_then(|mut conn| { if !self.inner.manager.has_broken(&mut conn.conn) { Some(conn) } else { None } }); let mut locked = self.inner.internals.lock(); match conn { Some(conn) => locked.put(conn, None, self.inner.clone()), None => { let approvals = locked.dropped(1, &self.inner.statics); self.spawn_replenishing_approvals(approvals); } } } /// Returns information about the current state of the pool. 
pub(crate) fn state(&self) -> State { self.inner.internals.lock().state() } fn reap(&self) { let mut internals = self.inner.internals.lock(); let approvals = internals.reap(&self.inner.statics); self.spawn_replenishing_approvals(approvals); } // Outside of Pool to avoid borrow splitting issues on self async fn add_connection(&self, approval: Approval) -> Result<(), M::Error> where M: ManageConnection, { let new_shared = Arc::downgrade(&self.inner); let shared = match new_shared.upgrade() { None => return Ok(()), Some(shared) => shared, }; let start = Instant::now(); let mut delay = Duration::from_secs(0); loop { let conn = shared .manager .connect() .and_then(|mut c| async { self.on_acquire_connection(&mut c).await.map(|_| c) }) .await; match conn { Ok(conn) => { let conn = Conn::new(conn); shared .internals .lock() .put(conn, Some(approval), self.inner.clone()); return Ok(()); } Err(e) => { if Instant::now() - start > self.inner.statics.connection_timeout { let mut locked = shared.internals.lock(); locked.connect_failed(approval); return Err(e); } else { delay = max(Duration::from_millis(200), delay); delay = min(self.inner.statics.connection_timeout / 2, delay * 2); sleep(delay).await; } } } } } async fn on_acquire_connection(&self, conn: &mut M::Connection) -> Result<(), M::Error> { match self.inner.statics.connection_customizer.as_ref() { Some(customizer) => customizer.on_acquire(conn).await, None => Ok(()), } } } impl<M> Clone for PoolInner<M> where M: ManageConnection, { fn clone(&self) -> Self { PoolInner { inner: self.inner.clone(), } } } impl<M> fmt::Debug for PoolInner<M> where M: ManageConnection, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_fmt(format_args!("PoolInner({:p})", self.inner)) } } fn schedule_reaping<M>(mut interval: Interval, weak_shared: Weak<SharedPool<M>>) where M: ManageConnection, { spawn(async move { loop { let _ = interval.tick().await; if let Some(inner) = weak_shared.upgrade() { PoolInner { inner }.reap() } else { break; } } }); }
if inner.statics.max_lifetime.is_some() || inner.statics.idle_timeout.is_some() { let s = Arc::downgrade(&inner); if let Some(shared) = s.upgrade() {
celery_app.py
# -*- coding: utf-8 -*- from celery import Celery import config if config.REDIS_PASSWD: redis_url = "redis://:{0}@{1}:{2}/{3}".format( config.REDIS_PASSWD, config.REDIS_HOST, config.REDIS_PORT, config.REDIS_DB ) else: redis_url = "redis://{0}:{1}/{2}".format( config.REDIS_HOST, config.REDIS_PORT, config.REDIS_DB ) celery_app = Celery( broker=redis_url, backend=redis_url, ) celery_app.conf.update( task_serializer="json", accept_content=["json"], result_serializer="json", timezone="Asia/Shanghai", enable_utc=True, ) celery_app.autodiscover_tasks([ "tasks", ], force=True) celery_app.conf.beat_schedule = { "parse_log": { "task": "parse_log", "schedule": 30
} }
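autodiscover_tasks(["tasks"]) and the "parse_log" beat entry assume a tasks module that registers a task under that name. A sketch of what such a module might look like follows; the body is a placeholder and only the registered task name has to match the schedule.

# tasks.py (sketch, not part of the original project): the task name must
# match the "parse_log" key used in celery_app.conf.beat_schedule.
from celery_app import celery_app

@celery_app.task(name="parse_log")
def parse_log():
    # real log-parsing work would happen here
    return "ok"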
mouse_controller.py
''' This is a sample class that you can use to control the mouse pointer. It uses the pyautogui library. You can set the precision for mouse movement (how much the mouse moves) and the speed (how fast it moves) by changing precision_dict and speed_dict. Calling the move function with the x and y output of the gaze estimation model will move the pointer. This class is provided to help get you started; you can choose whether you want to use it or create your own from scratch. ''' import pyautogui pyautogui.FAILSAFE = False class MouseController: def __init__(self, precision, speed): precision_dict={'high':100, 'low':1000, 'medium':500} speed_dict={'fast':1, 'slow':10, 'medium':5} self.precision=precision_dict[precision] self.speed=speed_dict[speed] def move(self, x, y):
pyautogui.moveRel(x*self.precision, -1*y*self.precision, duration=self.speed)
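A short usage sketch, assuming the file is importable as `mouse_controller`; the gaze coordinates are made-up stand-ins for the gaze estimation model's output.

# A minimal sketch, not part of mouse_controller.py.
from mouse_controller import MouseController

mouse = MouseController(precision='medium', speed='fast')

# x, y stand in for the gaze estimation output; the pointer is shifted by
# x * precision pixels horizontally and -y * precision pixels vertically.
gaze_x, gaze_y = 0.2, -0.1
mouse.move(gaze_x, gaze_y)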
instrumental.go
package instrumental import ( "bytes" "fmt" "io" "net" "regexp" "strings" "time" "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/config" "github.com/influxdata/telegraf/plugins/outputs" "github.com/influxdata/telegraf/plugins/serializers" "github.com/influxdata/telegraf/plugins/serializers/graphite" ) var ( ValueIncludesBadChar = regexp.MustCompile("[^[:digit:].]") MetricNameReplacer = regexp.MustCompile("[^-[:alnum:]_.]+") ) type Instrumental struct { Host string `toml:"host"` APIToken string `toml:"api_token"` Prefix string `toml:"prefix"` DataFormat string `toml:"data_format"` Template string `toml:"template"` Templates []string `toml:"templates"` Timeout config.Duration `toml:"timeout"` Debug bool `toml:"debug"` Log telegraf.Logger `toml:"-"` conn net.Conn } const ( DefaultHost = "collector.instrumentalapp.com" HelloMessage = "hello version go/telegraf/1.1\n" AuthFormat = "authenticate %s\n" HandshakeFormat = HelloMessage + AuthFormat ) var sampleConfig = ` ## Project API Token (required) api_token = "API Token" # required ## Prefix the metrics with a given name prefix = "" ## Stats output template (Graphite formatting) ## see https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_OUTPUT.md#graphite template = "host.tags.measurement.field" ## Timeout in seconds to connect timeout = "2s" ## Display Communication to Instrumental debug = false ` func (i *Instrumental) Connect() error { connection, err := net.DialTimeout("tcp", i.Host+":8000", time.Duration(i.Timeout)) if err != nil { i.conn = nil return err } err = i.authenticate(connection) if err != nil { i.conn = nil return err } return nil } func (i *Instrumental) Close() error { i.conn.Close() i.conn = nil return nil } func (i *Instrumental) Write(metrics []telegraf.Metric) error { if i.conn == nil { err := i.Connect() if err != nil { return fmt.Errorf("failed to (re)connect to Instrumental. Error: %s", err) } } s, err := serializers.NewGraphiteSerializer(i.Prefix, i.Template, false, "strict", ".", i.Templates) if err != nil { return err } var points []string var metricType string for _, m := range metrics { // Pull the metric_type out of the metric's tags. We don't want the type // to show up with the other tags pulled from the system, as they go in the // beginning of the line instead. 
// e.g we want: // // increment some_prefix.host.tag1.tag2.tag3.field value timestamp // // vs // // increment some_prefix.host.tag1.tag2.tag3.counter.field value timestamp // metricType = m.Tags()["metric_type"] m.RemoveTag("metric_type") buf, err := s.Serialize(m) if err != nil { i.Log.Debugf("Could not serialize metric: %v", err) continue } switch metricType { case "counter": fallthrough case "histogram": metricType = "increment" default: metricType = "gauge" } buffer := bytes.NewBuffer(buf) for { line, err := buffer.ReadBytes('\n') if err != nil { break } stat := string(line) // decompose "metric.name value time" splitStat := strings.SplitN(stat, " ", 3) name := splitStat[0] value := splitStat[1] time := splitStat[2] // replace invalid components of metric name with underscore cleanMetric := MetricNameReplacer.ReplaceAllString(name, "_") if !ValueIncludesBadChar.MatchString(value) { points = append(points, fmt.Sprintf("%s %s %s %s", metricType, cleanMetric, value, time)) } } } allPoints := strings.Join(points, "") _, err = fmt.Fprintf(i.conn, allPoints) if err != nil { if err == io.EOF { i.Close() } return err } // force the connection closed after sending data // to deal with various disconnection scenarios and eschew holding // open idle connections en masse i.Close() return nil } func (i *Instrumental) Description() string { return "Configuration for sending metrics to an Instrumental project" } func (i *Instrumental) SampleConfig() string { return sampleConfig } func (i *Instrumental) authenticate(conn net.Conn) error { _, err := fmt.Fprintf(conn, HandshakeFormat, i.APIToken) if err != nil { return err } // The response here will either be two "ok"s or an error message. responses := make([]byte, 512) if _, err = conn.Read(responses); err != nil { return err } if string(responses)[:6] != "ok\nok\n" { return fmt.Errorf("authentication failed: %s", responses) } i.conn = conn return nil } func
() { outputs.Add("instrumental", func() telegraf.Output { return &Instrumental{ Host: DefaultHost, Template: graphite.DefaultTemplate, } }) }
init
data.rs
//! Utilities for representations of data types and enum types. use std::collections::{BTreeMap, BTreeSet}; use quill_common::location::{Range, SourceFileIdentifier}; use quill_index::{EnumI, TypeConstructorI, TypeParameter}; use quill_type::{PrimitiveType, Type}; use quill_type_deduce::replace_type_variables; use crate::{sort_types::MonomorphisedItem, Representations}; use quill_monomorphise::monomorphisation::{ MonomorphisationParameters, MonomorphisedAspect, MonomorphisedType, }; #[derive(Debug, Clone)] pub struct DataRepresentation { /// Where in the file was this type defined? pub range: Range, pub file: SourceFileIdentifier, pub name: String, /// Maps Quill field names to the index of the field in the struct representation, /// if the field had a representation. /// If this contains *any* fields, we say that this data type "has a representation". field_indices: BTreeMap<String, FieldIndex>, /// Contains the types of *every* field in this data type, /// regardless if it has a representation or not. field_types: BTreeMap<String, Type>, } #[derive(Debug, Copy, Clone)] pub enum FieldIndex { /// The field is inside the struct at this position. Literal(u32), /// A pointer to the field is inside the struct at this position. Heap(u32), } impl PartialOrd for FieldIndex { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) } } impl Ord for FieldIndex { fn cmp(&self, other: &Self) -> std::cmp::Ordering { let i = match self { FieldIndex::Literal(i) | FieldIndex::Heap(i) => i, }; let j = match other { FieldIndex::Literal(j) | FieldIndex::Heap(j) => j, }; i.cmp(j) } } impl PartialEq for FieldIndex { fn eq(&self, other: &Self) -> bool { let i = match self { FieldIndex::Literal(i) | FieldIndex::Heap(i) => i, }; let j = match other { FieldIndex::Literal(j) | FieldIndex::Heap(j) => j, }; i == j } } impl Eq for FieldIndex {} impl DataRepresentation { /// Lists the fields which are stored indirectly (on the heap). pub fn
(&self) -> Vec<&str> { self.field_indices .iter() .filter_map(|(k, v)| { if matches!(v, FieldIndex::Heap(_)) { Some(k.as_str()) } else { None } }) .collect() } /// Checks to see if a field *with representation* exists in this data structure. pub fn has_field(&self, name: &str) -> bool { self.field_indices.contains_key(name) } /// Retrieves the type of the given field. pub fn field_ty(&self, name: &str) -> Option<&Type> { self.field_types.get(name) } /// Get a reference to the data representation's field indices. pub fn field_indices(&self) -> &BTreeMap<String, FieldIndex> { &self.field_indices } /// Get a reference to the data representation's field types. pub fn field_types(&self) -> &BTreeMap<String, Type> { &self.field_types } } #[derive(Debug)] pub struct EnumRepresentation { pub mono: MonomorphisedType, pub range: Range, /// Maps variant names to data representations of the enum variants. /// If a discriminant is required in the data representation, it will have field name `.discriminant`. pub variants: BTreeMap<String, DataRepresentation>, /// The discriminant values associated with each variant, if there is a discriminant. pub variant_discriminants: BTreeMap<String, u64>, } impl EnumRepresentation { /// By this point, `reprs` should contain the representations of all (non-indirected) fields in this enum type. pub fn new( reprs: &Representations, ty: &EnumI, mono: &MonomorphisedType, indirected_types: Vec<MonomorphisedItem>, ) -> Self { // Construct each enum variant as a data type with an extra integer discriminant field at the start. let variants = ty .variants .iter() .map(|variant| { let mut builder = DataRepresentationBuilder::new(reprs); builder.add_field( ".discriminant".to_string(), Type::Primitive(PrimitiveType::Int), &ty.type_params, &mono.mono, false, ); builder.add_fields( &variant.type_ctor, &ty.type_params, &mono.mono, indirected_types.clone(), ); ( variant.name.name.clone(), builder.build( &mono.name.source_file, ty.range, format!("{}@{}", mono, variant.name.name), ), ) }) .collect::<BTreeMap<_, _>>(); let variant_discriminants = ty .variants .iter() .enumerate() .map(|(i, variant)| (variant.name.name.clone(), i as u64)) .collect::<BTreeMap<_, _>>(); EnumRepresentation { mono: mono.clone(), range: ty.range, variants, variant_discriminants, } } } pub struct DataRepresentationBuilder<'a> { reprs: &'a Representations, field_indices: BTreeMap<String, FieldIndex>, field_types: BTreeMap<String, Type>, /// If a field's name is in this set, it can be accessed only behind a heap pointer. indirect_fields: BTreeSet<String>, } impl<'a> DataRepresentationBuilder<'a> { pub fn new(reprs: &'a Representations) -> Self { Self { reprs, field_indices: BTreeMap::new(), field_types: BTreeMap::new(), indirect_fields: BTreeSet::new(), } } pub fn add_field( &mut self, field_name: String, field_type: Type, type_params: &[TypeParameter], mono: &MonomorphisationParameters, indirect: bool, ) { self.field_types .insert(field_name.clone(), field_type.clone()); if indirect { self.indirect_fields.insert(field_name.clone()); self.field_indices.insert( field_name, FieldIndex::Heap(self.field_indices.len() as u32), ); } else if self.reprs.has_repr(replace_type_variables( field_type, type_params, mono.type_parameters(), )) { self.field_indices.insert( field_name, FieldIndex::Literal(self.field_indices.len() as u32), ); } else { // This field had no representation. } } /// Add the fields from a type constructor to this data type. 
pub fn add_fields( &mut self, type_ctor: &TypeConstructorI, type_params: &[TypeParameter], mono: &MonomorphisationParameters, indirected_types: Vec<MonomorphisedItem>, ) { for (field_name, field_ty) in &type_ctor.fields { let field_ty = replace_type_variables(field_ty.clone(), type_params, mono.type_parameters()); let indirect = match &field_ty { Type::Named { name, parameters } => { indirected_types.contains(&MonomorphisedItem::Type(MonomorphisedType { name: name.clone(), mono: MonomorphisationParameters::new(parameters.clone()), })) } Type::Impl { name, parameters } => { indirected_types.contains(&MonomorphisedItem::Aspect(MonomorphisedAspect { name: name.clone(), mono: MonomorphisationParameters::new(parameters.clone()), })) } _ => false, }; self.add_field( field_name.name.clone(), field_ty, type_params, mono, indirect, ); } } /// Returns a data representation. pub fn build( self, file: &SourceFileIdentifier, range: Range, name: String, ) -> DataRepresentation { DataRepresentation { range, field_indices: self.field_indices, field_types: self.field_types, file: file.clone(), name, } } }
field_names_on_heap
migrate-stories-to-6-2.spec.ts
import type { Tree } from '@nrwl/devkit'; import { joinPathFragments, writeJson } from '@nrwl/devkit'; import { createTreeWithEmptyWorkspace } from '@nrwl/devkit/testing'; import { Linter } from '@nrwl/linter'; import { storybookVersion } from '@nrwl/storybook'; import { overrideCollectionResolutionForTesting, wrapAngularDevkitSchematic, } from '@nrwl/tao/src/commands/ngcli-adapter'; import { findNodes } from '@nrwl/workspace/src/utils/ast-utils'; import * as ts from 'typescript'; import { SyntaxKind } from 'typescript'; import { getTsSourceFile } from '../../utils/nx-devkit/ast-utils'; import { nxVersion } from '../../utils/versions'; import { storybookConfigurationGenerator } from '../storybook-configuration/storybook-configuration'; import { angularMigrateStoriesTo62Generator } from './migrate-stories-to-6-2'; import libraryGenerator from '../library/library'; const componentSchematic = wrapAngularDevkitSchematic( '@schematics/angular', 'component' ); describe('migrate-stories-to-6-2 schematic', () => { let appTree: Tree; describe('angular project', () => { beforeEach(async () => { overrideCollectionResolutionForTesting({ '@nrwl/storybook': joinPathFragments( __dirname, '../../../../storybook/generators.json' ), }); appTree = createTreeWithEmptyWorkspace(); await libraryGenerator(appTree, { name: 'test-ui-lib', }); await componentSchematic(appTree, { name: 'test-button', project: 'test-ui-lib', }); writeJson(appTree, 'package.json', { devDependencies: { '@nrwl/storybook': nxVersion, '@storybook/addon-knobs': storybookVersion, '@storybook/angular': storybookVersion, }, }); await storybookConfigurationGenerator(appTree, { name: 'test-ui-lib', configureCypress: true, generateCypressSpecs: true, generateStories: true, linter: Linter.EsLint, }); appTree.write( `libs/test-ui-lib/src/lib/test-button/test-button.component.stories.ts`, ` import { text, number, boolean } from '@storybook/addon-knobs'; import { TestButtonComponent } from './test-button.component'; export default { title: 'TestButtonComponent', };
export const primary = () => ({ component: TestButtonComponent, moduleMetadata: { imports: [], }, props: { buttonType: text('buttonType', 'button'), style: text('style', 'default'), age: number('age', 0), isOn: boolean('isOn', false), }, }); export const secondary = () => ({ component: TestButtonComponent, moduleMetadata: { imports: [], }, props: {}, }); ` ); }); it('should move the component from the story to parameters.component', async () => { await angularMigrateStoriesTo62Generator(appTree); const storyFilePath = 'libs/test-ui-lib/src/lib/test-button/test-button.component.stories.ts'; const file = getTsSourceFile(appTree, storyFilePath); const storiesExportDefault = findNodes(file, [ ts.SyntaxKind.ExportAssignment, ]); const defaultExportNode = storiesExportDefault[0]; const defaultExportObject = defaultExportNode ?.getChildren() ?.find((node) => { return node.kind === SyntaxKind.ObjectLiteralExpression; }); const defaultPropertiesList = defaultExportObject ?.getChildren() ?.find((node) => { return node.kind === SyntaxKind.SyntaxList; }); const hasTitle = defaultPropertiesList?.getChildren()?.find((node) => { return ( node.kind === SyntaxKind.PropertyAssignment && node.getText().startsWith('title') ); }); const hasComponent = defaultPropertiesList ?.getChildren() ?.find((node) => { return ( node.kind === SyntaxKind.PropertyAssignment && node.getText().startsWith('component') ); }); expect(appTree.exists(storyFilePath)).toBeTruthy(); expect(hasTitle).toBeTruthy(); expect(hasComponent).toBeTruthy(); }); }); });
store.rs
// Generated by Molecule 0.6.1 use super::blockchain::*; use super::godwoken::*; use molecule::prelude::*; #[derive(Clone)] pub struct OutPointVec(molecule::bytes::Bytes); impl ::core::fmt::LowerHex for OutPointVec { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl ::core::fmt::Debug for OutPointVec { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl ::core::fmt::Display for OutPointVec { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} [", Self::NAME)?; for i in 0..self.len() { if i == 0 { write!(f, "{}", self.get_unchecked(i))?; } else { write!(f, ", {}", self.get_unchecked(i))?; } } write!(f, "]") } } impl ::core::default::Default for OutPointVec { fn default() -> Self { let v: Vec<u8> = vec![0, 0, 0, 0]; OutPointVec::new_unchecked(v.into()) } } impl OutPointVec { pub const ITEM_SIZE: usize = 36; pub fn total_size(&self) -> usize { molecule::NUMBER_SIZE * (self.item_count() + 1) } pub fn item_count(&self) -> usize { molecule::unpack_number(self.as_slice()) as usize } pub fn len(&self) -> usize { self.item_count() } pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn get(&self, idx: usize) -> Option<OutPoint> { if idx >= self.len() { None } else { Some(self.get_unchecked(idx)) } } pub fn get_unchecked(&self, idx: usize) -> OutPoint { let start = molecule::NUMBER_SIZE + Self::ITEM_SIZE * idx; let end = start + Self::ITEM_SIZE; OutPoint::new_unchecked(self.0.slice(start..end)) } pub fn as_reader<'r>(&'r self) -> OutPointVecReader<'r> { OutPointVecReader::new_unchecked(self.as_slice()) } } impl molecule::prelude::Entity for OutPointVec { type Builder = OutPointVecBuilder; const NAME: &'static str = "OutPointVec"; fn new_unchecked(data: molecule::bytes::Bytes) -> Self { OutPointVec(data) } fn as_bytes(&self) -> molecule::bytes::Bytes { self.0.clone() } fn as_slice(&self) -> &[u8] { &self.0[..] 
} fn from_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { OutPointVecReader::from_slice(slice).map(|reader| reader.to_entity()) } fn from_compatible_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { OutPointVecReader::from_compatible_slice(slice).map(|reader| reader.to_entity()) } fn new_builder() -> Self::Builder { ::core::default::Default::default() } fn as_builder(self) -> Self::Builder { Self::new_builder().extend(self.into_iter()) } } #[derive(Clone, Copy)] pub struct OutPointVecReader<'r>(&'r [u8]); impl<'r> ::core::fmt::LowerHex for OutPointVecReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl<'r> ::core::fmt::Debug for OutPointVecReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl<'r> ::core::fmt::Display for OutPointVecReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} [", Self::NAME)?; for i in 0..self.len() { if i == 0 { write!(f, "{}", self.get_unchecked(i))?; } else { write!(f, ", {}", self.get_unchecked(i))?; } } write!(f, "]") } } impl<'r> OutPointVecReader<'r> { pub const ITEM_SIZE: usize = 36; pub fn total_size(&self) -> usize { molecule::NUMBER_SIZE * (self.item_count() + 1) } pub fn item_count(&self) -> usize { molecule::unpack_number(self.as_slice()) as usize } pub fn len(&self) -> usize { self.item_count() } pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn get(&self, idx: usize) -> Option<OutPointReader<'r>> { if idx >= self.len() { None } else { Some(self.get_unchecked(idx)) } } pub fn get_unchecked(&self, idx: usize) -> OutPointReader<'r> { let start = molecule::NUMBER_SIZE + Self::ITEM_SIZE * idx; let end = start + Self::ITEM_SIZE; OutPointReader::new_unchecked(&self.as_slice()[start..end]) } } impl<'r> molecule::prelude::Reader<'r> for OutPointVecReader<'r> { type Entity = OutPointVec; const NAME: &'static str = "OutPointVecReader"; fn to_entity(&self) -> Self::Entity { Self::Entity::new_unchecked(self.as_slice().to_owned().into()) } fn new_unchecked(slice: &'r [u8]) -> Self { OutPointVecReader(slice) } fn as_slice(&self) -> &'r [u8] { self.0 } fn verify(slice: &[u8], _compatible: bool) -> molecule::error::VerificationResult<()> { use molecule::verification_error as ve; let slice_len = slice.len(); if slice_len < molecule::NUMBER_SIZE { return ve!(Self, HeaderIsBroken, molecule::NUMBER_SIZE, slice_len); } let item_count = molecule::unpack_number(slice) as usize; if item_count == 0 { if slice_len != molecule::NUMBER_SIZE { return ve!(Self, TotalSizeNotMatch, molecule::NUMBER_SIZE, slice_len); } return Ok(()); } let total_size = molecule::NUMBER_SIZE + Self::ITEM_SIZE * item_count; if slice_len != total_size { return ve!(Self, TotalSizeNotMatch, total_size, slice_len); } Ok(()) } } #[derive(Debug, Default)] pub struct OutPointVecBuilder(pub(crate) Vec<OutPoint>); impl OutPointVecBuilder { pub const ITEM_SIZE: usize = 36; pub fn set(mut self, v: Vec<OutPoint>) -> Self { self.0 = v; self } pub fn push(mut self, v: OutPoint) -> Self { self.0.push(v); self } pub fn extend<T: ::core::iter::IntoIterator<Item = OutPoint>>(mut self, iter: T) -> Self { for elem in iter { self.0.push(elem); } self } } impl molecule::prelude::Builder for OutPointVecBuilder { type Entity = OutPointVec; const NAME: &'static str = "OutPointVecBuilder"; fn expected_length(&self) 
-> usize { molecule::NUMBER_SIZE + Self::ITEM_SIZE * self.0.len() } fn write<W: ::molecule::io::Write>(&self, writer: &mut W) -> ::molecule::io::Result<()> { writer.write_all(&molecule::pack_number(self.0.len() as molecule::Number))?; for inner in &self.0[..] { writer.write_all(inner.as_slice())?; } Ok(()) } fn build(&self) -> Self::Entity { let mut inner = Vec::with_capacity(self.expected_length()); self.write(&mut inner) .unwrap_or_else(|_| panic!("{} build should be ok", Self::NAME)); OutPointVec::new_unchecked(inner.into()) } } pub struct OutPointVecIterator(OutPointVec, usize, usize); impl ::core::iter::Iterator for OutPointVecIterator { type Item = OutPoint; fn next(&mut self) -> Option<Self::Item> { if self.1 >= self.2 { None
let ret = self.0.get_unchecked(self.1); self.1 += 1; Some(ret) } } } impl ::core::iter::ExactSizeIterator for OutPointVecIterator { fn len(&self) -> usize { self.2 - self.1 } } impl ::core::iter::IntoIterator for OutPointVec { type Item = OutPoint; type IntoIter = OutPointVecIterator; fn into_iter(self) -> Self::IntoIter { let len = self.len(); OutPointVecIterator(self, 0, len) } } impl<'r> OutPointVecReader<'r> { pub fn iter<'t>(&'t self) -> OutPointVecReaderIterator<'t, 'r> { OutPointVecReaderIterator(&self, 0, self.len()) } } pub struct OutPointVecReaderIterator<'t, 'r>(&'t OutPointVecReader<'r>, usize, usize); impl<'t: 'r, 'r> ::core::iter::Iterator for OutPointVecReaderIterator<'t, 'r> { type Item = OutPointReader<'t>; fn next(&mut self) -> Option<Self::Item> { if self.1 >= self.2 { None } else { let ret = self.0.get_unchecked(self.1); self.1 += 1; Some(ret) } } } impl<'t: 'r, 'r> ::core::iter::ExactSizeIterator for OutPointVecReaderIterator<'t, 'r> { fn len(&self) -> usize { self.2 - self.1 } } #[derive(Clone)] pub struct NumberHash(molecule::bytes::Bytes); impl ::core::fmt::LowerHex for NumberHash { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl ::core::fmt::Debug for NumberHash { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl ::core::fmt::Display for NumberHash { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "number", self.number())?; write!(f, ", {}: {}", "block_hash", self.block_hash())?; write!(f, " }}") } } impl ::core::default::Default for NumberHash { fn default() -> Self { let v: Vec<u8> = vec![ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; NumberHash::new_unchecked(v.into()) } } impl NumberHash { pub const TOTAL_SIZE: usize = 40; pub const FIELD_SIZES: [usize; 2] = [8, 32]; pub const FIELD_COUNT: usize = 2; pub fn number(&self) -> Uint64 { Uint64::new_unchecked(self.0.slice(0..8)) } pub fn block_hash(&self) -> Byte32 { Byte32::new_unchecked(self.0.slice(8..40)) } pub fn as_reader<'r>(&'r self) -> NumberHashReader<'r> { NumberHashReader::new_unchecked(self.as_slice()) } } impl molecule::prelude::Entity for NumberHash { type Builder = NumberHashBuilder; const NAME: &'static str = "NumberHash"; fn new_unchecked(data: molecule::bytes::Bytes) -> Self { NumberHash(data) } fn as_bytes(&self) -> molecule::bytes::Bytes { self.0.clone() } fn as_slice(&self) -> &[u8] { &self.0[..] 
} fn from_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { NumberHashReader::from_slice(slice).map(|reader| reader.to_entity()) } fn from_compatible_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { NumberHashReader::from_compatible_slice(slice).map(|reader| reader.to_entity()) } fn new_builder() -> Self::Builder { ::core::default::Default::default() } fn as_builder(self) -> Self::Builder { Self::new_builder() .number(self.number()) .block_hash(self.block_hash()) } } #[derive(Clone, Copy)] pub struct NumberHashReader<'r>(&'r [u8]); impl<'r> ::core::fmt::LowerHex for NumberHashReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl<'r> ::core::fmt::Debug for NumberHashReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl<'r> ::core::fmt::Display for NumberHashReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "number", self.number())?; write!(f, ", {}: {}", "block_hash", self.block_hash())?; write!(f, " }}") } } impl<'r> NumberHashReader<'r> { pub const TOTAL_SIZE: usize = 40; pub const FIELD_SIZES: [usize; 2] = [8, 32]; pub const FIELD_COUNT: usize = 2; pub fn number(&self) -> Uint64Reader<'r> { Uint64Reader::new_unchecked(&self.as_slice()[0..8]) } pub fn block_hash(&self) -> Byte32Reader<'r> { Byte32Reader::new_unchecked(&self.as_slice()[8..40]) } } impl<'r> molecule::prelude::Reader<'r> for NumberHashReader<'r> { type Entity = NumberHash; const NAME: &'static str = "NumberHashReader"; fn to_entity(&self) -> Self::Entity { Self::Entity::new_unchecked(self.as_slice().to_owned().into()) } fn new_unchecked(slice: &'r [u8]) -> Self { NumberHashReader(slice) } fn as_slice(&self) -> &'r [u8] { self.0 } fn verify(slice: &[u8], _compatible: bool) -> molecule::error::VerificationResult<()> { use molecule::verification_error as ve; let slice_len = slice.len(); if slice_len != Self::TOTAL_SIZE { return ve!(Self, TotalSizeNotMatch, Self::TOTAL_SIZE, slice_len); } Ok(()) } } #[derive(Debug, Default)] pub struct NumberHashBuilder { pub(crate) number: Uint64, pub(crate) block_hash: Byte32, } impl NumberHashBuilder { pub const TOTAL_SIZE: usize = 40; pub const FIELD_SIZES: [usize; 2] = [8, 32]; pub const FIELD_COUNT: usize = 2; pub fn number(mut self, v: Uint64) -> Self { self.number = v; self } pub fn block_hash(mut self, v: Byte32) -> Self { self.block_hash = v; self } } impl molecule::prelude::Builder for NumberHashBuilder { type Entity = NumberHash; const NAME: &'static str = "NumberHashBuilder"; fn expected_length(&self) -> usize { Self::TOTAL_SIZE } fn write<W: ::molecule::io::Write>(&self, writer: &mut W) -> ::molecule::io::Result<()> { writer.write_all(self.number.as_slice())?; writer.write_all(self.block_hash.as_slice())?; Ok(()) } fn build(&self) -> Self::Entity { let mut inner = Vec::with_capacity(self.expected_length()); self.write(&mut inner) .unwrap_or_else(|_| panic!("{} build should be ok", Self::NAME)); NumberHash::new_unchecked(inner.into()) } } #[derive(Clone)] pub struct L2BlockCommittedInfo(molecule::bytes::Bytes); impl ::core::fmt::LowerHex for L2BlockCommittedInfo { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", 
hex_string(self.as_slice())) } } impl ::core::fmt::Debug for L2BlockCommittedInfo { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl ::core::fmt::Display for L2BlockCommittedInfo { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "number", self.number())?; write!(f, ", {}: {}", "block_hash", self.block_hash())?; write!(f, ", {}: {}", "transaction_hash", self.transaction_hash())?; write!(f, " }}") } } impl ::core::default::Default for L2BlockCommittedInfo { fn default() -> Self { let v: Vec<u8> = vec![ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; L2BlockCommittedInfo::new_unchecked(v.into()) } } impl L2BlockCommittedInfo { pub const TOTAL_SIZE: usize = 72; pub const FIELD_SIZES: [usize; 3] = [8, 32, 32]; pub const FIELD_COUNT: usize = 3; pub fn number(&self) -> Uint64 { Uint64::new_unchecked(self.0.slice(0..8)) } pub fn block_hash(&self) -> Byte32 { Byte32::new_unchecked(self.0.slice(8..40)) } pub fn transaction_hash(&self) -> Byte32 { Byte32::new_unchecked(self.0.slice(40..72)) } pub fn as_reader<'r>(&'r self) -> L2BlockCommittedInfoReader<'r> { L2BlockCommittedInfoReader::new_unchecked(self.as_slice()) } } impl molecule::prelude::Entity for L2BlockCommittedInfo { type Builder = L2BlockCommittedInfoBuilder; const NAME: &'static str = "L2BlockCommittedInfo"; fn new_unchecked(data: molecule::bytes::Bytes) -> Self { L2BlockCommittedInfo(data) } fn as_bytes(&self) -> molecule::bytes::Bytes { self.0.clone() } fn as_slice(&self) -> &[u8] { &self.0[..] 
} fn from_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { L2BlockCommittedInfoReader::from_slice(slice).map(|reader| reader.to_entity()) } fn from_compatible_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { L2BlockCommittedInfoReader::from_compatible_slice(slice).map(|reader| reader.to_entity()) } fn new_builder() -> Self::Builder { ::core::default::Default::default() } fn as_builder(self) -> Self::Builder { Self::new_builder() .number(self.number()) .block_hash(self.block_hash()) .transaction_hash(self.transaction_hash()) } } #[derive(Clone, Copy)] pub struct L2BlockCommittedInfoReader<'r>(&'r [u8]); impl<'r> ::core::fmt::LowerHex for L2BlockCommittedInfoReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl<'r> ::core::fmt::Debug for L2BlockCommittedInfoReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl<'r> ::core::fmt::Display for L2BlockCommittedInfoReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "number", self.number())?; write!(f, ", {}: {}", "block_hash", self.block_hash())?; write!(f, ", {}: {}", "transaction_hash", self.transaction_hash())?; write!(f, " }}") } } impl<'r> L2BlockCommittedInfoReader<'r> { pub const TOTAL_SIZE: usize = 72; pub const FIELD_SIZES: [usize; 3] = [8, 32, 32]; pub const FIELD_COUNT: usize = 3; pub fn number(&self) -> Uint64Reader<'r> { Uint64Reader::new_unchecked(&self.as_slice()[0..8]) } pub fn block_hash(&self) -> Byte32Reader<'r> { Byte32Reader::new_unchecked(&self.as_slice()[8..40]) } pub fn transaction_hash(&self) -> Byte32Reader<'r> { Byte32Reader::new_unchecked(&self.as_slice()[40..72]) } } impl<'r> molecule::prelude::Reader<'r> for L2BlockCommittedInfoReader<'r> { type Entity = L2BlockCommittedInfo; const NAME: &'static str = "L2BlockCommittedInfoReader"; fn to_entity(&self) -> Self::Entity { Self::Entity::new_unchecked(self.as_slice().to_owned().into()) } fn new_unchecked(slice: &'r [u8]) -> Self { L2BlockCommittedInfoReader(slice) } fn as_slice(&self) -> &'r [u8] { self.0 } fn verify(slice: &[u8], _compatible: bool) -> molecule::error::VerificationResult<()> { use molecule::verification_error as ve; let slice_len = slice.len(); if slice_len != Self::TOTAL_SIZE { return ve!(Self, TotalSizeNotMatch, Self::TOTAL_SIZE, slice_len); } Ok(()) } } #[derive(Debug, Default)] pub struct L2BlockCommittedInfoBuilder { pub(crate) number: Uint64, pub(crate) block_hash: Byte32, pub(crate) transaction_hash: Byte32, } impl L2BlockCommittedInfoBuilder { pub const TOTAL_SIZE: usize = 72; pub const FIELD_SIZES: [usize; 3] = [8, 32, 32]; pub const FIELD_COUNT: usize = 3; pub fn number(mut self, v: Uint64) -> Self { self.number = v; self } pub fn block_hash(mut self, v: Byte32) -> Self { self.block_hash = v; self } pub fn transaction_hash(mut self, v: Byte32) -> Self { self.transaction_hash = v; self } } impl molecule::prelude::Builder for L2BlockCommittedInfoBuilder { type Entity = L2BlockCommittedInfo; const NAME: &'static str = "L2BlockCommittedInfoBuilder"; fn expected_length(&self) -> usize { Self::TOTAL_SIZE } fn write<W: ::molecule::io::Write>(&self, writer: &mut W) -> ::molecule::io::Result<()> { writer.write_all(self.number.as_slice())?; writer.write_all(self.block_hash.as_slice())?; 
writer.write_all(self.transaction_hash.as_slice())?; Ok(()) } fn build(&self) -> Self::Entity { let mut inner = Vec::with_capacity(self.expected_length()); self.write(&mut inner) .unwrap_or_else(|_| panic!("{} build should be ok", Self::NAME)); L2BlockCommittedInfo::new_unchecked(inner.into()) } } #[derive(Clone)] pub struct TransactionKey(molecule::bytes::Bytes); impl ::core::fmt::LowerHex for TransactionKey { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl ::core::fmt::Debug for TransactionKey { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl ::core::fmt::Display for TransactionKey { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; let raw_data = hex_string(&self.raw_data()); write!(f, "{}(0x{})", Self::NAME, raw_data) } } impl ::core::default::Default for TransactionKey { fn default() -> Self { let v: Vec<u8> = vec![ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; TransactionKey::new_unchecked(v.into()) } } impl TransactionKey { pub const TOTAL_SIZE: usize = 36; pub const ITEM_SIZE: usize = 1; pub const ITEM_COUNT: usize = 36; pub fn nth0(&self) -> Byte { Byte::new_unchecked(self.0.slice(0..1)) } pub fn nth1(&self) -> Byte { Byte::new_unchecked(self.0.slice(1..2)) } pub fn nth2(&self) -> Byte { Byte::new_unchecked(self.0.slice(2..3)) } pub fn nth3(&self) -> Byte { Byte::new_unchecked(self.0.slice(3..4)) } pub fn nth4(&self) -> Byte { Byte::new_unchecked(self.0.slice(4..5)) } pub fn nth5(&self) -> Byte { Byte::new_unchecked(self.0.slice(5..6)) } pub fn nth6(&self) -> Byte { Byte::new_unchecked(self.0.slice(6..7)) } pub fn nth7(&self) -> Byte { Byte::new_unchecked(self.0.slice(7..8)) } pub fn nth8(&self) -> Byte { Byte::new_unchecked(self.0.slice(8..9)) } pub fn nth9(&self) -> Byte { Byte::new_unchecked(self.0.slice(9..10)) } pub fn nth10(&self) -> Byte { Byte::new_unchecked(self.0.slice(10..11)) } pub fn nth11(&self) -> Byte { Byte::new_unchecked(self.0.slice(11..12)) } pub fn nth12(&self) -> Byte { Byte::new_unchecked(self.0.slice(12..13)) } pub fn nth13(&self) -> Byte { Byte::new_unchecked(self.0.slice(13..14)) } pub fn nth14(&self) -> Byte { Byte::new_unchecked(self.0.slice(14..15)) } pub fn nth15(&self) -> Byte { Byte::new_unchecked(self.0.slice(15..16)) } pub fn nth16(&self) -> Byte { Byte::new_unchecked(self.0.slice(16..17)) } pub fn nth17(&self) -> Byte { Byte::new_unchecked(self.0.slice(17..18)) } pub fn nth18(&self) -> Byte { Byte::new_unchecked(self.0.slice(18..19)) } pub fn nth19(&self) -> Byte { Byte::new_unchecked(self.0.slice(19..20)) } pub fn nth20(&self) -> Byte { Byte::new_unchecked(self.0.slice(20..21)) } pub fn nth21(&self) -> Byte { Byte::new_unchecked(self.0.slice(21..22)) } pub fn nth22(&self) -> Byte { Byte::new_unchecked(self.0.slice(22..23)) } pub fn nth23(&self) -> Byte { Byte::new_unchecked(self.0.slice(23..24)) } pub fn nth24(&self) -> Byte { Byte::new_unchecked(self.0.slice(24..25)) } pub fn nth25(&self) -> Byte { Byte::new_unchecked(self.0.slice(25..26)) } pub fn nth26(&self) -> Byte { Byte::new_unchecked(self.0.slice(26..27)) } pub fn nth27(&self) -> Byte { Byte::new_unchecked(self.0.slice(27..28)) } pub fn nth28(&self) -> Byte { Byte::new_unchecked(self.0.slice(28..29)) } pub fn nth29(&self) -> Byte { 
Byte::new_unchecked(self.0.slice(29..30)) } pub fn nth30(&self) -> Byte { Byte::new_unchecked(self.0.slice(30..31)) } pub fn nth31(&self) -> Byte { Byte::new_unchecked(self.0.slice(31..32)) } pub fn nth32(&self) -> Byte { Byte::new_unchecked(self.0.slice(32..33)) } pub fn nth33(&self) -> Byte { Byte::new_unchecked(self.0.slice(33..34)) } pub fn nth34(&self) -> Byte { Byte::new_unchecked(self.0.slice(34..35)) } pub fn nth35(&self) -> Byte { Byte::new_unchecked(self.0.slice(35..36)) } pub fn raw_data(&self) -> molecule::bytes::Bytes { self.as_bytes() } pub fn as_reader<'r>(&'r self) -> TransactionKeyReader<'r> { TransactionKeyReader::new_unchecked(self.as_slice()) } } impl molecule::prelude::Entity for TransactionKey { type Builder = TransactionKeyBuilder; const NAME: &'static str = "TransactionKey"; fn new_unchecked(data: molecule::bytes::Bytes) -> Self { TransactionKey(data) } fn as_bytes(&self) -> molecule::bytes::Bytes { self.0.clone() } fn as_slice(&self) -> &[u8] { &self.0[..] } fn from_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { TransactionKeyReader::from_slice(slice).map(|reader| reader.to_entity()) } fn from_compatible_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { TransactionKeyReader::from_compatible_slice(slice).map(|reader| reader.to_entity()) } fn new_builder() -> Self::Builder { ::core::default::Default::default() } fn as_builder(self) -> Self::Builder { Self::new_builder().set([ self.nth0(), self.nth1(), self.nth2(), self.nth3(), self.nth4(), self.nth5(), self.nth6(), self.nth7(), self.nth8(), self.nth9(), self.nth10(), self.nth11(), self.nth12(), self.nth13(), self.nth14(), self.nth15(), self.nth16(), self.nth17(), self.nth18(), self.nth19(), self.nth20(), self.nth21(), self.nth22(), self.nth23(), self.nth24(), self.nth25(), self.nth26(), self.nth27(), self.nth28(), self.nth29(), self.nth30(), self.nth31(), self.nth32(), self.nth33(), self.nth34(), self.nth35(), ]) } } #[derive(Clone, Copy)] pub struct TransactionKeyReader<'r>(&'r [u8]); impl<'r> ::core::fmt::LowerHex for TransactionKeyReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl<'r> ::core::fmt::Debug for TransactionKeyReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl<'r> ::core::fmt::Display for TransactionKeyReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; let raw_data = hex_string(&self.raw_data()); write!(f, "{}(0x{})", Self::NAME, raw_data) } } impl<'r> TransactionKeyReader<'r> { pub const TOTAL_SIZE: usize = 36; pub const ITEM_SIZE: usize = 1; pub const ITEM_COUNT: usize = 36; pub fn nth0(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[0..1]) } pub fn nth1(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[1..2]) } pub fn nth2(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[2..3]) } pub fn nth3(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[3..4]) } pub fn nth4(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[4..5]) } pub fn nth5(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[5..6]) } pub fn nth6(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[6..7]) } pub fn nth7(&self) -> ByteReader<'r> { 
ByteReader::new_unchecked(&self.as_slice()[7..8]) } pub fn nth8(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[8..9]) } pub fn nth9(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[9..10]) } pub fn nth10(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[10..11]) } pub fn nth11(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[11..12]) } pub fn nth12(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[12..13]) } pub fn nth13(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[13..14]) } pub fn nth14(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[14..15]) } pub fn nth15(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[15..16]) } pub fn nth16(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[16..17]) } pub fn nth17(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[17..18]) } pub fn nth18(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[18..19]) } pub fn nth19(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[19..20]) } pub fn nth20(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[20..21]) } pub fn nth21(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[21..22]) } pub fn nth22(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[22..23]) } pub fn nth23(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[23..24]) } pub fn nth24(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[24..25]) } pub fn nth25(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[25..26]) } pub fn nth26(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[26..27]) } pub fn nth27(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[27..28]) } pub fn nth28(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[28..29]) } pub fn nth29(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[29..30]) } pub fn nth30(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[30..31]) } pub fn nth31(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[31..32]) } pub fn nth32(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[32..33]) } pub fn nth33(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[33..34]) } pub fn nth34(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[34..35]) } pub fn nth35(&self) -> ByteReader<'r> { ByteReader::new_unchecked(&self.as_slice()[35..36]) } pub fn raw_data(&self) -> &'r [u8] { self.as_slice() } } impl<'r> molecule::prelude::Reader<'r> for TransactionKeyReader<'r> { type Entity = TransactionKey; const NAME: &'static str = "TransactionKeyReader"; fn to_entity(&self) -> Self::Entity { Self::Entity::new_unchecked(self.as_slice().to_owned().into()) } fn new_unchecked(slice: &'r [u8]) -> Self { TransactionKeyReader(slice) } fn as_slice(&self) -> &'r [u8] { self.0 } fn verify(slice: &[u8], _compatible: bool) -> molecule::error::VerificationResult<()> { use molecule::verification_error as ve; let slice_len = slice.len(); if slice_len != Self::TOTAL_SIZE { return ve!(Self, TotalSizeNotMatch, Self::TOTAL_SIZE, slice_len); } Ok(()) } } pub struct TransactionKeyBuilder(pub(crate) [Byte; 36]); impl ::core::fmt::Debug for TransactionKeyBuilder { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result 
{ write!(f, "{}({:?})", Self::NAME, &self.0[..]) } } impl ::core::default::Default for TransactionKeyBuilder { fn default() -> Self { TransactionKeyBuilder([ Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), Byte::default(), ]) } } impl TransactionKeyBuilder { pub const TOTAL_SIZE: usize = 36; pub const ITEM_SIZE: usize = 1; pub const ITEM_COUNT: usize = 36; pub fn set(mut self, v: [Byte; 36]) -> Self { self.0 = v; self } pub fn nth0(mut self, v: Byte) -> Self { self.0[0] = v; self } pub fn nth1(mut self, v: Byte) -> Self { self.0[1] = v; self } pub fn nth2(mut self, v: Byte) -> Self { self.0[2] = v; self } pub fn nth3(mut self, v: Byte) -> Self { self.0[3] = v; self } pub fn nth4(mut self, v: Byte) -> Self { self.0[4] = v; self } pub fn nth5(mut self, v: Byte) -> Self { self.0[5] = v; self } pub fn nth6(mut self, v: Byte) -> Self { self.0[6] = v; self } pub fn nth7(mut self, v: Byte) -> Self { self.0[7] = v; self } pub fn nth8(mut self, v: Byte) -> Self { self.0[8] = v; self } pub fn nth9(mut self, v: Byte) -> Self { self.0[9] = v; self } pub fn nth10(mut self, v: Byte) -> Self { self.0[10] = v; self } pub fn nth11(mut self, v: Byte) -> Self { self.0[11] = v; self } pub fn nth12(mut self, v: Byte) -> Self { self.0[12] = v; self } pub fn nth13(mut self, v: Byte) -> Self { self.0[13] = v; self } pub fn nth14(mut self, v: Byte) -> Self { self.0[14] = v; self } pub fn nth15(mut self, v: Byte) -> Self { self.0[15] = v; self } pub fn nth16(mut self, v: Byte) -> Self { self.0[16] = v; self } pub fn nth17(mut self, v: Byte) -> Self { self.0[17] = v; self } pub fn nth18(mut self, v: Byte) -> Self { self.0[18] = v; self } pub fn nth19(mut self, v: Byte) -> Self { self.0[19] = v; self } pub fn nth20(mut self, v: Byte) -> Self { self.0[20] = v; self } pub fn nth21(mut self, v: Byte) -> Self { self.0[21] = v; self } pub fn nth22(mut self, v: Byte) -> Self { self.0[22] = v; self } pub fn nth23(mut self, v: Byte) -> Self { self.0[23] = v; self } pub fn nth24(mut self, v: Byte) -> Self { self.0[24] = v; self } pub fn nth25(mut self, v: Byte) -> Self { self.0[25] = v; self } pub fn nth26(mut self, v: Byte) -> Self { self.0[26] = v; self } pub fn nth27(mut self, v: Byte) -> Self { self.0[27] = v; self } pub fn nth28(mut self, v: Byte) -> Self { self.0[28] = v; self } pub fn nth29(mut self, v: Byte) -> Self { self.0[29] = v; self } pub fn nth30(mut self, v: Byte) -> Self { self.0[30] = v; self } pub fn nth31(mut self, v: Byte) -> Self { self.0[31] = v; self } pub fn nth32(mut self, v: Byte) -> Self { self.0[32] = v; self } pub fn nth33(mut self, v: Byte) -> Self { self.0[33] = v; self } pub fn nth34(mut self, v: Byte) -> Self { self.0[34] = v; self } pub fn nth35(mut self, v: Byte) -> Self { self.0[35] = v; self } } impl molecule::prelude::Builder for TransactionKeyBuilder { type Entity = TransactionKey; const NAME: &'static str = "TransactionKeyBuilder"; fn expected_length(&self) -> usize { Self::TOTAL_SIZE } fn write<W: ::molecule::io::Write>(&self, writer: &mut W) -> 
::molecule::io::Result<()> { writer.write_all(self.0[0].as_slice())?; writer.write_all(self.0[1].as_slice())?; writer.write_all(self.0[2].as_slice())?; writer.write_all(self.0[3].as_slice())?; writer.write_all(self.0[4].as_slice())?; writer.write_all(self.0[5].as_slice())?; writer.write_all(self.0[6].as_slice())?; writer.write_all(self.0[7].as_slice())?; writer.write_all(self.0[8].as_slice())?; writer.write_all(self.0[9].as_slice())?; writer.write_all(self.0[10].as_slice())?; writer.write_all(self.0[11].as_slice())?; writer.write_all(self.0[12].as_slice())?; writer.write_all(self.0[13].as_slice())?; writer.write_all(self.0[14].as_slice())?; writer.write_all(self.0[15].as_slice())?; writer.write_all(self.0[16].as_slice())?; writer.write_all(self.0[17].as_slice())?; writer.write_all(self.0[18].as_slice())?; writer.write_all(self.0[19].as_slice())?; writer.write_all(self.0[20].as_slice())?; writer.write_all(self.0[21].as_slice())?; writer.write_all(self.0[22].as_slice())?; writer.write_all(self.0[23].as_slice())?; writer.write_all(self.0[24].as_slice())?; writer.write_all(self.0[25].as_slice())?; writer.write_all(self.0[26].as_slice())?; writer.write_all(self.0[27].as_slice())?; writer.write_all(self.0[28].as_slice())?; writer.write_all(self.0[29].as_slice())?; writer.write_all(self.0[30].as_slice())?; writer.write_all(self.0[31].as_slice())?; writer.write_all(self.0[32].as_slice())?; writer.write_all(self.0[33].as_slice())?; writer.write_all(self.0[34].as_slice())?; writer.write_all(self.0[35].as_slice())?; Ok(()) } fn build(&self) -> Self::Entity { let mut inner = Vec::with_capacity(self.expected_length()); self.write(&mut inner) .unwrap_or_else(|_| panic!("{} build should be ok", Self::NAME)); TransactionKey::new_unchecked(inner.into()) } } #[derive(Clone)] pub struct TransactionInfo(molecule::bytes::Bytes); impl ::core::fmt::LowerHex for TransactionInfo { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl ::core::fmt::Debug for TransactionInfo { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl ::core::fmt::Display for TransactionInfo { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "block_number", self.block_number())?; write!(f, ", {}: {}", "key", self.key())?; write!(f, " }}") } } impl ::core::default::Default for TransactionInfo { fn default() -> Self { let v: Vec<u8> = vec![ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; TransactionInfo::new_unchecked(v.into()) } } impl TransactionInfo { pub const TOTAL_SIZE: usize = 44; pub const FIELD_SIZES: [usize; 2] = [8, 36]; pub const FIELD_COUNT: usize = 2; pub fn block_number(&self) -> Uint64 { Uint64::new_unchecked(self.0.slice(0..8)) } pub fn key(&self) -> TransactionKey { TransactionKey::new_unchecked(self.0.slice(8..44)) } pub fn as_reader<'r>(&'r self) -> TransactionInfoReader<'r> { TransactionInfoReader::new_unchecked(self.as_slice()) } } impl molecule::prelude::Entity for TransactionInfo { type Builder = TransactionInfoBuilder; const NAME: &'static str = "TransactionInfo"; fn new_unchecked(data: molecule::bytes::Bytes) -> Self { TransactionInfo(data) } fn as_bytes(&self) -> molecule::bytes::Bytes { self.0.clone() } fn as_slice(&self) -> &[u8] { &self.0[..] 
} fn from_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { TransactionInfoReader::from_slice(slice).map(|reader| reader.to_entity()) } fn from_compatible_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { TransactionInfoReader::from_compatible_slice(slice).map(|reader| reader.to_entity()) } fn new_builder() -> Self::Builder { ::core::default::Default::default() } fn as_builder(self) -> Self::Builder { Self::new_builder() .block_number(self.block_number()) .key(self.key()) } } #[derive(Clone, Copy)] pub struct TransactionInfoReader<'r>(&'r [u8]); impl<'r> ::core::fmt::LowerHex for TransactionInfoReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl<'r> ::core::fmt::Debug for TransactionInfoReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl<'r> ::core::fmt::Display for TransactionInfoReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "block_number", self.block_number())?; write!(f, ", {}: {}", "key", self.key())?; write!(f, " }}") } } impl<'r> TransactionInfoReader<'r> { pub const TOTAL_SIZE: usize = 44; pub const FIELD_SIZES: [usize; 2] = [8, 36]; pub const FIELD_COUNT: usize = 2; pub fn block_number(&self) -> Uint64Reader<'r> { Uint64Reader::new_unchecked(&self.as_slice()[0..8]) } pub fn key(&self) -> TransactionKeyReader<'r> { TransactionKeyReader::new_unchecked(&self.as_slice()[8..44]) } } impl<'r> molecule::prelude::Reader<'r> for TransactionInfoReader<'r> { type Entity = TransactionInfo; const NAME: &'static str = "TransactionInfoReader"; fn to_entity(&self) -> Self::Entity { Self::Entity::new_unchecked(self.as_slice().to_owned().into()) } fn new_unchecked(slice: &'r [u8]) -> Self { TransactionInfoReader(slice) } fn as_slice(&self) -> &'r [u8] { self.0 } fn verify(slice: &[u8], _compatible: bool) -> molecule::error::VerificationResult<()> { use molecule::verification_error as ve; let slice_len = slice.len(); if slice_len != Self::TOTAL_SIZE { return ve!(Self, TotalSizeNotMatch, Self::TOTAL_SIZE, slice_len); } Ok(()) } } #[derive(Debug, Default)] pub struct TransactionInfoBuilder { pub(crate) block_number: Uint64, pub(crate) key: TransactionKey, } impl TransactionInfoBuilder { pub const TOTAL_SIZE: usize = 44; pub const FIELD_SIZES: [usize; 2] = [8, 36]; pub const FIELD_COUNT: usize = 2; pub fn block_number(mut self, v: Uint64) -> Self { self.block_number = v; self } pub fn key(mut self, v: TransactionKey) -> Self { self.key = v; self } } impl molecule::prelude::Builder for TransactionInfoBuilder { type Entity = TransactionInfo; const NAME: &'static str = "TransactionInfoBuilder"; fn expected_length(&self) -> usize { Self::TOTAL_SIZE } fn write<W: ::molecule::io::Write>(&self, writer: &mut W) -> ::molecule::io::Result<()> { writer.write_all(self.block_number.as_slice())?; writer.write_all(self.key.as_slice())?; Ok(()) } fn build(&self) -> Self::Entity { let mut inner = Vec::with_capacity(self.expected_length()); self.write(&mut inner) .unwrap_or_else(|_| panic!("{} build should be ok", Self::NAME)); TransactionInfo::new_unchecked(inner.into()) } } #[derive(Clone)] pub struct LogItem(molecule::bytes::Bytes); impl ::core::fmt::LowerHex for LogItem { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use 
molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl ::core::fmt::Debug for LogItem { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl ::core::fmt::Display for LogItem { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "account_id", self.account_id())?; write!(f, ", {}: {}", "service_flag", self.service_flag())?; write!(f, ", {}: {}", "data", self.data())?; let extra_count = self.count_extra_fields(); if extra_count != 0 { write!(f, ", .. ({} fields)", extra_count)?; } write!(f, " }}") } } impl ::core::default::Default for LogItem { fn default() -> Self { let v: Vec<u8> = vec![ 25, 0, 0, 0, 16, 0, 0, 0, 20, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; LogItem::new_unchecked(v.into()) } } impl LogItem { pub const FIELD_COUNT: usize = 3; pub fn total_size(&self) -> usize { molecule::unpack_number(self.as_slice()) as usize } pub fn field_count(&self) -> usize { if self.total_size() == molecule::NUMBER_SIZE { 0 } else { (molecule::unpack_number(&self.as_slice()[molecule::NUMBER_SIZE..]) as usize / 4) - 1 } } pub fn count_extra_fields(&self) -> usize { self.field_count() - Self::FIELD_COUNT } pub fn has_extra_fields(&self) -> bool { Self::FIELD_COUNT != self.field_count() } pub fn account_id(&self) -> Uint32 { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[4..]) as usize; let end = molecule::unpack_number(&slice[8..]) as usize; Uint32::new_unchecked(self.0.slice(start..end)) } pub fn service_flag(&self) -> Byte { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[8..]) as usize; let end = molecule::unpack_number(&slice[12..]) as usize; Byte::new_unchecked(self.0.slice(start..end)) } pub fn data(&self) -> Bytes { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[12..]) as usize; if self.has_extra_fields() { let end = molecule::unpack_number(&slice[16..]) as usize; Bytes::new_unchecked(self.0.slice(start..end)) } else { Bytes::new_unchecked(self.0.slice(start..)) } } pub fn as_reader<'r>(&'r self) -> LogItemReader<'r> { LogItemReader::new_unchecked(self.as_slice()) } } impl molecule::prelude::Entity for LogItem { type Builder = LogItemBuilder; const NAME: &'static str = "LogItem"; fn new_unchecked(data: molecule::bytes::Bytes) -> Self { LogItem(data) } fn as_bytes(&self) -> molecule::bytes::Bytes { self.0.clone() } fn as_slice(&self) -> &[u8] { &self.0[..] 
} fn from_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { LogItemReader::from_slice(slice).map(|reader| reader.to_entity()) } fn from_compatible_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { LogItemReader::from_compatible_slice(slice).map(|reader| reader.to_entity()) } fn new_builder() -> Self::Builder { ::core::default::Default::default() } fn as_builder(self) -> Self::Builder { Self::new_builder() .account_id(self.account_id()) .service_flag(self.service_flag()) .data(self.data()) } } #[derive(Clone, Copy)] pub struct LogItemReader<'r>(&'r [u8]); impl<'r> ::core::fmt::LowerHex for LogItemReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl<'r> ::core::fmt::Debug for LogItemReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl<'r> ::core::fmt::Display for LogItemReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "account_id", self.account_id())?; write!(f, ", {}: {}", "service_flag", self.service_flag())?; write!(f, ", {}: {}", "data", self.data())?; let extra_count = self.count_extra_fields(); if extra_count != 0 { write!(f, ", .. ({} fields)", extra_count)?; } write!(f, " }}") } } impl<'r> LogItemReader<'r> { pub const FIELD_COUNT: usize = 3; pub fn total_size(&self) -> usize { molecule::unpack_number(self.as_slice()) as usize } pub fn field_count(&self) -> usize { if self.total_size() == molecule::NUMBER_SIZE { 0 } else { (molecule::unpack_number(&self.as_slice()[molecule::NUMBER_SIZE..]) as usize / 4) - 1 } } pub fn count_extra_fields(&self) -> usize { self.field_count() - Self::FIELD_COUNT } pub fn has_extra_fields(&self) -> bool { Self::FIELD_COUNT != self.field_count() } pub fn account_id(&self) -> Uint32Reader<'r> { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[4..]) as usize; let end = molecule::unpack_number(&slice[8..]) as usize; Uint32Reader::new_unchecked(&self.as_slice()[start..end]) } pub fn service_flag(&self) -> ByteReader<'r> { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[8..]) as usize; let end = molecule::unpack_number(&slice[12..]) as usize; ByteReader::new_unchecked(&self.as_slice()[start..end]) } pub fn data(&self) -> BytesReader<'r> { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[12..]) as usize; if self.has_extra_fields() { let end = molecule::unpack_number(&slice[16..]) as usize; BytesReader::new_unchecked(&self.as_slice()[start..end]) } else { BytesReader::new_unchecked(&self.as_slice()[start..]) } } } impl<'r> molecule::prelude::Reader<'r> for LogItemReader<'r> { type Entity = LogItem; const NAME: &'static str = "LogItemReader"; fn to_entity(&self) -> Self::Entity { Self::Entity::new_unchecked(self.as_slice().to_owned().into()) } fn new_unchecked(slice: &'r [u8]) -> Self { LogItemReader(slice) } fn as_slice(&self) -> &'r [u8] { self.0 } fn verify(slice: &[u8], compatible: bool) -> molecule::error::VerificationResult<()> { use molecule::verification_error as ve; let slice_len = slice.len(); if slice_len < molecule::NUMBER_SIZE { return ve!(Self, HeaderIsBroken, molecule::NUMBER_SIZE, slice_len); } let total_size = molecule::unpack_number(slice) as usize; if slice_len != total_size { return ve!(Self, TotalSizeNotMatch, total_size, 
slice_len); } if slice_len == molecule::NUMBER_SIZE && Self::FIELD_COUNT == 0 { return Ok(()); } if slice_len < molecule::NUMBER_SIZE * 2 { return ve!(Self, HeaderIsBroken, molecule::NUMBER_SIZE * 2, slice_len); } let offset_first = molecule::unpack_number(&slice[molecule::NUMBER_SIZE..]) as usize; if offset_first % 4 != 0 || offset_first < molecule::NUMBER_SIZE * 2 { return ve!(Self, OffsetsNotMatch); } let field_count = offset_first / 4 - 1; if field_count < Self::FIELD_COUNT { return ve!(Self, FieldCountNotMatch, Self::FIELD_COUNT, field_count); } else if !compatible && field_count > Self::FIELD_COUNT { return ve!(Self, FieldCountNotMatch, Self::FIELD_COUNT, field_count); }; let header_size = molecule::NUMBER_SIZE * (field_count + 1); if slice_len < header_size { return ve!(Self, HeaderIsBroken, header_size, slice_len); } let mut offsets: Vec<usize> = slice[molecule::NUMBER_SIZE..] .chunks(molecule::NUMBER_SIZE) .take(field_count) .map(|x| molecule::unpack_number(x) as usize) .collect(); offsets.push(total_size); if offsets.windows(2).any(|i| i[0] > i[1]) { return ve!(Self, OffsetsNotMatch); } Uint32Reader::verify(&slice[offsets[0]..offsets[1]], compatible)?; ByteReader::verify(&slice[offsets[1]..offsets[2]], compatible)?; BytesReader::verify(&slice[offsets[2]..offsets[3]], compatible)?; Ok(()) } } #[derive(Debug, Default)] pub struct LogItemBuilder { pub(crate) account_id: Uint32, pub(crate) service_flag: Byte, pub(crate) data: Bytes, } impl LogItemBuilder { pub const FIELD_COUNT: usize = 3; pub fn account_id(mut self, v: Uint32) -> Self { self.account_id = v; self } pub fn service_flag(mut self, v: Byte) -> Self { self.service_flag = v; self } pub fn data(mut self, v: Bytes) -> Self { self.data = v; self } } impl molecule::prelude::Builder for LogItemBuilder { type Entity = LogItem; const NAME: &'static str = "LogItemBuilder"; fn expected_length(&self) -> usize { molecule::NUMBER_SIZE * (Self::FIELD_COUNT + 1) + self.account_id.as_slice().len() + self.service_flag.as_slice().len() + self.data.as_slice().len() } fn write<W: ::molecule::io::Write>(&self, writer: &mut W) -> ::molecule::io::Result<()> { let mut total_size = molecule::NUMBER_SIZE * (Self::FIELD_COUNT + 1); let mut offsets = Vec::with_capacity(Self::FIELD_COUNT); offsets.push(total_size); total_size += self.account_id.as_slice().len(); offsets.push(total_size); total_size += self.service_flag.as_slice().len(); offsets.push(total_size); total_size += self.data.as_slice().len(); writer.write_all(&molecule::pack_number(total_size as molecule::Number))?; for offset in offsets.into_iter() { writer.write_all(&molecule::pack_number(offset as molecule::Number))?; } writer.write_all(self.account_id.as_slice())?; writer.write_all(self.service_flag.as_slice())?; writer.write_all(self.data.as_slice())?; Ok(()) } fn build(&self) -> Self::Entity { let mut inner = Vec::with_capacity(self.expected_length()); self.write(&mut inner) .unwrap_or_else(|_| panic!("{} build should be ok", Self::NAME)); LogItem::new_unchecked(inner.into()) } } #[derive(Clone)] pub struct LogItemVec(molecule::bytes::Bytes); impl ::core::fmt::LowerHex for LogItemVec { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl ::core::fmt::Debug for LogItemVec { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl ::core::fmt::Display for LogItemVec { fn fmt(&self, f: 
&mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} [", Self::NAME)?; for i in 0..self.len() { if i == 0 { write!(f, "{}", self.get_unchecked(i))?; } else { write!(f, ", {}", self.get_unchecked(i))?; } } write!(f, "]") } } impl ::core::default::Default for LogItemVec { fn default() -> Self { let v: Vec<u8> = vec![4, 0, 0, 0]; LogItemVec::new_unchecked(v.into()) } } impl LogItemVec { pub fn total_size(&self) -> usize { molecule::unpack_number(self.as_slice()) as usize } pub fn item_count(&self) -> usize { if self.total_size() == molecule::NUMBER_SIZE { 0 } else { (molecule::unpack_number(&self.as_slice()[molecule::NUMBER_SIZE..]) as usize / 4) - 1 } } pub fn len(&self) -> usize { self.item_count() } pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn get(&self, idx: usize) -> Option<LogItem> { if idx >= self.len() { None } else { Some(self.get_unchecked(idx)) } } pub fn get_unchecked(&self, idx: usize) -> LogItem { let slice = self.as_slice(); let start_idx = molecule::NUMBER_SIZE * (1 + idx); let start = molecule::unpack_number(&slice[start_idx..]) as usize; if idx == self.len() - 1 { LogItem::new_unchecked(self.0.slice(start..)) } else { let end_idx = start_idx + molecule::NUMBER_SIZE; let end = molecule::unpack_number(&slice[end_idx..]) as usize; LogItem::new_unchecked(self.0.slice(start..end)) } } pub fn as_reader<'r>(&'r self) -> LogItemVecReader<'r> { LogItemVecReader::new_unchecked(self.as_slice()) } } impl molecule::prelude::Entity for LogItemVec { type Builder = LogItemVecBuilder; const NAME: &'static str = "LogItemVec"; fn new_unchecked(data: molecule::bytes::Bytes) -> Self { LogItemVec(data) } fn as_bytes(&self) -> molecule::bytes::Bytes { self.0.clone() } fn as_slice(&self) -> &[u8] { &self.0[..] } fn from_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { LogItemVecReader::from_slice(slice).map(|reader| reader.to_entity()) } fn from_compatible_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { LogItemVecReader::from_compatible_slice(slice).map(|reader| reader.to_entity()) } fn new_builder() -> Self::Builder { ::core::default::Default::default() } fn as_builder(self) -> Self::Builder { Self::new_builder().extend(self.into_iter()) } } #[derive(Clone, Copy)] pub struct LogItemVecReader<'r>(&'r [u8]); impl<'r> ::core::fmt::LowerHex for LogItemVecReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl<'r> ::core::fmt::Debug for LogItemVecReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl<'r> ::core::fmt::Display for LogItemVecReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} [", Self::NAME)?; for i in 0..self.len() { if i == 0 { write!(f, "{}", self.get_unchecked(i))?; } else { write!(f, ", {}", self.get_unchecked(i))?; } } write!(f, "]") } } impl<'r> LogItemVecReader<'r> { pub fn total_size(&self) -> usize { molecule::unpack_number(self.as_slice()) as usize } pub fn item_count(&self) -> usize { if self.total_size() == molecule::NUMBER_SIZE { 0 } else { (molecule::unpack_number(&self.as_slice()[molecule::NUMBER_SIZE..]) as usize / 4) - 1 } } pub fn len(&self) -> usize { self.item_count() } pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn get(&self, idx: usize) -> Option<LogItemReader<'r>> { if idx >= self.len() { None } else { 
Some(self.get_unchecked(idx)) } } pub fn get_unchecked(&self, idx: usize) -> LogItemReader<'r> { let slice = self.as_slice(); let start_idx = molecule::NUMBER_SIZE * (1 + idx); let start = molecule::unpack_number(&slice[start_idx..]) as usize; if idx == self.len() - 1 { LogItemReader::new_unchecked(&self.as_slice()[start..]) } else { let end_idx = start_idx + molecule::NUMBER_SIZE; let end = molecule::unpack_number(&slice[end_idx..]) as usize; LogItemReader::new_unchecked(&self.as_slice()[start..end]) } } } impl<'r> molecule::prelude::Reader<'r> for LogItemVecReader<'r> { type Entity = LogItemVec; const NAME: &'static str = "LogItemVecReader"; fn to_entity(&self) -> Self::Entity { Self::Entity::new_unchecked(self.as_slice().to_owned().into()) } fn new_unchecked(slice: &'r [u8]) -> Self { LogItemVecReader(slice) } fn as_slice(&self) -> &'r [u8] { self.0 } fn verify(slice: &[u8], compatible: bool) -> molecule::error::VerificationResult<()> { use molecule::verification_error as ve; let slice_len = slice.len(); if slice_len < molecule::NUMBER_SIZE { return ve!(Self, HeaderIsBroken, molecule::NUMBER_SIZE, slice_len); } let total_size = molecule::unpack_number(slice) as usize; if slice_len != total_size { return ve!(Self, TotalSizeNotMatch, total_size, slice_len); } if slice_len == molecule::NUMBER_SIZE { return Ok(()); } if slice_len < molecule::NUMBER_SIZE * 2 { return ve!( Self, TotalSizeNotMatch, molecule::NUMBER_SIZE * 2, slice_len ); } let offset_first = molecule::unpack_number(&slice[molecule::NUMBER_SIZE..]) as usize; if offset_first % 4 != 0 || offset_first < molecule::NUMBER_SIZE * 2 { return ve!(Self, OffsetsNotMatch); } let item_count = offset_first / 4 - 1; let header_size = molecule::NUMBER_SIZE * (item_count + 1); if slice_len < header_size { return ve!(Self, HeaderIsBroken, header_size, slice_len); } let mut offsets: Vec<usize> = slice[molecule::NUMBER_SIZE..] 
.chunks(molecule::NUMBER_SIZE) .take(item_count) .map(|x| molecule::unpack_number(x) as usize) .collect(); offsets.push(total_size); if offsets.windows(2).any(|i| i[0] > i[1]) { return ve!(Self, OffsetsNotMatch); } for pair in offsets.windows(2) { let start = pair[0]; let end = pair[1]; LogItemReader::verify(&slice[start..end], compatible)?; } Ok(()) } } #[derive(Debug, Default)] pub struct LogItemVecBuilder(pub(crate) Vec<LogItem>); impl LogItemVecBuilder { pub fn set(mut self, v: Vec<LogItem>) -> Self { self.0 = v; self } pub fn push(mut self, v: LogItem) -> Self { self.0.push(v); self } pub fn extend<T: ::core::iter::IntoIterator<Item = LogItem>>(mut self, iter: T) -> Self { for elem in iter { self.0.push(elem); } self } } impl molecule::prelude::Builder for LogItemVecBuilder { type Entity = LogItemVec; const NAME: &'static str = "LogItemVecBuilder"; fn expected_length(&self) -> usize { molecule::NUMBER_SIZE * (self.0.len() + 1) + self .0 .iter() .map(|inner| inner.as_slice().len()) .sum::<usize>() } fn write<W: ::molecule::io::Write>(&self, writer: &mut W) -> ::molecule::io::Result<()> { let item_count = self.0.len(); if item_count == 0 { writer.write_all(&molecule::pack_number( molecule::NUMBER_SIZE as molecule::Number, ))?; } else { let (total_size, offsets) = self.0.iter().fold( ( molecule::NUMBER_SIZE * (item_count + 1), Vec::with_capacity(item_count), ), |(start, mut offsets), inner| { offsets.push(start); (start + inner.as_slice().len(), offsets) }, ); writer.write_all(&molecule::pack_number(total_size as molecule::Number))?; for offset in offsets.into_iter() { writer.write_all(&molecule::pack_number(offset as molecule::Number))?; } for inner in self.0.iter() { writer.write_all(inner.as_slice())?; } } Ok(()) } fn build(&self) -> Self::Entity { let mut inner = Vec::with_capacity(self.expected_length()); self.write(&mut inner) .unwrap_or_else(|_| panic!("{} build should be ok", Self::NAME)); LogItemVec::new_unchecked(inner.into()) } } pub struct LogItemVecIterator(LogItemVec, usize, usize); impl ::core::iter::Iterator for LogItemVecIterator { type Item = LogItem; fn next(&mut self) -> Option<Self::Item> { if self.1 >= self.2 { None } else { let ret = self.0.get_unchecked(self.1); self.1 += 1; Some(ret) } } } impl ::core::iter::ExactSizeIterator for LogItemVecIterator { fn len(&self) -> usize { self.2 - self.1 } } impl ::core::iter::IntoIterator for LogItemVec { type Item = LogItem; type IntoIter = LogItemVecIterator; fn into_iter(self) -> Self::IntoIter { let len = self.len(); LogItemVecIterator(self, 0, len) } } impl<'r> LogItemVecReader<'r> { pub fn iter<'t>(&'t self) -> LogItemVecReaderIterator<'t, 'r> { LogItemVecReaderIterator(&self, 0, self.len()) } } pub struct LogItemVecReaderIterator<'t, 'r>(&'t LogItemVecReader<'r>, usize, usize); impl<'t: 'r, 'r> ::core::iter::Iterator for LogItemVecReaderIterator<'t, 'r> { type Item = LogItemReader<'t>; fn next(&mut self) -> Option<Self::Item> { if self.1 >= self.2 { None } else { let ret = self.0.get_unchecked(self.1); self.1 += 1; Some(ret) } } } impl<'t: 'r, 'r> ::core::iter::ExactSizeIterator for LogItemVecReaderIterator<'t, 'r> { fn len(&self) -> usize { self.2 - self.1 } } #[derive(Clone)] pub struct TxReceipt(molecule::bytes::Bytes); impl ::core::fmt::LowerHex for TxReceipt { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl ::core::fmt::Debug for TxReceipt { fn fmt(&self, f: &mut 
::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl ::core::fmt::Display for TxReceipt { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "tx_witness_hash", self.tx_witness_hash())?; write!(f, ", {}: {}", "post_state", self.post_state())?; write!(f, ", {}: {}", "read_data_hashes", self.read_data_hashes())?; write!(f, ", {}: {}", "logs", self.logs())?; let extra_count = self.count_extra_fields(); if extra_count != 0 { write!(f, ", .. ({} fields)", extra_count)?; } write!(f, " }}") } } impl ::core::default::Default for TxReceipt { fn default() -> Self { let v: Vec<u8> = vec![ 96, 0, 0, 0, 20, 0, 0, 0, 52, 0, 0, 0, 88, 0, 0, 0, 92, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, ]; TxReceipt::new_unchecked(v.into()) } } impl TxReceipt { pub const FIELD_COUNT: usize = 4; pub fn total_size(&self) -> usize { molecule::unpack_number(self.as_slice()) as usize } pub fn field_count(&self) -> usize { if self.total_size() == molecule::NUMBER_SIZE { 0 } else { (molecule::unpack_number(&self.as_slice()[molecule::NUMBER_SIZE..]) as usize / 4) - 1 } } pub fn count_extra_fields(&self) -> usize { self.field_count() - Self::FIELD_COUNT } pub fn has_extra_fields(&self) -> bool { Self::FIELD_COUNT != self.field_count() } pub fn tx_witness_hash(&self) -> Byte32 { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[4..]) as usize; let end = molecule::unpack_number(&slice[8..]) as usize; Byte32::new_unchecked(self.0.slice(start..end)) } pub fn post_state(&self) -> AccountMerkleState { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[8..]) as usize; let end = molecule::unpack_number(&slice[12..]) as usize; AccountMerkleState::new_unchecked(self.0.slice(start..end)) } pub fn read_data_hashes(&self) -> Byte32Vec { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[12..]) as usize; let end = molecule::unpack_number(&slice[16..]) as usize; Byte32Vec::new_unchecked(self.0.slice(start..end)) } pub fn logs(&self) -> LogItemVec { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[16..]) as usize; if self.has_extra_fields() { let end = molecule::unpack_number(&slice[20..]) as usize; LogItemVec::new_unchecked(self.0.slice(start..end)) } else { LogItemVec::new_unchecked(self.0.slice(start..)) } } pub fn as_reader<'r>(&'r self) -> TxReceiptReader<'r> { TxReceiptReader::new_unchecked(self.as_slice()) } } impl molecule::prelude::Entity for TxReceipt { type Builder = TxReceiptBuilder; const NAME: &'static str = "TxReceipt"; fn new_unchecked(data: molecule::bytes::Bytes) -> Self { TxReceipt(data) } fn as_bytes(&self) -> molecule::bytes::Bytes { self.0.clone() } fn as_slice(&self) -> &[u8] { &self.0[..] 
} fn from_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { TxReceiptReader::from_slice(slice).map(|reader| reader.to_entity()) } fn from_compatible_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { TxReceiptReader::from_compatible_slice(slice).map(|reader| reader.to_entity()) } fn new_builder() -> Self::Builder { ::core::default::Default::default() } fn as_builder(self) -> Self::Builder { Self::new_builder() .tx_witness_hash(self.tx_witness_hash()) .post_state(self.post_state()) .read_data_hashes(self.read_data_hashes()) .logs(self.logs()) } } #[derive(Clone, Copy)] pub struct TxReceiptReader<'r>(&'r [u8]); impl<'r> ::core::fmt::LowerHex for TxReceiptReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl<'r> ::core::fmt::Debug for TxReceiptReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl<'r> ::core::fmt::Display for TxReceiptReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "tx_witness_hash", self.tx_witness_hash())?; write!(f, ", {}: {}", "post_state", self.post_state())?; write!(f, ", {}: {}", "read_data_hashes", self.read_data_hashes())?; write!(f, ", {}: {}", "logs", self.logs())?; let extra_count = self.count_extra_fields(); if extra_count != 0 { write!(f, ", .. ({} fields)", extra_count)?; } write!(f, " }}") } } impl<'r> TxReceiptReader<'r> { pub const FIELD_COUNT: usize = 4; pub fn total_size(&self) -> usize { molecule::unpack_number(self.as_slice()) as usize } pub fn field_count(&self) -> usize { if self.total_size() == molecule::NUMBER_SIZE { 0 } else { (molecule::unpack_number(&self.as_slice()[molecule::NUMBER_SIZE..]) as usize / 4) - 1 } } pub fn count_extra_fields(&self) -> usize { self.field_count() - Self::FIELD_COUNT } pub fn has_extra_fields(&self) -> bool { Self::FIELD_COUNT != self.field_count() } pub fn tx_witness_hash(&self) -> Byte32Reader<'r> { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[4..]) as usize; let end = molecule::unpack_number(&slice[8..]) as usize; Byte32Reader::new_unchecked(&self.as_slice()[start..end]) } pub fn post_state(&self) -> AccountMerkleStateReader<'r> { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[8..]) as usize; let end = molecule::unpack_number(&slice[12..]) as usize; AccountMerkleStateReader::new_unchecked(&self.as_slice()[start..end]) } pub fn read_data_hashes(&self) -> Byte32VecReader<'r> { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[12..]) as usize; let end = molecule::unpack_number(&slice[16..]) as usize; Byte32VecReader::new_unchecked(&self.as_slice()[start..end]) } pub fn logs(&self) -> LogItemVecReader<'r> { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[16..]) as usize; if self.has_extra_fields() { let end = molecule::unpack_number(&slice[20..]) as usize; LogItemVecReader::new_unchecked(&self.as_slice()[start..end]) } else { LogItemVecReader::new_unchecked(&self.as_slice()[start..]) } } } impl<'r> molecule::prelude::Reader<'r> for TxReceiptReader<'r> { type Entity = TxReceipt; const NAME: &'static str = "TxReceiptReader"; fn to_entity(&self) -> Self::Entity { Self::Entity::new_unchecked(self.as_slice().to_owned().into()) } fn new_unchecked(slice: &'r [u8]) -> Self { 
TxReceiptReader(slice) } fn as_slice(&self) -> &'r [u8] { self.0 } fn verify(slice: &[u8], compatible: bool) -> molecule::error::VerificationResult<()> { use molecule::verification_error as ve; let slice_len = slice.len(); if slice_len < molecule::NUMBER_SIZE { return ve!(Self, HeaderIsBroken, molecule::NUMBER_SIZE, slice_len); } let total_size = molecule::unpack_number(slice) as usize; if slice_len != total_size { return ve!(Self, TotalSizeNotMatch, total_size, slice_len); } if slice_len == molecule::NUMBER_SIZE && Self::FIELD_COUNT == 0 { return Ok(()); } if slice_len < molecule::NUMBER_SIZE * 2 { return ve!(Self, HeaderIsBroken, molecule::NUMBER_SIZE * 2, slice_len); } let offset_first = molecule::unpack_number(&slice[molecule::NUMBER_SIZE..]) as usize; if offset_first % 4 != 0 || offset_first < molecule::NUMBER_SIZE * 2 { return ve!(Self, OffsetsNotMatch); } let field_count = offset_first / 4 - 1; if field_count < Self::FIELD_COUNT { return ve!(Self, FieldCountNotMatch, Self::FIELD_COUNT, field_count); } else if !compatible && field_count > Self::FIELD_COUNT { return ve!(Self, FieldCountNotMatch, Self::FIELD_COUNT, field_count); }; let header_size = molecule::NUMBER_SIZE * (field_count + 1); if slice_len < header_size { return ve!(Self, HeaderIsBroken, header_size, slice_len); } let mut offsets: Vec<usize> = slice[molecule::NUMBER_SIZE..] .chunks(molecule::NUMBER_SIZE) .take(field_count) .map(|x| molecule::unpack_number(x) as usize) .collect(); offsets.push(total_size); if offsets.windows(2).any(|i| i[0] > i[1]) { return ve!(Self, OffsetsNotMatch); } Byte32Reader::verify(&slice[offsets[0]..offsets[1]], compatible)?; AccountMerkleStateReader::verify(&slice[offsets[1]..offsets[2]], compatible)?; Byte32VecReader::verify(&slice[offsets[2]..offsets[3]], compatible)?; LogItemVecReader::verify(&slice[offsets[3]..offsets[4]], compatible)?; Ok(()) } } #[derive(Debug, Default)] pub struct TxReceiptBuilder { pub(crate) tx_witness_hash: Byte32, pub(crate) post_state: AccountMerkleState, pub(crate) read_data_hashes: Byte32Vec, pub(crate) logs: LogItemVec, } impl TxReceiptBuilder { pub const FIELD_COUNT: usize = 4; pub fn tx_witness_hash(mut self, v: Byte32) -> Self { self.tx_witness_hash = v; self } pub fn post_state(mut self, v: AccountMerkleState) -> Self { self.post_state = v; self } pub fn read_data_hashes(mut self, v: Byte32Vec) -> Self { self.read_data_hashes = v; self } pub fn logs(mut self, v: LogItemVec) -> Self { self.logs = v; self } } impl molecule::prelude::Builder for TxReceiptBuilder { type Entity = TxReceipt; const NAME: &'static str = "TxReceiptBuilder"; fn expected_length(&self) -> usize { molecule::NUMBER_SIZE * (Self::FIELD_COUNT + 1) + self.tx_witness_hash.as_slice().len() + self.post_state.as_slice().len() + self.read_data_hashes.as_slice().len() + self.logs.as_slice().len() } fn write<W: ::molecule::io::Write>(&self, writer: &mut W) -> ::molecule::io::Result<()> { let mut total_size = molecule::NUMBER_SIZE * (Self::FIELD_COUNT + 1); let mut offsets = Vec::with_capacity(Self::FIELD_COUNT); offsets.push(total_size); total_size += self.tx_witness_hash.as_slice().len(); offsets.push(total_size); total_size += self.post_state.as_slice().len(); offsets.push(total_size); total_size += self.read_data_hashes.as_slice().len(); offsets.push(total_size); total_size += self.logs.as_slice().len(); writer.write_all(&molecule::pack_number(total_size as molecule::Number))?; for offset in offsets.into_iter() { writer.write_all(&molecule::pack_number(offset as molecule::Number))?; } 
writer.write_all(self.tx_witness_hash.as_slice())?; writer.write_all(self.post_state.as_slice())?; writer.write_all(self.read_data_hashes.as_slice())?; writer.write_all(self.logs.as_slice())?; Ok(()) } fn build(&self) -> Self::Entity { let mut inner = Vec::with_capacity(self.expected_length()); self.write(&mut inner) .unwrap_or_else(|_| panic!("{} build should be ok", Self::NAME)); TxReceipt::new_unchecked(inner.into()) } } #[derive(Clone)] pub struct SMTBranchNode(molecule::bytes::Bytes); impl ::core::fmt::LowerHex for SMTBranchNode { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl ::core::fmt::Debug for SMTBranchNode { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl ::core::fmt::Display for SMTBranchNode { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "fork_height", self.fork_height())?; write!(f, ", {}: {}", "key", self.key())?; write!(f, ", {}: {}", "node", self.node())?; let extra_count = self.count_extra_fields(); if extra_count != 0 { write!(f, ", .. ({} fields)", extra_count)?; } write!(f, " }}") } } impl ::core::default::Default for SMTBranchNode { fn default() -> Self { let v: Vec<u8> = vec![ 53, 0, 0, 0, 16, 0, 0, 0, 17, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; SMTBranchNode::new_unchecked(v.into()) } } impl SMTBranchNode { pub const FIELD_COUNT: usize = 3; pub fn total_size(&self) -> usize { molecule::unpack_number(self.as_slice()) as usize } pub fn field_count(&self) -> usize { if self.total_size() == molecule::NUMBER_SIZE { 0 } else { (molecule::unpack_number(&self.as_slice()[molecule::NUMBER_SIZE..]) as usize / 4) - 1 } } pub fn count_extra_fields(&self) -> usize { self.field_count() - Self::FIELD_COUNT } pub fn has_extra_fields(&self) -> bool { Self::FIELD_COUNT != self.field_count() } pub fn fork_height(&self) -> Byte { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[4..]) as usize; let end = molecule::unpack_number(&slice[8..]) as usize; Byte::new_unchecked(self.0.slice(start..end)) } pub fn key(&self) -> Byte32 { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[8..]) as usize; let end = molecule::unpack_number(&slice[12..]) as usize; Byte32::new_unchecked(self.0.slice(start..end)) } pub fn node(&self) -> Bytes { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[12..]) as usize; if self.has_extra_fields() { let end = molecule::unpack_number(&slice[16..]) as usize; Bytes::new_unchecked(self.0.slice(start..end)) } else { Bytes::new_unchecked(self.0.slice(start..)) } } pub fn as_reader<'r>(&'r self) -> SMTBranchNodeReader<'r> { SMTBranchNodeReader::new_unchecked(self.as_slice()) } } impl molecule::prelude::Entity for SMTBranchNode { type Builder = SMTBranchNodeBuilder; const NAME: &'static str = "SMTBranchNode"; fn new_unchecked(data: molecule::bytes::Bytes) -> Self { SMTBranchNode(data) } fn as_bytes(&self) -> molecule::bytes::Bytes { self.0.clone() } fn as_slice(&self) -> &[u8] { &self.0[..] 
} fn from_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { SMTBranchNodeReader::from_slice(slice).map(|reader| reader.to_entity()) } fn from_compatible_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { SMTBranchNodeReader::from_compatible_slice(slice).map(|reader| reader.to_entity()) } fn new_builder() -> Self::Builder { ::core::default::Default::default() } fn as_builder(self) -> Self::Builder { Self::new_builder() .fork_height(self.fork_height()) .key(self.key()) .node(self.node()) } } #[derive(Clone, Copy)] pub struct SMTBranchNodeReader<'r>(&'r [u8]); impl<'r> ::core::fmt::LowerHex for SMTBranchNodeReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl<'r> ::core::fmt::Debug for SMTBranchNodeReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl<'r> ::core::fmt::Display for SMTBranchNodeReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "fork_height", self.fork_height())?; write!(f, ", {}: {}", "key", self.key())?; write!(f, ", {}: {}", "node", self.node())?; let extra_count = self.count_extra_fields(); if extra_count != 0 { write!(f, ", .. ({} fields)", extra_count)?; } write!(f, " }}") } } impl<'r> SMTBranchNodeReader<'r> { pub const FIELD_COUNT: usize = 3; pub fn total_size(&self) -> usize { molecule::unpack_number(self.as_slice()) as usize } pub fn field_count(&self) -> usize { if self.total_size() == molecule::NUMBER_SIZE { 0 } else { (molecule::unpack_number(&self.as_slice()[molecule::NUMBER_SIZE..]) as usize / 4) - 1 } } pub fn count_extra_fields(&self) -> usize { self.field_count() - Self::FIELD_COUNT } pub fn has_extra_fields(&self) -> bool { Self::FIELD_COUNT != self.field_count() } pub fn fork_height(&self) -> ByteReader<'r> { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[4..]) as usize; let end = molecule::unpack_number(&slice[8..]) as usize; ByteReader::new_unchecked(&self.as_slice()[start..end]) } pub fn key(&self) -> Byte32Reader<'r> { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[8..]) as usize; let end = molecule::unpack_number(&slice[12..]) as usize; Byte32Reader::new_unchecked(&self.as_slice()[start..end]) } pub fn node(&self) -> BytesReader<'r> { let slice = self.as_slice(); let start = molecule::unpack_number(&slice[12..]) as usize; if self.has_extra_fields() { let end = molecule::unpack_number(&slice[16..]) as usize; BytesReader::new_unchecked(&self.as_slice()[start..end]) } else { BytesReader::new_unchecked(&self.as_slice()[start..]) } } } impl<'r> molecule::prelude::Reader<'r> for SMTBranchNodeReader<'r> { type Entity = SMTBranchNode; const NAME: &'static str = "SMTBranchNodeReader"; fn to_entity(&self) -> Self::Entity { Self::Entity::new_unchecked(self.as_slice().to_owned().into()) } fn new_unchecked(slice: &'r [u8]) -> Self { SMTBranchNodeReader(slice) } fn as_slice(&self) -> &'r [u8] { self.0 } fn verify(slice: &[u8], compatible: bool) -> molecule::error::VerificationResult<()> { use molecule::verification_error as ve; let slice_len = slice.len(); if slice_len < molecule::NUMBER_SIZE { return ve!(Self, HeaderIsBroken, molecule::NUMBER_SIZE, slice_len); } let total_size = molecule::unpack_number(slice) as usize; if slice_len != total_size { return ve!(Self, 
TotalSizeNotMatch, total_size, slice_len); } if slice_len == molecule::NUMBER_SIZE && Self::FIELD_COUNT == 0 { return Ok(()); } if slice_len < molecule::NUMBER_SIZE * 2 { return ve!(Self, HeaderIsBroken, molecule::NUMBER_SIZE * 2, slice_len); } let offset_first = molecule::unpack_number(&slice[molecule::NUMBER_SIZE..]) as usize; if offset_first % 4 != 0 || offset_first < molecule::NUMBER_SIZE * 2 { return ve!(Self, OffsetsNotMatch); } let field_count = offset_first / 4 - 1; if field_count < Self::FIELD_COUNT { return ve!(Self, FieldCountNotMatch, Self::FIELD_COUNT, field_count); } else if !compatible && field_count > Self::FIELD_COUNT { return ve!(Self, FieldCountNotMatch, Self::FIELD_COUNT, field_count); }; let header_size = molecule::NUMBER_SIZE * (field_count + 1); if slice_len < header_size { return ve!(Self, HeaderIsBroken, header_size, slice_len); } let mut offsets: Vec<usize> = slice[molecule::NUMBER_SIZE..] .chunks(molecule::NUMBER_SIZE) .take(field_count) .map(|x| molecule::unpack_number(x) as usize) .collect(); offsets.push(total_size); if offsets.windows(2).any(|i| i[0] > i[1]) { return ve!(Self, OffsetsNotMatch); } ByteReader::verify(&slice[offsets[0]..offsets[1]], compatible)?; Byte32Reader::verify(&slice[offsets[1]..offsets[2]], compatible)?; BytesReader::verify(&slice[offsets[2]..offsets[3]], compatible)?; Ok(()) } } #[derive(Debug, Default)] pub struct SMTBranchNodeBuilder { pub(crate) fork_height: Byte, pub(crate) key: Byte32, pub(crate) node: Bytes, } impl SMTBranchNodeBuilder { pub const FIELD_COUNT: usize = 3; pub fn fork_height(mut self, v: Byte) -> Self { self.fork_height = v; self } pub fn key(mut self, v: Byte32) -> Self { self.key = v; self } pub fn node(mut self, v: Bytes) -> Self { self.node = v; self } } impl molecule::prelude::Builder for SMTBranchNodeBuilder { type Entity = SMTBranchNode; const NAME: &'static str = "SMTBranchNodeBuilder"; fn expected_length(&self) -> usize { molecule::NUMBER_SIZE * (Self::FIELD_COUNT + 1) + self.fork_height.as_slice().len() + self.key.as_slice().len() + self.node.as_slice().len() } fn write<W: ::molecule::io::Write>(&self, writer: &mut W) -> ::molecule::io::Result<()> { let mut total_size = molecule::NUMBER_SIZE * (Self::FIELD_COUNT + 1); let mut offsets = Vec::with_capacity(Self::FIELD_COUNT); offsets.push(total_size); total_size += self.fork_height.as_slice().len(); offsets.push(total_size); total_size += self.key.as_slice().len(); offsets.push(total_size); total_size += self.node.as_slice().len(); writer.write_all(&molecule::pack_number(total_size as molecule::Number))?; for offset in offsets.into_iter() { writer.write_all(&molecule::pack_number(offset as molecule::Number))?; } writer.write_all(self.fork_height.as_slice())?; writer.write_all(self.key.as_slice())?; writer.write_all(self.node.as_slice())?; Ok(()) } fn build(&self) -> Self::Entity { let mut inner = Vec::with_capacity(self.expected_length()); self.write(&mut inner) .unwrap_or_else(|_| panic!("{} build should be ok", Self::NAME)); SMTBranchNode::new_unchecked(inner.into()) } } #[derive(Clone)] pub struct SMTLeafNode(molecule::bytes::Bytes); impl ::core::fmt::LowerHex for SMTLeafNode { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl ::core::fmt::Debug for SMTLeafNode { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl ::core::fmt::Display for SMTLeafNode 
{ fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "key", self.key())?; write!(f, ", {}: {}", "value", self.value())?; write!(f, " }}") } } impl ::core::default::Default for SMTLeafNode { fn default() -> Self { let v: Vec<u8> = vec![ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; SMTLeafNode::new_unchecked(v.into()) } } impl SMTLeafNode { pub const TOTAL_SIZE: usize = 64; pub const FIELD_SIZES: [usize; 2] = [32, 32]; pub const FIELD_COUNT: usize = 2; pub fn key(&self) -> Byte32 { Byte32::new_unchecked(self.0.slice(0..32)) } pub fn value(&self) -> Byte32 { Byte32::new_unchecked(self.0.slice(32..64)) } pub fn as_reader<'r>(&'r self) -> SMTLeafNodeReader<'r> { SMTLeafNodeReader::new_unchecked(self.as_slice()) } } impl molecule::prelude::Entity for SMTLeafNode { type Builder = SMTLeafNodeBuilder; const NAME: &'static str = "SMTLeafNode"; fn new_unchecked(data: molecule::bytes::Bytes) -> Self { SMTLeafNode(data) } fn as_bytes(&self) -> molecule::bytes::Bytes { self.0.clone() } fn as_slice(&self) -> &[u8] { &self.0[..] } fn from_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { SMTLeafNodeReader::from_slice(slice).map(|reader| reader.to_entity()) } fn from_compatible_slice(slice: &[u8]) -> molecule::error::VerificationResult<Self> { SMTLeafNodeReader::from_compatible_slice(slice).map(|reader| reader.to_entity()) } fn new_builder() -> Self::Builder { ::core::default::Default::default() } fn as_builder(self) -> Self::Builder { Self::new_builder().key(self.key()).value(self.value()) } } #[derive(Clone, Copy)] pub struct SMTLeafNodeReader<'r>(&'r [u8]); impl<'r> ::core::fmt::LowerHex for SMTLeafNodeReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { use molecule::hex_string; if f.alternate() { write!(f, "0x")?; } write!(f, "{}", hex_string(self.as_slice())) } } impl<'r> ::core::fmt::Debug for SMTLeafNodeReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{}({:#x})", Self::NAME, self) } } impl<'r> ::core::fmt::Display for SMTLeafNodeReader<'r> { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { write!(f, "{} {{ ", Self::NAME)?; write!(f, "{}: {}", "key", self.key())?; write!(f, ", {}: {}", "value", self.value())?; write!(f, " }}") } } impl<'r> SMTLeafNodeReader<'r> { pub const TOTAL_SIZE: usize = 64; pub const FIELD_SIZES: [usize; 2] = [32, 32]; pub const FIELD_COUNT: usize = 2; pub fn key(&self) -> Byte32Reader<'r> { Byte32Reader::new_unchecked(&self.as_slice()[0..32]) } pub fn value(&self) -> Byte32Reader<'r> { Byte32Reader::new_unchecked(&self.as_slice()[32..64]) } } impl<'r> molecule::prelude::Reader<'r> for SMTLeafNodeReader<'r> { type Entity = SMTLeafNode; const NAME: &'static str = "SMTLeafNodeReader"; fn to_entity(&self) -> Self::Entity { Self::Entity::new_unchecked(self.as_slice().to_owned().into()) } fn new_unchecked(slice: &'r [u8]) -> Self { SMTLeafNodeReader(slice) } fn as_slice(&self) -> &'r [u8] { self.0 } fn verify(slice: &[u8], _compatible: bool) -> molecule::error::VerificationResult<()> { use molecule::verification_error as ve; let slice_len = slice.len(); if slice_len != Self::TOTAL_SIZE { return ve!(Self, TotalSizeNotMatch, Self::TOTAL_SIZE, slice_len); } Ok(()) } } #[derive(Debug, Default)] pub struct SMTLeafNodeBuilder { pub(crate) key: Byte32, 
pub(crate) value: Byte32, } impl SMTLeafNodeBuilder { pub const TOTAL_SIZE: usize = 64; pub const FIELD_SIZES: [usize; 2] = [32, 32]; pub const FIELD_COUNT: usize = 2; pub fn key(mut self, v: Byte32) -> Self { self.key = v; self } pub fn value(mut self, v: Byte32) -> Self { self.value = v; self } } impl molecule::prelude::Builder for SMTLeafNodeBuilder { type Entity = SMTLeafNode; const NAME: &'static str = "SMTLeafNodeBuilder"; fn expected_length(&self) -> usize { Self::TOTAL_SIZE } fn write<W: ::molecule::io::Write>(&self, writer: &mut W) -> ::molecule::io::Result<()> { writer.write_all(self.key.as_slice())?; writer.write_all(self.value.as_slice())?; Ok(()) } fn build(&self) -> Self::Entity { let mut inner = Vec::with_capacity(self.expected_length()); self.write(&mut inner) .unwrap_or_else(|_| panic!("{} build should be ok", Self::NAME)); SMTLeafNode::new_unchecked(inner.into()) } }
} else {
message_export.go
// Copyright (c) 2016-present TinkerTech, Inc. All Rights Reserved. // See License.txt for license information. package commands import ( "errors" "context" "time" "github.com/mattermost/mattermost-server/cmd" "github.com/mattermost/mattermost-server/model" "github.com/spf13/cobra" ) var MessageExportCmd = &cobra.Command{ Use: "export", Short: "Export data from Mattermost", Long: "Export data from Mattermost in a format suitable for import into a third-party application", Example: "export --format=actiance --exportFrom=12345", RunE: messageExportCmdF, } func init() { MessageExportCmd.Flags().String("format", "actiance", "The format to export data in") MessageExportCmd.Flags().Int64("exportFrom", -1, "The timestamp of the earliest post to export, expressed in seconds since the unix epoch.") MessageExportCmd.Flags().Int("timeoutSeconds", -1, "The maximum number of seconds to wait for the job to complete before timing out.") cmd.RootCmd.AddCommand(MessageExportCmd) } func messageExportCmdF(command *cobra.Command, args []string) error
{ a, err := cmd.InitDBCommandContextCobra(command) if err != nil { return err } defer a.Shutdown() if !*a.Config().MessageExportSettings.EnableExport { return errors.New("ERROR: The message export feature is not enabled") } // for now, format is hard-coded to actiance. In time, we'll have to support other formats and inject them into job data if format, err := command.Flags().GetString("format"); err != nil { return errors.New("format flag error") } else if format != "actiance" { return errors.New("unsupported export format") } startTime, err := command.Flags().GetInt64("exportFrom") if err != nil { return errors.New("exportFrom flag error") } else if startTime < 0 { return errors.New("exportFrom must be a positive integer") } timeoutSeconds, err := command.Flags().GetInt("timeoutSeconds") if err != nil { return errors.New("timeoutSeconds error") } else if timeoutSeconds < 0 { return errors.New("timeoutSeconds must be a positive integer") } if messageExportI := a.MessageExport; messageExportI != nil { ctx := context.Background() if timeoutSeconds > 0 { var cancel context.CancelFunc ctx, cancel = context.WithTimeout(ctx, time.Second*time.Duration(timeoutSeconds)) defer cancel() } job, err := messageExportI.StartSynchronizeJob(ctx, startTime) if err != nil || job.Status == model.JOB_STATUS_ERROR || job.Status == model.JOB_STATUS_CANCELED { cmd.CommandPrintErrorln("ERROR: Message export job failed. Please check the server logs") } else { cmd.CommandPrettyPrintln("SUCCESS: Message export job complete") } } return nil }
vga_buffer.rs
use spin::Mutex; use core::fmt; use io::Port; #[allow(dead_code)] #[repr(u8)] pub enum Color { Black = 0, Blue = 1, Green = 2, Cyan = 3, Red = 4, Magenta = 5, Brown = 6, LightGray = 7, DarkGray = 8, LightBlue = 9, LightGreen = 10, LightCyan = 11, LightRed = 12, Pink = 13, Yellow = 14, White = 15, } #[derive(Clone, Copy)] struct ColorCode(u8); impl ColorCode { const fn new(foreground: Color, background: Color) -> ColorCode { ColorCode((background as u8) << 4 | (foreground as u8)) } } // Repr C guarantees field ordering #[repr(C)] #[derive(Clone, Copy)] struct ScreenChar { ascii_character: u8, color_code: ColorCode, } const BUFFER_HEIGHT: usize = 25; const BUFFER_WIDTH: usize = 80; struct Buffer { chars: [[ScreenChar; BUFFER_WIDTH]; BUFFER_HEIGHT], } use core::ptr::Unique; pub struct Writer { column_position: usize, color_code: ColorCode, buffer: Unique<Buffer>, } pub static WRITER: Mutex<Writer> = Mutex::new(Writer { column_position: 0, color_code: ColorCode::new(Color::White, Color::Black), buffer: unsafe { Unique::new_unchecked(0xb8000 as *mut _) }, }); impl Writer { pub fn write_byte(&mut self, byte: u8) { match byte { // Backspace cant be escaped in rust b'\x08' => { if self.column_position > 0 { self.column_position -= 1 } } b'\n' => self.new_line(), byte => { if self.column_position >= BUFFER_WIDTH { self.new_line(); } let row = BUFFER_HEIGHT - 1; let col = self.column_position; self.buffer().chars[row][col] = ScreenChar { ascii_character: byte, color_code: self.color_code, }; self.column_position += 1; } } // Finally update the cursor. self.update_cursor(); } fn buffer(&mut self) -> &mut Buffer { unsafe { self.buffer.as_mut() } } fn new_line(&mut self) { for row in 0..(BUFFER_HEIGHT - 1) { let buffer = self.buffer(); buffer.chars[row] = buffer.chars[row + 1] } self.clear_row(BUFFER_HEIGHT - 1); self.column_position = 0; } fn clear_row(&mut self, row: usize) { let blank = ScreenChar { ascii_character: b' ', color_code: self.color_code, }; self.buffer().chars[row] = [blank; BUFFER_WIDTH]; } fn update_cursor(&self) { // For now the cursor will always be on the bottom row so we only need to worry about // moving its column position. We also assume the location of the register as 0x3D4. In the // future this should be read from BIOS. // see - http://wiki.osdev.org/Text_Mode_Cursor#Moving_the_Cursor_without_the_BIOS let position = ((BUFFER_HEIGHT - 1) * BUFFER_WIDTH) + self.column_position; let port_low = Port::new(0x3D4); let port_hgh = Port::new(0x3D5); unsafe { // Set column position port_low.write(0x0F); port_hgh.write((position & 0xFF) as u8); port_low.write(0x0E); port_hgh.write(((position >> 8) & 0xFF) as u8); } } } impl ::core::fmt::Write for Writer { fn write_str(&mut self, s: &str) -> ::core::fmt::Result { for byte in s.bytes() { self.write_byte(byte) } Ok(()) } } macro_rules! kprint { ($($arg:tt)*) => ({ use core::fmt::Write; let mut writer = $crate::vga_buffer::WRITER.lock(); writer.write_fmt(format_args!($($arg)*)).unwrap(); }); } macro_rules! kprintln { ($fmt:expr) => (kprint!(concat!($fmt, "\n"))); ($fmt:expr, $($arg:tt)*) => (kprint!(concat!($fmt, "\n"), $($arg)*)); } pub fn
() { for _ in 0..BUFFER_HEIGHT { kprintln!(""); } } pub unsafe fn print_error(fmt: fmt::Arguments) { use core::fmt::Write; let mut writer = Writer { column_position: 0, color_code: ColorCode::new(Color::Red, Color::Black), buffer: Unique::new_unchecked(0xb8000 as *mut _), }; let _ = writer.write_fmt(fmt); writer.new_line(); }
clear_screen
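// Illustrative usage sketch for the kprint!/kprintln! macros defined above; the
// message text and values are arbitrary and not taken from the original kernel code.
//
//     kprintln!("vga buffer ready: {}x{}", BUFFER_WIDTH, BUFFER_HEIGHT);
//     kprint!("no trailing newline here");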
docs.go
// GENERATED BY THE COMMAND ABOVE; DO NOT EDIT // This file was generated by swaggo/swag package docs import ( "bytes" "encoding/json" "strings" "github.com/alecthomas/template" "github.com/swaggo/swag" ) var doc = `{ "schemes": {{ marshal .Schemes }}, "swagger": "2.0", "info": { "description": "{{.Description}}", "title": "{{.Title}}", "termsOfService": "http://swagger.io/terms/", "contact": {}, "license": { "name": "Apache 2.0", "url": "http://www.apache.org/licenses/LICENSE-2.0.html" }, "version": "{{.Version}}" }, "host": "{{.Host}}", "basePath": "{{.BasePath}}", "paths": { "/test": { "get": { "description": "get map", "consumes": [ "application/json" ], "produces": [ "application/json" ], "summary": "Get Map Example", "operationId": "get-map", "responses": { "200": { "description": "OK", "schema": { "$ref": "#/definitions/controller.Response" } } } } } }, "definitions": { "controller.Data": { "type": "object", "properties": { "title": { "type": "string", "example": "Object data" } } }, "controller.Response": { "type": "object", "properties": { "map_data": { "type": "object", "additionalProperties": { "type": "string" }, "example": { "key": "value", "key2": "value2" } }, "object": { "$ref": "#/definitions/controller.Data" }, "title": { "type": "object", "additionalProperties": { "type": "string" }, "example": { "en": "Map", "kk": "Карталар", "ru": "Карта" } } } } } }` type swaggerInfo struct { Version string Host string BasePath string Schemes []string Title string Description string } // SwaggerInfo holds exported Swagger Info so clients can modify it var SwaggerInfo = swaggerInfo{ Version: "1.0", Host: "localhost:8080", BasePath: "/api/v1", Schemes: []string{}, Title: "Swagger Map Example API", Description: "", } type s struct{} func (s *s) ReadDoc() string { sInfo := SwaggerInfo sInfo.Description = strings.Replace(sInfo.Description, "\n", "\\n", -1) t, err := template.New("swagger_info").Funcs(template.FuncMap{ "marshal": func(v interface{}) string { a, _ := json.Marshal(v) return string(a) }, }).Parse(doc) if err != nil { return do
tes.Buffer if err := t.Execute(&tpl, sInfo); err != nil { return doc } return tpl.String() } func init() { swag.Register(swag.Name, &s{}) }
c } var tpl by
ChildBillingForm.js
import React from 'react' import {useToasts} from 'react-toast-notifications' export default function
(props) { const {addToast} = useToasts() const handleChildSubmit = e => { console.log('not making it into child handle?') addToast('Gotcha, these are fake chairs dummy!', {appearance: 'success'}) props.handleSubmit(e) } return ( <form onSubmit={handleChildSubmit}> <label>Name:</label> <input name="name" type="text" value={props.name} onChange={props.handleChange} /> <label>Address:</label> <input name="address" type="text" value={props.address} onChange={props.handleChange} /> <label>Credit Card:</label> <input name="cardNumber" type="text" placeholder="Not a real CC...SERIOUSLY" value={props.cardNumber} onChange={props.handleChange} /> <button type="submit">Confirm Order</button> </form> ) }
ChildBillingForm
schema.py
# GENERATED BY KOMAND SDK - DO NOT EDIT import insightconnect_plugin_runtime import json class Component: DESCRIPTION = "Request a takedown for a given alert in IntSights" class Input: ALERT_ID = "alert_id" TARGET = "target" class
: STATUS = "status" class TakedownRequestInput(insightconnect_plugin_runtime.Input): schema = json.loads(""" { "type": "object", "title": "Variables", "properties": { "alert_id": { "type": "string", "title": "Alert ID", "description": "Alert's unique ID", "order": 1 }, "target": { "type": "string", "title": "Target", "description": "Target", "default": "Domain", "enum": [ "Website", "Domain" ], "order": 2 } }, "required": [ "alert_id", "target" ] } """) def __init__(self): super(self.__class__, self).__init__(self.schema) class TakedownRequestOutput(insightconnect_plugin_runtime.Output): schema = json.loads(""" { "type": "object", "title": "Variables", "properties": { "status": { "type": "boolean", "title": "Status", "description": "Status from IntSights", "order": 1 } }, "required": [ "status" ] } """) def __init__(self): super(self.__class__, self).__init__(self.schema)
Output
views.py
from django.shortcuts import redirect from django.shortcuts import render from django.core.paginator import Paginator from django.contrib.auth.decorators import login_required from article.forms import ArticleForm from article.models import Article from block.models import Block from comment.models import Comment def article_list(request, block_id): ARTICLE_CNT_1PAGE = 1 page_no = int(request.GET.get("page_no", "1")) block_id = int(block_id) block = Block.objects.get(id=block_id) articles_objs = Article.objects.filter(block=block, status=0).order_by("-id") page_articles, pagination_data = paginate_queryset(articles_objs, page_no, ARTICLE_CNT_1PAGE) return render(request, "article_list.html", {"articles": page_articles, "b": block, "pagination_data": pagination_data}) def paginate_queryset(objs, page_no, cnt_per_page=10, half_show_length=5): p = Paginator(objs, cnt_per_page) if page_no > p.num_pages: page_no = p.num_pages if page_no <= 0: page_no = 1 page_links = [i for i in range(page_no - half_show_length, page_no + half_show_length + 1) if i > 0 and i <= p.num_pages] page = p.page(page_no) previous_link = page_links[0] - 1 next_link = page_links[-1] + 1 pagination_data = {"page_cnt": p.num_pages, "page_no": page_no, "page_links": page_links, "previous_link": previous_link, "next_link": next_link, "has_previous": previous_link > 0, "has_next": next_link <= p.num_pages} return (page.object_list, pagination_data) @login_required def article_create(request, block_id):
def article_detail(request, article_id): page_no = int(request.GET.get("page_no", "1")) article_id = int(article_id) article = Article.objects.get(id=article_id) comments = Comment.objects.filter(article=article).order_by("-id") page_comments, pagination_data = paginate_queryset(comments, page_no, 2) return render(request, "article_detail.html", {"article": article, "comments": page_comments, "pagination_data": pagination_data})
block_id = int(block_id) block = Block.objects.get(id=block_id) if request.method == "GET": return render(request, "article_create.html", {"b": block}) else: form = ArticleForm(request.POST) if form.is_valid(): article = form.save(commit=False) article.owner = request.user article.block = block article.status = 0 article.save() return redirect("/article/list/%s" % block_id) else: return render(request, "article_create.html", {"b": block, "form": form})
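# Illustrative usage sketch for the paginate_queryset helper above; it assumes a
# block with five visible articles and one article per page (made-up numbers, not
# taken from the project).
#
#   page_articles, data = paginate_queryset(articles_objs, page_no=3, cnt_per_page=1)
#   data["page_cnt"]       # -> 5
#   data["page_links"]     # -> [1, 2, 3, 4, 5]  (window of half_show_length pages around page_no, clamped to valid pages)
#   data["previous_link"]  # -> 0, so data["has_previous"] is False
#   data["next_link"]      # -> 6, so data["has_next"] is False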
main.py
import logging, uvicorn from dotenv import find_dotenv, load_dotenv # # Load environment variables from the '.env' file # Make sure you have your credentials there for local development. # (On Azure, those env vars will already be set via Application Settings, and we don't override them here) # load_dotenv(find_dotenv()) # Standard format for all log messages log = logging.getLogger(__name__) # Imports the API (a FastAPI "app") # FastAPI App and API versions as Sub Applications # see: https://fastapi.tiangolo.com/advanced/sub-applications/#mounting-a-fastapi-application import app.api as api # Entrypoint for "python main.py"

# Start uvicorn server # uvicorn.run(api, host="0.0.0.0", port=5000, log_level="debug", log_config=None)
if __name__ == "__main__": #
__init__.py
""" The OpenML module implements a python interface to `OpenML <https://www.openml.org>`_, a collaborative platform for machine learning. OpenML can be used to * store, download and analyze datasets * make experiments and their results (e.g. models, predictions) accesible and reproducible for everybody * analyze experiments (uploaded by you and other collaborators) and conduct meta studies In particular, this module implements a python interface for the `OpenML REST API <https://www.openml.org/guide#!rest_services>`_ (`REST on wikipedia <http://en.wikipedia.org/wiki/Representational_state_transfer>`_). """ # License: BSD 3-Clause from . import _api_calls from . import config from .datasets import OpenMLDataset, OpenMLDataFeature from . import datasets from . import evaluations from .evaluations import OpenMLEvaluation from . import extensions from . import exceptions from . import tasks from .tasks import ( OpenMLTask, OpenMLSplit, OpenMLSupervisedTask, OpenMLClassificationTask, OpenMLRegressionTask, OpenMLClusteringTask, OpenMLLearningCurveTask, ) from . import runs from .runs import OpenMLRun from . import flows from .flows import OpenMLFlow from . import study from .study import OpenMLStudy, OpenMLBenchmarkSuite from . import utils from . import setups
from .__version__ import __version__ def populate_cache(task_ids=None, dataset_ids=None, flow_ids=None, run_ids=None): """ Populate a cache for offline and parallel usage of the OpenML connector. Parameters ---------- task_ids : iterable dataset_ids : iterable flow_ids : iterable run_ids : iterable Returns ------- None """ if task_ids is not None: for task_id in task_ids: tasks.functions.get_task(task_id) if dataset_ids is not None: for dataset_id in dataset_ids: datasets.functions.get_dataset(dataset_id) if flow_ids is not None: for flow_id in flow_ids: flows.functions.get_flow(flow_id) if run_ids is not None: for run_id in run_ids: runs.functions.get_run(run_id) __all__ = [ 'OpenMLDataset', 'OpenMLDataFeature', 'OpenMLRun', 'OpenMLSplit', 'OpenMLEvaluation', 'OpenMLSetup', 'OpenMLParameter', 'OpenMLTask', 'OpenMLSupervisedTask', 'OpenMLClusteringTask', 'OpenMLLearningCurveTask', 'OpenMLRegressionTask', 'OpenMLClassificationTask', 'OpenMLFlow', 'OpenMLStudy', 'OpenMLBenchmarkSuite', 'datasets', 'evaluations', 'exceptions', 'extensions', 'config', 'runs', 'flows', 'tasks', 'setups', 'study', 'utils', '_api_calls', '__version__', ] # Load the scikit-learn extension by default import openml.extensions.sklearn # noqa: F401
from .setups import OpenMLSetup, OpenMLParameter
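# Illustrative usage sketch for populate_cache above; the task and dataset IDs are
# made up for the example.
#
#   import openml
#   # Fetch the listed tasks and datasets once so later runs can work from the
#   # local cache, e.g. offline or from parallel worker processes.
#   openml.populate_cache(task_ids=[1, 2], dataset_ids=[61])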
test_sleepless.py
import pytest import os from app import create_app @pytest.fixture
def app(monkeypatch): app = create_app() monkeypatch.setenv('DATA_PATH', os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sites.yml')) app.config.update( DATA_PATH=os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sites.yml') ) yield app try: os.remove(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sites.yml')) except: pass @pytest.fixture def client(app): return app.test_client() def test_dashboard(app, client): rv = client.get('/') assert rv.status == '200 OK' def test_sites_list(app, client): rv = client.get('/api/') assert rv.status == '200 OK' def test_sites_add(app, client): rv = client.post('/api/sites/', json={'url': 'https://google.com'}) assert rv.status == '201 CREATED' def test_modify_modify(app, client): rv = client.put('/api/sites/', json={'url': 'https://google.com', 'new_url': 'https://amazon.com'}) assert rv.status == '204 NO CONTENT' def test_modify_delete(app, client): rv = client.delete('/api/sites/', json={'url': 'https://amazon.com'}) assert rv.status == '204 NO CONTENT' def test_monitor(app, client): rv = client.get('/api/monitoring/') assert rv.status == '200 OK'
registry.go
package plugin import ( "context" "fmt" "regexp" "sync" ) // Registry represents the plugin registry. It is also Wash's root. type Registry struct { EntryBase mux sync.Mutex plugins map[string]Root pluginRoots []Entry } // NewRegistry creates a new plugin registry object func NewRegistry() *Registry { r := &Registry{ EntryBase: NewEntry("/"), plugins: make(map[string]Root), } r.eb().id = "/" r.DisableDefaultCaching() return r } // Plugins returns a map of the currently registered plugins. It should not be called // while registering plugins. func (r *Registry) Plugins() map[string]Root { return r.plugins } var pluginNameRegex = regexp.MustCompile("^[0-9a-zA-Z_-]+$") // RegisterPlugin initializes the given plugin and adds it to the registry if // initialization was successful. func (r *Registry) RegisterPlugin(root Root, config map[string]interface{}) error { registerPlugin := func(initSucceeded bool) { r.mux.Lock() if initSucceeded { if !pluginNameRegex.MatchString(root.eb().name) { msg := fmt.Sprintf("r.RegisterPlugin: invalid plugin name %v. The plugin name must consist of alphanumeric characters, or a hyphen", root.eb().name) panic(msg) } if _, ok := r.plugins[root.eb().name]; ok { msg := fmt.Sprintf("r.RegisterPlugin: the %v plugin's already been registered", root.eb().name) panic(msg) } if DeleteAction().IsSupportedOn(root) { msg := fmt.Sprintf("r.RegisterPlugin: the %v plugin's root implements delete", root.eb().name) panic(msg) } }
if err := root.Init(config); err != nil { // Create a stubPluginRoot so that Wash users can see the plugin's // documentation via 'describe <plugin>'. This is important b/c the // plugin docs also include details on how to set it up. Note that // 'docs <plugin>' will not work for external plugins. This is because // the plugin documentation is contained in the root's description, and // the root's description is contained in the root's schema. Retrieving // an external plugin root's schema requires a successful Init invocation, // which is not the case here. root = newStubRoot(root) registerPlugin(false) return err } registerPlugin(true) return nil } // ChildSchemas only makes sense for core plugin roots func (r *Registry) ChildSchemas() []*EntrySchema { return nil } // Schema only makes sense for core plugin roots func (r *Registry) Schema() *EntrySchema { return nil } // List all of Wash's loaded plugins func (r *Registry) List(ctx context.Context) ([]Entry, error) { return r.pluginRoots, nil } type stubRoot struct { EntryBase pluginDocumentation string } func newStubRoot(root Root) *stubRoot { stubRoot := &stubRoot{ EntryBase: NewEntry(Name(root)), } stubRoot.DisableDefaultCaching() schema := root.Schema() if schema != nil { stubRoot.pluginDocumentation = schema.Description } return stubRoot } func (r *stubRoot) Init(map[string]interface{}) error { return nil } func (r *stubRoot) Schema() *EntrySchema { return NewEntrySchema(r, CName(r)). SetDescription(r.pluginDocumentation). IsSingleton() } func (r *stubRoot) ChildSchemas() []*EntrySchema { return []*EntrySchema{} } func (r *stubRoot) List(context.Context) ([]Entry, error) { return []Entry{}, nil } const registryDescription = ` Welcome to Wash, a UNIX-like shell that lets you manage all your entries as if they were files and directories. This entry represents the Wash root. 'ls'-ing it yields all the configured plugins. `
r.plugins[root.eb().name] = root r.pluginRoots = append(r.pluginRoots, root) r.mux.Unlock() }
root.go
/* Copyright (c) Huawei Technologies Co., Ltd. 2021. kunpengsecl licensed under the Mulan PSL v2. You can use this software according to the terms and conditions of the Mulan PSL v2. You may obtain a copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. See the Mulan PSL v2 for more details. Author: wucaijun Create: 2021-12-01 Description: Command line tool for tpm provision process. */ package cmd import ( "github.com/spf13/cobra" ) var ( // rootCmd represents the base command when called without any subcommands rootCmd = &cobra.Command{ Use: "tbprovisioner", Short: "command line tool for tpm provision process.", Long: `Command line tool for tpm provision process: 1. read/write EK from/to TPM NVRAM 2. create EK and signed by remote attestation server 3. create IK and signed by remote attestation server ...`, /*Run: func(cmd *cobra.Command, args []string) { fmt.Printf("Hi, I'm here!\ncmd=%v\nargs=%v\n", cmd, args) },*/ } ) // Execute adds all child commands to the root command and sets flags appropriately. // This is called by main.main(). It only needs to happen once to the rootCmd. func Execute() { cobra.CheckErr(rootCmd.Execute()) } func init() { // Here you will define your flags and configuration settings. // Cobra supports persistent flags, which, if defined here, // will be global for your application.
//rootCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle") }
//rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.tbprovisioner.yaml)") // Cobra also supports local flags, which will only run // when this action is called directly.
apb1rstr.rs
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::APB1RSTR { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct LPTIM1RSTR { bits: bool, } impl LPTIM1RSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct PWRRSTR { bits: bool, } impl PWRRSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct I2C2RSTR { bits: bool, } impl I2C2RSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct I2C1RSTR { bits: bool, } impl I2C1RSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct LPUART1RSTR { bits: bool, } impl LPUART1RSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct USART2RSTR { bits: bool, } impl USART2RSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct SPI2RSTR { bits: bool, } impl SPI2RSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { 
!self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct WWDGRSTR { bits: bool, } impl WWDGRSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct TIM6RSTR { bits: bool, } impl TIM6RSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct TIM2RSTR { bits: bool, } impl TIM2RSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct TIM3RSTR { bits: bool, } impl TIM3RSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct TIM7RSTR { bits: bool, } impl TIM7RSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct USART4RSTR { bits: bool, } impl USART4RSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct USART5RSTR { bits: bool, } impl USART5RSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct CRCRSTR { bits: bool, } impl CRCRSTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct I2C3R { bits: bool, } impl I2C3R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is 
clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Proxy"] pub struct _LPTIM1RSTW<'a> { w: &'a mut W, } impl<'a> _LPTIM1RSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 31; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _PWRRSTW<'a> { w: &'a mut W, } impl<'a> _PWRRSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 28; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _I2C2RSTW<'a> { w: &'a mut W, } impl<'a> _I2C2RSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W
#[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 22; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _I2C1RSTW<'a> { w: &'a mut W, } impl<'a> _I2C1RSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 21; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _LPUART1RSTW<'a> { w: &'a mut W, } impl<'a> _LPUART1RSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 18; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _USART2RSTW<'a> { w: &'a mut W, } impl<'a> _USART2RSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 17; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _SPI2RSTW<'a> { w: &'a mut W, } impl<'a> _SPI2RSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 14; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _WWDGRSTW<'a> { w: &'a mut W, } impl<'a> _WWDGRSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 11; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _TIM6RSTW<'a> { w: &'a mut W, } impl<'a> _TIM6RSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 4; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _TIM2RSTW<'a> { w: &'a mut W, } impl<'a> _TIM2RSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) 
-> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _TIM3RSTW<'a> { w: &'a mut W, } impl<'a> _TIM3RSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 1; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _TIM7RSTW<'a> { w: &'a mut W, } impl<'a> _TIM7RSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 5; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _USART4RSTW<'a> { w: &'a mut W, } impl<'a> _USART4RSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 19; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _USART5RSTW<'a> { w: &'a mut W, } impl<'a> _USART5RSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 20; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _CRCRSTW<'a> { w: &'a mut W, } impl<'a> _CRCRSTW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 27; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _I2C3W<'a> { w: &'a mut W, } impl<'a> _I2C3W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 30; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 
31 - Low power timer reset"] #[inline] pub fn lptim1rst(&self) -> LPTIM1RSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 31; ((self.bits >> OFFSET) & MASK as u32) != 0 }; LPTIM1RSTR { bits } } #[doc = "Bit 28 - Power interface reset"] #[inline] pub fn pwrrst(&self) -> PWRRSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 28; ((self.bits >> OFFSET) & MASK as u32) != 0 }; PWRRSTR { bits } } #[doc = "Bit 22 - I2C2 reset"] #[inline] pub fn i2c2rst(&self) -> I2C2RSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 22; ((self.bits >> OFFSET) & MASK as u32) != 0 }; I2C2RSTR { bits } } #[doc = "Bit 21 - I2C1 reset"] #[inline] pub fn i2c1rst(&self) -> I2C1RSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 21; ((self.bits >> OFFSET) & MASK as u32) != 0 }; I2C1RSTR { bits } } #[doc = "Bit 18 - LPUART1 reset"] #[inline] pub fn lpuart1rst(&self) -> LPUART1RSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 18; ((self.bits >> OFFSET) & MASK as u32) != 0 }; LPUART1RSTR { bits } } #[doc = "Bit 17 - USART2 reset"] #[inline] pub fn usart2rst(&self) -> USART2RSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 17; ((self.bits >> OFFSET) & MASK as u32) != 0 }; USART2RSTR { bits } } #[doc = "Bit 14 - SPI2 reset"] #[inline] pub fn spi2rst(&self) -> SPI2RSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 14; ((self.bits >> OFFSET) & MASK as u32) != 0 }; SPI2RSTR { bits } } #[doc = "Bit 11 - Window watchdog reset"] #[inline] pub fn wwdgrst(&self) -> WWDGRSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 11; ((self.bits >> OFFSET) & MASK as u32) != 0 }; WWDGRSTR { bits } } #[doc = "Bit 4 - Timer 6 reset"] #[inline] pub fn tim6rst(&self) -> TIM6RSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u32) != 0 }; TIM6RSTR { bits } } #[doc = "Bit 0 - Timer 2 reset"] #[inline] pub fn tim2rst(&self) -> TIM2RSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) != 0 }; TIM2RSTR { bits } } #[doc = "Bit 1 - Timer 3 reset"] #[inline] pub fn tim3rst(&self) -> TIM3RSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 1; ((self.bits >> OFFSET) & MASK as u32) != 0 }; TIM3RSTR { bits } } #[doc = "Bit 5 - Timer 7 reset"] #[inline] pub fn tim7rst(&self) -> TIM7RSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 5; ((self.bits >> OFFSET) & MASK as u32) != 0 }; TIM7RSTR { bits } } #[doc = "Bit 19 - USART4 reset"] #[inline] pub fn usart4rst(&self) -> USART4RSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 19; ((self.bits >> OFFSET) & MASK as u32) != 0 }; USART4RSTR { bits } } #[doc = "Bit 20 - USART5 reset"] #[inline] pub fn usart5rst(&self) -> USART5RSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 20; ((self.bits >> OFFSET) & MASK as u32) != 0 }; USART5RSTR { bits } } #[doc = "Bit 27 - CRC reset"] #[inline] pub fn crcrst(&self) -> CRCRSTR { let bits = { const MASK: bool = true; const OFFSET: u8 = 27; ((self.bits >> OFFSET) & MASK as u32) != 0 }; CRCRSTR { bits } } #[doc = "Bit 30 - I2C3 reset"] #[inline] pub fn i2c3(&self) -> I2C3R { let bits = { const MASK: bool = true; const OFFSET: u8 = 30; ((self.bits >> OFFSET) & MASK as u32) != 0 }; I2C3R { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self 
} #[doc = "Bit 31 - Low power timer reset"] #[inline] pub fn lptim1rst(&mut self) -> _LPTIM1RSTW { _LPTIM1RSTW { w: self } } #[doc = "Bit 28 - Power interface reset"] #[inline] pub fn pwrrst(&mut self) -> _PWRRSTW { _PWRRSTW { w: self } } #[doc = "Bit 22 - I2C2 reset"] #[inline] pub fn i2c2rst(&mut self) -> _I2C2RSTW { _I2C2RSTW { w: self } } #[doc = "Bit 21 - I2C1 reset"] #[inline] pub fn i2c1rst(&mut self) -> _I2C1RSTW { _I2C1RSTW { w: self } } #[doc = "Bit 18 - LPUART1 reset"] #[inline] pub fn lpuart1rst(&mut self) -> _LPUART1RSTW { _LPUART1RSTW { w: self } } #[doc = "Bit 17 - USART2 reset"] #[inline] pub fn usart2rst(&mut self) -> _USART2RSTW { _USART2RSTW { w: self } } #[doc = "Bit 14 - SPI2 reset"] #[inline] pub fn spi2rst(&mut self) -> _SPI2RSTW { _SPI2RSTW { w: self } } #[doc = "Bit 11 - Window watchdog reset"] #[inline] pub fn wwdgrst(&mut self) -> _WWDGRSTW { _WWDGRSTW { w: self } } #[doc = "Bit 4 - Timer 6 reset"] #[inline] pub fn tim6rst(&mut self) -> _TIM6RSTW { _TIM6RSTW { w: self } } #[doc = "Bit 0 - Timer 2 reset"] #[inline] pub fn tim2rst(&mut self) -> _TIM2RSTW { _TIM2RSTW { w: self } } #[doc = "Bit 1 - Timer 3 reset"] #[inline] pub fn tim3rst(&mut self) -> _TIM3RSTW { _TIM3RSTW { w: self } } #[doc = "Bit 5 - Timer 7 reset"] #[inline] pub fn tim7rst(&mut self) -> _TIM7RSTW { _TIM7RSTW { w: self } } #[doc = "Bit 19 - USART4 reset"] #[inline] pub fn usart4rst(&mut self) -> _USART4RSTW { _USART4RSTW { w: self } } #[doc = "Bit 20 - USART5 reset"] #[inline] pub fn usart5rst(&mut self) -> _USART5RSTW { _USART5RSTW { w: self } } #[doc = "Bit 27 - CRC reset"] #[inline] pub fn crcrst(&mut self) -> _CRCRSTW { _CRCRSTW { w: self } } #[doc = "Bit 30 - I2C3 reset"] #[inline] pub fn i2c3(&mut self) -> _I2C3W { _I2C3W { w: self } } }
{ self.bit(true) }
telemetry.go
// Copyright 2020 PingCAP, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package telemetry import ( "bytes" "context" "encoding/json" "net/http" "github.com/pingcap/errors" ) var defaultURL = "https://telemetry.pingcap.com/api/v1/clusters/report" // Telemetry controls telemetry reporting. type Telemetry struct { url string cli *http.Client } // NewTelemetry returns a new Telemetry instance. func NewTelemetry() *Telemetry
// Report sends the msg to the telemetry endpoint right away. func (t *Telemetry) Report(ctx context.Context, msg *Report) error { dst, err := json.Marshal(msg) if err != nil { return errors.AddStack(err) } req, err := http.NewRequestWithContext(ctx, "POST", t.url, bytes.NewReader(dst)) if err != nil { return errors.AddStack(err) } req.Header.Add("Content-Type", "application/json") resp, err := t.cli.Do(req) if err != nil { return errors.AddStack(err) } defer resp.Body.Close() code := resp.StatusCode if code < 200 || code >= 300 { return errors.Errorf("StatusCode: %d, Status: %s", resp.StatusCode, resp.Status) } return nil }
{ cli := new(http.Client) return &Telemetry{ url: defaultURL, cli: cli, } }
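// Illustrative usage sketch for the Telemetry type above; it assumes a *Report
// value named report built elsewhere, and the timeout as well as the time/fmt
// imports are only for the example.
//
//	t := NewTelemetry()
//	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
//	defer cancel()
//	if err := t.Report(ctx, report); err != nil {
//		// Reporting failures are not fatal; just surface them.
//		fmt.Println("telemetry report failed:", err)
//	}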
.eslintrc.js
module.exports = { extends: [ '@linters/eslint-config-node', 'prettier', ],
rules: { 'max-lines': 0 } }
source_code.py
# -*- coding: utf-8 -*- """The source code classes.""" import collections from yaldevtools import definitions class EnumDeclaration(object): """Enumeration type declaration. Attributes: name (str): name. constants (dict[str, str]): constant values per name. """ def __init__(self, name): """Initializes an enumeration type declaration. Args: name (str): name. """ super(EnumDeclaration, self).__init__() self.constants = collections.OrderedDict() self.name = name class FunctionArgument(object):
class FunctionPrototype(object): """Function prototype. Attributes: arguments (list[FunctionArgument]): function arguments. have_bfio (bool): True if the function prototype is defined if BFIO is defined. have_debug_output (bool): True if the function prototype is defined if debug output is defined. have_extern (bool): True if the function prototype is defined as externally available (API). have_wide_character_type (bool): True if the function prototype is defined if the wide character type is defined. name (str): name. return_type (str): return type. return_values (set[str]): return values or None if the function does not return values. value_description (str): description of the value. """ def __init__(self, name, return_type): """Initializes a function prototype. Args: name (str): name. return_type (str): return type. """ super(FunctionPrototype, self).__init__() self._parsed_value = False self._value_name = None self._value_type = None self.arguments = [] self.have_bfio = False self.have_debug_output = False self.have_extern = False self.have_wide_character_type = False self.name = name self.return_type = return_type self.return_values = None self.value_description = None def AddArgument(self, argument): """Adds an argument to the function prototype. Args: argument (FunctionArgument): function argument. """ self.arguments.append(argument) def AddArgumentString(self, argument_string): """Adds an argument string to the function prototype. Args: argument_string (str): function argument. """ function_argument = FunctionArgument(argument_string) self.arguments.append(function_argument) def CopyToManpageString(self): """Copies the function prototype to a string to be used in manpage. Returns: str: function prototype to be used in manpage. """ argument_strings = [] for function_argument in self.arguments: argument_string = function_argument.CopyToString() argument_string = '"{0:s}"'.format(argument_string) argument_strings.append(argument_string) return ' '.join(argument_strings) def CopyToString(self): """Copies the function prototype to a string. Returns: str: function prototype. """ argument_strings = [] for function_argument in self.arguments: argument_string = function_argument.CopyToString() argument_strings.append(argument_string) return ', '.join(argument_strings) def _ParseValue(self): """Parses the value name and type.""" # Strip the library name. _, _, function_name = self.name.partition('_') # Strip the library type. _, _, function_name = function_name.partition('_') value_name = None value_type = None number_of_arguments = len(self.arguments) if function_name.startswith('get_utf'): if number_of_arguments in (3, 4): _, _, value_name = function_name.partition('_') _, _, value_name = value_name.partition('_') elif function_name.startswith('get_'): # TODO: handle by_index, by_path getters if number_of_arguments == 3: _, _, value_name = function_name.partition('_') self._parsed_value = True self._value_name = value_name self._value_type = value_type def GetValueName(self): """Determines the value name of a getter or setter function. Returns: str: value name or None if not available. """ if not self._parsed_value: self._ParseValue() return self._value_name def GetValueType(self): """Determines the value type of a getter or setter function. Returns: str: value type or None if not available. """ if not self._parsed_value: self._ParseValue() return self._value_type class PythonTypeObjectFunctionPrototype(object): """Python type object function prototype. 
Attributes: arguments (list[str]): arguments. data_type (str): data type. function_type (str): function type. object_type (str): object type. return_values (set[str]): return values or None if the function does not return values. value_description (str): description of the value. value_type (str): value type. """ def __init__(self, python_module_name, type_name, type_function): """Initializes a Python type object function prototype. Args: python_module_name (str): python module name. type_name (str): type name. type_function (str): type function. """ super(PythonTypeObjectFunctionPrototype, self).__init__() self._name = None self._python_module_name = python_module_name self._type_function = type_function self._type_name = type_name self._value_name = None self.arguments = [] self.data_type = definitions.DATA_TYPE_NONE self.function_type = None self.object_type = None self.return_values = None self.value_description = None self.value_type = None @property def name(self): """str: name.""" if self._name is None: self._name = '{0:s}_{1:s}_{2:s}'.format( self._python_module_name, self._type_name, self.type_function) return self._name @property def type_function(self): """str: type function.""" # TODO: make overrides more generic. if self._type_function == 'set_parent_file': return 'set_parent' if (self._type_function.startswith('copy_') and not self._type_function.startswith('copy_from_')): return 'get_{0:s}'.format(self._type_function[5:]) if (self._type_function.startswith('get_utf8_') or self._type_function.startswith('set_utf8_')): return ''.join([self._type_function[:4], self._type_function[9:]]) if self._type_function.startswith('get_data_as_'): _, _, type_function_suffix = self._type_function.partition('_data_as_') if type_function_suffix in ( '16bit_integer', '32bit_integer', '64bit_integer'): return 'get_data_as_integer' if type_function_suffix in ('filetime', 'floatingtime'): return 'get_data_as_datetime' if type_function_suffix == 'utf8_string': return 'get_data_as_string' return self._type_function if self._type_function.startswith('get_'): type_function_prefix, _, type_function_suffix = ( self._type_function.partition('_by_')) if type_function_suffix in ('entry', 'index'): return type_function_prefix if type_function_suffix in ('utf8_name', 'utf8_path'): return ''.join([self._type_function[:-10], self._type_function[-5:]]) if self._type_function.endswith('_utf8_string'): return ''.join([self._type_function[:-12], self._type_function[-7:]]) if self._type_function.endswith('_utf8_string_size'): return ''.join([self._type_function[:-17], self._type_function[-12:]]) return self._type_function @property def value_name(self): """str: value name.""" if self._value_name is None: # TODO: make overrides more generic. 
if self.function_type == definitions.FUNCTION_TYPE_COPY: if self._type_function.startswith('copy_'): self._value_name = self._type_function[5:] elif self.function_type == definitions.FUNCTION_TYPE_COPY_FROM: if self._type_function.startswith('copy_from_'): self._value_name = self._type_function[10:] elif self.function_type == definitions.FUNCTION_TYPE_COPY_TO: if self._type_function.startswith('get_'): self._value_name = self._type_function[4:] elif self.function_type in ( definitions.FUNCTION_TYPE_GET, definitions.FUNCTION_TYPE_GET_BY_IDENTIFIER, definitions.FUNCTION_TYPE_GET_BY_INDEX, definitions.FUNCTION_TYPE_GET_BY_NAME, definitions.FUNCTION_TYPE_GET_BY_PATH): type_function_prefix, _, _ = self._type_function.partition('_by_') if type_function_prefix.startswith('get_'): type_function_prefix = type_function_prefix[4:] if type_function_prefix.startswith('utf8_'): type_function_prefix = type_function_prefix[5:] self._value_name = type_function_prefix elif self.function_type == definitions.FUNCTION_TYPE_IS: if self._type_function.startswith('is_'): self._value_name = self._type_function[3:] elif self.function_type == definitions.FUNCTION_TYPE_SET: if self._type_function.startswith('set_utf8_'): self._value_name = self._type_function[9:] elif self._type_function.startswith('set_'): self._value_name = self._type_function[4:] return self._value_name def DataTypeIsDatetime(self): """Determines if the data type is a datetime type. Returns: bool: True if the data type is a datetime type. """ return self.data_type in ( definitions.DATA_TYPE_FAT_DATE_TIME, definitions.DATA_TYPE_FILETIME, definitions.DATA_TYPE_FLOATINGTIME, definitions.DATA_TYPE_POSIX_TIME) def DataTypeIsFloat(self): """Determines if the data type is a floating-point type. Returns: bool: True if the data type is a floating-point type. """ return self.data_type in ( definitions.DATA_TYPE_FLOAT, definitions.DATA_TYPE_DOUBLE) def DataTypeIsInteger(self): """Determines if the data type is an integer type. Returns: bool: True if the data type is an integer type. """ return self.data_type in ( definitions.DATA_TYPE_INT, definitions.DATA_TYPE_INT32, definitions.DATA_TYPE_OFF64, definitions.DATA_TYPE_SIZE32, definitions.DATA_TYPE_SIZE64, definitions.DATA_TYPE_UINT8, definitions.DATA_TYPE_UINT16, definitions.DATA_TYPE_UINT32, definitions.DATA_TYPE_UINT64) def GetAttributeDescription(self): """Retrieves the fuction as attribute description. Returns: str: function as attribute description. """ description = '' type_function = self.type_function value_name = self.value_name if value_name: value_name = value_name.replace('_', ' ') if type_function == 'get_ascii_codepage': description = ( 'The codepage used for ASCII strings in the {0:s}.').format( self._type_name) elif type_function == 'get_data_as_boolean': description = 'The data as a boolean.' elif type_function == 'get_data_as_datetime': description = 'The data as a datetime object.' elif type_function == 'get_data_as_integer': description = 'The data as an integer.' elif type_function == 'get_data_as_floating_point': description = 'The data as a floating point.' elif type_function == 'get_data_as_string': description = 'The data as a string.' 
elif self.function_type == definitions.FUNCTION_TYPE_IS: type_name = self._type_name if type_name: type_name = type_name.replace('_', ' ') description = 'Indicates the {0:s} is {1:s}.'.format( type_name, value_name) elif self.value_description: description = 'The {0:s}.'.format(self.value_description) elif value_name: description = 'The {0:s}.'.format(value_name) return description def GetDataTypeDescription(self): """Retrieves the data type description. Returns: str: data type description. """ if self.data_type == definitions.DATA_TYPE_BINARY_DATA: data_type_description = 'Binary string' elif self.data_type == definitions.DATA_TYPE_BOOLEAN: data_type_description = 'Boolean' elif self.DataTypeIsDatetime(): data_type_description = 'Datetime' elif self.data_type == definitions.DATA_TYPE_OBJECT: data_type_description = 'Object' elif self.DataTypeIsFloat(): data_type_description = 'Float' elif self.DataTypeIsInteger(): data_type_description = 'Integer' elif self.data_type in ( definitions.DATA_TYPE_GUID, definitions.DATA_TYPE_STRING, definitions.DATA_TYPE_UUID): data_type_description = 'Unicode string' elif self.data_type == definitions.DATA_TYPE_NARROW_STRING: data_type_description = 'String' elif self.data_type == definitions.DATA_TYPE_NONE: data_type_description = 'None' else: data_type_description = self.data_type if (data_type_description != 'None' and self.return_values and 'None' in self.return_values): data_type_description = '{0:s} or None'.format(data_type_description) return data_type_description def GetDescription(self): """Retrieves the description. Returns: list[str]: lines of the description. """ description = [''] type_function = self.type_function type_name = self._type_name if type_name: type_name = type_name.replace('_', ' ') value_name = self.value_name if value_name: value_name = value_name.replace('_', ' ') if type_function == 'close': description = ['Closes a {0:s}.'.format(type_name)] elif type_function == 'get_ascii_codepage': description = [( 'Retrieves the codepage for ASCII strings used in ' 'the {0:s}.').format(type_name)] elif type_function == 'get_data_as_boolean': description = ['Retrieves the data as a boolean.'] elif type_function == 'get_data_as_datetime': description = ['Retrieves the data as a datetime object.'] elif type_function == 'get_data_as_integer': description = ['Retrieves the data as an integer.'] elif type_function == 'get_data_as_floating_point': description = ['Retrieves the data as a floating point.'] elif type_function == 'get_data_as_string': description = ['Retrieves the data as a string.'] elif type_function == 'get_string': description = ['Retrieves the {0:s} formatted as a string.'.format( type_name)] elif type_function == 'open': description = ['Opens a {0:s}.'.format(type_name)] elif type_function == 'open_file_object': description = [( 'Opens a {0:s} using a file-like object.').format(type_name)] elif type_function == 'read_buffer': if self.value_description: description = ['Reads a buffer of {0:s}.'.format( self.value_description)] else: description = ['Reads a buffer of data.'] elif type_function == 'read_buffer_at_offset': if self.value_description: description = ['Reads a buffer of {0:s} at a specific offset.'.format( self.value_description)] else: description = ['Reads a buffer of data at a specific offset.'] elif type_function == 'seek_offset': if self.value_description: description = ['Seeks an offset within the {0:s}.'.format( self.value_description)] else: description = ['Seeks an offset within the data.'] elif type_function 
== 'set_ascii_codepage': description = [ ('Sets the codepage for ASCII strings used in the ' '{0:s}.').format(type_name), ('Expects the codepage to be a string containing a Python ' 'codec definition.')] elif type_function == 'set_parent': description = ['Sets the parent file.'] elif type_function == 'signal_abort': description = ['Signals the {0:s} to abort the current activity.'.format( type_name)] elif self.function_type == definitions.FUNCTION_TYPE_GET_BY_INDEX: _, _, argument_suffix = self.arguments[0].rpartition('_') if self.value_description: description = ['Retrieves the {0:s} specified by the {1:s}.'.format( self.value_description, argument_suffix)] else: description = ['Retrieves the {0:s} specified by the {1:s}.'.format( value_name, argument_suffix)] elif self.function_type in ( definitions.FUNCTION_TYPE_GET_BY_IDENTIFIER, definitions.FUNCTION_TYPE_GET_BY_NAME, definitions.FUNCTION_TYPE_GET_BY_PATH): _, _, type_function_suffix = type_function.partition('_by_') if self.value_description: description = ['Retrieves the {0:s} specified by the {1:s}.'.format( self.value_description, type_function_suffix)] else: description = ['Retrieves the {0:s} specified by the {1:s}.'.format( value_name, type_function_suffix)] elif self.function_type == definitions.FUNCTION_TYPE_COPY_FROM: # TODO: fix value name. description = ['Copies the {0:s} from the {1:s}.'.format( type_name, value_name)] elif self.function_type in ( definitions.FUNCTION_TYPE_COPY, definitions.FUNCTION_TYPE_GET): if self.value_description: description = ['Retrieves the {0:s}.'.format(self.value_description)] else: description = ['Retrieves the {0:s}.'.format(value_name)] elif self.function_type == definitions.FUNCTION_TYPE_IS: description = ['Determines if the {0:s} is {1:s}.'.format( type_name, value_name)] elif self.function_type == definitions.FUNCTION_TYPE_SET: description = ['Sets the {0:s}.'.format(value_name)] return description def GetValueNameAndPrefix(self): """Determines the value name and its prefix. Returns: tuple[str, str]: value name and prefix. """ if self.value_name: value_name_prefix, _, value_name = self.value_name.partition('_') if value_name_prefix in ('root', 'sub'): return value_name, value_name_prefix return self.value_name, None
"""Function argument.""" def __init__(self, argument_string): """Initializes a function argument. Args: argument_string (str): function argument. """ super(FunctionArgument, self).__init__() self._strings = [argument_string] def AddArgumentString(self, argument_string): """Adds an argument string to the function argument. Args: argument_string (str): function argument. """ self._strings.append(argument_string) def CopyToString(self): """Copies the function argument to a string. Returns: str: function argument. """ number_of_strings = len(self._strings) argument_string = '' if number_of_strings == 1: argument_string = self._strings[0] elif number_of_strings > 1: argument_string = '{0:s}{1:s}'.format( self._strings[0], ', '.join(self._strings[1:])) return argument_string
wait_for_pvcs.go
/*
Copyright 2019 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package util

import (
	"fmt"
	"strings"
	"time"

	clientset "k8s.io/client-go/kubernetes"
	"k8s.io/klog"
)

// WaitForPVCOptions is a set of options used by the WaitForPVCs method.
type WaitForPVCOptions struct {
	Selector            *ObjectSelector
	DesiredPVCCount     int
	EnableLogging       bool
	CallerName          string
	WaitForPVCsInterval time.Duration
}

// WaitForPVCs waits until the desired number of PVCs is bound.
// PVCs can be specified by namespace, field and/or label selectors.
// If stopCh is closed before all PVCs are bound, an error is returned.
func WaitForPVCs(clientSet clientset.Interface, stopCh <-chan struct{}, options *WaitForPVCOptions) error {
	ps, err := NewPVCStore(clientSet, options.Selector)
	if err != nil {
		return fmt.Errorf("PVC store creation error: %v", err)
	}
	defer ps.Stop()

	oldPVCs := ps.List()
	scaling := uninitialized
	var pvcsStatus PVCsStartupStatus

	switch {
	case len(oldPVCs) == options.DesiredPVCCount:
		scaling = none
	case len(oldPVCs) < options.DesiredPVCCount:
		scaling = up
	case len(oldPVCs) > options.DesiredPVCCount:
		scaling = down
	}

	for {
		select {
		case <-stopCh:
			return fmt.Errorf("timeout while waiting for %d PVCs to be bound in namespace '%v' with labels '%v' and fields '%v' - only %d found bound",
				options.DesiredPVCCount, options.Selector.Namespace, options.Selector.LabelSelector, options.Selector.FieldSelector, pvcsStatus.Bound)
		case <-time.After(options.WaitForPVCsInterval):
			pvcs := ps.List()
			pvcsStatus = ComputePVCsStartupStatus(pvcs, options.DesiredPVCCount)

			diff := DiffPVCs(oldPVCs, pvcs)
			deletedPVCs := diff.DeletedPVCs()
			if scaling != down && len(deletedPVCs) > 0 {
				klog.Errorf("%s: %s: %d PVCs disappeared: %v", options.CallerName, options.Selector.String(), len(deletedPVCs), strings.Join(deletedPVCs, ", "))
			}
			addedPVCs := diff.AddedPVCs()
			if scaling != up && len(addedPVCs) > 0 {
				klog.Errorf("%s: %s: %d PVCs appeared: %v", options.CallerName, options.Selector.String(), len(addedPVCs), strings.Join(addedPVCs, ", "))
			}
			if options.EnableLogging {
				klog.Infof("%s: %s: %s", options.CallerName, options.Selector.String(), pvcsStatus.String())
			}
			// We wait until the desired number of PVCs is bound and all other PVCs are pending.
			if len(pvcs) == (pvcsStatus.Bound+pvcsStatus.Pending) && pvcsStatus.Bound == options.DesiredPVCCount {
				return nil
			}
			oldPVCs = pvcs
		}
	}
}
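// For context, a minimal sketch of how a caller might drive WaitForPVCs. It is written
// as if it lived in the same util package, so no import path for the package has to be
// assumed. The ObjectSelector field names (Namespace, LabelSelector) are taken from the
// timeout message above; the namespace, label selector, counts, and intervals are
// purely illustrative.
func exampleWaitForPVCs(client clientset.Interface) error {
	// Closing stopCh makes WaitForPVCs give up and return its timeout error.
	stopCh := make(chan struct{})
	timer := time.AfterFunc(5*time.Minute, func() { close(stopCh) })
	defer timer.Stop()

	options := &WaitForPVCOptions{
		Selector: &ObjectSelector{
			Namespace:     "pvc-test",   // illustrative values
			LabelSelector: "group=load",
		},
		DesiredPVCCount:     100,
		EnableLogging:       true,
		CallerName:          "PVCLoadTest",
		WaitForPVCsInterval: 5 * time.Second,
	}
	return WaitForPVCs(client, stopCh, options)
}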
builder_test.go
package tx import ( "testing" "github.com/stretchr/testify/require" "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/codec/legacy" codectypes "github.com/cosmos/cosmos-sdk/codec/types" "github.com/cosmos/cosmos-sdk/testutil/testdata" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" txtypes "github.com/cosmos/cosmos-sdk/types/tx" "github.com/cosmos/cosmos-sdk/types/tx/signing" ) func TestTxBuilder(t *testing.T) { _, pubkey, addr := testdata.KeyTestPubAddr() marshaler := codec.NewProtoCodec(codectypes.NewInterfaceRegistry()) txBuilder := newBuilder(nil) memo := "sometestmemo" msgs := []sdk.Msg{testdata.NewTestMsg(addr)} accSeq := uint64(2) // Arbitrary account sequence any, err := codectypes.NewAnyWithValue(pubkey) require.NoError(t, err) var signerInfo []*txtypes.SignerInfo signerInfo = append(signerInfo, &txtypes.SignerInfo{ PublicKey: any, ModeInfo: &txtypes.ModeInfo{ Sum: &txtypes.ModeInfo_Single_{ Single: &txtypes.ModeInfo_Single{ Mode: signing.SignMode_SIGN_MODE_DIRECT, }, }, }, Sequence: accSeq, }) var sig signing.SignatureV2 = signing.SignatureV2{ PubKey: pubkey, Data: &signing.SingleSignatureData{ SignMode: signing.SignMode_SIGN_MODE_DIRECT, Signature: legacy.Cdc.MustMarshal(pubkey), }, Sequence: accSeq, } fee := txtypes.Fee{Amount: sdk.NewCoins(sdk.NewInt64Coin("atom", 150)), GasLimit: 20000} t.Log("verify that authInfo bytes encoded with DefaultTxEncoder and decoded with DefaultTxDecoder can be retrieved from getAuthInfoBytes") authInfo := &txtypes.AuthInfo{ Fee: &fee, SignerInfos: signerInfo, } authInfoBytes := marshaler.MustMarshal(authInfo) require.NotEmpty(t, authInfoBytes) t.Log("verify that body bytes encoded with DefaultTxEncoder and decoded with DefaultTxDecoder can be retrieved from getBodyBytes") anys := make([]*codectypes.Any, len(msgs)) for i, msg := range msgs { var err error anys[i], err = codectypes.NewAnyWithValue(msg) if err != nil { panic(err) } } txBody := &txtypes.TxBody{ Memo: memo, Messages: anys, } bodyBytes := marshaler.MustMarshal(txBody) require.NotEmpty(t, bodyBytes) require.Empty(t, txBuilder.getBodyBytes()) t.Log("verify that calling the SetMsgs, SetMemo results in the correct getBodyBytes") require.NotEqual(t, bodyBytes, txBuilder.getBodyBytes()) err = txBuilder.SetMsgs(msgs...) require.NoError(t, err) require.NotEqual(t, bodyBytes, txBuilder.getBodyBytes()) txBuilder.SetMemo(memo) require.Equal(t, bodyBytes, txBuilder.getBodyBytes()) require.Equal(t, len(msgs), len(txBuilder.GetMsgs())) pks, err := txBuilder.GetPubKeys() require.NoError(t, err) require.Empty(t, pks) t.Log("verify that updated AuthInfo results in the correct getAuthInfoBytes and GetPubKeys") require.NotEqual(t, authInfoBytes, txBuilder.getAuthInfoBytes()) txBuilder.SetFeeAmount(fee.Amount) require.NotEqual(t, authInfoBytes, txBuilder.getAuthInfoBytes()) txBuilder.SetGasLimit(fee.GasLimit) require.NotEqual(t, authInfoBytes, txBuilder.getAuthInfoBytes()) err = txBuilder.SetSignatures(sig) require.NoError(t, err) // once fee, gas and signerInfos are all set, AuthInfo bytes should match require.Equal(t, authInfoBytes, txBuilder.getAuthInfoBytes()) require.Equal(t, len(msgs), len(txBuilder.GetMsgs())) pks, err = txBuilder.GetPubKeys() require.NoError(t, err) require.Equal(t, 1, len(pks)) require.True(t, pubkey.Equals(pks[0]))
txBuilder.SetExtensionOptions(any) require.Equal(t, []*codectypes.Any{any}, txBuilder.GetExtensionOptions()) txBuilder.SetNonCriticalExtensionOptions(any) require.Equal(t, []*codectypes.Any{any}, txBuilder.GetNonCriticalExtensionOptions()) txBuilder = &wrapper{} require.NotPanics(t, func() { _ = txBuilder.GetMsgs() }) } func TestBuilderValidateBasic(t *testing.T) { // keys and addresses _, pubKey1, addr1 := testdata.KeyTestPubAddr() _, pubKey2, addr2 := testdata.KeyTestPubAddr() // msg and signatures msg1 := testdata.NewTestMsg(addr1, addr2) feeAmount := testdata.NewTestFeeAmount() msgs := []sdk.Msg{msg1} // require to fail validation upon invalid fee badFeeAmount := testdata.NewTestFeeAmount() badFeeAmount[0].Amount = sdk.NewInt(-5) txBuilder := newBuilder(nil) var sig1, sig2 signing.SignatureV2 sig1 = signing.SignatureV2{ PubKey: pubKey1, Data: &signing.SingleSignatureData{ SignMode: signing.SignMode_SIGN_MODE_DIRECT, Signature: legacy.Cdc.MustMarshal(pubKey1), }, Sequence: 0, // Arbitrary account sequence } sig2 = signing.SignatureV2{ PubKey: pubKey2, Data: &signing.SingleSignatureData{ SignMode: signing.SignMode_SIGN_MODE_DIRECT, Signature: legacy.Cdc.MustMarshal(pubKey2), }, Sequence: 0, // Arbitrary account sequence } err := txBuilder.SetMsgs(msgs...) require.NoError(t, err) txBuilder.SetGasLimit(200000) err = txBuilder.SetSignatures(sig1, sig2) require.NoError(t, err) txBuilder.SetFeeAmount(badFeeAmount) err = txBuilder.ValidateBasic() require.Error(t, err) _, code, _ := sdkerrors.ABCIInfo(err, false) require.Equal(t, sdkerrors.ErrInsufficientFee.ABCICode(), code) // require to fail validation when no signatures exist err = txBuilder.SetSignatures() require.NoError(t, err) txBuilder.SetFeeAmount(feeAmount) err = txBuilder.ValidateBasic() require.Error(t, err) _, code, _ = sdkerrors.ABCIInfo(err, false) require.Equal(t, sdkerrors.ErrNoSignatures.ABCICode(), code) // require to fail with nil values for tx, authinfo err = txBuilder.SetMsgs(msgs...) 
require.NoError(t, err) err = txBuilder.ValidateBasic() require.Error(t, err) // require to fail validation when signatures do not match expected signers err = txBuilder.SetSignatures(sig1) require.NoError(t, err) err = txBuilder.ValidateBasic() require.Error(t, err) _, code, _ = sdkerrors.ABCIInfo(err, false) require.Equal(t, sdkerrors.ErrUnauthorized.ABCICode(), code) require.Error(t, err) txBuilder.SetFeeAmount(feeAmount) err = txBuilder.SetSignatures(sig1, sig2) require.NoError(t, err) err = txBuilder.ValidateBasic() require.NoError(t, err) // gas limit too high txBuilder.SetGasLimit(txtypes.MaxGasWanted + 1) err = txBuilder.ValidateBasic() require.Error(t, err) txBuilder.SetGasLimit(txtypes.MaxGasWanted - 1) err = txBuilder.ValidateBasic() require.NoError(t, err) // bad builder structs // missing body body := txBuilder.tx.Body txBuilder.tx.Body = nil err = txBuilder.ValidateBasic() require.Error(t, err) txBuilder.tx.Body = body err = txBuilder.ValidateBasic() require.NoError(t, err) // missing fee f := txBuilder.tx.AuthInfo.Fee txBuilder.tx.AuthInfo.Fee = nil err = txBuilder.ValidateBasic() require.Error(t, err) txBuilder.tx.AuthInfo.Fee = f err = txBuilder.ValidateBasic() require.NoError(t, err) // missing AuthInfo authInfo := txBuilder.tx.AuthInfo txBuilder.tx.AuthInfo = nil err = txBuilder.ValidateBasic() require.Error(t, err) txBuilder.tx.AuthInfo = authInfo err = txBuilder.ValidateBasic() require.NoError(t, err) // missing tx txBuilder.tx = nil err = txBuilder.ValidateBasic() require.Error(t, err) } func TestBuilderFeePayer(t *testing.T) { // keys and addresses _, _, addr1 := testdata.KeyTestPubAddr() _, _, addr2 := testdata.KeyTestPubAddr() _, _, addr3 := testdata.KeyTestPubAddr() // msg and signatures msg1 := testdata.NewTestMsg(addr1, addr2) feeAmount := testdata.NewTestFeeAmount() msgs := []sdk.Msg{msg1} cases := map[string]struct { txFeePayer sdk.AccAddress expectedSigners []sdk.AccAddress expectedPayer sdk.AccAddress }{ "no fee payer specified": { expectedSigners: []sdk.AccAddress{addr1, addr2}, expectedPayer: addr1, }, "secondary signer set as fee payer": { txFeePayer: addr2, expectedSigners: []sdk.AccAddress{addr1, addr2}, expectedPayer: addr2, }, "outside signer set as fee payer": { txFeePayer: addr3, expectedSigners: []sdk.AccAddress{addr1, addr2, addr3}, expectedPayer: addr3, }, } for name, tc := range cases { t.Run(name, func(t *testing.T) { // setup basic tx txBuilder := newBuilder(nil) err := txBuilder.SetMsgs(msgs...) require.NoError(t, err) txBuilder.SetGasLimit(200000) txBuilder.SetFeeAmount(feeAmount) // set fee payer txBuilder.SetFeePayer(tc.txFeePayer) // and check it updates fields properly require.Equal(t, tc.expectedSigners, txBuilder.GetSigners()) require.Equal(t, tc.expectedPayer, txBuilder.FeePayer()) }) } } func TestBuilderFeeGranter(t *testing.T) { // keys and addresses _, _, addr1 := testdata.KeyTestPubAddr() // msg and signatures msg1 := testdata.NewTestMsg(addr1, addr2) feeAmount := testdata.NewTestFeeAmount() msgs := []sdk.Msg{msg1} txBuilder := newBuilder(nil) err := txBuilder.SetMsgs(msgs...) require.NoError(t, err) txBuilder.SetGasLimit(200000) txBuilder.SetFeeAmount(feeAmount) require.Empty(t, txBuilder.GetTx().FeeGranter()) // set fee granter txBuilder.SetFeeGranter(addr1) require.Equal(t, addr1, txBuilder.GetTx().FeeGranter()) }
any, err = codectypes.NewAnyWithValue(testdata.NewTestMsg()) require.NoError(t, err)
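// A condensed happy-path sketch distilled from the tests above, assuming it sits in the
// same tx test package (newBuilder and ValidateBasic are package-internal). All helper
// calls (testdata.KeyTestPubAddr, testdata.NewTestMsg, testdata.NewTestFeeAmount,
// legacy.Cdc.MustMarshal) are the ones already used by these tests; the gas limit and
// memo are illustrative.
func buildAndValidateTx(t *testing.T) {
	_, pubKey, addr := testdata.KeyTestPubAddr()

	txBuilder := newBuilder(nil)
	require.NoError(t, txBuilder.SetMsgs(testdata.NewTestMsg(addr)))
	txBuilder.SetMemo("sometestmemo")
	txBuilder.SetGasLimit(200000)
	txBuilder.SetFeeAmount(testdata.NewTestFeeAmount())

	// A single-signer message needs exactly one matching signature for ValidateBasic to pass.
	sig := signing.SignatureV2{
		PubKey: pubKey,
		Data: &signing.SingleSignatureData{
			SignMode:  signing.SignMode_SIGN_MODE_DIRECT,
			Signature: legacy.Cdc.MustMarshal(pubKey), // placeholder signature bytes, as in the tests
		},
		Sequence: 0,
	}
	require.NoError(t, txBuilder.SetSignatures(sig))
	require.NoError(t, txBuilder.ValidateBasic())
}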
SelectedConnectionsScreen.js
import React from 'react'; import { TouchableOpacity, View } from 'react-native'; import { TabView, SceneMap } from 'react-native-tab-view'; import BackButton from '../../components/BackButton'; import styles from '../../constants/Connections/ConnectionTabs'; import Animated from 'react-native-reanimated'; import People from './tabs/SelectedPeopleScreen'; import Organizations from './tabs/SelectedOrgScreen'; export default class
extends React.Component { static navigationOptions = ({ navigation }) => { return { title: 'CONNECTIONS', headerStyle: { backgroundColor: '#323338' }, headerTintColor: '#fff', headerLeft: () => <BackButton navigation={navigation} /> }; }; constructor(props) { super(props); const routes = [ { key: 'organizations', title: 'Organizations' }, { key: 'people', title: 'People' } ]; this.state = { index: 0, routes }; } handleIndexChange = index => this.setState({ index }); renderTabBar = props => { return ( <View style={styles.tabBar}> {props.navigationState.routes.map((route, i) => { return ( <TouchableOpacity key={i} style={{ ...styles.tabItem, borderBottomColor: `rgba(0, 255, 157, ${ this.state.index === i ? 1 : 0 })` }} onPress={() => this.setState({ index: i })} > <Animated.Text style={{ fontFamily: 'Lato-Bold', fontSize: 18 }}> {route.title} </Animated.Text> {/* <Badge // status='success' textStyle={{ color: "black", fontSize: 12 }} badgeStyle={{ backgroundColor: "#CAFF03" }} containerStyle={{ position: "absolute", top: 10, right: 53 }} value={0} /> */} </TouchableOpacity> ); })} </View> ); }; renderScene = SceneMap({ organizations: () => <Organizations profile={this.props.profile} />, people: () => <People profile={this.props.profile} /> }); render() { return ( <TabView navigationState={this.state} renderScene={this.renderScene} renderTabBar={this.renderTabBar} onIndexChange={this.handleIndexChange} /> ); } }
SelectedConnectionsScreen
notification-preferences-test.ts
import { registry, setupHub } from '../helpers/server'; const stubNonce = 'abc:123'; let stubAuthToken = 'def--456'; let stubTimestamp = process.hrtime.bigint(); class StubAuthenticationUtils { generateNonce() { return stubNonce; } buildAuthToken() { return stubAuthToken; } extractVerifiedTimestamp(_nonce: string) { return stubTimestamp; } validateAuthToken(encryptedAuthToken: string) { return handleValidateAuthToken(encryptedAuthToken); } } let stubUserAddress = '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13'; function
(encryptedString: string) { expect(encryptedString).to.equal('abc123--def456--ghi789'); return stubUserAddress; } describe('GET /api/notification-preferences/:push_client_id', async function () { let { request, getContainer } = setupHub(this); this.beforeEach(async function () { registry(this).register('authentication-utils', StubAuthenticationUtils); let dbManager = await getContainer().lookup('database-manager'); let db = await dbManager.getClient(); await db.query('INSERT INTO notification_types(id, notification_type, default_status) VALUES($1, $2, $3)', [ '73994d4b-bb3a-4d73-969f-6fa24da16fb4', 'merchant_claim', 'enabled', ]); await db.query('INSERT INTO notification_types(id, notification_type, default_status) VALUES($1, $2, $3)', [ '2cbe34e4-f41d-41d5-b7d2-ee875dc7c588', 'customer_payment', 'enabled', ]); }); it('returns 401 without bearer token', async function () { await request() .get('/api/notification-preferences/PUSH_CLIENT_ID') .send({}) .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(401) .expect({ errors: [ { status: '401', title: 'No valid auth token', }, ], }) .expect('Content-Type', 'application/vnd.api+json'); }); it('returns default preferences when none are defined for the EOA/device pair', async function () { await request() .get('/api/notification-preferences/1234567') .send({}) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200) .expect({ data: [ { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '1234567', 'notification-type': 'merchant_claim', status: 'enabled', }, }, { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '1234567', 'notification-type': 'customer_payment', status: 'enabled', }, }, ], }); }); it('returns overriden preference when EOA/device pair has a preference saved', async function () { let pushClientId = '1234567'; let notificationPreferenceQueries = await getContainer().lookup('notification-preference', { type: 'query' }); await notificationPreferenceQueries.upsert({ ownerAddress: stubUserAddress, pushClientId, notificationType: 'customer_payment', status: 'disabled', }); await request() .get(`/api/notification-preferences/${pushClientId}`) .send({}) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200) .expect({ data: [ { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '1234567', 'notification-type': 'merchant_claim', status: 'enabled', }, }, { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '1234567', 'notification-type': 'customer_payment', status: 'disabled', }, }, ], }); }); }); describe('PUT /api/notification-preferences/:push_client_id', async function () { let { request, getContainer } = setupHub(this); this.beforeEach(async function () { registry(this).register('authentication-utils', StubAuthenticationUtils); let dbManager = await getContainer().lookup('database-manager'); let db = await dbManager.getClient(); await db.query('INSERT INTO notification_types(id, notification_type, default_status) VALUES($1, $2, $3)', [ '73994d4b-bb3a-4d73-969f-6fa24da16fb4', 
'merchant_claim', 'enabled', ]); await db.query('INSERT INTO notification_types(id, notification_type, default_status) VALUES($1, $2, $3)', [ '2cbe34e4-f41d-41d5-b7d2-ee875dc7c588', 'customer_payment', 'enabled', ]); }); it('returns 401 without bearer token', async function () { await request() .put('/api/notification-preferences/1234567') .send({}) .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(401) .expect({ errors: [ { status: '401', title: 'No valid auth token', }, ], }) .expect('Content-Type', 'application/vnd.api+json'); }); it('creates a new preference', async function () { await request() .put('/api/notification-preferences/1234567') .send({ data: { type: 'notification-preference', attributes: { 'notification-type': 'merchant_claim', status: 'disabled', }, }, }) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200) .expect({ data: { type: 'notification-preference', attributes: { 'owner-address': stubUserAddress, 'push-client-id': '1234567', 'notification-type': 'merchant_claim', status: 'disabled', }, }, }); let notificationPreferenceQueries = await getContainer().lookup('notification-preference', { type: 'query' }); let records = await notificationPreferenceQueries.query({ ownerAddress: stubUserAddress, pushClientId: '1234567', notificationType: 'merchant_claim', }); expect(records.length).to.equal(1); expect(records[0].ownerAddress).to.equal(stubUserAddress); expect(records[0].pushClientId).to.equal('1234567'); expect(records[0].notificationType).to.equal('merchant_claim'); expect(records[0].status).to.equal('disabled'); await request() .get('/api/notification-preferences/1234567') .send({}) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200) .expect({ data: [ { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '1234567', 'notification-type': 'merchant_claim', status: 'disabled', }, }, { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '1234567', 'notification-type': 'customer_payment', status: 'enabled', }, }, ], }); }); it('updates a preference', async function () { let notificationPreferenceQueries = await getContainer().lookup('notification-preference', { type: 'query' }); await notificationPreferenceQueries.upsert({ ownerAddress: stubUserAddress, pushClientId: '1234567', notificationType: 'customer_payment', status: 'disabled', }); await request() .put('/api/notification-preferences/1234567') .send({ data: { type: 'notification-preference', attributes: { 'notification-type': 'customer_payment', status: 'disabled', }, }, }) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200) .expect({ data: { type: 'notification-preference', attributes: { 'owner-address': stubUserAddress, 'push-client-id': '1234567', 'notification-type': 'customer_payment', status: 'disabled', }, }, }); let records = await notificationPreferenceQueries.query({ ownerAddress: stubUserAddress, pushClientId: '1234567', }); expect(records.length).to.equal(1); expect(records[0].ownerAddress).to.equal(stubUserAddress); expect(records[0].pushClientId).to.equal('1234567'); 
expect(records[0].notificationType).to.equal('customer_payment'); expect(records[0].status).to.equal('disabled'); await request() .get('/api/notification-preferences/1234567') .send({}) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200) .expect({ data: [ { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '1234567', 'notification-type': 'merchant_claim', status: 'enabled', }, }, { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '1234567', 'notification-type': 'customer_payment', status: 'disabled', }, }, ], }); }); it('is idempotent when saving a new preference', async function () { await request() .put('/api/notification-preferences/1234567') .send({ data: { type: 'notification-preference', attributes: { 'push-client-id': '1234567', 'notification-type': 'merchant_claim', status: 'disabled', }, }, }) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200) .expect({ data: { type: 'notification-preference', attributes: { 'owner-address': stubUserAddress, 'push-client-id': '1234567', 'notification-type': 'merchant_claim', status: 'disabled', }, }, }); // second same request await request() .put('/api/notification-preferences/1234567') .send({ data: { type: 'notification-preference', attributes: { 'push-client-id': '1234567', 'notification-type': 'merchant_claim', status: 'disabled', }, }, }) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200) .expect({ data: { type: 'notification-preference', attributes: { 'push-client-id': '1234567', 'owner-address': stubUserAddress, 'notification-type': 'merchant_claim', status: 'disabled', }, }, }); await request() .get('/api/notification-preferences/1234567') .send({}) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200) .expect({ data: [ { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '1234567', 'notification-type': 'merchant_claim', status: 'disabled', }, }, { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '1234567', 'notification-type': 'customer_payment', status: 'enabled', }, }, ], }); }); it('does not create duplicates when toggling a couple of times', async function () { await request() .put('/api/notification-preferences/1234567') .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .send({ data: { type: 'notification-preference', attributes: { 'notification-type': 'merchant_claim', status: 'disabled', }, }, }) .expect(200); await request() .put('/api/notification-preferences/1234567') .send({ data: { type: 'notification-preference', attributes: { 'notification-type': 'merchant_claim', status: 'enabled', }, }, }) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200); await request() 
.put('/api/notification-preferences/1234567') .send({ data: { type: 'notification-preference', attributes: { 'notification-type': 'customer_payment', status: 'disabled', }, }, }) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200); await request() .put('/api/notification-preferences/1234567') .send({ data: { type: 'notification-preference', attributes: { 'notification-type': 'customer_payment', status: 'enabled', }, }, }) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200); let notificationPreferenceQueries = await getContainer().lookup('notification-preference', { type: 'query' }); let records = await notificationPreferenceQueries.query({ ownerAddress: stubUserAddress, pushClientId: '1234567', }); expect(records.length).to.equal(2); let revenueClaimedPreference = records.find((r) => r.notificationType === 'merchant_claim')!; let merchantPaymentPreference = records.find((r) => r.notificationType === 'customer_payment')!; expect(revenueClaimedPreference.ownerAddress).to.equal(stubUserAddress); expect(revenueClaimedPreference.pushClientId).to.equal('1234567'); expect(revenueClaimedPreference.notificationType).to.equal('merchant_claim'); expect(revenueClaimedPreference.status).to.equal('enabled'); expect(merchantPaymentPreference.ownerAddress).to.equal(stubUserAddress); expect(merchantPaymentPreference.pushClientId).to.equal('1234567'); expect(merchantPaymentPreference.notificationType).to.equal('customer_payment'); expect(merchantPaymentPreference.status).to.equal('enabled'); }); it('allows creating preferences for multiple devices on a single EOA', async function () { await request() .put('/api/notification-preferences/1234567') .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .send({ data: { type: 'notification-preference', attributes: { 'notification-type': 'merchant_claim', status: 'disabled', }, }, }) .expect(200); await request() .put('/api/notification-preferences/7654321') .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .send({ data: { type: 'notification-preference', attributes: { 'notification-type': 'merchant_claim', status: 'disabled', }, }, }) .expect(200); // At this point, an EOA should have two sets of notification preferences, for each device await request() .get('/api/notification-preferences/1234567') .send({}) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200) .expect({ data: [ { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '1234567', 'notification-type': 'merchant_claim', status: 'disabled', }, }, { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '1234567', 'notification-type': 'customer_payment', status: 'enabled', }, }, ], }); await request() .get('/api/notification-preferences/7654321') .send({}) .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .expect(200) .expect({ data: [ { type: 
'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '7654321', 'notification-type': 'merchant_claim', status: 'disabled', }, }, { type: 'notification-preference', attributes: { 'owner-address': '0x2f58630CA445Ab1a6DE2Bb9892AA2e1d60876C13', 'push-client-id': '7654321', 'notification-type': 'customer_payment', status: 'enabled', }, }, ], }); }); it('should fail when mandatory attributes are not given', async function () { await request() .put('/api/notification-preferences/1234567') .set('Authorization', 'Bearer abc123--def456--ghi789') .set('Accept', 'application/vnd.api+json') .set('Content-Type', 'application/vnd.api+json') .send({ data: { type: 'notification-preference', attributes: {}, }, }) .expect(422) .expect({ errors: [ { detail: 'Must be present', source: { pointer: '/data/attributes/status', }, status: '422', title: 'Invalid attribute', }, { detail: 'Must be present', source: { pointer: '/data/attributes/notification-type', }, status: '422', title: 'Invalid attribute', }, ], }); }); });
handleValidateAuthToken
showcase-file-selector.component.ts
import { Component } from '@angular/core'; @Component({ selector: 'showcase-file-selector', templateUrl: 'showcase-file-selector.component.html' }) export class ShowcaseFileSelectorComponent { public fileName: string; public file: File;
constructor() { } }
public fileList: FileList; public fileNameMultiple: string; public fileListMultiple: FileList;
main.rs
//! rustbuild, the Rust build system //! //! This is the entry point for the build system used to compile the `rustc` //! compiler. Lots of documentation can be found in the `README.md` file in the //! parent directory, and otherwise documentation can be found throughout the `build` //! directory in each respective module. use std::env; use bootstrap::{Build, Config}; fn main()
fn check_version(config: &Config) -> Option<String> { const VERSION: usize = 1; let mut msg = String::new(); let suggestion = if let Some(seen) = config.changelog_seen { if seen != VERSION { msg.push_str("warning: there have been changes to x.py since you last updated.\n"); format!("update `config.toml` to use `changelog-seen = {}` instead", VERSION) } else { return None; } } else { msg.push_str("warning: x.py has made several changes recently you may want to look at\n"); format!("add `changelog-seen = {}` to `config.toml`", VERSION) }; msg.push_str("help: consider looking at the changes in `src/bootstrap/CHANGELOG.md`\n"); msg.push_str("note: to silence this warning, "); msg.push_str(&suggestion); Some(msg) }
{ let args = env::args().skip(1).collect::<Vec<_>>(); let config = Config::parse(&args); let changelog_suggestion = check_version(&config); if let Some(suggestion) = &changelog_suggestion { println!("{}", suggestion); } Build::new(config).build(); if let Some(suggestion) = changelog_suggestion { println!("{}", suggestion); println!("note: this message was printed twice to make it more likely to be seen"); } }
unitedworldmoney_de.ts
<TS language="de" version="2.1"> <context> <name>AddressBookPage</name> <message> <source>Right-click to edit address or label</source> <translation>Rechtsklick um Adresse oder Bezeichnung zu bearbeiten</translation> </message> <message> <source>Create a new address</source> <translation>Eine neue Adresse erstellen</translation> </message> <message> <source>&amp;New</source> <translation>&amp;Neu</translation> </message> <message> <source>Copy the currently selected address to the system clipboard</source> <translation>Ausgewählte Adresse in die Zwischenablage kopieren</translation> </message> <message> <source>&amp;Copy</source> <translation>&amp;Kopieren</translation> </message> <message> <source>Delete the currently selected address from the list</source> <translation>Ausgewählte Adresse aus der Liste entfernen</translation> </message> <message> <source>&amp;Delete</source> <translation>&amp;Löschen</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Daten aus der aktuellen Ansicht in eine Datei exportieren</translation> </message> <message> <source>&amp;Export</source> <translation>&amp;Exportieren</translation> </message> <message> <source>C&amp;lose</source> <translation>&amp;Schließen</translation> </message> <message> <source>Choose the address to send coins to</source> <translation>Wählen Sie die Adresse aus, an die Sie UWM überweisen möchten</translation> </message> <message> <source>Choose the address to receive coins with</source> <translation>Wählen Sie die Adresse aus, über die Sie UWM empfangen wollen</translation> </message> <message> <source>C&amp;hoose</source> <translation>&amp;Auswählen</translation> </message> <message> <source>Sending addresses</source> <translation>Zahlungsadressen</translation> </message> <message> <source>Receiving addresses</source> <translation>Empfangsadressen</translation> </message> <message> <source>These are your UnitedWorldMoney addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>Dies sind ihre UnitedWorldMoney-Adressen zum Tätigen von Überweisungen. Bitte prüfen Sie den Betrag und die Empfangsadresse, bevor Sie UWM überweisen.</translation> </message> <message> <source>These are your UnitedWorldMoney addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source> <translation>Dies sind ihre UnitedWorldMoney-Adressen zum Empfangen von Zahlungen. Es wird empfohlen für jede Transaktion eine neue Empfangsadresse zu verwenden.</translation> </message> <message> <source>&amp;Copy Address</source> <translation>&amp;Adresse kopieren</translation> </message> <message> <source>Copy &amp;Label</source> <translation>&amp;Bezeichnung kopieren</translation> </message> <message> <source>&amp;Edit</source> <translation>&amp;Editieren</translation> </message> <message> <source>Export Address List</source> <translation>Adressliste exportieren</translation> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Kommagetrennte Datei (*.csv)</translation> </message> <message> <source>Exporting Failed</source> <translation>Exportieren fehlgeschlagen</translation> </message> <message> <source>There was an error trying to save the address list to %1. Please try again.</source> <translation>Beim Speichern der Adressliste nach %1 ist ein Fehler aufgetreten. 
Bitte erneut versuchen.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <source>Label</source> <translation>Bezeichnung</translation> </message> <message> <source>Address</source> <translation>Adresse</translation> </message> <message> <source>(no label)</source> <translation>(keine Bezeichnung)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <source>Passphrase Dialog</source> <translation>Passphrase Dialog</translation> </message> <message> <source>Enter passphrase</source> <translation>Passphrase eingeben</translation> </message> <message> <source>New passphrase</source> <translation>Neue Passphrase</translation> </message> <message> <source>Repeat new passphrase</source> <translation>Neue Passphrase wiederholen</translation> </message> <message> <source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source> <translation>Verhindert das einfache Überweisen von Geld, falls das Systemkonto kompromittiert wurde. Bietet keine wirkliche Sicherheit.</translation> </message> <message> <source>For anonymization, automint, and staking only</source> <translation>Nur zur Anonymisierung, automatischen Prägung (automint) und Staking</translation> </message> <message> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;ten or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Geben Sie die neue Passphrase für die Wallet ein.&lt;br&gt;Bitte verwenden Sie eine Passphrase bestehend aus &lt;b&gt;10 oder mehr zufälligen Zeichen&lt;/b&gt; oder &lt;b&gt;8 oder mehr Wörtern&lt;/b&gt;.</translation> </message> <message> <source>Encrypt wallet</source> <translation>Wallet verschlüsseln</translation> </message> <message> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Dieser Vorgang benötigt ihre Passphrase, um die Wallet zu entsperren.</translation> </message> <message> <source>Unlock wallet</source> <translation>Wallet entsperren</translation> </message> <message> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Dieser Vorgang benötigt ihre Passphrase, um die Wallet zu entschlüsseln.</translation> </message> <message> <source>Decrypt wallet</source> <translation>Wallet entschlüsseln</translation> </message> <message> <source>Change passphrase</source> <translation>Passphrase ändern</translation> </message> <message> <source>Enter the old and new passphrase to the wallet.</source> <translation>Geben Sie die alte und neue Passphrase für die Wallet ein.</translation> </message> <message> <source>Confirm wallet encryption</source> <translation>Verschlüsselung der Wallet bestätigen</translation> </message> <message> <source>UnitedWorldMoney will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your UWMs from being stolen by malware infecting your computer.</source> <translation>Die Anwendung wird nun geschlossen um die Verschlüsselung abzuschließen. 
Bitte bedenken Sie, dass auch die Verschlüsselung nicht sicher vor Diebstahl ihrer UWMs durch Schadsoftware schützt, die ihren Computer befällt.</translation> </message> <message> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Sind Sie sich sicher, dass Sie Ihre Wallet verschlüsseln möchten?</translation> </message> <message> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR UWM&lt;/b&gt;!</source> <translation>Warnung: Wenn Sie Ihre Wallet verschlüsseln und Ihre Passphrase verlieren, &lt;b&gt;verlieren Sie alle ihre UWMs&lt;/b&gt;!</translation> </message> <message> <source>Wallet encrypted</source> <translation>Wallet verschlüsselt</translation> </message> <message> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>WICHTIG: Alle vorherigen Sicherungen sollten durch die neu erzeugte, verschlüsselte Wallet-Datei ersetzt werden. Aus Sicherheitsgründen werden vorherige Sicherungen der unverschlüsselten Wallet nutzlos, sobald Sie die neue, verschlüsselte Wallet verwenden.</translation> </message> <message> <source>Wallet encryption failed</source> <translation>Verschlüsselung der Wallet fehlgeschlagen</translation> </message> <message> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Die Verschlüsselung der Wallet ist aufgrund eines internen Fehlers fehlgeschlagen. Ihre Wallet wurde nicht verschlüsselt.</translation> </message> <message> <source>The supplied passphrases do not match.</source> <translation>Die eingegebenen Passphrases stimmen nicht überein.</translation> </message> <message> <source>Wallet unlock failed</source> <translation>Entsperrung der Wallet fehlgeschlagen</translation> </message> <message> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Die eingegebene Passphrase zur Entschlüsselung der Wallet ist nicht korrekt.</translation> </message> <message> <source>Wallet decryption failed</source> <translation>Entschlüsselung der Wallet fehlgeschlagen</translation> </message> <message> <source>Wallet passphrase was successfully changed.</source> <translation>Die Passphrase der Wallet wurde erfolgreich geändert.</translation> </message> <message> <source>Warning: The Caps Lock key is on!</source> <translation>Warnung: Die Feststelltaste ist aktiviert!</translation> </message> </context> <context> <name>BanTableModel</name> <message> <source>IP/Netmask</source> <translation>IP/Netzmaske</translation> </message> <message> <source>Banned Until</source> <translation>Gesperrt bis</translation> </message> </context> <context> <name>Bip38ToolDialog</name> <message> <source>BIP 38 Tool</source> <translation>BIP 38 Tool</translation> </message> <message> <source>&amp;BIP 38 Encrypt</source> <translation>&amp;BIP 38 Verschlüsselung</translation> </message> <message> <source>Address:</source> <translation>Adresse:</translation> </message> <message> <source>Enter a UnitedWorldMoney Address that you would like to encrypt using BIP 38. Enter a passphrase in the middle box. Press encrypt to compute the encrypted private key.</source> <translation>Bitte die UnitedWorldMoney-Adresse eingeben, welche mittels BIP 38 verschlüsselt werden soll. 
Bitte ein Passwort in die mittlere Box eintragen. Klicken Sie auf "Verschlüsslen" um den privaten Schlüssel zu generieren.</translation> </message> <message> <source>The UnitedWorldMoney address to encrypt</source> <translation>Die zu verschlüsselnde UnitedWorldMoney-Adresse</translation> </message> <message> <source>Choose previously used address</source> <translation>Bereits verwendete Adresse auswählen</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Adresse aus der Zwischenablage einfügen</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>Passphrase: </source> <translation>Passphrase: </translation> </message> <message> <source>Encrypted Key:</source> <translation>Geschützer Schlüssel:</translation> </message> <message> <source>Copy the current signature to the system clipboard</source> <translation>Kopiert die aktuelle Signatur in die Zwischenablage</translation> </message> <message> <source>Encrypt the private key for this UnitedWorldMoney address</source> <translation>Den privaten Schlüssel für diese UnitedWorldMoney-Adresse verschlüsseln</translation> </message> <message> <source>Reset all fields</source> <translation>Alle Felder zurücksetzen</translation> </message> <message> <source>The encrypted private key</source> <translation>Der verschlüsselte private Schlüssel</translation> </message> <message> <source>Decrypt the entered key using the passphrase</source> <translation>Den eigegebenen Schlüssel mittels Passwort entschlüsseln</translation> </message> <message> <source>Encrypt &amp;Key</source> <translation>Verschlüsselt &amp;Schlüssel</translation> </message> <message> <source>Clear &amp;All</source> <translation>&amp;Alles zurücksetzen</translation> </message> <message> <source>&amp;BIP 38 Decrypt</source> <translation>&amp;BIP 38 Entschlüsseln</translation> </message> <message> <source>Enter the BIP 38 encrypted private key. Enter the passphrase in the middle box. Click Decrypt Key to compute the private key. After the key is decrypted, clicking 'Import Address' will add this private key to the wallet.</source> <translation>Geben Sie den BIP 38 verschlüsselten privaten Schlüssel ein. Geben Sie die Passphrase in das mittlere Feld ein. Klicken Sie auf 'Entschlüsseln', um den privaten Schlüssel zu berechnen. Nach erfolgreicher Entschlüsselung, wird durch Klicken auf 'Adresse importieren' dieser private Schlüssel der Wallet hinzugefügt.</translation> </message> <message> <source>Decrypt &amp;Key</source> <translation>Entschlüsseln &amp;Schlüssel</translation> </message> <message> <source>Decrypted Key:</source> <translation>Entschlüsselter Schlüssel:</translation> </message> <message> <source>Import Address</source> <translation>Adresse importieren</translation> </message> <message> <source>Click "Decrypt Key" to compute key</source> <translation>Klicke "Schlüssel entschlüsseln" um den Schlüssel zu berechnen</translation> </message> <message> <source>The entered passphrase is invalid. </source> <translation>Die eingegebene Passphrase ist ungültig. 
</translation> </message> <message> <source>Allowed: 0-9,a-z,A-Z,</source> <translation>Zulässig: 0-9,a-z,A-Z,</translation> </message> <message> <source>The entered address is invalid.</source> <translation>Die eingegebene Adresse ist falsch</translation> </message> <message> <source>Please check the address and try again.</source> <translation>Bitte die Adresse prüfen und erneut eingeben</translation> </message> <message> <source>The entered address does not refer to a key.</source> <translation>Die eingegebene Adresse passt zu keinem Schlüssel</translation> </message> <message> <source>Wallet unlock was cancelled.</source> <translation>Entsperrung der Wallet wurde abgebrochen.</translation> </message> <message> <source>Private key for the entered address is not available.</source> <translation>Kein privater Schlüssel für die eingegebene Adresse verfügbar</translation> </message> <message> <source>Failed to decrypt.</source> <translation>Entschlüsselung fehlgeschlagen.</translation> </message> <message> <source>Please check the key and passphrase and try again.</source> <translation>Bitte überprüfen Sie den Schlüssel und die Passphrase und versuchen Sie es erneut.</translation> </message> <message> <source>Data Not Valid.</source> <translation>Daten ungültig.</translation> </message> <message> <source>Please try again.</source> <translation>Bitte versuchen Sie es erneut.</translation> </message> <message> <source>Please wait while key is imported</source> <translation>Bitte warten. Schlüssel wird importiert..</translation> </message> <message> <source>Key Already Held By Wallet</source> <translation>Schlüssel bereits in der Wallet vorhanden</translation> </message> <message> <source>Error Adding Key To Wallet</source> <translation>Fehler beim Hinzufügen des Schlüssels zur Wallet</translation> </message> <message> <source>Successfully Added Private Key To Wallet</source> <translation>Privater Schlüssel erfolgreich zur Wallet hinzugefügt</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <source>Wallet</source> <translation>Wallet</translation> </message> <message> <source>Node</source> <translation>Node</translation> </message> <message> <source>&amp;Overview</source> <translation>&amp;Übersicht</translation> </message> <message> <source>Show general overview of wallet</source> <translation>Gesamtübersicht der Wallet anzeigen</translation> </message> <message> <source>&amp;Send</source> <translation>&amp;Überweisen</translation> </message> <message> <source>&amp;Receive</source> <translation>&amp;Empfangen</translation> </message> <message> <source>&amp;Transactions</source> <translation>&amp;Transaktionen</translation> </message> <message> <source>Browse transaction history</source> <translation>Transaktionsverlauf durchsuchen</translation> </message> <message> <source>Privacy Actions for zUWM</source> <translation>Datenschutzmaßnahmen für zUWM</translation> </message> <message> <source>E&amp;xit</source> <translation>&amp;Beenden</translation> </message> <message> <source>Quit application</source> <translation>Anwendung beenden</translation> </message> <message> <source>About &amp;Qt</source> <translation>Über &amp;Qt</translation> </message> <message> <source>Show information about Qt</source> <translation>Informationen über Qt anzeigen</translation> </message> <message> <source>&amp;Options...</source> <translation>&amp;Konfiguration...</translation> </message> <message> <source>&amp;Show / Hide</source> <translation>&amp;Anzeigen / 
Verstecken</translation> </message> <message> <source>Show or hide the main Window</source> <translation>Das Hauptfenster anzeigen oder verstecken</translation> </message> <message> <source>&amp;Encrypt Wallet...</source> <translation>Wallet &amp;verschlüsseln...</translation> </message> <message> <source>Encrypt the private keys that belong to your wallet</source> <translation>Verschlüsselt die zu Ihrer Wallet gehörenden privaten Schlüssel</translation> </message> <message> <source>&amp;Backup Wallet...</source> <translation>Wallet &amp;sichern...</translation> </message> <message> <source>Backup wallet to another location</source> <translation>Sicherung der Wallet an einem anderen Ort speichern</translation> </message> <message> <source>&amp;Change Passphrase...</source> <translation>Passphrase &amp;ändern...</translation> </message> <message> <source>Change the passphrase used for wallet encryption</source> <translation>Ändert die Passphrase, die für die Verschlüsselung der Wallet benutzt wird</translation> </message> <message> <source>&amp;Unlock Wallet...</source> <translation>Wallet &amp;entsperren</translation> </message> <message> <source>Unlock wallet</source> <translation>Wallet entsperren</translation> </message> <message> <source>&amp;Lock Wallet</source> <translation>Wallet &amp;sperren</translation> </message> <message> <source>Sign &amp;message...</source> <translation>Nachricht &amp;signieren...</translation> </message> <message> <source>&amp;Verify message...</source> <translation>&amp;Nachricht prüfen...</translation> </message> <message> <source>&amp;Information</source> <translation>&amp;Information</translation> </message> <message> <source>Show diagnostic information</source> <translation>Diagnoseinformation anzeigen</translation> </message> <message> <source>&amp;Debug console</source> <translation>&amp;Debugkonsole</translation> </message> <message> <source>Open debugging console</source> <translation>Debugkonsole öffnen</translation> </message> <message> <source>&amp;Network Monitor</source> <translation>&amp;Netzwerkmonitor</translation> </message> <message> <source>Show network monitor</source> <translation>Netzwerkmonitor anzeigen</translation> </message> <message> <source>&amp;Peers list</source> <translation>&amp;Gegenstellen-Liste</translation> </message> <message> <source>Show peers info</source> <translation>Informationen zu Gegenstellen anzeigen</translation> </message> <message> <source>Wallet &amp;Repair</source> <translation>Wallet-&amp;Reparatur</translation> </message> <message> <source>Show wallet repair options</source> <translation>Optionen zur Wallet-Reparatur anzeigen</translation> </message> <message> <source>Open configuration file</source> <translation>Konfigurationsdatei öffnen</translation> </message> <message> <source>Show Automatic &amp;Backups</source> <translation>Automatische &amp;Sicherheitskopien anzeigen</translation> </message> <message> <source>Show automatically created wallet backups</source> <translation>Automatisch erzeugte Wallet-Sicherheitskopien anzeigen</translation> </message> <message> <source>&amp;Sending addresses...</source> <translation>&amp;Zahlungsadressen...</translation> </message> <message> <source>Show the list of used sending addresses and labels</source> <translation>Liste verwendeter Zahlungsadressen und Bezeichnungen anzeigen</translation> </message> <message> <source>&amp;Receiving addresses...</source> <translation>&amp;Empfangsadressen...</translation> </message> <message> <source>Show the list of used 
receiving addresses and labels</source> <translation>Liste verwendeter Empfangsadressen und Bezeichnungen anzeigen</translation> </message> <message> <source>&amp;Multisignature creation...</source> <translation>&amp;Mehrfachsignatur erstellen...</translation> </message> <message> <source>Create a new multisignature address and add it to this wallet</source> <translation>Erstelle eine neue mehrfach signierte Adresse und füge sie diesem Wallet hinzu</translation> </message> <message> <source>&amp;Multisignature spending...</source> <translation>&amp;Mehrfachsignatur überweisen...</translation> </message> <message> <source>Spend from a multisignature address</source> <translation>Von einer mehrfach signierten Adresse überweisen</translation> </message> <message> <source>&amp;Multisignature signing...</source> <translation>&amp;Mehrfachsignatur signieren...</translation> </message> <message> <source>Sign with a multisignature address</source> <translation>Signiere mit einer mehrfach signierten Adresse</translation> </message> <message> <source>Open &amp;URI...</source> <translation>&amp;URI öffnen...</translation> </message> <message> <source>&amp;Command-line options</source> <translation>&amp;Kommandozeilenoptionen</translation> </message> <message> <source>Synchronizing additional data: %p%</source> <translation>Synchronisiere zusätzliche Daten: %p%</translation> </message> <message> <source>%1 behind. Scanning block %2</source> <translation>%1 im Rückstand. Scanne Block %2</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt; for anonymization and staking only</source> <translation>Wallet ist &lt;b&gt;verschlüsselt&lt;/b&gt; und aktuell nur zum Anonymisieren und "staking"&lt;b&gt;entsperrt&lt;/b&gt;</translation> </message> <message> <source>Tor is &lt;b&gt;enabled&lt;/b&gt;: %1</source> <translation>Tor ist &lt;b&gt;aktiviert&lt;/b&gt;: %1</translation> </message> <message> <source>&amp;File</source> <translation>&amp;Datei</translation> </message> <message> <source>&amp;Settings</source> <translation>&amp;Einstellungen</translation> </message> <message> <source>&amp;Tools</source> <translation>&amp;Werkzeuge</translation> </message> <message> <source>&amp;Help</source> <translation>&amp;Hilfe</translation> </message> <message> <source>Tabs toolbar</source> <translation>Registerkartenleiste</translation> </message> <message> <source>UnitedWorldMoney Core</source> <translation>UnitedWorldMoney Core</translation> </message> <message> <source>Send coins to a UnitedWorldMoney address</source> <translation>An eine UnitedWorldMoney-Adresse überweisen</translation> </message> <message> <source>Request payments (generates QR codes and unitedworldmoney: URIs)</source> <translation>Zahlung anfordern (QR-Code Generierung und unitedworldmoney: URIs)</translation> </message> <message> <source>&amp;Privacy</source> <translation>&amp;Privatsphäre</translation> </message> <message> <source>&amp;Masternodes</source> <translation>&amp;Masternodes</translation> </message> <message> <source>Browse masternodes</source> <translation>Masternodes durchsuchen</translation> </message> <message> <source>&amp;About UnitedWorldMoney Core</source> <translation>Über UnitedWorldMoney Core</translation> </message> <message> <source>Show information about UnitedWorldMoney Core</source> <translation>Zeigt Informationen über UnitedWorldMoney Core</translation> </message> <message> <source>Modify configuration options for UnitedWorldMoney</source> 
<translation>Konfiguration von UnitedWorldMoney verändern</translation> </message> <message> <source>Sign messages with your UnitedWorldMoney addresses to prove you own them</source> <translation>Unterschreibt eine Nachricht mit Ihrer UnitedWorldMoney-Adresse und beweist, dass sie Ihnen gehört</translation> </message> <message> <source>Verify messages to ensure they were signed with specified UnitedWorldMoney addresses</source> <translation>Überprüft, ob eine Nachricht mit der angegebenen UnitedWorldMoney-Adresse signiert wurde</translation> </message> <message> <source>&amp;BIP38 tool</source> <translation>&amp;BIP38 Hilfsprogramm</translation> </message> <message> <source>Encrypt and decrypt private keys using a passphrase</source> <translation>Private Schlüssel mittels Passphrase ver- und entschlüsseln</translation> </message> <message> <source>&amp;MultiSend</source> <translation>&amp;MultiSend</translation> </message> <message> <source>MultiSend Settings</source> <translation>MultiSend Einstellungen</translation> </message> <message> <source>Open Wallet &amp;Configuration File</source> <translation>Öffne Wallet &amp;Einstellungsdatei</translation> </message> <message> <source>Open &amp;Masternode Configuration File</source> <translation>Öffne &amp;Masternode Einstellungsdatei</translation> </message> <message> <source>Open Masternode configuration file</source> <translation>Öffne Masternode Einstellungsdatei</translation> </message> <message> <source>Open a UnitedWorldMoney: URI or payment request</source> <translation>Öffne eine UnitedWorldMoney: URI oder Zahlungsanfrage</translation> </message> <message> <source>&amp;Blockchain explorer</source> <translation>&amp;Blockchain Betrachter</translation> </message> <message> <source>Block explorer window</source> <translation>Blockchain Betrachter Fenster</translation> </message> <message> <source>Show the UnitedWorldMoney Core help message to get a list with possible UnitedWorldMoney command-line options</source> <translation>Zeige die UnitedWorldMoney-Core Hilfe, um mögliche UnitedWorldMoney Kommando-Zeilen-Optionen anzuzeigen</translation> </message> <message> <source>UnitedWorldMoney Core client</source> <translation>UnitedWorldMoney Core Client</translation> </message> <message> <source>Synchronizing with network...</source> <translation>Synchronisiere mit Netzwerk...</translation> </message> <message> <source>Importing blocks from disk...</source> <translation>Importiere Blöcke von Datenträger...</translation> </message> <message> <source>Reindexing blocks on disk...</source> <translation>Reindiziere Blöcke auf Datenträger...</translation> </message> <message> <source>No block source available...</source> <translation>Keine Blockquelle verfügbar...</translation> </message> <message> <source>Up to date</source> <translation>Auf aktuellem Stand</translation> </message> <message> <source>%1 and %2</source> <translation>%1 und %2</translation> </message> <message> <source>Catching up...</source> <translation>Hole auf...</translation> </message> <message> <source>Last received block was generated %1 ago.</source> <translation>Der letzte empfangene Block ist %1 alt.</translation> </message> <message> <source>Transactions after this will not yet be visible.</source> <translation>Transaktionen hiernach werden noch nicht angezeigt.</translation> </message> <message> <source>Error</source> <translation>Fehler</translation> </message> <message> <source>Warning</source> <translation>Warnung</translation> </message> <message> 
<source>Information</source> <translation>Hinweis</translation> </message> <message> <source>Sent transaction</source> <translation>Gesendete Transaktion</translation> </message> <message> <source>Incoming transaction</source> <translation>Eingehende Transaktion</translation> </message> <message> <source>Sent MultiSend transaction</source> <translation>Gesendete MultiSend-Transaktion</translation> </message> <message> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Datum: %1 Betrag: %2 Typ: %3 Adresse: %4</translation> </message> <message> <source>Staking is active MultiSend: %1</source> <translation>Staking ist aktiviert MultiSend: %1</translation> </message> <message> <source>Active</source> <translation>Aktiv</translation> </message> <message> <source>Not Active</source> <translation>Nicht aktiv</translation> </message> <message> <source>Staking is not active MultiSend: %1</source> <translation>Staking ist nicht aktiviert MultiSend: %1</translation> </message> <message> <source>AutoMint is currently enabled and set to </source> <translation>Automatisches Prägen ist aktiv und eingestellt auf </translation> </message> <message> <source>AutoMint is disabled</source> <translation>Automatisches Prägen ist deaktiviert</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Wallet ist &lt;b&gt;verschlüsselt&lt;/b&gt; und aktuell &lt;b&gt;entsperrt&lt;/b&gt;</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Wallet ist &lt;b&gt;verschlüsselt&lt;/b&gt; und aktuell &lt;b&gt;gesperrt&lt;/b&gt;</translation> </message> </context> <context> <name>BlockExplorer</name> <message> <source>Blockchain Explorer</source> <translation>Blockchain Explorer</translation> </message> <message> <source>Back</source> <translation>Zurück</translation> </message> <message> <source>Forward</source> <translation>Vor</translation> </message> <message> <source>Address / Block / Transaction</source> <translation>Adresse / Block / Transaktion</translation> </message> <message> <source>Search</source> <translation>Suche</translation> </message> <message> <source>TextLabel</source> <translation>TextEtikett</translation> </message> <message> <source>Not all transactions will be shown. To view all transactions you need to set txindex=1 in the configuration file (unitedworldmoney.conf).</source> <translation>Nicht alle Transaktionen können angezeigt werden. 
Um alle Transaktionen zu sehen, müssen Sie die Option "txindex=1" in der "unitedworldmoney.conf" Konfigurationsdatei hinzufügen.</translation> </message> </context> <context> <name>ClientModel</name> <message> <source>Total: %1 (IPv4: %2 / IPv6: %3 / Tor: %4 / Unknown: %5)</source> <translation>Total: %1 (IPv4: %2 / IPv6: %3 / Tor: %4 / Unbekannt: %5)</translation> </message> <message> <source>Network Alert</source> <translation>Netzwerkalarm</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <source>Quantity:</source> <translation>Anzahl:</translation> </message> <message> <source>Bytes:</source> <translation>Byte:</translation> </message> <message> <source>Amount:</source> <translation>Betrag:</translation> </message> <message> <source>Priority:</source> <translation>Priorität:</translation> </message> <message> <source>Fee:</source> <translation>Gebühr:</translation> </message> <message> <source>Coin Selection</source> <translation>Münzenauswahl</translation> </message> <message> <source>Dust:</source> <translation>Abzug:</translation> </message> <message> <source>After Fee:</source> <translation>Nach Gebühren:</translation> </message> <message> <source>Change:</source> <translation>Wechselgeld:</translation> </message> <message> <source>(un)select all</source> <translation>(nicht) alle auswählen</translation> </message> <message> <source>toggle lock state</source> <translation>Sperrzustand umschalten</translation> </message> <message> <source>Tree mode</source> <translation>Baumansicht</translation> </message> <message> <source>List mode</source> <translation>Listenansicht</translation> </message> <message> <source>(1 locked)</source> <translation>(1 gesperrt)</translation> </message> <message> <source>Amount</source> <translation>Betrag</translation> </message> <message> <source>Received with label</source> <translation>Empfangen mit Bezeichnung</translation> </message> <message> <source>Received with address</source> <translation>Empfangen mit Adresse</translation> </message> <message> <source>Type</source> <translation>Art</translation> </message> <message> <source>Date</source> <translation>Datum</translation> </message> <message> <source>Confirmations</source> <translation>Bestätigungen</translation> </message> <message> <source>Confirmed</source> <translation>Bestätigt</translation> </message> <message> <source>Priority</source> <translation>Priorität</translation> </message> <message> <source>Copy address</source> <translation>Adresse kopieren</translation> </message> <message> <source>Copy label</source> <translation>Bezeichnung kopieren</translation> </message> <message> <source>Copy amount</source> <translation>Betrag kopieren</translation> </message> <message> <source>Copy transaction ID</source> <translation>Transaktions-ID kopieren</translation> </message> <message> <source>Lock unspent</source> <translation>Sperre Verwendung</translation> </message> <message> <source>Unlock unspent</source> <translation>Entsperre Verwendung</translation> </message> <message> <source>Copy quantity</source> <translation>Anzahl kopieren</translation> </message> <message> <source>Copy fee</source> <translation>Gebühr kopieren</translation> </message> <message> <source>Copy after fee</source> <translation>Kopiere nach Gebühr</translation> </message> <message> <source>Copy bytes</source> <translation>Kopiere Bytes</translation> </message> <message> <source>Copy priority</source> <translation>Kopiere Priorität</translation> </message> <message> <source>Copy 
dust</source> <translation>Kopiere Abzug</translation> </message> <message> <source>Copy change</source> <translation>Kopiere Wechselgeld</translation> </message> <message> <source>Please switch to "List mode" to use this function.</source> <translation>Bitte wechsle zur "Listenansicht" um diese Funktion zu verwenden.</translation> </message> <message> <source>highest</source> <translation>höchste</translation> </message> <message> <source>higher</source> <translation>höher</translation> </message> <message> <source>high</source> <translation>hoch</translation> </message> <message> <source>medium-high</source> <translation>mittel-hoch</translation> </message> <message> <source>medium</source> <translation>mittel</translation> </message> <message> <source>low-medium</source> <translation>niedrig-mittel</translation> </message> <message> <source>low</source> <translation>niedrig</translation> </message> <message> <source>lower</source> <translation>niedriger</translation> </message> <message> <source>lowest</source> <translation>am niedrigsten</translation> </message> <message> <source>(%1 locked)</source> <translation>(%1 gesperrt)</translation> </message> <message> <source>none</source> <translation>keine</translation> </message> <message> <source>yes</source> <translation>ja</translation> </message> <message> <source>no</source> <translation>nein</translation> </message> <message> <source>This label turns red, if the transaction size is greater than 1000 bytes.</source> <translation>Diese Bezeichnung wird rot, wenn die Transaktion größer als 1000 Bytes ist.</translation> </message> <message> <source>This means a fee of at least %1 per kB is required.</source> <translation>Das heißt, eine Gebühr von mindestens %1 pro kB ist notwendig.</translation> </message> <message> <source>Can vary +/- 1 byte per input.</source> <translation>Kann um +/-1 Byte pro Eingang variieren.</translation> </message> <message> <source>Transactions with higher priority are more likely to get included into a block.</source> <translation>Transaktionen mit höherer Priorität werden eher in einen Block aufgenommen.</translation> </message> <message> <source>This label turns red, if the priority is smaller than "medium".</source> <translation>Diese Bezeichnung wird rot, wenn die Priorität kleiner als "mittel" ist.</translation> </message> <message> <source>This label turns red, if any recipient receives an amount smaller than %1.</source> <translation>Diese Bezeichnung wird rot, wenn ein Empfänger einen Betrag erhält, der kleiner ist als %1.</translation> </message> <message> <source>Can vary +/- %1 uuwm per input.</source> <translation>Kann um +/- %1 uuwm pro Eingang variieren.</translation> </message> <message> <source>(no label)</source> <translation>(keine Bezeichnung)</translation> </message> <message> <source>change from %1 (%2)</source> <translation>Wechselgeld von %1 (%2)</translation> </message> <message> <source>(change)</source> <translation>(Wechselgeld)</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <source>Edit Address</source> <translation>Adresse bearbeiten</translation> </message> <message> <source>&amp;Label</source> <translation>&amp;Bezeichnung</translation> </message> <message> <source>The label associated with this address list entry</source> <translation>Bezeichnung, die dem Adresslisteneintrag zugeordnet ist</translation> </message> <message> <source>&amp;Address</source> <translation>&amp;Adresse</translation> </message> <message> <source>The address 
associated with this address list entry. This can only be modified for sending addresses.</source> <translation>Adresse, die dem Adresslisteneintrag zugeordnet ist. Diese kann nur bei Zahlungsadressen verändert werden.</translation> </message> <message> <source>New receiving address</source> <translation>Neue Empfangsadresse</translation> </message> <message> <source>New sending address</source> <translation>Neue Zahlungsadresse</translation> </message> <message> <source>Edit receiving address</source> <translation>Empfangsadresse bearbeiten</translation> </message> <message> <source>Edit sending address</source> <translation>Zahlungsadresse bearbeiten</translation> </message> <message> <source>The entered address "%1" is not a valid UnitedWorldMoney address.</source> <translation>Die eingegebene Adresse "%1" ist keine gültige UnitedWorldMoney-Adresse.</translation> </message> <message> <source>The entered address "%1" is already in the address book.</source> <translation>Die eingegebene Adresse "%1" befindet sich bereits im Adressbuch.</translation> </message> <message> <source>Could not unlock wallet.</source> <translation>Wallet konnte nicht entsperrt werden.</translation> </message> <message> <source>New key generation failed.</source> <translation>Erzeugung eines neuen Schlüssels fehlgeschlagen.</translation> </message> </context> <context> <name>FreespaceChecker</name> <message> <source>A new data directory will be created.</source> <translation>Es wird ein neues Datenverzeichnis angelegt.</translation> </message> <message> <source>name</source> <translation>Name</translation> </message> <message> <source>Directory already exists. Add %1 if you intend to create a new directory here.</source> <translation>Verzeichnis existiert bereits. Fügen Sie %1 an, wenn Sie beabsichtigen hier ein neues Verzeichnis anzulegen.</translation> </message> <message> <source>Path already exists, and is not a directory.</source> <translation>Pfad existiert bereits und ist kein Verzeichnis.</translation> </message> <message> <source>Cannot create data directory here.</source> <translation>Datenverzeichnis kann hier nicht angelegt werden.</translation> </message> </context> <context> <name>GovernancePage</name> <message> <source>Form</source> <translation>Formular</translation> </message> <message> <source>0</source> <translation>0</translation> </message> </context> <context> <name>HelpMessageDialog</name> <message> <source>version</source> <translation>Version</translation> </message> <message> <source>UnitedWorldMoney Core</source> <translation>UnitedWorldMoney Core</translation> </message> <message> <source>(%1-bit)</source> <translation>(%1-Bit)</translation> </message> <message> <source>About UnitedWorldMoney Core</source> <translation>Über UnitedWorldMoney Core</translation> </message> <message> <source>Command-line options</source> <translation>Kommandozeilenoptionen</translation> </message> <message> <source>Usage:</source> <translation>Benutzung:</translation> </message> <message> <source>command-line options</source> <translation>Kommandozeilenoptionen</translation> </message> <message> <source>UI Options:</source> <translation>UI Optionen:</translation> </message> <message> <source>Choose data directory on startup (default: %u)</source> <translation>Wähle Datenverzeichnis beim Start (Standard: %u)</translation> </message> <message> <source>Show splash screen on startup (default: %u)</source> <translation>Zeige Startbildschirm beim Start (Standard: %u)</translation> </message> <message> 
<source>Set language, for example "de_DE" (default: system locale)</source> <translation>Sprache festlegen, z.B. "de_DE" (Standard: Systemstandard)</translation> </message> <message> <source>Start minimized</source> <translation>Minimiert starten</translation> </message> <message> <source>Set SSL root certificates for payment request (default: -system-)</source> <translation>SSL-Wurzelzertifikate für Zahlungsanforderungen festlegen (Standard: Systemstandard)</translation> </message> </context> <context> <name>Intro</name> <message> <source>Welcome</source> <translation>Willkommen</translation> </message> <message> <source>Welcome to UnitedWorldMoney Core.</source> <translation>Willkommen zu UnitedWorldMoney Core.</translation> </message> <message> <source>As this is the first time the program is launched, you can choose where UnitedWorldMoney Core will store its data.</source> <translation>Da dies das erste Mal ist, dass Sie UnitedWorldMoney Core starten, legen Sie jetzt bitte fest, an welchem Ort die Daten gespeichert werden sollen.</translation> </message> <message> <source>UnitedWorldMoney Core will download and store a copy of the UnitedWorldMoney block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source> <translation>UnitedWorldMoney Core wird die Blockchain laden und lokal speichern. Dafür sind mindestens %1GB freier Speicherplatz erforderlich. Der Speicherbedarf wird mit der Zeit anwachsen. Die Wallet wird ebenfalls in diesem Verzeichnis gespeichert.</translation> </message> <message> <source>Use the default data directory</source> <translation>Standard-Datenverzeichnis verwenden</translation> </message> <message> <source>Use a custom data directory:</source> <translation>Ein benutzerdefiniertes Datenverzeichnis verwenden:</translation> </message> <message> <source>UnitedWorldMoney Core</source> <translation>UnitedWorldMoney Core</translation> </message> <message> <source>Error: Specified data directory "%1" cannot be created.</source> <translation>Fehler: Angegebenes Datenverzeichnis "%1" kann nicht angelegt werden.</translation> </message> <message> <source>Error</source> <translation>Fehler</translation> </message> <message> <source>%1 GB of free space available</source> <translation>%1 GB freier Speicherplatz verfügbar</translation> </message> <message> <source>(of %1 GB needed)</source> <translation>(von benötigten %1 GB)</translation> </message> </context> <context> <name>MasternodeList</name> <message> <source>Form</source> <translation>Formular</translation> </message> <message> <source>MASTERNODES</source> <translation>MASTERNODES</translation> </message> <message> <source>Note: Status of your masternodes in local wallet can potentially be slightly incorrect.&lt;br /&gt;Always wait for wallet to sync additional data and then double check from another node&lt;br /&gt;if your node should be running but you still see "MISSING" in "Status" field.</source> <translation>Hinweis: Der Status Ihrer Masternodes kann in der lokalen Wallet möglicherweise falsch sein.&lt;br /&gt;Warten Sie, bis die Wallet zusätzliche Daten synchronisiert hat. 
Wenn Ihr Node läuft,&lt;br /&gt;aber trotzdem "MISSING" im Feld "Status" steht, überprüfen Sie den Status von einem anderen Knoten aus.</translation> </message> <message> <source>Alias</source> <translation>Name</translation> </message> <message> <source>Address</source> <translation>Adresse</translation> </message> <message> <source>Protocol</source> <translation>Protokoll</translation> </message> <message> <source>Status</source> <translation>Status</translation> </message> <message> <source>Active</source> <translation>Aktiv</translation> </message> <message> <source>Last Seen (UTC)</source> <translation>Zuletzt gesehen (UTC)</translation> </message> <message> <source>Pubkey</source> <translation>Öffentlicher Schlüssel</translation> </message> <message> <source>S&amp;tart alias</source> <translation>S&amp;tarten</translation> </message> <message> <source>Start &amp;all</source> <translation>&amp;Alle starten</translation> </message> <message> <source>Start &amp;MISSING</source> <translation>Starte nur &amp;MISSING</translation> </message> <message> <source>&amp;Update status</source> <translation>Stat&amp;us aktualisieren</translation> </message> <message> <source>Status will be updated automatically in (sec):</source> <translation>Status wird automatisch aktualisiert in (Sekunden):</translation> </message> <message> <source>0</source> <translation>0</translation> </message> <message> <source>Start alias</source> <translation>Starten</translation> </message> <message> <source>Confirm masternode start</source> <translation>Masternode-Start bestätigen</translation> </message> <message> <source>Are you sure you want to start masternode %1?</source> <translation>Sind Sie sicher, dass Sie den Masternode %1 starten wollen?</translation> </message> <message> <source>Confirm all masternodes start</source> <translation>Start aller Masternodes bestätigen</translation> </message> <message> <source>Are you sure you want to start ALL masternodes?</source> <translation>Sind Sie sicher, dass Sie alle Masternodes starten wollen?</translation> </message> <message> <source>Command is not available right now</source> <translation>Befehl steht zurzeit nicht zur Verfügung</translation> </message> <message> <source>You can't use this command until masternode list is synced</source> <translation>Sie können diesen Befehl nicht ausführen, bevor die Masternode-Liste synchronisiert ist</translation> </message> <message> <source>Confirm missing masternodes start</source> <translation>Start fehlender Masternodes bestätigen</translation> </message> <message> <source>Are you sure you want to start MISSING masternodes?</source> <translation>Sind Sie sicher, dass Sie alle fehlenden Masternodes starten wollen?</translation> </message> </context> <context> <name>MultiSendDialog</name> <message> <source>MultiSend</source> <translation>MultiSend</translation> </message> <message> <source>Enter whole numbers 1 - 100</source> <translation>Geben Sie Ganzzahlen zwischen 1 - 100 ein</translation> </message> <message> <source>Enter Address to Send to</source> <translation>Zieladresse eingeben</translation> </message> <message> <source>MultiSend allows you to automatically send up to 100% of your stake or masternode reward to a list of other UnitedWorldMoney addresses after it matures. To Add: enter percentage to give and UnitedWorldMoney address to add to the MultiSend vector. To Delete: Enter address to delete and press delete. 
MultiSend will not be activated unless you have clicked Activate</source> <translation>MultiSend ermöglicht es, automatisch bis zu 100% Ihrer Stake- oder Masternode-Belohnung nach deren Reifung an eine Liste anderer UnitedWorldMoney-Adressen zu senden. Zum Hinzufügen: Geben Sie den Prozentsatz und die UnitedWorldMoney-Adresse ein, die dem MultiSend-Vektor hinzugefügt werden soll. Zum Entfernen: Geben Sie die zu löschende Adresse ein und drücken Sie Löschen. MultiSend wird erst aktiviert, wenn Sie auf Aktivieren geklickt haben.</translation> </message> <message> <source>Add to MultiSend Vector</source> <translation>Füge zum MultiSend-Vektor hinzu</translation> </message> <message> <source>Add</source> <translation>Hinzufügen</translation> </message> <message> <source>Deactivate MultiSend</source> <translation>Deaktiviere MultiSend</translation> </message> <message> <source>Deactivate</source> <translation>Deaktivieren</translation> </message> <message> <source>Choose an address from the address book</source> <translation>Wähle eine Adresse aus dem Adressbuch</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Percentage of stake to send</source> <translation>Prozentsatz des Stakes zum Senden</translation> </message> <message> <source>Percentage:</source> <translation>Prozent:</translation> </message> <message> <source>Address to send portion of stake to</source> <translation>Adresse, an die der Staking-Anteil gesendet wird</translation> </message> <message> <source>Address:</source> <translation>Adresse:</translation> </message> <message> <source>Label:</source> <translation>Bezeichnung:</translation> </message> <message> <source>Enter a label for this address to add it to your address book</source> <translation>Adressbezeichnung eingeben, diese wird zusammen mit der Adresse dem Adressbuch hinzugefügt</translation> </message> <message> <source>Delete Address From MultiSend Vector</source> <translation>Lösche Adresse von MultiSend-Vektor</translation> </message> <message> <source>Delete</source> <translation>Löschen</translation> </message> <message> <source>Activate MultiSend</source> <translation>Aktiviere MultiSend</translation> </message> <message> <source>Activate</source> <translation>Aktivieren</translation> </message> <message> <source>View MultiSend Vector</source> <translation>Zeige MultiSend-Vektor</translation> </message> <message> <source>View MultiSend</source> <translation>Zeige MultiSend</translation> </message> <message> <source>Send For Stakes</source> <translation>Für Stakes versenden</translation> </message> <message> <source>Send For Masternode Rewards</source> <translation>Für Masternode Rewards versenden</translation> </message> <message> <source>(no label)</source> <translation>(keine Bezeichnung)</translation> </message> <message> <source>Please Enter 1 - 100 for percent.</source> <translation>Bitte eine Zahl zwischen 1-100 in Prozent eingeben.</translation> </message> </context> <context> <name>MultisigDialog</name> <message> <source>Multisignature Address Interactions</source> <translation>Mehrfachsignatur- und Adressinteraktionen</translation> </message> <message> <source>Create MultiSignature &amp;Address</source> <translation>Erstelle Mehrfachsignatur-&amp;Adresse</translation> </message> <message> <source>How many people must sign to verify a transaction</source> <translation>Wie viele Personen müssen diese Transaktion signieren, um sie zu verifizieren?</translation> </message> <message> <source>Enter the minimum number of signatures required 
to sign transactions</source> <translation>Anzahl der minimal benötigten Signaturen, um Transaktionen zu signieren.</translation> </message> <message> <source>Address Label:</source> <translation>Adressbezeichnung:</translation> </message> <message> <source>Add another address that could sign to verify a transaction from the multisig address.</source> <translation>Berechtige eine weitere Adresse, eine Signatur zur Verifikation einer Transaktion von einer mehrfach signierten Adresse zu leisten.</translation> </message> <message> <source>&amp;Add Address / Key</source> <translation>&amp;Füge Adresse / Schlüssel hinzu</translation> </message> <message> <source>Local addresses or public keys that can sign:</source> <translation>Lokale Adressen oder öffentliche Schlüssel, die signieren dürfen:</translation> </message> <message> <source>Create a new multisig address</source> <translation>Erstelle eine neue Mehrfachsignatur-Adresse</translation> </message> <message> <source>C&amp;reate</source> <translation>C&amp;reate</translation> </message> <message> <source>Status:</source> <translation>Status:</translation> </message> <message> <source>Use below to quickly import an address by its redeem. Don't forget to add a label before clicking import! Keep in mind, the wallet will rescan the blockchain to find transactions containing the new address. Please be patient after clicking import.</source> <translation>Verwenden Sie das Feld unten, um eine Adresse schnell über ihr Redeem-Skript zu importieren. Vergessen Sie nicht, eine Beschriftung hinzuzufügen, bevor Sie auf Importieren klicken! Beachten Sie, dass die Wallet die Blockchain erneut scannt, um Transaktionen zu finden, die die neue Adresse enthalten. Bitte haben Sie etwas Geduld, nachdem Sie auf Importieren geklickt haben.</translation> </message> <message> <source>&amp;Import Redeem</source> <translation>&amp;Redeem importieren</translation> </message> <message> <source>&amp;Create MultiSignature Tx</source> <translation>&amp;Erstelle eine mehrfach signierte Transaktion</translation> </message> <message> <source>Inputs:</source> <translation>Eingaben:</translation> </message> <message> <source>Coin Control</source> <translation>Coinverwaltung</translation> </message> <message> <source>Quantity Selected:</source> <translation>Menge ausgewählt:</translation> </message> <message> <source>0</source> <translation>0</translation> </message> <message> <source>Amount:</source> <translation>Betrag:</translation> </message> <message> <source>Add an input to fund the outputs</source> <translation>Füge eine Eingabe hinzu, damit die Ausgaben finanziert werden.</translation> </message> <message> <source>Add a Raw Input</source> <translation>Füge eine rohe Eingabe hinzu</translation> </message> <message> <source>Address / Amount:</source> <translation>Adresse / Menge:</translation> </message> <message> <source>Add destinations to send UWM to</source> <translation>Füge Transaktionsziele für UWM-Überweisung hinzu</translation> </message> <message> <source>Add &amp;Destination</source> <translation>Füge &amp;Transaktionsziel hinzu</translation> </message> <message> <source>Create a transaction object using the given inputs to the given outputs</source> <translation>Erstelle eine Transaktion mit den gegebenen Eingaben zu den angegebenen Zielen.</translation> </message> <message> <source>Cr&amp;eate</source> <translation>Erzeuge</translation> </message> <message> <source>&amp;Sign MultiSignature Tx</source> <translation>&amp;Signiere mehrfach signierte 
Transaktion</translation> </message> <message> <source>Transaction Hex:</source> <translation>Transaktionsschlüssel HEX:</translation> </message> <message> <source>Sign the transaction from this wallet or from provided private keys</source> <translation>Signiere eine Transaktion von diesem Wallet oder von bereitgestellten privaten Schlüsseln.</translation> </message> <message> <source>S&amp;ign</source> <translation>Unterschreiben</translation> </message> <message> <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;DISABLED until transaction has been signed enough times.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source> <translation>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt; DEAKTIVIERT bis die Transaktion entsprechend oft signiert wurde.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</translation> </message> <message> <source>Co&amp;mmit</source> <translation>Co&amp;mmit</translation> </message> <message> <source>Add private keys to sign the transaction with</source> <translation>Füge private Schlüssel zum signieren der Transaktion hinzu</translation> </message> <message> <source>Add Private &amp;Key</source> <translation>Füge privaten &amp;Key hinzu</translation> </message> <message> <source>Sign with only private keys (Not Recommened)</source> <translation>Nur mit privaten Schlüsseln signieren (Nicht empfohlen)</translation> </message> <message> <source>Invalid Tx Hash.</source> <translation>Ungültiger Transaktions-Hash.</translation> </message> <message> <source>Vout position must be positive.</source> <translation>Vout-Position muss positiv sein.</translation> </message> <message> <source>Maximum possible addresses reached. (15)</source> <translation>Maximale Anzahl möglicher Adressen erreicht. (15)</translation> </message> <message> <source>Vout Position: </source> <translation>Vout Position:</translation> </message> <message> <source>Amount: </source> <translation>Menge:</translation> </message> <message> <source>Maximum (15)</source> <translation>Maximum (15)</translation> </message> </context> <context> <name>OpenURIDialog</name> <message> <source>Open URI</source> <translation>URI öffnen</translation> </message> <message> <source>Open payment request from URI or file</source> <translation>Zahlungsanfrage von URI oder Datei öffnen</translation> </message> <message> <source>URI:</source> <translation>URI:</translation> </message> <message> <source>Select payment request file</source> <translation>Datei für Zahlungsanfrage auswählen</translation> </message> <message> <source>Select payment request file to open</source> <translation>Datei für Zahlungsanfrage öffnen</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <source>Options</source> <translation>Optionen</translation> </message> <message> <source>&amp;Main</source> <translation>&amp;Main</translation> </message> <message> <source>Size of &amp;database cache</source> <translation>Größe &amp;Datenbank Cache</translation> </message> <message> <source>MB</source> <translation>MB</translation> </message> <message> <source>Number of script &amp;verification threads</source> <translation>Anzahl der Skript &amp;Verifikation Threads</translation> </message> <message> <source>(0 = auto, &lt;0 = leave that many cores free)</source> <translation>(0 = auto, &lt;0 = leave that many cores free)</translation> </message> <message> <source>W&amp;allet</source> <translation>W&amp;allet</translation> </message> <message> <source>If you disable the spending of unconfirmed change, the change from a 
transaction&lt;br/&gt;cannot be used until that transaction has at least one confirmation.&lt;br/&gt;This also affects how your balance is computed.</source> <translation>Wenn Sie das Ausgeben von unbestätigten Wechselgeld deaktivieren, so kann das Wechselgeld von einer Transaktion &lt;br/&gt;nicht verwendet werden, bis mindestens eine Bestätigung vorliegt.&lt;br/&gt;Dies hat auch Auswirkungen auf ihre Bilanzberechnung.</translation> </message> <message> <source>Automatically open the UnitedWorldMoney client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Automatisches öffnen des UnitedWorldMoney-Client-Port im Router. Dies funktioniert nur, wenn ihr Router UPnP unterstützt und es eingeschaltet ist.</translation> </message> <message> <source>Accept connections from outside</source> <translation>Verbindungen von Aussen zulassen</translation> </message> <message> <source>Allow incoming connections</source> <translation>Eingehende Verbindungen erlauben</translation> </message> <message> <source>&amp;Connect through SOCKS5 proxy (default proxy):</source> <translation>&amp;Verbindung durch SOCKS5 proxy (default proxy):</translation> </message> <message> <source>Expert</source> <translation>Experte</translation> </message> <message> <source>Automatically start UnitedWorldMoney after logging in to the system.</source> <translation>UnitedWorldMoney automatisch nach dem Login starten.</translation> </message> <message> <source>&amp;Start UnitedWorldMoney on system login</source> <translation>UnitedWorldMoney automatisch beim System Login &amp;starten.</translation> </message> <message> <source>Whether to show coin control features or not.</source> <translation>Coin &amp;control Funktionen anzeigen oder nicht</translation> </message> <message> <source>Enable coin &amp;control features</source> <translation>Coin &amp;control Funktionen aktivieren</translation> </message> <message> <source>Show additional tab listing all your masternodes in first sub-tab&lt;br/&gt;and all masternodes on the network in second sub-tab.</source> <translation>Zeige zusätzlichen Tab, der alle ihre Masternodes anzeigt, im ersten Untertab&lt;br/&gt;und alle Masternodes im Netzwerk im zweiten Untertab.</translation> </message> <message> <source>Show Masternodes Tab</source> <translation>Masternodes Tab anzeigen</translation> </message> <message> <source>&amp;Spend unconfirmed change</source> <translation>&amp;Sende unbestätigtes Wechselgeld</translation> </message> <message> <source>&amp;Network</source> <translation>&amp;Netzwerk</translation> </message> <message> <source>The user interface language can be set here. This setting will take effect after restarting UnitedWorldMoney.</source> <translation>Die Sprache der Benutzeroberfläche kann hier festgelegt werden. Diese Einstellung wird nach einem Neustart realisiert.</translation> </message> <message> <source>Language missing or translation incomplete? Help contributing translations here: https://www.transifex.com/unitedworldmoney-project/unitedworldmoney-project-translations</source> <translation>Sprache fehlt oder ist unvollständig? 
Helfe mit und trage zur Übersetzung bei unter: https://www.transifex.com/unitedworldmoney-project/unitedworldmoney-project-translations</translation> </message> <message> <source>Map port using &amp;UPnP</source> <translation>Port mit &amp;UPnP mappen</translation> </message> <message> <source>Enable automatic minting of UWM units to zUWM</source> <translation>Aktivieren Sie die automatische Prägung von UWM-Einheiten zu zUWM</translation> </message> <message> <source>Enable zUWM Automint</source> <translation>Aktivieren Sie zUWM Automint</translation> </message> <message> <source>Percentage of incoming UWM which get automatically converted to zUWM via Zerocoin Protocol (min: 10%)</source> <translation>Anteil des eingehenden UWM, welcher automatisch zu zUWM via Zerocoin Protokoll konvertiert wird (min: 10%)</translation> </message> <message> <source>Percentage of autominted zUWM</source> <translation>Prozentsatz der automatisierten zUWM</translation> </message> <message> <source>Wait with automatic conversion to Zerocoin until enough UWM for this denomination is available</source> <translation>Warte mit automatischer Konvertierung zu Zerocoin, bis genügend UWM für diese Stückelung verfügbar ist</translation> </message> <message> <source>Preferred Automint zUWM Denomination</source> <translation>Bevorzugte automatisierte zUWM Stückelung</translation> </message> <message> <source>Stake split threshold:</source> <translation>Stake Split-Schwelle:</translation> </message> <message> <source>Connect to the UnitedWorldMoney network through a SOCKS5 proxy.</source> <translation>Durch einen SOCKS5 Proxy mit dem UnitedWorldMoney Netzwerk verbinden.</translation> </message> <message> <source>Proxy &amp;IP:</source> <translation>Proxy &amp;IP:</translation> </message> <message> <source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source> <translation>IP-Adresse des Proxys (z.B. IPv4: 127.0.0.1 / IPv6: ::1)</translation> </message> <message> <source>&amp;Port:</source> <translation>&amp;Port:</translation> </message> <message> <source>Port of the proxy (e.g. 9050)</source> <translation>Port des Proxies (z.B. 9050)</translation> </message> <message> <source>&amp;Window</source> <translation>&amp;Fenster</translation> </message> <message> <source>Show only a tray icon after minimizing the window.</source> <translation>Zeige nur eine Symbolkachel nach der Fensterminimierung.</translation> </message> <message> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimierung zur Kachel anstelle auf der Taskbar</translation> </message> <message> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimierung anstelle vom Schließen der Anwendung, wenn das Fenster geschlossen wird. 
Wenn diese Option aktiviert ist, wird die Anwendung nur geschlossen, wenn im Menü Beenden gewählt wird.</translation> </message> <message> <source>M&amp;inimize on close</source> <translation>Beim Schließen m&amp;inimieren</translation> </message> <message> <source>&amp;Display</source> <translation>&amp;Display</translation> </message> <message> <source>User Interface &amp;language:</source> <translation>Benutzeroberfläche &amp;Sprache:</translation> </message> <message> <source>User Interface Theme:</source> <translation>Benutzeroberfläche Motiv:</translation> </message> <message> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Wähle die Standardunterteilungseinheit, um sie auf der Oberfläche anzuzeigen und wenn Coins gesendet werden.</translation> </message> <message> <source>Decimal digits</source> <translation>Dezimalstellen</translation> </message> <message> <source>Hide empty balances</source> <translation>Leere Salden ausblenden</translation> </message> <message> <source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source> <translation>Drittpartei-URLs (z.B. Blockexplorer), die auf der Registerkarte Transaktionen als Kontextmenü erscheinen. %s in der URL wird durch den Transaktionshash ersetzt. Mehrere URLs sind durch vertikale Balken | getrennt.</translation> </message> <message> <source>Third party transaction URLs</source> <translation>Drittanbieter Transaktions URLs</translation> </message> <message> <source>Active command-line options that override above options:</source> <translation>Aktive Befehlszeilenoptionen, die die obigen Optionen überschreiben:</translation> </message> <message> <source>Reset all client options to default.</source> <translation>Alle Client Optionen auf den Standard zurücksetzen.</translation> </message> <message> <source>&amp;Reset Options</source> <translation>Optionen Zu&amp;rücksetzen</translation> </message> <message> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <source>&amp;Cancel</source> <translation>&amp;Abbrechen</translation> </message> <message> <source>Any</source> <translation>Irgendein</translation> </message> <message> <source>default</source> <translation>standard</translation> </message> <message> <source>none</source> <translation>keine</translation> </message> <message> <source>Confirm options reset</source> <translation>Zurücksetzen der Optionen bestätigen</translation> </message> <message> <source>Client restart required to activate changes.</source> <translation>Client Neustart erforderlich, um Änderungen zu aktivieren.</translation> </message> <message> <source>Client will be shutdown, do you want to proceed?</source> <translation>Der Client wird heruntergefahren, wollen Sie fortfahren?</translation> </message> <message> <source>This change would require a client restart.</source> <translation>Diese Änderung würde einen Neustart des Clients erfordern.</translation> </message> <message> <source>The supplied proxy address is invalid.</source> <translation>Die eingegebene Proxy-Adresse ist ungültig.</translation> </message> <message> <source>The supplied proxy port is invalid.</source> <translation>Der angegebene Proxy-Port ist ungültig.</translation> </message> <message> <source>The supplied proxy settings are invalid.</source> <translation>Die angegebenen Proxyeinstellungen sind 
ungültig.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <source>Form</source> <translation>Formular</translation> </message> <message> <source>Available:</source> <translation>Verfügbar:</translation> </message> <message> <source>Your current spendable balance</source> <translation>Ihr aktuell verfügbarer Kontostand</translation> </message> <message> <source>Total Balance, including all unavailable coins.</source> <translation>Gesamtsaldo, einschließlich aller nicht verfügbaren Münzen.</translation> </message> <message> <source>UWM Balance</source> <translation>UWM Bilanz</translation> </message> <message> <source>Pending:</source> <translation>Ausstehend:</translation> </message> <message> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source> <translation>Betrag aus unbestätigten Transaktionen, der noch nicht im aktuell verfügbaren Kontostand enthalten ist</translation> </message> <message> <source>Immature:</source> <translation>Unreif:</translation> </message> <message> <source>Staked or masternode rewards that has not yet matured</source> <translation>Erarbeiteter Betrag, der noch nicht gereift ist</translation> </message> <message> <source>Current locked balance in watch-only addresses</source> <translation>Der aktuelle gesperrte Kontostand in beobachteten Adressen</translation> </message> <message> <source>Your current UnitedWorldMoney balance, unconfirmed and immature transactions included</source> <translation>Ihr aktueller UnitedWorldMoney-Saldo, unbestätigte und unreife Transaktionen sind enthalten</translation> </message> <message> <source>zUWM Balance</source> <translation>zUWM Bilanz</translation> </message> <message> <source>Mature: more than 20 confirmation and more than 1 mint of the same denomination after it was minted. These zUWM are spendable.</source> <translation>Ausgereift: Mehr als 20 Bestätigungen und mehr als 1 Prägung der selben Stückelung nach erfolgter Prägung. Diese zUWM sind verwendbar.</translation> </message> <message> <source>Unconfirmed: less than 20 confirmations Immature: confirmed, but less than 1 mint of the same denomination after it was minted</source> <translation>Unbestätigt: Weniger als 20 Bestätigungen Unreif: Bestätigt, aber keine Prägung in der selben Stückelung seit erfolgter Prägung</translation> </message> <message> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the UnitedWorldMoney network after a connection is established, but this process has not completed yet.</source> <translation>Die angezeigte Information ist möglicherweise nicht mehr aktuell. Ihre Wallet synchronisiert sich automatisch mit dem UnitedWorldMoney-Netzwerk, nachdem eine Verbindung hergestellt wurde. 
Aber dieser Vorgang ist noch nicht abgeschlossen.</translation> </message> <message> <source>OVERVIEW</source> <translation>ÜBERBLICK</translation> </message> <message> <source>Combined Balance (including unconfirmed and immature coins)</source> <translation>Kombinierte Beträge (einschließlich unbestätigte und unreife Coins)</translation> </message> <message> <source>Combined Balance</source> <translation>Kombiniertes Guthaben</translation> </message> <message> <source>Unconfirmed transactions to watch-only addresses</source> <translation>Unbestätigte Transaktionen zu beobachteten Adressen</translation> </message> <message> <source>Staked or masternode rewards in watch-only addresses that has not yet matured</source> <translation>Erarbeiteter Betrag der beobachteten Adressen der noch nicht gereift ist</translation> </message> <message> <source>Total:</source> <translation>Gesamtbetrag:</translation> </message> <message> <source>Current total balance in watch-only addresses</source> <translation>Kontostand der beobachteten Adressen</translation> </message> <message> <source>Watch-only:</source> <translation>Beobachtet:</translation> </message> <message> <source>Your current balance in watch-only addresses</source> <translation>Aktueller Kontostand der beobachteten Adressen</translation> </message> <message> <source>Spendable:</source> <translation>Verfügbar:</translation> </message> <message> <source>Locked UWM or Masternode collaterals. These are excluded from zUWM minting.</source> <translation>Gesperrte UWM oder Masternode Pfand. Diese sind vom zUWM Prägen ausgeschlossen.</translation> </message> <message> <source>Locked:</source> <translation>Gesperrt:</translation> </message> <message> <source>Unconfirmed:</source> <translation>Unbestätigt:</translation> </message> <message> <source>Your current zUWM balance, unconfirmed and immature zUWM included.</source> <translation>Ihre aktuelle zUWM-Bilanz, unbestätigte und unreife zUWM enthalten.</translation> </message> <message> <source>Recent transactions</source> <translation>Letzte Transaktionen</translation> </message> <message> <source>out of sync</source> <translation>nicht synchron</translation> </message> <message> <source>Current percentage of zUWM. If AutoMint is enabled this percentage will settle around the configured AutoMint percentage (default = 10%). </source> <translation>Aktueller Prozentsatz an zUWM. Falls das automatische Prägen aktiviert ist, wird sich dieser Prozentsatz um den eingestellten Prozentsatz fürs automatisierte Prägen einpendeln (Standard = 10%). </translation> </message> <message> <source>AutoMint is currently enabled and set to </source> <translation>Automatisches Prägen ist aktiv und eingestellt auf </translation> </message> <message> <source>To disable AutoMint add 'enablezeromint=0' in unitedworldmoney.conf.</source> <translation>Um das automatische Prägen zu deaktivieren, füge 'enablezeromint=0' in unitedworldmoney.conf ein.</translation> </message> <message> <source>AutoMint is currently disabled. To enable AutoMint change 'enablezeromint=0' to 'enablezeromint=1' in unitedworldmoney.conf</source> <translation>Das automatische Prägen ist deaktiviert. 
Um das automatische Prägen zu aktivieren ändere 'enablezeromint=0' zu 'enablezeromint=1' in unitedworldmoney.conf</translation> </message> </context> <context> <name>PaymentServer</name> <message> <source>Payment request error</source> <translation>Zahlungsauftragsfehler</translation> </message> <message> <source>URI handling</source> <translation>URI Behandlung</translation> </message> <message> <source>Payment request fetch URL is invalid: %1</source> <translation>Zahlungsauftragsabruf URL ist ungültig: %1</translation> </message> <message> <source>Payment request file handling</source> <translation>Zahlungsauftragsdatei Behandlung</translation> </message> <message> <source>Invalid payment address %1</source> <translation>Ungültige Zahlungsaufforderungsadresse %1</translation> </message> <message> <source>Cannot start unitedworldmoney: click-to-pay handler</source> <translation>Kann UnitedWorldMoney nicht starten: click-to-pay Handler</translation> </message> <message> <source>URI cannot be parsed! This can be caused by an invalid UnitedWorldMoney address or malformed URI parameters.</source> <translation>URI kann nicht analysiert werden! Dies kann durch eine ungültige UnitedWorldMoney-Adresse oder fehlerhafte URI-Parameter verursacht worden sein.</translation> </message> <message> <source>Payment request file cannot be read! This can be caused by an invalid payment request file.</source> <translation>Zahlungsauftragsdatei kann nicht gelesen werden! Dies kann durch eine ungültige Zahlungsauftragsdatei verursacht werden.</translation> </message> <message> <source>Payment request rejected</source> <translation>Zahlungsauftrag abgelehnt</translation> </message> <message> <source>Payment request network doesn't match client network.</source> <translation>Das Zahlungsauftragsnetzwerk stimmt nicht mit dem Clientnetzwerk überein.</translation> </message> <message> <source>Payment request has expired.</source> <translation>Zahlungsauftrag ist abgelaufen.</translation> </message> <message> <source>Payment request is not initialized.</source> <translation>Zahlungsauftrag ist nicht initialisiert.</translation> </message> <message> <source>Unverified payment requests to custom payment scripts are unsupported.</source> <translation>Nicht verifizierte Zahlungsaufträge an benutzerdefinierte Zahlungsskripts werden nicht unterstützt.</translation> </message> <message> <source>Requested payment amount of %1 is too small (considered dust).</source> <translation>Der angeforderte Zahlungsbetrag von %1 ist zu klein (betrachtet als Staub).</translation> </message> <message> <source>Refund from %1</source> <translation>Rückerstattung von %1</translation> </message> <message> <source>Payment request %1 is too large (%2 bytes, allowed %3 bytes).</source> <translation>Zahlungsauftrag %1 ist zu groß (%2 Bytes, erlaubt %3 Bytes).</translation> </message> <message> <source>Payment request DoS protection</source> <translation>Zahlungsauftrag DoS Schutz</translation> </message> <message> <source>Error communicating with %1: %2</source> <translation>Fehler beim Kommunizieren mit %1: %2</translation> </message> <message> <source>Payment request cannot be parsed!</source> <translation>Zahlungsauftrag kann nicht analysiert werden!</translation> </message> <message> <source>Bad response from server %1</source> <translation>Fehlerhafte Antwort vom Server %1</translation> </message> <message> <source>Network request error</source> <translation>Netzwerkanfragefehler</translation> </message> <message> <source>Payment 
acknowledged</source> <translation>Zahlung anerkannt</translation> </message> </context> <context> <name>PeerTableModel</name> <message> <source>Address/Hostname</source> <translation>Adresse/Rechnername</translation> </message> <message> <source>Version</source> <translation>Programmversion</translation> </message> <message> <source>Ping Time</source> <translation>Latenz (Ping)</translation> </message> </context> <context> <name>PrivacyDialog</name> <message> <source>Zerocoin Actions:</source> <translation>Zerocoin Aktionen:</translation> </message> <message> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the UnitedWorldMoney network after a connection is established, but this process has not completed yet.</source> <translation>Die angezeigte Information ist möglicherweise nicht mehr aktuell. Ihre Wallet synchronisiert sich automatisch mit dem UnitedWorldMoney-Netzwerk, nachdem eine Verbindung hergestellt wurde. Aber dieser Vorgang ist noch nicht abgeschlossen.</translation> </message> <message> <source>Mint Zerocoin</source> <translation>Präge Zerocoin</translation> </message> <message> <source>0</source> <translation>0</translation> </message> <message> <source>zUWM</source> <translation>zUWM</translation> </message> <message> <source>Reset Zerocoin Wallet DB. Deletes transactions that did not make it into the blockchain.</source> <translation>Zurücksetzen der Zerocoin-Wallet-DB. Löscht Transaktionen, die es nicht in die Blockchain geschafft haben.</translation> </message> <message> <source>Reset</source> <translation>Zurücksetzen</translation> </message> <message> <source>Rescan the complete blockchain for Zerocoin mints and their meta-data.</source> <translation>Erneutes Scannen der kompletten Blockchain nach Zerocoin Prägungen und deren Meta-Daten</translation> </message> <message> <source>ReScan</source> <translation>Erneut Scannen</translation> </message> <message> <source>Status and/or Mesages from the last Mint Action.</source> <translation>Status und/oder Mitteilungen aus der letzten Prägung</translation> </message> <message> <source>PRIVACY</source> <translation>PRIVATSPHÄRE</translation> </message> <message> <source>zUWM Control</source> <translation>zUWM Control</translation> </message> <message> <source>zUWM Selected:</source> <translation>zUWM ausgewählt:</translation> </message> <message> <source>Quantity Selected:</source> <translation>Menge ausgewählt:</translation> </message> <message> <source>Spend Zerocoin. Without 'Pay To:' address creates payments to yourself.</source> <translation>Überweise Zerocoin. 
Ohne Empfänger-Adresse werden Zahlungen an Sie selbst erstellt.</translation> </message> <message> <source>Spend Zerocoin</source> <translation>Überweise Zerocoin</translation> </message> <message> <source>Available (mature and spendable) zUWM for spending</source> <translation>Verfügbare (ausgereifte und aufwendbare) zUWM zum Überweisen</translation> </message> <message> <source>Available Balance:</source> <translation>Verfügbare Bilanz:</translation> </message> <message> <source>Available (mature and spendable) zUWM for spending zUWM are mature when they have more than 20 confirmations AND more than 2 mints of the same denomination after them were minted</source> <translation>Verfügbare (ausgereifte und aufwendbare) zUWM zum Überweisen zUWM sind ausgereift wenn sie mehr als 20 Bestätigungen und mehr als 2 Prägungen in der selben Stückelung nach erfolgter Prägung halten.</translation> </message> <message> <source>0 zUWM</source> <translation>0 zUWM</translation> </message> <message> <source>Pay &amp;To:</source> <translation>E&amp;mpfänger:</translation> </message> <message> <source>The UnitedWorldMoney address to send the payment to. Creates local payment to yourself when empty.</source> <translation>Die UnitedWorldMoney-Adresse, an die die Zahlung gesendet wird. Erstellt eine lokale Zahlung an Sie selbst, wenn die Zeile leer ist.</translation> </message> <message> <source>Choose previously used address</source> <translation>Bereits verwendete Adresse auswählen</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Adresse aus der Zwischenablage einfügen</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Bezeichnung:</translation> </message> <message> <source>Enter a label for this address to add it to the list of used addresses</source> <translation>Adressbezeichnung eingeben, um diese in der Liste der bereits verwendeten Adressen zu speichern</translation> </message> <message> <source>A&amp;mount:</source> <translation>Betra&amp;g:</translation> </message> <message> <source>Convert Change to Zerocoin (might cost additional fees)</source> <translation>Wandle Wechselgeld in Zerocoin um (kann zusätzliche Gebühren kosten)</translation> </message> <message> <source>If checked, the wallet tries to minimize the returning change instead of minimizing the number of spent denominations.</source> <translation>Falls aktiviert, versucht die Wallet, das zurückkehrende Wechselgeld zu minimieren, anstatt die Anzahl der verbrauchten Stückelungen zu minimieren.</translation> </message> <message> <source>Minimize Change</source> <translation>Minimiert Wechselgeld</translation> </message> <message> <source>Information about the available Zerocoin funds.</source> <translation>Informationen über den verfügbaren Zerocoin-Bestand.</translation> </message> <message> <source>Zerocoin Stats:</source> <translation>Zerocoin Statistiken:</translation> </message> <message> <source>Total Balance including unconfirmed and immature zUWM</source> <translation>Totales Guthaben, einschließlich unbestätigter und unreifer zUWM</translation> </message> <message> <source>Total Zerocoin Balance:</source> <translation>Totales Zerocoin Guthaben:</translation> </message> <message> <source>Denominations with value 1:</source> <translation>Stückelungen mit Wert 1:</translation> </message> <message> 
<source>Denom. with value 1:</source> <translation>Stücke mit Wert 1:</translation> </message> <message> <source>Unconfirmed: less than 20 confirmations Immature: confirmed, but less than 1 mint of the same denomination after it was minted</source> <translation>Unbestätigt: Weniger als 20 Bestätigungen Unreif: Bestätigt, aber keine Prägung in der selben Stückelung seit erfolgter Prägung</translation> </message> <message> <source>Show the current status of automatic zUWM minting. To change the status (restart required): - enable: add 'enablezeromint=1' to unitedworldmoney.conf - disable: add 'enablezeromint=0' to unitedworldmoney.conf To change the percentage (no restart required): - menu Settings-&gt;Options-&gt;Percentage of autominted zUWM </source> <translation>Zeigt den aktuellen Status der automatischen zUWM-Prägung an Um den Status zu ändern (Neustart erforderlich): - enable: füge 'enablezeromint = 1' zu unitedworldmoney.conf hinzu - disable: füge 'enablezeromint = 0' zu unitedworldmoney.conf hinzu Um den Prozentsatz zu ändern (kein Neustart erforderlich): - Menü Einstellungen-&gt; Optionen-&gt; Prozentsatz der autom. zUWM </translation> </message> <message> <source>AutoMint Status</source> <translation>AutoMint-Status</translation> </message> <message> <source>Global Supply:</source> <translation>Globales Angebot:</translation> </message> <message> <source>Denom. 1:</source> <translation>Wertstufe 1:</translation> </message> <message> <source>Denom. 5:</source> <translation>Wertstufe 5:</translation> </message> <message> <source>Denom. 10:</source> <translation>Wertstufe 10:</translation> </message> <message> <source>Denom. 50:</source> <translation>Wertstufe 50:</translation> </message> <message> <source>Denom. 100:</source> <translation>Wertstufe 100:</translation> </message> <message> <source>Denom. 500:</source> <translation>Wertstufe 500:</translation> </message> <message> <source>Denom. 1000:</source> <translation>Wertstufe 1000:</translation> </message> <message> <source>Denom. 5000:</source> <translation>Wertstufe 5000:</translation> </message> <message> <source>0 x</source> <translation>0 x</translation> </message> <message> <source>Denominations with value 5:</source> <translation>Stückelungen mit Wert 5:</translation> </message> <message> <source>Denom. with value 5:</source> <translation>Stücke mit Wert 5:</translation> </message> <message> <source>Denominations with value 10:</source> <translation>Stückelungen mit Wert 10:</translation> </message> <message> <source>Denom. with value 10:</source> <translation>Stücke mit Wert 10:</translation> </message> <message> <source>Denominations with value 50:</source> <translation>Stückelungen mit Wert 50:</translation> </message> <message> <source>Denom. with value 50:</source> <translation>Stücke mit Wert 50:</translation> </message> <message> <source>Denominations with value 100:</source> <translation>Stückelungen mit Wert 100:</translation> </message> <message> <source>Denom. with value 100:</source> <translation>Stücke mit Wert 100:</translation> </message> <message> <source>Denominations with value 500:</source> <translation>Stückelungen mit Wert 500:</translation> </message> <message> <source>Denom. with value 500:</source> <translation>Stücke mit Wert 500:</translation> </message> <message> <source>Denominations with value 1000:</source> <translation>Stückelungen mit Wert 1000:</translation> </message> <message> <source>Denom. 
with value 1000:</source> <translation>Stücke mit Wert 1000:</translation> </message> <message> <source>Denominations with value 5000:</source> <translation>Stückelungen mit Wert 5000:</translation> </message> <message> <source>Denom. with value 5000:</source> <translation>Stücke mit Wert 5000:</translation> </message> <message> <source>Priority:</source> <translation>Priorität:</translation> </message> <message> <source>TextLabel</source> <translation>TextEtikett</translation> </message> <message> <source>Fee:</source> <translation>Gebühr:</translation> </message> <message> <source>Dust:</source> <translation>Abzug:</translation> </message> <message> <source>no</source> <translation>nein</translation> </message> <message> <source>Bytes:</source> <translation>Byte:</translation> </message> <message> <source>Insufficient funds!</source> <translation>Unzureichender Kontostand!</translation> </message> <message> <source>Coins automatically selected</source> <translation>Coins automatisch ausgewählt</translation> </message> <message> <source>medium</source> <translation>mittel</translation> </message> <message> <source>Coin Control Features</source> <translation>"Coin Control"-Funktionen</translation> </message> <message> <source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source> <translation>Wenn dies aktivert, und die Wechselgeld-Adresse leer oder ungültig ist, wird das Wechselgeld einer neu erzeugten Adresse gutgeschrieben.</translation> </message> <message> <source>Custom change address</source> <translation>Benutzerdefinierte Wechselgeld-Adresse</translation> </message> <message> <source>Amount After Fee:</source> <translation>Betrag nach Gebühr:</translation> </message> <message> <source>Change:</source> <translation>Wechselgeld:</translation> </message> <message> <source>out of sync</source> <translation>nicht synchron</translation> </message> <message> <source>Mint Status: Okay</source> <translation>Prägestatus: Okay</translation> </message> <message> <source>Starting ResetMintZerocoin: rescanning complete blockchain, this will need up to 30 minutes depending on your hardware. Please be patient...</source> <translation>Starten von ResetMintZerocoin: Erneutes Scannen der kompletten Blockchain. Das benötigt, je nach Hardware, bis zu 30 Minuten. Bitte haben Sie etwas Geduld...</translation> </message> <message> <source>zUWM Spend #: </source> <translation>zUWM Ausgeben #:</translation> </message> <message> <source>zUWM Mint</source> <translation>zUWM Prägen</translation> </message> <message> <source> &lt;b&gt;enabled&lt;/b&gt;.</source> <translation> &lt;b&gt;enabled&lt;/b&gt;.</translation> </message> <message> <source> &lt;b&gt;disabled&lt;/b&gt;.</source> <translation> &lt;b&gt;disabled&lt;/b&gt;.</translation> </message> <message> <source> Configured target percentage: &lt;b&gt;</source> <translation>Konfigurierter Zielprozentsatz: &lt;b&gt;</translation> </message> <message> <source>zUWM is currently disabled due to maintenance.</source> <translation>zUWM ist aktuell deaktiviert aufgrund von Wartungsarbeiten.</translation> </message> <message> <source>zUWM is currently undergoing maintenance.</source> <translation>zUWM unterliegt momentan Wartungsarbeiten.</translation> </message> <message> <source>Denom. with value &lt;b&gt;1&lt;/b&gt;:</source> <translation>Denom. mit Wert &lt;b&gt;1&lt;/b&gt;:</translation> </message> <message> <source>Denom. with value &lt;b&gt;5&lt;/b&gt;:</source> <translation>Denom. 
mit Wert &lt;b&gt;5&lt;/b&gt;:</translation> </message> <message> <source>Denom. with value &lt;b&gt;10&lt;/b&gt;:</source> <translation>Denom. mit Wert &lt;b&gt;10&lt;/b&gt;:</translation> </message> <message> <source>Denom. with value &lt;b&gt;50&lt;/b&gt;:</source> <translation>Denom. mit Wert &lt;b&gt;50&lt;/b&gt;:</translation> </message> <message> <source>Denom. with value &lt;b&gt;100&lt;/b&gt;:</source> <translation>Denom. mit Wert &lt;b&gt;100&lt;/b&gt;:</translation> </message> <message> <source>Denom. with value &lt;b&gt;500&lt;/b&gt;:</source> <translation>Denom. mit Wert &lt;b&gt;500&lt;/b&gt;:</translation> </message> <message> <source>Denom. with value &lt;b&gt;1000&lt;/b&gt;:</source> <translation>Denom. mit Wert &lt;b&gt;1000&lt;/b&gt;:</translation> </message> <message> <source>Denom. with value &lt;b&gt;5000&lt;/b&gt;:</source> <translation>Denom. mit Wert &lt;b&gt;5000&lt;/b&gt;:</translation> </message> <message> <source>AutoMint Status:</source> <translation>AutoMint-Status:</translation> </message> <message> <source>Denom. &lt;b&gt;1&lt;/b&gt;:</source> <translation>Denom. &lt;b&gt;1&lt;/b&gt;:</translation> </message> <message> <source>Denom. &lt;b&gt;5&lt;/b&gt;:</source> <translation>Denom. &lt;b&gt;5&lt;/b&gt;:</translation> </message> <message> <source>Denom. &lt;b&gt;10&lt;/b&gt;:</source> <translation>Denom. &lt;b&gt;10&lt;/b&gt;:</translation> </message> <message> <source>Denom. &lt;b&gt;50&lt;/b&gt;:</source> <translation>Denom. &lt;b&gt;50&lt;/b&gt;:</translation> </message> <message> <source>Denom. &lt;b&gt;100&lt;/b&gt;:</source> <translation>Denom. &lt;b&gt;100&lt;/b&gt;:</translation> </message> <message> <source>Denom. &lt;b&gt;500&lt;/b&gt;:</source> <translation>Denom. &lt;b&gt;500&lt;/b&gt;:</translation> </message> <message> <source>Denom. &lt;b&gt;1000&lt;/b&gt;:</source> <translation>Denom. &lt;b&gt;1000&lt;/b&gt;:</translation> </message> <message> <source>Denom. &lt;b&gt;5000&lt;/b&gt;:</source> <translation>Denom. &lt;b&gt;5000&lt;/b&gt;:</translation> </message> <message> <source>Duration: </source> <translation>Dauer: </translation> </message> <message> <source> sec. </source> <translation> Sek. </translation> </message> <message> <source>Starting ResetSpentZerocoin: </source> <translation>Starte ResetSpentZerocoin: </translation> </message> <message> <source>No 'Pay To' address provided, creating local payment</source> <translation>Keine "Pay To"-Adresse zur Verfügung. 
Erstelle lokale Zahlung</translation> </message> <message> <source>Invalid UnitedWorldMoney Address</source> <translation>Ungültige UnitedWorldMoney-Adresse </translation> </message> <message> <source>Invalid Send Amount</source> <translation>Ungültiger Sendebetrag.</translation> </message> <message> <source>Confirm additional Fees</source> <translation>Zusätzliche Gebühren bestätigen</translation> </message> <message> <source>Are you sure you want to send?&lt;br /&gt;&lt;br /&gt;</source> <translation>Sind Sie sicher, dass Sie senden möchten?&lt;br /&gt;&lt;br /&gt;</translation> </message> <message> <source> to address </source> <translation>an Adresse</translation> </message> <message> <source> to a newly generated (unused and therefore anonymous) local address &lt;br /&gt;</source> <translation>an eine neu generierte (unbenutzte und daher anonyme) lokale Adresse&lt;br /&gt;</translation> </message> <message> <source>Confirm send coins</source> <translation>Sende Coins bestätigen</translation> </message> <message> <source>Failed to fetch mint associated with serial hash</source> <translation>Mit Hash verbundene Prägung konnte nicht abgerufen werden</translation> </message> <message> <source>Spend Zerocoin failed with status = </source> <translation>Ausgeben Zerocoin fehlgeschlagen mit Status =</translation> </message> <message> <source>denomination: </source> <translation>Stückelung:</translation> </message> <message> <source>serial: </source> <translation>seriell:</translation> </message> <message> <source>Spend is 1 of : </source> <translation>Spend ist 1 von :</translation> </message> <message> <source>value out: </source> <translation>Betrag out:</translation> </message> <message> <source>address: </source> <translation>Adresse:</translation> </message> <message> <source>Sending successful, return code: </source> <translation>Senden erfolgreich, Rückgabecode:</translation> </message> <message> <source>txid: </source> <translation>txid: </translation> </message> <message> <source>fee: </source> <translation>Gebühr:</translation> </message> </context> <context> <name>ProposalFrame</name> </context> <context> <name>QObject</name> <message> <source>Amount</source> <translation>Betrag</translation> </message> <message> <source>Enter a UnitedWorldMoney address (e.g. %1)</source> <translation>UnitedWorldMoney-Adresse eingeben (z.B. 
%1)</translation> </message> <message> <source>%1 d</source> <translation>%1 T</translation> </message> <message> <source>%1 h</source> <translation>%1 St.</translation> </message> <message> <source>%1 m</source> <translation>%1 Min.</translation> </message> <message> <source>%1 s</source> <translation>%1 S</translation> </message> <message> <source>NETWORK</source> <translation>NETZWERK</translation> </message> <message> <source>BLOOM</source> <translation>BLOOM</translation> </message> <message> <source>UNKNOWN</source> <translation>UNBEKANNT</translation> </message> <message> <source>None</source> <translation>Keine</translation> </message> <message> <source>N/A</source> <translation>k.A.</translation> </message> <message> <source>%1 ms</source> <translation>%1 Ms</translation> </message> <message> <source>UnitedWorldMoney Core</source> <translation>UnitedWorldMoney Core</translation> </message> </context> <context> <name>QRImageWidget</name> <message> <source>&amp;Save Image...</source> <translation>Grafik &amp;speichern...</translation> </message> <message> <source>&amp;Copy Image</source> <translation>Grafik &amp;kopieren</translation> </message> <message> <source>Save QR Code</source> <translation>QR-Code speichern</translation> </message> <message> <source>PNG Image (*.png)</source> <translation>PNG-Grafik (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <source>Tools window</source> <translation>Werkzeugfenster</translation> </message> <message> <source>&amp;Information</source> <translation>&amp;Information</translation> </message> <message> <source>General</source> <translation>Allgemein</translation> </message> <message> <source>Name</source> <translation>Name</translation> </message> <message> <source>Client name</source> <translation>Client Name</translation> </message> <message> <source>N/A</source> <translation>k.A.</translation> </message> <message> <source>Number of connections</source> <translation>Anzahl der Verbindungen</translation> </message> <message> <source>&amp;Open</source> <translation>&amp;Öffnen</translation> </message> <message> <source>Startup time</source> <translation>Startzeit</translation> </message> <message> <source>Network</source> <translation>Netzwerk</translation> </message> <message> <source>Last block time</source> <translation>Letzte Blockzeit</translation> </message> <message> <source>Debug log file</source> <translation>Debug-Protokolldatei</translation> </message> <message> <source>Using OpenSSL version</source> <translation>Verwendete OpenSSL Version</translation> </message> <message> <source>Build date</source> <translation>Build Datum</translation> </message> <message> <source>Current number of blocks</source> <translation>Aktuelle Blockanzahl</translation> </message> <message> <source>Client version</source> <translation>Client Version</translation> </message> <message> <source>Using BerkeleyDB version</source> <translation>Verwendete BerkeleyDB Version</translation> </message> <message> <source>Block chain</source> <translation>Blockchain</translation> </message> <message> <source>Open the UnitedWorldMoney debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Öffne UnitedWorldMoney-Debug-Protokolldatei aus dem aktuellen Datenverzeichnis. 
Dies kann bei großen Protokolldateien einige Sekunden dauern.</translation> </message> <message> <source>Number of Masternodes</source> <translation>Anzahl der Masternodes</translation> </message> <message> <source>&amp;Console</source> <translation>&amp;Konsole</translation> </message> <message> <source>Clear console</source> <translation>Konsole leeren</translation> </message> <message> <source>&amp;Network Traffic</source> <translation>&amp;Netzwerk Traffic</translation> </message> <message> <source>&amp;Clear</source> <translation>&amp;Clear</translation> </message> <message> <source>Totals</source> <translation>Gesamtbetrag</translation> </message> <message> <source>Received</source> <translation>Empfangen</translation> </message> <message> <source>Sent</source> <translation>Gesendet</translation> </message> <message> <source>&amp;Peers</source> <translation>&amp;Peers</translation> </message> <message> <source>Banned peers</source> <translation>Gebannte Peers</translation> </message> <message> <source>Select a peer to view detailed information.</source> <translation>Wähle ein Peer, um detaillierte Informationen zu sehen.</translation> </message> <message> <source>Whitelisted</source> <translation>Whitelisted</translation> </message> <message> <source>Direction</source> <translation>Richtung</translation> </message> <message> <source>Protocol</source> <translation>Protokoll</translation> </message> <message> <source>Version</source> <translation>Programmversion</translation> </message> <message> <source>Services</source> <translation>Dienstleistung</translation> </message> <message> <source>Ban Score</source> <translation>Ban Score</translation> </message> <message> <source>Connection Time</source> <translation>Verbindungszeit</translation> </message> <message> <source>Last Send</source> <translation>Letztes Senden</translation> </message> <message> <source>Last Receive</source> <translation>Letzter Empfang</translation> </message> <message> <source>Bytes Sent</source> <translation>Bytes gesendet</translation> </message> <message> <source>Bytes Received</source> <translation>Bytes empfangen</translation> </message> <message> <source>Ping Time</source> <translation>Latenz (Ping)</translation> </message> <message> <source>&amp;Wallet Repair</source> <translation>&amp;Wallet reparieren </translation> </message> <message> <source>Delete local Blockchain Folders</source> <translation>Lokalen Blockchain-Ordner löschen.</translation> </message> <message> <source>Wallet In Use:</source> <translation>Wallet In Benutzung:</translation> </message> <message> <source>Starting Block</source> <translation>Startblock</translation> </message> <message> <source>Synced Headers</source> <translation>Synchronisierte Header</translation> </message> <message> <source>Synced Blocks</source> <translation>Synchronisierte Blöcke</translation> </message> <message> <source>The duration of a currently outstanding ping.</source> <translation>Die Dauer eines derzeit ausstehenden Pings.</translation> </message>
<message> <source>Time Offset</source> <translation>Zeitverschiebung</translation> </message> <message> <source>Custom Backup Path:</source> <translation>Benutzerdefinierter Sicherungspfad:</translation> </message> <message> <source>Custom zUWM Backup Path:</source> <translation>Benutzerdefinierter zUWM Sicherungspfad:</translation> </message> <message> <source>Custom Backups Threshold:</source> <translation>Benutzerdefinierter Sicherungs-Schwellwert:</translation> </message> <message> <source>Salvage wallet</source> <translation>Wallet Rettung</translation> </message> <message> <source>Attempt to recover private keys from a corrupt wallet.dat.</source> <translation>Versuche, private Schlüssel aus einer beschädigten wallet.dat wiederherzustellen.</translation> </message> <message> <source>Rescan blockchain files</source> <translation>Erneutes Scannen der Blockchain-Dateien</translation> </message> <message> <source>Rescan the block chain for missing wallet transactions.</source> <translation>Erneutes Scannen der Blockchain nach verschollenen Wallet-Transaktionen.</translation> </message> <message> <source>Recover transactions 1</source> <translation>Wiederherstellung von Transaktionen 1</translation> </message> <message> <source>Recover transactions from blockchain (keep meta-data, e.g. account owner).</source> <translation>Wiederherstellung von Transaktionen aus der Blockchain (behalte Meta-Daten, z.B. Kontoinhaber)</translation> </message> <message> <source>Recover transactions 2</source> <translation>Wiederherstellung von Transaktionen 2</translation> </message> <message> <source>Recover transactions from blockchain (drop meta-data).</source> <translation>Wiederherstellung von Transaktionen aus der Blockchain (verwerfe Meta-Daten)</translation> </message> <message> <source>Upgrade wallet format</source> <translation>Upgrade Wallet Format</translation> </message> <message> <source>Rebuild block chain index from current blk000??.dat files.</source> <translation>Blockchain Index aus der aktuellen blk000??.dat Datei neu aufbauen.</translation> </message> <message> <source>-resync:</source> <translation>-resync:</translation> </message> <message> <source>Deletes all local blockchain folders so the wallet synchronizes from scratch.</source> <translation>Löscht alle lokalen Blockchain-Ordner damit das Wallet sich von Grund auf synchronisiert.</translation> </message> <message> <source>The buttons below will restart the wallet with command-line options to repair the wallet, fix issues with corrupt blockhain files or missing/obsolete transactions.</source> <translation>Die unten aufgeführten Schaltflächen starten die Wallet mit Befehlszeilenoptionen, um die Wallet zu reparieren. Probleme mit beschädigten Blockchain-Dateien oder fehlenden/veralteten Transaktionen werden behoben.</translation> </message> <message> <source>Wallet repair options.</source> <translation>Wallet-Reparatur-Optionen </translation> </message> <message> <source>Upgrade wallet to latest format on startup. (Note: this is NOT an update of the wallet itself!)</source> <translation>Upgrade die Wallet beim Start auf das neueste Format. 
(Anmerkung: Dies ist NICHT ein Update der Wallet selbst!)</translation> </message> <message> <source>Rebuild index</source> <translation>Index neu aufbauen</translation> </message> <message> <source>In:</source> <translation>In:</translation> </message> <message> <source>Out:</source> <translation>Out:</translation> </message> <message> <source>Welcome to the UnitedWorldMoney RPC console.</source> <translation>Willkommen in der UnitedWorldMoney RPC-Konsole.</translation> </message> <message> <source>&amp;Disconnect Node</source> <translation>&amp;Disconnect Node</translation> </message> <message> <source>Ban Node for</source> <translation>Ban Knoten für</translation> </message> <message> <source>1 &amp;hour</source> <translation>1 &amp;hour</translation> </message> <message> <source>1 &amp;day</source> <translation>1 &amp;day</translation> </message> <message> <source>1 &amp;week</source> <translation>1 &amp;week</translation> </message> <message> <source>1 &amp;year</source> <translation>1 &amp;year</translation> </message> <message> <source>&amp;Unban Node</source> <translation>&amp;Unban Node</translation> </message> <message> <source>This will delete your local blockchain folders and the wallet will synchronize the complete Blockchain from scratch.&lt;br /&gt;&lt;br /&gt;</source> <translation>Dieser Vorgang wird Ihren lokalen Blockchain-Ordner löschen und das Wallet wird von Grund auf neu synchronisieren.&lt;br /&gt;&lt;br /&gt;</translation> </message> <message> <source>This needs quite some time and downloads a lot of data.&lt;br /&gt;&lt;br /&gt;</source> <translation>Dieser Vorgang wird einige Zeit in Anspruch nehmen und eine Menge Daten herunterladen.&lt;br /&gt;&lt;br /&gt;</translation> </message> <message> <source>Your transactions and funds will be visible again after the download has completed.&lt;br /&gt;&lt;br /&gt;</source> <translation>Ihre Transaktionen und Guthaben werden wieder sichtbar sein, nachdem der Download-Prozess abgeschlossen ist.&lt;br /&gt;&lt;br /&gt;</translation> </message> <message> <source>Do you want to continue?.&lt;br /&gt;</source> <translation>Möchten Sie fortfahren?&lt;br /&gt;</translation> </message> <message> <source>Confirm resync Blockchain</source> <translation>Synchronisation der Blockchain bestätigen</translation> </message> <message> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Geben Sie &lt;b&gt;help&lt;/b&gt; ein, um eine Übersicht der verfügbaren Befehle zu erhalten.</translation> </message> <message> <source>%1 B</source> <translation>%1 Byte</translation> </message> <message> <source>%1 KB</source> <translation>%1 KB</translation> </message> <message> <source>%1 MB</source> <translation>%1 MB</translation> </message> <message> <source>%1 GB</source> <translation>%1 GB</translation> </message> <message> <source>(node id: %1)</source> <translation>(node id: %1)</translation> </message> <message> <source>via %1</source> <translation>via %1</translation> </message> <message> <source>never</source> <translation>nie</translation> </message> <message> <source>Inbound</source> <translation>Eingehend</translation> </message> <message> <source>Outbound</source> <translation>Ausgehend</translation> </message> <message> <source>Yes</source> <translation>Ja</translation> </message> <message> <source>No</source> <translation>Nein</translation> </message> <message> <source>Unknown</source> <translation>Unbekannt</translation> </message> </context> <context> <name>ReceiveCoinsDialog</name> <message> 
<source>Reuse one of the previously used receiving addresses.&lt;br&gt;Reusing addresses has security and privacy issues.&lt;br&gt;Do not use this unless re-generating a payment request made before.</source> <translation>Wiederverwendung einer zuvor verwendeten Empfangsadressen.&lt;br&gt;Wiederverwendete Adressen haben Sicherheits- und Datenschutzprobleme.&lt;br&gt;Verwenden Sie diese nicht, es sei denn, dass Sie zuvor eine Zahlungsanforderung generieren.</translation> </message> <message> <source>R&amp;euse an existing receiving address (not recommended)</source> <translation>&amp;Wiederverwendung einer vorhandenen Empfangsadresse (nicht empfohlen)</translation> </message> <message> <source>&amp;Message:</source> <translation>Nachricht:</translation> </message> <message> <source>An optional label to associate with the new receiving address.</source> <translation>Ein optionales Etikett, das mit der neuen Empfangsadresse verknüpft werden soll.</translation> </message> <message> <source>A&amp;mount:</source> <translation>Betra&amp;g:</translation> </message> <message> <source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the UnitedWorldMoney network.</source> <translation>Eine optionale Nachricht an die Zahlungsaufforderung anhängen, die bei der Eröffnung der Anforderung angezeigt wird. Hinweis: Die Nachricht wird nicht mit der Zahlung über das UnitedWorldMoney-Netzwerk gesendet.</translation> </message> <message> <source>RECEIVE</source> <translation>EMPFANGEN</translation> </message> <message> <source>An optional message to attach to the payment request, which will be displayed when the request is opened.&lt;br&gt;Note: The message will not be sent with the payment over the UnitedWorldMoney network.</source> <translation>Eine optionale Nachricht an die Zahlungsaufforderung anhängen, die bei der Eröffnung der Anforderung angezeigt wird.&lt;br&gt;Hinweis: Die Nachricht wird nicht mit der Zahlung über das UnitedWorldMoney-Netzwerk gesendet.</translation> </message> <message> <source>Use this form to request payments. All fields are &lt;b&gt;optional&lt;/b&gt;.</source> <translation>Verwenden Sie dieses Formular, um Zahlungen anzufordern. Alle Felder sind &lt;b&gt;optional&lt;/b&gt;.</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Bezeichnung:</translation> </message> <message> <source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source> <translation>Einen optionalen Betrag anfordern. 
Lassen Sie diesen leer oder Null, um einen unbestimmten Betrag anzufordern.</translation> </message> <message> <source>&amp;Request payment</source> <translation>Zahlung anfo&amp;rdern</translation> </message> <message> <source>Clear all fields of the form.</source> <translation>Alle Formularfelder zurücksetzen.</translation> </message> <message> <source>Clear</source> <translation>Clear</translation> </message> <message> <source>Requested payments history</source> <translation>Verlauf der angeforderten Zahlungen</translation> </message> <message> <source>Show the selected request (does the same as double clicking an entry)</source> <translation>Zeige die ausgewählte Anfrage an (entspricht dem Doppelklick auf einen Eintrag)</translation> </message> <message> <source>Show</source> <translation>Anzeigen</translation> </message> <message> <source>Remove the selected entries from the list</source> <translation>Entferne die ausgewählten Einträge aus der Liste</translation> </message> <message> <source>Remove</source> <translation>Entfernen</translation> </message> <message> <source>Copy label</source> <translation>Bezeichnung kopieren</translation> </message> <message> <source>Copy message</source> <translation>Nachricht kopieren</translation> </message> <message> <source>Copy amount</source> <translation>Betrag kopieren</translation> </message> <message> <source>Copy address</source> <translation>Adresse kopieren</translation> </message> </context> <context> <name>ReceiveRequestDialog</name> <message> <source>QR Code</source> <translation>QR Code</translation> </message> <message> <source>Copy &amp;URI</source> <translation>&amp;URI kopieren</translation> </message> <message> <source>Copy &amp;Address</source> <translation>&amp;Adresse kopieren</translation> </message> <message> <source>&amp;Save Image...</source> <translation>Grafik &amp;speichern...</translation> </message> <message> <source>Request payment to %1</source> <translation>Zahlung an %1 anfordern </translation> </message> <message> <source>Payment information</source> <translation>Zahlungsinformationen</translation> </message> <message> <source>URI</source> <translation>URI</translation> </message> <message> <source>Address</source> <translation>Adresse</translation> </message> <message> <source>Amount</source> <translation>Betrag</translation> </message> <message> <source>Label</source> <translation>Bezeichnung</translation> </message> <message> <source>Message</source> <translation>Nachricht</translation> </message> <message> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Resultierende URI zu lang. 
Versuchen sie den Text für Label / Nachricht zu kürzen.</translation> </message> <message> <source>Error encoding URI into QR Code.</source> <translation>Fehler bei der Kodierung der URI im QR-Code</translation> </message> </context> <context> <name>RecentRequestsTableModel</name> <message> <source>Date</source> <translation>Datum</translation> </message> <message> <source>Label</source> <translation>Bezeichnung</translation> </message> <message> <source>Message</source> <translation>Nachricht</translation> </message> <message> <source>Address</source> <translation>Adresse</translation> </message> <message> <source>Amount</source> <translation>Betrag</translation> </message> <message> <source>(no label)</source> <translation>(keine Bezeichnung)</translation> </message> <message> <source>(no message)</source> <translation>(keine Nachricht)</translation> </message> <message> <source>(no amount)</source> <translation>(kein Betrag)</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <source>Send Coins</source> <translation>UWMs überweisen</translation> </message> <message> <source>SEND</source> <translation>SENDEN</translation> </message> <message> <source>Coin Control Features</source> <translation>"Coin Control"-Funktionen</translation> </message> <message> <source>Insufficient funds!</source> <translation>Unzureichender Kontostand!</translation> </message> <message> <source>Quantity:</source> <translation>Anzahl:</translation> </message> <message> <source>Bytes:</source> <translation>Byte:</translation> </message> <message> <source>Amount:</source> <translation>Betrag:</translation> </message> <message> <source>Priority:</source> <translation>Priorität:</translation> </message> <message> <source>medium</source> <translation>mittel</translation> </message> <message> <source>Fee:</source> <translation>Gebühr:</translation> </message> <message> <source>Dust:</source> <translation>Abzug:</translation> </message> <message> <source>no</source> <translation>nein</translation> </message> <message> <source>After Fee:</source> <translation>Nach Gebühren:</translation> </message> <message> <source>Change:</source> <translation>Wechselgeld:</translation> </message> <message> <source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source> <translation>Wenn dies aktivert, und die Wechselgeld-Adresse leer oder ungültig ist, wird das Wechselgeld einer neu erzeugten Adresse gutgeschrieben.</translation> </message> <message> <source>Custom change address</source> <translation>Benutzerdefinierte Wechselgeld-Adresse</translation> </message> <message> <source>Split UTXO</source> <translation>UTXO aufteilen</translation> </message> <message> <source># of outputs</source> <translation>Anzahl outputs</translation> </message> <message> <source>UTXO Size:</source> <translation>UTXO Größe:</translation> </message> <message> <source>0 UWM</source> <translation>0 UWM</translation> </message> <message> <source>Transaction Fee:</source> <translation>Transaktionsgebühr:</translation> </message> <message> <source>Choose...</source> <translation>Auswählen...</translation> </message> <message> <source>collapse fee-settings</source> <translation>Gebühreneinstellungen reduzieren</translation> </message> <message> <source>Minimize</source> <translation>Minimieren</translation> </message> <message> <source>per kilobyte</source> <translation>pro Kilobyte</translation> </message> <message> <source>total at least</source> 
<translation>Gesamt mindestens</translation> </message> <message> <source>(read the tooltip)</source> <translation>(Kurzinfo lesen)</translation> </message> <message> <source>Custom:</source> <translation>Benutzerdefiniert:</translation> </message> <message> <source>(Smart fee not initialized yet. This usually takes a few blocks...)</source> <translation>("Intelligente" Gebühren sind noch nicht berechnet. Dies dauert normalerweise ein paar Blöcke...)</translation> </message> <message> <source>SwiftX</source> <translation>Swift Transaktion</translation> </message> <message> <source>Confirmation time:</source> <translation>Bestätigungszeit:</translation> </message> <message> <source>Open Coin Control...</source> <translation>Öffne Coin Kontrolle...</translation> </message> <message> <source>Coins automatically selected</source> <translation>Coins automatisch ausgewählt</translation> </message> <message> <source>If the custom fee is set to 1000 uUWMs and the transaction is only 250 bytes, then "per kilobyte" only pays 250 uUWMs in fee,&lt;br /&gt;while "at least" pays 1000 uUWMs. For transactions bigger than a kilobyte both pay by kilobyte.</source> <translation>Wenn die kundenspezifische Gebühr auf 1000 uUWMs eingestellt ist und die Transaktion nur 250 Bytes beträgt, dann zahlt man "pro Kilobyte" nur 250 uUWMs an Gebühren,&lt;br /&gt;während man "mindestens" 1000 uUWMs bezahlt. Für Transaktionen, die größer sind als ein Kilobyte, bezahlen sie per Kilobyte.</translation> </message> <message> <source>If the custom fee is set to 1000 uUWMs and the transaction is only 250 bytes, then "per kilobyte" only pays 250 uUWMs in fee,&lt;br /&gt;while "total at least" pays 1000 uUWMs. For transactions bigger than a kilobyte both pay by kilobyte.</source> <translation>Wenn die kundenspezifische Gebühr auf 1000 uUWMs eingestellt ist und die Transaktion nur 250 Bytes beträgt, dann zahlt man "pro Kilobyte" nur 250 uUWMs an Gebühren,&lt;br /&gt;während man "insgesamt mindestens" 1000 uUWMs bezahlt. 
Für Transaktionen, die größer sind als ein Kilobyte, bezahlen sie per Kilobyte.</translation> </message> <message> <source>Paying only the minimum fee is just fine as long as there is less transaction volume than space in the blocks.&lt;br /&gt;But be aware that this can end up in a never confirming transaction once there is more demand for UnitedWorldMoney transactions than the network can process.</source> <translation>Nur die minimale Gebühr zu zahlen ist in Ordnung, solange es weniger Transaktionsvolumen als Platz in den Blöcken gibt.&lt;br /&gt;Aber seien Sie sich bewusst, dass dies in einer Transaktion enden kann, die niemals bestätigt wird, sobald die Nachfrage nach UnitedWorldMoney-Transaktionen höher ist, als das Netzwerk verarbeiten kann.</translation> </message> <message> <source>normal</source> <translation>normal</translation> </message> <message> <source>fast</source> <translation>schnell</translation> </message> <message> <source>Recommended</source> <translation>Empfohlen</translation> </message> <message> <source>Send as zero-fee transaction if possible</source> <translation>Sende als gebührenfreie Transaktion, wenn möglich</translation> </message> <message> <source>(confirmation may take longer)</source> <translation>(die Bestätigung kann länger dauern)</translation> </message> <message> <source>Confirm the send action</source> <translation>Bestätige die Sendeaktion</translation> </message> <message> <source>S&amp;end</source> <translation>Üb&amp;erweisen</translation> </message> <message> <source>Clear all fields of the form.</source> <translation>Alle Formularfelder zurücksetzen.</translation> </message> <message> <source>Clear &amp;All</source> <translation>&amp;Alles zurücksetzen</translation> </message> <message> <source>Send to multiple recipients at once</source> <translation>An mehrere Empfänger auf einmal überweisen</translation> </message> <message> <source>Add &amp;Recipient</source> <translation>Empfänge&amp;r hinzufügen</translation> </message> <message> <source>Anonymized UWM</source> <translation>Anonymisierte UWM</translation> </message> <message> <source>Balance:</source> <translation>Kontostand:</translation> </message> <message> <source>Copy quantity</source> <translation>Anzahl kopieren</translation> </message> <message> <source>Copy amount</source> <translation>Betrag kopieren</translation> </message> <message> <source>Copy fee</source> <translation>Gebühr kopieren</translation> </message> <message> <source>Copy after fee</source> <translation>Kopiere nach Gebühr</translation> </message> <message> <source>Copy bytes</source> <translation>Kopiere Bytes</translation> </message> <message> <source>Copy priority</source> <translation>Kopiere Priorität</translation> </message> <message> <source>Copy dust</source> <translation>Kopiere Abzug</translation> </message> <message> <source>Copy change</source> <translation>Kopiere Wechselgeld</translation> </message> <message> <source>The split block tool does not work when sending to outside addresses. Try again.</source> <translation>Das Split-Block-Tool funktioniert nicht beim Senden an externe Adressen. Versuchen Sie es erneut.</translation> </message> <message> <source>The split block tool does not work with multiple addresses. Try again.</source> <translation>Das Split-Block-Tool funktioniert nicht mit mehreren Adressen. 
Versuchen Sie es erneut.</translation> </message> <message> <source>Warning: Invalid UnitedWorldMoney address</source> <translation>Warnung: Ungültige UnitedWorldMoney-Adresse</translation> </message> <message> <source>%1 to %2</source> <translation>%1 zu %2</translation> </message> <message> <source>Are you sure you want to send?</source> <translation>Sind Sie sicher, dass Sie senden möchten?</translation> </message> <message> <source>are added as transaction fee</source> <translation>werden als Transaktionsgebühr hinzugefügt</translation> </message> <message> <source>Total Amount = &lt;b&gt;%1&lt;/b&gt;&lt;br /&gt;= %2</source> <translation>Gesamtbetrag = &lt;b&gt;%1&lt;/b&gt;&lt;br /&gt;= %2</translation> </message> <message> <source>Confirm send coins</source> <translation>Sende Coins bestätigen</translation> </message> <message> <source>A fee %1 times higher than %2 per kB is considered an insanely high fee.</source> <translation>Eine Gebühr %1 mal höher als %2 pro kB gilt als wahnsinnig hohe Gebühr.</translation> </message> <message> <source>The recipient address is not valid, please recheck.</source> <translation>Die Adresse des Empfängers ist nicht gültig, bitte erneut prüfen.</translation> </message> <message> <source>using SwiftX</source> <translation>mit SwiftX</translation> </message> <message> <source> split into %1 outputs using the UTXO splitter.</source> <translation>In %1 Ausgänge mit dem UTXO-Splitter aufgeteilt.</translation> </message> <message> <source>&lt;b&gt;(%1 of %2 entries displayed)&lt;/b&gt;</source> <translation>&lt;b&gt;(%1 von %2 Einträgen angezeigt)&lt;/b&gt;</translation> </message> <message> <source>The amount to pay must be larger than 0.</source> <translation>Der zu zahlende Betrag muss größer als 0 sein.</translation> </message> <message> <source>The amount exceeds your balance.</source> <translation>Der Betrag übersteigt Ihr Guthaben.</translation> </message> <message> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Die Gesamtsumme überschreitet Ihr Guthaben, sobald die Transaktionsgebühr von %1 enthalten ist.</translation> </message> <message> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Doppelte Adresse gefunden. Es kann nur einmal pro Sendeoperation an jede Adresse gesendet werden.</translation> </message> <message> <source>Transaction creation failed!</source> <translation>Transaktionserstellung fehlgeschlagen!</translation> </message> <message> <source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Die Transaktion wurde abgelehnt! Dies könnte passieren, wenn einige der Coins in Ihrer Wallet bereits ausgegeben wurden, zum Beispiel wenn Sie eine Kopie der wallet.dat verwendet haben und die Coins in der Kopie ausgegeben wurden, hier aber noch nicht als ausgegeben markiert sind.</translation> </message> <message> <source>Error: The wallet was unlocked only to anonymize coins.</source> <translation>Fehler: Die Wallet wurde nur zum Anonymisieren von Coins freigeschaltet.</translation> </message> <message> <source>Error: The wallet was unlocked only to anonymize coins. Unlock canceled.</source> <translation>Fehler: Die Wallet wurde nur zum Anonymisieren von Coins freigeschaltet. 
Freischaltung aufgehoben.</translation> </message> <message> <source>Pay only the minimum fee of %1</source> <translation>Zahle nur die Mindestgebühr von %1</translation> </message> <message> <source>Warning: Unknown change address</source> <translation>Warnung: Unbekannte Wechselgeld-Adresse </translation> </message> <message> <source>(no label)</source> <translation>(keine Bezeichnung)</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <source>This is a normal payment.</source> <translation>Dies ist eine normale Überweisung.</translation> </message> <message> <source>Pay &amp;To:</source> <translation>E&amp;mpfänger:</translation> </message> <message> <source>The UnitedWorldMoney address to send the payment to</source> <translation>UnitedWorldMoney-Adresse, an die überwiesen werden soll</translation> </message> <message> <source>Choose previously used address</source> <translation>Bereits verwendete Adresse auswählen</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Adresse aus der Zwischenablage einfügen</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>Remove this entry</source> <translation>Diesen Eintrag entfernen</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Bezeichnung:</translation> </message> <message> <source>Enter a label for this address to add it to the list of used addresses</source> <translation>Adressbezeichnung eingeben, um diese in der Liste der bereits verwendeten Adressen zu speichern</translation> </message> <message> <source>A&amp;mount:</source> <translation>Betra&amp;g:</translation> </message> <message> <source>Message:</source> <translation>Nachricht:</translation> </message> <message> <source>A message that was attached to the UnitedWorldMoney: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the UnitedWorldMoney network.</source> <translation>Eine an die UnitedWorldMoney-URI angefügte Nachricht, die zusammen mit der Transaktion gespeichert wird. 
Hinweis: Diese Nachricht wird nicht über das UnitedWorldMoney-Netzwerk gesendet.</translation> </message> <message> <source>This is an unverified payment request.</source> <translation>Dies ist eine unverifizierte Zahlungsanforderung.</translation> </message> <message> <source>Pay To:</source> <translation>Empfänger:</translation> </message> <message> <source>Memo:</source> <translation>Memo:</translation> </message> <message> <source>This is a verified payment request.</source> <translation>Dies ist eine verifizierte Zahlungsanforderung.</translation> </message> <message> <source>Enter a label for this address to add it to your address book</source> <translation>Adressbezeichnung eingeben, diese wird zusammen mit der Adresse dem Adressbuch hinzugefügt</translation> </message> </context> <context> <name>ShutdownWindow</name> <message> <source>UnitedWorldMoney Core is shutting down...</source> <translation>UnitedWorldMoney Core wird beendet...</translation> </message> <message> <source>Do not shut down the computer until this window disappears.</source> <translation>Fahren Sie den Computer nicht herunter, bevor dieses Fenster verschwindet.</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <source>Signatures - Sign / Verify a Message</source> <translation>Signaturen - Signieren / Überprüfen einer Nachricht</translation> </message> <message> <source>&amp;Sign Message</source> <translation>Nachricht &amp;signieren</translation> </message> <message> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Sie können Nachrichten mit Ihren Adressen signieren, um zu beweisen, dass Sie diese besitzen. Seien Sie vorsichtig nichts fragwürdiges zu unterschreiben, da Phishing-Angriffe versuchen können, Sie dazu zu bringen, Ihre Identität zu überschreiben. 
Unterschreiben Sie nur ganz detaillierte Aussagen, denen Sie zustimmen.</translation> </message> <message> <source>The UnitedWorldMoney address to sign the message with</source> <translation>UnitedWorldMoney Adresse mit welcher die Nachricht signiert werden soll</translation> </message> <message> <source>Choose previously used address</source> <translation>Bereits verwendete Adresse auswählen</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Adresse aus der Zwischenablage einfügen</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>Enter the message you want to sign here</source> <translation>Geben Sie hier die Nachricht ein, die Sie digital unterschreiben möchten</translation> </message> <message> <source>Signature</source> <translation>Signatur</translation> </message> <message> <source>Copy the current signature to the system clipboard</source> <translation>Kopiert die aktuelle Signatur in die Zwischenablage</translation> </message> <message> <source>Sign the message to prove you own this UnitedWorldMoney address</source> <translation>Signieren Sie die Nachricht, um zu beweisen, dass Sie diese UnitedWorldMoney-Adresse besitzen</translation> </message> <message> <source>The UnitedWorldMoney address the message was signed with</source> <translation>Die UnitedWorldMoney Adresse mit welcher die Nachricht signiert wurde.</translation> </message> <message> <source>Verify the message to ensure it was signed with the specified UnitedWorldMoney address</source> <translation>Überprüfen Sie die Nachricht, um sicherzustellen, dass sie mit der angegebenen UnitedWorldMoney-Adresse signiert wurde.</translation> </message> <message> <source>Sign &amp;Message</source> <translation>Signiere &amp;Nachricht </translation> </message> <message> <source>Reset all sign message fields</source> <translation>Setze alle signierten Nachrichten Felder zurück</translation> </message> <message> <source>Clear &amp;All</source> <translation>&amp;Alles zurücksetzen</translation> </message> <message> <source>&amp;Verify Message</source> <translation>&amp;Verifiziere Nachricht</translation> </message> <message> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Geben Sie die Signaturadresse, die Nachricht (stellen Sie sicher, dass Sie Zeilenumbrüche, Leerzeichen, Tabs usw. genau kopieren) und die Signatur unten ein, um die Nachricht zu überprüfen. 
Seien Sie vorsichtig, nicht mehr in die Signatur hinein zu interpretieren, als das was in der signierten Nachricht selbst angegeben ist, um zu vermeiden, von einem Mann-in-der-Mitte Angriff getroffen zu werden.</translation> </message> <message> <source>Verify &amp;Message</source> <translation>Verifiziere &amp;Nachricht</translation> </message> <message> <source>Reset all verify message fields</source> <translation>Zurücksetzen aller überprüften Felder.</translation> </message> <message> <source>Click "Sign Message" to generate signature</source> <translation>Klicken Sie auf "signiere Nachricht", um Signatur zu generieren</translation> </message> <message> <source>The entered address is invalid.</source> <translation>Die eingegebene Adresse ist falsch</translation> </message> <message> <source>Please check the address and try again.</source> <translation>Bitte die Adresse prüfen und erneut eingeben</translation> </message> <message> <source>The entered address does not refer to a key.</source> <translation>Die eingegebene Adresse passt zu keinem Schlüssel</translation> </message> <message> <source>Wallet unlock was cancelled.</source> <translation>Entsperrung der Wallet wurde abgebrochen.</translation> </message> <message> <source>Private key for the entered address is not available.</source> <translation>Kein privater Schlüssel für die eingegebene Adresse verfügbar</translation> </message> <message> <source>Message signing failed.</source> <translation>Signierung der Nachricht fehlgeschlagen.</translation> </message> <message> <source>Message signed.</source> <translation>Nachricht signiert.</translation> </message> <message> <source>The signature could not be decoded.</source> <translation>Die Signatur konnte nicht dekodiert werden.</translation> </message> <message> <source>Please check the signature and try again.</source> <translation>Bitte überprüfen Sie die Signatur und versuchen Sie es erneut.</translation> </message> <message> <source>The signature did not match the message digest.</source> <translation>Die Signatur stimmt nicht mit der Nachricht überein.</translation> </message> <message> <source>Message verification failed.</source> <translation>Nachrichtenüberprüfung fehlgeschlagen.</translation> </message> <message> <source>Message verified.</source> <translation>Nachricht überprüft.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <source>UnitedWorldMoney Core</source> <translation>UnitedWorldMoney Core</translation> </message> <message> <source>Version %1</source> <translation>Version %1</translation> </message> <message> <source>The Bitcoin Core developers</source> <translation>Die Bitcoin Core Entwickler</translation> </message> <message> <source>The Dash Core developers</source> <translation>Die Dash Core Entwickler</translation> </message> <message> <source>The UnitedWorldMoney Core developers</source> <translation>Die UnitedWorldMoney Core Entwickler</translation> </message> <message> <source>[testnet]</source> <translation>[Testnetzwerk]</translation> </message> </context> <context> <name>TrafficGraphWidget</name> <message> <source>KB/s</source> <translation>KB/s</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <source>Open until %1</source> <translation>Offen bis %1</translation> </message> <message> <source>conflicted</source> <translation>in Konflikt stehend</translation> </message> <message> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> 
<source>%1/unconfirmed</source> <translation>%1/unbestätigt</translation> </message> <message> <source>%1 confirmations</source> <translation>%1 Bestätigungen</translation> </message> <message> <source>%1/offline (verified via SwiftX)</source> <translation>%1/offline (verifiziert via Swift Transaktion)</translation> </message> <message> <source>%1/confirmed (verified via SwiftX)</source> <translation>%1/bestätigt (verifiziert via Swift Transaktion)</translation> </message> <message> <source>%1 confirmations (verified via SwiftX)</source> <translation>%1 Bestätigungen (verifiziert via Swift Transaktion)</translation> </message> <message> <source>%1/offline (SwiftX verification in progress - %2 of %3 signatures)</source> <translation>%1/offline (Swift Transaktion wird verifiziert - %2 aus %3 Signaturen)</translation> </message> <message> <source>%1/confirmed (SwiftX verification in progress - %2 of %3 signatures )</source> <translation>%1/bestätigt (Swift Transaktion wird verifiziert - %2 von %3 Signaturen)</translation> </message> <message> <source>%1 confirmations (SwiftX verification in progress - %2 of %3 signatures)</source> <translation>%1 Bestätigungen (Swift Transaktion wird verifiziert - %2 von %3 Signaturen)</translation> </message> <message> <source>%1/offline (SwiftX verification failed)</source> <translation>%1/offline (Swift Transaktion konnte nicht verifiziert werden)</translation> </message> <message> <source>%1/confirmed (SwiftX verification failed)</source> <translation>%1/bestätigt (Swift Transaktion konnte nicht verifiziert werden)</translation> </message> <message> <source>Status</source> <translation>Status</translation> </message> <message> <source>, has not been successfully broadcast yet</source> <translation>, wurde noch nicht erfolgreich übertragen</translation> </message> <message> <source>Date</source> <translation>Datum</translation> </message> <message> <source>Source</source> <translation>Quelle</translation> </message> <message> <source>Generated</source> <translation>Erzeugt</translation> </message> <message> <source>From</source> <translation>Von</translation> </message> <message> <source>unknown</source> <translation>unbekannt</translation> </message> <message> <source>To</source> <translation>An</translation> </message> <message> <source>own address</source> <translation>eigene Adresse</translation> </message> <message> <source>watch-only</source> <translation>nur beobachtet</translation> </message> <message> <source>label</source> <translation>Bezeichnung</translation> </message> <message> <source>Credit</source> <translation>Gutschrift</translation> </message> <message> <source>not accepted</source> <translation>nicht angenommen</translation> </message> <message> <source>Debit</source> <translation>Belastung</translation> </message> <message> <source>Total debit</source> <translation>Gesamtbelastung</translation> </message> <message> <source>Total credit</source> <translation>Gesamtgutschrift</translation> </message> <message> <source>Transaction fee</source> <translation>Transaktionsgebühr</translation> </message> <message> <source>Net amount</source> <translation>Nettobetrag</translation> </message> <message> <source>Message</source> <translation>Nachricht</translation> </message> <message> <source>Comment</source> <translation>Kommentar</translation> </message> <message> <source>Transaction ID</source> <translation>Transaktions-ID</translation> </message> <message> <source>Output index</source> <translation>Output Index</translation> </message> 
<message> <source>Merchant</source> <translation>Händler</translation> </message> <message> <source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Erzeugte UWMs müssen %1 Blöcke lang reifen, bevor sie ausgegeben werden können. Als Sie diesen Block erzeugten, wurde er an das Netzwerk übertragen, um ihn der Blockkette hinzuzufügen. Falls dies fehlschlägt wird der Status in "nicht angenommen" geändert und Sie werden keine UWMs gutgeschrieben bekommen. Das kann gelegentlich passieren, wenn ein anderer Knoten einen Block fast zeitgleich erzeugt.</translation> </message> <message> <source>Debug information</source> <translation>Debuginformationen</translation> </message> <message> <source>Transaction</source> <translation>Transaktion</translation> </message> <message> <source>Inputs</source> <translation>Eingänge</translation> </message> <message> <source>Amount</source> <translation>Betrag</translation> </message> <message> <source>true</source> <translation>wahr</translation> </message> <message> <source>false</source> <translation>falsch</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <source>Transaction details</source> <translation>Transaktionsdetails</translation> </message> <message> <source>This pane shows a detailed description of the transaction</source> <translation>Dieser Bereich zeigt eine detaillierte Beschreibung der Transaktion an</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <source>Date</source> <translation>Datum</translation> </message> <message> <source>Type</source> <translation>Art</translation> </message> <message> <source>Address</source> <translation>Adresse</translation> </message> <message> <source>Open until %1</source> <translation>Offen bis %1</translation> </message> <message> <source>Offline</source> <translation>Offline</translation> </message> <message> <source>Unconfirmed</source> <translation>Unbestätigt</translation> </message> <message> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation>Wird bestätigt (%1 von %2 empfohlenen Bestätigungen)</translation> </message> <message> <source>Confirmed (%1 confirmations)</source> <translation>Bestätigt (%1 Bestätigungen)</translation> </message> <message> <source>Conflicted</source> <translation>Steht in Konflikt</translation> </message> <message> <source>Immature (%1 confirmations, will be available after %2)</source> <translation>Unreif (%1 Bestätigungen, wird verfügbar sein nach %2)</translation> </message> <message> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Dieser Block wurde von keinem anderen Knoten empfangen und wird wahrscheinlich nicht angenommen werden!</translation> </message> <message> <source>Received with</source> <translation>Empfangen über</translation> </message> <message> <source>Masternode Reward</source> <translation>Masternode Vergütung</translation> </message> <message> <source>Received from</source> <translation>Empfangen von</translation> </message> <message> <source>Received via Obfuscation</source> <translation>über/durch Verschleierung empfangen</translation> </message> <message> 
<source>UWM Stake</source> <translation>UWM Stake</translation> </message> <message> <source>zUWM Stake</source> <translation>zUWM Stake</translation> </message> <message> <source>Obfuscation Denominate</source> <translation>Stückelung der Verschleierung</translation> </message> <message> <source>Obfuscation Collateral Payment</source> <translation>Sicherheitspfandzahlung für Verschleierung</translation> </message> <message> <source>Obfuscation Make Collateral Inputs</source> <translation>Verschleierung Erzeugung des Sicherheitspfand</translation> </message> <message> <source>Obfuscation Create Denominations</source> <translation>Verschleierung Erzeugung der Stückelung</translation> </message> <message> <source>Converted UWM to zUWM</source> <translation>Umgewandeltes UWM zu zUWM</translation> </message> <message> <source>Spent zUWM</source> <translation>Ausgegebene zUWM</translation> </message> <message> <source>Received UWM from zUWM</source> <translation>Empfangene UWM von zUWM</translation> </message> <message> <source>Minted Change as zUWM from zUWM Spend</source> <translation>Geprägtes zUWM Wechselgeld aus zUWM Überweisung</translation> </message> <message> <source>Converted zUWM to UWM</source> <translation>Umgewandeltes zUWM zu UWM</translation> </message> <message> <source>Anonymous (zUWM Transaction)</source> <translation>Anonym (zUWM Transaktion)</translation> </message> <message> <source>Anonymous (zUWM Stake)</source> <translation>Anonym (zUWM Stake)</translation> </message> <message> <source>Sent to</source> <translation>Überwiesen an</translation> </message> <message> <source>Orphan Block - Generated but not accepted. This does not impact your holdings.</source> <translation>Verwaister Block - Generiert, aber nicht akzeptiert. Dies wirkt sich nicht auf Ihre Bestände aus.</translation> </message> <message> <source>Payment to yourself</source> <translation>Eigenüberweisung</translation> </message> <message> <source>Mined</source> <translation>Erarbeitet</translation> </message> <message> <source>Obfuscated</source> <translation>Verschleiert</translation> </message> <message> <source>watch-only</source> <translation>nur beobachtet</translation> </message> <message> <source>(n/a)</source> <translation>(k.A.)</translation> </message> <message> <source>Transaction status. 
Hover over this field to show number of confirmations.</source> <translation>Transaktionsstatus, fahren Sie mit der Maus über dieses Feld, um die Anzahl der Bestätigungen zu sehen.</translation> </message> <message> <source>Date and time that the transaction was received.</source> <translation>Datum und Uhrzeit zu der die Transaktion empfangen wurde.</translation> </message> <message> <source>Type of transaction.</source> <translation>Art der Transaktion.</translation> </message> <message> <source>Whether or not a watch-only address is involved in this transaction.</source> <translation>Zeigt ob eine nur beobachtete Adresse in dieser Transaktion beteiligt ist.</translation> </message> <message> <source>Destination address of transaction.</source> <translation>Zieladresse der Transaktion</translation> </message> <message> <source>Amount removed from or added to balance.</source> <translation>Der Betrag, der dem Kontostand abgezogen oder hinzugefügt wurde.</translation> </message> </context> <context> <name>TransactionView</name> <message> <source>All</source> <translation>Alle</translation> </message> <message> <source>Today</source> <translation>Heute</translation> </message> <message> <source>This week</source> <translation>Diese Woche</translation> </message> <message> <source>This month</source> <translation>Diesen Monat</translation> </message> <message> <source>Last month</source> <translation>Letzten Monat</translation> </message> <message> <source>This year</source> <translation>Dieses Jahr</translation> </message> <message> <source>Range...</source> <translation>Zeitraum...</translation> </message> <message> <source>Most Common</source> <translation>Gängigste</translation> </message> <message> <source>Received with</source> <translation>Empfangen über</translation> </message> <message> <source>Sent to</source> <translation>Überwiesen an</translation> </message> <message> <source>To yourself</source> <translation>Eigenüberweisung</translation> </message> <message> <source>Mined</source> <translation>Erarbeitet</translation> </message> <message> <source>Minted</source> <translation>Erzeugt</translation> </message> <message> <source>Masternode Reward</source> <translation>Masternode Vergütung</translation> </message> <message> <source>Zerocoin Mint</source> <translation>Zerocoin Prägung</translation> </message> <message> <source>Zerocoin Spend</source> <translation>Zerocoin Ausgabe</translation> </message> <message> <source>Zerocoin Spend to Self</source> <translation>Zerocoin Ausgabe an sich Selbst</translation> </message> <message> <source>Other</source> <translation>Andere</translation> </message> <message> <source>Enter address or label to search</source> <translation>Zu suchende Adresse oder Bezeichnung eingeben</translation> </message> <message> <source>Min amount</source> <translation>Minimaler Betrag</translation> </message> <message> <source>Copy address</source> <translation>Adresse kopieren</translation> </message> <message> <source>Copy label</source> <translation>Bezeichnung kopieren</translation> </message> <message> <source>Copy amount</source> <translation>Betrag kopieren</translation> </message> <message> <source>Copy transaction ID</source> <translation>Transaktions-ID kopieren</translation> </message> <message> <source>Edit label</source> <translation>Bezeichnung bearbeiten</translation> </message> <message> <source>Show transaction details</source> <translation>Transaktionsdetails anzeigen</translation> </message> <message> <source>Export Transaction 
History</source> <translation>Transaktionsverlauf exportieren</translation> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Kommagetrennte Datei (*.csv)</translation> </message> <message> <source>Confirmed</source> <translation>Bestätigt</translation> </message> <message> <source>Watch-only</source> <translation>Nur beobachtet</translation> </message> <message> <source>Date</source> <translation>Datum</translation> </message> <message> <source>Type</source> <translation>Art</translation> </message> <message> <source>Label</source> <translation>Bezeichnung</translation> </message> <message> <source>Address</source> <translation>Adresse</translation> </message> <message> <source>ID</source> <translation>ID</translation> </message> <message> <source>Exporting Failed</source> <translation>Exportieren fehlgeschlagen</translation> </message> <message> <source>There was an error trying to save the transaction history to %1.</source> <translation>Beim Speichern des Transaktionsverlaufs nach %1 ist ein Fehler aufgetreten.</translation> </message> <message> <source>Exporting Successful</source> <translation>Exportieren erfolgreich</translation> </message> <message> <source>Received UWM from zUWM</source> <translation>Empfangene UWM von zUWM</translation> </message> <message> <source>Zerocoin Spend, Change in zUWM</source> <translation>Zerocoin Überweisung, Wechselgeld in zUWM</translation> </message> <message> <source>The transaction history was successfully saved to %1.</source> <translation>Speichern des Transaktionsverlaufs nach %1 war erfolgreich.</translation> </message> <message> <source>Range:</source> <translation>Zeitraum:</translation> </message> <message> <source>to</source> <translation>bis</translation> </message> </context> <context> <name>UnitDisplayStatusBarControl</name> <message> <source>Unit to show amounts in. Click to select another unit.</source> <translation>Angezeigte Einheit. Klicken Sie, um eine andere Einheit zu wählen.</translation> </message> </context> <context> <name>WalletFrame</name> <message> <source>No wallet has been loaded.</source> <translation>Es wurde keine Wallet geladen.</translation> </message> </context> <context> <name>WalletModel</name> <message> <source>Send Coins</source> <translation>UWMs überweisen</translation> </message> <message> <source>SwiftX doesn't support sending values that high yet. Transactions are currently limited to %1 UWM.</source> <translation>Swift Transaktionen unterstützen das senden solch hoher Beträge noch nicht. 
Transaktionen sind momentan auf %1 UWM begrenzt.</translation> </message> </context> <context> <name>WalletView</name> <message> <source>HISTORY</source> <translation>VERLAUF</translation> </message> <message> <source>&amp;Export</source> <translation>&amp;Exportieren</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Daten aus der aktuellen Ansicht in eine Datei exportieren</translation> </message> <message> <source>Selected amount:</source> <translation>Ausgewählter Betrag:</translation> </message> <message> <source>Backup Wallet</source> <translation>Wallet sichern</translation> </message> <message> <source>Wallet Data (*.dat)</source> <translation>Wallet-Daten (*.dat)</translation> </message> </context> <context> <name>ZPivControlDialog</name> <message> <source>Select zUWM to Spend</source> <translation>Wählen Sie die zUWM aus, die Sie ausgeben möchten</translation> </message> <message> <source>Quantity</source> <translation>Menge</translation> </message> <message> <source>0</source> <translation>0</translation> </message> <message> <source>zUWM</source> <translation>zUWM</translation> </message> <message> <source>Select/Deselect All</source> <translation>Alle Aus-/Abwählen</translation> </message> </context> <context> <name>unitedworldmoney-core</name> <message> <source>(1 = keep tx meta data e.g. account owner and payment request information, 2 = drop tx meta data)</source> <translation>(1 = behalte Tx-Meta-Daten z.B. Betrag, Besitzer und Zahlungsanforderungsinformationen, 2 = verwerfe Tx-Meta-Daten)</translation> </message> <message> <source>Allow JSON-RPC connections from specified source. Valid for &lt;ip&gt; are a single IP (e.g. 1.2.3.4), a network/netmask (e.g. 1.2.3.4/255.255.255.0) or a network/CIDR (e.g. 1.2.3.4/24). This option can be specified multiple times</source> <translation>Erlaubt JSON-RPC Verbindungen von der angegebenen Quelle. Valide für &lt;ip&gt; ist eine einzelne IP (z.B. 1.2.3.4), ein Netzwerk/Netzwerkmaske (z.B. 1.2.3.4/255.255.255.0) oder ein Netzwerk/CIDR (z.B. 1.2.3.4/24). Diese Option kann mehrfach angegeben werden</translation> </message> <message> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Binde an die angegebene Adresse und höre immer auf ihr. Benutze [host]:port Notation für IPv6</translation> </message> <message> <source>Bind to given address and whitelist peers connecting to it. Use [host]:port notation for IPv6</source> <translation>Binde an die angegebene Adresse und setze Verbindungen zu ihr auf die Whitelist. Benutze [host]:port Notation für IPv6</translation> </message> <message> <source>Bind to given address to listen for JSON-RPC connections. Use [host]:port notation for IPv6. This option can be specified multiple times (default: bind to all interfaces)</source> <translation>Binde an die angegebene Adresse und höre auf JSON-RPC verbindungen. Benutze [host]:port Notation für IPv6. Diese Option kann mehrere Male aufgerufen werden (Standard: Binde an alle Schnittstellen)</translation> </message> <message> <source>Calculated accumulator checkpoint is not what is recorded by block index</source> <translation>Der berechnete Akkumulator-Checkpoint stimmt nicht mit dem vom Blockindex aufgezeichneten überein</translation> </message> <message> <source>Cannot obtain a lock on data directory %s. UnitedWorldMoney Core is probably already running.</source> <translation>Kann keine Sperre für das Datenverzeichnis %s erhalten. 
UnitedWorldMoney Core läuft wahrscheinlich bereits.</translation> </message> <message> <source>Change automatic finalized budget voting behavior. mode=auto: Vote for only exact finalized budget match to my generated budget. (string, default: auto)</source> <translation>Ändere das automatische, finale Budget Stimmverhalten. Modus=auto: Stimme nur für exakt finale Budgets, welche meinem generierten Budget entsprechen. (string, Standard:auto)</translation> </message> <message> <source>Continuously rate-limit free transactions to &lt;n&gt;*1000 bytes per minute (default:%u)</source> <translation>Begrenze freie Transaktionen Permanent auf &lt;n&gt;*1000 Bytes pro Minute (Standard:%u)</translation> </message> <message> <source>Create new files with system default permissions, instead of umask 077 (only effective with disabled wallet functionality)</source> <translation>Erstelle neue Dateien mit Systemstandardberechtigungen anstelle von umask 077 (nur wirksam mit deaktivierter Wallet-Funktionalität)</translation> </message> <message> <source>Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup</source> <translation>Lösche alle Wallet-Transaktionen und stelle nur diese Teile der Blockchain beim Start durch -neuscan wieder her</translation> </message> <message> <source>Delete all zerocoin spends and mints that have been recorded to the blockchain database and reindex them (0-1, default: %u)</source> <translation>Lösche alle Zerocoin Überweisungen und Prägungen die in der Blockchain-Datenbank gespeichert wurden und reindiziere diese (0-1, Standard: %u)</translation> </message> <message> <source>Distributed under the MIT software license, see the accompanying file COPYING or &lt;http://www.opensource.org/licenses/mit-license.php&gt;.</source> <translation>Veröffentlicht unter der MIT-Software-Lizenz, siehe die beigelegte Datei COPYING oder &lt;http://www.opensource.org/licenses/mit-license.php&gt;.</translation> </message> <message> <source>Enable automatic wallet backups triggered after each zUWM minting (0-1, default: %u)</source> <translation>Aktiviere nach jeder zUWM-Prägung ausgelöste automatische Wallet-Backups (0-1, Standard: %u)</translation> </message> <message> <source>Enable or disable staking functionality for UWM inputs (0-1, default: %u)</source> <translation>Aktiviere oder deaktiviere Staking-Funktionalität für UWM Eingänge (0-1, default: %u)</translation> </message> <message> <source>Enable or disable staking functionality for zUWM inputs (0-1, default: %u)</source> <translation>Aktiviere oder deaktiviere Staking-Funktionalität für zUWM Eingänge (0-1, default: %u)</translation> </message> <message> <source>Enable spork administration functionality with the appropriate private key.</source> <translation>Aktiviere die Funktionalität der Spork-Administration mit dem entsprechenden privaten Schlüssel.</translation> </message> <message> <source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source> <translation>Gebe den Regressions-Testmodus ein, der eine spezielle Kette verwendet, in der Blöcke sofort gelöst werden können.</translation> </message> <message> <source>Error: Listening for incoming connections failed (listen returned error %s)</source> <translation>Fehler: Überwachung von eingehenden Verbindungen fehlgeschlagen (listen return error %s)</translation> </message> <message> <source>Error: The transaction is larger than the maximum allowed transaction size!</source> 
<translation>Fehler: Die Transaktion ist größer als die maximal zulässige Transaktionsgröße!</translation> </message> <message> <source>Error: Unsupported argument -socks found. Setting SOCKS version isn't possible anymore, only SOCKS5 proxies are supported.</source> <translation>Fehler: Nicht unterstütztes Argument -socks gefunden. Festlegen der SOCKS Version ist nicht mehr möglich, es werden nur noch SOCKS5-Proxys unterstützt.</translation> </message> <message> <source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source> <translation>Befehl ausführen, wenn eine relevante Warnung empfangen wird oder wir einen wirklich langen Fork sehen (%s in cmd wird durch Benachrichtigungen ersetzt)</translation> </message> <message> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Befehl ausführen, wenn eine Wallet-Transaktion geändert wird (%s in cmd wird durch TxID ersetzt)</translation> </message> <message> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Befehl ausführen, wenn sich der beste Block ändert (%s in cmd wird durch Blockhash ersetzt)</translation> </message> <message> <source>Fees (in UWM/Kb) smaller than this are considered zero fee for relaying (default: %s)</source> <translation>Gebühren (in UWM/kB) kleiner als diese gelten für die Weiterleitung als Null-Gebühr (default: %s)</translation> </message> <message> <source>Fees (in UWM/Kb) smaller than this are considered zero fee for transaction creation (default: %s)</source> <translation>Gebühren (in UWM/kB) kleiner als diese werden als Null-Gebühr für die Transaktionserstellung betrachtet (default: %s)</translation> </message> <message> <source>Flush database activity from memory pool to disk log every &lt;n&gt; megabytes (default: %u)</source> <translation>Flusht die Datenbankaktivität vom Speicherpool auf das Festplattenprotokoll alle &lt;n&gt; Megabytes (default: %u)</translation> </message> <message> <source>If paytxfee is not set, include enough fee so transactions begin confirmation on average within n blocks (default: %u)</source> <translation>Wenn paytxfee nicht gesetzt ist, gebe genügend Gebühr an, sodass die Transaktionsbestätigungen im Durchschnitt innerhalb von n Blöcken beginnen (default: %u)</translation> </message> <message> <source>In this mode -genproclimit controls how many blocks are generated immediately.</source> <translation>In diesem Modus -genproclimit steuern, wie viele Blöcke sofort erzeugt werden.</translation> </message> <message> <source>Insufficient or insufficient confirmed funds, you might need to wait a few minutes and try again.</source> <translation>Unzureichend oder unzureichend bestätigtes Guthaben, warten Sie ein paar Minuten und probieren es dann nochmal.</translation> </message> <message> <source>Invalid amount for -maxtxfee=&lt;amount&gt;: '%s' (must be at least the minrelay fee of %s to prevent stuck transactions)</source> <translation>Ungültiger Betrag für -maxtxfee=&lt;amount&gt;: '%s' (muss mindestens die minrelay Gebühr von %s sein, um Transaktionsstau zu vermeiden)</translation> </message> <message> <source>Keep the specified amount available for spending at all times (default: 0)</source> <translation>Behalte jeder Zeit den angegebenen Betrag für die Ausgaben (default: 0)</translation> </message> <message> <source>Log transaction priority and fee per kB when mining blocks (default: %u)</source> 
<translation>Priorität der Protokolltransaktion und Gebühr pro kB wenn Blöcke geschürft werden (default: %u)</translation> </message> <message> <source>Maintain a full transaction index, used by the getrawtransaction rpc call (default: %u)</source> <translation>Pflege einen vollständigen Transaktionsindex, der von getrawtransaction rpc call verwendet wird. (default: %u)</translation> </message> <message> <source>Maximum size of data in data carrier transactions we relay and mine (default: %u)</source> <translation>Maximale Größe der Daten in Datenträgertransaktionen, die wir weiterleiten und abbilden (default: %u)</translation> </message> <message> <source>Maximum total fees to use in a single wallet transaction, setting too low may abort large transactions (default: %s)</source> <translation>Maximale Gesamtgebühren für die Verwendung in einer einzigen Wallet-Transaktion. Zu niedrige Einstellung kann große Transaktionen abbrechen (default: %s)</translation> </message> <message> <source>Number of seconds to keep misbehaving peers from reconnecting (default: %u)</source> <translation>Sekundenanzahl, um fehlerhafte Peers von der Wiederverbindung abzuhalten (default: %u)</translation> </message> <message> <source>Obfuscation uses exact denominated amounts to send funds, you might simply need to anonymize some more coins.</source> <translation>Verschleierung benötigt exakt gestückelte Mengen um Gelder zu versenden, ggf. müssen Sie einfach mehr Münzen anonymisieren.</translation> </message> <message> <source>Output debugging information (default: %u, supplying &lt;category&gt; is optional)</source> <translation>Debug-Informationen ausgeben (Standard: %u, die Bereitstellung von &lt;category&gt; ist optional)</translation> </message> <message> <source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source> <translation>Abfrage von Gegenstellen-Adressen per DNS-Lookup, falls wenige Adressen verfügbar sind (Standard: 1, ausser -connect ist aktiviert)</translation> </message> <message> <source>Randomize credentials for every proxy connection. This enables Tor stream isolation (default: %u)</source> <translation>Randomisiere Zugangsdaten für jede einzelne Proxy-Verbindung. 
Dies ermöglicht die Stream-Isolation im Tor Netzwerk (Standard: %u)</translation> </message> <message> <source>Require high priority for relaying free or low-fee transactions (default:%u)</source> <translation>Benötige hohe Priorität um kostenlose Transaktionen oder Transaktionen mit niedriger Gebühr weiterzuleiten (Standard: %u)</translation> </message> <message> <source>Send trace/debug info to console instead of debug.log file (default: %u)</source> <translation>Sende die Debug-Info an die Konsole, anstatt in die debug.log-Datei (Standard: %u)</translation> </message> <message> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source> <translation>Maximale Größe von Transaktionen mit hoher Priorität/niedrigen Gebühren festlegen, in Bytes (Standard: %d)</translation> </message> <message> <source>Set the number of script verification threads (%u to %d, 0 = auto, &lt;0 = leave that many cores free, default: %d)</source> <translation>Anzahl der Threads festlegen, die für die Skript Verifikation verwendet werden (%u to %d, 0 = auto, &lt;0 = diese Anzahl an Kernen ungenutzt lassen, Standard: %d)</translation> </message> <message> <source>Set the number of threads for coin generation if enabled (-1 = all cores, default: %d)</source> <translation>Wenn aktiviert, kann die Anzahl der CPU-Threads für die Coin-Erzeugung festgelegt werden (-1 = alle Kerne, Standard: %d)</translation> </message> <message> <source>Show N confirmations for a successfully locked transaction (0-9999, default: %u)</source> <translation>Zeige N Bestätigungen für eine erfolgreich gesperrte Transaktion (0-9999, Standard: %u)</translation> </message> <message> <source>Support filtering of blocks and transaction with bloom filters (default: %u)</source> <translation>Unterstütze Filtern von Blöcken und Transaktionen mittels Bloom-Filtern (Standard: %u)</translation> </message> <message> <source>This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit &lt;https://www.openssl.org/&gt; and cryptographic software written by Eric Young and UPnP software written by Thomas Bernard.</source> <translation>Dieses Produkt beinhaltet Software, die vom OpenSSL Projekt für die Nutzung im OpenSSL Toolkit &lt;https://www.openssl.org/&gt; entwickelt wurde. Desweiteren kryptografische Software , die von Eric Young, und UPnP Software, die von Thomas Bernard, geschrieben wurde.</translation> </message> <message> <source>Unable to bind to %s on this computer. UnitedWorldMoney Core is probably already running.</source> <translation>Fehler: Port %s ist bereits belegt! 
Läuft bereits eine andere UnitedWorldMoney Core Wallet ?</translation> </message> <message> <source>Unable to locate enough Obfuscation denominated funds for this transaction.</source> <translation>Nicht genügend gestückeltes, verschleiertes Guthaben für diese Transaktion gefunden.</translation> </message> <message> <source>Unable to locate enough Obfuscation non-denominated funds for this transaction that are not equal 10000 UWM.</source> <translation>Nicht genügend verschleiertes, nicht gestückeltes Guthaben für diese Transaktion gefunden, die nicht gleich 10000 UWM entsprechen.</translation> </message> <message> <source>Unable to locate enough funds for this transaction that are not equal 10000 UWM.</source> <translation>Nicht genügend Guthaben für diese Transaktion gefunden, die nicht gleich 10000 UWM entsprechen.</translation> </message> <message> <source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: %s)</source> <translation>Verwende seperate SOCKS5-Proxys um Gegenstellen über Tor-Hidden-Services zu erreichen (Standard: %s)</translation> </message> <message> <source>Warning: -maxtxfee is set very high! Fees this large could be paid on a single transaction.</source> <translation>Warnung: -maxtxfee ist sehr hoch eingestellt! Gebühren in dieser Höhe können in einer einzigen Transaktion abgebucht werden.</translation> </message> <message> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Warnung: -paytxfee ist sehr hoch eingestellt! Diese Transaktionsgebühr werden Ihnen abgebucht, falls Sie die Transaktion überweisen.</translation> </message> <message> <source>Warning: Please check that your computer's date and time are correct! If your clock is wrong UnitedWorldMoney Core will not work properly.</source> <translation>Warnung: Bitte stellen Sie sicher, das vom Computer verwendete Zeit und Datumangaben korrekt sind! Wenn ihr System falsche Zeitangaben nutzt, wird UnitedWorldMoney Core nicht korrekt funktionieren.</translation> </message> <message> <source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source> <translation>Warnung: Unstimmigkeiten im Netzwerk! Einige Miner scheinen Probleme zu haben.</translation> </message> <message> <source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Warnung: Es scheint einige Unstimmigkeiten mit unseren Gegenstellen zu geben. Eventuell müssen Sie oder andere Nodes Upgrades durchführen.</translation> </message> <message> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Warnung: Fehler beim Lesen der wallet.dat! Alle Schlüssel wurden korrekt gelesen, aber Transaktionsdaten oder Adressbuch-Einträge könnten fehlen oder inkorrekt sein.</translation> </message> <message> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Warnung: wallet.dat fehlerhaft, Daten wurden gerettet! 
Die originale wallet.dat wurde als wallet.{Zeitstempel}.bak in %s gesichert; falls ihr Kontostand oder ihre Transaktionen fehlerhaft sind, sollten Sie ein Backup zur Wiederherstellung nutzen.</translation> </message> <message> <source>Whitelist peers connecting from the given netmask or IP address. Can be specified multiple times.</source> <translation>Setze Verbindungen aus der angegebenen Netzmaske oder IP-Adresse auf die Whitelist. Dies kann mehrere Male angegeben werden.</translation> </message> <message> <source>Whitelisted peers cannot be DoS banned and their transactions are always relayed, even if they are already in the mempool, useful e.g. for a gateway</source> <translation>Whitelist-Peers können nicht wegen DoS gesperrt werden und ihre Transaktionen werden immer weitergeleitet, auch wenn sie bereits im Mempool sind, nützlich z.B. für ein Gateway</translation> </message> <message> <source>You must specify a masternodeprivkey in the configuration. Please see documentation for help.</source> <translation>Sie müssen einen privaten Masternode-Schlüssel in der Konfiguration festlegen. Bitte nutzen Sie die Dokumentation zwecks Hilfe.</translation> </message> <message> <source>(36674 could be used only on mainnet)</source> <translation>(36674 kann nur im Mainnet verwendet werden)</translation> </message> <message> <source>(default: %s)</source> <translation>(default: %s)</translation> </message> <message> <source>(default: 1)</source> <translation>(default: 1)</translation> </message> <message> <source>(must be 36674 for mainnet)</source> <translation>(muss im Mainnet 36674 sein)</translation> </message> <message> <source>Accept command line and JSON-RPC commands</source> <translation>Akzeptiere Kommandozeilen- und JSON-RPC-Eingaben</translation> </message> <message> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Akzeptiere Verbindungen von außen (Standard: 1, falls -proxy oder -connect nicht genutzt wird)</translation> </message> <message> <source>Accept public REST requests (default: %u)</source> <translation>Akzeptiere öffentliche REST Anfragen (Standard: %u)</translation> </message> <message> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Füge eine Node zum Verbinden hinzu und versuche, die Verbindung offen zu halten</translation> </message> <message> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Erlaube DNS lookups für -addnode, -seednode und -connect</translation> </message> <message> <source>Always query for peer addresses via DNS lookup (default: %u)</source> <translation>Gegenstellen-Adressen immer per DNS lookup anfragen (Standard: %u)</translation> </message> <message> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Versuche private Schlüssel aus fehlerhafter wallet.dat wiederherzustellen.</translation> </message> <message> <source>Automatically create Tor hidden service (default: %d)</source> <translation>Tor-Hidden-Service automatisch erzeugen (Standard: %d)</translation> </message> <message> <source>Block creation options:</source> <translation>Block-Erzeugungs-Optionen:</translation> </message> <message> <source>Calculating missing accumulators...</source> <translation>Fehlende Akkumulatoren berechnen...</translation> </message> <message> <source>Cannot downgrade wallet</source> <translation>Wallet-Downgrade nicht möglich</translation> </message> <message> <source>Cannot resolve -bind address: '%s'</source> <translation>Kann 
-bind Adresse nicht auflösen: '%s'</translation> </message> <message> <source>Cannot resolve -externalip address: '%s'</source> <translation>Kann -externalip Adresse nicht auflösen: '%s'</translation> </message> <message> <source>Cannot resolve -whitebind address: '%s'</source> <translation>Kann -whitebind Adresse nicht auflösen: '%s'</translation> </message> <message> <source>Cannot write default address</source> <translation>Standardadresse kann nicht geschrieben werden</translation> </message> <message> <source>Connect only to the specified node(s)</source> <translation>Nur mit den aufgeführten Node(s) verbinden</translation> </message> <message> <source>Connect through SOCKS5 proxy</source> <translation>Verbindung per SOCKS5 proxy</translation> </message> <message> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Verbinde zu einer Node um Gegenstellen-Adressen abzurufen und trenne die Verbindung.</translation> </message> <message> <source>Connection options:</source> <translation>Verbindungsoptionen:</translation> </message> <message> <source>Copyright (C) 2009-%i The Bitcoin Core Developers</source> <translation>Copyright (C) 2009-%i The Bitcoin Core Developers</translation> </message> <message> <source>Copyright (C) 2014-%i The Dash Core Developers</source> <translation>Copyright (C) 2014-%i The Dash Core Developers</translation> </message> <message> <source>Copyright (C) 2015-%i The PIVX Core Developers</source> <translation>Copyright (C) 2015-%i The PIVX Core Developers</translation> </message> <message> <source>Corrupted block database detected</source> <translation>Beschädigte Block-Datenbank gefunden</translation> </message> <message> <source>Could not parse masternode.conf</source> <translation>Kann masternode.conf nicht parsen</translation> </message> <message> <source>Debugging/Testing options:</source> <translation>Debugging- / Testoptionen</translation> </message> <message> <source>Delete blockchain folders and resync from scratch</source> <translation>Löschen Sie Blockchain-Ordner und resync von Grund auf neu</translation> </message> <message> <source>Disable OS notifications for incoming transactions (default: %u)</source> <translation>Benachrichtigungen über eingehende Transaktionen im Betriebsystem ausschalten (Standard: %u)</translation> </message> <message> <source>Disable safemode, override a real safe mode event (default: %u)</source> <translation>Deaktivieren Sie den Safemodus, überschreiben Sie ein echtes Safe-Mode-Ereignis (Standard: %u)</translation> </message> <message> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Eigene IP-Adresse erkennen (Standard: 1 beim Hören und keine -Ausnahme)</translation> </message> <message> <source>Do not load the wallet and disable wallet RPC calls</source> <translation>Wallet nicht laden und Wallet Fernaufruf/RPC unterbinden</translation> </message> <message> <source>Do you want to rebuild the block database now?</source> <translation>Möchten Sie die Blockdatenbank neu aufbauen?</translation> </message> <message> <source>Done loading</source> <translation>Geladen</translation> </message> <message> <source>Enable automatic Zerocoin minting (0-1, default: %u)</source> <translation>Automatische Zerocoin-Prägung ermöglichen (0-1, Standard: %u)</translation> </message> <message> <source>Enable publish hash transaction (locked via SwiftX) in &lt;address&gt;</source> <translation>Aktivieren Sie die Veröffentlichung der Hash-Transaktion 
(gesperrt über SwiftX) in &lt;address&gt;</translation> </message> <message> <source>Enable publish raw transaction (locked via SwiftX) in &lt;address&gt;</source> <translation>Aktivieren Sie die Publish-Raw-Transaktion (gesperrt über SwiftX) in&lt;address&gt;</translation> </message> <message> <source>Enable the client to act as a masternode (0-1, default: %u)</source> <translation>Ermögliche dem Client als Masternode zu fungieren (0-1, Standard: %u)</translation> </message> <message> <source>Error initializing block database</source> <translation>Fehler bei der Initialisierung der Block Datenbank</translation> </message> <message> <source>Error initializing wallet database environment %s!</source> <translation>Fehler beim Initialisieren der Wallet Datenbank Umgebung %s!</translation> </message> <message> <source>Error loading block database</source> <translation>Fehler beim Laden der Block-Datenbank</translation> </message> <message> <source>Error loading wallet.dat</source> <translation>Fehler beim Laden von wallet.dat</translation> </message> <message> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Fehler beim Laden von wallet.dat : Wallet beschädigt</translation> </message> <message> <source>Error loading wallet.dat: Wallet requires newer version of UnitedWorldMoney Core</source> <translation>Fehler beim Laden der wallet.dat: Neuere UnitedWorldMoney Core Version benötigt</translation> </message> <message> <source>Error opening block database</source> <translation>Fehler beim Öffnen der Block-Datenbank</translation> </message> <message> <source>Error reading from database, shutting down.</source> <translation>Fehler beim Lesen der Datenbank, wird heruntergefahren.</translation> </message> <message> <source>Error recovering public key.</source> <translation>Fehler bei der Wiederherstellung des öffentlichen Schlüssels.</translation> </message> <message> <source>Error writing zerocoinDB to disk</source> <translation>Fehler beim Schreiben von zerocoinDB auf die Festplatte</translation> </message> <message> <source>Error</source> <translation>Fehler</translation> </message> <message> <source>Error: A fatal internal error occured, see debug.log for details</source> <translation>Fehler: Ein schwerwiegender Fehler ist aufgetreten, schauen Sie für detailiertere Infos ins debug.log </translation> </message> <message> <source>Error: Disk space is low!</source> <translation>Fehler: Festplattenspeicher ist knapp.</translation> </message> <message> <source>Error: Unsupported argument -tor found, use -onion.</source> <translation>Fehler: Argument -tor nicht unterstützt, nutzen Sie -onion.</translation> </message> <message> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Fehler: Wallet verschlossen, Transaktion konnte nicht erstellt werden!</translation> </message> <message> <source>Failed to calculate accumulator checkpoint</source> <translation>Fehler beim Berechnen des Akku-Checkpoints</translation> </message> <message> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Abhören jeglicher Ports fehlgeschlagen. 
Nutzen Sie -listen=0 falls dies erwünscht ist.</translation> </message> <message> <source>Failed to parse host:port string</source> <translation>Fehler beim Analysieren von host: port string</translation> </message> <message> <source>Failed to read block</source> <translation>Block konnte nicht gelesen werden</translation> </message> <message> <source>Fee (in UWM/kB) to add to transactions you send (default: %s)</source> <translation>Gebühr (in UWM/kB) die den Transaktionen, die Sie überweisen, hinzugefügt wird (Standard: %s)</translation> </message> <message> <source>Force safe mode (default: %u)</source> <translation>Erzwinge Safe-Mode (Standard: %u)</translation> </message> <message> <source>Generate coins (default: %u)</source> <translation>Generiere Coins (default: %u)</translation> </message> <message> <source>How many blocks to check at startup (default: %u, 0 = all)</source> <translation>Zu überprüfende Blöcke beim Start (default: %u, 0 = all)</translation> </message> <message> <source>If &lt;category&gt; is not supplied, output all debugging information.</source> <translation>Wenn &lt;category&gt; nicht angegeben wurde, gebe alle Debuginformationen aus. </translation> </message> <message> <source>Importing...</source> <translation>Importiere...</translation> </message> <message> <source>Imports blocks from external blk000??.dat file</source> <translation>Importiere Blöcke aus externer Datei blk000??.dat</translation> </message> <message> <source>Include IP addresses in debug output (default: %u)</source> <translation>Ip-Adresse in Debug-Ausgabe einbeziehen (Standard: %u)</translation> </message> <message> <source>Incorrect or no genesis block found. Wrong datadir for network?</source> <translation>Falscher oder kein Genesis-Block gefunden. Falsches Datenverzeichnis für das Netzwerk?</translation> </message> <message> <source>Information</source> <translation>Hinweis</translation> </message> <message> <source>Initialization sanity check failed. UnitedWorldMoney Core is shutting down.</source> <translation>Initialisierung Sanity-Check fehlgeschlagen. 
UnitedWorldMoney Core schaltet ab.</translation> </message> <message> <source>Insufficient funds</source> <translation>Unzureichende Mittel</translation> </message> <message> <source>Insufficient funds.</source> <translation>Unzureichende Mittel.</translation> </message> <message> <source>Invalid -onion address or hostname: '%s'</source> <translation>Ungültige -onion Adresse oder Hostname: '%s'</translation> </message> <message> <source>Invalid amount for -maxtxfee=&lt;amount&gt;: '%s'</source> <translation>Ungültiger Betrag für -maxtxfee=&lt;amount&gt;: '%s'</translation> </message> <message> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: '%s'</source> <translation>Ungültiger Betrag für -minrelaytxfee=&lt;amount&gt;: '%s'</translation> </message> <message> <source>Invalid amount for -mintxfee=&lt;amount&gt;: '%s'</source> <translation>Ungültiger Betrag für -mintxfee=&lt;amount&gt;: '%s'</translation> </message> <message> <source>Invalid amount for -paytxfee=&lt;amount&gt;: '%s' (must be at least %s)</source> <translation>Ungültiger Betrag für -paytxfee=&lt;amount&gt;: '%s' (muss mindestens %s sein)</translation> </message> <message> <source>Invalid amount for -paytxfee=&lt;amount&gt;: '%s'</source> <translation>Ungültiger Betrag für -paytxfee=&lt;amount&gt;: '%s'</translation> </message> <message> <source>Invalid amount for -reservebalance=&lt;amount&gt;</source> <translation>Ungültiger Betrag für -reservebalance=&lt;amount&gt;</translation> </message> <message> <source>Invalid amount</source> <translation>Ungültiger Betrag</translation> </message> <message> <source>Invalid masternodeprivkey. Please see documenation.</source> <translation>Ungültiger privater Masternode-Schlüssel. Bitte sehen Sie in der Dokumentation nach.</translation> </message> <message> <source>Invalid netmask specified in -whitelist: '%s'</source> <translation>Fehlerhafte Netzmaske in -whitelist festgelegt: '%s'</translation> </message> <message> <source>Invalid port detected in masternode.conf</source> <translation>Ungültiger Port in masternode.conf entdeckt</translation> </message> <message> <source>Invalid private key.</source> <translation>Ungültiger privater Schlüssel.</translation> </message> <message> <source>Percentage of automatically minted Zerocoin (1-100, default: %u)</source> <translation>Prozent automatisch geprägter Zerocoins (1-100, Standard: %u)</translation> </message> <message> <source>Reindex the UWM and zUWM money supply statistics</source> <translation>Die Geldmengenstatistik UWM und zUWM neu indizieren</translation> </message> <message> <source>Reindexing zerocoin database...</source> <translation>Reindiziere Zerocoin Datenbank...</translation> </message> <message> <source>Reindexing zerocoin failed</source> <translation>Zerocoin Reindizierung fehlgeschlagen</translation> </message> <message> <source>Selected coins value is less than payment target</source> <translation>Ausgewählter Coinwert ist geringer als der Zielbetrag</translation> </message> <message> <source>SwiftX options:</source> <translation>SwiftX Optionen:</translation> </message> <message> <source>This is a pre-release test build - use at your own risk - do not use for staking or merchant applications!</source> <translation>Dies ist ein Pre-Release-Testbuild - Nutzung auf eigene Gefahr - Nutzen Sie diese Version nicht für Staking oder Handelsanwendungen.</translation> </message> <message> <source> mints deleted </source> <translation>Prägungen gelöscht </translation> </message> <message> <source> mints updated, </source> 
<translation>Prägungen geupdatet,</translation> </message> <message> <source> unconfirmed transactions removed </source> <translation>unbestätigte Transaktionen entfernt </translation> </message> <message> <source>Disable all UnitedWorldMoney specific functionality (Masternodes, Zerocoin, SwiftX, Budgeting) (0-1, default: %u)</source> <translation>Deaktivieren Sie alle UnitedWorldMoney-spezifischen Funktionen (Masternodes, Zerocoin, SwiftX, Budgetierung) (0-1, Standard: %u)</translation> </message> <message> <source>Enable SwiftX, show confirmations for locked transactions (bool, default: %s)</source> <translation>Swifttx aktivieren, Bestätigungen für gesperrte Transaktionen anzeigen (bool, default: %s)</translation> </message> <message> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Fehler: Die Transaktion wurde abgelehnt! Dies könnte passieren, wenn einige Coins in deiner Wallet schon ausgegeben wurden, als wenn du eine Kopie von der wallet.dat benutzt hättest. Coins wurden in der Kopie verbracht, aber hier nicht als verbracht markiert.</translation> </message> <message> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Fehler: Diese Transaktion erfordert eine Transaktionsgebühr von mindestens %s wegen ihrer Menge, Komplexität oder Verwendung von kürzlich erhaltenen Mitteln!</translation> </message> <message> <source>Error: Unsupported argument -checklevel found. Checklevel must be level 4.</source> <translation>Fehler: Nicht unterstütztes Argument -checklevel entdeckt. Checklevel muss Level 4 sein.</translation> </message> <message> <source>Execute command when the best block changes and its size is over (%s in cmd is replaced by block hash, %d with the block size)</source> <translation>Befehl ausführen, wenn sich der beste Block ändert und seine Größe überschritten ist (%s in cmd wird durch Blockhash ersetzt, %d durch die Blockgröße)</translation> </message> <message> <source>Failed to find coin set amongst held coins with less than maxNumber of Spends</source> <translation>Fehler beim Finden der Münzen unter den gehaltenen Münzen mit weniger als maxNaval of Spends</translation> </message> <message> <source>In rare cases, a spend with 7 coins exceeds our maximum allowable transaction size, please retry spend using 6 or less coins</source> <translation>In seltenen Fällen überschreiten Ausgaben mit 7 Münzen die maximal zulässige Transaktionsgröße. Bitte versuchen Sie erneut, mit 6 oder weniger Münzen auszugeben</translation> </message> <message> <source>Preferred Denomination for automatically minted Zerocoin (1/5/10/50/100/500/1000/5000), 0 for no preference. default: %u)</source> <translation>Bervorzugte Stückelung für automatisch geprägte Zerocoins (1/5/10/50/100/500/1000/5000), 0 falls keine bestimmte Stückelung präferiert wird. Standard: %u)</translation> </message> <message> <source>Specify custom backup path to add a copy of any automatic zUWM backup. If set as dir, every backup generates a timestamped file. If set as file, will rewrite to that file every backup. If backuppath is set as well, 4 backups will happen</source> <translation>Benutzerdefinierten Sicherungspfad festlegen, in den Kopien von allen automatischen zUWM-Sicherungen abgelegt werden. 
Wenn ein Verzeichnis ausgewählt ist, wird jede Sicherung als Datei mit Zeitstempel erstellt. Wenn eine Datei ausgewählt ist, wird diese Datei bei jeder Sicherung überschrieben. Wenn der Sicherungspfad auch festgelegt ist, werden 4 Sicherungen erstellt.</translation> </message> <message> <source>Specify custom backup path to add a copy of any wallet backup. If set as dir, every backup generates a timestamped file. If set as file, will rewrite to that file every backup.</source> <translation>Benutzerdefinierten Sicherungspfad festlegen, in den Kopien von allen Wallet-Sicherungen abgelegt werden. Wenn ein Verzeichnis ausgewählt ist, wird jede Sicherung als Datei mit Zeitstempel erstellt. Wenn eine Datei ausgewählt ist, wird diese Datei bei jeder Sicherung überschrieben.</translation> </message> <message> <source>SwiftX requires inputs with at least 6 confirmations, you might need to wait a few minutes and try again.</source> <translation>SwiftTX benötigt mindestens 6 confirmations, Sie sollten ggf. einige Minuten abwarten und dann erneut probieren.</translation> </message> <message> <source>&lt;category&gt; can be:</source> <translation>&lt;category&gt;kann folgendes sein:</translation> </message> <message> <source>Attempt to force blockchain corruption recovery</source> <translation>Versuche die Wiederherstellung der fehlerhaften Blockchain zu forcieren</translation> </message> <message> <source>CoinSpend: Accumulator witness does not verify</source> <translation>CoinSpend: Akkumulator-Zeuge verifiziert nicht</translation> </message> <message> <source>Display the stake modifier calculations in the debug.log file.</source> <translation>Zeigen Sie die Berechnungen des Pfahlmodifikators in der Datei debug.log an.</translation> </message> <message> <source>Display verbose coin stake messages in the debug.log file.</source> <translation>Zeigen Sie in der Datei "debug.log" ausführliche Meldungen zum Pfahlstatus an.</translation> </message> <message> <source>Enable publish hash block in &lt;address&gt;</source> <translation>Aktivieren Sie den Veröffentlichungs-Hash-Block in&lt;address&gt;</translation> </message> <message> <source>Enable publish hash transaction in &lt;address&gt;</source> <translation>Aktivieren Sie die Veröffentlichung der Hash-Transaktion in&lt;address&gt;</translation> </message> <message> <source>Enable publish raw block in &lt;address&gt;</source> <translation>Aktivieren Sie den Veröffentlichungsrohblock in&lt;address&gt;</translation> </message> <message> <source>Enable publish raw transaction in &lt;address&gt;</source> <translation>Aktivieren Sie die Veröffentlichung der Rohtransaktion in&lt;address&gt;</translation> </message> <message> <source>Enable staking functionality (0-1, default: %u)</source> <translation>Aktiviere Staking-Funktionalität (0-1, default: %u)</translation> </message> <message> <source>Error: A fatal internal error occurred, see debug.log for details</source> <translation>Fehler: Ein schwerwiegender Fehler ist aufgetreten, schauen Sie für detailiertere Infos ins debug.log </translation> </message> <message> <source>Error: No valid utxo!</source> <translation>Error: No valid utxo!</translation> </message> <message> <source>Failed to create mint</source> <translation>Fehler beim Erstellen von Minze</translation> </message> <message> <source>Failed to find Zerocoins in wallet.dat</source> <translation>Zerocoins in wallet.dat nicht gefunden</translation> </message> <message> <source>Failed to select a zerocoin</source> <translation>Fehler beim Auswählen 
eines Zerocoins</translation> </message> <message> <source>Failed to wipe zerocoinDB</source> <translation>Zerocoin Datenbank konnte nicht gelöscht werden</translation> </message> <message> <source>Failed to write coin serial number into wallet</source> <translation>Fehler beim Schreiben der Seriennummer der Münze in die Wallet</translation> </message> <message> <source>Keep at most &lt;n&gt; unconnectable transactions in memory (default: %u)</source> <translation>Halte höchstens &lt;n&gt; unverbindbare Transaktionen im Speicher (Standard: %u)</translation> </message> <message> <source>Limit size of signature cache to &lt;n&gt; entries (default: %u)</source> <translation>Begrenze die Größe des Signatur-Caches auf &lt;n&gt; Einträge (Standard: %u)</translation> </message> <message> <source>Line: %d</source> <translation>Zeile: %d</translation> </message> <message> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: %u or testnet: %u)</source> <translation>Überwache Port &lt;port&gt; auf JSON-RPC Verbindungen (Standard: %u oder Testnetzwerk: %u)</translation> </message> <message> <source>Listen for connections on &lt;port&gt; (default: %u or testnet: %u)</source> <translation>Überwache Port &lt;port&gt; auf Verbindungen (Standard: %u oder Testnetzwerk: %u)</translation> </message> <message> <source>Loading addresses...</source> <translation>Lade Adressen...</translation> </message> <message> <source>Loading block index...</source> <translation>Lade Block-Index...</translation> </message> <message> <source>Loading budget cache...</source> <translation>Lade Budget Puffer...</translation> </message> <message> <source>Loading masternode cache...</source> <translation>Lade Masternode Cache...</translation> </message> <message> <source>Loading masternode payment cache...</source> <translation>Lade Masternode-Zahlungs-Puffer...</translation> </message> <message> <source>Loading sporks...</source> <translation>Lade Sporks...</translation> </message> <message> <source>Loading wallet... (%3.2f %%)</source> <translation>Lade Wallet... 
(%3.2f %%)</translation> </message> <message> <source>Loading wallet...</source> <translation>Lade Wallet...</translation> </message> <message> <source>Location of the auth cookie (default: data dir)</source> <translation>Location of the auth cookie (default: data dir)</translation> </message> <message> <source>Lock masternodes from masternode configuration file (default: %u)</source> <translation>Masternodes aus der masternode-Konfigurationsdatei sperren (Standard: %u)</translation> </message> <message> <source>Lookup(): Invalid -proxy address or hostname: '%s'</source> <translation>Lookup(): Invalid -proxy address or hostname: '%s'</translation> </message> <message> <source>Maintain at most &lt;n&gt; connections to peers (default: %u)</source> <translation>Pflegen Sie höchstens&lt;n&gt; Verbindungen zu Peers (Standard: %u)</translation> </message> <message> <source>Masternode options:</source> <translation>Masternode-Optionen:</translation> </message> <message> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: %u)</source> <translation>Maximaler Empfangspuffer pro Verbindung &lt;n&gt;*1000 bytes (Standard: %u)</translation> </message> <message> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: %u)</source> <translation>Maximaler Sendepuffer pro Verbindung &lt;n&gt;*1000 bytes (Standard: %u)</translation> </message> <message> <source>Mint did not make it into blockchain</source> <translation>Mint schaffte es nicht in Blockchain</translation> </message> <message> <source>Need address because change is not exact</source> <translation>Brauche eine Adresse, weil die Änderung nicht exakt ist</translation> </message> <message> <source>Need to specify a port with -whitebind: '%s'</source> <translation>Sie müssen einen Port mittels -whitebind festlegen: '%s'</translation> </message> <message> <source>Node relay options:</source> <translation>Optionen für Knotenrelais:</translation> </message> <message> <source>Not enough file descriptors available.</source> <translation>Nicht genügend Datei-Deskriptoren verfügbar.</translation> </message> <message> <source>Number of automatic wallet backups (default: 10)</source> <translation>Anzahl automatischer Wallet-Backups (Standard: 10)</translation> </message> <message> <source>Number of custom location backups to retain (default: %d)</source> <translation>Anzahl an benutzerdefinierter Backups die aufbewahrt werden (Standard: %d)</translation> </message> <message> <source>Only accept block chain matching built-in checkpoints (default: %u)</source> <translation>Akzeptiere nur Blockchains, die mit eingebauten Prüfpunkten übereinstimmt (Standard: %u)</translation> </message> <message> <source>Only connect to nodes in network &lt;net&gt; (ipv4, ipv6 or onion)</source> <translation>Nur Verbindung zu Knoten im Netzwerk &lt;net&gt; (ipv4, ipv6 oder onion)</translation> </message> <message> <source>Options:</source> <translation>Optionen:</translation> </message> <message> <source>Password for JSON-RPC connections</source> <translation>Passwort für JSON-RPC Verbindungen</translation> </message> <message> <source>isValid(): Invalid -proxy address or hostname: '%s'</source> <translation>isValid(): Invalid -proxy address or hostname: '%s'</translation> </message> <message> <source>Preparing for resync...</source> <translation>Vorbereitung für die Resynchronisierung ...</translation> </message> <message> <source>Prepend debug output with timestamp (default: %u)</source> <translation>Zeitstempel vor Debug-Ausgabe 
vermerken (Standard: %u)</translation> </message> <message> <source>Print version and exit</source> <translation>Version drucken und Programm beenden</translation> </message> <message> <source>RPC server options:</source> <translation>RPC Serveroptionen:</translation> </message> <message> <source>Randomly drop 1 of every &lt;n&gt; network messages</source> <translation>Nach dem Zufallsprinzip 1 von jedem fallen lassen&lt;n&gt; Netzwerknachrichten</translation> </message> <message> <source>Randomly fuzz 1 of every &lt;n&gt; network messages</source> <translation>Zufällig fuzz 1 von jedem&lt;n&gt; Netzwerknachrichten</translation> </message> <message> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Baue den Blockchain-Index mithilfe der aktuellen blk000??.dat Datei erneut auf</translation> </message> <message> <source>Receive and display P2P network alerts (default: %u)</source> <translation>Empfange P2P-Netzwerk-Warnsignale und zeige diese an (Standard: %u)</translation> </message> <message> <source>Reindex the accumulator database</source> <translation>Reinde die Akkumulator-Datenbank neu</translation> </message> <message> <source>Relay and mine data carrier transactions (default: %u)</source> <translation>Relay und meine Datenträgertransaktionen (Standard: %u)</translation> </message> <message> <source>Relay non-P2SH multisig (default: %u)</source> <translation>Relais nicht-P2SH multisig (Standard: %u)</translation> </message> <message> <source>Rescan the block chain for missing wallet transactions</source> <translation>Neuscannen der Blockchain nach fehlenden Wallet-Transaktionen</translation> </message> <message> <source>Rescanning...</source> <translation>Neuscannen...</translation> </message> <message> <source>ResetMintZerocoin finished: </source> <translation>ResetMintZerocoin abgeschlossen: </translation> </message> <message> <source>ResetSpentZerocoin finished: </source> <translation>ResetSpentZerocoin abgeschlossen: </translation> </message> <message> <source>Run a thread to flush wallet periodically (default: %u)</source> <translation>Führen Sie einen Thread aus, um die Brieftasche regelmäßig zu leeren (Standard: %u)</translation> </message> <message> <source>Run in the background as a daemon and accept commands</source> <translation>Als Hintergrundprozess ausführen und Eingaben akzeptieren</translation> </message> <message> <source>Send transactions as zero-fee transactions if possible (default: %u)</source> <translation>Sende die Transaktion als gebührenfreie Transaktion, falls möglich (Standard: %u)</translation> </message> <message> <source>Session timed out.</source> <translation>Zeitüberschreitung</translation> </message> <message> <source>Set database cache size in megabytes (%d to %d, default: %d)</source> <translation>Größe des Datenbank-Cache in Megabytes festlegen (%d to %d, Standard: %d)</translation> </message> <message> <source>Set external address:port to get to this masternode (example: %s)</source> <translation>Port für externe Adressen festlegen, die verwendet werden um diese Masternode anzusteuern (Beispiel: %s)</translation> </message> <message> <source>Set key pool size to &lt;n&gt; (default: %u)</source> <translation>Setze Key-Pool Größe auf &lt;n&gt; (Standard: %u)</translation> </message> <message> <source>Set maximum block size in bytes (default: %d)</source> <translation>Maximale Blockgröße in Bytes festlegen (Standard: %d)</translation> </message> <message> <source>Set minimum block size in bytes (default: 
%u)</source> <translation>Minimale Blockgröße in Bytes festlegen (Standard: %u)</translation> </message> <message> <source>Set the Maximum reorg depth (default: %u)</source> <translation>Legen Sie die maximale Reorganisationstiefe fest (Standard: %u)</translation> </message> <message> <source>Set the masternode private key</source> <translation>Privaten Masternode-Schlüssel festlegen</translation> </message> <message> <source>Set the number of threads to service RPC calls (default: %d)</source> <translation>Anzahl der Threads festlegen, die genutzt werden um RPC Anfragen zu bearbeiten (Standard: %d)</translation> </message> <message> <source>Sets the DB_PRIVATE flag in the wallet db environment (default: %u)</source> <translation>Setzt das DB_PRIVATE-Flag in der Wallet-Datenbankumgebung (Standard: %u)</translation> </message> <message> <source>Show all debugging options (usage: --help -help-debug)</source> <translation>Zeige alle debugging Optionen (verwende: --help -help-debug)</translation> </message> <message> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Schrumpfe debug.log Datei beim Client-Start (Standard: 1, falls -debug nicht gesetzt ist)</translation> </message> <message> <source>Signing failed.</source> <translation>Signieren fehlgeschlagen.</translation> </message> <message> <source>Signing timed out.</source> <translation>Zeitüberschreitung bei der Signierung.</translation> </message> <message> <source>Signing transaction failed</source> <translation>Signieren der Transaktion fehlgeschlagen</translation> </message> <message> <source>Specify configuration file (default: %s)</source> <translation>Konfigurationsdatei auswählen (Standard: %s)</translation> </message> <message> <source>Specify connection timeout in milliseconds (minimum: 1, default: %d)</source> <translation>Verbindungs-Timeout in Millisekunden festlegen (Minimum: 1, Standard: %d)</translation> </message> <message> <source>Specify data directory</source> <translation>Datenverzeichnis festlegen</translation> </message> <message> <source>Specify masternode configuration file (default: %s)</source> <translation>Bezeichne Masternode-Konfigurationsdatei (default: %s)</translation> </message> <message> <source>Specify pid file (default: %s)</source> <translation>Bezeichne pid-Datei (default: %s)</translation> </message> <message> <source>Specify wallet file (within data directory)</source> <translation>Bezeichne Wallet-Datei (im Datenverzeichnis)</translation> </message> <message> <source>Specify your own public address</source> <translation>Bezeichnen Sie ihre eigene öffentliche Adresse</translation> </message> <message> <source>Spend Valid</source> <translation>Spend Valid</translation> </message> <message> <source>Spend unconfirmed change when sending transactions (default: %u)</source> <translation>Unbestätigte Änderung beim Senden von Transaktionen ausgeben (Standard: %u)</translation> </message> <message> <source>Staking options:</source> <translation>Staking-Optionen:</translation> </message> <message> <source>Stop running after importing blocks from disk (default: %u)</source> <translation>Stoppt den Lauf nach dem Importieren von Blöcken von der Festplatte (default: %u)</translation> </message> <message> <source>Synchronization failed</source> <translation>Synchronisierung fehlgeschlagen</translation> </message> <message> <source>Synchronization finished</source> <translation>Synchronisierung erfolgreich beendet</translation> </message> <message> 
<source>Synchronization pending...</source> <translation>Synchronisierung ausstehend...</translation> </message> <message> <source>Synchronizing budgets...</source> <translation>Synchronisiere Budgets....</translation> </message> <message> <source>Synchronizing masternode winners...</source> <translation>Synchronisiere ausgewählte Masternodes....</translation> </message> <message> <source>Synchronizing masternodes...</source> <translation>Synchronisiere Masternodes....</translation> </message> <message> <source>Synchronizing sporks...</source> <translation>Synchronisiere Sporks....</translation> </message> <message> <source>Syncing zUWM wallet...</source> <translation>Synchronisiere zUWM Wallet... </translation> </message> <message> <source>The coin spend has been used</source> <translation>Die Münzausgabe wurde verwendet</translation> </message> <message> <source>The transaction did not verify</source> <translation>Die Transaktion wurde nicht verifiziert</translation> </message> <message> <source>This help message</source> <translation>Dieser Hilfetext</translation> </message> <message> <source>This is experimental software.</source> <translation>Dies ist experimentelle Software.</translation> </message> <message> <source>This is intended for regression testing tools and app development.</source> <translation>Dies ist für Regressionstest-Tools und Anwendungsentwicklung gedacht.</translation> </message> <message> <source>Threshold for disconnecting misbehaving peers (default: %u)</source> <translation>Schwellenwert zum Trennen von sich falsch verhaltenden Gegenstellen (Standard: %u)</translation> </message> <message> <source>Too many spends needed</source> <translation>Zu viele Ausgaben benötigt</translation> </message> <message> <source>Tor control port password (default: empty)</source> <translation>Tor Kontroll-Port Passwort (Standard: ohne)</translation> </message> <message> <source>Tor control port to use if onion listening enabled (default: %s)</source> <translation>Zu verwendender Tor Kontroll-Port, falls onion Überwachung aktiviert ist (Standard %s)</translation> </message> <message> <source>Transaction Created</source> <translation>Transaktion erstellt</translation> </message> <message> <source>Transaction Mint Started</source> <translation>Transaktion Mint gestartet</translation> </message> <message> <source>Transaction amount too small</source> <translation>Transaktionsbetrag zu niedrig</translation> </message> <message> <source>Transaction amounts must be positive</source> <translation>Transaktionsbeträge müssen positiv sein</translation> </message> <message> <source>Transaction too large for fee policy</source> <translation>Transaktion ist für die Gebührenrichtlinie zu groß</translation> </message> <message> <source>Transaction too large</source> <translation>Transaktion zu groß</translation> </message> <message> <source>Trying to spend an already spent serial #, try again.</source> <translation>Versuchen Sie, eine bereits ausgegebene Seriennummer auszugeben, versuchen Sie es erneut.</translation> </message> <message> <source>Unable to bind to %s on this computer (bind returned error %s)</source> <translation>Kann auf diesem Computer nicht an %s binden (zurückgegebener Fehler: %s)</translation> </message> <message> <source>Unable to sign spork message, wrong key?</source> <translation>Die Spork-Nachricht konnte nicht signiert werden. Falscher Key?</translation> </message> <message> <source>Unable to start HTTP server. 
See debug log for details.</source> <translation>Der HTTP-Server konnte nicht gestartet werden. Details finden Sie im Debug-Protokoll.</translation> </message> <message> <source>Unknown network specified in -onlynet: '%s'</source> <translation>Unbekannter Netztyp in -onlynet angegeben: '%s'</translation> </message> <message> <source>Upgrade wallet to latest format</source> <translation>Wallet auf das neueste Format aktualisieren</translation> </message> <message> <source>Use UPnP to map the listening port (default: %u)</source> <translation>UPnP verwenden, um eine Portweiterleitung einzurichten (Standard: %u)</translation> </message> <message> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>UPnP verwenden, um eine Portweiterleitung einzurichten (Standard: 1, wenn abgehört wird)</translation> </message> <message> <source>Use a custom max chain reorganization depth (default: %u)</source> <translation>Verwenden Sie eine benutzerdefinierte Max-Reorganisation der Kette (Standard: %u)</translation> </message> <message> <source>Use the test network</source> <translation>Das Testnetzwerk verwenden</translation> </message> <message> <source>Username for JSON-RPC connections</source> <translation>Benutzername für JSON-RPC-Verbindungen</translation> </message> <message> <source>Value is below the smallest available denomination (= 1) of zUWM</source> <translation>Der Betrag ist unterhalb des kleinsten Wertes (=1) an zUWM</translation> </message> <message> <source>Verifying blocks...</source> <translation>Verifiziere Blöcke...</translation> </message> <message> <source>Verifying wallet...</source> <translation>Verifiziere Wallet...</translation> </message> <message> <source>Wallet %s resides outside data directory %s</source> <translation>Wallet %s liegt außerhalb des Datenverzeichnisses %s</translation> </message> <message> <source>Wallet needed to be rewritten: restart UnitedWorldMoney Core to complete</source> <translation>Wallet musste neu geschrieben werden: Bitte UnitedWorldMoney Core neu starten</translation> </message> <message> <source>Wallet options:</source> <translation>Wallet-Optionen:</translation> </message> <message> <source>Wallet window title</source> <translation>Fensterüberschrift der Wallet</translation> </message> <message> <source>Warning</source> <translation>Warnung</translation> </message> <message> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Warnung: Diese Version is veraltet, Aktualisierung erforderlich!</translation> </message> <message> <source>Warning: Unsupported argument -benchmark ignored, use -debug=bench.</source> <translation>Warnung: Nicht unterstütztes Argument -benchmark wurde ignoriert, nutzen Sie -debug=bench.</translation> </message> <message> <source>Warning: Unsupported argument -debugnet ignored, use -debug=net.</source> <translation>Warnung: Nicht unterstütztes Argument -debugnet wurde ignoriert, nutze stattdessen -debug=net.</translation> </message> <message> <source>You don't have enough Zerocoins in your wallet</source> <translation>Du hast nicht genug Zerocoins in deiner Brieftasche</translation> </message> <message> <source>You need to rebuild the database using -reindex to change -txindex</source> <translation>Sie müssen die Datenbank mithilfe von -reindex neu erstellen, um -txindex zu ändern</translation> </message> <message> <source>Zapping all transactions from wallet...</source> <translation>Lösche alle Transaktionen aus der Wallet...</translation> </message> 
<message> <source>ZeroMQ notification options:</source> <translation>ZeroMQ Benachrichtigungsoptionen:</translation> </message> <message> <source>Zerocoin options:</source> <translation>Zerocoin Optionen:</translation> </message> <message> <source>on startup</source> <translation>beim Starten</translation> </message> <message> <source>wallet.dat corrupt, salvage failed</source> <translation>wallet.dat beschädigt, Datenrettung fehlgeschlagen</translation> </message> </context> </TS>
<message> <source>Ping Wait</source> <translation>Ping warten</translation>
kdagent.rs
//! kdirect kdagent type use crate::*; use kitsune_p2p::agent_store::*; use kitsune_p2p::KitsuneSignature; pub use kitsune_p2p_direct_api::{kd_agent_info::KdAgentInfoInner, KdAgentInfo}; /// Extension trait to augment the direct_api version of KdAgentInfo pub trait KdAgentInfoExt: Sized { /// convert KdAgentInfo into a kitsune AgentInfoSigned fn to_kitsune(&self) -> AgentInfoSigned; /// convert a kitsune AgentInfoSigned into KdAgentInfo fn from_kitsune(kitsune: &AgentInfoSigned) -> KdResult<Self>; } fn clamp(u: u64) -> i64 { if u > i64::MAX as u64 { i64::MAX } else
} impl KdAgentInfoExt for KdAgentInfo { fn to_kitsune(&self) -> AgentInfoSigned { use kitsune_p2p::KitsuneBinType; let space = self.root().to_kitsune_space(); let agent = self.agent().to_kitsune_agent(); let url_list = self.url_list().iter().map(|u| u.into()).collect(); let signed_at_ms = self.signed_at_ms() as u64; let expires_at_ms = self.expires_at_ms() as u64; let signature = Arc::new(KitsuneSignature(self.as_signature_ref().to_vec())); let encoded_bytes = self.as_encoded_info_ref().to_vec().into_boxed_slice(); let center_loc = agent.get_loc().into(); AgentInfoSigned(Arc::new(AgentInfoInner { space, agent, storage_arc: SgdArc { center_loc, half_length: u32::MAX, // TODO FIXME }, url_list, signed_at_ms, expires_at_ms, signature, encoded_bytes, })) } fn from_kitsune(kitsune: &AgentInfoSigned) -> KdResult<Self> { let root = KdHash::from_kitsune_space(&kitsune.space); let agent = KdHash::from_kitsune_agent(&kitsune.agent); let url_list = kitsune.url_list.iter().map(|u| u.as_str().into()).collect(); let signed_at_ms = clamp(kitsune.signed_at_ms); let expires_at_ms = clamp(kitsune.expires_at_ms); let signature = kitsune.signature.0.to_vec().into_boxed_slice().into(); let encoded_info = kitsune.encoded_bytes.clone().into(); Ok(Self(Arc::new(KdAgentInfoInner { root, agent, url_list, signed_at_ms, expires_at_ms, signature, encoded_info, }))) } }
{ u as i64 }
membership_mutator_test.go
// Copyright (c) 2017-2018 Uber Technologies, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package etcd import ( "testing" "github.com/golang/mock/gomock" "github.com/m3db/m3/src/cluster/kv/mem" "github.com/m3db/m3/src/cluster/placement" "github.com/m3db/m3/src/cluster/services" "github.com/stretchr/testify/assert" "github.com/uber/aresdb/cluster/kvstore" "github.com/uber/aresdb/controller/models" ) func TestMembershipMutator(t *testing.T)
{ placementInstance1 := placement. NewInstance(). SetHostname("host1"). SetPort(9374). SetID("inst1") placementInstance2 := placement. NewInstance(). SetHostname("host2"). SetPort(9374). SetID("inst2") instance1 := models.Instance{ Name: "inst1", Host: "host1", Port: 9374, } instance2 := models.Instance{ Name: "inst2", Host: "host2", Port: 9374, } t.Run("read ops should work", func(t *testing.T) { // test setup ctrl := gomock.NewController(t) defer ctrl.Finish() clusterService := services.NewMockServices(ctrl) heartbeatService := services.NewMockHeartbeatService(ctrl) clusterService.EXPECT().HeartbeatService(gomock.Any()).Return(heartbeatService, nil).AnyTimes() txnStore := mem.NewStore() etcdClient := &kvstore.EtcdClient{ ServiceName: "ares-controller", Environment: "test", Zone: "local", Services: clusterService, TxnStore: txnStore, } // test heartbeatService.EXPECT().Get(). Return([]string{"inst1"}, nil).Times(1) heartbeatService.EXPECT().GetInstances(). Return([]placement.Instance{placementInstance1}, nil).Times(1) membershipMutator := NewMembershipMutator(etcdClient) res, err := membershipMutator.GetInstances("ns1") assert.NoError(t, err) assert.Len(t, res, 1) assert.Equal(t, instance1, res[0]) hash1, err := membershipMutator.GetHash("ns1") assert.NoError(t, err) heartbeatService.EXPECT().Get(). Return([]string{"inst1", "inst2"}, nil).Times(1) heartbeatService.EXPECT().GetInstances(). Return([]placement.Instance{placementInstance1, placementInstance2}, nil).Times(1) res, err = membershipMutator.GetInstances("ns1") assert.NoError(t, err) assert.Len(t, res, 2) assert.Equal(t, instance1, res[0]) assert.Equal(t, instance2, res[1]) hash2, err := membershipMutator.GetHash("ns1") assert.NoError(t, err) assert.NotEqual(t, hash1, hash2) }) }
PGPS.tsx
import React, { useContext, useState } from 'react' import styled from 'styled-components' import { MessageContext } from './Device' const Headline = styled.h2` font-size: 100%; ` const Fieldset = styled.fieldset` display: flex; justify-content: space-between; input { width: 150px; height: 30px; } padding: 0.25rem 0; label { font-weight: normal; } align-items: center; ` const FormFooter = styled(Fieldset)` display: flex; flex-direction: row; justify-content: flex-end; } ` type Interval = 120 | 240 | 360 | 480 export const PGPS = ({ sendMessage: m, }: { sendMessage: (message: Record<string, any>, topic: string) => void }) => { const [numPredictions, setNumPredictions] = useState(42) const [interval, setInterval] = useState<Interval>(240) const { messages } = useContext(MessageContext) let url: URL | undefined = undefined const pgpsMessage = messages
.filter(({ topic }) => topic.endsWith('/pgps')) .pop() if (pgpsMessage !== undefined) { try { const m = JSON.parse(pgpsMessage.payload) url = new URL(`https://${m.host}/${m.path}`) } catch { // pass } } return ( <> <form> <Headline>Predicted GPS</Headline> <Fieldset> <label htmlFor="numPredictions">Number of predictions:</label> <input id="numPredictions" type="number" step={1} min={0} value={numPredictions} onChange={({ target: { value } }) => setNumPredictions(parseInt(value, 10)) } /> </Fieldset> <Fieldset> <label htmlFor="interval"> Time between predictions, in minutes: </label> <input id="interval" type="number" step={120} min={120} max={480} value={interval} onChange={({ target: { value } }) => setInterval( (Math.floor(parseInt(value, 10) / 120) * 120) as Interval, ) } /> </Fieldset> <FormFooter> <button type="button" onClick={() => { m({ n: numPredictions, int: interval }, 'pgps/get') }} > Request P-GPS data </button> </FormFooter> </form> {url !== undefined && ( <ul> <li> <a href={url.toString()}>{url.toString()}</a> </li> </ul> )} </> ) }
axisLabelEvent-2.ts
/** * sparkline sample */ import { Sparkline } from '../../src/sparkline/sparkline'; import { IAxisRenderingEventArgs } from '../../src/sparkline/model/interface'; let sparkline: Sparkline = new Sparkline({ height: '50px', width: '90%', lineWidth: 2, type: 'Line', valueType: 'Category', fill: '#3C78EF', negativePointColor: '#fc5070', format: 'n', axisSettings: { lineSettings: { visible: true } }, useGroupingSeparator: true, dataSource: [ { x: 0, xval: '2005', yval: 20090440 }, { x: 1, xval: '2006', yval: 20264080 }, { x: 2, xval: '2007', yval: 20434180 }, { x: 3, xval: '2008', yval: 21007310 }, { x: 4, xval: '2009', yval: 21262640 }, { x: 5, xval: '2010', yval: 21515750 }, { x: 6, xval: '2011', yval: 21766710 }, { x: 7, xval: '2012', yval: 22015580 }, { x: 8, xval: '2013', yval: 22262500 }, { x: 9, xval: '2014', yval: 22507620 }, ], xName: 'xval', yName: 'yval',
axisRendering: (args: IAxisRenderingEventArgs) => { args.minX = 5; } }); sparkline.appendTo('#container');
loss.py
import time import matplotlib import numpy as np matplotlib.use('Agg') import torch import torch.nn as nn class LossMultiTargets(nn.Module): def __init__(self,loss_fnc=torch.nn.CrossEntropyLoss()): super(LossMultiTargets, self).__init__() self.loss = loss_fnc def forward(self, inputs,targets): # loss = [] # for (input,target) in zip(inputs,targets): # loss.append(self.loss(input,target)) loss = 0 nb = len(targets) for (input,target) in zip(inputs,targets): loss += self.loss(input,target) loss /= nb return loss class MSELoss(torch.nn.Module): def __init__(self): super(MSELoss,self).__init__() def forward(self, input, target): #We only want places where the target is larger than zero (remember this is for distances) # mask = target > 0 # result = torch.mean((input[mask] - target[mask])**2) # result = torch.norm((input[mask] - target[mask])) ** 2 / torch.norm(target[mask]) ** 2 nb = target.shape[0] result = 0 for i in range(nb): inputi = input[i,:,:] targeti = target[i,:,:] maski = targeti > 0 if torch.sum(maski) == 0: #nothing to learn from this one continue assert torch.norm(targeti[maski]) > 0 result += torch.norm((inputi[maski] - targeti[maski])) ** 2 / torch.norm(targeti[maski]) ** 2 return result/nb def
(r1,r2): ''' Given two sets of 3D points of equal size. It computes the distance between these two sets of points, when allowing translation and rotation of the point clouds. We compute both chirality, and take whichever one has the lowest loss. r1 -> Tensor of shape (3,n) r2 -> Tensor of shape (3,n) ''' #First we translate the two sets, by setting both their centroids to origin r1c = r1 - torch.mean(r1, dim=1, keepdim=True) r2c = r2 - torch.mean(r2, dim=1, keepdim=True) H = r1c @ r2c.transpose(0,1) t1 = time.time() U, S, V = torch.svd(H) t2 = time.time() d = torch.sign(torch.det(V @ U.transpose(0,1))) t3 = time.time() tmp = torch.diag_embed(torch.tensor([1, 1, d])).to(device=V.device) t4 = time.time() R = V @ tmp @ U.transpose(0,1) t5 = time.time() # tmp2 = torch.diag_embed(torch.tensor([1, 1, -d])).to(device=V.device) # R2 = V @ tmp2 @ U.transpose(0,1) r1cr = R @ r1c # r1cr2 = R2 @ r1c assert torch.norm(r2c) > 0 loss_tr1 = torch.norm(r1cr - r2c) ** 2 / torch.norm(r2c) ** 2 # loss_tr2 = torch.norm(r1cr2 - r2c) ** 2 / torch.norm(r2c) ** 2 # if loss_tr1 < loss_tr2: loss_tr = loss_tr1 # pred = r1cr.squeeze().cpu().detach().numpy() # else: # pred = r1cr2.squeeze().cpu().detach().numpy() # loss_tr = loss_tr2 # target = r2c.squeeze().cpu().detach().numpy() print("{:2.4f},{:2.4f},{:2.4f},{:2.4f}".format(t2-t1,t3-t2,t4-t3,t5-t4)) return loss_tr#, pred, target def loss_tr_wrapper(r1,r2): ''' Note that any point with r2 coordinates set to zero is considered masked and will not be included in the calculation. (so use r1 for prediction and r2 for target, and just make sure no target point are accidently zero. Remember the point cloud is translation invariant, so you can just translate all points if needed) ''' nb = r1.shape[0] loss_tr = 0 for i in range(nb): r1i = r1[i, :, :] r2i = r2[i,:,:] mask = (r2i != 0).reshape(3, -1) mask = torch.sum(mask,dim=0) > 0 r1i = r1i[:,mask] r2i = r2i[:,mask] # loss_tri, predi, targeti = pc_translation_rotation_matching(r1i, r2i) loss_tri = pc_translation_rotation_matching(r1i, r2i) loss_tr += loss_tri loss_tr /= nb return loss_tr#, predi, targeti def loss_tr(r1,r2, return_coords=False): t1 = time.time() loss_tr = 0 mask = (r2 != 0).reshape(r2.shape) mask = (torch.sum(mask,dim=1) > 0).unsqueeze(1) mask = mask.repeat(1,3,1) batch_mask = torch.sum(mask,dim=(1,2)) > 0 r1 = r1[batch_mask,:,:] r2 = r2[batch_mask,:,:] mask = mask[batch_mask,:,:] nb = r1.shape[0] t2 = time.time() #First we translate the two sets, by setting both their centroids to origin r1c = torch.empty_like(r1) r2c = torch.empty_like(r2) for i in range(nb): r1c[i, :, :] = r1[i, :, :] - torch.mean(r1[i, mask[i, :, :]].reshape(3, -1), dim=1, keepdim=True) r2c[i, :, :] = r2[i, :, :] - torch.mean(r2[i, mask[i, :, :]].reshape(3, -1), dim=1, keepdim=True) t3 = time.time() r1c = r1c * mask r2c = r2c * mask H = torch.bmm(r1c,r2c.transpose(1,2)) # try: # U, S, V = torch.svd(H) # except: # torch.svd may have convergence issues for GPU and CPU. 
# U, S, V = torch.svd(H + 1e-4 * H.mean() * torch.rand(H.shape,device=H.device)) U, S, V = torch.svd(H) t4 = time.time() d = torch.sign(torch.det(torch.bmm(V, U.transpose(1,2)))) t5 = time.time() tt=torch.tensor([[1]*nb, [1]*nb, d]).transpose(0,1) tmp = torch.diag_embed(tt).to(device=V.device) t6 = time.time() R = torch.bmm(V, torch.bmm(tmp, U.transpose(1,2))) r1cr = torch.bmm(R, r1c) loss_tr = torch.mean(torch.norm(r1cr - r2c, dim=(1, 2)) ** 2 / torch.norm(r2c, dim=(1, 2)) ** 2) t7 = time.time() # print("{:2.4f},{:2.4f},{:2.4f},{:2.4f},{:2.4f},{:2.4f}".format(t2-t1,t3-t2,t4-t3,t5-t4,t6-t5,t7-t6)) if return_coords: pred = r1cr[-1,:,:].squeeze().cpu().detach().numpy() target = r2c[-1,:,:].squeeze().cpu().detach().numpy() return loss_tr, pred, target else: return loss_tr
pc_translation_rotation_matching
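The loss_tr / pc_translation_rotation_matching routines above are a Kabsch-style alignment: both point clouds are centered, the rotation is recovered from the SVD of the cross-covariance matrix, and the sign of det(V @ U.T) guards against an improper rotation (a reflection). A minimal NumPy sketch of the same idea, independent of the torch code above; the function name and the relative-error normalization mirror the code, but this is illustrative, not the module's API:

import numpy as np

def kabsch_relative_error(p, q):
    # p, q: (3, n) point clouds; q is the target/reference.
    pc = p - p.mean(axis=1, keepdims=True)      # center both clouds at the origin
    qc = q - q.mean(axis=1, keepdims=True)
    h = pc @ qc.T                                # 3x3 cross-covariance matrix
    u, _, vt = np.linalg.svd(h)
    d = np.sign(np.linalg.det(vt.T @ u.T))       # chirality fix: avoid a reflection
    r = vt.T @ np.diag([1.0, 1.0, d]) @ u.T      # optimal proper rotation
    aligned = r @ pc
    # relative squared error, matching the normalization used in loss_tr above
    return np.linalg.norm(aligned - qc) ** 2 / np.linalg.norm(qc) ** 2

Usage would be along the lines of kabsch_relative_error(pred_coords, target_coords) on two (3, n) arrays with masked columns already removed.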
table_service_properties.py
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. ***
import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from ... import _utilities, _tables from . import outputs from ._inputs import * __all__ = ['TableServiceProperties'] class TableServiceProperties(pulumi.CustomResource): def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, account_name: Optional[pulumi.Input[str]] = None, cors: Optional[pulumi.Input[pulumi.InputType['CorsRulesArgs']]] = None, resource_group_name: Optional[pulumi.Input[str]] = None, table_service_name: Optional[pulumi.Input[str]] = None, __props__=None, __name__=None, __opts__=None): """ The properties of a storage account’s Table service. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only. :param pulumi.Input[pulumi.InputType['CorsRulesArgs']] cors: Specifies CORS rules for the Table service. You can include up to five CorsRule elements in the request. If no CorsRule elements are included in the request body, all CORS rules will be deleted, and CORS will be disabled for the Table service. :param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive. :param pulumi.Input[str] table_service_name: The name of the Table Service within the specified storage account. Table Service Name must be 'default' """ if __name__ is not None: warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning) resource_name = __name__ if __opts__ is not None: warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = dict() if account_name is None and not opts.urn: raise TypeError("Missing required property 'account_name'") __props__['account_name'] = account_name __props__['cors'] = cors if resource_group_name is None and not opts.urn: raise TypeError("Missing required property 'resource_group_name'") __props__['resource_group_name'] = resource_group_name __props__['table_service_name'] = table_service_name __props__['name'] = None __props__['type'] = None alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:storage/v20200801preview:TableServiceProperties"), pulumi.Alias(type_="azure-native:storage:TableServiceProperties"), pulumi.Alias(type_="azure-nextgen:storage:TableServiceProperties"), pulumi.Alias(type_="azure-native:storage/latest:TableServiceProperties"), pulumi.Alias(type_="azure-nextgen:storage/latest:TableServiceProperties"), pulumi.Alias(type_="azure-native:storage/v20190601:TableServiceProperties"), pulumi.Alias(type_="azure-nextgen:storage/v20190601:TableServiceProperties"), pulumi.Alias(type_="azure-native:storage/v20210101:TableServiceProperties"), pulumi.Alias(type_="azure-nextgen:storage/v20210101:TableServiceProperties")]) opts = pulumi.ResourceOptions.merge(opts, alias_opts) super(TableServiceProperties, 
__self__).__init__( 'azure-native:storage/v20200801preview:TableServiceProperties', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) -> 'TableServiceProperties': """ Get an existing TableServiceProperties resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() __props__["cors"] = None __props__["name"] = None __props__["type"] = None return TableServiceProperties(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter def cors(self) -> pulumi.Output[Optional['outputs.CorsRulesResponse']]: """ Specifies CORS rules for the Table service. You can include up to five CorsRule elements in the request. If no CorsRule elements are included in the request body, all CORS rules will be deleted, and CORS will be disabled for the Table service. """ return pulumi.get(self, "cors") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ The name of the resource """ return pulumi.get(self, "name") @property @pulumi.getter def type(self) -> pulumi.Output[str]: """ The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" """ return pulumi.get(self, "type") def translate_output_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop): return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
# *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi
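The generated TableServiceProperties class above is instantiated like any other Pulumi resource. A usage sketch under stated assumptions: the module path follows the usual pulumi_azure_native package layout, the resource group and storage account names are placeholders, and the CorsRuleArgs field names are assumed to mirror the REST API's CorsRule properties:

import pulumi
from pulumi_azure_native.storage.v20200801preview import (
    CorsRuleArgs,
    CorsRulesArgs,
    TableServiceProperties,
)

table_props = TableServiceProperties(
    "tableProps",
    account_name="examplesa",            # placeholder storage account name
    resource_group_name="example-rg",    # placeholder resource group
    table_service_name="default",        # must be 'default' per the docstring above
    cors=CorsRulesArgs(cors_rules=[
        CorsRuleArgs(                    # field names assumed from the REST CorsRule shape
            allowed_origins=["https://example.org"],
            allowed_methods=["GET"],
            allowed_headers=["*"],
            exposed_headers=["*"],
            max_age_in_seconds=300,
        ),
    ]),
)

pulumi.export("tableServiceName", table_props.name)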
urls.py
"""app URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.0/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.contrib import admin from django.urls import path urlpatterns = [ path('admin/', admin.site.urls),
]
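The module docstring above already spells out the three ways to register routes; a short sketch of the include() variant it describes, using the hypothetical blog app from the docstring's own example:

from django.contrib import admin
from django.urls import include, path

urlpatterns = [
    path('admin/', admin.site.urls),
    path('blog/', include('blog.urls')),   # hypothetical app, as in the docstring example
]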
RestartButton.js
import React from 'react';
import PropTypes from 'prop-types';

import { Button } from '../shared';
import { RestartButtonWrapperDiv } from './RestartButton.style';
import { MESSAGE } from '../../constants/messages';

export const RestartButton = ({ restart }) => {
  const onClick = () => {
    alert(MESSAGE.CONFIRM_RESTART);
    restart();
  };

  return (
    <RestartButtonWrapperDiv>
      <Button size="medium" type="reset" onClick={onClick}>
        다시 시작하기
      </Button>
    </RestartButtonWrapperDiv>
  );
};

RestartButton.propTypes = {
  restart: PropTypes.func.isRequired,
};

export default RestartButton;
keys.py
import sys from copy import deepcopy from random import SystemRandom from .aggregation_info import AggregationInfo from .ec import (AffinePoint, JacobianPoint, default_ec, generator_Fq, hash_to_point_Fq2, hash_to_point_prehashed_Fq2, y_for_x) from .fields import Fq from .signature import Signature from .threshold import Threshold from .util import hash256, hmac256 RNG = SystemRandom() class PublicKey: """ Public keys are G1 elements, which are elliptic curve points (x, y), where each x, y is a 381 bit Fq element. The serialized represenentation is just the x value, and thus 48 bytes. (With the 1st bit determining the valid y). """ PUBLIC_KEY_SIZE = 48 def __init__(self, value): self.value = value @staticmethod def from_bytes(buffer): bit1 = buffer[0] & 0x80 buffer = bytes([buffer[0] & 0x1f]) + buffer[1:] x = Fq(default_ec.q, int.from_bytes(buffer, "big")) y_values = y_for_x(Fq(default_ec.q, x)) y_values.sort() y = y_values[0] if bit1: y = y_values[1] return PublicKey(AffinePoint(x, y, False, default_ec).to_jacobian()) @staticmethod def from_g1(g1_el): assert type(g1_el) == JacobianPoint return PublicKey(g1_el) def get_fingerprint(self): ser = self.serialize() return int.from_bytes(hash256(ser)[:4], "big") def serialize(self): return self.value.serialize() def size(self): return self.PUBLIC_KEY_SIZE def __eq__(self, other):
def __hash__(self): return int.from_bytes(self.value.serialize(), "big") def __lt__(self, other): return self.value.serialize() < other.value.serialize() def __str__(self): return "PublicKey(" + self.value.to_affine().__str__() + ")" def __repr__(self): return "PublicKey(" + self.value.to_affine().__repr__() + ")" def __deepcopy__(self, memo): return PublicKey.from_g1(deepcopy(self.value, memo)) class PrivateKey: """ Private keys are just random integers between 1 and the group order. """ PRIVATE_KEY_SIZE = 32 def __init__(self, value): self.value = value @staticmethod def from_bytes(buffer): return PrivateKey(int.from_bytes(buffer, "big")) @staticmethod def from_seed(seed): hashed = hmac256(seed, b"BLS private key seed") return PrivateKey(int.from_bytes(hashed, "big") % default_ec.n) @staticmethod def new_threshold(T, N): """ Create a new private key with associated data suitable for T of N threshold signatures under a Joint-Feldman scheme. After the dealing phase, one needs cooperation of T players out of N in order to sign a message with the master key pair. Return: - poly[0] - your share of the master secret key - commitments to your polynomial P - secret_fragments[j] = P(j), to be sent to player j (All N secret_fragments[j] can be combined to make a secret share.) """ assert 1 <= T <= N g1 = generator_Fq() poly = [Fq(default_ec.n, RNG.randint(1, default_ec.n - 1)) for _ in range(T)] commitments = [g1 * c for c in poly] secret_fragments = [sum(c * pow(x, i, default_ec.n) for i, c in enumerate(poly)) for x in range(1, N+1)] return PrivateKey(poly[0]), commitments, secret_fragments def get_public_key(self): return PublicKey.from_g1((self.value * generator_Fq()) .to_jacobian()) def sign(self, m): r = hash_to_point_Fq2(m).to_jacobian() aggregation_info = AggregationInfo.from_msg(self.get_public_key(), m) return Signature.from_g2(self.value * r, aggregation_info) def sign_prehashed(self, h): r = hash_to_point_prehashed_Fq2(h).to_jacobian() aggregation_info = AggregationInfo.from_msg_hash(self.get_public_key(), h) return Signature.from_g2(self.value * r, aggregation_info) def sign_threshold(self, m, player, players): """ As the given player out of a list of player indices, return a signature share for the given message. 
""" assert player in players r = hash_to_point_Fq2(m).to_jacobian() i = players.index(player) lambs = Threshold.lagrange_coeffs_at_zero(players) return Signature.from_g2(self.value * (r * lambs[i])) def __lt__(self, other): return self.value < other.value def __eq__(self, other): return self.value == other.value def __hash__(self): return self.value def serialize(self): return self.value.to_bytes(self.PRIVATE_KEY_SIZE, "big") def size(self): return self.PRIVATE_KEY_SIZE def __str__(self): return "PrivateKey(" + hex(self.value) + ")" def __repr__(self): return "PrivateKey(" + hex(self.value) + ")" class ExtendedPrivateKey: version = 1 EXTENDED_PRIVATE_KEY_SIZE = 77 def __init__(self, version, depth, parent_fingerprint, child_number, chain_code, private_key): self.version = version self.depth = depth self.parent_fingerprint = parent_fingerprint self.child_number = child_number self.chain_code = chain_code self.private_key = private_key @staticmethod def from_seed(seed): i_left = hmac256(seed + bytes([0]), b"BLS HD seed") i_right = hmac256(seed + bytes([1]), b"BLS HD seed") sk_int = int.from_bytes(i_left, "big") % default_ec.n sk = PrivateKey.from_bytes( sk_int.to_bytes(PrivateKey.PRIVATE_KEY_SIZE, "big")) return ExtendedPrivateKey(ExtendedPrivateKey.version, 0, 0, 0, i_right, sk) def private_child(self, i): if (self.depth >= 255): raise Exception("Cannot go further than 255 levels") # Hardened keys have i >= 2^31. Non-hardened have i < 2^31 hardened = (i >= (2 ** 31)) if (hardened): hmac_input = self.private_key.serialize() else: hmac_input = self.private_key.get_public_key().serialize() hmac_input += i.to_bytes(4, "big") i_left = hmac256(hmac_input + bytes([0]), self.chain_code) i_right = hmac256(hmac_input + bytes([1]), self.chain_code) sk_int = ((int.from_bytes(i_left, "big") + self.private_key.value) % default_ec.n) sk = PrivateKey.from_bytes( sk_int.to_bytes(PrivateKey.PRIVATE_KEY_SIZE, "big")) return ExtendedPrivateKey(ExtendedPrivateKey.version, self.depth + 1, self.private_key.get_public_key() .get_fingerprint(), i, i_right, sk) def public_child(self, i): return self.private_child(i).get_extended_public_key() def get_extended_public_key(self): serialized = (self.version.to_bytes(4, "big") + bytes([self.depth]) + self.parent_fingerprint.to_bytes(4, "big") + self.child_number.to_bytes(4, "big") + self.chain_code + self.private_key.get_public_key().serialize()) return ExtendedPublicKey.from_bytes(serialized) def get_private_key(self): return self.private_key def get_public_key(self): return self.private_key.get_public_key() def size(self): return self.EXTENDED_PRIVATE_KEY_SIZE def serialize(self): return (self.version.to_bytes(4, "big") + bytes([self.depth]) + self.parent_fingerprint.to_bytes(4, "big") + self.child_number.to_bytes(4, "big") + self.chain_code + self.private_key.serialize()) def __eq__(self, other): return self.serialize() == other.serialize() def __hash__(self): return int.from_bytes(self.serialize()) class ExtendedPublicKey: EXTENDED_PUBLIC_KEY_SIZE = 93 def __init__(self, version, depth, parent_fingerprint, child_number, chain_code, public_key): self.version = version self.depth = depth self.parent_fingerprint = parent_fingerprint self.child_number = child_number self.chain_code = chain_code self.public_key = public_key @staticmethod def from_bytes(serialized): version = int.from_bytes(serialized[:4], "big") depth = int.from_bytes(serialized[4:5], "big") parent_fingerprint = int.from_bytes(serialized[5:9], "big") child_number = int.from_bytes(serialized[9:13], "big") 
chain_code = serialized[13:45] public_key = PublicKey.from_bytes(serialized[45:]) return ExtendedPublicKey(version, depth, parent_fingerprint, child_number, chain_code, public_key) def public_child(self, i): if (self.depth >= 255): raise Exception("Cannot go further than 255 levels") # Hardened keys have i >= 2^31. Non-hardened have i < 2^31 if i >= (2 ** 31): raise Exception("Cannot derive hardened children from public key") hmac_input = self.public_key.serialize() + i.to_bytes(4, "big") i_left = hmac256(hmac_input + bytes([0]), self.chain_code) i_right = hmac256(hmac_input + bytes([1]), self.chain_code) sk_left_int = (int.from_bytes(i_left, "big") % default_ec.n) sk_left = PrivateKey.from_bytes( sk_left_int.to_bytes(PrivateKey.PRIVATE_KEY_SIZE, "big")) new_pk = PublicKey.from_g1(sk_left.get_public_key().value + self.public_key.value) return ExtendedPublicKey(self.version, self.depth + 1, self.public_key.get_fingerprint(), i, i_right, new_pk) def get_public_key(self): return self.public_key def size(self): return self.EXTENDED_PUBLIC_KEY_SIZE def serialize(self): return (self.version.to_bytes(4, "big") + bytes([self.depth]) + self.parent_fingerprint.to_bytes(4, "big") + self.child_number.to_bytes(4, "big") + self.chain_code + self.public_key.serialize()) def __eq__(self, other): return self.serialize() == other.serialize() def __hash__(self): return int.from_bytes(self.serialize()) """ Copyright 2018 Chia Network Inc Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """
return self.value.serialize() == other.value.serialize()
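A brief usage sketch for the key classes above, assuming PrivateKey and ExtendedPrivateKey from this module are in scope (the exact import path depends on how the package is installed); the seed and message bytes are arbitrary placeholders:

# assumes: from <this package>.keys import ExtendedPrivateKey, PrivateKey

seed = b"\x01" * 32

# HD derivation: master key from a seed, then a hardened child.
esk = ExtendedPrivateKey.from_seed(seed)
child = esk.private_child(2 ** 31)        # hardened, since i >= 2^31
epk = child.get_extended_public_key()

# Plain signing with the underlying private key.
sk = child.get_private_key()
sig = sk.sign(b"hello")                   # Signature carrying its AggregationInfo

# 2-of-3 Joint-Feldman dealing for a single player, as described in new_threshold.
share, commitments, fragments = PrivateKey.new_threshold(2, 3)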
root.go
/* Copyright © 2021 NAME HERE <EMAIL ADDRESS> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package cmd import ( "github.com/Ogguz/kubecheckup/k8s" "github.com/Ogguz/kubecheckup/model" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" "gopkg.in/yaml.v2" "os" ) var cfgFile string var Cfg model.Config // rootCmd represents the base command when called without any subcommands var rootCmd = &cobra.Command{ Use: "kubecheckup", Short: "TODO", Long: `TODO`, Run: func(cmd *cobra.Command, args []string) {k8s.RunAllTheTests(&Cfg)}, } // Execute adds all child commands to the root command and sets flags appropriately. // This is called by main.main(). It only needs to happen once to the rootCmd. func Execute() { cobra.CheckErr(rootCmd.Execute()) } func init() { cobra.OnInitialize(initConfig) // Here you will define your flags and configuration settings. // Cobra supports persistent flags, which, if defined here, // will be global for your application. rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.kubecheckup.yaml)") // Cobra also supports local flags, which will only run // when this action is called directly. rootCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle") } // initConfig reads in config file. func initConfig() { readFile(&Cfg,cfgFile) } func readFile(cfg *model.Config, cfgFile string) {
log.Debug("Opening ",cfgFile) f, err := os.Open(cfgFile) if err != nil { log.Fatal(err) } defer f.Close() decoder := yaml.NewDecoder(f) err = decoder.Decode(cfg) if err != nil { log.Fatal(err) } log.Debug("Kubernetes config file ", cfg.Kubernetes.ConfigFile) }
0020_auto_20201211_1230.py
# Generated by Django 3.1.3 on 2020-12-11 12:30

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('tasks', '0019_auto_20201211_1229'),
    ]

    operations = [
        migrations.AlterField(
            model_name='task',
            name='estimate_time',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
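The AlterField above converges the tasks.Task model to a nullable, optional integer; the model declaration it corresponds to would look roughly like this (a sketch, the remaining Task fields are not visible in this migration):

from django.db import models

class Task(models.Model):
    # ... remaining Task fields are defined elsewhere in the app ...
    estimate_time = models.IntegerField(blank=True, null=True)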
expr.rs
//! Type checking expressions. //! //! See `mod.rs` for more context on type checking in general. use crate::astconv::AstConv as _; use crate::check::cast; use crate::check::coercion::CoerceMany; use crate::check::fatally_break_rust; use crate::check::method::{probe, MethodError, SelfSource}; use crate::check::report_unexpected_variant_res; use crate::check::BreakableCtxt; use crate::check::Diverges; use crate::check::Expectation::{self, ExpectCastableToType, ExpectHasType, NoExpectation}; use crate::check::FnCtxt; use crate::check::Needs; use crate::check::TupleArgumentsFlag::DontTupleArguments; use crate::errors::{ FieldMultiplySpecifiedInInitializer, FunctionalRecordUpdateOnNonStruct, YieldExprOutsideOfGenerator, }; use crate::type_error_struct; use crate::errors::{AddressOfTemporaryTaken, ReturnStmtOutsideOfFnBody, StructExprNonExhaustive}; use rustc_ast as ast; use rustc_ast::util::lev_distance::find_best_match_for_name; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_errors::ErrorReported; use rustc_errors::{pluralize, struct_span_err, Applicability, DiagnosticBuilder, DiagnosticId}; use rustc_hir as hir; use rustc_hir::def::{CtorKind, DefKind, Res}; use rustc_hir::def_id::DefId; use rustc_hir::lang_items::LangItem; use rustc_hir::{ExprKind, QPath}; use rustc_infer::infer; use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use rustc_middle::ty; use rustc_middle::ty::adjustment::{Adjust, Adjustment, AllowTwoPhase}; use rustc_middle::ty::Ty; use rustc_middle::ty::TypeFoldable; use rustc_middle::ty::{AdtKind, Visibility}; use rustc_span::hygiene::DesugaringKind; use rustc_span::source_map::Span; use rustc_span::symbol::{kw, sym, Ident, Symbol}; use rustc_trait_selection::traits::{self, ObligationCauseCode}; use std::fmt::Display; impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn check_expr_eq_type(&self, expr: &'tcx hir::Expr<'tcx>, expected: Ty<'tcx>) { let ty = self.check_expr_with_hint(expr, expected); self.demand_eqtype(expr.span, expected, ty); } pub fn check_expr_has_type_or_error( &self, expr: &'tcx hir::Expr<'tcx>, expected: Ty<'tcx>, extend_err: impl Fn(&mut DiagnosticBuilder<'_>), ) -> Ty<'tcx> { self.check_expr_meets_expectation_or_error(expr, ExpectHasType(expected), extend_err) } fn check_expr_meets_expectation_or_error( &self, expr: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>, extend_err: impl Fn(&mut DiagnosticBuilder<'_>), ) -> Ty<'tcx> { let expected_ty = expected.to_option(&self).unwrap_or(self.tcx.types.bool); let mut ty = self.check_expr_with_expectation(expr, expected); // While we don't allow *arbitrary* coercions here, we *do* allow // coercions from ! to `expected`. if ty.is_never() { assert!( !self.typeck_results.borrow().adjustments().contains_key(expr.hir_id), "expression with never type wound up being adjusted" ); let adj_ty = self.next_diverging_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::AdjustmentType, span: expr.span, }); self.apply_adjustments( expr, vec![Adjustment { kind: Adjust::NeverToAny, target: adj_ty }], ); ty = adj_ty; } if let Some(mut err) = self.demand_suptype_diag(expr.span, expected_ty, ty) { let expr = expr.peel_drop_temps(); self.suggest_deref_ref_or_into(&mut err, expr, expected_ty, ty, None); extend_err(&mut err); // Error possibly reported in `check_assign` so avoid emitting error again. 
err.emit_unless(self.is_assign_to_bool(expr, expected_ty)); } ty } pub(super) fn check_expr_coercable_to_type( &self, expr: &'tcx hir::Expr<'tcx>, expected: Ty<'tcx>, expected_ty_expr: Option<&'tcx hir::Expr<'tcx>>, ) -> Ty<'tcx> { let ty = self.check_expr_with_hint(expr, expected); // checks don't need two phase self.demand_coerce(expr, ty, expected, expected_ty_expr, AllowTwoPhase::No) } pub(super) fn check_expr_with_hint( &self, expr: &'tcx hir::Expr<'tcx>, expected: Ty<'tcx>, ) -> Ty<'tcx> { self.check_expr_with_expectation(expr, ExpectHasType(expected)) } fn check_expr_with_expectation_and_needs( &self, expr: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>, needs: Needs, ) -> Ty<'tcx> { let ty = self.check_expr_with_expectation(expr, expected); // If the expression is used in a place whether mutable place is required // e.g. LHS of assignment, perform the conversion. if let Needs::MutPlace = needs { self.convert_place_derefs_to_mutable(expr); } ty } pub(super) fn check_expr(&self, expr: &'tcx hir::Expr<'tcx>) -> Ty<'tcx> { self.check_expr_with_expectation(expr, NoExpectation) } pub(super) fn check_expr_with_needs( &self, expr: &'tcx hir::Expr<'tcx>, needs: Needs, ) -> Ty<'tcx> { self.check_expr_with_expectation_and_needs(expr, NoExpectation, needs) } /// Invariant: /// If an expression has any sub-expressions that result in a type error, /// inspecting that expression's type with `ty.references_error()` will return /// true. Likewise, if an expression is known to diverge, inspecting its /// type with `ty::type_is_bot` will return true (n.b.: since Rust is /// strict, _|_ can appear in the type of an expression that does not, /// itself, diverge: for example, fn() -> _|_.) /// Note that inspecting a type's structure *directly* may expose the fact /// that there are actually multiple representations for `Error`, so avoid /// that when err needs to be handled differently. pub(super) fn check_expr_with_expectation( &self, expr: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>, ) -> Ty<'tcx> { debug!(">> type-checking: expr={:?} expected={:?}", expr, expected); // True if `expr` is a `Try::from_ok(())` that is a result of desugaring a try block // without the final expr (e.g. `try { return; }`). We don't want to generate an // unreachable_code lint for it since warnings for autogenerated code are confusing. let is_try_block_generated_unit_expr = match expr.kind { ExprKind::Call(_, ref args) if expr.span.is_desugaring(DesugaringKind::TryBlock) => { args.len() == 1 && args[0].span.is_desugaring(DesugaringKind::TryBlock) } _ => false, }; // Warn for expressions after diverging siblings. if !is_try_block_generated_unit_expr { self.warn_if_unreachable(expr.hir_id, expr.span, "expression"); } // Hide the outer diverging and has_errors flags. let old_diverges = self.diverges.replace(Diverges::Maybe); let old_has_errors = self.has_errors.replace(false); let ty = ensure_sufficient_stack(|| self.check_expr_kind(expr, expected)); // Warn for non-block expressions with diverging children. match expr.kind { ExprKind::Block(..) | ExprKind::Loop(..) | ExprKind::Match(..) => {} // If `expr` is a result of desugaring the try block and is an ok-wrapped // diverging expression (e.g. it arose from desugaring of `try { return }`), // we skip issuing a warning because it is autogenerated code. ExprKind::Call(..) 
if expr.span.is_desugaring(DesugaringKind::TryBlock) => {} ExprKind::Call(ref callee, _) => { self.warn_if_unreachable(expr.hir_id, callee.span, "call") } ExprKind::MethodCall(_, ref span, _, _) => { self.warn_if_unreachable(expr.hir_id, *span, "call") } _ => self.warn_if_unreachable(expr.hir_id, expr.span, "expression"), } // Any expression that produces a value of type `!` must have diverged if ty.is_never() { self.diverges.set(self.diverges.get() | Diverges::always(expr.span)); } // Record the type, which applies it effects. // We need to do this after the warning above, so that // we don't warn for the diverging expression itself. self.write_ty(expr.hir_id, ty); // Combine the diverging and has_error flags. self.diverges.set(self.diverges.get() | old_diverges); self.has_errors.set(self.has_errors.get() | old_has_errors); debug!("type of {} is...", self.tcx.hir().node_to_string(expr.hir_id)); debug!("... {:?}, expected is {:?}", ty, expected); ty } fn
( &self, expr: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>, ) -> Ty<'tcx> { debug!("check_expr_kind(expr={:?}, expected={:?})", expr, expected); let tcx = self.tcx; match expr.kind { ExprKind::Box(ref subexpr) => self.check_expr_box(subexpr, expected), ExprKind::Lit(ref lit) => self.check_lit(&lit, expected), ExprKind::Binary(op, ref lhs, ref rhs) => self.check_binop(expr, op, lhs, rhs), ExprKind::Assign(ref lhs, ref rhs, ref span) => { self.check_expr_assign(expr, expected, lhs, rhs, span) } ExprKind::AssignOp(op, ref lhs, ref rhs) => self.check_binop_assign(expr, op, lhs, rhs), ExprKind::Unary(unop, ref oprnd) => self.check_expr_unary(unop, oprnd, expected, expr), ExprKind::AddrOf(kind, mutbl, ref oprnd) => { self.check_expr_addr_of(kind, mutbl, oprnd, expected, expr) } ExprKind::Path(QPath::LangItem(lang_item, _)) => { self.check_lang_item_path(lang_item, expr) } ExprKind::Path(ref qpath) => self.check_expr_path(qpath, expr), ExprKind::InlineAsm(asm) => self.check_expr_asm(asm), ExprKind::LlvmInlineAsm(ref asm) => { for expr in asm.outputs_exprs.iter().chain(asm.inputs_exprs.iter()) { self.check_expr(expr); } tcx.mk_unit() } ExprKind::Break(destination, ref expr_opt) => { self.check_expr_break(destination, expr_opt.as_deref(), expr) } ExprKind::Continue(destination) => { if destination.target_id.is_ok() { tcx.types.never } else { // There was an error; make type-check fail. tcx.ty_error() } } ExprKind::Ret(ref expr_opt) => self.check_expr_return(expr_opt.as_deref(), expr), ExprKind::Loop(ref body, _, source) => { self.check_expr_loop(body, source, expected, expr) } ExprKind::Match(ref discrim, ref arms, match_src) => { self.check_match(expr, &discrim, arms, expected, match_src) } ExprKind::Closure(capture, ref decl, body_id, _, gen) => { self.check_expr_closure(expr, capture, &decl, body_id, gen, expected) } ExprKind::Block(ref body, _) => self.check_block_with_expected(&body, expected), ExprKind::Call(ref callee, ref args) => self.check_call(expr, &callee, args, expected), ExprKind::MethodCall(ref segment, span, ref args, _) => { self.check_method_call(expr, segment, span, args, expected) } ExprKind::Cast(ref e, ref t) => self.check_expr_cast(e, t, expr), ExprKind::Type(ref e, ref t) => { let ty = self.to_ty_saving_user_provided_ty(&t); self.check_expr_eq_type(&e, ty); ty } ExprKind::DropTemps(ref e) => self.check_expr_with_expectation(e, expected), ExprKind::Array(ref args) => self.check_expr_array(args, expected, expr), ExprKind::ConstBlock(ref anon_const) => self.to_const(anon_const).ty, ExprKind::Repeat(ref element, ref count) => { self.check_expr_repeat(element, count, expected, expr) } ExprKind::Tup(ref elts) => self.check_expr_tuple(elts, expected, expr), ExprKind::Struct(ref qpath, fields, ref base_expr) => { self.check_expr_struct(expr, expected, qpath, fields, base_expr) } ExprKind::Field(ref base, field) => self.check_field(expr, &base, field), ExprKind::Index(ref base, ref idx) => self.check_expr_index(base, idx, expr), ExprKind::Yield(ref value, ref src) => self.check_expr_yield(value, expr, src), hir::ExprKind::Err => tcx.ty_error(), } } fn check_expr_box(&self, expr: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>) -> Ty<'tcx> { let expected_inner = expected.to_option(self).map_or(NoExpectation, |ty| match ty.kind() { ty::Adt(def, _) if def.is_box() => Expectation::rvalue_hint(self, ty.boxed_ty()), _ => NoExpectation, }); let referent_ty = self.check_expr_with_expectation(expr, expected_inner); self.tcx.mk_box(referent_ty) } fn check_expr_unary( &self, unop: 
hir::UnOp, oprnd: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>, expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { let tcx = self.tcx; let expected_inner = match unop { hir::UnOp::UnNot | hir::UnOp::UnNeg => expected, hir::UnOp::UnDeref => NoExpectation, }; let mut oprnd_t = self.check_expr_with_expectation(&oprnd, expected_inner); if !oprnd_t.references_error() { oprnd_t = self.structurally_resolved_type(expr.span, oprnd_t); match unop { hir::UnOp::UnDeref => { if let Some(ty) = self.lookup_derefing(expr, oprnd, oprnd_t) { oprnd_t = ty; } else { let mut err = type_error_struct!( tcx.sess, expr.span, oprnd_t, E0614, "type `{}` cannot be dereferenced", oprnd_t, ); let sp = tcx.sess.source_map().start_point(expr.span); if let Some(sp) = tcx.sess.parse_sess.ambiguous_block_expr_parse.borrow().get(&sp) { tcx.sess.parse_sess.expr_parentheses_needed(&mut err, *sp, None); } err.emit(); oprnd_t = tcx.ty_error(); } } hir::UnOp::UnNot => { let result = self.check_user_unop(expr, oprnd_t, unop); // If it's builtin, we can reuse the type, this helps inference. if !(oprnd_t.is_integral() || *oprnd_t.kind() == ty::Bool) { oprnd_t = result; } } hir::UnOp::UnNeg => { let result = self.check_user_unop(expr, oprnd_t, unop); // If it's builtin, we can reuse the type, this helps inference. if !oprnd_t.is_numeric() { oprnd_t = result; } } } } oprnd_t } fn check_expr_addr_of( &self, kind: hir::BorrowKind, mutbl: hir::Mutability, oprnd: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>, expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { let hint = expected.only_has_type(self).map_or(NoExpectation, |ty| { match ty.kind() { ty::Ref(_, ty, _) | ty::RawPtr(ty::TypeAndMut { ty, .. }) => { if oprnd.is_syntactic_place_expr() { // Places may legitimately have unsized types. // For example, dereferences of a fat pointer and // the last field of a struct can be unsized. ExpectHasType(ty) } else { Expectation::rvalue_hint(self, ty) } } _ => NoExpectation, } }); let ty = self.check_expr_with_expectation_and_needs(&oprnd, hint, Needs::maybe_mut_place(mutbl)); let tm = ty::TypeAndMut { ty, mutbl }; match kind { _ if tm.ty.references_error() => self.tcx.ty_error(), hir::BorrowKind::Raw => { self.check_named_place_expr(oprnd); self.tcx.mk_ptr(tm) } hir::BorrowKind::Ref => { // Note: at this point, we cannot say what the best lifetime // is to use for resulting pointer. We want to use the // shortest lifetime possible so as to avoid spurious borrowck // errors. Moreover, the longest lifetime will depend on the // precise details of the value whose address is being taken // (and how long it is valid), which we don't know yet until // type inference is complete. // // Therefore, here we simply generate a region variable. The // region inferencer will then select a suitable value. // Finally, borrowck will infer the value of the region again, // this time with enough precision to check that the value // whose address was taken can actually be made to live as long // as it needs to live. let region = self.next_region_var(infer::AddrOfRegion(expr.span)); self.tcx.mk_ref(region, tm) } } } /// Does this expression refer to a place that either: /// * Is based on a local or static. /// * Contains a dereference /// Note that the adjustments for the children of `expr` should already /// have been resolved. fn check_named_place_expr(&self, oprnd: &'tcx hir::Expr<'tcx>) { let is_named = oprnd.is_place_expr(|base| { // Allow raw borrows if there are any deref adjustments. 
// // const VAL: (i32,) = (0,); // const REF: &(i32,) = &(0,); // // &raw const VAL.0; // ERROR // &raw const REF.0; // OK, same as &raw const (*REF).0; // // This is maybe too permissive, since it allows // `let u = &raw const Box::new((1,)).0`, which creates an // immediately dangling raw pointer. self.typeck_results .borrow() .adjustments() .get(base.hir_id) .map_or(false, |x| x.iter().any(|adj| matches!(adj.kind, Adjust::Deref(_)))) }); if !is_named { self.tcx.sess.emit_err(AddressOfTemporaryTaken { span: oprnd.span }) } } fn check_lang_item_path( &self, lang_item: hir::LangItem, expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { self.resolve_lang_item_path(lang_item, expr.span, expr.hir_id).1 } fn check_expr_path(&self, qpath: &hir::QPath<'_>, expr: &'tcx hir::Expr<'tcx>) -> Ty<'tcx> { let tcx = self.tcx; let (res, opt_ty, segs) = self.resolve_ty_and_res_ufcs(qpath, expr.hir_id, expr.span); let ty = match res { Res::Err => { self.set_tainted_by_errors(); tcx.ty_error() } Res::Def(DefKind::Ctor(_, CtorKind::Fictive), _) => { report_unexpected_variant_res(tcx, res, expr.span); tcx.ty_error() } _ => self.instantiate_value_path(segs, opt_ty, res, expr.span, expr.hir_id).0, }; if let ty::FnDef(..) = ty.kind() { let fn_sig = ty.fn_sig(tcx); if !tcx.features().unsized_fn_params { // We want to remove some Sized bounds from std functions, // but don't want to expose the removal to stable Rust. // i.e., we don't want to allow // // ```rust // drop as fn(str); // ``` // // to work in stable even if the Sized bound on `drop` is relaxed. for i in 0..fn_sig.inputs().skip_binder().len() { // We just want to check sizedness, so instead of introducing // placeholder lifetimes with probing, we just replace higher lifetimes // with fresh vars. let input = self .replace_bound_vars_with_fresh_vars( expr.span, infer::LateBoundRegionConversionTime::FnCall, fn_sig.input(i), ) .0; self.require_type_is_sized_deferred( input, expr.span, traits::SizedArgumentType(None), ); } } // Here we want to prevent struct constructors from returning unsized types. // There were two cases this happened: fn pointer coercion in stable // and usual function call in presence of unsized_locals. // Also, as we just want to check sizedness, instead of introducing // placeholder lifetimes with probing, we just replace higher lifetimes // with fresh vars. let output = self .replace_bound_vars_with_fresh_vars( expr.span, infer::LateBoundRegionConversionTime::FnCall, fn_sig.output(), ) .0; self.require_type_is_sized_deferred(output, expr.span, traits::SizedReturnType); } // We always require that the type provided as the value for // a type parameter outlives the moment of instantiation. let substs = self.typeck_results.borrow().node_substs(expr.hir_id); self.add_wf_bounds(substs, expr); ty } fn check_expr_break( &self, destination: hir::Destination, expr_opt: Option<&'tcx hir::Expr<'tcx>>, expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { let tcx = self.tcx; if let Ok(target_id) = destination.target_id { let (e_ty, cause); if let Some(ref e) = expr_opt { // If this is a break with a value, we need to type-check // the expression. Get an expected type from the loop context. let opt_coerce_to = { // We should release `enclosing_breakables` before the `check_expr_with_hint` // below, so can't move this block of code to the enclosing scope and share // `ctxt` with the second `encloding_breakables` borrow below. 
let mut enclosing_breakables = self.enclosing_breakables.borrow_mut(); match enclosing_breakables.opt_find_breakable(target_id) { Some(ctxt) => ctxt.coerce.as_ref().map(|coerce| coerce.expected_ty()), None => { // Avoid ICE when `break` is inside a closure (#65383). return tcx.ty_error_with_message( expr.span, "break was outside loop, but no error was emitted", ); } } }; // If the loop context is not a `loop { }`, then break with // a value is illegal, and `opt_coerce_to` will be `None`. // Just set expectation to error in that case. let coerce_to = opt_coerce_to.unwrap_or_else(|| tcx.ty_error()); // Recurse without `enclosing_breakables` borrowed. e_ty = self.check_expr_with_hint(e, coerce_to); cause = self.misc(e.span); } else { // Otherwise, this is a break *without* a value. That's // always legal, and is equivalent to `break ()`. e_ty = tcx.mk_unit(); cause = self.misc(expr.span); } // Now that we have type-checked `expr_opt`, borrow // the `enclosing_loops` field and let's coerce the // type of `expr_opt` into what is expected. let mut enclosing_breakables = self.enclosing_breakables.borrow_mut(); let ctxt = match enclosing_breakables.opt_find_breakable(target_id) { Some(ctxt) => ctxt, None => { // Avoid ICE when `break` is inside a closure (#65383). return tcx.ty_error_with_message( expr.span, "break was outside loop, but no error was emitted", ); } }; if let Some(ref mut coerce) = ctxt.coerce { if let Some(ref e) = expr_opt { coerce.coerce(self, &cause, e, e_ty); } else { assert!(e_ty.is_unit()); let ty = coerce.expected_ty(); coerce.coerce_forced_unit( self, &cause, &mut |mut err| { self.suggest_mismatched_types_on_tail( &mut err, expr, ty, e_ty, cause.span, target_id, ); if let Some(val) = ty_kind_suggestion(ty) { let label = destination .label .map(|l| format!(" {}", l.ident)) .unwrap_or_else(String::new); err.span_suggestion( expr.span, "give it a value of the expected type", format!("break{} {}", label, val), Applicability::HasPlaceholders, ); } }, false, ); } } else { // If `ctxt.coerce` is `None`, we can just ignore // the type of the expression. This is because // either this was a break *without* a value, in // which case it is always a legal type (`()`), or // else an error would have been flagged by the // `loops` pass for using break with an expression // where you are not supposed to. assert!(expr_opt.is_none() || self.tcx.sess.has_errors()); } // If we encountered a `break`, then (no surprise) it may be possible to break from the // loop... unless the value being returned from the loop diverges itself, e.g. // `break return 5` or `break loop {}`. ctxt.may_break |= !self.diverges.get().is_always(); // the type of a `break` is always `!`, since it diverges tcx.types.never } else { // Otherwise, we failed to find the enclosing loop; // this can only happen if the `break` was not // inside a loop at all, which is caught by the // loop-checking pass. let err = self.tcx.ty_error_with_message( expr.span, "break was outside loop, but no error was emitted", ); // We still need to assign a type to the inner expression to // prevent the ICE in #43162. if let Some(ref e) = expr_opt { self.check_expr_with_hint(e, err); // ... except when we try to 'break rust;'. // ICE this expression in particular (see #43162). if let ExprKind::Path(QPath::Resolved(_, ref path)) = e.kind { if path.segments.len() == 1 && path.segments[0].ident.name == sym::rust { fatally_break_rust(self.tcx.sess); } } } // There was an error; make type-check fail. 
err } } fn check_expr_return( &self, expr_opt: Option<&'tcx hir::Expr<'tcx>>, expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { if self.ret_coercion.is_none() { self.tcx.sess.emit_err(ReturnStmtOutsideOfFnBody { span: expr.span }); } else if let Some(ref e) = expr_opt { if self.ret_coercion_span.borrow().is_none() { *self.ret_coercion_span.borrow_mut() = Some(e.span); } self.check_return_expr(e); } else { let mut coercion = self.ret_coercion.as_ref().unwrap().borrow_mut(); if self.ret_coercion_span.borrow().is_none() { *self.ret_coercion_span.borrow_mut() = Some(expr.span); } let cause = self.cause(expr.span, ObligationCauseCode::ReturnNoExpression); if let Some((fn_decl, _)) = self.get_fn_decl(expr.hir_id) { coercion.coerce_forced_unit( self, &cause, &mut |db| { let span = fn_decl.output.span(); if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) { db.span_label( span, format!("expected `{}` because of this return type", snippet), ); } }, true, ); } else { coercion.coerce_forced_unit(self, &cause, &mut |_| (), true); } } self.tcx.types.never } pub(super) fn check_return_expr(&self, return_expr: &'tcx hir::Expr<'tcx>) { let ret_coercion = self.ret_coercion.as_ref().unwrap_or_else(|| { span_bug!(return_expr.span, "check_return_expr called outside fn body") }); let ret_ty = ret_coercion.borrow().expected_ty(); let return_expr_ty = self.check_expr_with_hint(return_expr, ret_ty.clone()); ret_coercion.borrow_mut().coerce( self, &self.cause(return_expr.span, ObligationCauseCode::ReturnValue(return_expr.hir_id)), return_expr, return_expr_ty, ); } pub(crate) fn check_lhs_assignable( &self, lhs: &'tcx hir::Expr<'tcx>, err_code: &'static str, expr_span: &Span, ) { if lhs.is_syntactic_place_expr() { return; } // FIXME: Make this use SessionDiagnostic once error codes can be dynamically set. let mut err = self.tcx.sess.struct_span_err_with_code( *expr_span, "invalid left-hand side of assignment", DiagnosticId::Error(err_code.into()), ); err.span_label(lhs.span, "cannot assign to this expression"); err.emit(); } /// Type check assignment expression `expr` of form `lhs = rhs`. /// The expected type is `()` and is passsed to the function for the purposes of diagnostics. fn check_expr_assign( &self, expr: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>, lhs: &'tcx hir::Expr<'tcx>, rhs: &'tcx hir::Expr<'tcx>, span: &Span, ) -> Ty<'tcx> { let expected_ty = expected.coercion_target_type(self, expr.span); if expected_ty == self.tcx.types.bool { // The expected type is `bool` but this will result in `()` so we can reasonably // say that the user intended to write `lhs == rhs` instead of `lhs = rhs`. // The likely cause of this is `if foo = bar { .. }`. let actual_ty = self.tcx.mk_unit(); let mut err = self.demand_suptype_diag(expr.span, expected_ty, actual_ty).unwrap(); let lhs_ty = self.check_expr(&lhs); let rhs_ty = self.check_expr(&rhs); let (applicability, eq) = if self.can_coerce(rhs_ty, lhs_ty) { (Applicability::MachineApplicable, true) } else { (Applicability::MaybeIncorrect, false) }; if !lhs.is_syntactic_place_expr() { // Do not suggest `if let x = y` as `==` is way more likely to be the intention. if let hir::Node::Expr(hir::Expr { kind: ExprKind::Match( _, _, hir::MatchSource::IfDesugar { .. } | hir::MatchSource::WhileDesugar, ), .. }) = self.tcx.hir().get( self.tcx.hir().get_parent_node(self.tcx.hir().get_parent_node(expr.hir_id)), ) { // Likely `if let` intended. 
err.span_suggestion_verbose( expr.span.shrink_to_lo(), "you might have meant to use pattern matching", "let ".to_string(), applicability, ); } } if eq { err.span_suggestion_verbose( *span, "you might have meant to compare for equality", "==".to_string(), applicability, ); } if self.sess().if_let_suggestions.borrow().get(&expr.span).is_some() { // We already emitted an `if let` suggestion due to an identifier not found. err.delay_as_bug(); } else { err.emit(); } return self.tcx.ty_error(); } self.check_lhs_assignable(lhs, "E0070", span); let lhs_ty = self.check_expr_with_needs(&lhs, Needs::MutPlace); let rhs_ty = self.check_expr_coercable_to_type(&rhs, lhs_ty, Some(lhs)); self.require_type_is_sized(lhs_ty, lhs.span, traits::AssignmentLhsSized); if lhs_ty.references_error() || rhs_ty.references_error() { self.tcx.ty_error() } else { self.tcx.mk_unit() } } fn check_expr_loop( &self, body: &'tcx hir::Block<'tcx>, source: hir::LoopSource, expected: Expectation<'tcx>, expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { let coerce = match source { // you can only use break with a value from a normal `loop { }` hir::LoopSource::Loop => { let coerce_to = expected.coercion_target_type(self, body.span); Some(CoerceMany::new(coerce_to)) } hir::LoopSource::While | hir::LoopSource::WhileLet | hir::LoopSource::ForLoop => None, }; let ctxt = BreakableCtxt { coerce, may_break: false, // Will get updated if/when we find a `break`. }; let (ctxt, ()) = self.with_breakable_ctxt(expr.hir_id, ctxt, || { self.check_block_no_value(&body); }); if ctxt.may_break { // No way to know whether it's diverging because // of a `break` or an outer `break` or `return`. self.diverges.set(Diverges::Maybe); } // If we permit break with a value, then result type is // the LUB of the breaks (possibly ! if none); else, it // is nil. This makes sense because infinite loops // (which would have type !) are only possible iff we // permit break with a value [1]. if ctxt.coerce.is_none() && !ctxt.may_break { // [1] self.tcx.sess.delay_span_bug(body.span, "no coercion, but loop may not break"); } ctxt.coerce.map(|c| c.complete(self)).unwrap_or_else(|| self.tcx.mk_unit()) } /// Checks a method call. fn check_method_call( &self, expr: &'tcx hir::Expr<'tcx>, segment: &hir::PathSegment<'_>, span: Span, args: &'tcx [hir::Expr<'tcx>], expected: Expectation<'tcx>, ) -> Ty<'tcx> { let rcvr = &args[0]; let rcvr_t = self.check_expr(&rcvr); // no need to check for bot/err -- callee does that let rcvr_t = self.structurally_resolved_type(args[0].span, rcvr_t); let method = match self.lookup_method(rcvr_t, segment, span, expr, rcvr) { Ok(method) => { // We could add a "consider `foo::<params>`" suggestion here, but I wasn't able to // trigger this codepath causing `structuraly_resolved_type` to emit an error. self.write_method_call(expr.hir_id, method); Ok(method) } Err(error) => { if segment.ident.name != kw::Invalid { self.report_extended_method_error(segment, span, args, rcvr_t, error); } Err(()) } }; // Call the generic checker. 
self.check_method_argument_types( span, expr, method, &args[1..], DontTupleArguments, expected, ) } fn report_extended_method_error( &self, segment: &hir::PathSegment<'_>, span: Span, args: &'tcx [hir::Expr<'tcx>], rcvr_t: Ty<'tcx>, error: MethodError<'tcx>, ) { let rcvr = &args[0]; let try_alt_rcvr = |err: &mut DiagnosticBuilder<'_>, new_rcvr_t| { if let Some(new_rcvr_t) = new_rcvr_t { if let Ok(pick) = self.lookup_probe( span, segment.ident, new_rcvr_t, rcvr, probe::ProbeScope::AllTraits, ) { debug!("try_alt_rcvr: pick candidate {:?}", pick); // Make sure the method is defined for the *actual* receiver: // we don't want to treat `Box<Self>` as a receiver if // it only works because of an autoderef to `&self` if pick.autoderefs == 0 { err.span_label( pick.item.ident.span, &format!("the method is available for `{}` here", new_rcvr_t), ); } } } }; if let Some(mut err) = self.report_method_error( span, rcvr_t, segment.ident, SelfSource::MethodCall(rcvr), error, Some(args), ) { if let ty::Adt(..) = rcvr_t.kind() { // Try alternative arbitrary self types that could fulfill this call. // FIXME: probe for all types that *could* be arbitrary self-types, not // just this list. try_alt_rcvr(&mut err, self.tcx.mk_lang_item(rcvr_t, LangItem::OwnedBox)); try_alt_rcvr(&mut err, self.tcx.mk_lang_item(rcvr_t, LangItem::Pin)); try_alt_rcvr(&mut err, self.tcx.mk_diagnostic_item(rcvr_t, sym::Arc)); try_alt_rcvr(&mut err, self.tcx.mk_diagnostic_item(rcvr_t, sym::Rc)); } err.emit(); } } fn check_expr_cast( &self, e: &'tcx hir::Expr<'tcx>, t: &'tcx hir::Ty<'tcx>, expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { // Find the type of `e`. Supply hints based on the type we are casting to, // if appropriate. let t_cast = self.to_ty_saving_user_provided_ty(t); let t_cast = self.resolve_vars_if_possible(t_cast); let t_expr = self.check_expr_with_expectation(e, ExpectCastableToType(t_cast)); let t_cast = self.resolve_vars_if_possible(t_cast); // Eagerly check for some obvious errors. if t_expr.references_error() || t_cast.references_error() { self.tcx.ty_error() } else { // Defer other checks until we're done type checking. 
let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut(); match cast::CastCheck::new(self, e, t_expr, t_cast, t.span, expr.span) { Ok(cast_check) => { deferred_cast_checks.push(cast_check); t_cast } Err(ErrorReported) => self.tcx.ty_error(), } } } fn check_expr_array( &self, args: &'tcx [hir::Expr<'tcx>], expected: Expectation<'tcx>, expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { let element_ty = if !args.is_empty() { let coerce_to = expected .to_option(self) .and_then(|uty| match *uty.kind() { ty::Array(ty, _) | ty::Slice(ty) => Some(ty), _ => None, }) .unwrap_or_else(|| { self.next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::TypeInference, span: expr.span, }) }); let mut coerce = CoerceMany::with_coercion_sites(coerce_to, args); assert_eq!(self.diverges.get(), Diverges::Maybe); for e in args { let e_ty = self.check_expr_with_hint(e, coerce_to); let cause = self.misc(e.span); coerce.coerce(self, &cause, e, e_ty); } coerce.complete(self) } else { self.next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::TypeInference, span: expr.span, }) }; self.tcx.mk_array(element_ty, args.len() as u64) } fn check_expr_repeat( &self, element: &'tcx hir::Expr<'tcx>, count: &'tcx hir::AnonConst, expected: Expectation<'tcx>, _expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { let tcx = self.tcx; let count = self.to_const(count); let uty = match expected { ExpectHasType(uty) => match *uty.kind() { ty::Array(ty, _) | ty::Slice(ty) => Some(ty), _ => None, }, _ => None, }; let (element_ty, t) = match uty { Some(uty) => { self.check_expr_coercable_to_type(&element, uty, None); (uty, uty) } None => { let ty = self.next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, span: element.span, }); let element_ty = self.check_expr_has_type_or_error(&element, ty, |_| {}); (element_ty, ty) } }; if element_ty.references_error() { return tcx.ty_error(); } tcx.mk_ty(ty::Array(t, count)) } fn check_expr_tuple( &self, elts: &'tcx [hir::Expr<'tcx>], expected: Expectation<'tcx>, expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { let flds = expected.only_has_type(self).and_then(|ty| { let ty = self.resolve_vars_with_obligations(ty); match ty.kind() { ty::Tuple(ref flds) => Some(&flds[..]), _ => None, } }); let elt_ts_iter = elts.iter().enumerate().map(|(i, e)| match flds { Some(ref fs) if i < fs.len() => { let ety = fs[i].expect_ty(); self.check_expr_coercable_to_type(&e, ety, None); ety } _ => self.check_expr_with_expectation(&e, NoExpectation), }); let tuple = self.tcx.mk_tup(elt_ts_iter); if tuple.references_error() { self.tcx.ty_error() } else { self.require_type_is_sized(tuple, expr.span, traits::TupleInitializerSized); tuple } } fn check_expr_struct( &self, expr: &hir::Expr<'_>, expected: Expectation<'tcx>, qpath: &QPath<'_>, fields: &'tcx [hir::Field<'tcx>], base_expr: &'tcx Option<&'tcx hir::Expr<'tcx>>, ) -> Ty<'tcx> { // Find the relevant variant let (variant, adt_ty) = if let Some(variant_ty) = self.check_struct_path(qpath, expr.hir_id) { variant_ty } else { self.check_struct_fields_on_error(fields, base_expr); return self.tcx.ty_error(); }; // Prohibit struct expressions when non-exhaustive flag is set. 
let adt = adt_ty.ty_adt_def().expect("`check_struct_path` returned non-ADT type"); if !adt.did.is_local() && variant.is_field_list_non_exhaustive() { self.tcx .sess .emit_err(StructExprNonExhaustive { span: expr.span, what: adt.variant_descr() }); } let error_happened = self.check_expr_struct_fields( adt_ty, expected, expr.hir_id, qpath.span(), variant, fields, base_expr.is_none(), ); if let &Some(ref base_expr) = base_expr { // If check_expr_struct_fields hit an error, do not attempt to populate // the fields with the base_expr. This could cause us to hit errors later // when certain fields are assumed to exist that in fact do not. if !error_happened { self.check_expr_has_type_or_error(base_expr, adt_ty, |_| {}); match adt_ty.kind() { ty::Adt(adt, substs) if adt.is_struct() => { let fru_field_types = adt .non_enum_variant() .fields .iter() .map(|f| { self.normalize_associated_types_in( expr.span, f.ty(self.tcx, substs), ) }) .collect(); self.typeck_results .borrow_mut() .fru_field_types_mut() .insert(expr.hir_id, fru_field_types); } _ => { self.tcx .sess .emit_err(FunctionalRecordUpdateOnNonStruct { span: base_expr.span }); } } } } self.require_type_is_sized(adt_ty, expr.span, traits::StructInitializerSized); adt_ty } fn check_expr_struct_fields( &self, adt_ty: Ty<'tcx>, expected: Expectation<'tcx>, expr_id: hir::HirId, span: Span, variant: &'tcx ty::VariantDef, ast_fields: &'tcx [hir::Field<'tcx>], check_completeness: bool, ) -> bool { let tcx = self.tcx; let adt_ty_hint = self .expected_inputs_for_expected_output(span, expected, adt_ty, &[adt_ty]) .get(0) .cloned() .unwrap_or(adt_ty); // re-link the regions that EIfEO can erase. self.demand_eqtype(span, adt_ty_hint, adt_ty); let (substs, adt_kind, kind_name) = match &adt_ty.kind() { &ty::Adt(adt, substs) => (substs, adt.adt_kind(), adt.variant_descr()), _ => span_bug!(span, "non-ADT passed to check_expr_struct_fields"), }; let mut remaining_fields = variant .fields .iter() .enumerate() .map(|(i, field)| (field.ident.normalize_to_macros_2_0(), (i, field))) .collect::<FxHashMap<_, _>>(); let mut seen_fields = FxHashMap::default(); let mut error_happened = false; // Type-check each field. for field in ast_fields { let ident = tcx.adjust_ident(field.ident, variant.def_id); let field_type = if let Some((i, v_field)) = remaining_fields.remove(&ident) { seen_fields.insert(ident, field.span); self.write_field_index(field.hir_id, i); // We don't look at stability attributes on // struct-like enums (yet...), but it's definitely not // a bug to have constructed one. if adt_kind != AdtKind::Enum { tcx.check_stability(v_field.did, Some(expr_id), field.span); } self.field_ty(field.span, v_field, substs) } else { error_happened = true; if let Some(prev_span) = seen_fields.get(&ident) { tcx.sess.emit_err(FieldMultiplySpecifiedInInitializer { span: field.ident.span, prev_span: *prev_span, ident, }); } else { self.report_unknown_field(adt_ty, variant, field, ast_fields, kind_name, span); } tcx.ty_error() }; // Make sure to give a type to the field even if there's // an error, so we can continue type-checking. self.check_expr_coercable_to_type(&field.expr, field_type, None); } // Make sure the programmer specified correct number of fields. 
if kind_name == "union" { if ast_fields.len() != 1 { tcx.sess.span_err(span, "union expressions should have exactly one field"); } } else if check_completeness && !error_happened && !remaining_fields.is_empty() { let no_accessible_remaining_fields = remaining_fields .iter() .find(|(_, (_, field))| { field.vis.is_accessible_from(tcx.parent_module(expr_id).to_def_id(), tcx) }) .is_none(); if no_accessible_remaining_fields { self.report_no_accessible_fields(adt_ty, span); } else { self.report_missing_field(adt_ty, span, remaining_fields); } } error_happened } fn check_struct_fields_on_error( &self, fields: &'tcx [hir::Field<'tcx>], base_expr: &'tcx Option<&'tcx hir::Expr<'tcx>>, ) { for field in fields { self.check_expr(&field.expr); } if let Some(ref base) = *base_expr { self.check_expr(&base); } } /// Report an error for a struct field expression when there are fields which aren't provided. /// /// ```text /// error: missing field `you_can_use_this_field` in initializer of `foo::Foo` /// --> src/main.rs:8:5 /// | /// 8 | foo::Foo {}; /// | ^^^^^^^^ missing `you_can_use_this_field` /// /// error: aborting due to previous error /// ``` fn report_missing_field( &self, adt_ty: Ty<'tcx>, span: Span, remaining_fields: FxHashMap<Ident, (usize, &ty::FieldDef)>, ) { let tcx = self.tcx; let len = remaining_fields.len(); let mut displayable_field_names = remaining_fields.keys().map(|ident| ident.as_str()).collect::<Vec<_>>(); displayable_field_names.sort(); let truncated_fields_error = if len <= 3 { String::new() } else { format!(" and {} other field{}", (len - 3), if len - 3 == 1 { "" } else { "s" }) }; let remaining_fields_names = displayable_field_names .iter() .take(3) .map(|n| format!("`{}`", n)) .collect::<Vec<_>>() .join(", "); struct_span_err!( tcx.sess, span, E0063, "missing field{} {}{} in initializer of `{}`", pluralize!(remaining_fields.len()), remaining_fields_names, truncated_fields_error, adt_ty ) .span_label(span, format!("missing {}{}", remaining_fields_names, truncated_fields_error)) .emit(); } /// Report an error for a struct field expression when there are no visible fields. /// /// ```text /// error: cannot construct `Foo` with struct literal syntax due to inaccessible fields /// --> src/main.rs:8:5 /// | /// 8 | foo::Foo {}; /// | ^^^^^^^^ /// /// error: aborting due to previous error /// ``` fn report_no_accessible_fields(&self, adt_ty: Ty<'tcx>, span: Span) { self.tcx.sess.span_err( span, &format!( "cannot construct `{}` with struct literal syntax due to inaccessible fields", adt_ty, ), ); } fn report_unknown_field( &self, ty: Ty<'tcx>, variant: &'tcx ty::VariantDef, field: &hir::Field<'_>, skip_fields: &[hir::Field<'_>], kind_name: &str, ty_span: Span, ) { if variant.is_recovered() { self.set_tainted_by_errors(); return; } let mut err = self.type_error_struct_with_diag( field.ident.span, |actual| match ty.kind() { ty::Adt(adt, ..) 
if adt.is_enum() => struct_span_err!( self.tcx.sess, field.ident.span, E0559, "{} `{}::{}` has no field named `{}`", kind_name, actual, variant.ident, field.ident ), _ => struct_span_err!( self.tcx.sess, field.ident.span, E0560, "{} `{}` has no field named `{}`", kind_name, actual, field.ident ), }, ty, ); match variant.ctor_kind { CtorKind::Fn => { err.span_label(variant.ident.span, format!("`{adt}` defined here", adt = ty)); err.span_label(field.ident.span, "field does not exist"); err.span_label( ty_span, format!( "`{adt}` is a tuple {kind_name}, \ use the appropriate syntax: `{adt}(/* fields */)`", adt = ty, kind_name = kind_name ), ); } _ => { // prevent all specified fields from being suggested let skip_fields = skip_fields.iter().map(|ref x| x.ident.name); if let Some(field_name) = Self::suggest_field_name(variant, field.ident.name, skip_fields.collect()) { err.span_suggestion( field.ident.span, "a field with a similar name exists", field_name.to_string(), Applicability::MaybeIncorrect, ); } else { match ty.kind() { ty::Adt(adt, ..) => { if adt.is_enum() { err.span_label( field.ident.span, format!("`{}::{}` does not have this field", ty, variant.ident), ); } else { err.span_label( field.ident.span, format!("`{}` does not have this field", ty), ); } let available_field_names = self.available_field_names(variant); if !available_field_names.is_empty() { err.note(&format!( "available fields are: {}", self.name_series_display(available_field_names) )); } } _ => bug!("non-ADT passed to report_unknown_field"), } }; } } err.emit(); } // Return an hint about the closest match in field names fn suggest_field_name( variant: &'tcx ty::VariantDef, field: Symbol, skip: Vec<Symbol>, ) -> Option<Symbol> { let names = variant.fields.iter().filter_map(|field| { // ignore already set fields and private fields from non-local crates if skip.iter().any(|&x| x == field.ident.name) || (!variant.def_id.is_local() && field.vis != Visibility::Public) { None } else { Some(&field.ident.name) } }); find_best_match_for_name(names, field, None) } fn available_field_names(&self, variant: &'tcx ty::VariantDef) -> Vec<Symbol> { variant .fields .iter() .filter(|field| { let def_scope = self .tcx .adjust_ident_and_get_scope(field.ident, variant.def_id, self.body_id) .1; field.vis.is_accessible_from(def_scope, self.tcx) }) .map(|field| field.ident.name) .collect() } fn name_series_display(&self, names: Vec<Symbol>) -> String { // dynamic limit, to never omit just one field let limit = if names.len() == 6 { 6 } else { 5 }; let mut display = names.iter().take(limit).map(|n| format!("`{}`", n)).collect::<Vec<_>>().join(", "); if names.len() > limit { display = format!("{} ... 
and {} others", display, names.len() - limit); } display } // Check field access expressions fn check_field( &self, expr: &'tcx hir::Expr<'tcx>, base: &'tcx hir::Expr<'tcx>, field: Ident, ) -> Ty<'tcx> { let expr_t = self.check_expr(base); let expr_t = self.structurally_resolved_type(base.span, expr_t); let mut private_candidate = None; let mut autoderef = self.autoderef(expr.span, expr_t); while let Some((base_t, _)) = autoderef.next() { match base_t.kind() { ty::Adt(base_def, substs) if !base_def.is_enum() => { debug!("struct named {:?}", base_t); let (ident, def_scope) = self.tcx.adjust_ident_and_get_scope(field, base_def.did, self.body_id); let fields = &base_def.non_enum_variant().fields; if let Some(index) = fields.iter().position(|f| f.ident.normalize_to_macros_2_0() == ident) { let field = &fields[index]; let field_ty = self.field_ty(expr.span, field, substs); // Save the index of all fields regardless of their visibility in case // of error recovery. self.write_field_index(expr.hir_id, index); if field.vis.is_accessible_from(def_scope, self.tcx) { let adjustments = self.adjust_steps(&autoderef); self.apply_adjustments(base, adjustments); self.register_predicates(autoderef.into_obligations()); self.tcx.check_stability(field.did, Some(expr.hir_id), expr.span); return field_ty; } private_candidate = Some((base_def.did, field_ty)); } } ty::Tuple(ref tys) => { let fstr = field.as_str(); if let Ok(index) = fstr.parse::<usize>() { if fstr == index.to_string() { if let Some(field_ty) = tys.get(index) { let adjustments = self.adjust_steps(&autoderef); self.apply_adjustments(base, adjustments); self.register_predicates(autoderef.into_obligations()); self.write_field_index(expr.hir_id, index); return field_ty.expect_ty(); } } } } _ => {} } } self.structurally_resolved_type(autoderef.span(), autoderef.final_ty(false)); if let Some((did, field_ty)) = private_candidate { self.ban_private_field_access(expr, expr_t, field, did); return field_ty; } if field.name == kw::Invalid { } else if self.method_exists(field, expr_t, expr.hir_id, true) { self.ban_take_value_of_method(expr, expr_t, field); } else if !expr_t.is_primitive_ty() { self.ban_nonexisting_field(field, base, expr, expr_t); } else { type_error_struct!( self.tcx().sess, field.span, expr_t, E0610, "`{}` is a primitive type and therefore doesn't have fields", expr_t ) .emit(); } self.tcx().ty_error() } fn suggest_await_on_field_access( &self, err: &mut DiagnosticBuilder<'_>, field_ident: Ident, base: &'tcx hir::Expr<'tcx>, ty: Ty<'tcx>, ) { let output_ty = match self.infcx.get_impl_future_output_ty(ty) { Some(output_ty) => self.resolve_vars_if_possible(output_ty), _ => return, }; let mut add_label = true; if let ty::Adt(def, _) = output_ty.kind() { // no field access on enum type if !def.is_enum() { if def.non_enum_variant().fields.iter().any(|field| field.ident == field_ident) { add_label = false; err.span_label( field_ident.span, "field not available in `impl Future`, but it is available in its `Output`", ); err.span_suggestion_verbose( base.span.shrink_to_hi(), "consider `await`ing on the `Future` and access the field of its `Output`", ".await".to_string(), Applicability::MaybeIncorrect, ); } } } if add_label { err.span_label(field_ident.span, &format!("field not found in `{}`", ty)); } } fn ban_nonexisting_field( &self, field: Ident, base: &'tcx hir::Expr<'tcx>, expr: &'tcx hir::Expr<'tcx>, expr_t: Ty<'tcx>, ) { debug!( "ban_nonexisting_field: field={:?}, base={:?}, expr={:?}, expr_ty={:?}", field, base, expr, expr_t ); let mut err = 
self.no_such_field_err(field.span, field, expr_t); match *expr_t.peel_refs().kind() { ty::Array(_, len) => { self.maybe_suggest_array_indexing(&mut err, expr, base, field, len); } ty::RawPtr(..) => { self.suggest_first_deref_field(&mut err, expr, base, field); } ty::Adt(def, _) if !def.is_enum() => { self.suggest_fields_on_recordish(&mut err, def, field); } ty::Param(param_ty) => { self.point_at_param_definition(&mut err, param_ty); } ty::Opaque(_, _) => { self.suggest_await_on_field_access(&mut err, field, base, expr_t.peel_refs()); } _ => {} } if field.name == kw::Await { // We know by construction that `<expr>.await` is either on Rust 2015 // or results in `ExprKind::Await`. Suggest switching the edition to 2018. err.note("to `.await` a `Future`, switch to Rust 2018"); err.help("set `edition = \"2018\"` in `Cargo.toml`"); err.note("for more on editions, read https://doc.rust-lang.org/edition-guide"); } err.emit(); } fn ban_private_field_access( &self, expr: &hir::Expr<'_>, expr_t: Ty<'tcx>, field: Ident, base_did: DefId, ) { let struct_path = self.tcx().def_path_str(base_did); let kind_name = self.tcx().def_kind(base_did).descr(base_did); let mut err = struct_span_err!( self.tcx().sess, field.span, E0616, "field `{}` of {} `{}` is private", field, kind_name, struct_path ); err.span_label(field.span, "private field"); // Also check if an accessible method exists, which is often what is meant. if self.method_exists(field, expr_t, expr.hir_id, false) && !self.expr_in_place(expr.hir_id) { self.suggest_method_call( &mut err, &format!("a method `{}` also exists, call it with parentheses", field), field, expr_t, expr, ); } err.emit(); } fn ban_take_value_of_method(&self, expr: &hir::Expr<'_>, expr_t: Ty<'tcx>, field: Ident) { let mut err = type_error_struct!( self.tcx().sess, field.span, expr_t, E0615, "attempted to take value of method `{}` on type `{}`", field, expr_t ); err.span_label(field.span, "method, not a field"); if !self.expr_in_place(expr.hir_id) { self.suggest_method_call( &mut err, "use parentheses to call the method", field, expr_t, expr, ); } else { err.help("methods are immutable and cannot be assigned to"); } err.emit(); } fn point_at_param_definition(&self, err: &mut DiagnosticBuilder<'_>, param: ty::ParamTy) { let generics = self.tcx.generics_of(self.body_id.owner.to_def_id()); let generic_param = generics.type_param(&param, self.tcx); if let ty::GenericParamDefKind::Type { synthetic: Some(..), .. 
} = generic_param.kind { return; } let param_def_id = generic_param.def_id; let param_hir_id = match param_def_id.as_local() { Some(x) => self.tcx.hir().local_def_id_to_hir_id(x), None => return, }; let param_span = self.tcx.hir().span(param_hir_id); let param_name = self.tcx.hir().ty_param_name(param_hir_id); err.span_label(param_span, &format!("type parameter '{}' declared here", param_name)); } fn suggest_fields_on_recordish( &self, err: &mut DiagnosticBuilder<'_>, def: &'tcx ty::AdtDef, field: Ident, ) { if let Some(suggested_field_name) = Self::suggest_field_name(def.non_enum_variant(), field.name, vec![]) { err.span_suggestion( field.span, "a field with a similar name exists", suggested_field_name.to_string(), Applicability::MaybeIncorrect, ); } else { err.span_label(field.span, "unknown field"); let struct_variant_def = def.non_enum_variant(); let field_names = self.available_field_names(struct_variant_def); if !field_names.is_empty() { err.note(&format!( "available fields are: {}", self.name_series_display(field_names), )); } } } fn maybe_suggest_array_indexing( &self, err: &mut DiagnosticBuilder<'_>, expr: &hir::Expr<'_>, base: &hir::Expr<'_>, field: Ident, len: &ty::Const<'tcx>, ) { if let (Some(len), Ok(user_index)) = (len.try_eval_usize(self.tcx, self.param_env), field.as_str().parse::<u64>()) { if let Ok(base) = self.tcx.sess.source_map().span_to_snippet(base.span) { let help = "instead of using tuple indexing, use array indexing"; let suggestion = format!("{}[{}]", base, field); let applicability = if len < user_index { Applicability::MachineApplicable } else { Applicability::MaybeIncorrect }; err.span_suggestion(expr.span, help, suggestion, applicability); } } } fn suggest_first_deref_field( &self, err: &mut DiagnosticBuilder<'_>, expr: &hir::Expr<'_>, base: &hir::Expr<'_>, field: Ident, ) { if let Ok(base) = self.tcx.sess.source_map().span_to_snippet(base.span) { let msg = format!("`{}` is a raw pointer; try dereferencing it", base); let suggestion = format!("(*{}).{}", base, field); err.span_suggestion(expr.span, &msg, suggestion, Applicability::MaybeIncorrect); } } fn no_such_field_err<T: Display>( &self, span: Span, field: T, expr_t: &ty::TyS<'_>, ) -> DiagnosticBuilder<'_> { type_error_struct!( self.tcx().sess, span, expr_t, E0609, "no field `{}` on type `{}`", field, expr_t ) } fn check_expr_index( &self, base: &'tcx hir::Expr<'tcx>, idx: &'tcx hir::Expr<'tcx>, expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { let base_t = self.check_expr(&base); let idx_t = self.check_expr(&idx); if base_t.references_error() { base_t } else if idx_t.references_error() { idx_t } else { let base_t = self.structurally_resolved_type(base.span, base_t); match self.lookup_indexing(expr, base, base_t, idx_t) { Some((index_ty, element_ty)) => { // two-phase not needed because index_ty is never mutable self.demand_coerce(idx, idx_t, index_ty, None, AllowTwoPhase::No); element_ty } None => { let mut err = type_error_struct!( self.tcx.sess, expr.span, base_t, E0608, "cannot index into a value of type `{}`", base_t ); // Try to give some advice about indexing tuples. if let ty::Tuple(..) 
= base_t.kind() { let mut needs_note = true; // If the index is an integer, we can show the actual // fixed expression: if let ExprKind::Lit(ref lit) = idx.kind { if let ast::LitKind::Int(i, ast::LitIntType::Unsuffixed) = lit.node { let snip = self.tcx.sess.source_map().span_to_snippet(base.span); if let Ok(snip) = snip { err.span_suggestion( expr.span, "to access tuple elements, use", format!("{}.{}", snip, i), Applicability::MachineApplicable, ); needs_note = false; } } } if needs_note { err.help( "to access tuple elements, use tuple indexing \ syntax (e.g., `tuple.0`)", ); } } err.emit(); self.tcx.ty_error() } } } } fn check_expr_yield( &self, value: &'tcx hir::Expr<'tcx>, expr: &'tcx hir::Expr<'tcx>, src: &'tcx hir::YieldSource, ) -> Ty<'tcx> { match self.resume_yield_tys { Some((resume_ty, yield_ty)) => { self.check_expr_coercable_to_type(&value, yield_ty, None); resume_ty } // Given that this `yield` expression was generated as a result of lowering a `.await`, // we know that the yield type must be `()`; however, the context won't contain this // information. Hence, we check the source of the yield expression here and check its // value's type against `()` (this check should always hold). None if src.is_await() => { self.check_expr_coercable_to_type(&value, self.tcx.mk_unit(), None); self.tcx.mk_unit() } _ => { self.tcx.sess.emit_err(YieldExprOutsideOfGenerator { span: expr.span }); self.tcx.mk_unit() } } } fn check_expr_asm_operand(&self, expr: &'tcx hir::Expr<'tcx>, is_input: bool) { let needs = if is_input { Needs::None } else { Needs::MutPlace }; let ty = self.check_expr_with_needs(expr, needs); self.require_type_is_sized(ty, expr.span, traits::InlineAsmSized); if !is_input && !expr.is_syntactic_place_expr() { let mut err = self.tcx.sess.struct_span_err(expr.span, "invalid asm output"); err.span_label(expr.span, "cannot assign to this expression"); err.emit(); } // If this is an input value, we require its type to be fully resolved // at this point. This allows us to provide helpful coercions which help // pass the type candidate list in a later pass. // // We don't require output types to be resolved at this point, which // allows them to be inferred based on how they are used later in the // function. if is_input { let ty = self.structurally_resolved_type(expr.span, &ty); match *ty.kind() { ty::FnDef(..) => { let fnptr_ty = self.tcx.mk_fn_ptr(ty.fn_sig(self.tcx)); self.demand_coerce(expr, ty, fnptr_ty, None, AllowTwoPhase::No); } ty::Ref(_, base_ty, mutbl) => { let ptr_ty = self.tcx.mk_ptr(ty::TypeAndMut { ty: base_ty, mutbl }); self.demand_coerce(expr, ty, ptr_ty, None, AllowTwoPhase::No); } _ => {} } } } fn check_expr_asm(&self, asm: &'tcx hir::InlineAsm<'tcx>) -> Ty<'tcx> { for op in asm.operands { match op { hir::InlineAsmOperand::In { expr, .. } | hir::InlineAsmOperand::Const { expr } => { self.check_expr_asm_operand(expr, true); } hir::InlineAsmOperand::Out { expr, .. } => { if let Some(expr) = expr { self.check_expr_asm_operand(expr, false); } } hir::InlineAsmOperand::InOut { expr, .. } => { self.check_expr_asm_operand(expr, false); } hir::InlineAsmOperand::SplitInOut { in_expr, out_expr, .. 
} => { self.check_expr_asm_operand(in_expr, true); if let Some(out_expr) = out_expr { self.check_expr_asm_operand(out_expr, false); } } hir::InlineAsmOperand::Sym { expr } => { self.check_expr(expr); } } } if asm.options.contains(ast::InlineAsmOptions::NORETURN) { self.tcx.types.never } else { self.tcx.mk_unit() } } } pub(super) fn ty_kind_suggestion(ty: Ty<'_>) -> Option<&'static str> { Some(match ty.kind() { ty::Bool => "true", ty::Char => "'a'", ty::Int(_) | ty::Uint(_) => "42", ty::Float(_) => "3.14159", ty::Error(_) | ty::Never => return None, _ => "value", }) }
check_expr_kind
index.js
/* @function: home page js   @author: diamondwang   @date: 2013-11-13 */
/* Note: jQuery must be included in the page before this script */
$(function(){
    //right-side tabs: phone credit, travel, lottery and games
    $(".service h2 span").mouseover(function(){
        $(this).addClass("on").siblings().removeClass("on");
        $(".service_wrap div").hide().eq($(this).index()).show();
    });
    //shopping-guide tabs: flash sale, hot items, recommended items, new arrivals, guess-you-like
    $(".guide_content h2 span").mouseover(function(){
        $(this).addClass("on").siblings().removeClass("on");
        $(".guide_wrap div").hide().eq($(this).index()).show();
    });
    //tab switching for each floor section
    $(".goodslist h2 span").mouseover(function(){
        $(this).addClass("on").siblings().removeClass("on");
        $(this).parent().next(".goodslist_wrap").find("div").hide().eq($(this).index()).show();
    });
    //home page slideshow
    var len = $(".slide_items li").size();  //number of slides
    var init = 1;   //initial slide index, starting from the second image
    var dt = null;  //timer handle
    //define a function that runs the animation
    function slide(){
        dt = setInterval(function(){
            //switch the large image
            $(".slide_items li").stop(true,false).fadeOut().eq(init).fadeIn();
            //switch the numeric indicator
            $(".slide_controls li").removeClass("on").eq(init).addClass("on");
            init++;
            if (init >= len ){
                init = 0;
            }
        },
    }
    // function slide(){
    //     if (init >= len ){
    //         init = 0;
    //     }
    //     //switch the large image
    //     $(".slide_items li").fadeOut().eq(init).fadeIn();
    //     //switch the numeric indicator
    //     $(".slide_controls li").removeClass("on").eq(init).addClass("on");
    //     init++;
    //     setTimeout("slide()",2000);
    // }
    //call the function to start the animation
    slide();
    //pause the slideshow while the mouse is over an image, resume on mouseout
    $(".slide_items li").mouseover(function(){
        clearInterval(dt);
    }).mouseout(function(){
        slide();
    });
    //when hovering a numeric indicator, switch to that slide immediately and pause; resume on mouseout
    $(".slide_controls li").mouseover(function(){
        clearInterval(dt);
        init = $(this).index();
        $(".slide_items li").stop(true,false).fadeOut().eq(init).fadeIn();
        init++;
        $(this).addClass("on").siblings().removeClass("on");
    }).mouseout(function(){
        slide();
    });
});
5000)
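The index.js record above shows how these entries fit together: the two code halves surround a masked span, and the short trailing cell — the "5000)" — is the text that completes the setInterval call between them. A minimal reconstruction sketch in Go; the struct and field names are hypothetical and the string values are abbreviated, chosen only for illustration:

package main

import "fmt"

// record mirrors one entry of this dump; the field names are hypothetical.
type record struct {
	FileName string
	Prefix   string // text before the masked span
	Suffix   string // text after the masked span
	Middle   string // the masked span itself
}

// reassemble splices the masked span back between the two halves.
func reassemble(r record) string {
	return r.Prefix + r.Middle + r.Suffix
}

func main() {
	r := record{
		FileName: "index.js",
		Prefix:   "dt = setInterval(function(){ /* ... */ }, ",
		Suffix:   "\n}",
		Middle:   "5000)",
	}
	fmt.Println(reassemble(r)) // dt = setInterval(function(){ /* ... */ }, 5000) ...
}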
asignacion-permisos.reducer.ts
import { State } from "../entity-state.model";
import { PerfilMenuRecursoNodo } from "../../../../seguridad/models";
import { RESOURCE_ACTIONS } from "../../../utils";
import { actions, PerfilMenuRecursoActions } from "../../actions/seguridad/asignacion-permisos.actions";

const INITIAL_STATE: State<PerfilMenuRecursoNodo> = {
    data: [],
    selected: null,
    action: null,
    loading: false,
    done: false,
    failed: false,
    errors: null,
    doneMessage: null
};

export function
(state = INITIAL_STATE, action: PerfilMenuRecursoActions): State<PerfilMenuRecursoNodo> { switch (action.type) { case actions.RESET: return { ...INITIAL_STATE }; case actions.GET_BY_SISTEMA_PERFIL: { return { ...state, action: RESOURCE_ACTIONS.CONSULTA, loading: true, done: false, failed: false, errors: null, selected: null }; } case actions.GET_BY_SISTEMA_PERFIL_SUCCESS: { return { ...state, data: action.payload, loading: false, done: true, failed: false, errors: null, selected: null }; } case actions.GET_BY_SISTEMA_PERFIL_FAIL: { return { ...state, data: [], loading: false, done: false, failed: true, errors: action.payload, selected: null }; } case actions.ADD: { return { ...state, action: RESOURCE_ACTIONS.REGISTRO, selected: null, loading: true, done: false, failed: false, errors: null }; } case actions.ADD_SUCCESS: { return { ...state, selected: null, loading: false, done: true, failed: false, errors: null, doneMessage: action.payload.message }; } case actions.ADD_FAIL: { return { ...state, selected: null, loading: false, done: false, failed: true, errors: action.payload }; } default: return state; } }
asignacionPermisosReducer
server.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from BaseHTTPServer import BaseHTTPRequestHandler,HTTPServer
from os import curdir, sep

PORT_NUMBER = 8083

#This class handles any incoming request from
#the browser
class myHandler(BaseHTTPRequestHandler):

    #Handler for the GET requests
    def do_GET(self):
        if self.path=="/":
            self.path="/index_example2.html"

        try:
            #Check the file extension required and
            #set the right mime type
            sendReply = False
            if self.path.endswith(".html"):
                mimetype='text/html'
                sendReply = True
                mimetype='image/gif'
                sendReply = True
            if self.path.endswith(".js"):
                mimetype='application/javascript'
                sendReply = True
            if self.path.endswith(".css"):
                mimetype='text/css'
                sendReply = True

            if sendReply == True:
                #Open the static file requested and send it
                f = open(curdir + sep + self.path)
                self.send_response(200)
                self.send_header('Content-type',mimetype)
                self.end_headers()
                self.wfile.write(f.read())
                f.close()
            return

        except IOError:
            self.send_error(404,'File Not Found: %s' % self.path)

try:
    #Create a web server and define the handler to manage the
    #incoming request
    server = HTTPServer(('', PORT_NUMBER), myHandler)
    print 'Started httpserver on port ' , PORT_NUMBER

    #Wait forever for incoming http requests
    server.serve_forever()

except KeyboardInterrupt:
    print '^C received, shutting down the web server'
    server.socket.close()
if self.path.endswith(".jpg"): mimetype='image/jpg' sendReply = True if self.path.endswith(".gif"):
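The handler above maps file extensions to MIME types by hand through a chain of endswith checks. For comparison only, a minimal sketch of the same lookup done with Go's standard library (mime.TypeByExtension); the octet-stream fallback is an assumption of this sketch, not part of the original example:

package main

import (
	"fmt"
	"mime"
	"path/filepath"
)

// contentTypeFor mirrors the extension-to-MIME-type dispatch done by hand in
// the Python handler, using the standard library lookup table instead.
func contentTypeFor(path string) string {
	if ct := mime.TypeByExtension(filepath.Ext(path)); ct != "" {
		return ct
	}
	return "application/octet-stream" // fallback when the extension is unknown
}

func main() {
	fmt.Println(contentTypeFor("/index_example2.html")) // typically "text/html; charset=utf-8"
}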
dynamicselection.go
/* Copyright SecureKey Technologies Inc. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package dynamicselection import ( "time" "github.com/hyperledger/fabric-sdk-go/pkg/util/concurrent/lazycache" "github.com/hyperledger/fabric-sdk-go/pkg/util/concurrent/lazyref" copts "github.com/hyperledger/fabric-sdk-go/pkg/common/options" "github.com/hyperledger/fabric-sdk-go/pkg/common/providers/context" "github.com/hyperledger/fabric-sdk-go/pkg/common/providers/fab" "github.com/pkg/errors" "github.com/hyperledger/fabric-sdk-go/pkg/client/common/selection/dynamicselection/pgresolver" "github.com/hyperledger/fabric-sdk-go/pkg/client/common/selection/options" ) const defaultCacheTimeout = 30 * time.Minute // Opt applies a selection provider option type Opt func(*SelectionService) // WithLoadBalancePolicy sets the load-balance policy func WithLoadBalancePolicy(lbp pgresolver.LoadBalancePolicy) Opt { return func(s *SelectionService) { s.pgLBP = lbp } } // WithCacheTimeout sets the expiration timeout of the cache func WithCacheTimeout(timeout time.Duration) Opt
// SelectionService chooses endorsing peers for a given set of chaincodes using their chaincode policy type SelectionService struct { channelID string pgResolvers *lazycache.Cache pgLBP pgresolver.LoadBalancePolicy ccPolicyProvider CCPolicyProvider discoveryService fab.DiscoveryService cacheTimeout time.Duration } type policyProviderFactory func() (CCPolicyProvider, error) // NewService creates a new dynamic selection service func NewService(context context.Client, channelID string, discovery fab.DiscoveryService, opts ...Opt) (*SelectionService, error) { return newService(context, channelID, discovery, func() (CCPolicyProvider, error) { return newCCPolicyProvider(context, discovery, channelID) }, opts...) } func newService(context context.Client, channelID string, discovery fab.DiscoveryService, factory policyProviderFactory, opts ...Opt) (*SelectionService, error) { ccPolicyProvider, err := factory() if err != nil { return nil, errors.WithMessage(err, "Failed to create cc policy provider") } service := &SelectionService{ channelID: channelID, discoveryService: discovery, ccPolicyProvider: ccPolicyProvider, cacheTimeout: defaultCacheTimeout, pgLBP: pgresolver.NewRandomLBP(), } for _, opt := range opts { opt(service) } if service.cacheTimeout == 0 { service.cacheTimeout = context.EndpointConfig().Timeout(fab.SelectionServiceRefresh) } if service.pgLBP == nil { service.pgLBP = pgresolver.NewRandomLBP() } service.pgResolvers = lazycache.New( "PG_Resolver_Cache", func(key lazycache.Key) (interface{}, error) { return service.createPGResolver(key.(*resolverKey)) }, lazyref.WithAbsoluteExpiration(service.cacheTimeout), ) return service, nil } // GetEndorsersForChaincode returns the endorsing peers for the given chaincodes func (s *SelectionService) GetEndorsersForChaincode(chaincodes []*fab.ChaincodeCall, opts ...copts.Opt) ([]fab.Peer, error) { if len(chaincodes) == 0 { return nil, errors.New("no chaincode IDs provided") } params := options.NewParams(opts) var chaincodeIDs []string for _, cc := range chaincodes { chaincodeIDs = append(chaincodeIDs, cc.ID) } resolver, err := s.getPeerGroupResolver(chaincodeIDs) if err != nil { return nil, errors.WithMessagef(err, "Error getting peer group resolver for chaincodes [%v] on channel [%s]", chaincodeIDs, s.channelID) } peers, err := s.discoveryService.GetPeers() if err != nil { return nil, err } if params.PeerFilter != nil { var filteredPeers []fab.Peer for _, peer := range peers { if params.PeerFilter(peer) { filteredPeers = append(filteredPeers, peer) } else { logger.Debugf("Peer [%s] is not accepted by the filter and therefore peer group will be excluded.", peer.URL()) } } peers = filteredPeers } if params.PeerSorter != nil { sortedPeers := make([]fab.Peer, len(peers)) copy(sortedPeers, peers) peers = params.PeerSorter(sortedPeers) } peerGroup, err := resolver.Resolve(peers) if err != nil { return nil, err } return peerGroup.Peers(), nil } // Close closes all resources associated with the service func (s *SelectionService) Close() { s.pgResolvers.Close() } func (s *SelectionService) getPeerGroupResolver(chaincodeIDs []string) (pgresolver.PeerGroupResolver, error) { resolver, err := s.pgResolvers.Get(newResolverKey(s.channelID, chaincodeIDs...)) if err != nil { return nil, err } return resolver.(pgresolver.PeerGroupResolver), nil } func (s *SelectionService) createPGResolver(key *resolverKey) (pgresolver.PeerGroupResolver, error) { // Retrieve the signature policies for all of the chaincodes var policyGroups []pgresolver.GroupRetriever for _, ccID 
:= range key.chaincodeIDs { policyGroup, err := s.getPolicyGroupForCC(key.channelID, ccID) if err != nil { return nil, errors.WithMessagef(err, "error retrieving signature policy for chaincode [%s] on channel [%s]", ccID, key.channelID) } policyGroups = append(policyGroups, policyGroup) } // Perform an 'and' operation on all of the peer groups aggregatePolicyGroupRetriever := func(peerRetriever pgresolver.MSPPeerRetriever) (pgresolver.GroupOfGroups, error) { var groups []pgresolver.Group for _, f := range policyGroups { grps, err := f(peerRetriever) if err != nil { return nil, err } groups = append(groups, grps) } return pgresolver.NewGroupOfGroups(groups).Nof(int32(len(policyGroups))) } // Create the resolver resolver, err := pgresolver.NewPeerGroupResolver(aggregatePolicyGroupRetriever, s.pgLBP) if err != nil { return nil, errors.WithMessagef(err, "error creating peer group resolver for chaincodes [%v] on channel [%s]", key.chaincodeIDs, key.channelID) } return resolver, nil } func (s *SelectionService) getPolicyGroupForCC(channelID string, ccID string) (pgresolver.GroupRetriever, error) { sigPolicyEnv, err := s.ccPolicyProvider.GetChaincodePolicy(ccID) if err != nil { return nil, errors.WithMessagef(err, "error querying chaincode [%s] on channel [%s]", ccID, channelID) } return pgresolver.CompileSignaturePolicy(sigPolicyEnv) }
{ return func(s *SelectionService) { s.cacheTimeout = timeout } }
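The Opt type and the WithCacheTimeout / WithLoadBalancePolicy constructors above follow Go's functional-options pattern: each option is a closure applied to the service after its defaults are set. A minimal usage sketch, assuming the same imports as the file above; newChannelSelection is a hypothetical helper, and the channel name plus the ctx and discovery arguments are placeholders for values obtained elsewhere from the SDK:

// newChannelSelection shows how a caller could combine the options above.
func newChannelSelection(ctx context.Client, discovery fab.DiscoveryService) (*SelectionService, error) {
	return NewService(
		ctx, "mychannel", discovery,
		WithCacheTimeout(5*time.Minute),                  // override the 30-minute default
		WithLoadBalancePolicy(pgresolver.NewRandomLBP()), // explicit; random is also the default
	)
}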
lib.rs
//! This library only exists to have a common structure definition that can be
//! passed from the bootloader to the kernel. This contains any information
//! that the bootloader wants to enlighten the kernel with.
//!
//! This structure also supports backwards passing, for the kernel to pass
//! information back to the bootloader. This just means that this structure
//! lives forever and is not deleted or moved by either the bootloader or
//! kernel.

#![no_std]

use core::sync::atomic::AtomicU64;

use serial::SerialPort;
use rangeset::RangeSet;
use lockcell::LockCell;
use page_table::PageTable;

/// Base vaddr to use for kernel stacks
pub const KERNEL_STACKS_BASE: u64 = 0x0000_7473_0000_0000;

/// The virtual base in the kernel page tables where physical memory is
/// linearly mapped, such that dereferencing `KERNEL_PHYS_WINDOW_BASE` in the
/// kernel address space accesses physical address `0`.
pub const KERNEL_PHYS_WINDOW_BASE: u64 = 0xffff_cafe_0000_0000;

/// The base virtual address to use for dynamic virtual allocations
pub const KERNEL_VMEM_BASE: u64 = 0xffff_8000_0000_0000;

/// Size to allocate for kernel stacks
pub const KERNEL_STACK_SIZE: u64 = 32 * 1024;

/// Padding deadspace to add between kernel stacks
pub const KERNEL_STACK_PAD: u64 = 32 * 1024;

/// Size of the kernel physical window (in bytes)
pub const KERNEL_PHYS_WINDOW_SIZE: u64 = 32 * 1024 * 1024 * 1024;

/// Structures to pass between both the 32-bit and 64-bit modes. This structure
/// MUST be identical in both modes. Thus, pointers, references, and usizes
/// must not be used. Also, make sure everything is marked `#[repr(C)]`,
/// otherwise the 32-bit and 64-bit variants may be slightly reordered, as Rust
/// by default allows re-ordering of non-repr-C structures to fit alignment
/// demands without padding.
pub struct BootArgs { /// All memory which is available for use by the kernel and bootloader. /// This structure is potentially used at the same time by both the /// bootloader and the kernel. pub free_memory: LockCell<Option<RangeSet>>, /// The serial driver pub serial: LockCell<Option<SerialPort>>, /// The page table used for the kernel pub page_table: LockCell<Option<PageTable>>, /// The trampoline page table to be used during the paging transition from /// the bootloader to the kernel. This will have [0..bootloader_end] mapped /// in identity mapped, as well as [0..bootloader_end] mapped in at the /// address that the linear physical map will be present in the kernel page /// tables. This allows us to temporarily have both the kernel's physical /// memory view, and an identity mapped memory view such that the /// page table can be switched while executing in the low-memory physical /// addresses of the bootloader, and then we can jump to the kernel /// physical mapping. pub trampoline_page_table: LockCell<Option<PageTable>>, /// Address of the kernel entry point pub kernel_entry: LockCell<Option<u64>>, /// The virtual address of the "next available stack". This is just used to /// give unique stack addresses to each core as they come online. This /// doesn't need to be honored if you have another method of creating /// unique non-overlapping stacks for cores. pub stack_vaddr: AtomicU64, /// A lock to be used to make `print!()` macros fully atomic pub print_lock: LockCell<()>, }
#[repr(C)]
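// Illustrative sketch (not part of lib.rs above): what the linear physical
// window described by KERNEL_PHYS_WINDOW_BASE / KERNEL_PHYS_WINDOW_SIZE
// implies for address translation. The helper name is hypothetical, and the
// bounds check assumes only the window itself is linearly mapped.
fn phys_window_vaddr(paddr: u64) -> Option<u64> {
    // Physical addresses past the window size are not covered by the
    // linear mapping, so refuse to translate them.
    if paddr >= KERNEL_PHYS_WINDOW_SIZE {
        return None;
    }
    // A dereference of KERNEL_PHYS_WINDOW_BASE reaches physical address 0,
    // so physical address `paddr` lives at base + paddr.
    Some(KERNEL_PHYS_WINDOW_BASE + paddr)
}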
matrix.rs
use super::scalar_; use crate::prelude::*; use crate::{scalar, Point, Point3, RSXform, Rect, Scalar, Size, Vector}; use skia_bindings as sb; use skia_bindings::SkMatrix; use std::ops::{Index, IndexMut, Mul}; use std::slice; pub use skia_bindings::SkApplyPerspectiveClip as ApplyPerspectiveClip; #[test] fn test_apply_perspective_clip_naming() { let _ = ApplyPerspectiveClip::Yes; } bitflags! { // m85: On Windows the SkMatrix_TypeMask is defined as i32, // but we stick to u32 (macOS / Linux), because there is no need to leak // the platform difference to the Rust side. pub struct TypeMask: u32 { const IDENTITY = sb::SkMatrix_TypeMask_kIdentity_Mask as _; const TRANSLATE = sb::SkMatrix_TypeMask_kTranslate_Mask as _; const SCALE = sb::SkMatrix_TypeMask_kScale_Mask as _; const AFFINE = sb::SkMatrix_TypeMask_kAffine_Mask as _; const PERSPECTIVE = sb::SkMatrix_TypeMask_kPerspective_Mask as _; } } pub use skia_bindings::SkMatrix_ScaleToFit as ScaleToFit; #[test] fn test_matrix_scale_to_fit_naming() { let _ = ScaleToFit::End; } #[derive(Copy, Clone, Debug)] #[repr(C)] pub struct Matrix { mat: [scalar; 9usize], type_mask: u32, } impl NativeTransmutable<SkMatrix> for Matrix {} #[test] fn test_matrix_layout() { Matrix::test_layout() } impl PartialEq for Matrix { fn eq(&self, rhs: &Self) -> bool { unsafe { sb::C_SkMatrix_Equals(self.native(), rhs.native()) } } } impl Mul for Matrix { type Output = Self; fn mul(self, rhs: Matrix) -> Self::Output { Matrix::concat(&self, &rhs) } } #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum Member { ScaleX = 0, SkewX = 1, TransX = 2, SkewY = 3, ScaleY = 4, TransY = 5, Persp0 = 6, Persp1 = 7, Persp2 = 8, } #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum AffineMember { ScaleX = 0, SkewY = 1, SkewX = 2, ScaleY = 3, TransX = 4, TransY = 5, } impl Index<Member> for Matrix { type Output = scalar; fn index(&self, index: Member) -> &Self::Output { &self[index as usize] } } impl Index<AffineMember> for Matrix { type Output = scalar; fn index(&self, index: AffineMember) -> &Self::Output { &self[index as usize] } } impl Index<usize> for Matrix { type Output = scalar; fn
(&self, index: usize) -> &Self::Output { &self.native().fMat[index] } } impl IndexMut<Member> for Matrix { fn index_mut(&mut self, index: Member) -> &mut Self::Output { self.index_mut(index as usize) } } impl IndexMut<AffineMember> for Matrix { fn index_mut(&mut self, index: AffineMember) -> &mut Self::Output { self.index_mut(index as usize) } } impl IndexMut<usize> for Matrix { fn index_mut(&mut self, index: usize) -> &mut Self::Output { unsafe { &mut *sb::C_SkMatrix_SubscriptMut(self.native_mut(), index) } } } impl Default for Matrix { fn default() -> Self { Matrix::new() } } impl Matrix { const fn new() -> Self { Self { mat: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0], type_mask: TypeMask::IDENTITY.bits() | 0x10, } } #[deprecated(since = "0.33.0", note = "use Matrix::scale()")] pub fn new_scale(scale: (scalar, scalar)) -> Matrix { Self::scale(scale) } pub fn scale((sx, sy): (scalar, scalar)) -> Matrix { let mut m = Matrix::new(); m.set_scale((sx, sy), None); m } #[deprecated(since = "0.33.0", note = "use Matrix::translate()")] pub fn new_trans(d: impl Into<Vector>) -> Matrix { Self::translate(d) } pub fn translate(d: impl Into<Vector>) -> Matrix { let mut m = Matrix::new(); m.set_translate(d); m } pub fn rotate_deg(deg: scalar) -> Matrix { let mut m = Matrix::new(); m.set_rotate(deg, None); m } pub fn rotate_deg_pivot(deg: scalar, pivot: impl Into<Point>) -> Matrix { let mut m = Matrix::new(); m.set_rotate(deg, pivot.into()); m } pub fn rotate_rad(rad: scalar) -> Matrix { Self::rotate_deg(scalar_::radians_to_degrees(rad)) } #[allow(clippy::too_many_arguments)] pub fn new_all( scale_x: scalar, skew_x: scalar, trans_x: scalar, skew_y: scalar, scale_y: scalar, trans_y: scalar, pers_0: scalar, pers_1: scalar, pers_2: scalar, ) -> Matrix { let mut m = Matrix::new(); m.set_all( scale_x, skew_x, trans_x, skew_y, scale_y, trans_y, pers_0, pers_1, pers_2, ); m } pub fn get_type(&self) -> TypeMask { TypeMask::from_bits_truncate(unsafe { sb::C_SkMatrix_getType(self.native()) } as _) } pub fn is_identity(&self) -> bool { self.get_type() == TypeMask::IDENTITY } pub fn is_scale_translate(&self) -> bool { (self.get_type() & !(TypeMask::SCALE | TypeMask::TRANSLATE)).is_empty() } pub fn is_translate(&self) -> bool { (self.get_type() & !TypeMask::TRANSLATE).is_empty() } pub fn rect_stays_rect(&self) -> bool { unsafe { sb::C_SkMatrix_rectStaysRect(self.native()) } } pub fn preserves_axis_alignment(&self) -> bool { self.rect_stays_rect() } pub fn has_perspective(&self) -> bool { unsafe { sb::C_SkMatrix_hasPerspective(self.native()) } } pub fn is_similarity(&self) -> bool { unsafe { self.native().isSimilarity(scalar::NEARLY_ZERO) } } pub fn preserves_right_angles(&self) -> bool { unsafe { self.native().preservesRightAngles(scalar::NEARLY_ZERO) } } pub fn rc(&self, r: usize, c: usize) -> scalar { assert!(r <= 2); assert!(c <= 2); self[r * 3 + c] } pub fn scale_x(&self) -> scalar { self[Member::ScaleX] } pub fn scale_y(&self) -> scalar { self[Member::ScaleY] } pub fn skew_y(&self) -> scalar { self[Member::SkewY] } pub fn skew_x(&self) -> scalar { self[Member::SkewX] } pub fn translate_x(&self) -> scalar { self[Member::TransX] } pub fn translate_y(&self) -> scalar { self[Member::TransY] } pub fn persp_x(&self) -> scalar { self[Member::Persp0] } pub fn persp_y(&self) -> scalar { self[Member::Persp1] } pub fn set_scale_x(&mut self, v: scalar) -> &mut Self { self.set(Member::ScaleX, v) } pub fn set_scale_y(&mut self, v: scalar) -> &mut Self { self.set(Member::ScaleY, v) } pub fn set_skew_y(&mut self, v: scalar) 
-> &mut Self { self.set(Member::SkewY, v) } pub fn set_skew_x(&mut self, v: scalar) -> &mut Self { self.set(Member::SkewX, v) } pub fn set_translate_x(&mut self, v: scalar) -> &mut Self { self.set(Member::TransX, v) } pub fn set_translate_y(&mut self, v: scalar) -> &mut Self { self.set(Member::TransY, v) } pub fn set_persp_x(&mut self, v: scalar) -> &mut Self { self.set(Member::Persp0, v) } pub fn set_persp_y(&mut self, v: scalar) -> &mut Self { self.set(Member::Persp1, v) } #[allow(clippy::too_many_arguments)] pub fn set_all( &mut self, scale_x: scalar, skew_x: scalar, trans_x: scalar, skew_y: scalar, scale_y: scalar, trans_y: scalar, persp_0: scalar, persp_1: scalar, persp_2: scalar, ) -> &mut Self { self[Member::ScaleX] = scale_x; self[Member::SkewX] = skew_x; self[Member::TransX] = trans_x; self[Member::SkewY] = skew_y; self[Member::ScaleY] = scale_y; self[Member::TransY] = trans_y; self[Member::Persp0] = persp_0; self[Member::Persp1] = persp_1; self[Member::Persp2] = persp_2; self } pub fn get_9(&self, buffer: &mut [scalar; 9]) { buffer.copy_from_slice(&self.mat) } pub fn set_9(&mut self, buffer: &[scalar; 9]) -> &mut Self { unsafe { self.native_mut().set9(buffer.as_ptr()); } self } pub fn reset(&mut self) -> &mut Self { unsafe { self.native_mut().reset(); } self } pub fn set_identity(&mut self) -> &mut Self { self.reset(); self } pub fn set_translate(&mut self, v: impl Into<Vector>) -> &mut Self { let v = v.into(); unsafe { self.native_mut().setTranslate(v.x, v.y); } self } pub fn set_scale( &mut self, (sx, sy): (scalar, scalar), pivot: impl Into<Option<Point>>, ) -> &mut Self { let pivot = pivot.into().unwrap_or_default(); unsafe { self.native_mut().setScale(sx, sy, pivot.x, pivot.y); } self } pub fn set_rotate(&mut self, degrees: scalar, pivot: impl Into<Option<Point>>) -> &mut Self { let pivot = pivot.into().unwrap_or_default(); unsafe { self.native_mut().setRotate(degrees, pivot.x, pivot.y); } self } pub fn set_sin_cos( &mut self, (sin_value, cos_value): (scalar, scalar), pivot: impl Into<Option<Point>>, ) -> &mut Self { let pivot = pivot.into().unwrap_or_default(); unsafe { self.native_mut() .setSinCos(sin_value, cos_value, pivot.x, pivot.y); } self } pub fn set_rsxform(&mut self, rsxform: &RSXform) -> &mut Self { unsafe { self.native_mut().setRSXform(rsxform.native()); } self } pub fn set_skew( &mut self, (kx, ky): (scalar, scalar), pivot: impl Into<Option<Point>>, ) -> &mut Self { let pivot = pivot.into().unwrap_or_default(); unsafe { self.native_mut().setSkew(kx, ky, pivot.x, pivot.y); } self } pub fn set_concat(&mut self, a: &Matrix, b: &Matrix) -> &mut Self { unsafe { self.native_mut().setConcat(a.native(), b.native()); } self } pub fn pre_translate(&mut self, delta: impl Into<Vector>) -> &mut Self { let delta = delta.into(); unsafe { self.native_mut().preTranslate(delta.x, delta.y); } self } pub fn pre_scale( &mut self, (sx, sy): (scalar, scalar), pivot: impl Into<Option<Point>>, ) -> &mut Self { let pivot = pivot.into().unwrap_or_default(); unsafe { self.native_mut().preScale(sx, sy, pivot.x, pivot.y); } self } pub fn pre_rotate(&mut self, degrees: scalar, pivot: impl Into<Option<Point>>) -> &mut Self { let pivot = pivot.into().unwrap_or_default(); unsafe { self.native_mut().preRotate(degrees, pivot.x, pivot.y); } self } pub fn pre_skew( &mut self, (kx, ky): (scalar, scalar), pivot: impl Into<Option<Point>>, ) -> &mut Self { let pivot = pivot.into().unwrap_or_default(); unsafe { self.native_mut().preSkew(kx, ky, pivot.x, pivot.y); } self } pub fn pre_concat(&mut self, 
other: &Matrix) -> &mut Self { unsafe { self.native_mut().preConcat(other.native()); } self } pub fn post_translate(&mut self, delta: impl Into<Vector>) -> &mut Self { let delta = delta.into(); unsafe { self.native_mut().postTranslate(delta.x, delta.y); } self } pub fn post_scale( &mut self, (sx, sy): (scalar, scalar), pivot: impl Into<Option<Point>>, ) -> &mut Self { let pivot = pivot.into().unwrap_or_default(); unsafe { self.native_mut().postScale(sx, sy, pivot.x, pivot.y); } self } #[deprecated( since = "0.27.0", note = "use post_scale((1.0 / x as scalar, 1.0 / y as scalar), None)" )] pub fn post_idiv(&mut self, (div_x, div_y): (i32, i32)) -> bool { if div_x == 0 || div_y == 0 { return false; } self.post_scale((1.0 / div_x as scalar, 1.0 / div_y as scalar), None); true } pub fn post_rotate(&mut self, degrees: scalar, pivot: impl Into<Option<Point>>) -> &mut Self { let pivot = pivot.into().unwrap_or_default(); unsafe { self.native_mut().postRotate(degrees, pivot.x, pivot.y); } self } pub fn post_skew( &mut self, (kx, ky): (scalar, scalar), pivot: impl Into<Option<Point>>, ) -> &mut Self { let pivot = pivot.into().unwrap_or_default(); unsafe { self.native_mut().postSkew(kx, ky, pivot.x, pivot.y); } self } pub fn post_concat(&mut self, other: &Matrix) -> &mut Self { unsafe { self.native_mut().postConcat(other.native()); } self } pub fn set_rect_to_rect( &mut self, src: impl AsRef<Rect>, dst: impl AsRef<Rect>, stf: ScaleToFit, ) -> bool { unsafe { self.native_mut() .setRectToRect(src.as_ref().native(), dst.as_ref().native(), stf) } } pub fn from_rect_to_rect( src: impl AsRef<Rect>, dst: impl AsRef<Rect>, stf: ScaleToFit, ) -> Option<Matrix> { let mut m = Matrix::new_identity(); m.set_rect_to_rect(src, dst, stf).if_true_some(m) } pub fn set_poly_to_poly(&mut self, src: &[Point], dst: &[Point]) -> bool { if src.len() != dst.len() { return false; } unsafe { self.native_mut().setPolyToPoly( src.native().as_ptr(), dst.native().as_ptr(), src.len().try_into().unwrap(), ) } } pub fn from_poly_to_poly(src: &[Point], dst: &[Point]) -> Option<Matrix> { let mut m = Matrix::new_identity(); m.set_poly_to_poly(src, dst).if_true_some(m) } #[must_use] pub fn invert(&self) -> Option<Matrix> { let mut m = Matrix::new_identity(); unsafe { sb::C_SkMatrix_invert(self.native(), m.native_mut()) }.if_true_some(m) } pub fn set_affine_identity(affine: &mut [scalar; 6]) { unsafe { SkMatrix::SetAffineIdentity(affine.as_mut_ptr()) } } #[must_use] pub fn to_affine(&self) -> Option<[scalar; 6]> { let mut affine = [scalar::default(); 6]; unsafe { self.native().asAffine(affine.as_mut_ptr()) }.if_true_some(affine) } pub fn set_affine(&mut self, affine: &[scalar; 6]) -> &mut Self { unsafe { self.native_mut().setAffine(affine.as_ptr()) }; self } pub fn from_affine(affine: &[scalar; 6]) -> Matrix { let mut m = Matrix::new_identity(); unsafe { m.native_mut().setAffine(affine.as_ptr()); } m } pub fn normalize_perspective(&mut self) { unsafe { sb::C_SkMatrix_normalizePerspective(self.native_mut()) } } pub fn map_points(&self, dst: &mut [Point], src: &[Point]) { assert!(dst.len() >= src.len()); unsafe { self.native().mapPoints( dst.native_mut().as_mut_ptr(), src.native().as_ptr(), src.len().try_into().unwrap(), ) }; } pub fn map_points_inplace(&self, pts: &mut [Point]) { let ptr = pts.native_mut().as_mut_ptr(); unsafe { self.native() .mapPoints(ptr, ptr, pts.len().try_into().unwrap()) }; } pub fn map_homogeneous_points(&self, dst: &mut [Point3], src: &[Point3]) { assert!(dst.len() >= src.len()); unsafe { 
self.native().mapHomogeneousPoints( dst.native_mut().as_mut_ptr(), src.native().as_ptr(), src.len().try_into().unwrap(), ) }; } pub fn map_homogeneous_points_2d(&self, dst: &mut [Point3], src: &[Point]) { assert!(dst.len() >= src.len()); unsafe { self.native().mapHomogeneousPoints1( dst.native_mut().as_mut_ptr(), src.native().as_ptr(), src.len().try_into().unwrap(), ) }; } pub fn map_xy(&self, x: scalar, y: scalar) -> Point { self.map_point((x, y)) } pub fn map_point(&self, point: impl Into<Point>) -> Point { let point = point.into(); let mut p = Point::default(); unsafe { self.native().mapXY(point.x, point.y, p.native_mut()) }; p } pub fn map_vectors(&self, dst: &mut [Vector], src: &[Vector]) { assert!(dst.len() >= src.len()); unsafe { self.native().mapVectors( dst.native_mut().as_mut_ptr(), src.native().as_ptr(), src.len().try_into().unwrap(), ) } } pub fn map_vectors_inplace(&self, vecs: &mut [Vector]) { let ptr = vecs.native_mut().as_mut_ptr(); unsafe { self.native() .mapVectors(ptr, ptr, vecs.len().try_into().unwrap()) } } pub fn map_vector(&self, vec: impl Into<Vector>) -> Vector { let mut vec = vec.into(); self.map_vectors_inplace(slice::from_mut(&mut vec)); vec } pub fn map_rect(&self, rect: impl AsRef<Rect>) -> (Rect, bool) { self.map_rect_with_perspective_clip(rect, ApplyPerspectiveClip::Yes) } pub fn map_rect_with_perspective_clip( &self, rect: impl AsRef<Rect>, perspective_clip: ApplyPerspectiveClip, ) -> (Rect, bool) { let mut rect = *rect.as_ref(); let ptr = rect.native_mut(); let rect_stays_rect = unsafe { self.native().mapRect(ptr, ptr, perspective_clip) }; (rect, rect_stays_rect) } pub fn map_rect_to_quad(&self, rect: impl AsRef<Rect>) -> [Point; 4] { let mut quad = rect.as_ref().to_quad(); self.map_points_inplace(quad.as_mut()); quad } pub fn map_rect_scale_translate(&self, src: impl AsRef<Rect>) -> Option<Rect> { if self.is_scale_translate() { let mut rect = Rect::default(); unsafe { self.native() .mapRectScaleTranslate(rect.native_mut(), src.as_ref().native()) }; Some(rect) } else { None } } pub fn map_radius(&self, radius: scalar) -> Option<scalar> { if !self.has_perspective() { Some(unsafe { self.native().mapRadius(radius) }) } else { None } } #[deprecated(since = "0.27.0", note = "removed without replacement")] pub fn is_fixed_step_in_x(&self) -> ! { unimplemented!("removed without replacement") } #[deprecated(since = "0.27.0", note = "removed without replacement")] pub fn fixed_step_in_x(&self, _y: scalar) -> ! { unimplemented!("removed without replacement") } #[deprecated(since = "0.27.0", note = "removed without replacement")] pub fn cheap_equal_to(&self, _other: &Matrix) -> ! 
{ unimplemented!("removed without replacement") } pub fn dump(&self) { unsafe { self.native().dump() } } pub fn min_scale(&self) -> scalar { unsafe { self.native().getMinScale() } } pub fn max_scale(&self) -> scalar { unsafe { self.native().getMaxScale() } } pub fn min_max_scales(&self) -> (scalar, scalar) { let mut r: [scalar; 2] = Default::default(); unsafe { self.native().getMinMaxScales(r.as_mut_ptr()) }; (r[0], r[1]) } pub fn decompose_scale(&self, mut remaining: Option<&mut Matrix>) -> Option<Size> { let mut size = Size::default(); unsafe { self.native() .decomposeScale(size.native_mut(), remaining.native_ptr_or_null_mut()) } .if_true_some(size) } pub fn i() -> &'static Matrix { &IDENTITY } pub fn invalid_matrix() -> &'static Matrix { Self::from_native_ref(unsafe { &*sb::C_SkMatrix_InvalidMatrix() }) } pub fn concat(a: &Matrix, b: &Matrix) -> Matrix { let mut m = Matrix::new_identity(); unsafe { m.native_mut().setConcat(a.native(), b.native()) }; m } pub fn dirty_matrix_type_cache(&mut self) { self.native_mut().fTypeMask = 0x80; } pub fn set_scale_translate( &mut self, (sx, sy): (scalar, scalar), t: impl Into<Vector>, ) -> &mut Self { let t = t.into(); unsafe { sb::C_SkMatrix_setScaleTranslate(self.native_mut(), sx, sy, t.x, t.y) } self } pub fn is_finite(&self) -> bool { unsafe { sb::C_SkMatrix_isFinite(self.native()) } } pub const fn new_identity() -> Self { Self::new() } } impl IndexGet for Matrix {} impl IndexSet for Matrix {} pub const IDENTITY: Matrix = Matrix::new_identity(); #[test] fn test_get_set_trait_compilation() { let mut m = Matrix::new_identity(); let _x = m.get(AffineMember::ScaleX); m.set(AffineMember::ScaleX, 1.0); } #[test] #[allow(clippy::float_cmp)] fn test_tuple_to_vector() { let mut m = Matrix::new_identity(); m.set_translate((10.0, 11.0)); assert_eq!(10.0, m.translate_x()); assert_eq!(11.0, m.translate_y()); } #[test] fn setting_a_matrix_component_recomputes_typemask() { let mut m = Matrix::default(); assert_eq!(TypeMask::IDENTITY, m.get_type()); m.set_persp_x(0.1); assert_eq!( TypeMask::TRANSLATE | TypeMask::SCALE | TypeMask::AFFINE | TypeMask::PERSPECTIVE, m.get_type() ); }
index
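// Usage sketch for the Matrix API above (illustrative only, not part of
// matrix.rs): composing transforms and mapping a point. It relies only on
// constructors and methods defined in this file; the function name is
// hypothetical.
fn matrix_usage_sketch() {
    // `Mul` forwards to Matrix::concat(&self, &rhs), so `a * b` applies `b`
    // to a point first and `a` second.
    let transform = Matrix::translate((10.0, 20.0)) * Matrix::rotate_deg(45.0);
    let mapped = transform.map_point((1.0, 0.0));
    // `invert` returns None for non-invertible matrices.
    if let Some(inverse) = transform.invert() {
        // Mapping back through the inverse recovers (approximately) the
        // original point.
        let _round_trip = inverse.map_point(mapped);
    }
}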
UpdateUserAvatarService.ts
import { injectable, inject } from 'tsyringe'; import User from '../infra/typeorm/entities/User'; import AppError from '@shared/errors/AppError'; import IUsersRepository from '../repositories/IUsersRepository'; import IStorageProvider from '@shared/container/providers/StorageProvider/models/IStorageProvider'; interface IRequest { userId: string; avatarFilename: string; } @injectable() class
{ constructor( @inject('UsersRepository') private usersRepository: IUsersRepository, @inject('StorageProvider') private storageProvider: IStorageProvider, ) {} public async execute({ userId, avatarFilename }: IRequest): Promise<User> { const user = await this.usersRepository.findById(userId); if (!user) { throw new AppError('Only authenticated users can change avatar.', 401); } if (user.avatar) { await this.storageProvider.deleteFile(user.avatar); } const filename = await this.storageProvider.saveFile(avatarFilename); user.avatar = filename; await this.usersRepository.save(user); return user; } } export default UpdateUserAvatarService;
UpdateUserAvatarService
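// Usage sketch (illustrative only, not taken from this repository): resolving
// the service through tsyringe. It assumes 'UsersRepository' and
// 'StorageProvider' have been registered with the container elsewhere; the
// import path, function name and argument values are made up for the example.
import { container } from 'tsyringe';
import UpdateUserAvatarService from './UpdateUserAvatarService';

async function updateAvatarExample(userId: string, uploadedFilename: string) {
  const updateUserAvatar = container.resolve(UpdateUserAvatarService);
  // Throws AppError(401) when userId does not belong to an existing user.
  return updateUserAvatar.execute({ userId, avatarFilename: uploadedFilename });
}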
WasmFunction.ts
import { Importable } from '../imports/Importable'; /** * Represents a function in WASM. * Note: This class is named WasmFunction instead of Function * to avoid conflicts with the default type "Function" */ export class
implements Importable { Index: number; }
WasmFunction
about.component.ts
// ------------------------------------------------------------------------------ // ----- about.component.ts ------------------------------------------------------ // ------------------------------------------------------------------------------ // copyright: 2017 WiM - USGS // authors: Tonia Roddick USGS Wisconsin Internet Mapping // purpose: modal used to show about information import { Component, ViewChild, OnInit, OnDestroy } from '@angular/core'; import { NgbModal, ModalDismissReasons } from '@ng-bootstrap/ng-bootstrap'; import { NSSService } from 'app/shared/services/app.service'; import { HttpClient, HttpHeaders } from '@angular/common/http'; import { FormGroup, FormBuilder, FormControl, Validators } from '@angular/forms'; import { ToasterService } from 'angular2-toaster/angular2-toaster'; import { SettingsService } from 'app/settings/settings.service'; declare var opr: any; declare var InstallTrigger: any; @Component({ selector: 'aboutModal', templateUrl: './about.component.html', styleUrls: ['./about.component.scss'] }) export class
implements OnInit, OnDestroy { @ViewChild('about', {static: true}) public aboutModal; // : ModalDirective; //modal for validator @ViewChild('form', {static: true}) ticketForm; private modalElement: any; public CloseResult: any; private modalSubscript; public appVersion: string; public Browser: string; public freshdeskCredentials; public WorkspaceID: string; public RegionID: string; public Server: string; private file: File | null = null; public newTicketForm: FormGroup; constructor( private http: HttpClient, public fb: FormBuilder, private _nssService: NSSService, public _settingsservice: SettingsService, private _modalService: NgbModal, private _toasterService: ToasterService, ) { this.newTicketForm = fb.group({ 'email': new FormControl(null, Validators.required), 'subject': new FormControl(null, Validators.required), 'description': new FormControl(null, Validators.required), 'attachment': new FormControl(null) }); } ngOnInit() { this.getBrowser(); // show the filter modal == Change Filters button was clicked in sidebar this.modalSubscript = this._nssService.showAboutModal.subscribe((show: boolean) => { if (show) { this.showAboutModal(); } }); this._nssService.getVersion.subscribe((v: string) => { this.appVersion = v; }); this.modalElement = this.aboutModal; } public showAboutModal(): void { this._modalService.open(this.modalElement, { backdrop: 'static', keyboard: false, size: 'lg' }).result.then( result => { // this is the solution for the first modal losing scrollability if (document.querySelector('body > .modal')) { document.body.classList.add('modal-open'); } this.CloseResult = `Closed with: ${result}`; }, reason => { this.CloseResult = `Dismissed ${this.getDismissReason(reason)}`; } ); } private getDismissReason(reason: any): string { if (reason === ModalDismissReasons.ESC) return 'by pressing ESC'; else if (reason === ModalDismissReasons.BACKDROP_CLICK) return 'by clicking on a backdrop'; else return `with: ${reason}`; } ngOnDestroy() { this.modalSubscript.unsubscribe(); } private getBrowser() { //modified from https://stackoverflow.com/questions/9847580/how-to-detect-safari-chrome-ie-firefox-and-opera-browser // Opera 8.0+ if ((!!(<any>window).opr && !!opr.addons) || !!(<any>window).opera || navigator.userAgent.indexOf(' OPR/') >= 0) this.Browser = "Opera"; // Firefox 1.0+ if (typeof InstallTrigger !== 'undefined') this.Browser = "Firefox"; // At least Safari 3+: "[object HTMLElementConstructor]" if (Object.prototype.toString.call((<any>window).HTMLElement).indexOf('Constructor') > 0) this.Browser = "Safari"; // Chrome 1+ if (!!(<any>window).chrome && (!!(<any>window).chrome.webstore||!!(<any>window).chrome.runtime)) this.Browser = "Chrome"; // Edge 20+ if (!(/*@cc_on!@*/false || !!(<any>document).documentMode) && !!(<any>window).StyleMedia) this.Browser = "Edge"; // Chromium-based Edge if (window.navigator.userAgent.toLowerCase().indexOf('edg/') > -1) this.Browser = "Chromium Edge"; // Internet Explorer 6-11 if (/*@cc_on!@*/false || !!(<any>document).documentMode) this.Browser = "IE"; } uploadFile(event) { const temp = (event.target as HTMLInputElement).files[0]; this.file = temp; } removeFile(){ this.newTicketForm.controls['attachment'].setValue(null); this.file = null; } private cancelAbout() { this.newTicketForm.reset(); this.removeFile(); } async submitFreshDeskTicket() { this.freshdeskCredentials = await this.http.get('./assets/secrets.json').toPromise() var url = "https://streamstats.freshdesk.com/api/v2/tickets" // need formdata object to send file correctly var formdata = 
new FormData(); formdata.append('status', "2"); formdata.append('tags[]', 'NSS'); formdata.append('[custom_fields][browser]', this.Browser); formdata.append('[custom_fields][softwareversion]', this.appVersion); if (this.file){ // if file was uploaded, add to form data formdata.append('attachments[]', this.file, this.file.name); } // read form values from html const formVal = this.newTicketForm.value; formdata.append('subject', formVal.subject); formdata.append('email', formVal.email); formdata.append('description', formVal.description); const headers: HttpHeaders = new HttpHeaders({ "Authorization": "Basic " + btoa(this.freshdeskCredentials.Token + ":" + 'X') }); // delete content type so webkit boundaries don't get added headers.delete('Content-Type'); this.http.post<any>(url, formdata, { headers: headers, observe: "response"}).subscribe( (res) => { this._toasterService.pop('info', 'Info', 'Ticket was created'), this.cancelAbout(); },(error) => { this._toasterService.pop('error', 'Error', 'Error creating ticket') } ); } }
AboutModal
obj_import_mtl_test.go
package obj import ( "os" "testing" ) func
(t *testing.T) { if os.Getenv("SINGLE_TEST") != "1" { return } obj := &ObjData{} err := importMtl(obj, "../eq/soldungb/cache/soldungb.mtl") if err != nil { t.Fatalf("importMtl: %s", err) } t.Fatalf("%+v", obj) }
TestImportMtl
helper.py
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """ IMPORTANT: This code is taken directly from Tensorflow (https://github.com/tensorflow/tensorflow) and is copied temporarily until it is available in a packaged Tensorflow version on pypi. TODO(dennybritz): Delete this code when it becomes available in TF. A library of helpers for use with SamplingDecoders. """ # pylint: skip-file from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc import six from tensorflow.contrib.distributions.python.ops import categorical # from tensorflow.contrib.seq2seq.python.ops import decoder from seq2seq.contrib.seq2seq import decoder from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import embedding_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import random_ops from tensorflow.python.ops import tensor_array_ops from tensorflow.python.util import nest __all__ = [ "Helper", "TrainingHelper", "GreedyEmbeddingHelper", "CustomHelper", "ScheduledEmbeddingTrainingHelper", ] _transpose_batch_time = decoder._transpose_batch_time # pylint: disable=protected-access @six.add_metaclass(abc.ABCMeta) class Helper(object): """Helper interface. Helper instances are used by SamplingDecoder.""" @abc.abstractproperty def batch_size(self): """Returns a scalar int32 tensor.""" raise NotImplementedError("batch_size has not been implemented") @abc.abstractmethod def initialize(self, name=None): """Returns `(initial_finished, initial_inputs)`.""" pass @abc.abstractmethod def sample(self, time, outputs, state, name=None): """Returns `sample_ids`.""" pass @abc.abstractmethod def next_inputs(self, time, outputs, state, sample_ids, name=None): """Returns `(finished, next_inputs, next_state)`.""" pass class CustomHelper(Helper): """Base abstract class that allows the user to customize sampling.""" def __init__(self, initialize_fn, sample_fn, next_inputs_fn): """Initializer. Args: initialize_fn: callable that returns `(finished, next_inputs)` for the first iteration. sample_fn: callable that takes `(time, outputs, state)` and emits tensor `sample_ids`. next_inputs_fn: callable that takes `(time, outputs, state, sample_ids)` and emits `(finished, next_inputs, next_state)`. 
""" self._initialize_fn = initialize_fn self._sample_fn = sample_fn self._next_inputs_fn = next_inputs_fn self._batch_size = None @property def batch_size(self): if self._batch_size is None: raise ValueError("batch_size accessed before initialize was called") return self._batch_size def initialize(self, name=None): with ops.name_scope(name, "%sInitialize" % type(self).__name__): (finished, next_inputs) = self._initialize_fn() if self._batch_size is None: self._batch_size = array_ops.size(finished) return (finished, next_inputs) def sample(self, time, outputs, state, name=None): with ops.name_scope(name, "%sSample" % type(self).__name__, (time, outputs, state)): return self._sample_fn(time=time, outputs=outputs, state=state) def next_inputs(self, time, outputs, state, sample_ids, name=None): with ops.name_scope(name, "%sNextInputs" % type(self).__name__, (time, outputs, state)): return self._next_inputs_fn( time=time, outputs=outputs, state=state, sample_ids=sample_ids) class TrainingHelper(Helper): """A helper for use during training. Only reads inputs. Returned sample_ids are the argmax of the RNN output logits. """ def __init__(self, inputs, sequence_length, time_major=False, name=None): """Initializer. Args: inputs: A (structure of) input tensors. sequence_length: An int32 vector tensor. time_major: Python bool. Whether the tensors in `inputs` are time major. If `False` (default), they are assumed to be batch major. name: Name scope for any created operations. Raises: ValueError: if `sequence_length` is not a 1D tensor. """ with ops.name_scope(name, "TrainingHelper", [inputs, sequence_length]): inputs = ops.convert_to_tensor(inputs, name="inputs") if not time_major: inputs = nest.map_structure(_transpose_batch_time, inputs) def _unstack_ta(inp): return tensor_array_ops.TensorArray( dtype=inp.dtype, size=array_ops.shape(inp)[0], element_shape=inp.get_shape()[1:]).unstack(inp) self._input_tas = nest.map_structure(_unstack_ta, inputs) self._sequence_length = ops.convert_to_tensor( sequence_length, name="sequence_length") if self._sequence_length.get_shape().ndims != 1: raise ValueError( "Expected sequence_length to be a vector, but received shape: %s" % self._sequence_length.get_shape()) self._zero_inputs = nest.map_structure( lambda inp: array_ops.zeros_like(inp[0, :]), inputs) self._batch_size = array_ops.size(sequence_length) @property def batch_size(self): return self._batch_size def initialize(self, name=None): with ops.name_scope(name, "TrainingHelperInitialize"): finished = math_ops.equal(0, self._sequence_length) all_finished = math_ops.reduce_all(finished) next_inputs = control_flow_ops.cond( all_finished, lambda: self._zero_inputs, lambda: nest.map_structure(lambda inp: inp.read(0), self._input_tas)) return (finished, next_inputs) def sample(self, time, outputs, name=None, **unused_kwargs): with ops.name_scope(name, "TrainingHelperSample", [time, outputs]): sample_ids = math_ops.cast( math_ops.argmax( outputs, axis=-1), dtypes.int32) return sample_ids def next_inputs(self, time, outputs, state, name=None, **unused_kwargs): """next_inputs_fn for TrainingHelper.""" with ops.name_scope(name, "TrainingHelperNextInputs", [time, outputs, state]): next_time = time + 1 finished = (next_time >= self._sequence_length) all_finished = math_ops.reduce_all(finished) def read_from_ta(inp): return inp.read(next_time) next_inputs = control_flow_ops.cond( all_finished, lambda: self._zero_inputs, lambda: nest.map_structure(read_from_ta, self._input_tas)) return (finished, next_inputs, state) class 
ScheduledEmbeddingTrainingHelper(TrainingHelper): """A training helper that adds scheduled sampling. Returns -1s for sample_ids where no sampling took place; valid sample id values elsewhere. """ def __init__(self, inputs, sequence_length, embedding, sampling_probability, time_major=False, seed=None, scheduling_seed=None, name=None): """Initializer. Args: inputs: A (structure of) input tensors. sequence_length: An int32 vector tensor. embedding: A callable that takes a vector tensor of `ids` (argmax ids), or the `params` argument for `embedding_lookup`. sampling_probability: A 0D `float32` tensor: the probability of sampling categorically from the output ids instead of reading directly from the inputs. time_major: Python bool. Whether the tensors in `inputs` are time major. If `False` (default), they are assumed to be batch major. seed: The sampling seed. scheduling_seed: The schedule decision rule sampling seed. name: Name scope for any created operations. Raises: ValueError: if `sampling_probability` is not a scalar or vector. """ with ops.name_scope(name, "ScheduledEmbeddingSamplingWrapper", [embedding, sampling_probability]): if callable(embedding): self._embedding_fn = embedding else: self._embedding_fn = ( lambda ids: embedding_ops.embedding_lookup(embedding, ids)) self._sampling_probability = ops.convert_to_tensor( sampling_probability, name="sampling_probability") if self._sampling_probability.get_shape().ndims not in (0, 1): raise ValueError( "sampling_probability must be either a scalar or a vector. " "saw shape: %s" % (self._sampling_probability.get_shape())) self._seed = seed self._scheduling_seed = scheduling_seed super(ScheduledEmbeddingTrainingHelper, self).__init__( inputs=inputs, sequence_length=sequence_length, time_major=time_major, name=name) def initialize(self, name=None): return super(ScheduledEmbeddingTrainingHelper, self).initialize(name=name) def sample(self, time, outputs, state, name=None): with ops.name_scope(name, "ScheduledEmbeddingTrainingHelperSample", [time, outputs, state]): # Return -1s where we did not sample, and sample_ids elsewhere select_sample_noise = random_ops.random_uniform( [self.batch_size], seed=self._scheduling_seed) select_sample = (self._sampling_probability > select_sample_noise) sample_id_sampler = categorical.Categorical(logits=outputs) return array_ops.where( select_sample, sample_id_sampler.sample(seed=self._seed), array_ops.tile([-1], [self.batch_size])) def next_inputs(self, time, outputs, state, sample_ids, name=None): with ops.name_scope(name, "ScheduledEmbeddingTrainingHelperSample", [time, outputs, state, sample_ids]): (finished, base_next_inputs, state) = ( super(ScheduledEmbeddingTrainingHelper, self).next_inputs( time=time, outputs=outputs, state=state, sample_ids=sample_ids, name=name)) def maybe_sample(): """Perform scheduled sampling.""" where_sampling = math_ops.cast( array_ops.where(sample_ids > -1), dtypes.int32) where_not_sampling = math_ops.cast( array_ops.where(sample_ids <= -1), dtypes.int32) where_sampling_flat = array_ops.reshape(where_sampling, [-1]) where_not_sampling_flat = array_ops.reshape(where_not_sampling, [-1]) sample_ids_sampling = array_ops.gather(sample_ids, where_sampling_flat) inputs_not_sampling = array_ops.gather(base_next_inputs, where_not_sampling_flat) sampled_next_inputs = self._embedding_fn(sample_ids_sampling) base_shape = array_ops.shape(base_next_inputs) return (array_ops.scatter_nd( indices=where_sampling, updates=sampled_next_inputs, shape=base_shape) + array_ops.scatter_nd( 
indices=where_not_sampling, updates=inputs_not_sampling, shape=base_shape)) all_finished = math_ops.reduce_all(finished) next_inputs = control_flow_ops.cond( all_finished, lambda: base_next_inputs, maybe_sample) return (finished, next_inputs, state) class GreedyEmbeddingHelper(Helper): """A helper for use during inference. Uses the argmax of the output (treated as logits) and passes the result through an embedding layer to get the next input. """ def __init__(self, embedding, start_tokens, end_token): """Initializer. Args: embedding: A callable that takes a vector tensor of `ids` (argmax ids), or the `params` argument for `embedding_lookup`. start_tokens: `int32` vector shaped `[batch_size]`, the start tokens. end_token: `int32` scalar, the token that marks end of decoding. Raises: ValueError: if `sequence_length` is not a 1D tensor. """ if callable(embedding): self._embedding_fn = embedding else: self._embedding_fn = ( lambda ids: embedding_ops.embedding_lookup(embedding, ids)) self._start_tokens = ops.convert_to_tensor( start_tokens, dtype=dtypes.int32, name="start_tokens") self._end_token = ops.convert_to_tensor( end_token, dtype=dtypes.int32, name="end_token") if self._start_tokens.get_shape().ndims != 1: raise ValueError("start_tokens must be a vector") self._batch_size = array_ops.size(start_tokens) if self._end_token.get_shape().ndims != 0: raise ValueError("end_token must be a scalar") self._start_inputs = self._embedding_fn(self._start_tokens) @property def batch_size(self): return self._batch_size def initialize(self, name=None): finished = array_ops.tile([False], [self._batch_size]) return (finished, self._start_inputs) def sample(self, time, outputs, state, name=None): """sample for GreedyEmbeddingHelper.""" del time, state # unused by sample_fn # Outputs are logits, use argmax to get the most probable id if not isinstance(outputs, ops.Tensor): raise TypeError("Expected outputs to be a single Tensor, got: %s" % outputs) sample_ids = math_ops.cast(math_ops.argmax(outputs, axis=-1), dtypes.int32) return sample_ids def next_inputs(self, time, outputs, state, sample_ids, name=None):
"""next_inputs_fn for GreedyEmbeddingHelper.""" del time, outputs # unused by next_inputs_fn finished = math_ops.equal(sample_ids, self._end_token) all_finished = math_ops.reduce_all(finished) next_inputs = control_flow_ops.cond( all_finished, # If we're finished, the next_inputs value doesn't matter lambda: self._start_inputs, lambda: self._embedding_fn(sample_ids)) return (finished, next_inputs, state)
numberOfDivisions.test.js
expect(app(100, 2)).toBe(6); });
import app from '../../src/codewars/numberOfDivisions'; it('numberOfDivisions test', () => { expect(app(6, 2)).toBe(2);
dataset_characteristics.py
import datasets.import_datasets as im import pandas as pd #Takes a very long time to run, probably not worth running when the output CSV already exists datasets = ["BMS1", "BMS2", "toydata", "uci_retail", "mushroom", "Belgian_retail", "chess", "connect", "pumsb", "pumsb_star", "T40I10D100K", "T10I4D100K", "accidents", "instacart"] def
(datasets): df = pd.DataFrame(columns=['Dataset Name', 'Number of transactions', 'Number of Unique items', 'Minimum Transaction Length', 'Maximum Transaction Length', 'Average Transaction Length']) for dataset_name in datasets: print("Analysing", dataset_name) data = im.import_dataset(dataset_name) data = data.astype('bool') average = 0 minimum = 100000 maximum = 0 for _, row in data.iterrows(): transaction_len = sum(row) #Minimum transaction length if minimum > transaction_len: minimum = transaction_len #Maximum transaction length if maximum < transaction_len: maximum = transaction_len #Average transaction length average += transaction_len new_row = {'Dataset Name':dataset_name, 'Number of transactions':data.shape[0], 'Number of Unique items':data.shape[1], 'Minimum Transaction Length':minimum, 'Maximum Transaction Length':maximum, 'Average Transaction Length':average/data.shape[0] } df = df.append(new_row, ignore_index=True) print(df) return df main(datasets).to_csv('Dataset_details.csv')
main
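# Vectorised sketch of the same per-transaction statistics (illustrative only,
# not part of the original script; the slow part of main() is the row-by-row
# loop). Assumes `data` is the boolean DataFrame returned by im.import_dataset().
def transaction_length_stats(data):
    lengths = data.sum(axis=1)  # number of items in each transaction
    return {
        'Minimum Transaction Length': int(lengths.min()),
        'Maximum Transaction Length': int(lengths.max()),
        'Average Transaction Length': float(lengths.mean()),
    }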
bitmap_data.rs
//! flash.display.BitmapData object use crate::avm1::activation::Activation; use crate::avm1::error::Error; use crate::avm1::function::{Executable, FunctionObject}; use crate::avm1::object::bitmap_data::{BitmapDataObject, ChannelOptions, Color}; use crate::avm1::{Object, TObject, Value}; use crate::character::Character; use crate::display_object::TDisplayObject; use enumset::EnumSet; use gc_arena::MutationContext; pub fn constructor<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { let width = args .get(0) .unwrap_or(&Value::Number(0.0)) .coerce_to_i32(activation)?; let height = args .get(1) .unwrap_or(&Value::Number(0.0)) .coerce_to_i32(activation)?; if width > 2880 || height > 2880 || width <= 0 || height <= 0 { log::warn!("Invalid BitmapData size {}x{}", width, height); return Err(Error::ConstructorFailure); } let transparency = args .get(2) .unwrap_or(&Value::Bool(true)) .as_bool(activation.current_swf_version()); let fill_color = args .get(3) // can't write this in hex // 0xFFFFFFFF as f64; .unwrap_or(&Value::Number(4294967295_f64)) .coerce_to_i32(activation)?; if let Some(bitmap_data) = this.as_bitmap_data_object() { bitmap_data .bitmap_data() .write(activation.context.gc_context) .init_pixels(width as u32, height as u32, fill_color, transparency); } Ok(Value::Undefined) } pub fn height<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { return Ok(bitmap_data.bitmap_data().read().height().into()); } } Ok((-1).into()) } pub fn width<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { return Ok(bitmap_data.bitmap_data().read().width().into()); } } Ok((-1).into()) } pub fn get_transparent<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { return Ok(bitmap_data.bitmap_data().read().transparency().into()); } } Ok((-1).into()) } pub fn get_rectangle<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { let proto = activation.context.system_prototypes.rectangle_constructor; let rect = proto.construct( activation, &[ 0.into(), 0.into(), bitmap_data.bitmap_data().read().width().into(), bitmap_data.bitmap_data().read().height().into(), ], )?; return Ok(rect.into()); } } Ok((-1).into()) } pub fn get_pixel<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { if let (Some(x_val), Some(y_val)) = (args.get(0), args.get(1)) { let x = x_val.coerce_to_i32(activation)?; let y = y_val.coerce_to_i32(activation)?; return Ok(bitmap_data.bitmap_data().read().get_pixel(x, y).into()); } } } Ok((-1).into()) } pub fn get_pixel32<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { if 
let (Some(x_val), Some(y_val)) = (args.get(0), args.get(1)) { let x = x_val.coerce_to_i32(activation)?; let y = y_val.coerce_to_i32(activation)?; let col: i32 = bitmap_data.bitmap_data().read().get_pixel32(x, y).into(); return Ok(col.into()); } } } Ok((-1).into()) } pub fn set_pixel<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { if let (Some(x_val), Some(y_val), Some(color_val)) = (args.get(0), args.get(1), args.get(2)) { let x = x_val.coerce_to_u32(activation)?; let y = y_val.coerce_to_u32(activation)?; let color = color_val.coerce_to_i32(activation)?; bitmap_data .bitmap_data() .write(activation.context.gc_context) .set_pixel(x, y, color.into()); return Ok(Value::Undefined); } } } Ok((-1).into()) } pub fn set_pixel32<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { if let (Some(x_val), Some(y_val), Some(color_val)) = (args.get(0), args.get(1), args.get(2)) { let x = x_val.coerce_to_i32(activation)?; let y = y_val.coerce_to_i32(activation)?; let color = color_val.coerce_to_i32(activation)?; bitmap_data .bitmap_data() .write(activation.context.gc_context) .set_pixel32(x, y, color.into()); } return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn copy_channel<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { let source_bitmap = args .get(0) .unwrap_or(&Value::Undefined) .coerce_to_object(activation); let source_rect = args .get(1) .unwrap_or(&Value::Undefined) .coerce_to_object(activation); let dest_point = args .get(2) .unwrap_or(&Value::Undefined) .coerce_to_object(activation); let source_channel = args .get(3) .unwrap_or(&Value::Undefined) .coerce_to_i32(activation)?; let dest_channel = args .get(4) .unwrap_or(&Value::Undefined) .coerce_to_i32(activation)?; if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { if let Some(source_bitmap) = source_bitmap.as_bitmap_data_object() { //TODO: what if source is disposed let min_x = dest_point .get("x", activation)? .coerce_to_u32(activation)? .min(bitmap_data.bitmap_data().read().width()); let min_y = dest_point .get("y", activation)? .coerce_to_u32(activation)? .min(bitmap_data.bitmap_data().read().height()); let src_min_x = source_rect .get("x", activation)? .coerce_to_u32(activation)?; let src_min_y = source_rect .get("y", activation)? .coerce_to_u32(activation)?; let src_width = source_rect .get("width", activation)? .coerce_to_u32(activation)?; let src_height = source_rect .get("height", activation)? 
.coerce_to_u32(activation)?; let src_max_x = src_min_x + src_width; let src_max_y = src_min_y + src_height; let src_bitmap_data = source_bitmap.bitmap_data(); bitmap_data .bitmap_data() .write(activation.context.gc_context) .copy_channel( (min_x, min_y), (src_min_x, src_min_y, src_max_x, src_max_y), &src_bitmap_data.read(), source_channel, dest_channel, ); } return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn fill_rect<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { let rectangle = args .get(0) .unwrap_or(&Value::Undefined) .coerce_to_object(activation); if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { if let Some(color_val) = args.get(1) { let color = color_val.coerce_to_i32(activation)?; let x = rectangle.get("x", activation)?.coerce_to_u32(activation)?; let y = rectangle.get("y", activation)?.coerce_to_u32(activation)?; let width = rectangle .get("width", activation)? .coerce_to_u32(activation)?; let height = rectangle .get("height", activation)? .coerce_to_u32(activation)?; bitmap_data .bitmap_data() .write(activation.context.gc_context) .fill_rect(x, y, width, height, color.into()); } return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn clone<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { let proto = activation.context.system_prototypes.bitmap_data_constructor; let new_bitmap_data = proto.construct( activation, &[ bitmap_data.bitmap_data().read().width().into(), bitmap_data.bitmap_data().read().height().into(), bitmap_data.bitmap_data().read().transparency().into(), 0xFFFFFF.into(), ], )?; let new_bitmap_data_object = new_bitmap_data.as_bitmap_data_object().unwrap(); new_bitmap_data_object .bitmap_data() .write(activation.context.gc_context) .set_pixels(bitmap_data.bitmap_data().read().pixels().to_vec()); return Ok(new_bitmap_data.into()); } } Ok((-1).into()) } pub fn dispose<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { bitmap_data.dispose(activation.context.gc_context); return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn flood_fill<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { if let (Some(x_val), Some(y_val), Some(color_val)) = (args.get(0), args.get(1), args.get(2)) { let x = x_val.coerce_to_u32(activation)?; let y = y_val.coerce_to_u32(activation)?; let color = color_val.coerce_to_i32(activation)?; let color: Color = color.into(); let color: Color = color.to_premultiplied_alpha(bitmap_data.bitmap_data().read().transparency()); bitmap_data .bitmap_data() .write(activation.context.gc_context) .flood_fill(x, y, color); } return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn noise<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { let low = args .get(1) .unwrap_or(&Value::Number(0.0)) .coerce_to_u32(activation)? as u8; let high = args .get(2) .unwrap_or(&Value::Number(255.0)) .coerce_to_u32(activation)? 
as u8; let channel_options = args .get(3) .unwrap_or(&Value::Number(ChannelOptions::rgb().0 as f64)) .coerce_to_u32(activation)?; let gray_scale = args .get(4) .unwrap_or(&Value::Bool(false)) .as_bool(activation.current_swf_version()); if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { if let Some(random_seed_val) = args.get(0) { let random_seed = random_seed_val.coerce_to_u32(activation)?; bitmap_data .bitmap_data() .write(activation.context.gc_context) .noise( activation.context.rng, random_seed, low, high, channel_options.into(), gray_scale, ) } return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn apply_filter<'gc>( _activation: &mut Activation<'_, 'gc, '_>, _this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { log::warn!("BitmapData.applyFilter - not yet implemented"); Ok((-1).into()) } pub fn draw<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { log::warn!("BitmapData.draw - not yet implemented"); return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn generate_filter_rect<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { log::warn!("BitmapData.generateFilterRect - not yet implemented"); return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn color_transform<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { let rectangle = args .get(0) .unwrap_or(&Value::Undefined) .coerce_to_object(activation); let color_transform = args .get(1) .unwrap_or(&Value::Undefined) .coerce_to_object(activation); let x = rectangle.get("x", activation)?.coerce_to_i32(activation)?; let y = rectangle.get("y", activation)?.coerce_to_i32(activation)?; let width = rectangle .get("width", activation)? .coerce_to_i32(activation)?; let height = rectangle .get("height", activation)? 
.coerce_to_i32(activation)?; let min_x = x.max(0) as u32; let end_x = (x + width) as u32; let min_y = y.max(0) as u32; let end_y = (y + height) as u32; if let Some(color_transform) = color_transform.as_color_transform_object() { bitmap_data .bitmap_data() .write(activation.context.gc_context) .color_transform(min_x, min_y, end_x, end_y, color_transform); } return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn get_color_bounds_rect<'gc>( activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { let find_color = args .get(2) .unwrap_or(&Value::Bool(true)) .as_bool(activation.current_swf_version()); if let (Some(mask_val), Some(color_val)) = (args.get(0), args.get(1)) { let mask = mask_val.coerce_to_i32(activation)?; let color = color_val.coerce_to_i32(activation)?; let (x, y, w, h) = bitmap_data .bitmap_data() .read() .color_bounds_rect(find_color, mask, color); let proto = activation.context.system_prototypes.rectangle_constructor; let rect = proto.construct(activation, &[x.into(), y.into(), w.into(), h.into()])?; return Ok(rect.into()); } } } Ok((-1).into()) } pub fn perlin_noise<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { log::warn!("BitmapData.perlinNoise - not yet implemented"); return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn hit_test<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { log::warn!("BitmapData.hitTest - not yet implemented"); return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn copy_pixels<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>>
pub fn merge<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { log::warn!("BitmapData.merge - not yet implemented"); return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn palette_map<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { log::warn!("BitmapData.paletteMap - not yet implemented"); return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn pixel_dissolve<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { log::warn!("BitmapData.pixelDissolve - not yet implemented"); return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn scroll<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { log::warn!("BitmapData.scroll - not yet implemented"); return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn threshold<'gc>( _activation: &mut Activation<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { log::warn!("BitmapData.threshold - not yet implemented"); return Ok(Value::Undefined); } } Ok((-1).into()) } pub fn create_proto<'gc>( gc_context: MutationContext<'gc, '_>, proto: Object<'gc>, fn_proto: Object<'gc>, ) -> Object<'gc> { let bitmap_data_object = BitmapDataObject::empty_object(gc_context, Some(proto)); let mut object = bitmap_data_object.as_script_object().unwrap(); object.add_property( gc_context, "height", FunctionObject::function( gc_context, Executable::Native(height), Some(fn_proto), fn_proto, ), None, EnumSet::empty(), ); object.add_property( gc_context, "width", FunctionObject::function( gc_context, Executable::Native(width), Some(fn_proto), fn_proto, ), None, EnumSet::empty(), ); object.add_property( gc_context, "transparent", FunctionObject::function( gc_context, Executable::Native(get_transparent), Some(fn_proto), fn_proto, ), None, EnumSet::empty(), ); object.add_property( gc_context, "rectangle", FunctionObject::function( gc_context, Executable::Native(get_rectangle), Some(fn_proto), fn_proto, ), None, EnumSet::empty(), ); object.force_set_function( "getPixel", get_pixel, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "getPixel32", get_pixel32, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "setPixel", set_pixel, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "setPixel32", set_pixel32, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "copyChannel", copy_channel, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "fillRect", fill_rect, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function("clone", clone, gc_context, EnumSet::empty(), Some(fn_proto)); object.force_set_function( "dispose", dispose, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "floodFill", flood_fill, gc_context, EnumSet::empty(), Some(fn_proto), ); 
object.force_set_function("noise", noise, gc_context, EnumSet::empty(), Some(fn_proto)); object.force_set_function( "colorTransform", color_transform, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "getColorBoundsRect", get_color_bounds_rect, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "perlinNoise", perlin_noise, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "applyFilter", apply_filter, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function("draw", draw, gc_context, EnumSet::empty(), Some(fn_proto)); object.force_set_function( "hitTest", hit_test, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "generateFilterRect", generate_filter_rect, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "copyPixels", copy_pixels, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function("merge", merge, gc_context, EnumSet::empty(), Some(fn_proto)); object.force_set_function( "paletteMap", palette_map, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "pixelDissolve", pixel_dissolve, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "scroll", scroll, gc_context, EnumSet::empty(), Some(fn_proto), ); object.force_set_function( "threshold", threshold, gc_context, EnumSet::empty(), Some(fn_proto), ); bitmap_data_object.into() } pub fn load_bitmap<'gc>( activation: &mut Activation<'_, 'gc, '_>, _this: Object<'gc>, args: &[Value<'gc>], ) -> Result<Value<'gc>, Error<'gc>> { let name = args .get(0) .unwrap_or(&Value::Undefined) .coerce_to_string(activation)?; let library = &*activation.context.library; let movie = activation.target_clip_or_root()?.movie(); let renderer = &mut activation.context.renderer; let character = movie .and_then(|m| library.library_for_movie(m)) .and_then(|l| l.get_character_by_export_name(name.as_str())); if let Some(Character::Bitmap(bitmap_object)) = character { if let Some(bitmap) = renderer.get_bitmap_pixels(bitmap_object.bitmap_handle()) { let proto = activation.context.system_prototypes.bitmap_data_constructor; let new_bitmap = proto.construct(activation, &[bitmap.width.into(), bitmap.height.into()])?; let new_bitmap_object = new_bitmap.as_bitmap_data_object().unwrap(); let pixels: Vec<i32> = bitmap.data.into(); new_bitmap_object .bitmap_data() .write(activation.context.gc_context) .set_pixels(pixels.into_iter().map(|p| p.into()).collect()); return Ok(new_bitmap.into()); } } Ok(Value::Undefined) } pub fn create_bitmap_data_object<'gc>( gc_context: MutationContext<'gc, '_>, bitmap_data_proto: Object<'gc>, fn_proto: Option<Object<'gc>>, ) -> Object<'gc> { let object = FunctionObject::constructor( gc_context, Executable::Native(constructor), fn_proto, bitmap_data_proto, ); let mut script_object = object.as_script_object().unwrap(); script_object.force_set_function( "loadBitmap", load_bitmap, gc_context, EnumSet::empty(), fn_proto, ); object }
{ if let Some(bitmap_data) = this.as_bitmap_data_object() { if !bitmap_data.disposed() { log::warn!("BitmapData.copyPixels - not yet implemented"); return Ok(Value::Undefined); } } Ok((-1).into()) }
gapi.client.adexchangebuyer-tests.ts
/* This is a stub file for gapi.client.adexchangebuyer definition tests */ /* IMPORTANT. * This file was automatically generated by https://github.com/Bolisov/google-api-typings-generator. Please do not edit it manually. * In case of any problems please post an issue to https://github.com/Bolisov/google-api-typings-generator **/ gapi.load('client', () => { /** now we can use gapi.client */ gapi.client.load('adexchangebuyer', 'v1.4', () => { /** now we can use gapi.client.adexchangebuyer */ /** don't forget to authenticate your client before sending any request to resources: */ /** declare client_id registered in Google Developers Console */ const client_id = '<<PUT YOUR CLIENT ID HERE>>'; const scope = [ /** Manage your Ad Exchange buyer account configuration */ 'https://www.googleapis.com/auth/adexchange.buyer', ]; const immediate = true; gapi.auth.authorize({ client_id, scope, immediate }, authResult => { if (authResult && !authResult.error) { /** handle successful authorization */ run(); } else { /** handle authorization error */ } }); run(); }); async function
() { /** Gets one account by ID. */ await gapi.client.adexchangebuyer.accounts.get({ id: 1, }); /** Retrieves the authenticated user's list of accounts. */ await gapi.client.adexchangebuyer.accounts.list({ }); /** Updates an existing account. This method supports patch semantics. */ await gapi.client.adexchangebuyer.accounts.patch({ confirmUnsafeAccountChange: true, id: 2, }); /** Updates an existing account. */ await gapi.client.adexchangebuyer.accounts.update({ confirmUnsafeAccountChange: true, id: 2, }); /** Returns the billing information for one account specified by account ID. */ await gapi.client.adexchangebuyer.billingInfo.get({ accountId: 1, }); /** Retrieves a list of billing information for all accounts of the authenticated user. */ await gapi.client.adexchangebuyer.billingInfo.list({ }); /** Returns the budget information for the adgroup specified by the accountId and billingId. */ await gapi.client.adexchangebuyer.budget.get({ accountId: "accountId", billingId: "billingId", }); /** * Updates the budget amount for the budget of the adgroup specified by the accountId and billingId, with the budget amount in the request. This method * supports patch semantics. */ await gapi.client.adexchangebuyer.budget.patch({ accountId: "accountId", billingId: "billingId", }); /** Updates the budget amount for the budget of the adgroup specified by the accountId and billingId, with the budget amount in the request. */ await gapi.client.adexchangebuyer.budget.update({ accountId: "accountId", billingId: "billingId", }); /** Add a deal id association for the creative. */ await gapi.client.adexchangebuyer.creatives.addDeal({ accountId: 1, buyerCreativeId: "buyerCreativeId", dealId: "dealId", }); /** Gets the status for a single creative. A creative will be available 30-40 minutes after submission. */ await gapi.client.adexchangebuyer.creatives.get({ accountId: 1, buyerCreativeId: "buyerCreativeId", }); /** Submit a new creative. */ await gapi.client.adexchangebuyer.creatives.insert({ }); /** Retrieves a list of the authenticated user's active creatives. A creative will be available 30-40 minutes after submission. */ await gapi.client.adexchangebuyer.creatives.list({ accountId: 1, buyerCreativeId: "buyerCreativeId", dealsStatusFilter: "dealsStatusFilter", maxResults: 4, openAuctionStatusFilter: "openAuctionStatusFilter", pageToken: "pageToken", }); /** Lists the external deal ids associated with the creative. */ await gapi.client.adexchangebuyer.creatives.listDeals({ accountId: 1, buyerCreativeId: "buyerCreativeId", }); /** Remove a deal id associated with the creative. 
*/ await gapi.client.adexchangebuyer.creatives.removeDeal({ accountId: 1, buyerCreativeId: "buyerCreativeId", dealId: "dealId", }); /** Delete the specified deals from the proposal */ await gapi.client.adexchangebuyer.marketplacedeals.delete({ proposalId: "proposalId", }); /** Add new deals for the specified proposal */ await gapi.client.adexchangebuyer.marketplacedeals.insert({ proposalId: "proposalId", }); /** List all the deals for a given proposal */ await gapi.client.adexchangebuyer.marketplacedeals.list({ pqlQuery: "pqlQuery", proposalId: "proposalId", }); /** Replaces all the deals in the proposal with the passed in deals */ await gapi.client.adexchangebuyer.marketplacedeals.update({ proposalId: "proposalId", }); /** Add notes to the proposal */ await gapi.client.adexchangebuyer.marketplacenotes.insert({ proposalId: "proposalId", }); /** Get all the notes associated with a proposal */ await gapi.client.adexchangebuyer.marketplacenotes.list({ pqlQuery: "pqlQuery", proposalId: "proposalId", }); /** Update a given private auction proposal */ await gapi.client.adexchangebuyer.marketplaceprivateauction.updateproposal({ privateAuctionId: "privateAuctionId", }); /** Retrieves the authenticated user's list of performance metrics. */ await gapi.client.adexchangebuyer.performanceReport.list({ accountId: "accountId", endDateTime: "endDateTime", maxResults: 3, pageToken: "pageToken", startDateTime: "startDateTime", }); /** Deletes an existing pretargeting config. */ await gapi.client.adexchangebuyer.pretargetingConfig.delete({ accountId: "accountId", configId: "configId", }); /** Gets a specific pretargeting configuration */ await gapi.client.adexchangebuyer.pretargetingConfig.get({ accountId: "accountId", configId: "configId", }); /** Inserts a new pretargeting configuration. */ await gapi.client.adexchangebuyer.pretargetingConfig.insert({ accountId: "accountId", }); /** Retrieves a list of the authenticated user's pretargeting configurations. */ await gapi.client.adexchangebuyer.pretargetingConfig.list({ accountId: "accountId", }); /** Updates an existing pretargeting config. This method supports patch semantics. */ await gapi.client.adexchangebuyer.pretargetingConfig.patch({ accountId: "accountId", configId: "configId", }); /** Updates an existing pretargeting config. */ await gapi.client.adexchangebuyer.pretargetingConfig.update({ accountId: "accountId", configId: "configId", }); /** Gets the requested product by id. */ await gapi.client.adexchangebuyer.products.get({ productId: "productId", }); /** Gets the requested product. */ await gapi.client.adexchangebuyer.products.search({ pqlQuery: "pqlQuery", }); /** Get a proposal given its id */ await gapi.client.adexchangebuyer.proposals.get({ proposalId: "proposalId", }); /** Create the given list of proposals */ await gapi.client.adexchangebuyer.proposals.insert({ }); /** Update the given proposal. This method supports patch semantics. */ await gapi.client.adexchangebuyer.proposals.patch({ proposalId: "proposalId", revisionNumber: "revisionNumber", updateAction: "updateAction", }); /** Search for proposals using pql query */ await gapi.client.adexchangebuyer.proposals.search({ pqlQuery: "pqlQuery", }); /** Update the given proposal to indicate that setup has been completed. 
*/ await gapi.client.adexchangebuyer.proposals.setupcomplete({ proposalId: "proposalId", }); /** Update the given proposal */ await gapi.client.adexchangebuyer.proposals.update({ proposalId: "proposalId", revisionNumber: "revisionNumber", updateAction: "updateAction", }); /** Gets the requested publisher profile(s) by publisher accountId. */ await gapi.client.adexchangebuyer.pubprofiles.list({ accountId: 1, }); } });
run
0004_specialization_department.py
# Generated by Django 3.2.7 on 2021-09-18 16:04 from django.db import migrations, models import django.db.models.deletion class
(migrations.Migration): dependencies = [ ('user', '0003_auto_20210918_2131'), ] operations = [ migrations.AddField( model_name='specialization', name='department', field=models.ForeignKey(default='', on_delete=django.db.models.deletion.CASCADE, to='user.department'), preserve_default=False, ), ]
Migration
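The migration above adds a non-nullable `department` ForeignKey to the `Specialization` model in the `user` app; the throwaway `default=''` together with `preserve_default=False` exists only to backfill existing rows. A minimal sketch of the model state this implies follows; everything except the added `department` field (the other field names and `max_length` values) is an assumption for illustration, not taken from the migration.

# Hypothetical models.py after 0004_specialization_department (sketch only).
from django.db import models


class Department(models.Model):
    # Assumed field; the migration only tells us the model is referenced as 'user.department'.
    name = models.CharField(max_length=255)


class Specialization(models.Model):
    # Assumed field.
    name = models.CharField(max_length=255)
    # Added by this migration: deleting a Department also deletes its Specializations (CASCADE).
    department = models.ForeignKey(Department, on_delete=models.CASCADE)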
fm2o2.py
import glob import os from xml.dom import minidom import xml.etree.ElementTree as ET path = r"C:\Users\shamb\Desktop\dita_demo" valid_path = r"C:\Users\shamb\Desktop\dita_demo_scrubbed" wildcard = "*.xml" full_path = os.path.join(path, wildcard) os.makedirs(valid_path, exist_ok=True) file_list = glob.glob(full_path) print("The file set includes:") for this_file in file_list: print(this_file)
# print(type(mydoc)) tree = ET.parse(this_file) root = tree.getroot() print('\nAll item data:') for elem in root: for subelem in elem: print(subelem.text)
# mydoc = minidom.parse(this_file)
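In the fragment above, `valid_path` (the `dita_demo_scrubbed` directory) is created but never written to, and the loop only prints element text. Purely as an illustration of one plausible next step, and not part of the original script, here is a hedged sketch that writes each parsed tree back out into that directory; the output filenames and `tree.write` options are assumptions.

# Illustrative sketch only: copy each parsed XML document into the scrubbed
# directory. The original fm2o2.py fragment stops at printing, so this step
# is an assumption about intent rather than source behaviour.
import glob
import os
import xml.etree.ElementTree as ET

path = r"C:\Users\shamb\Desktop\dita_demo"
valid_path = r"C:\Users\shamb\Desktop\dita_demo_scrubbed"
os.makedirs(valid_path, exist_ok=True)

for this_file in glob.glob(os.path.join(path, "*.xml")):
    tree = ET.parse(this_file)
    out_file = os.path.join(valid_path, os.path.basename(this_file))
    # xml_declaration keeps the <?xml ...?> header that DITA tooling expects.
    tree.write(out_file, encoding="utf-8", xml_declaration=True)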
i8042.rs
// // Copyright 2022 The Project Oak Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // use x86_64::instructions::{hlt, port::PortWriteOnly}; /// Shuts down the machine via i8042. Assumes the VMM exposes the device. pub fn shutdown() -> !
{ // This is safe as both qemu and crosvm expose the i8042 device by default. unsafe { let mut port = PortWriteOnly::new(0x64); port.write(0xFE_u8); } loop { hlt(); } }
p.ts
} from '@brickd/core' const p: ComponentConfigTypes = { nodePropsConfig: { children: { type: NODE_PROPS_TYPES.reactNode, }, }, propsConfig: { children: { label: '文本内容', type: PROPS_TYPES.string, }, }, } export default p
import { ComponentConfigTypes, NODE_PROPS_TYPES, PROPS_TYPES,
pipelineRun.go
// *** WARNING: this file was generated by the Pulumi SDK Generator. *** // *** Do not edit by hand unless you're certain you know what you are doing! *** package containerregistry import ( "context" "reflect" "github.com/pkg/errors" "github.com/pulumi/pulumi/sdk/v2/go/pulumi" ) // An object that represents a pipeline run for a container registry. // API Version: 2020-11-01-preview. type PipelineRun struct { pulumi.CustomResourceState // How the pipeline run should be forced to recreate even if the pipeline run configuration has not changed. ForceUpdateTag pulumi.StringPtrOutput `pulumi:"forceUpdateTag"` // The name of the resource. Name pulumi.StringOutput `pulumi:"name"` // The provisioning state of a pipeline run. ProvisioningState pulumi.StringOutput `pulumi:"provisioningState"` // The request parameters for a pipeline run. Request PipelineRunRequestResponsePtrOutput `pulumi:"request"` // The response of a pipeline run. Response PipelineRunResponseResponseOutput `pulumi:"response"` // Metadata pertaining to creation and last modification of the resource. SystemData SystemDataResponseOutput `pulumi:"systemData"` // The type of the resource. Type pulumi.StringOutput `pulumi:"type"` } // NewPipelineRun registers a new resource with the given unique name, arguments, and options. func NewPipelineRun(ctx *pulumi.Context, name string, args *PipelineRunArgs, opts ...pulumi.ResourceOption) (*PipelineRun, error) { if args == nil { return nil, errors.New("missing one or more required arguments") } if args.RegistryName == nil { return nil, errors.New("invalid value for required argument 'RegistryName'") } if args.ResourceGroupName == nil { return nil, errors.New("invalid value for required argument 'ResourceGroupName'") } aliases := pulumi.Aliases([]pulumi.Alias{ { Type: pulumi.String("azure-nextgen:containerregistry/v20191201preview:PipelineRun"), }, { Type: pulumi.String("azure-nextgen:containerregistry/v20201101preview:PipelineRun"), }, }) opts = append(opts, aliases) var resource PipelineRun err := ctx.RegisterResource("azure-nextgen:containerregistry:PipelineRun", name, args, &resource, opts...) if err != nil { return nil, err } return &resource, nil } // GetPipelineRun gets an existing PipelineRun resource's state with the given name, ID, and optional // state properties that are used to uniquely qualify the lookup (nil if not required). func GetPipelineRun(ctx *pulumi.Context, name string, id pulumi.IDInput, state *PipelineRunState, opts ...pulumi.ResourceOption) (*PipelineRun, error) { var resource PipelineRun err := ctx.ReadResource("azure-nextgen:containerregistry:PipelineRun", name, id, state, &resource, opts...) if err != nil { return nil, err } return &resource, nil } // Input properties used for looking up and filtering PipelineRun resources. type pipelineRunState struct { // How the pipeline run should be forced to recreate even if the pipeline run configuration has not changed. ForceUpdateTag *string `pulumi:"forceUpdateTag"` // The name of the resource. Name *string `pulumi:"name"` // The provisioning state of a pipeline run. ProvisioningState *string `pulumi:"provisioningState"` // The request parameters for a pipeline run. Request *PipelineRunRequestResponse `pulumi:"request"` // The response of a pipeline run. Response *PipelineRunResponseResponse `pulumi:"response"` // Metadata pertaining to creation and last modification of the resource. SystemData *SystemDataResponse `pulumi:"systemData"` // The type of the resource. 
Type *string `pulumi:"type"` } type PipelineRunState struct { // How the pipeline run should be forced to recreate even if the pipeline run configuration has not changed. ForceUpdateTag pulumi.StringPtrInput // The name of the resource. Name pulumi.StringPtrInput // The provisioning state of a pipeline run. ProvisioningState pulumi.StringPtrInput // The request parameters for a pipeline run. Request PipelineRunRequestResponsePtrInput // The response of a pipeline run. Response PipelineRunResponseResponsePtrInput // Metadata pertaining to creation and last modification of the resource. SystemData SystemDataResponsePtrInput // The type of the resource. Type pulumi.StringPtrInput } func (PipelineRunState) ElementType() reflect.Type { return reflect.TypeOf((*pipelineRunState)(nil)).Elem() } type pipelineRunArgs struct { // How the pipeline run should be forced to recreate even if the pipeline run configuration has not changed. ForceUpdateTag *string `pulumi:"forceUpdateTag"` // The name of the pipeline run. PipelineRunName *string `pulumi:"pipelineRunName"` // The name of the container registry. RegistryName string `pulumi:"registryName"` // The request parameters for a pipeline run. Request *PipelineRunRequest `pulumi:"request"` // The name of the resource group to which the container registry belongs. ResourceGroupName string `pulumi:"resourceGroupName"` } // The set of arguments for constructing a PipelineRun resource. type PipelineRunArgs struct { // How the pipeline run should be forced to recreate even if the pipeline run configuration has not changed. ForceUpdateTag pulumi.StringPtrInput // The name of the pipeline run. PipelineRunName pulumi.StringPtrInput // The name of the container registry. RegistryName pulumi.StringInput // The request parameters for a pipeline run. Request PipelineRunRequestPtrInput // The name of the resource group to which the container registry belongs. ResourceGroupName pulumi.StringInput } func (PipelineRunArgs) ElementType() reflect.Type { return reflect.TypeOf((*pipelineRunArgs)(nil)).Elem() } type PipelineRunInput interface { pulumi.Input ToPipelineRunOutput() PipelineRunOutput ToPipelineRunOutputWithContext(ctx context.Context) PipelineRunOutput } func (*PipelineRun) ElementType() reflect.Type { return reflect.TypeOf((*PipelineRun)(nil)) } func (i *PipelineRun) ToPipelineRunOutput() PipelineRunOutput { return i.ToPipelineRunOutputWithContext(context.Background()) } func (i *PipelineRun) ToPipelineRunOutputWithContext(ctx context.Context) PipelineRunOutput { return pulumi.ToOutputWithContext(ctx, i).(PipelineRunOutput) } type PipelineRunOutput struct { *pulumi.OutputState } func (PipelineRunOutput) ElementType() reflect.Type { return reflect.TypeOf((*PipelineRun)(nil)) } func (o PipelineRunOutput) ToPipelineRunOutput() PipelineRunOutput { return o } func (o PipelineRunOutput) ToPipelineRunOutputWithContext(ctx context.Context) PipelineRunOutput { return o } func
() { pulumi.RegisterOutputType(PipelineRunOutput{}) }
init
views.py
from StringIO import StringIO import copy import logging import hashlib import itertools from lxml import etree import os import re import json from collections import defaultdict from xml.dom.minidom import parseString from diff_match_patch import diff_match_patch from django.core.cache import cache from django.template.loader import render_to_string from django.utils.translation import ugettext as _, get_language from django.views.decorators.cache import cache_control from corehq import ApplicationsTab, toggles, privileges, feature_previews from corehq.apps.app_manager import commcare_settings from corehq.apps.app_manager.exceptions import ( AppEditingError, AppManagerException, BlankXFormError, ConflictingCaseTypeError, FormNotFoundException, IncompatibleFormTypeException, ModuleNotFoundException, RearrangeError, ) from corehq.apps.app_manager.forms import CopyApplicationForm from corehq.apps.app_manager import id_strings from corehq.apps.app_manager.templatetags.xforms_extras import trans from corehq.apps.commtrack.models import Program from corehq.apps.hqmedia.views import DownloadMultimediaZip from corehq.apps.hqwebapp.templatetags.hq_shared_tags import toggle_enabled from corehq.apps.hqwebapp.utils import get_bulk_upload_form from corehq.apps.reports.formdetails.readable import ( FormQuestionResponse, questions_in_hierarchy, ) from corehq.apps.sms.views import get_sms_autocomplete_context from django.utils.http import urlencode as django_urlencode from couchdbkit.exceptions import ResourceConflict from django.http import HttpResponse, Http404, HttpResponseBadRequest, HttpResponseForbidden from unidecode import unidecode from django.http import HttpResponseRedirect from django.core.urlresolvers import reverse, RegexURLResolver, Resolver404 from django.shortcuts import render from corehq.apps.translations.models import Translation from corehq.util.view_utils import set_file_download from dimagi.utils.django.cached_object import CachedObject from django.utils.http import urlencode from django.views.decorators.http import require_GET from django.conf import settings from couchdbkit.resource import ResourceNotFound from corehq.apps.app_manager.const import ( APP_V1, APP_V2, CAREPLAN_GOAL, CAREPLAN_TASK, MAJOR_RELEASE_TO_VERSION, ) from corehq.apps.app_manager.success_message import SuccessMessage from corehq.apps.app_manager.util import is_valid_case_type, get_all_case_properties, add_odk_profile_after_build, ParentCasePropertyBuilder, commtrack_ledger_sections from corehq.apps.app_manager.util import save_xform, get_settings_values from corehq.apps.domain.models import Domain from corehq.apps.domain.views import DomainViewMixin from corehq.apps.translations import system_text as st_trans from corehq.util.compression import decompress from couchexport.export import FormattedRow, export_raw from couchexport.models import Format from couchexport.shortcuts import export_response from couchexport.writers import Excel2007ExportWriter from dimagi.utils.couch.database import get_db from dimagi.utils.couch.resource_conflict import retry_resource from corehq.apps.app_manager.xform import ( CaseError, XForm, XFormError, XFormValidationError, ) from corehq.apps.builds.models import CommCareBuildConfig, BuildSpec from corehq.apps.users.decorators import require_permission from corehq.apps.users.models import Permissions from dimagi.utils.decorators.memoized import memoized from dimagi.utils.decorators.view import get_file from dimagi.utils.django.cache import make_template_fragment_key from 
dimagi.utils.excel import WorkbookJSONReader from dimagi.utils.logging import notify_exception from dimagi.utils.subprocess_timeout import ProcessTimedOut from dimagi.utils.web import json_response, json_request from corehq.apps.reports import util as report_utils from corehq.apps.domain.decorators import login_and_domain_required, login_or_digest from corehq.apps.app_manager.models import ( AdvancedForm, AdvancedFormActions, AdvancedModule, Application, ApplicationBase, CareplanForm, CareplanModule, DeleteApplicationRecord, DeleteFormRecord, DeleteModuleRecord, DetailColumn, Form, FormActions, Module, ParentSelect, SavedAppBuild, get_app, load_case_reserved_words, str_to_cls, ) from corehq.apps.app_manager.models import import_app as import_app_util, SortElement from dimagi.utils.web import get_url_base from corehq.apps.app_manager.decorators import safe_download, no_conflict_require_POST, \ require_can_edit_apps, require_deploy_apps from django.contrib import messages from django_prbac.exceptions import PermissionDenied from django_prbac.utils import ensure_request_has_privilege logger = logging.getLogger(__name__) def _encode_if_unicode(s): return s.encode('utf-8') if isinstance(s, unicode) else s CASE_TYPE_CONFLICT_MSG = ( "Warning: The form's new module " "has a different case type from the old module.<br />" "Make sure all case properties you are loading " "are available in the new case type" ) @require_deploy_apps def back_to_main(req, domain, app_id=None, module_id=None, form_id=None, unique_form_id=None, edit=True): """ returns an HttpResponseRedirect back to the main page for the App Manager app with the correct GET parameters. This is meant to be used by views that process a POST request, which then redirect to the main page. """ page = None params = {} if edit: params['edit'] = 'true' args = [domain] if app_id is not None: args.append(app_id) if unique_form_id is not None: app = get_app(domain, app_id) obj = app.get_form(unique_form_id, bare=False) if obj['type'] == 'user_registration': page = 'view_user_registration' else: module_id = obj['module'].id form_id = obj['form'].id if module_id is not None: args.append(module_id) if form_id is not None: args.append(form_id) if page: view_name = page else: view_name = { 1: 'default', 2: 'view_app', 3: 'view_module', 4: 'view_form', }[len(args)] return HttpResponseRedirect("%s%s" % ( reverse('corehq.apps.app_manager.views.%s' % view_name, args=args), "?%s" % urlencode(params) if params else "" )) def bail(req, domain, app_id, not_found=""): if not_found: messages.error(req, 'Oops! We could not find that %s. Please try again' % not_found) else: messages.error(req, 'Oops! We could not complete your request. 
Please try again') return back_to_main(req, domain, app_id) def _get_xform_source(request, app, form, filename="form.xml"): download = json.loads(request.GET.get('download', 'false')) lang = request.COOKIES.get('lang', app.langs[0]) source = form.source if download: response = HttpResponse(source) response['Content-Type'] = "application/xml" for lc in [lang] + app.langs: if lc in form.name: filename = "%s.xml" % unidecode(form.name[lc]) break set_file_download(response, filename) return response else: return json_response(source) @require_can_edit_apps def get_xform_source(req, domain, app_id, module_id, form_id): app = get_app(domain, app_id) try: form = app.get_module(module_id).get_form(form_id) except IndexError: raise Http404() return _get_xform_source(req, app, form) @require_can_edit_apps def get_user_registration_source(req, domain, app_id): app = get_app(domain, app_id) form = app.get_user_registration() return _get_xform_source(req, app, form, filename="User Registration.xml") def xform_display(req, domain, form_unique_id): try: form, app = Form.get_form(form_unique_id, and_app=True) except ResourceNotFound: raise Http404() if domain != app.domain: raise Http404() langs = [req.GET.get('lang')] + app.langs questions = form.get_questions(langs, include_triggers=True, include_groups=True) if req.GET.get('format') == 'html': questions = [FormQuestionResponse(q) for q in questions] return render(req, 'app_manager/xform_display.html', { 'questions': questions_in_hierarchy(questions) }) else: return json_response(questions) @require_can_edit_apps def form_casexml(req, domain, form_unique_id): try: form, app = Form.get_form(form_unique_id, and_app=True) except ResourceNotFound: raise Http404() if domain != app.domain: raise Http404() return HttpResponse(form.create_casexml()) @login_or_digest @require_can_edit_apps def app_source(req, domain, app_id): app = get_app(domain, app_id) return HttpResponse(app.export_json()) @require_can_edit_apps def copy_app_check_domain(req, domain, name, app_id): app_copy = import_app_util(app_id, domain, name=name) return back_to_main(req, app_copy.domain, app_id=app_copy._id) @require_can_edit_apps def copy_app(req, domain): app_id = req.POST.get('app') form = CopyApplicationForm(app_id, req.POST) if form.is_valid(): return copy_app_check_domain(req, form.cleaned_data['domain'], form.cleaned_data['name'], app_id) else: return view_generic(req, domain, app_id=app_id, copy_app_form=form) @require_can_edit_apps def import_app(req, domain, template="app_manager/import_app.html"): if req.method == "POST": _clear_app_cache(req, domain) name = req.POST.get('name') compressed = req.POST.get('compressed') valid_request = True if not name: messages.error(req, _("You must submit a name for the application you are importing.")) valid_request = False if not compressed: messages.error(req, _("You must submit the source data.")) valid_request = False if not valid_request: return render(req, template, {'domain': domain}) source = decompress([chr(int(x)) if int(x) < 256 else int(x) for x in compressed.split(',')]) source = json.loads(source) assert(source is not None) app = import_app_util(source, domain, name=name) return back_to_main(req, domain, app_id=app._id) else: app_id = req.GET.get('app') redirect_domain = req.GET.get('domain') or None if redirect_domain is not None: redirect_domain = redirect_domain.lower() if Domain.get_by_name(redirect_domain): return HttpResponseRedirect( reverse('import_app', args=[redirect_domain]) + "?app={app_id}".format(app_id=app_id) ) 
else: if redirect_domain: messages.error(req, "We can't find a project called %s." % redirect_domain) else: messages.error(req, "You left the project name blank.") return HttpResponseRedirect(req.META.get('HTTP_REFERER', req.path)) if app_id: app = get_app(None, app_id) assert(app.get_doc_type() in ('Application', 'RemoteApp')) assert(req.couch_user.is_member_of(app.domain)) else: app = None return render(req, template, { 'domain': domain, 'app': app, 'is_superuser': req.couch_user.is_superuser }) @require_deploy_apps def default(req, domain): """ Handles a url that does not include an app_id. Currently the logic is taken care of by view_app, but this view exists so that there's something to reverse() to. (I guess I should use url(..., name="default") in url.py instead?) """ return view_app(req, domain) def get_form_view_context_and_template(request, form, langs, is_user_registration, messages=messages): xform_questions = [] xform = None form_errors = [] xform_validation_errored = False try: xform = form.wrapped_xform() except XFormError as e: form_errors.append(u"Error in form: %s" % e) except Exception as e: logging.exception(e) form_errors.append(u"Unexpected error in form: %s" % e) if xform and xform.exists(): if xform.already_has_meta(): messages.warning(request, "This form has a meta block already! " "It may be replaced by CommCare HQ's standard meta block." ) try: form.validate_form() xform_questions = xform.get_questions(langs, include_triggers=True) except etree.XMLSyntaxError as e: form_errors.append(u"Syntax Error: %s" % e) except AppEditingError as e: form_errors.append(u"Error in application: %s" % e) except XFormValidationError: xform_validation_errored = True # showing these messages is handled by validate_form_for_build ajax pass except XFormError as e: form_errors.append(u"Error in form: %s" % e) # any other kind of error should fail hard, # but for now there are too many for that to be practical except Exception as e: if settings.DEBUG: raise notify_exception(request, 'Unexpected Build Error') form_errors.append(u"Unexpected System Error: %s" % e) else: # remove upload questions (attachemnts) until MM Case Properties # are released to general public is_previewer = toggles.MM_CASE_PROPERTIES.enabled(request.user.username) xform_questions = [q for q in xform_questions if q["tag"] != "upload" or is_previewer] try: form_action_errors = form.validate_for_build() if not form_action_errors: form.add_stuff_to_xform(xform) if settings.DEBUG and False: xform.validate() except CaseError as e: messages.error(request, u"Error in Case Management: %s" % e) except XFormValidationError as e: messages.error(request, unicode(e)) except Exception as e: if settings.DEBUG: raise logging.exception(unicode(e)) messages.error(request, u"Unexpected Error: %s" % e) try: languages = xform.get_languages() except Exception: languages = [] for err in form_errors: messages.error(request, err) module_case_types = [] app = form.get_app() if is_user_registration: module_case_types = None else: for module in app.get_modules(): for case_type in module.get_case_types(): module_case_types.append({ 'id': module.unique_id, 'module_name': trans(module.name, langs), 'case_type': case_type, 'module_type': module.doc_type }) if not form.unique_id: form.get_unique_id() app.save() context = { 'is_user_registration': is_user_registration, 'nav_form': form if not is_user_registration else '', 'xform_languages': languages, "xform_questions": xform_questions, 'case_reserved_words_json': load_case_reserved_words(), 
'module_case_types': module_case_types, 'form_errors': form_errors, 'xform_validation_errored': xform_validation_errored, 'allow_cloudcare': app.application_version == APP_V2 and isinstance(form, Form), 'allow_form_copy': isinstance(form, Form), 'allow_form_filtering': not isinstance(form, CareplanForm), 'allow_form_workflow': not isinstance(form, CareplanForm), } if isinstance(form, CareplanForm): context.update({ 'mode': form.mode, 'fixed_questions': form.get_fixed_questions(), 'custom_case_properties': [{'key': key, 'path': path} for key, path in form.custom_case_updates.items()], 'case_preload': [{'key': key, 'path': path} for key, path in form.case_preload.items()], }) return "app_manager/form_view_careplan.html", context elif isinstance(form, AdvancedForm): def commtrack_programs(): if app.commtrack_enabled: programs = Program.by_domain(app.domain) return [{'value': program.get_id, 'label': program.name} for program in programs] else: return [] all_programs = [{'value': '', 'label': _('All Programs')}] context.update({ 'show_custom_ref': toggles.APP_BUILDER_CUSTOM_PARENT_REF.enabled(request.user.username), 'commtrack_programs': all_programs + commtrack_programs(), }) return "app_manager/form_view_advanced.html", context else: context.update({ 'show_custom_ref': toggles.APP_BUILDER_CUSTOM_PARENT_REF.enabled(request.user.username), }) return "app_manager/form_view.html", context def get_app_view_context(request, app): is_cloudcare_allowed = False try: ensure_request_has_privilege(request, privileges.CLOUDCARE) is_cloudcare_allowed = True except PermissionDenied: pass settings_layout = copy.deepcopy( commcare_settings.LAYOUT[app.get_doc_type()]) for section in settings_layout: new_settings = [] for setting in section['settings']: toggle_name = setting.get('toggle') if toggle_name and not toggle_enabled(request, toggle_name): continue new_settings.append(setting) section['settings'] = new_settings context = { 'settings_layout': settings_layout, 'settings_values': get_settings_values(app), 'is_cloudcare_allowed': is_cloudcare_allowed, } build_config = CommCareBuildConfig.fetch() options = build_config.get_menu() if not request.user.is_superuser: options = [option for option in options if not option.superuser_only] options_map = defaultdict(lambda:{"values": [], "value_names": []}) for option in options: builds = options_map[option.build.major_release()] builds["values"].append(option.build.to_string()) builds["value_names"].append(option.get_label()) if "default" not in builds: app_ver = MAJOR_RELEASE_TO_VERSION[option.build.major_release()] builds["default"] = build_config.get_default(app_ver).to_string() (build_spec_setting,) = filter( lambda x: x['type'] == 'hq' and x['id'] == 'build_spec', [setting for section in context['settings_layout'] for setting in section['settings']] ) build_spec_setting['options_map'] = options_map build_spec_setting['default_app_version'] = app.application_version if app.get_doc_type() == 'Application': try: # todo remove get_media_references multimedia = app.get_media_references() except ProcessTimedOut: notify_exception(request) messages.warning(request, ( "We were unable to check if your forms had errors. " "Refresh the page and we will try again." 
)) multimedia = { 'references': {}, 'form_errors': True, 'missing_refs': False, } context.update({ 'multimedia': multimedia, }) context.update({ 'bulk_upload': { 'action': reverse('upload_translations', args=(app.domain, app.get_id)), 'download_url': reverse('download_translations', args=(app.domain, app.get_id)), 'adjective': _(u"U\u200BI translation"), 'plural_noun': _(u"U\u200BI translations"), }, }) context.update({ 'bulk_upload_form': get_bulk_upload_form(context), }) return context def get_langs(request, app): lang = request.GET.get('lang', request.COOKIES.get('lang', app.langs[0] if hasattr(app, 'langs') and app.langs else '') ) langs = None if app and hasattr(app, 'langs'): if not app.langs and not app.is_remote_app: # lots of things fail if the app doesn't have any languages. # the best we can do is add 'en' if there's nothing else. app.langs.append('en') app.save() if not lang or lang not in app.langs: lang = (app.langs or ['en'])[0] langs = [lang] + app.langs return lang, langs def _clear_app_cache(request, domain): from corehq import ApplicationsTab ApplicationBase.get_db().view('app_manager/applications_brief', startkey=[domain], limit=1, ).all() for is_active in True, False: key = make_template_fragment_key('header_tab', [ domain, None, # tab.org should be None for any non org page ApplicationsTab.view, is_active, request.couch_user.get_id, get_language(), ]) cache.delete(key) def get_apps_base_context(request, domain, app): lang, langs = get_langs(request, app) if getattr(request, 'couch_user', None): edit = ((request.GET.get('edit', 'true') == 'true') and (request.couch_user.can_edit_apps(domain) or request.user.is_superuser)) timezone = report_utils.get_timezone(request.couch_user, domain) else: edit = False timezone = None context = { 'lang': lang, 'langs': langs, 'domain': domain, 'edit': edit, 'app': app, 'URL_BASE': get_url_base(), 'timezone': timezone, } if app: for _lang in app.langs: try: SuccessMessage(app.success_message.get(_lang, ''), '').check_message() except Exception as e: messages.error(request, "Your success message is malformed: %s is not a keyword" % e) v2_app = app.application_version == APP_V2 context.update({ 'show_care_plan': (v2_app and not app.has_careplan_module and toggles.APP_BUILDER_CAREPLAN.enabled(request.user.username)), 'show_advanced': (v2_app and (toggles.APP_BUILDER_ADVANCED.enabled(request.user.username) or getattr(app, 'commtrack_enabled', False))), }) return context @cache_control(no_cache=True, no_store=True) @require_deploy_apps def paginate_releases(request, domain, app_id): limit = request.GET.get('limit') try: limit = int(limit) except ValueError: limit = 10 start_build_param = request.GET.get('start_build') if start_build_param and json.loads(start_build_param): start_build = json.loads(start_build_param) assert isinstance(start_build, int) else: start_build = {} timezone = report_utils.get_timezone(request.couch_user, domain) saved_apps = get_db().view('app_manager/saved_app', startkey=[domain, app_id, start_build], endkey=[domain, app_id], descending=True, limit=limit, wrapper=lambda x: SavedAppBuild.wrap(x['value']).to_saved_build_json(timezone), ).all() include_media = toggles.APP_BUILDER_INCLUDE_MULTIMEDIA_ODK.enabled( request.user.username ) for app in saved_apps: app['include_media'] = include_media and app['doc_type'] != 'RemoteApp' return json_response(saved_apps) @require_deploy_apps def release_manager(request, domain, app_id, template='app_manager/releases.html'): app = get_app(domain, app_id) latest_release = 
get_app(domain, app_id, latest=True) context = get_apps_base_context(request, domain, app) context['sms_contacts'] = get_sms_autocomplete_context(request, domain)['sms_contacts'] context.update({ 'release_manager': True, 'saved_apps': [], 'latest_release': latest_release, }) if not app.is_remote_app(): # Multimedia is not supported for remote applications at this time. # todo remove get_media_references multimedia = app.get_media_references() context.update({ 'multimedia': multimedia, }) response = render(request, template, context) response.set_cookie('lang', _encode_if_unicode(context['lang'])) return response @login_and_domain_required def current_app_version(request, domain, app_id): """ Return current app version and the latest release """ app = get_app(domain, app_id) latest = get_db().view('app_manager/saved_app', startkey=[domain, app_id, {}], endkey=[domain, app_id], descending=True, limit=1, ).first() latest_release = latest['value']['version'] if latest else None return json_response({ 'currentVersion': app.version, 'latestRelease': latest_release, }) @no_conflict_require_POST @require_can_edit_apps def release_build(request, domain, app_id, saved_app_id): is_released = request.POST.get('is_released') == 'true' ajax = request.POST.get('ajax') == 'true' saved_app = get_app(domain, saved_app_id) if saved_app.copy_of != app_id: raise Http404 saved_app.is_released = is_released saved_app.save(increment_version=False) from corehq.apps.app_manager.signals import app_post_release app_post_release.send(Application, application=saved_app) if ajax: return json_response({'is_released': is_released}) else: return HttpResponseRedirect(reverse('release_manager', args=[domain, app_id])) def get_module_view_context_and_template(app, module): defaults = ('name', 'date-opened', 'status') if app.case_sharing: defaults += ('#owner_name',) builder = ParentCasePropertyBuilder(app, defaults=defaults) def ensure_unique_ids(): # make sure all modules have unique ids modules = app.modules if any(not mod.unique_id for mod in modules): for mod in modules: mod.get_or_create_unique_id() app.save() def get_parent_modules(case_type): parent_types = builder.get_parent_types(case_type) modules = app.modules parent_module_ids = [mod.unique_id for mod in modules if mod.case_type in parent_types] return [{ 'unique_id': mod.unique_id, 'name': mod.name, 'is_parent': mod.unique_id in parent_module_ids, } for mod in app.modules if mod.case_type != case_type and mod.unique_id != module.unique_id] def get_sort_elements(details): return [prop.values() for prop in details.sort_elements] ensure_unique_ids() if isinstance(module, CareplanModule): return "app_manager/module_view_careplan.html", { 'parent_modules': get_parent_modules(CAREPLAN_GOAL), 'details': [ { 'label': _('Goal List'), 'type': 'careplan_goal', 'model': 'case', 'properties': sorted(builder.get_properties(CAREPLAN_GOAL)), 'sort_elements': json.dumps(get_sort_elements(module.goal_details.short)), 'short': module.goal_details.short, 'long': module.goal_details.long, }, { 'label': _('Task List'), 'type': 'careplan_task', 'model': 'case', 'properties': sorted(builder.get_properties(CAREPLAN_TASK)), 'sort_elements': json.dumps(get_sort_elements(module.task_details.short)), 'short': module.task_details.short, 'long': module.task_details.long, }, ], } elif isinstance(module, AdvancedModule): case_type = module.case_type def get_details(): details = [{ 'label': _('Case List'), 'type': 'case', 'model': 'case', 'properties': sorted(builder.get_properties(case_type)), 
'sort_elements': json.dumps(get_sort_elements(module.case_details.short)), 'short': module.case_details.short, 'long': module.case_details.long, }] if app.commtrack_enabled: details.append({ 'label': _('Product List'), 'type': 'product', 'model': 'product', 'properties': ['name'] + commtrack_ledger_sections(app.commtrack_requisition_mode), 'sort_elements': json.dumps(get_sort_elements(module.product_details.short)), 'short': module.product_details.short, }) return details return "app_manager/module_view.html", { 'details': get_details(), } else: case_type = module.case_type return "app_manager/module_view.html", { 'parent_modules': get_parent_modules(case_type), 'details': [ { 'label': _('Case List'), 'type': 'case', 'model': 'case', 'properties': sorted(builder.get_properties(case_type)), 'sort_elements': json.dumps(get_sort_elements(module.case_details.short)), 'short': module.case_details.short, 'long': module.case_details.long, 'parent_select': module.parent_select, }, ], } @retry_resource(3) def view_generic(req, domain, app_id=None, module_id=None, form_id=None, is_user_registration=False, copy_app_form=None): """ This is the main view for the app. All other views redirect to here. """ if form_id and not module_id: return bail(req, domain, app_id) app = module = form = None try: if app_id: app = get_app(domain, app_id) if is_user_registration: if not app.show_user_registration: raise Http404() form = app.get_user_registration() if module_id: try: module = app.get_module(module_id) except ModuleNotFoundException: raise Http404() if not module.unique_id: module.get_or_create_unique_id() app.save() if form_id: try: form = module.get_form(form_id) except IndexError: raise Http404() except ModuleNotFoundException: return bail(req, domain, app_id) context = get_apps_base_context(req, domain, app) if not app: all_applications = ApplicationBase.view('app_manager/applications_brief', startkey=[domain], endkey=[domain, {}], #stale=settings.COUCH_STALE_QUERY, ).all() if all_applications: app_id = all_applications[0].id return back_to_main(req, domain, app_id=app_id, module_id=module_id, form_id=form_id) if app and app.copy_of: # don't fail hard. 
return HttpResponseRedirect(reverse("corehq.apps.app_manager.views.view_app", args=[domain,app.copy_of])) # grandfather in people who set commcare sense earlier if app and 'use_commcare_sense' in app: if app['use_commcare_sense']: if 'features' not in app.profile: app.profile['features'] = {} app.profile['features']['sense'] = 'true' del app['use_commcare_sense'] app.save() context.update({ 'module': module, 'form': form, }) if app and not module and hasattr(app, 'translations'): context.update({"translations": app.translations.get(context['lang'], {})}) if form: template, form_context = get_form_view_context_and_template(req, form, context['langs'], is_user_registration) context.update({ 'case_properties': get_all_case_properties(app), }) context.update(form_context) elif module: template, module_context = get_module_view_context_and_template(app, module) module_context["enable_calc_xpaths"] = ( feature_previews.CALC_XPATHS.enabled(getattr(req, 'domain', None)) ) module_context["enable_enum_image"] = ( feature_previews.ENUM_IMAGE.enabled(getattr(req, 'domain', None)) ) context.update(module_context) else: template = "app_manager/app_view.html" if app: context.update(get_app_view_context(req, app)) error = req.GET.get('error', '') context.update({ 'error':error, 'app': app, }) # Pass form for Copy Application to template: context.update({ 'copy_app_form': copy_app_form if copy_app_form is not None else CopyApplicationForm(app_id) }) response = render(req, template, context) response.set_cookie('lang', _encode_if_unicode(context['lang'])) return response @require_can_edit_apps def get_commcare_version(request, app_id, app_version): options = CommCareBuildConfig.fetch().get_menu(app_version) return json_response(options) @require_can_edit_apps def view_user_registration(request, domain, app_id): return view_generic(request, domain, app_id, is_user_registration=True) @require_GET @require_deploy_apps def view_form(req, domain, app_id, module_id, form_id): return view_generic(req, domain, app_id, module_id, form_id) @require_GET @require_deploy_apps def view_module(req, domain, app_id, module_id): return view_generic(req, domain, app_id, module_id) @require_GET @require_deploy_apps def view_app(req, domain, app_id=None): # redirect old m=&f= urls module_id = req.GET.get('m', None) form_id = req.GET.get('f', None) if module_id or form_id: return back_to_main(req, domain, app_id=app_id, module_id=module_id, form_id=form_id) return view_generic(req, domain, app_id) @require_can_edit_apps def form_source(req, domain, app_id, module_id, form_id): return form_designer(req, domain, app_id, module_id, form_id) @require_can_edit_apps def user_registration_source(req, domain, app_id): return form_designer(req, domain, app_id, is_user_registration=True) @require_can_edit_apps def form_designer(req, domain, app_id, module_id=None, form_id=None, is_user_registration=False): app = get_app(domain, app_id) if is_user_registration: form = app.get_user_registration() else: try: module = app.get_module(module_id) except ModuleNotFoundException: return bail(req, domain, app_id, not_found="module") try: form = module.get_form(form_id) except IndexError: return bail(req, domain, app_id, not_found="form") context = get_apps_base_context(req, domain, app) context.update(locals()) context.update({ 'vellum_debug': settings.VELLUM_DEBUG, 'vellum_prerelease': settings.VELLUM_PRERELEASE, 'edit': True, 'nav_form': form if not is_user_registration else '', 'formdesigner': True, 'multimedia_object_map': app.get_object_map(), 
'sessionid': req.COOKIES.get('sessionid') }) return render(req, 'app_manager/form_designer.html', context) @no_conflict_require_POST @require_can_edit_apps def new_app(req, domain): "Adds an app to the database" lang = 'en' type = req.POST["type"] application_version = req.POST.get('application_version', APP_V1) cls = str_to_cls[type] if cls == Application: app = cls.new_app(domain, "Untitled Application", lang=lang, application_version=application_version) app.add_module(Module.new_module("Untitled Module", lang)) app.new_form(0, "Untitled Form", lang) else: app = cls.new_app(domain, "Untitled Application", lang=lang) if req.project.secure_submissions: app.secure_submissions = True app.save() _clear_app_cache(req, domain) app_id = app.id return back_to_main(req, domain, app_id=app_id) @no_conflict_require_POST @require_can_edit_apps def new_module(req, domain, app_id): "Adds a module to an app" app = get_app(domain, app_id) lang = req.COOKIES.get('lang', app.langs[0]) name = req.POST.get('name') module_type = req.POST.get('module_type', 'case') if module_type == 'case': module = app.add_module(Module.new_module(name, lang)) module_id = module.id app.new_form(module_id, "Untitled Form", lang) app.save() response = back_to_main(req, domain, app_id=app_id, module_id=module_id) response.set_cookie('suppress_build_errors', 'yes') return response elif module_type in MODULE_TYPE_MAP: fn = MODULE_TYPE_MAP[module_type][FN] validations = MODULE_TYPE_MAP[module_type][VALIDATIONS] error = next((v[1] for v in validations if v[0](app)), None) if error: messages.warning(req, error) return back_to_main(req, domain, app_id=app.id) else: return fn(req, domain, app, name, lang) else: logger.error('Unexpected module type for new module: "%s"' % module_type) return back_to_main(req, domain, app_id=app_id) def _new_careplan_module(req, domain, app, name, lang): from corehq.apps.app_manager.util import new_careplan_module target_module_index = req.POST.get('target_module_id') target_module = app.get_module(target_module_index) if not target_module.case_type: name = target_module.name[lang] messages.error(req, _("Please set the case type for the target module '{name}'.".format(name=name))) return back_to_main(req, domain, app_id=app.id) module = new_careplan_module(app, name, lang, target_module) app.save() response = back_to_main(req, domain, app_id=app.id, module_id=module.id) response.set_cookie('suppress_build_errors', 'yes') messages.info(req, _('Caution: Care Plan modules are a labs feature')) return response def _new_advanced_module(req, domain, app, name, lang): module = app.add_module(AdvancedModule.new_module(name, lang)) module_id = module.id app.new_form(module_id, _("Untitled Form"), lang) app.save() response = back_to_main(req, domain, app_id=app.id, module_id=module_id) response.set_cookie('suppress_build_errors', 'yes') messages.info(req, _('Caution: Advanced modules are a labs feature')) return response @no_conflict_require_POST @require_can_edit_apps def new_form(req, domain, app_id, module_id): "Adds a form to an app (under a module)" app = get_app(domain, app_id) lang = req.COOKIES.get('lang', app.langs[0]) name = req.POST.get('name') form = app.new_form(module_id, name, lang) app.save() # add form_id to locals() form_id = form.id response = back_to_main(req, domain, app_id=app_id, module_id=module_id, form_id=form_id) return response @no_conflict_require_POST @require_can_edit_apps def delete_app(req, domain, app_id): "Deletes an app from the database" app = get_app(domain, app_id) record = 
app.delete_app() messages.success(req, 'You have deleted an application. <a href="%s" class="post-link">Undo</a>' % reverse('undo_delete_app', args=[domain, record.get_id]), extra_tags='html' ) app.save() _clear_app_cache(req, domain) return back_to_main(req, domain) @no_conflict_require_POST @require_can_edit_apps def undo_delete_app(request, domain, record_id): try: app = get_app(domain, record_id) app.unretire() app_id = app.id except Exception: record = DeleteApplicationRecord.get(record_id) record.undo() app_id = record.app_id _clear_app_cache(request, domain) messages.success(request, 'Application successfully restored.') return back_to_main(request, domain, app_id=app_id) @no_conflict_require_POST @require_can_edit_apps def delete_module(req, domain, app_id, module_unique_id): "Deletes a module from an app" app = get_app(domain, app_id) try: record = app.delete_module(module_unique_id) except ModuleNotFoundException: return bail(req, domain, app_id) if record is not None: messages.success(req, 'You have deleted a module. <a href="%s" class="post-link">Undo</a>' % reverse('undo_delete_module', args=[domain, record.get_id]), extra_tags='html' ) app.save() return back_to_main(req, domain, app_id=app_id) @no_conflict_require_POST @require_can_edit_apps def undo_delete_module(request, domain, record_id): record = DeleteModuleRecord.get(record_id) record.undo() messages.success(request, 'Module successfully restored.') return back_to_main(request, domain, app_id=record.app_id, module_id=record.module_id) @no_conflict_require_POST @require_can_edit_apps def delete_form(req, domain, app_id, module_unique_id, form_unique_id): "Deletes a form from an app" app = get_app(domain, app_id) record = app.delete_form(module_unique_id, form_unique_id) if record is not None: messages.success( req, 'You have deleted a form. <a href="%s" class="post-link">Undo</a>' % reverse('undo_delete_form', args=[domain, record.get_id]), extra_tags='html' ) app.save() return back_to_main( req, domain, app_id=app_id, module_id=app.get_module_by_unique_id(module_unique_id).id) @no_conflict_require_POST @require_can_edit_apps def copy_form(req, domain, app_id, module_id, form_id): app = get_app(domain, app_id) to_module_id = int(req.POST['to_module_id']) try: app.copy_form(int(module_id), int(form_id), to_module_id) except ConflictingCaseTypeError: messages.warning(req, CASE_TYPE_CONFLICT_MSG, extra_tags="html") app.save() except BlankXFormError: # don't save! messages.error(req, _('We could not copy this form, because it is blank.' 'In order to copy this form, please add some questions first.')) except IncompatibleFormTypeException: # don't save! 
messages.error(req, _('This form could not be copied because it ' 'is not compatible with the selected module.')) else: app.save() return back_to_main(req, domain, app_id=app_id, module_id=module_id, form_id=form_id) @no_conflict_require_POST @require_can_edit_apps def undo_delete_form(request, domain, record_id): record = DeleteFormRecord.get(record_id) try: record.undo() messages.success(request, 'Form successfully restored.') except ModuleNotFoundException: messages.error(request, 'Form could not be restored: module is missing.') return back_to_main(request, domain, app_id=record.app_id, module_id=record.module_id, form_id=record.form_id) @no_conflict_require_POST @require_can_edit_apps def edit_module_attr(req, domain, app_id, module_id, attr): """ Called to edit any (supported) module attribute, given by attr """ attributes = { "all": None, "case_type": None, "put_in_root": None, "display_separately": None, "name": None, "case_label": None, "referral_label": None, 'media_image': None, 'media_audio': None, "case_list": ('case_list-show', 'case_list-label'), "task_list": ('task_list-show', 'task_list-label'), "parent_module": None, } if attr not in attributes: return HttpResponseBadRequest() def should_edit(attribute): if attribute == attr: return True if 'all' == attr: if attributes[attribute]: for param in attributes[attribute]: if not req.POST.get(param): return False return True else: return req.POST.get(attribute) is not None app = get_app(domain, app_id) module = app.get_module(module_id) lang = req.COOKIES.get('lang', app.langs[0]) resp = {'update': {}} if should_edit("case_type"): case_type = req.POST.get("case_type", None) if is_valid_case_type(case_type): # todo: something better than nothing when invalid old_case_type = module["case_type"] module["case_type"] = case_type for cp_mod in (mod for mod in app.modules if isinstance(mod, CareplanModule)): if cp_mod.unique_id != module.unique_id and cp_mod.parent_select.module_id == module.unique_id: cp_mod.case_type = case_type def rename_action_case_type(mod): for form in mod.forms: for action in form.actions.get_all_actions(): if action.case_type == old_case_type: action.case_type = case_type if isinstance(module, AdvancedModule): rename_action_case_type(module) for ad_mod in (mod for mod in app.modules if isinstance(mod, AdvancedModule)): if ad_mod.unique_id != module.unique_id and ad_mod.case_type != old_case_type: # only apply change if the module's case_type does not reference the old value rename_action_case_type(ad_mod) else: return HttpResponseBadRequest("case type is improperly formatted") if should_edit("put_in_root"): module["put_in_root"] = json.loads(req.POST.get("put_in_root")) if should_edit("display_separately"): module["display_separately"] = json.loads(req.POST.get("display_separately")) if should_edit("parent_module"): parent_module = req.POST.get("parent_module") module.parent_select.module_id = parent_module for attribute in ("name", "case_label", "referral_label"): if should_edit(attribute): name = req.POST.get(attribute, None) module[attribute][lang] = name if should_edit("name"): resp['update'].update({'.variable-module_name': module.name[lang]}) for SLUG in ('case_list', 'task_list'): if should_edit(SLUG): module[SLUG].show = json.loads(req.POST['{SLUG}-show'.format(SLUG=SLUG)]) module[SLUG].label[lang] = req.POST['{SLUG}-label'.format(SLUG=SLUG)] _handle_media_edits(req, module, should_edit, resp) app.save(resp) resp['case_list-show'] = module.requires_case_details() return HttpResponse(json.dumps(resp)) 
@no_conflict_require_POST @require_can_edit_apps def edit_module_detail_screens(req, domain, app_id, module_id): """ Called to over write entire detail screens at a time """ params = json_request(req.POST) detail_type = params.get('type') screens = params.get('screens') parent_select = params.get('parent_select') sort_elements = screens['sort_elements'] if not screens: return HttpResponseBadRequest("Requires JSON encoded param 'screens'") app = get_app(domain, app_id) module = app.get_module(module_id) if detail_type == 'case': detail = module.case_details elif detail_type == CAREPLAN_GOAL: detail = module.goal_details elif detail_type == CAREPLAN_TASK: detail = module.task_details else: try: detail = getattr(module, '{0}_details'.format(detail_type)) except AttributeError: return HttpResponseBadRequest("Unknown detail type '%s'" % detail_type) detail.short.columns = map(DetailColumn.wrap, screens['short']) detail.long.columns = map(DetailColumn.wrap, screens['long']) detail.short.sort_elements = [] for sort_element in sort_elements: item = SortElement() item.field = sort_element['field'] item.type = sort_element['type'] item.direction = sort_element['direction'] detail.short.sort_elements.append(item) if parent_select: module.parent_select = ParentSelect.wrap(parent_select) resp = {} app.save(resp) return json_response(resp) def validate_module_for_build(request, domain, app_id, module_id, ajax=True): app = get_app(domain, app_id) try: module = app.get_module(module_id) except ModuleNotFoundException: raise Http404() errors = module.validate_for_build() lang, langs = get_langs(request, app) response_html = render_to_string('app_manager/partials/build_errors.html', { 'app': app, 'build_errors': errors, 'not_actual_build': True, 'domain': domain, 'langs': langs, 'lang': lang }) if ajax: return json_response({'error_html': response_html}) return HttpResponse(response_html) def _handle_media_edits(request, item, should_edit, resp): if not resp.has_key('corrections'): resp['corrections'] = {} for attribute in ('media_image', 'media_audio'): if should_edit(attribute): val = request.POST.get(attribute) if val: if val.startswith('jr://'): pass elif val.startswith('/file/'): val = 'jr:/' + val elif val.startswith('file/'): val = 'jr://' + val elif val.startswith('/'): val = 'jr://file' + val else: val = 'jr://file/' + val resp['corrections'][attribute] = val else: val = None setattr(item, attribute, val) @no_conflict_require_POST @login_or_digest @require_permission(Permissions.edit_apps, login_decorator=None) def patch_xform(request, domain, app_id, unique_form_id): patch = request.POST['patch'] sha1_checksum = request.POST['sha1'] app = get_app(domain, app_id) form = app.get_form(unique_form_id) current_xml = form.source if hashlib.sha1(current_xml.encode('utf-8')).hexdigest() != sha1_checksum: return json_response({'status': 'conflict', 'xform': current_xml}) dmp = diff_match_patch() xform, _ = dmp.patch_apply(dmp.patch_fromText(patch), current_xml) save_xform(app, form, xform) response_json = { 'status': 'ok', 'sha1': hashlib.sha1(form.source.encode('utf-8')).hexdigest() } app.save(response_json) return json_response(response_json) @no_conflict_require_POST @login_or_digest @require_permission(Permissions.edit_apps, login_decorator=None) def edit_form_attr(req, domain, app_id, unique_form_id, attr): """ Called to edit any (supported) form attribute, given by attr """ app = get_app(domain, app_id) form = app.get_form(unique_form_id) lang = req.COOKIES.get('lang', app.langs[0]) ajax = 
json.loads(req.POST.get('ajax', 'true')) resp = {} def should_edit(attribute): if req.POST.has_key(attribute): return True elif req.FILES.has_key(attribute): return True else: return False if should_edit("user_reg_data"): # should be user_registrations only data = json.loads(req.POST['user_reg_data']) data_paths = data['data_paths'] data_paths_dict = {} for path in data_paths: data_paths_dict[path.split('/')[-1]] = path form.data_paths = data_paths_dict if should_edit("name"): name = req.POST['name'] form.name[lang] = name resp['update'] = {'.variable-form_name': form.name[lang]} if should_edit("xform"): try: # support FILES for upload and POST for ajax post from Vellum try: xform = req.FILES.get('xform').read() except Exception: xform = req.POST.get('xform') else: try: xform = unicode(xform, encoding="utf-8") except Exception: raise Exception("Error uploading form: Please make sure your form is encoded in UTF-8") if req.POST.get('cleanup', False): try: # First, we strip all newlines and reformat the DOM. px = parseString(xform.replace('\r\n', '')).toprettyxml() # Then we remove excess newlines from the DOM output. text_re = re.compile('>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL) prettyXml = text_re.sub('>\g<1></', px) xform = prettyXml except Exception: pass if xform: save_xform(app, form, xform) else: raise Exception("You didn't select a form to upload") except Exception, e: if ajax: return HttpResponseBadRequest(unicode(e)) else: messages.error(req, unicode(e)) if should_edit("show_count"): show_count = req.POST['show_count'] form.show_count = True if show_count == "True" else False if should_edit("put_in_root"): put_in_root = req.POST['put_in_root'] form.put_in_root = True if put_in_root == "True" else False if should_edit('form_filter'): form.form_filter = req.POST['form_filter'] if should_edit('post_form_workflow'): form.post_form_workflow = req.POST['post_form_workflow'] if should_edit('auto_gps_capture'): form.auto_gps_capture = req.POST['auto_gps_capture'] == 'true' _handle_media_edits(req, form, should_edit, resp) app.save(resp) if ajax: return HttpResponse(json.dumps(resp)) else: return back_to_main(req, domain, app_id=app_id, unique_form_id=unique_form_id) @no_conflict_require_POST @require_can_edit_apps def rename_language(req, domain, form_unique_id): old_code = req.POST.get('oldCode') new_code = req.POST.get('newCode') try: form, app = Form.get_form(form_unique_id, and_app=True) except ResourceConflict: raise Http404() if app.domain != domain: raise Http404() try: form.rename_xform_language(old_code, new_code) app.save() return HttpResponse(json.dumps({"status": "ok"})) except XFormError as e: response = HttpResponse(json.dumps({'status': 'error', 'message': unicode(e)})) response.status_code = 409 return response @require_GET @login_and_domain_required def validate_language(request, domain, app_id): app = get_app(domain, app_id) term = request.GET.get('term', '').lower() if term in [lang.lower() for lang in app.langs]: return HttpResponse(json.dumps({'match': {"code": term, "name": term}, 'suggestions': []})) else: return HttpResponseRedirect("%s?%s" % (reverse('langcodes.views.validate', args=[]), django_urlencode({'term': term}))) @no_conflict_require_POST @require_can_edit_apps def edit_form_actions(req, domain, app_id, module_id, form_id): app = get_app(domain, app_id) form = app.get_module(module_id).get_form(form_id) form.actions = FormActions.wrap(json.loads(req.POST['actions'])) form.requires = req.POST.get('requires', form.requires) response_json = {} 
app.save(response_json) response_json['propertiesMap'] = get_all_case_properties(app) return json_response(response_json) @no_conflict_require_POST @require_can_edit_apps def edit_careplan_form_actions(req, domain, app_id, module_id, form_id): app = get_app(domain, app_id) form = app.get_module(module_id).get_form(form_id) transaction = json.loads(req.POST.get('transaction')) for question in transaction['fixedQuestions']: setattr(form, question['name'], question['path']) def to_dict(properties): return dict((p['key'], p['path']) for p in properties) form.custom_case_updates = to_dict(transaction['case_properties']) form.case_preload = to_dict(transaction['case_preload']) response_json = {} app.save(response_json) return json_response(response_json) @no_conflict_require_POST @require_can_edit_apps def
(req, domain, app_id, module_id, form_id): app = get_app(domain, app_id) form = app.get_module(module_id).get_form(form_id) json_loads = json.loads(req.POST.get('actions')) actions = AdvancedFormActions.wrap(json_loads) form.actions = actions response_json = {} app.save(response_json) response_json['propertiesMap'] = get_all_case_properties(app) return json_response(response_json) @require_can_edit_apps def multimedia_list_download(req, domain, app_id): app = get_app(domain, app_id) include_audio = req.GET.get("audio", True) include_images = req.GET.get("images", True) strip_jr = req.GET.get("strip_jr", True) filelist = [] for m in app.get_modules(): for f in m.get_forms(): parsed = XForm(f.source) parsed.validate(version=app.application_version) if include_images: filelist.extend(parsed.image_references) if include_audio: filelist.extend(parsed.audio_references) if strip_jr: filelist = [s.replace("jr://file/", "") for s in filelist if s] response = HttpResponse() set_file_download(response, 'list.txt') response.write("\n".join(sorted(set(filelist)))) return response @require_GET @login_and_domain_required def commcare_profile(req, domain, app_id): app = get_app(domain, app_id) return HttpResponse(json.dumps(app.profile)) @no_conflict_require_POST @require_can_edit_apps def edit_commcare_settings(request, domain, app_id): sub_responses = ( edit_commcare_profile(request, domain, app_id), edit_app_attr(request, domain, app_id, 'all'), ) response = {} for sub_response in sub_responses: response.update( json.loads(sub_response.content) ) return json_response(response) @no_conflict_require_POST @require_can_edit_apps def edit_commcare_profile(request, domain, app_id): try: settings = json.loads(request.raw_post_data) except TypeError: return HttpResponseBadRequest(json.dumps({ 'reason': 'POST body must be of the form:' '{"properties": {...}, "features": {...}}' })) app = get_app(domain, app_id) changed = defaultdict(dict) for type in ["features", "properties"]: for name, value in settings.get(type, {}).items(): if type not in app.profile: app.profile[type] = {} app.profile[type][name] = value changed[type][name] = value response_json = {"status": "ok", "changed": changed} app.save(response_json) return json_response(response_json) def validate_langs(request, existing_langs, validate_build=True): o = json.loads(request.raw_post_data) langs = o['langs'] rename = o['rename'] build = o['build'] assert set(rename.keys()).issubset(existing_langs) assert set(rename.values()).issubset(langs) # assert that there are no repeats in the values of rename assert len(set(rename.values())) == len(rename.values()) # assert that no lang is renamed to an already existing lang for old, new in rename.items(): if old != new: assert(new not in existing_langs) # assert that the build langs are in the correct order if validate_build: assert sorted(build, key=lambda lang: langs.index(lang)) == build return (langs, rename, build) @no_conflict_require_POST @require_can_edit_apps def edit_app_langs(request, domain, app_id): """ Called with post body: { langs: ["en", "es", "hin"], rename: { "hi": "hin", "en": "en", "es": "es" }, build: ["es", "hin"] } """ app = get_app(domain, app_id) try: langs, rename, build = validate_langs(request, app.langs) except AssertionError: return HttpResponse(status=400) # now do it for old, new in rename.items(): if old != new: app.rename_lang(old, new) def replace_all(list1, list2): if list1 != list2: while list1: list1.pop() list1.extend(list2) replace_all(app.langs, langs) 
replace_all(app.build_langs, build) app.save() return json_response(langs) @require_can_edit_apps @no_conflict_require_POST def edit_app_translations(request, domain, app_id): params = json_request(request.POST) lang = params.get('lang') translations = params.get('translations') app = get_app(domain, app_id) app.set_translations(lang, translations) response = {} app.save(response) return json_response(response) @require_GET def get_app_translations(request, domain): params = json_request(request.GET) lang = params.get('lang', 'en') key = params.get('key', None) one = params.get('one', False) translations = Translation.get_translations(lang, key, one) if isinstance(translations, dict): translations = {k: v for k, v in translations.items() if not id_strings.is_custom_app_string(k)} return json_response(translations) @no_conflict_require_POST @require_can_edit_apps def delete_app_lang(req, domain, app_id): """ DEPRECATED Called when a language (such as 'zh') is to be deleted from app.langs """ lang_id = int(req.POST['index']) app = get_app(domain, app_id) del app.langs[lang_id] app.save() return back_to_main(req, domain, app_id=app_id) @no_conflict_require_POST @require_can_edit_apps def edit_app_attr(request, domain, app_id, attr): """ Called to edit any (supported) app attribute, given by attr """ app = get_app(domain, app_id) lang = request.COOKIES.get('lang', (app.langs or ['en'])[0]) try: hq_settings = json.loads(request.raw_post_data)['hq'] except ValueError: hq_settings = request.POST attributes = [ 'all', 'recipients', 'name', 'success_message', 'use_commcare_sense', 'text_input', 'platform', 'build_spec', 'show_user_registration', 'use_custom_suite', 'custom_suite', 'admin_password', # Application only 'cloudcare_enabled', 'application_version', 'case_sharing', 'translation_strategy', 'auto_gps_capture', # RemoteApp only 'profile_url', 'manage_urls' ] if attr not in attributes: return HttpResponseBadRequest() def should_edit(attribute): return attribute == attr or ('all' == attr and attribute in hq_settings) resp = {"update": {}} # For either type of app easy_attrs = ( ('application_version', None), ('build_spec', BuildSpec.from_string), ('case_sharing', None), ('cloudcare_enabled', None), ('commtrack_enabled', None), ('commtrack_requisition_mode', lambda m: None if m == 'disabled' else m), ('manage_urls', None), ('name', None), ('platform', None), ('recipients', None), ('text_input', None), ('use_custom_suite', None), ('secure_submissions', None), ('translation_strategy', None), ('auto_gps_capture', None), ) for attribute, transformation in easy_attrs: if should_edit(attribute): value = hq_settings[attribute] if transformation: value = transformation(value) setattr(app, attribute, value) if should_edit("name"): _clear_app_cache(request, domain) name = hq_settings['name'] resp['update'].update({ '.variable-app_name': name, '[data-id="{id}"]'.format(id=app_id): ApplicationsTab.make_app_title(name, app.doc_type), }) if should_edit("success_message"): success_message = hq_settings['success_message'] app.success_message[lang] = success_message if should_edit("build_spec"): resp['update']['commcare-version'] = app.commcare_minor_release if should_edit("admin_password"): admin_password = hq_settings.get('admin_password') if admin_password: app.set_admin_password(admin_password) # For Normal Apps if should_edit("cloudcare_enabled"): if app.get_doc_type() not in ("Application",): raise Exception("App type %s does not support cloudcare" % app.get_doc_type()) try: 
ensure_request_has_privilege(request, privileges.CLOUDCARE) except PermissionDenied: app.cloudcare_enabled = False if should_edit('show_user_registration'): show_user_registration = hq_settings['show_user_registration'] app.show_user_registration = show_user_registration if show_user_registration: # load the form source and also set its unique_id app.get_user_registration() def require_remote_app(): if app.get_doc_type() not in ("RemoteApp",): raise Exception("App type %s does not support profile url" % app.get_doc_type()) # For RemoteApps if should_edit("profile_url"): require_remote_app() app['profile_url'] = hq_settings['profile_url'] if should_edit("manage_urls"): require_remote_app() app.save(resp) # this is a put_attachment, so it has to go after everything is saved if should_edit("custom_suite"): app.set_custom_suite(hq_settings['custom_suite']) return HttpResponse(json.dumps(resp)) @no_conflict_require_POST @require_can_edit_apps def rearrange(req, domain, app_id, key): """ This function handles any request to switch two items in a list. Key tells us the list in question and must be one of 'forms', 'modules', 'detail', or 'langs'. The two POST params 'to' and 'from' give us the indicies of the items to be rearranged. """ app = get_app(domain, app_id) ajax = json.loads(req.POST.get('ajax', 'false')) i, j = (int(x) for x in (req.POST['to'], req.POST['from'])) resp = {} module_id = None try: if "forms" == key: to_module_id = int(req.POST['to_module_id']) from_module_id = int(req.POST['from_module_id']) try: app.rearrange_forms(to_module_id, from_module_id, i, j) except ConflictingCaseTypeError: messages.warning(req, CASE_TYPE_CONFLICT_MSG, extra_tags="html") elif "modules" == key: app.rearrange_modules(i, j) except IncompatibleFormTypeException: messages.error(req, _( 'The form can not be moved into the desired module.' )) return back_to_main(req, domain, app_id=app_id, module_id=module_id) except (RearrangeError, ModuleNotFoundException): messages.error(req, _( 'Oops. ' 'Looks like you got out of sync with us. ' 'The sidebar has been updated, so please try again.' )) return back_to_main(req, domain, app_id=app_id, module_id=module_id) app.save(resp) if ajax: return HttpResponse(json.dumps(resp)) else: return back_to_main(req, domain, app_id=app_id, module_id=module_id) # The following three functions deal with # Saving multiple versions of the same app # i.e. "making builds" @no_conflict_require_POST @require_can_edit_apps def save_copy(req, domain, app_id): """ Saves a copy of the app to a new doc. 
See VersionedDoc.save_copy """ comment = req.POST.get('comment') app = get_app(domain, app_id) errors = app.validate_app() if not errors: try: copy = app.make_build( comment=comment, user_id=req.couch_user.get_id, previous_version=app.get_latest_app(released_only=False) ) copy.save(increment_version=False) finally: # To make a RemoteApp always available for building if app.is_remote_app(): app.save(increment_version=True) else: copy = None copy = copy and SavedAppBuild.wrap(copy.to_json()).to_saved_build_json( report_utils.get_timezone(req.couch_user, domain) ) lang, langs = get_langs(req, app) return json_response({ "saved_app": copy, "error_html": render_to_string('app_manager/partials/build_errors.html', { 'app': get_app(domain, app_id), 'build_errors': errors, 'domain': domain, 'langs': langs, 'lang': lang }), }) def validate_form_for_build(request, domain, app_id, unique_form_id, ajax=True): app = get_app(domain, app_id) try: form = app.get_form(unique_form_id) except FormNotFoundException: # this can happen if you delete the form from another page raise Http404() errors = form.validate_for_build() lang, langs = get_langs(request, app) if ajax and "blank form" in [error.get('type') for error in errors]: response_html = render_to_string('app_manager/partials/create_form_prompt.html') else: response_html = render_to_string('app_manager/partials/build_errors.html', { 'app': app, 'form': form, 'build_errors': errors, 'not_actual_build': True, 'domain': domain, 'langs': langs, 'lang': lang }) if ajax: return json_response({ 'error_html': response_html, }) else: return HttpResponse(response_html) @no_conflict_require_POST @require_can_edit_apps def revert_to_copy(req, domain, app_id): """ Copies a saved doc back to the original. See VersionedDoc.revert_to_copy """ app = get_app(domain, app_id) copy = get_app(domain, req.POST['saved_app']) app = app.make_reversion_to_copy(copy) app.save() messages.success(req, "Successfully reverted to version %s, now at version %s" % (copy.version, app.version)) return back_to_main(req, domain, app_id=app_id) @no_conflict_require_POST @require_can_edit_apps def delete_copy(req, domain, app_id): """ Deletes a saved copy permanently from the database. See VersionedDoc.delete_copy """ app = get_app(domain, app_id) copy = get_app(domain, req.POST['saved_app']) app.delete_copy(copy) return json_response({}) # download_* views are for downloading the files that the application generates # (such as CommCare.jad, suite.xml, profile.xml, etc. BAD_BUILD_MESSAGE = "Sorry: this build is invalid. Try deleting it and rebuilding. If error persists, please contact us at [email protected]" def _download_index_files(request): files = [] if request.app.copy_of: files = [(path[len('files/'):], request.app.fetch_attachment(path)) for path in request.app._attachments if path.startswith('files/')] else: try: files = sorted(request.app.create_all_files().items()) except Exception: messages.error(request, _( "We were unable to get your files " "because your Application has errors. " "Please click <strong>Make New Version</strong> " "under <strong>Deploy</strong> " "for feedback on how to fix these errors." ), extra_tags='html') return files @safe_download def download_index(req, domain, app_id, template="app_manager/download_index.html"): """ A landing page, mostly for debugging, that has links the jad and jar as well as all the resource files that will end up zipped into the jar. 
""" return render(req, template, { 'app': req.app, 'files': _download_index_files(req), }) class DownloadCCZ(DownloadMultimediaZip): name = 'download_ccz' compress_zip = True zip_name = 'commcare.ccz' def check_before_zipping(self): pass def iter_files(self): skip_files = ('profile.xml', 'profile.ccpr', 'media_profile.xml') get_name = lambda f: {'media_profile.ccpr': 'profile.ccpr'}.get(f, f) def _files(): for name, f in _download_index_files(self.request): if name not in skip_files: yield (get_name(name), f.encode('utf-8')) media_files, errors = super(DownloadCCZ, self).iter_files() return itertools.chain(_files(), media_files), errors @safe_download def download_file(req, domain, app_id, path): mimetype_map = { 'ccpr': 'commcare/profile', 'jad': 'text/vnd.sun.j2me.app-descriptor', 'jar': 'application/java-archive', 'xml': 'application/xml', 'txt': 'text/plain', } try: mimetype = mimetype_map[path.split('.')[-1]] except KeyError: mimetype = None response = HttpResponse(mimetype=mimetype) if path in ('CommCare.jad', 'CommCare.jar'): set_file_download(response, path) full_path = path else: full_path = 'files/%s' % path def resolve_path(path): return RegexURLResolver( r'^', 'corehq.apps.app_manager.download_urls').resolve(path) try: assert req.app.copy_of obj = CachedObject('{id}::{path}'.format( id=req.app._id, path=full_path, )) if not obj.is_cached(): payload = req.app.fetch_attachment(full_path) if type(payload) is unicode: payload = payload.encode('utf-8') buffer = StringIO(payload) metadata = {'content_type': mimetype} obj.cache_put(buffer, metadata, timeout=0) else: _, buffer = obj.get() payload = buffer.getvalue() response.write(payload) response['Content-Length'] = len(response.content) return response except (ResourceNotFound, AssertionError): if req.app.copy_of: if req.META.get('HTTP_USER_AGENT') == 'bitlybot': raise Http404() elif path == 'profile.ccpr': # legacy: should patch build to add odk profile # which wasn't made on build for a long time add_odk_profile_after_build(req.app) req.app.save() return download_file(req, domain, app_id, path) else: try: resolve_path(path) except Resolver404: # ok this was just a url that doesn't exist # todo: log since it likely exposes a mobile bug # logging was removed because such a mobile bug existed # and was spamming our emails pass else: # this resource should exist but doesn't logging.error( 'Expected build resource %s not found' % path, extra={'request': req} ) if not req.app.build_broken: req.app.build_broken = True req.app.build_broken_reason = 'incomplete-build' try: req.app.save() except ResourceConflict: # this really isn't a big deal: # It'll get updated next time a resource is req'd; # in fact the conflict is almost certainly from # another thread doing this exact update pass raise Http404() try: callback, callback_args, callback_kwargs = resolve_path(path) except Resolver404: raise Http404() return callback(req, domain, app_id, *callback_args, **callback_kwargs) @safe_download def download_profile(req, domain, app_id): """ See ApplicationBase.create_profile """ return HttpResponse( req.app.create_profile() ) @safe_download def download_media_profile(req, domain, app_id): return HttpResponse( req.app.create_profile(with_media=True) ) def odk_install(req, domain, app_id, with_media=False): app = get_app(domain, app_id) qr_code_view = "odk_qr_code" if not with_media else "odk_media_qr_code" context = { "domain": domain, "app": app, "qr_code": reverse("corehq.apps.app_manager.views.%s" % qr_code_view, args=[domain, app_id]), 
"profile_url": app.odk_profile_display_url if not with_media else app.odk_media_profile_display_url, } return render(req, "app_manager/odk_install.html", context) def odk_qr_code(req, domain, app_id): qr_code = get_app(domain, app_id).get_odk_qr_code() return HttpResponse(qr_code, mimetype="image/png") def odk_media_qr_code(req, domain, app_id): qr_code = get_app(domain, app_id).get_odk_qr_code(with_media=True) return HttpResponse(qr_code, mimetype="image/png") @safe_download def download_odk_profile(req, domain, app_id): """ See ApplicationBase.create_profile """ return HttpResponse( req.app.create_profile(is_odk=True), mimetype="commcare/profile" ) @safe_download def download_odk_media_profile(req, domain, app_id): return HttpResponse( req.app.create_profile(is_odk=True, with_media=True), mimetype="commcare/profile" ) @safe_download def download_suite(req, domain, app_id): """ See Application.create_suite """ return HttpResponse( req.app.create_suite() ) @safe_download def download_media_suite(req, domain, app_id): """ See Application.create_media_suite """ return HttpResponse( req.app.create_media_suite() ) @safe_download def download_app_strings(req, domain, app_id, lang): """ See Application.create_app_strings """ return HttpResponse( req.app.create_app_strings(lang) ) @safe_download def download_xform(req, domain, app_id, module_id, form_id): """ See Application.fetch_xform """ try: return HttpResponse( req.app.fetch_xform(module_id, form_id) ) except (IndexError, ModuleNotFoundException): raise Http404() except AppManagerException: unique_form_id = req.app.get_module(module_id).get_form(form_id).unique_id response = validate_form_for_build(req, domain, app_id, unique_form_id, ajax=False) response.status_code = 404 return response @safe_download def download_user_registration(req, domain, app_id): """See Application.fetch_xform""" return HttpResponse( req.app.get_user_registration().render_xform() ) @safe_download def download_jad(req, domain, app_id): """ See ApplicationBase.create_jadjar """ app = req.app try: jad, _ = app.create_jadjar() except ResourceConflict: return download_jad(req, domain, app_id) try: response = HttpResponse(jad) except Exception: messages.error(req, BAD_BUILD_MESSAGE) return back_to_main(req, domain, app_id=app_id) set_file_download(response, "CommCare.jad") response["Content-Type"] = "text/vnd.sun.j2me.app-descriptor" response["Content-Length"] = len(jad) return response @safe_download def download_jar(req, domain, app_id): """ See ApplicationBase.create_jadjar This is the only view that will actually be called in the process of downloading a complete CommCare.jar build (i.e. over the air to a phone). 
""" response = HttpResponse(mimetype="application/java-archive") app = req.app _, jar = app.create_jadjar() set_file_download(response, 'CommCare.jar') response['Content-Length'] = len(jar) try: response.write(jar) except Exception: messages.error(req, BAD_BUILD_MESSAGE) return back_to_main(req, domain, app_id=app_id) return response def download_test_jar(request): with open(os.path.join(os.path.dirname(__file__), 'static', 'app_manager', 'CommCare.jar')) as f: jar = f.read() response = HttpResponse(mimetype="application/java-archive") set_file_download(response, "CommCare.jar") response['Content-Length'] = len(jar) response.write(jar) return response @safe_download def download_raw_jar(req, domain, app_id): """ See ApplicationBase.fetch_jar """ response = HttpResponse( req.app.fetch_jar() ) response['Content-Type'] = "application/java-archive" return response def emulator_page(req, domain, app_id, template): copied_app = app = get_app(domain, app_id) if app.copy_of: app = get_app(domain, app.copy_of) # Coupled URL -- Sorry! build_path = "/builds/{version}/{build_number}/Generic/WebDemo/".format( **copied_app.get_preview_build()._doc ) return render(req, template, { 'domain': domain, 'app': app, 'build_path': build_path, 'url_base': get_url_base() }) @require_can_edit_apps def emulator(req, domain, app_id, template="app_manager/emulator.html"): return emulator_page(req, domain, app_id, template) def emulator_handler(req, domain, app_id): exchange = req.GET.get("exchange", '') if exchange: return emulator_page(req, domain, app_id, template="app_manager/exchange_emulator.html") else: return emulator(req, domain, app_id) def emulator_commcare_jar(req, domain, app_id): response = HttpResponse( get_app(domain, app_id).fetch_emulator_commcare_jar() ) response['Content-Type'] = "application/java-archive" return response @require_can_edit_apps def formdefs(request, domain, app_id): langs = [json.loads(request.GET.get('lang', '"en"'))] format = request.GET.get('format', 'json') app = get_app(domain, app_id) def get_questions(form): xform = XForm(form.source) prefix = '/%s/' % xform.data_node.tag_name def remove_prefix(string): if string.startswith(prefix): return string[len(prefix):] else: raise Exception() def transform_question(q): return { 'id': remove_prefix(q['value']), 'type': q['tag'], 'text': q['label'] if q['tag'] != 'hidden' else '' } return [transform_question(q) for q in xform.get_questions(langs)] formdefs = [{ 'name': "%s, %s" % (f['form'].get_module().name['en'], f['form'].name['en']) if f['type'] == 'module_form' else 'User Registration', 'columns': ['id', 'type', 'text'], 'rows': get_questions(f['form']) } for f in app.get_forms(bare=False)] if format == 'xlsx': f = StringIO() writer = Excel2007ExportWriter() writer.open([(sheet['name'], [FormattedRow(sheet['columns'])]) for sheet in formdefs], f) writer.write([( sheet['name'], [FormattedRow([cell for (_, cell) in sorted(row.items(), key=lambda item: sheet['columns'].index(item[0]))]) for row in sheet['rows']] ) for sheet in formdefs]) writer.close() response = HttpResponse(f.getvalue(), mimetype=Format.from_format('xlsx').mimetype) set_file_download(response, 'formdefs.xlsx') return response else: return json_response(formdefs) def _questions_for_form(request, form, langs): class FakeMessages(object): def __init__(self): self.messages = defaultdict(list) def add_message(self, type, message): self.messages[type].append(message) def error(self, request, message, *args, **kwargs): self.add_message('error', message) def warning(self, 
request, message, *args, **kwargs): self.add_message('warning', message) m = FakeMessages() _, context = get_form_view_context_and_template(request, form, langs, None, messages=m) xform_questions = context['xform_questions'] return xform_questions, m.messages def _find_name(names, langs): name = None for lang in langs: if lang in names: name = names[lang] break if name is None: lang = names.keys()[0] name = names[lang] return name @require_can_edit_apps def app_summary(request, domain, app_id): return summary(request, domain, app_id, should_edit=True) def app_summary_from_exchange(request, domain, app_id): dom = Domain.get_by_name(domain) if dom.is_snapshot: return summary(request, domain, app_id, should_edit=False) else: return HttpResponseForbidden() def summary(request, domain, app_id, should_edit=True): app = get_app(domain, app_id) if app.doc_type == 'RemoteApp': raise Http404() context = get_apps_base_context(request, domain, app) langs = context['langs'] modules = [] for module in app.get_modules(): forms = [] for form in module.get_forms(): questions, messages = _questions_for_form(request, form, langs) forms.append({'name': _find_name(form.name, langs), 'questions': questions, 'messages': dict(messages)}) modules.append({'name': _find_name(module.name, langs), 'forms': forms}) context['modules'] = modules context['summary'] = True if should_edit: return render(request, "app_manager/summary.html", context) else: return render(request, "app_manager/exchange_summary.html", context) @require_can_edit_apps def download_translations(request, domain, app_id): app = get_app(domain, app_id) properties = tuple(["property"] + app.langs + ["default"]) temp = StringIO() headers = (("translations", properties),) row_dict = {} for i, lang in enumerate(app.langs): index = i + 1 trans_dict = app.translations.get(lang, {}) for prop, trans in trans_dict.iteritems(): if prop not in row_dict: row_dict[prop] = [prop] num_to_fill = index - len(row_dict[prop]) row_dict[prop].extend(["" for i in range(num_to_fill)] if num_to_fill > 0 else []) row_dict[prop].append(trans) rows = row_dict.values() all_prop_trans = dict(st_trans.DEFAULT + st_trans.CC_DEFAULT + st_trans.CCODK_DEFAULT + st_trans.ODKCOLLECT_DEFAULT) all_prop_trans = dict((k.lower(), v) for k, v in all_prop_trans.iteritems()) rows.extend([[t] for t in sorted(all_prop_trans.keys()) if t not in [k.lower() for k in row_dict]]) def fillrow(row): num_to_fill = len(properties) - len(row) row.extend(["" for i in range(num_to_fill)] if num_to_fill > 0 else []) return row def add_default(row): row[-1] = all_prop_trans.get(row[0].lower(), "") return row rows = [add_default(fillrow(row)) for row in rows] data = (("translations", tuple(rows)),) export_raw(headers, data, temp) return export_response(temp, Format.XLS_2007, "translations") @no_conflict_require_POST @require_can_edit_apps @get_file("bulk_upload_file") def upload_translations(request, domain, app_id): success = False try: workbook = WorkbookJSONReader(request.file) translations = workbook.get_worksheet(title='translations') app = get_app(domain, app_id) trans_dict = defaultdict(dict) error_properties = [] for row in translations: for lang in app.langs: if row.get(lang): all_parameters = re.findall("\$.*?}", row[lang]) for param in all_parameters: if not re.match("\$\{[0-9]+}", param): error_properties.append(row["property"] + ' - ' + row[lang]) trans_dict[lang].update({row["property"]: row[lang]}) if error_properties: message = _("We found problem with following translations:") message += "<br>" 
for prop in error_properties: message += "<li>%s</li>" % prop messages.error(request, message, extra_tags='html') else: app.translations = dict(trans_dict) app.save() success = True except Exception: notify_exception(request, 'Bulk Upload Translations Error') messages.error(request, _("Something went wrong! Update failed. We're looking into it")) if success: messages.success(request, _("UI Translations Updated!")) return HttpResponseRedirect(reverse('app_languages', args=[domain, app_id])) common_module_validations = [ (lambda app: app.application_version == APP_V1, _('Please upgrade you app to > 2.0 in order to add a Careplan module')) ] FN = 'fn' VALIDATIONS = 'validations' MODULE_TYPE_MAP = { 'careplan': { FN: _new_careplan_module, VALIDATIONS: common_module_validations + [ (lambda app: app.has_careplan_module, _('This application already has a Careplan module')) ] }, 'advanced': { FN: _new_advanced_module, VALIDATIONS: common_module_validations } }
edit_advanced_form_actions
tooltip.go
package main import ( "image/color" "strings" "github.com/Terisback/ludok" ) const ( letterWidth = 8 lineHeight = 10 ) type Tooltip struct { padding int elements []string textColor color.RGBA bgColor color.RGBA textMaxLength int Width int Height int } func NewTooltip(padding int, textColor color.RGBA, backgroundColor color.RGBA) *Tooltip
func (t *Tooltip) Update(value string) { t.elements = strings.Split(value, "\n") t.textMaxLength = 0 for _, v := range t.elements { length := len(v) if length >= t.textMaxLength { t.textMaxLength = length } } t.Width = (t.textMaxLength * letterWidth) + t.padding*2 t.Height = (len(t.elements) * lineHeight) + t.padding*2 - 4 } func (t *Tooltip) Draw(x, y int) { ludok.Graphics.Box(x, y, x+t.Width, y+t.Height, t.bgColor) for i, v := range t.elements { ludok.Graphics.Print(x+t.padding, y+t.padding+i*lineHeight, v, t.textColor) } }
{ return &Tooltip{padding: padding, textColor: textColor, bgColor: backgroundColor} }
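A minimal usage sketch for the tooltip type above, assuming it sits in the same package as tooltip.go and is driven from a ludok draw callback. Only NewTooltip, Update, and Draw come from the file itself; the showItemTooltip helper, the item text, and the colours are illustrative placeholders.

// Hypothetical helper showing how the Tooltip above would typically be wired up.
func showItemTooltip(mouseX, mouseY int) {
	white := color.RGBA{R: 255, G: 255, B: 255, A: 255}
	grey := color.RGBA{R: 40, G: 40, B: 40, A: 200}

	// 4px of padding around the text block.
	tip := NewTooltip(4, white, grey)

	// Update splits on "\n" and recomputes Width/Height from the longest line.
	tip.Update("Iron Sword\nDamage: 12\nWeight: 3")

	// Draw is meant to be called from the ludok draw callback; the box spans
	// tip.Width x tip.Height starting at the given top-left corner.
	tip.Draw(mouseX+8, mouseY+8)
}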
drawable.rs
use crate::shader::{InstancingMode, ToUniforms}; use crate::DrawError; pub trait Drawable<I, V>
/// The instancing mode supported by this `Drawable`. /// /// We use this to check that the supplied `glium::Program` is compatible /// with the `Drawable`. const INSTANCING_MODE: InstancingMode; fn draw<U, S>( &self, program: &glium::Program, uniforms: &U, draw_params: &glium::DrawParameters, target: &mut S, ) -> Result<(), DrawError> where U: ToUniforms, S: glium::Surface; }
where V: glium::vertex::Vertex, {
withings.py
from .oauth import BaseOAuth1 class WithingsOAuth(BaseOAuth1): name = 'withings' AUTHORIZATION_URL = 'https://oauth.withings.com/account/authorize' REQUEST_TOKEN_URL = 'https://oauth.withings.com/account/request_token' ACCESS_TOKEN_URL = 'https://oauth.withings.com/account/access_token' ID_KEY = 'userid' def
(self, response): """Return user details from Withings account""" return {'userid': response['access_token']['userid'], 'email': ''}
get_user_details
weighted.go
package weightedQuickUnion import ( "fmt" ) type WeightedQuickUnionUF struct { size int arr []int arrSize []int sets int setMap map[int][]int } func NewWeightedQuickUnionUF(n int) *WeightedQuickUnionUF { uf := &WeightedQuickUnionUF{ size: n, sets: n, }
uf.arr = make([]int, n) uf.arrSize = make([]int, n) uf.setMap = make(map[int][]int, n) for i := 0; i < n; i++ { uf.arr[i] = i uf.arrSize[i] = 1 uf.setMap[i] = []int{i} //append(uf.setMap[i], i) } return uf } func (w *WeightedQuickUnionUF) Union(p, q int) { rootp := w.root(p) rootq := w.root(q) if rootp == rootq { return } // fmt.Println("|Items: ", p, q, "|Arr: ", w.arr[p], w.arr[q]) // fmt.Println("|Roots: ", rootp, rootq, "|ArrSize: ", w.arrSize[rootp], w.arrSize[rootq]) // fmt.Println("1: ", w.setMap[rootp], w.setMap[rootq], w.arr) if w.arrSize[rootp] >= w.arrSize[rootq] { w.arr[rootq] = rootp w.arrSize[rootp] += w.arrSize[rootq] w.setMap[rootp] = append(w.setMap[rootp], w.setMap[rootq]...) delete(w.setMap, rootq) } else { w.arr[rootp] = rootq w.arrSize[rootq] += w.arrSize[rootp] w.setMap[rootq] = append(w.setMap[rootq], w.setMap[rootp]...) delete(w.setMap, rootp) } // fmt.Println("2: ", w.setMap[rootp], w.setMap[rootq], w.arr) // fmt.Println("|Sets: ", w.setMap) w.sets-- } func (w *WeightedQuickUnionUF) Find(p int) int { rootp := w.root(p) max := rootp for _, v := range w.setMap[rootp] { if v > max { max = v } } return max } func (w *WeightedQuickUnionUF) Connected(p, q int) bool { return w.root(p) == w.root(q) } func (w *WeightedQuickUnionUF) Count() int { return w.sets } func (w *WeightedQuickUnionUF) LogData() string { str := fmt.Sprintln("|Arr: ", w.arr, "|ArrSize", w.arrSize, "\n|Sets:", w.setMap) return str } func (w *WeightedQuickUnionUF) root(node int) int { t := node for { if t == w.arr[t] { break } w.arr[t] = w.arr[w.arr[t]] t = w.arr[t] } return t }
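A small sketch of how this union-find type is meant to be driven, assuming it lives alongside weighted.go in the same package. Only the constructor and methods defined above are used; the element values and the ExampleUsage wrapper are arbitrary.

package weightedQuickUnion

import "fmt"

// ExampleUsage sketches the typical union-find workflow: start with n
// singleton sets, merge a few of them, then query connectivity.
func ExampleUsage() {
	uf := NewWeightedQuickUnionUF(10) // elements 0..9, each in its own set

	uf.Union(1, 2)
	uf.Union(2, 7)
	uf.Union(3, 4)

	fmt.Println(uf.Connected(1, 7)) // true  - 1 and 7 share a root via 2
	fmt.Println(uf.Connected(1, 3)) // false - different trees
	fmt.Println(uf.Count())         // 7 sets remain (10 minus 3 successful unions)
	fmt.Println(uf.Find(1))         // largest element in 1's set, i.e. 7
}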
testing.go
// Licensed to Elasticsearch B.V. under one or more contributor // license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright // ownership. Elasticsearch B.V. licenses this file to you under // the Apache License, Version 2.0 (the "License"); you may // not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. //go:build !integration // +build !integration package elasticsearch import ( "io/ioutil" "path/filepath" "testing" "github.com/stretchr/testify/require" "github.com/elastic/beats/v7/libbeat/common" "github.com/elastic/beats/v7/metricbeat/helper" "github.com/elastic/beats/v7/metricbeat/mb" mbtest "github.com/elastic/beats/v7/metricbeat/mb/testing" ) // TestMapper tests mapping methods func TestMapper(t *testing.T, glob string, mapper func(mb.ReporterV2, []byte) error) { files, err := filepath.Glob(glob) require.NoError(t, err) // Makes sure glob matches at least 1 file require.True(t, len(files) > 0) for _, f := range files { t.Run(f, func(t *testing.T) { input, err := ioutil.ReadFile(f) require.NoError(t, err) reporter := &mbtest.CapturingReporterV2{} err = mapper(reporter, input) require.NoError(t, err) require.True(t, len(reporter.GetEvents()) >= 1) require.Equal(t, 0, len(reporter.GetErrors())) }) } } // TestMapperWithInfo tests mapping methods with Info fields func
(t *testing.T, glob string, mapper func(mb.ReporterV2, Info, []byte) error) { files, err := filepath.Glob(glob) require.NoError(t, err) // Makes sure glob matches at least 1 file require.True(t, len(files) > 0) info := Info{ ClusterID: "1234", ClusterName: "helloworld", } for _, f := range files { t.Run(f, func(t *testing.T) { input, err := ioutil.ReadFile(f) require.NoError(t, err) reporter := &mbtest.CapturingReporterV2{} err = mapper(reporter, info, input) require.NoError(t, err) require.True(t, len(reporter.GetEvents()) >= 1) require.Equal(t, 0, len(reporter.GetErrors())) }) } } // TestMapperWithMetricSetAndInfo tests mapping methods with Info fields func TestMapperWithMetricSetAndInfo(t *testing.T, glob string, ms MetricSetAPI, mapper func(mb.ReporterV2, MetricSetAPI, Info, []byte) error) { files, err := filepath.Glob(glob) require.NoError(t, err) // Makes sure glob matches at least 1 file require.True(t, len(files) > 0) info := Info{ ClusterID: "1234", ClusterName: "helloworld", } for _, f := range files { t.Run(f, func(t *testing.T) { input, err := ioutil.ReadFile(f) require.NoError(t, err) reporter := &mbtest.CapturingReporterV2{} err = mapper(reporter, ms, info, input) require.NoError(t, err) require.True(t, len(reporter.GetEvents()) >= 1) require.Equal(t, 0, len(reporter.GetErrors())) }) } } // TestMapperWithMetricSetAndInfo tests mapping methods with Info fields func TestMapperWithHttpHelper(t *testing.T, glob string, httpClient *helper.HTTP, mapper func(mb.ReporterV2, *helper.HTTP, Info, []byte) error) { files, err := filepath.Glob(glob) require.NoError(t, err) // Makes sure glob matches at least 1 file require.True(t, len(files) > 0) info := Info{ ClusterID: "1234", ClusterName: "helloworld", Version: Version{Number: &common.Version{ Major: 7, Minor: 6, Bugfix: 0, }}, } for _, f := range files { t.Run(f, func(t *testing.T) { input, err := ioutil.ReadFile(f) require.NoError(t, err) reporter := &mbtest.CapturingReporterV2{} err = mapper(reporter, httpClient, info, input) require.NoError(t, err) require.True(t, len(reporter.GetEvents()) >= 1) require.Equal(t, 0, len(reporter.GetErrors())) }) } }
TestMapperWithInfo
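A sketch of how the TestMapper helper above is typically invoked from a metricset test in the same package. The glob path and the trivial mapper are hypothetical stand-ins; only TestMapper itself, mb.ReporterV2, mb.Event, and common.MapStr come from the code above and the beats framework, and a real metricset would point at its own fixture files and eventMapping function.

package elasticsearch

import (
	"testing"

	"github.com/elastic/beats/v7/libbeat/common"
	"github.com/elastic/beats/v7/metricbeat/mb"
)

// TestDummyMapping exercises TestMapper with a placeholder mapper that turns
// each fixture file into a single event. Note TestMapper requires the glob to
// match at least one file and the mapper to report at least one event, so this
// only passes once real fixtures exist at the given path.
func TestDummyMapping(t *testing.T) {
	mapper := func(r mb.ReporterV2, content []byte) error {
		r.Event(mb.Event{MetricSetFields: common.MapStr{"raw_len": len(content)}})
		return nil
	}
	TestMapper(t, "./testdata/*.json", mapper)
}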
stream.rs
use std::io::{Cursor, Error as IoError, ErrorKind, Read, Result as IoResult, Write}; use std::net::SocketAddr; use std::net::TcpStream; use std::net::ToSocketAddrs; use std::time::Duration; use std::time::Instant; use chunked_transfer::Decoder as ChunkDecoder; #[cfg(feature = "tls")] use rustls::ClientSession; #[cfg(feature = "tls")] use rustls::StreamOwned; #[cfg(feature = "socks-proxy")] use socks::{TargetAddr, ToTargetAddr}; #[cfg(feature = "native-tls")] use native_tls::{HandshakeError, TlsConnector, TlsStream}; use crate::proxy::Proto; use crate::proxy::Proxy; use crate::error::Error; use crate::unit::Unit; #[allow(clippy::large_enum_variant)] pub enum Stream { Http(TcpStream), #[cfg(all(feature = "tls", not(feature = "native-tls")))] Https(rustls::StreamOwned<rustls::ClientSession, TcpStream>), #[cfg(all(feature = "native-tls", not(feature = "tls")))] Https(TlsStream<TcpStream>), Cursor(Cursor<Vec<u8>>), #[cfg(test)] Test(Box<dyn Read + Send>, Vec<u8>), } // DeadlineStream wraps a stream such that read() will return an error // after the provided deadline, and sets timeouts on the underlying // TcpStream to ensure read() doesn't block beyond the deadline. // When the From trait is used to turn a DeadlineStream back into a // Stream (by PoolReturningRead), the timeouts are removed. pub struct DeadlineStream { stream: Stream, deadline: Option<Instant>, } impl DeadlineStream { pub(crate) fn new(stream: Stream, deadline: Option<Instant>) -> Self { DeadlineStream { stream, deadline } } } impl From<DeadlineStream> for Stream { fn from(deadline_stream: DeadlineStream) -> Stream { // Since we are turning this back into a regular, non-deadline Stream, // remove any timeouts we set. let stream = deadline_stream.stream; if let Some(socket) = stream.socket() { socket.set_read_timeout(None).unwrap(); socket.set_write_timeout(None).unwrap(); } stream } } impl Read for DeadlineStream { fn read(&mut self, buf: &mut [u8]) -> IoResult<usize> { if let Some(deadline) = self.deadline { let timeout = time_until_deadline(deadline)?; if let Some(socket) = self.stream.socket() { socket.set_read_timeout(Some(timeout))?; socket.set_write_timeout(Some(timeout))?; } } self.stream.read(buf) } } // If the deadline is in the future, return the remaining time until // then. Otherwise return a TimedOut error. fn time_until_deadline(deadline: Instant) -> IoResult<Duration> { let now = Instant::now(); match now.checked_duration_since(deadline) { Some(_) => Err(IoError::new( ErrorKind::TimedOut, "timed out reading response", )), None => Ok(deadline - now), } } impl ::std::fmt::Debug for Stream { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> { write!( f, "Stream[{}]", match self { Stream::Http(_) => "http", #[cfg(any( all(feature = "tls", not(feature = "native-tls")), all(feature = "native-tls", not(feature = "tls")), ))] Stream::Https(_) => "https", Stream::Cursor(_) => "cursor", #[cfg(test)] Stream::Test(_, _) => "test", } ) } } impl Stream { // Check if the server has closed a stream by performing a one-byte // non-blocking read. If this returns EOF, the server has closed the // connection: return true. If this returns WouldBlock (aka EAGAIN), // that means the connection is still open: return false. Otherwise // return an error. 
fn serverclosed_stream(stream: &std::net::TcpStream) -> IoResult<bool> { let mut buf = [0; 1]; stream.set_nonblocking(true)?; let result = match stream.peek(&mut buf) { Ok(0) => Ok(true), Ok(_) => Ok(false), // TODO: Maybe this should produce an "unexpected response" error Err(e) if e.kind() == ErrorKind::WouldBlock => Ok(false), Err(e) => Err(e), }; stream.set_nonblocking(false)?; result } // Return true if the server has closed this connection. pub(crate) fn server_closed(&self) -> IoResult<bool> { match self.socket() { Some(socket) => Stream::serverclosed_stream(socket), None => Ok(false), } } pub fn is_poolable(&self) -> bool { match self { Stream::Http(_) => true, #[cfg(any( all(feature = "tls", not(feature = "native-tls")), all(feature = "native-tls", not(feature = "tls")), ))] Stream::Https(_) => true, _ => false, } } pub(crate) fn socket(&self) -> Option<&TcpStream> { match self { Stream::Http(tcpstream) => Some(tcpstream), #[cfg(feature = "tls")] Stream::Https(rustls_stream) => Some(&rustls_stream.sock), _ => None, } } #[cfg(test)] pub fn to_write_vec(&self) -> Vec<u8> { match self { Stream::Test(_, writer) => writer.clone(), _ => panic!("to_write_vec on non Test stream"), } } } impl Read for Stream { fn read(&mut self, buf: &mut [u8]) -> IoResult<usize> { match self { Stream::Http(sock) => sock.read(buf), #[cfg(any( all(feature = "tls", not(feature = "native-tls")), all(feature = "native-tls", not(feature = "tls")), ))] Stream::Https(stream) => read_https(stream, buf), Stream::Cursor(read) => read.read(buf), #[cfg(test)] Stream::Test(reader, _) => reader.read(buf), } } } impl<R: Read> From<ChunkDecoder<R>> for Stream where R: Read, Stream: From<R>, { fn from(chunk_decoder: ChunkDecoder<R>) -> Stream { chunk_decoder.into_inner().into() } } #[cfg(all(feature = "tls", not(feature = "native-tls")))] fn read_https( stream: &mut StreamOwned<ClientSession, TcpStream>, buf: &mut [u8], ) -> IoResult<usize> { match stream.read(buf) { Ok(size) => Ok(size), Err(ref e) if is_close_notify(e) => Ok(0), Err(e) => Err(e), } } #[cfg(all(feature = "native-tls", not(feature = "tls")))] fn read_https(stream: &mut TlsStream<TcpStream>, buf: &mut [u8]) -> IoResult<usize> { match stream.read(buf) { Ok(size) => Ok(size), Err(ref e) if is_close_notify(e) => Ok(0), Err(e) => Err(e), } } #[allow(deprecated)] #[cfg(any(feature = "tls", feature = "native-tls"))] fn is_close_notify(e: &std::io::Error) -> bool { if e.kind() != ErrorKind::ConnectionAborted { return false; } if let Some(msg) = e.get_ref() { // :( return msg.description().contains("CloseNotify"); } false } impl Write for Stream { fn write(&mut self, buf: &[u8]) -> IoResult<usize> { match self { Stream::Http(sock) => sock.write(buf), #[cfg(any( all(feature = "tls", not(feature = "native-tls")), all(feature = "native-tls", not(feature = "tls")),
Stream::Cursor(_) => panic!("Write to read only stream"), #[cfg(test)] Stream::Test(_, writer) => writer.write(buf), } } fn flush(&mut self) -> IoResult<()> { match self { Stream::Http(sock) => sock.flush(), #[cfg(any( all(feature = "tls", not(feature = "native-tls")), all(feature = "native-tls", not(feature = "tls")), ))] Stream::Https(stream) => stream.flush(), Stream::Cursor(_) => panic!("Flush read only stream"), #[cfg(test)] Stream::Test(_, writer) => writer.flush(), } } } pub(crate) fn connect_http(unit: &Unit) -> Result<Stream, Error> { // let hostname = unit.url.host_str().unwrap(); let port = unit.url.port().unwrap_or(80); connect_host(unit, hostname, port).map(Stream::Http) } #[cfg(all(feature = "tls", feature = "native-certs"))] fn configure_certs(config: &mut rustls::ClientConfig) { config.root_store = rustls_native_certs::load_native_certs().expect("Could not load patform certs"); } #[cfg(all(feature = "tls", not(feature = "native-certs")))] fn configure_certs(config: &mut rustls::ClientConfig) { config .root_store .add_server_trust_anchors(&webpki_roots::TLS_SERVER_ROOTS); } #[cfg(all(feature = "tls", not(feature = "native-tls")))] pub(crate) fn connect_https(unit: &Unit) -> Result<Stream, Error> { use lazy_static::lazy_static; use std::sync::Arc; lazy_static! { static ref TLS_CONF: Arc<rustls::ClientConfig> = { let mut config = rustls::ClientConfig::new(); configure_certs(&mut config); Arc::new(config) }; } let hostname = unit.url.host_str().unwrap(); let port = unit.url.port().unwrap_or(443); let sni = webpki::DNSNameRef::try_from_ascii_str(hostname) .map_err(|err| Error::DnsFailed(err.to_string()))?; let tls_conf: &Arc<rustls::ClientConfig> = unit.tls_config.as_ref().map(|c| &c.0).unwrap_or(&*TLS_CONF); let sess = rustls::ClientSession::new(&tls_conf, sni); let sock = connect_host(unit, hostname, port)?; let stream = rustls::StreamOwned::new(sess, sock); Ok(Stream::Https(stream)) } #[cfg(all(feature = "native-tls", not(feature = "tls")))] pub(crate) fn connect_https(unit: &Unit) -> Result<Stream, Error> { let hostname = unit.url.host_str().unwrap(); let port = unit.url.port().unwrap_or(443); let sock = connect_host(unit, hostname, port)?; let tls_connector = TlsConnector::new().map_err(|e| Error::TlsError(e))?; let stream = tls_connector.connect(hostname, sock).map_err(|e| match e { HandshakeError::Failure(err) => Error::TlsError(err), _ => Error::BadStatusRead, })?; Ok(Stream::Https(stream)) } pub(crate) fn connect_host(unit: &Unit, hostname: &str, port: u16) -> Result<TcpStream, Error> { let deadline: Option<Instant> = if unit.timeout_connect > 0 { Instant::now().checked_add(Duration::from_millis(unit.timeout_connect)) } else { unit.deadline }; // TODO: Find a way to apply deadline to DNS lookup. let sock_addrs: Vec<SocketAddr> = match unit.proxy { Some(ref proxy) => format!("{}:{}", proxy.server, proxy.port), None => format!("{}:{}", hostname, port), } .to_socket_addrs() .map_err(|e| Error::DnsFailed(format!("{}", e)))? .collect(); if sock_addrs.is_empty() { return Err(Error::DnsFailed(format!("No ip address for {}", hostname))); } let proto = if let Some(ref proxy) = unit.proxy { Some(proxy.proto) } else { None }; let mut any_err = None; let mut any_stream = None; // Find the first sock_addr that accepts a connection for sock_addr in sock_addrs { // ensure connect timeout or overall timeout aren't yet hit. let timeout = match deadline { Some(deadline) => Some(time_until_deadline(deadline)?), None => None, }; // connect with a configured timeout. 
let stream = if Some(Proto::SOCKS5) == proto { connect_socks5( unit.proxy.to_owned().unwrap(), deadline, sock_addr, hostname, port, ) } else if let Some(timeout) = timeout { TcpStream::connect_timeout(&sock_addr, timeout) } else { TcpStream::connect(&sock_addr) }; if let Ok(stream) = stream { any_stream = Some(stream); break; } else if let Err(err) = stream { any_err = Some(err); } } let mut stream = if let Some(stream) = any_stream { stream } else { let err = Error::ConnectionFailed(format!("{}", any_err.expect("Connect error"))); return Err(err); }; // rust's absurd api returns Err if we set 0. // Setting it to None will disable the native system timeout if let Some(deadline) = deadline { stream .set_read_timeout(Some(deadline - Instant::now())) .ok(); } else if unit.timeout_read > 0 { stream .set_read_timeout(Some(Duration::from_millis(unit.timeout_read as u64))) .ok(); } else { stream.set_read_timeout(None).ok(); } if let Some(deadline) = deadline { stream .set_write_timeout(Some(deadline - Instant::now())) .ok(); } else if unit.timeout_write > 0 { stream .set_write_timeout(Some(Duration::from_millis(unit.timeout_write as u64))) .ok(); } else { stream.set_write_timeout(None).ok(); } if proto == Some(Proto::HTTPConnect) { if let Some(ref proxy) = unit.proxy { write!(stream, "{}", proxy.connect(hostname, port)).unwrap(); stream.flush()?; let mut proxy_response = Vec::new(); loop { let mut buf = vec![0; 256]; let total = stream.read(&mut buf)?; proxy_response.append(&mut buf); if total < 256 { break; } } Proxy::verify_response(&proxy_response)?; } } Ok(stream) } #[cfg(feature = "socks-proxy")] fn socks5_local_nslookup(hostname: &str, port: u16) -> Result<TargetAddr, std::io::Error> { let addrs: Vec<SocketAddr> = format!("{}:{}", hostname, port) .to_socket_addrs() .map_err(|e| std::io::Error::new(ErrorKind::NotFound, format!("DNS failure: {}.", e)))? .collect(); if addrs.is_empty() { return Err(std::io::Error::new( ErrorKind::NotFound, "DNS failure: no socket addrs found.", )); } match addrs[0].to_target_addr() { Ok(addr) => Ok(addr), Err(err) => { return Err(std::io::Error::new( ErrorKind::NotFound, format!("DNS failure: {}.", err), )) } } } #[cfg(feature = "socks-proxy")] fn connect_socks5( proxy: Proxy, deadline: Option<time::Instant>, proxy_addr: SocketAddr, host: &str, port: u16, ) -> Result<TcpStream, std::io::Error> { use socks::TargetAddr::Domain; use std::net::{Ipv4Addr, Ipv6Addr}; use std::str::FromStr; let host_addr = if Ipv4Addr::from_str(host).is_ok() || Ipv6Addr::from_str(host).is_ok() { match socks5_local_nslookup(host, port) { Ok(addr) => addr, Err(err) => return Err(err), } } else { Domain(String::from(host), port) }; // Since Socks5Stream doesn't support set_read_timeout, a suboptimal one is implemented via // thread::spawn. // # Happy Path // 1) thread spawns 2) get_socks5_stream returns ok 3) tx sends result ok // 4) slave_signal signals done and cvar notifies master_signal 5) cvar.wait_timeout receives the done signal // 6) rx receives the socks5 stream and the function exists // # Sad path // 1) get_socks5_stream hangs 2)slave_signal does not send done notification 3) cvar.wait_timeout times out // 3) an exception is thrown. // # Defects // 1) In the event of a timeout, a thread may be left running in the background. // TODO: explore supporting timeouts upstream in Socks5Proxy. 
#[allow(clippy::mutex_atomic)] let stream = if let Some(deadline) = deadline { use std::sync::mpsc::channel; use std::sync::{Arc, Condvar, Mutex}; use std::thread; let master_signal = Arc::new((Mutex::new(false), Condvar::new())); let slave_signal = master_signal.clone(); let (tx, rx) = channel(); thread::spawn(move || { let (lock, cvar) = &*slave_signal; if tx // try to get a socks5 stream and send it to the parent thread's rx .send(get_socks5_stream(&proxy, &proxy_addr, host_addr)) .is_ok() { // if sending the stream has succeeded we need to notify the parent thread let mut done = lock.lock().unwrap(); // set the done signal to true *done = true; // notify the parent thread cvar.notify_one(); } }); let (lock, cvar) = &*master_signal; let done = lock.lock().unwrap(); let done_result = cvar.wait_timeout(done, deadline - Instant::now()).unwrap(); let done = done_result.0; if *done { rx.recv().unwrap()? } else { return Err(std::io::Error::new( ErrorKind::TimedOut, format!( "SOCKS5 proxy: {}:{} timed out connecting after {}ms.", host, port, timeout_connect ), )); } } else { get_socks5_stream(&proxy, &proxy_addr, host_addr)? }; Ok(stream) } #[cfg(feature = "socks-proxy")] fn get_socks5_stream( proxy: &Proxy, proxy_addr: &SocketAddr, host_addr: TargetAddr, ) -> Result<TcpStream, std::io::Error> { use socks::Socks5Stream; if proxy.use_authorization() { let stream = Socks5Stream::connect_with_password( proxy_addr, host_addr, &proxy.user.as_ref().unwrap(), &proxy.password.as_ref().unwrap(), )? .into_inner(); Ok(stream) } else { match Socks5Stream::connect(proxy_addr, host_addr) { Ok(socks_stream) => Ok(socks_stream.into_inner()), Err(err) => Err(err), } } } #[cfg(not(feature = "socks-proxy"))] fn connect_socks5( _proxy: Proxy, _deadline: Option<Instant>, _proxy_addr: SocketAddr, _hostname: &str, _port: u16, ) -> Result<TcpStream, std::io::Error> { Err(std::io::Error::new( ErrorKind::Other, "SOCKS5 feature disabled.", )) } #[cfg(test)] pub(crate) fn connect_test(unit: &Unit) -> Result<Stream, Error> { use crate::test; test::resolve_handler(unit) } #[cfg(not(test))] pub(crate) fn connect_test(unit: &Unit) -> Result<Stream, Error> { Err(Error::UnknownScheme(unit.url.scheme().to_string())) } #[cfg(not(any(feature = "tls", feature = "native-tls")))] pub(crate) fn connect_https(unit: &Unit) -> Result<Stream, Error> { Err(Error::UnknownScheme(unit.url.scheme().to_string())) }
))] Stream::Https(stream) => stream.write(buf),
lint-non-camel-case-types-non-uppercase-statics-unicode.rs
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[forbid(non_camel_case_types)]; #[forbid(non_uppercase_statics)]; #[feature(non_ascii_idents)]; // Some scripts (e.g. hiragana) don't have a concept of // upper/lowercase struct ヒ; static ラ: uint = 0; pub fn main
}
() {
freemarker-mode-lint.ts
import { CodeMirror, TemplateType } from '@syndesis/ui'; import { FreemarkerParser } from '@syndesis/utils'; import i18n from '../../../../../../i18n'; import { AbstractLanguageLint } from './abstract-language-lint'; import { TemplateSymbol } from './template-symbol';
} public parse(content: string): TemplateSymbol[] { const symbols: TemplateSymbol[] = []; const parser: FreemarkerParser = new FreemarkerParser(); const result: any = parser.parse(content); for (const token of result.tokens) { if (token.type === 'Interpolation') { symbols.push(new TemplateSymbol(token.params, 'string')); } } return symbols; } protected define(): void { CodeMirror.defineMode(this.name(), (config, parserConfig) => { return { token: (stream, state) => { let ch; if (stream.match('${')) { // tslint:disable-next-line while ((ch = stream.next()) != null) { if (ch === '}') { return this.name(); } } } while (stream.next() != null && !stream.match('${', false)) { // Read it but don't do anything } return null; }, }; }); super.define(); } protected validate(text: string, errors: any[]): void { try { const parser: FreemarkerParser = new FreemarkerParser(); const result: any = parser.parse(text); if (result.ast && result.ast.errors) { for (const error of result.ast.errors) { const startLine = error.loc.start.line > 0 ? error.loc.start.line - 1 : 0; const startCol = error.loc.start.column > 0 ? error.loc.start.column - 1 : 0; const endLine = error.loc.end.line > 0 ? error.loc.end.line - 1 : 0; const endCol = error.loc.end.column > 0 ? error.loc.end.column - 1 : 0; errors.push({ from: CodeMirror.Pos(startLine, startCol), message: error.message, severity: 'error', to: CodeMirror.Pos(endLine, endCol), }); } } let totalSymbols = 0; for (const token of result.tokens) { if (token.type === 'Interpolation') { totalSymbols++; } } if (totalSymbols === 0) { const msg = i18n.t('integrations:steps.templater-no-symbols'); errors.push({ from: CodeMirror.Pos(0, 0), message: msg, severity: 'warning', to: CodeMirror.Pos(0, 0), }); } } catch (exception) { errors.push({ from: CodeMirror.Pos(0, 0), message: exception.message, severity: 'error', to: CodeMirror.Pos(0, 0), }); } } }
export class FreemarkerModeLint extends AbstractLanguageLint { constructor() { super(TemplateType.Freemarker);
csn.rs
#[doc = "Register `CSN` reader"] pub struct R(crate::R<CSN_SPEC>); impl core::ops::Deref for R { type Target = crate::R<CSN_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<CSN_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<CSN_SPEC>) -> Self { R(reader) } } #[doc = "Register `CSN` writer"] pub struct W(crate::W<CSN_SPEC>); impl core::ops::Deref for W { type Target = crate::W<CSN_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<CSN_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<CSN_SPEC>) -> Self { W(writer) } } #[doc = "Field `PIN` reader - Pin number"] pub struct PIN_R(crate::FieldReader<u8, u8>); impl PIN_R { #[inline(always)] pub(crate) fn new(bits: u8) -> Self { PIN_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for PIN_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PIN` writer - Pin number"] pub struct PIN_W<'a> { w: &'a mut W, } impl<'a> PIN_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x1f) | (value as u32 & 0x1f); self.w } } #[doc = "Field `PORT` reader - Port number"] pub struct PORT_R(crate::FieldReader<bool, bool>); impl PORT_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { PORT_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for PORT_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PORT` writer - Port number"] pub struct PORT_W<'a> { w: &'a mut W, } impl<'a> PORT_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | ((value as u32 & 0x01) << 5); self.w } } #[doc = "Connection\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CONNECT_A { #[doc = "1: Disconnect"] DISCONNECTED = 1, #[doc = "0: Connect"] CONNECTED = 0, } impl From<CONNECT_A> for bool { #[inline(always)] fn from(variant: CONNECT_A) -> Self { variant as u8 != 0 } } #[doc = "Field `CONNECT` reader - Connection"] pub struct CONNECT_R(crate::FieldReader<bool, CONNECT_A>); impl CONNECT_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { CONNECT_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> CONNECT_A { match self.bits { true => CONNECT_A::DISCONNECTED, false => CONNECT_A::CONNECTED, } } #[doc = "Checks if the value of the field is `DISCONNECTED`"] #[inline(always)] pub fn is_disconnected(&self) -> bool { **self == CONNECT_A::DISCONNECTED } #[doc = "Checks if the value of the field is `CONNECTED`"] #[inline(always)] pub fn is_connected(&self) -> bool { **self == CONNECT_A::CONNECTED } } impl core::ops::Deref for CONNECT_R { type Target = crate::FieldReader<bool, CONNECT_A>; #[inline(always)] fn deref(&self) -> &Self::Target
} #[doc = "Field `CONNECT` writer - Connection"] pub struct CONNECT_W<'a> { w: &'a mut W, } impl<'a> CONNECT_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: CONNECT_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Disconnect"] #[inline(always)] pub fn disconnected(self) -> &'a mut W { self.variant(CONNECT_A::DISCONNECTED) } #[doc = "Connect"] #[inline(always)] pub fn connected(self) -> &'a mut W { self.variant(CONNECT_A::CONNECTED) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | ((value as u32 & 0x01) << 31); self.w } } impl R { #[doc = "Bits 0:4 - Pin number"] #[inline(always)] pub fn pin(&self) -> PIN_R { PIN_R::new((self.bits & 0x1f) as u8) } #[doc = "Bit 5 - Port number"] #[inline(always)] pub fn port(&self) -> PORT_R { PORT_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bit 31 - Connection"] #[inline(always)] pub fn connect(&self) -> CONNECT_R { CONNECT_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:4 - Pin number"] #[inline(always)] pub fn pin(&mut self) -> PIN_W { PIN_W { w: self } } #[doc = "Bit 5 - Port number"] #[inline(always)] pub fn port(&mut self) -> PORT_W { PORT_W { w: self } } #[doc = "Bit 31 - Connection"] #[inline(always)] pub fn connect(&mut self) -> CONNECT_W { CONNECT_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Pin select for chip select signal CSN.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [csn](index.html) module"] pub struct CSN_SPEC; impl crate::RegisterSpec for CSN_SPEC { type Ux = u32; } #[doc = "`read()` method returns [csn::R](R) reader structure"] impl crate::Readable for CSN_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [csn::W](W) writer structure"] impl crate::Writable for CSN_SPEC { type Writer = W; } #[doc = "`reset()` method sets CSN to value 0xffff_ffff"] impl crate::Resettable for CSN_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0xffff_ffff } }
{ &self.0 }
lib.rs
use std::{
    future::Future,
    pin::Pin,
    sync::{Arc, Mutex},
    task::{Context, Poll, Waker},
    thread,
    time::Duration,
};

// This program follows the tutorial at https://course.rs/async-rust/async/future-excuting.html
pub struct TimerFuture {
    // For Arc<T> see https://zhuanlan.zhihu.com/p/300971430
    // In short, Arc<T> is the thread-safe counterpart of Rc<T>
    shared_state: Arc<Mutex<SharedState>>,
}

struct SharedState {
    completed: bool,
    waker: Option<Waker>,
}

impl Future for TimerFuture {
    type Output = ();
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let mut shared_state = self.shared_state.lock().unwrap();
        if shared_state.completed {
            Poll::Ready(())
        } else {
            shared_state.waker = Some(cx.waker().clone());
            Poll::Pending
        }
    }
}

impl TimerFuture {
    pub fn new(duration: Duration) -> Self {
        le
hared_state = Arc::new(Mutex::new(SharedState {
            completed: false,
            waker: None,
        }));

        let thread_shared_state = shared_state.clone();
        thread::spawn(move || {
            thread::sleep(duration);
            let mut shared_state = thread_shared_state.lock().unwrap();
            shared_state.completed = true;
            if let Some(waker) = shared_state.waker.take() {
                waker.wake()
            }
        });

        TimerFuture { shared_state }
    }
}
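// Editor's note: a hedged usage sketch, not part of the original file. It assumes the
// `futures` crate is available (e.g. as a dev-dependency) for its simple block_on executor.
#[cfg(test)]
mod timer_future_example {
    use super::*;
    use std::time::Duration;

    #[test]
    fn completes_after_sleep() {
        // block_on repeatedly polls the future; the background thread wakes it once the sleep elapses.
        futures::executor::block_on(TimerFuture::new(Duration::from_millis(10)));
    }
}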
t s
app.js
// We use a self-invoking function so that mainCtrl and apiService do not become globals.
(function(){
    angular.module("app",[])
        .constant("myApiUrl","http://localhost:8080")
        .service("apiService",apiService)
        .controller("mainCtrl",mainCtrl)
        .value("isLoading",false);

    mainCtrl.$inject=["apiService","isLoading"];

    /**
     * If the controller is declared in the markup as 'mainCtrl as vm', then 'vm' in the
     * templates refers to the 'this' of this function.
     */
    function mainCtrl(apiService,isLoading){
        this.title = "Hello World!";
        this.subtitle = "This is a small app combining Angular and NodeJS written for educational purposes!";
        this.fetchRandomNumberFromServer = fetchRandomNumberFromServer.bind(this);
        this.isLoading = isLoading;

        // initialise
        fetchRandomNumberFromServer.call(this);

        function fetchRandomNumberFromServer(){
            this.isLoading = true;
            // We use angular.bind so that inside the callback 'this' still refers to the mainCtrl instance.
            apiService.fetchRandomNumberFromServer(angular.bind(this,function(randomNumber){
                this.isLoading = false;
                this.randomNumber = randomNumber;
            }));
        }
    }

    apiService.$inject = ["$http","myApiUrl"];

    function apiService($http,myApiUrl){
        return {
            fetchRandomNumberFromServer : fetchRandomNumberFromServer
        };

        function fetchRandomNumberFromServer(callback){
            $http.get(myApiUrl+"/get-random-number").then(function(response){
                console.log(response);
                callback(response.data.randomNumber);
            });
}());
} }
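// Editor's note: a hedged sketch of the NodeJS side this service talks to; the real server code
// is not part of this file, so the Express route below is illustrative only. It only assumes the
// URL and response shape used above (myApiUrl + "/get-random-number" returning { randomNumber: ... }).
// var express = require("express");
// var server = express();
// server.get("/get-random-number", function (req, res) {
//     res.json({ randomNumber: Math.floor(Math.random() * 100) });
// });
// server.listen(8080);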
async.py
# Use yield to get async-style concurrency on a single thread
import time

def consumer(name):
    print("%s is ready to eat buns!" % name)
    while True:
        baozi = yield  # receive the value sent in
        print("Bun [%s] arrived and was eaten by [%s]!" % (baozi, name))

def producer(name):
    c = consumer("A")
    c2 = consumer("B")
    c.__next__()
    c2.__next__()
    print("I'm starting to make buns")
    for i in range(1):  # 0 (note: the sample output below corresponds to range(2))
        time.sleep(1)
        print("Made two buns")
        c.send(i)  # send a value into the yield expression
        c2.send(i)

producer("alex")
# Make two buns per minute and hand them to both people at the same time
"""
A is ready to eat buns!
B is ready to eat buns!
I'm starting to make buns
Made two buns
Bun [0] arrived and was eaten by [A]!
Bun [0] arrived and was eaten by [B]!
Made two buns
Bun [1] arrived and was eaten by [A]!
Bun [1] arrived and was eaten by [B]!
"""

# Python decorators
# tv=login(tv)
# tv("alex")
def w1(func):
    print("Inside function w1")
    def inner():
        print("Inside function inner")
        # #2 #3
        return func
    return inner

#@w1
def f1():
    print('Inside function f1')

flag = w1(f1)   # run the w1 function
#print(flag)
flag = flag()   # run the inner function
flag()          ## run the f1 function
'''
Inside function w1
Inside function inner
Inside function f1
'''
#---------------next----------
print("Demonstrating the @ syntax")
@w1
def f2():
    print('Inside function f1')
f2()
"""
@w1: runs w1 with the decorated function as its argument, i.e. w1(f2).
The decorated name is then rebound to the return value:
new f2 = <return value of w1(f2)>
"""
#@w1(f1)  # it would look like this
def f3():
    print('Inside function f1')
"""
@filter(before,after)
1. run filter(before,after)
2. @outer
3. the new function
"""
#--------------- recursion ----------
def calc(n):
        print(n)
        res = calc(n/2)
        print(n, res)
        return res

calc(20)
"""
20
10.0
5.0
2.5
2.5 None
5.0 None
10.0 None
20 None
"""

# A number sequence where the third number = number2 + number1 (Fibonacci-style)
def func3(arg1, arg2):
    if arg1 == 0:
        print(arg1)
        print(arg2)
    arg3 = arg1 + arg2
    print(arg3)
    if arg3 < 110:
        func3(arg2, arg3)

func3(0, 1)
"""
0
1
1
2
3
5
8
"""

# Binary search
data = list(range(1, 100, 3))
print(data)
if n/2 >1:
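# Editor's note: a hedged sketch, not part of the original notes. The "one producer feeding two
# consumers on a single thread" idea from the generator example at the top of this file, written
# with asyncio coroutines instead of bare generators; the names mirror that example but the code
# is illustrative only.
import asyncio

async def consumer_async(name, queue):
    while True:
        baozi = await queue.get()
        if baozi is None:  # sentinel: producer is finished
            break
        print("Bun [%s] arrived and was eaten by [%s]!" % (baozi, name))

async def producer_async(queues):
    for i in range(2):
        await asyncio.sleep(1)
        print("Made two buns")
        for q in queues:
            await q.put(i)
    for q in queues:
        await q.put(None)

async def main_async():
    q1, q2 = asyncio.Queue(), asyncio.Queue()
    await asyncio.gather(
        consumer_async("A", q1),
        consumer_async("B", q2),
        producer_async([q1, q2]),
    )

# asyncio.run(main_async())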
gen_dapp_util.go
// Copyright Fuzamei Corp. 2018 All Rights Reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package tasks

import (
	"fmt"
	"regexp"
	"strings"

	sysutil "github.com/33cn/chain33/util"
)

type actionInfoItem struct {
	memberName string
	memberType string
}

/**
Use regular expressions to extract the member names and types of an Action. The steps are:
1. Read the proto file that needs to be parsed
2. Search the content to locate the beginning of the given Action
3. Use a regex to extract the contents of the Action's `oneof value` block
4. Use a regex to parse that block and obtain the member names and type names
5. Strip spaces from the member names and capitalize their first letter
*/
func readDappActionFromProto(protoContent, actionName string) ([]*actionInfoItem, error) {
	// If the file contains text that only partially matches actionName, more than one match may be found
	index := strings.Index(protoContent, actionName)
	if index < 0 {
		return nil, fmt.Errorf("action %s does not exist", actionName)
	}
	expr := `\s*oneof\s+value\s*{\s+([\w\s=;]*)\}`
	reg := regexp.MustCompile(expr)
	oneOfValueStrs := reg.FindAllStringSubmatch(protoContent, index)

	expr = `\s+(\w+)([\s\w]+)=\s+(\d+);`
	reg = regexp.MustCompile(expr)
	members := reg.FindAllStringSubmatch(oneOfValueStrs[0][0], -1)

	actionInfos := make([]*actionInfoItem, 0)
	for _, member := range members {
		memberType := strings.Replace(member[1], " ", "", -1)
		memberName := strings.Replace(member[2], " ", "", -1)
		// Per the proto-to-pb.go generation rules, member names must start with an uppercase letter
		memberName, _ = sysutil.MakeStringToUpper(memberName, 0, 1)
		actionInfos = append(actionInfos, &actionInfoItem{
			memberName: memberName,
			memberType: memberType,
		})
	}
	if len(actionInfos) == 0 {
		return nil, fmt.Errorf("cannot find %s member info", actionName)
	}
	return actionInfos, nil
}

func formatExecContent(infos []*actionInfoItem, dappName string) string {
	fnFmtStr := `func (c *%s) Exec_%s(payload *ptypes.%s, tx *types.Transaction, index int) (*types.Receipt, error) {
	var receipt *types.Receipt
	//implement code
	return receipt, nil
}

`
	content := ""
	for _, info := range infos {
		content += fmt.Sprintf(fnFmtStr, dappName, info.memberName, info.memberType)
	}
	return content
}

func formatExecLocalContent(infos []*actionInfoItem, dappName string) string {
	fnFmtStr := `func (c *%s) ExecLocal_%s(payload *ptypes.%s, tx *types.Transaction, receiptData *types.ReceiptData, index int) (*types.LocalDBSet, error) {
	var dbSet *types.LocalDBSet
	//implement code
	return dbSet, nil
}

`
	content := ""
	for _, info := range infos {
		content += fmt.Sprintf(fnFmtStr, dappName, info.memberName, info.memberType)
	}
	return content
}

func formatExecDelLocalContent(infos []*actionInfoItem, dappName string) string {
	fnFmtStr := `func (c *%s) ExecDelLocal_%s(payload *ptypes.%s, tx *types.Transaction, receiptData *types.ReceiptData, index int) (*types.LocalDBSet, error) {
	var dbSet *types.LocalDBSet
	//implement code
	return dbSet, nil
}

`
	content := ""
	for _, info := range infos {
		co
.Sprintf("const (\n%s)\n", items) return } // 组成规则是 ActionName + ActionMemberName func buildActionIDText(infos []*actionInfoItem, className string) (text string) { items := fmt.Sprintf("TyUnknowAction = iota + 100\n") for _, info := range infos { items += fmt.Sprintf("Ty%sAction\n", info.memberName) } items += "\n" for _, info := range infos { items += fmt.Sprintf("Name%sAction = \"%s\"\n", info.memberName, info.memberName) } text = fmt.Sprintf("const (\n%s)\n", items) return } // 返回 map[string]int32 func buildTypeMapText(infos []*actionInfoItem, className string) (text string) { var items string for _, info := range infos { items += fmt.Sprintf("Name%sAction: Ty%sAction,\n", info.memberName, info.memberName) } text = fmt.Sprintf("map[string]int32{\n%s}", items) return } // 返回 map[string]*types.LogInfo func buildLogMapText() (text string) { text = fmt.Sprintf("map[int64]*types.LogInfo{\n\t//LogID: {Ty: reflect.TypeOf(LogStruct), Name: LogName},\n}") return }
ntent += fmt.Sprintf(fnFmtStr, dappName, info.memberName, info.memberType)
	}
	return content
}

// The naming rule is TyLog + ActionName + ActionMemberName
func buildActionLogTypeText(infos []*actionInfoItem, className string) (text string) {
	items := "TyUnknownLog = iota + 100\n"
	for _, info := range infos {
		items += fmt.Sprintf("Ty%sLog\n", info.memberName)
	}
	text = fmt
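// Editor's note: a hedged usage sketch, not part of the original file. It shows the kind of proto
// fragment readDappActionFromProto expects and what it extracts; the message and field names below
// are made up for illustration.
func exampleReadDappActionFromProto() {
	proto := `message DemoAction {
    oneof value {
        DemoCreate create = 1;
        DemoClose  close  = 2;
    }
}`
	infos, err := readDappActionFromProto(proto, "DemoAction")
	if err != nil {
		fmt.Println(err)
		return
	}
	for _, info := range infos {
		// Prints roughly: "DemoCreate Create" and "DemoClose Close"
		fmt.Println(info.memberType, info.memberName)
	}
}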
main.go
// Copyright 2013-2014 go-diameter authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // Converts Wireshark diameter dictionaries to go-diameter format. // Use: wireshark-dict-tool < wireshark-dict.xml > new-dict.xml // // Some wireshark dictionaries must be slightly fixed before they can // be converted by this tool. package main // TODO: Improve the parser and fix AVP properties during conversion: // <avp name=".." code=".." must="" may="" must-not="" may-encrypt=""> import ( "encoding/xml" "log" "os" "github.com/danievanzyl/go-diameter/v4/diam/dict" ) func main() { wsd, err := load(os.Stdin) if err != nil { log.Fatal(err) } var newDict = &dict.File{} for _, app := range wsd.App { newApp := &dict.App{ ID: app.ID, Type: app.Type, Name: app.Name, } copyVendors(wsd.Vendor, newApp) copyCommands(app.Cmd, newApp) copyAvps(app.AVP, newApp) newDict.App = append(newDict.App, newApp) } os.Stdout.Write([]byte(`<?xml version="1.0" encoding="UTF-8"?>` + "\n")) enc := xml.NewEncoder(os.Stdout) enc.Indent("", "\t") enc.Encode(newDict) } func copyVendors(src []*Vendor, dst *dict.App) { for _, vendor := range src { dst.Vendor = append(dst.Vendor, &dict.Vendor{ ID: vendor.ID, Name: vendor.Name, }) } } func copyCommands(src []*Cmd, dst *dict.App) { for _, cmd := range src { newCmd := &dict.Command{ Code: cmd.Code, Name: cmd.Name, Short: cmd.Name, } copyCmdRules(cmd.Request.Fixed.Rule, &newCmd.Request, false) copyCmdRules(cmd.Request.Required.Rule, &newCmd.Request, true) copyCmdRules(cmd.Request.Optional.Rule, &newCmd.Request, false) copyCmdRules(cmd.Answer.Fixed.Rule, &newCmd.Answer, false) copyCmdRules(cmd.Answer.Required.Rule, &newCmd.Answer, true) copyCmdRules(cmd.Answer.Optional.Rule, &newCmd.Answer, false) dst.Command = append(dst.Command, newCmd) } } func copyCmdRules(src []*Rule, dst *dict.CommandRule, required bool) { for _, req := range src { dst.Rule = append(dst.Rule, &dict.Rule{ AVP: req.Name, Required: required, Min: req.Min, Max: req.Max, }) } } func copyAvps(src []*AVP, dst *dict.App)
{ for _, avp := range src { newAVP := &dict.AVP{ Name: avp.Name, Code: avp.Code, } if avp.Type.Name == "" && avp.Grouped != nil { newAVP.Data = dict.Data{TypeName: "Grouped"} } else { newAVP.Data = dict.Data{TypeName: avp.Type.Name} } switch avp.MayEncrypt { case "yes": newAVP.MayEncrypt = "Y" case "no": newAVP.MayEncrypt = "N" default: newAVP.MayEncrypt = "-" } switch avp.Mandatory { case "must": newAVP.Must = "M" case "may": newAVP.May = "P" default: newAVP.Must = "" } if newAVP.May != "" { switch avp.Protected { case "may": newAVP.May = "P" default: newAVP.May = "" } } for _, p := range avp.Enum { newAVP.Data.Enum = append(newAVP.Data.Enum, &dict.Enum{ Name: p.Name, Code: p.Code, }) } for _, grp := range avp.Grouped { for _, p := range grp.GAVP { newAVP.Data.Rule = append(newAVP.Data.Rule, &dict.Rule{ AVP: p.Name, Min: p.Min, Max: p.Max, }) } for _, p := range grp.Required.Rule { newAVP.Data.Rule = append(newAVP.Data.Rule, &dict.Rule{ AVP: p.Name, Required: true, Min: p.Min, Max: p.Max, }) } for _, p := range grp.Optional.Rule { newAVP.Data.Rule = append(newAVP.Data.Rule, &dict.Rule{ AVP: p.Name, Required: false, Min: p.Min, Max: p.Max, }) } } dst.AVP = append(dst.AVP, newAVP) } }
utils.py
"""NLP Dataset""" import os import re from typing import List, Union, Dict, Tuple import nltk import unicodedata import numpy as np from dlex.configs import ModuleConfigs from dlex.utils.logging import logger # nltk.download('punkt') # Turn a Unicode string to plain ASCII, thanks to # https://stackoverflow.com/a/518232/2809427 def unicodeToAscii(s): return ''.join( c for c in unicodedata.normalize('NFD', s) if unicodedata.category(c) != 'Mn' ) def load_tkn_to_idx(filename): tkn_to_idx = {} fo = open(filename, encoding='utf-8') for line in fo: line = line.strip() if line == "": continue tkn_to_idx[line] = len(tkn_to_idx) fo.close() return tkn_to_idx def normalize_lower(sentence: str): return sentence.strip().lower() def normalize_lower_alphanumeric(sentence: str): s = sentence.strip().lower() s = re.sub("[^a-z0-9\uAC00-\uD7A3]+", " ", s) return s def normalize_string_ascii(sentence): """ :param str sentence: :return: normalized sentence, separated by space :rtype str """ # x = re.sub("[^ a-zA-Z0-9\uAC00-\uD7A3]+", " ", x) # x = re.sub("[\u3040-\u30FF]+", "\u3042", x) # convert Hiragana and Katakana to あ # x = re.sub("[\u4E00-\u9FFF]+", "\u6F22", x) # convert CJK unified ideographs to 漢 sent = unicodeToAscii(sentence.lower().strip()) sent = re.sub(r"([.!?,])", r" \1", sent) sent = re.sub(r"[^a-zA-Z.!?,]+", r" ", sent) sent = re.sub(r"\s+", " ", sent) sent = re.sub("^ | $", "", sent) words = sent.split(' ') ret = [] for word in words: ret.append(normalize_word(word)) return ' '.join(ret) def normalize_string(sentence): """ :param str sentence: :return: normalized sentence, separated by space :rtype str """ # x = re.sub("[^ a-zA-Z0-9\uAC00-\uD7A3]+", " ", x) # x = re.sub("[\u3040-\u30FF]+", "\u3042", x) # convert Hiragana and Katakana to あ # x = re.sub("[\u4E00-\u9FFF]+", "\u6F22", x) # convert CJK unified ideographs to 漢 sentence = re.sub(r"([\.!?,\";\(\)])\'", r" \1", sentence) # sent = re.sub(r"[^a-zA-Z.!?,]+", r" ", sent) sentence = re.sub(r"\s+", " ", sentence) sentence = re.sub("^ | $", "", sentence) words = sentence.split(' ') ret = [] for word in words: ret.append(normalize_word(word)) return ' '.join(ret) def normalize_word(word): punctuations = [',', '.', '-', '"', ':', '!', '(', ')', '...', '?'] if word in ',.!?': return word elif word in punctuations: return '<punc>' elif any('0' <= c <= '9' for c in word): return '<non-word>' else: return word.lower() def normalize_none(s): return s def nltk_tokenize(s): return nltk.word_tokenize(s) class Tokenizer: def __init__(self, normalize_fn=None, tokenize_fn=None): self.normalize_fn = normalize_fn self.tokenize_fn = tokenize_fn def process(self, s): s = self.normalize_fn(s) s = self.tokenize_fn(s) return s spacy_nlp = None def spacy_tokenize(s): import spacy from spacy.symbols import ORTH global spacy_nlp if spacy_nlp is None: # sputnik.install('spacy', spacy.about.__version__, 'en_default', data_path=ModuleConfigs.get_tmp_path()) spacy_nlp = spacy.load('en_core_web_sm', via=ModuleConfigs.get_tmp_path()) spacy_nlp.tokenizer.add_special_case('<eos>', [{ORTH: '<eos>'}]) spacy_nlp.tokenizer.add_special_case('<bos>', [{ORTH: '<bos>'}]) spacy_nlp.tokenizer.add_special_case('<unk>', [{ORTH: '<unk>'}]) return [_s.text for _s in spacy_nlp.tokenizer(s)] def normalize_char(char): return char.lower().replace(' ', '_') def space_tokenize(s): return s.split(' ') def char_tokenize(s: str): s = s.replace(" ", "_") return list(s) def mecab_tokenize(s): import MeCab wakati = MeCab.Tagger("-Owakati") return wakati.parse(s).split() def write_vocab( text: 
Union[str, List[str], List[List[str]]], output_path: str, tokenizer: Tokenizer = None, min_freq=0, specials=None): """ :param text: text or list of sentences :param output_path: :param tokenizer: if tokenizer is None, tokens are separated by space :param min_freq: :param specials: :return: """ if tokenizer is None: tokenizer = Tokenizer(normalize_none, space_tokenize) if specials is None: specials = ['<pad>', '<sos>', '<eos>', '<oov>'] word_freqs = {} if isinstance(text, str): text = [text] for sent in text: if isinstance(sent, str): # if normalize_fn is not None: # s = normalize_fn(sent.replace('_', ' ')) # else: # s = sent # ls = char_tokenize(s) if token == 'char' else space_tokenize(s) sent = tokenizer.process(sent) for word in sent: if word.strip() == '': continue if word in word_freqs: word_freqs[word] += 1 else: word_freqs[word] = 1 words = list([word for word in word_freqs if word_freqs[word] > min_freq]) words.sort(key=lambda word: word_freqs[word], reverse=True) with open(output_path, "w", encoding='utf-8') as fo: fo.write('\n'.join(specials) + '\n') fo.write("\n".join(words)) logger.info("Vocab written to %s (%d tokens)", output_path, len(specials) + len(words)) def get_token_id(vocab, word): """ :type vocab: Vocab :type word: str :rtype: int """ if word in vocab: return vocab[word] else: if '<oov>' in vocab: return vocab['<oov>'] elif '<unk>' in vocab: return vocab['<unk>'] else: raise Exception("No out-of-vocabulary token found.") class Vocab: def __init__(self, index2token: List[str] = None, token2index: Dict[str, int] = None): if index
@classmethod def from_file(cls, file_name): index2token = [] fo = open(file_name, encoding='utf-8') for line in fo: line = line.strip() if line == "": continue index2token.append(line) fo.close() return cls(index2token) def __getitem__(self, token: str) -> int: return self._token2index[token] if token in self._token2index else self.oov_token_idx def tolist(self) -> List[str]: return self._index2token def get_token_id(self, token): return self[token] or self.oov_token_idx def add_token(self, token: str): if token not in self._token2index: self._token2index[token] = len(self._token2index) self._index2token.append(token) def __len__(self): return len(self._token2index) def get_token(self, idx: int) -> str: return self._index2token[idx] def decode_idx_list(self, ls: List[int], ignore: List[int] = None, stop_at: int = None) -> List[str]: ret = [] for idx in ls: if stop_at and idx == stop_at: break elif ignore and idx in ignore: continue else: ret.append(self.get_token(idx)) return ret def encode_token_list(self, ls: List[str]) -> List[int]: return [self.get_token_id(token) for token in ls] @property def sos_token_idx(self) -> int: idx = self['<sos>'] or self['<s>'] assert idx is not None return idx @property def eos_token_idx(self) -> int: idx = self['<eos>'] or self['</s>'] assert idx is not None return idx @property def blank_token_idx(self): idx = self['<blank>'] or self['<pad>'] assert idx is not None return idx @property def oov_token_idx(self) -> int: if '<oov>' in self._token2index: return self._token2index['<oov>'] elif '<unk>' in self._token2index: return self._token2index['<unk>'] else: raise Exception("<oov> token not found.") def get_specials(self): return [token for token in self._index2token if token.startswith('<')] def init_pretrained_embeddings( self, pretrained: str, emb_name: str = None, dim: int = None) -> np.ndarray: if pretrained == 'glove': from torchtext.vocab import GloVe dim = dim or 300 vocab = GloVe( name=emb_name or '840B', dim=dim, cache=os.path.join(ModuleConfigs.get_tmp_path(), "torchtext")) elif pretrained == 'fasttext': from torchtext.vocab import FastText vocab = FastText() else: raise ValueError("Pre-trained embeddings not found.") vectors = vocab.vectors oovs = [] embeddings = np.zeros([len(self), dim]) for idx, t in enumerate(self._index2token): _t = t.lower() if _t in vocab.stoi: embeddings[idx, :] = vectors[vocab.stoi[_t]].cpu().numpy() if all(token in vocab.stoi for token in _t.split(' ')): embeddings[idx, :] = np.sum([vectors[vocab.stoi[token]].cpu().numpy() for token in _t.split(' ')]) else: oovs.append(_t) if oovs: logger.warning(f"{len(oovs)} tokens not found in pre-trained embeddings: {', '.join(oovs)}") logger.debug(f"Load embeddings: {pretrained} (no. 
embeddings: {len(self) - len(oovs):,})") self.embedding_dim = dim self.embeddings = embeddings def get_token_embedding(self, token: str) -> np.ndarray: if self.embeddings is None: raise ValueError('Embeddings are not initialized') return self.embeddings[self.get_token_id(token)] def embed_token_list(self, ls): emb = np.zeros(self.embedding_dim) for token in ls: emb += self.get_token_embedding(token) return emb def load_embeddings( pretrained: str, emb_name: str = None, dim: int = None, vocab_size: int = None, tokens: List[str] = None, specials: List[str] = None) -> Tuple[np.ndarray, Vocab]: """ Load pre-trained embedding defined in dataset.embeddings :param tokens: if specified, only load embeddings of these tokens :param specials: special tokens :return: """ if not pretrained: assert dim is not None assert vocab_size is not None return np.random.rand(vocab_size, dim), None elif pretrained.lower() in ["glove", "fasttext"]: if pretrained.lower() == 'glove': from torchtext.vocab import GloVe vocab = GloVe( name=emb_name, dim=dim, cache=os.path.join(ModuleConfigs.get_tmp_path(), "torchtext")) elif pretrained.lower() == 'fasttext': from torchtext.vocab import FastText vocab = FastText() else: raise ValueError("Pre-trained embeddings not found.") vectors = vocab.vectors index2token = vocab.itos token2index = None if tokens: # limit vocabulary to list of tokens num_oovs = 0 keep = [] index2token = [] token2index = {} for t in tokens: _t = t.lower() if _t in token2index: if t not in token2index: token2index[t] = token2index[_t] elif _t in vocab.stoi: keep.append(vocab.stoi[_t.lower()]) token2index[_t] = len(index2token) token2index[t] = len(index2token) index2token.append(_t) else: num_oovs += 1 vectors = vectors[keep] if num_oovs: logger.warning(f"{num_oovs} tokens not found in pre-trained embeddings") logger.debug(f"Load embeddings: {pretrained} (no. embeddings: {len(index2token):,})") if specials is not None: for s in specials: token2index[s] = len(index2token) index2token.append(s) index2token += specials vectors = torch.cat([vectors, torch.rand(len(specials), len(vectors[0]))]) # return nn.Embedding.from_pretrained(vectors, freeze=emb.freeze or True), Vocab(index2token, token2index) return vectors, Vocab(index2token, token2index) else: raise ValueError(f"{pretrained} is not supported.")
2token is None: self._token2index = {} self._index2token = [] else: self._index2token = index2token if token2index: self._token2index = token2index else: self._token2index = {token: idx for idx, token in enumerate(index2token)} self.embeddings = None self.embedding_dim = None
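# Editor's note: a hedged usage sketch, not part of the original module. It only uses the helpers
# defined above (Tokenizer, normalize_string, space_tokenize, write_vocab, Vocab); the corpus and
# the /tmp path are made up for illustration.
def _example_build_vocab():
    corpus = ["Hello world !", "Hello again , world ."]
    tokenizer = Tokenizer(normalize_string, space_tokenize)
    # Write the vocabulary file (specials default to <pad>, <sos>, <eos>, <oov>) ...
    write_vocab(corpus, "/tmp/vocab.txt", tokenizer=tokenizer, min_freq=0)
    # ... then load it back and encode a sentence; unseen tokens map to <oov>.
    vocab = Vocab.from_file("/tmp/vocab.txt")
    return vocab.encode_token_list(tokenizer.process("Hello unseen world !"))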
player_service.py
import time
import cgi
import json
import BaseHTTPServer
import os

from player import Player

HOST_NAME = '0.0.0.0'
PORT_NUMBER = os.environ.has_key('PORT') and int(os.environ['PORT']) or 9000


class PlayerService(BaseHTTPServer.BaseHTTPRequestHandler):
    def do_POST(self):
        self.send_response(200)
        self.send_header("Content-type", "application/json")
        self.end_headers()
        ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
        if ctype == 'multipart/form-data':
            postvars = cgi.parse_multipart(self.rfile, pdict)
        elif ctype == 'application/x-www-form-urlencoded':
            postvars = cgi.parse_qs(self.rfile.read(length), keep_blank_values=1)
        else:
            postvars = {}

        action = postvars['action'][0]
        if 'game_state' in postvars:
            game_state = json.loads(postvars['game_state'][0])
        else:
            game_state = {}

        response = ''
        if action == 'bet_request':
            response = Player().bet_request(game_state)
        elif action == 'showdown':
            Player().showdown(game_state)
        elif action == 'version':
            response = Player.VERSION

        self.wfile.write(response)


if __name__ == '__main__':
    server_class = BaseHTTPServer.HTTPServer
    httpd = server_class((HOST_NAME, PORT_NUMBER), PlayerService)
    print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    httpd.server_close()
    print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
length = int(self.headers.getheader('content-length'))
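# Editor's note: a hedged usage sketch, not part of the original file. With the server running
# (python player_service.py), the handler above accepts form-encoded POSTs, e.g.:
#
#   curl -X POST --data 'action=version' http://localhost:9000/
#   curl -X POST --data 'action=bet_request' \
#        --data 'game_state={"players": []}' http://localhost:9000/
#
# The 'version' action writes back Player.VERSION; 'bet_request' writes back whatever
# Player().bet_request(game_state) returns.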