file_name | prefix | suffix | middle
---|---|---|---
assertion.py | from casbin import log
class | :
key = ""
value = ""
tokens = []
policy = []
rm = None
def build_role_links(self, rm):
self.rm = rm
count = self.value.count("_")
for rule in self.policy:
if count < 2:
raise RuntimeError('the number of "_" in role definition should be at least 2')
if len(rule) < count:
raise RuntimeError("grouping policy elements do not meet role definition")
if count == 2:
self.rm.add_link(rule[0], rule[1])
elif count == 3:
self.rm.add_link(rule[0], rule[1], rule[2])
elif count == 4:
self.rm.add_link(rule[0], rule[1], rule[2], rule[3])
log.log_print("Role links for: " + self.key)
self.rm.print_roles()
| Assertion |
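A minimal sketch of how build_role_links picks the add_link arity from the number of "_" tokens in the role definition. The stub role manager and sample policy are hypothetical, and running it assumes the casbin package (for its log module) is importable:
class StubRoleManager:
    # Hypothetical stand-in for casbin's role manager; records the links it receives.
    def __init__(self):
        self.links = []
    def add_link(self, *names):
        self.links.append(names)
    def print_roles(self):
        print(self.links)
a = Assertion()
a.key = "g"
a.value = "_, _"                 # two "_" tokens -> add_link(rule[0], rule[1])
a.policy = [["alice", "admin"]]
a.build_role_links(StubRoleManager())  # records ("alice", "admin") and logs the key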
cache.rs | use crate::common::get_cache_dir;
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
pub enum Cache {
/// Clear the cache
#[structopt(name = "clean")]
Clean,
/// Display the location of the cache
#[structopt(name = "dir")]
Dir,
}
impl Cache {
pub fn | (&self) {
match &self {
Cache::Clean => {
use std::fs;
let cache_dir = get_cache_dir();
if cache_dir.exists() {
fs::remove_dir_all(cache_dir.clone()).expect("Can't remove cache dir");
}
fs::create_dir_all(cache_dir).expect("Can't create cache dir");
}
Cache::Dir => {
println!("{}", get_cache_dir().to_string_lossy());
}
}
}
}
| execute |
udmach6bsel.rs | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
impl super::UDMACH6BSEL {
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R { bits: self.register.get() }
}
}
#[doc = r" Value of the field"]
pub struct | {
bits: u32,
}
impl RESERVED0R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:31 - Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior."]
#[inline]
pub fn reserved0(&self) -> RESERVED0R {
let bits = {
const MASK: u32 = 4294967295;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u32
};
RESERVED0R { bits }
}
}
| RESERVED0R |
actions.py | #!/usr/bin/python
#
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
import sys
import yaml
from charmhelpers.core.host import service_pause, service_resume
from charmhelpers.core.hookenv import action_fail
from charmhelpers.core.unitdata import HookData, kv
from charmhelpers.contrib.openstack.utils import (
get_os_codename_package,
set_os_workload_status,
)
from lib.swift_storage_utils import (
assess_status,
REQUIRED_INTERFACES,
SWIFT_SVCS,
)
from hooks.swift_storage_hooks import (
CONFIGS,
)
def _get_services():
"""Return a list of services that need to be (un)paused."""
services = SWIFT_SVCS[:]
# Before Icehouse there was no swift-container-sync
if get_os_codename_package("swift-container") < "icehouse":
services.remove("swift-container-sync")
return services
def get_action_parser(actions_yaml_path, action_name,
get_services=_get_services):
"""Make an argparse.ArgumentParser seeded from actions.yaml definitions."""
with open(actions_yaml_path) as fh:
doc = yaml.safe_load(fh)[action_name]["description"]
parser = argparse.ArgumentParser(description=doc)
parser.add_argument("--services", default=get_services())
# TODO: Add arguments for params defined in the actions.yaml
return parser
def pause(args):
"""Pause all the swift services.
@raises Exception if any services fail to stop
"""
for service in args.services:
stopped = service_pause(service)
if not stopped:
raise Exception("{} didn't stop cleanly.".format(service))
with HookData()():
kv().set('unit-paused', True)
set_os_workload_status(CONFIGS, REQUIRED_INTERFACES,
charm_func=assess_status)
def resume(args):
"""Resume all the swift services.
@raises Exception if any services fail to start
"""
for service in args.services:
started = service_resume(service)
if not started:
raise Exception("{} didn't start cleanly.".format(service))
with HookData()():
kv().set('unit-paused', False)
set_os_workload_status(CONFIGS, REQUIRED_INTERFACES,
charm_func=assess_status)
# A dictionary of all the defined actions to callables (which take
# parsed arguments).
ACTIONS = {"pause": pause, "resume": resume}
def main(argv):
action_name = _get_action_name()
actions_yaml_path = _get_actions_yaml_path()
parser = get_action_parser(actions_yaml_path, action_name)
args = parser.parse_args(argv)
try:
action = ACTIONS[action_name]
except KeyError:
return "Action %s undefined" % action_name
else:
try:
action(args)
except Exception as e:
action_fail(str(e))
def | ():
"""Return the name of the action."""
return os.path.basename(__file__)
def _get_actions_yaml_path():
"""Return the path to actions.yaml"""
cwd = os.path.dirname(__file__)
return os.path.join(cwd, "..", "actions.yaml")
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
| _get_action_name |
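A hedged illustration of the dispatch convention above: each action is expected to be a file (typically a symlink back to this script) named after the action, so os.path.basename(__file__) doubles as the key into ACTIONS. The paths below are assumptions for illustration:
import os
# Hypothetical layout: actions/pause and actions/resume both symlink to actions.py
for script in ("actions/pause", "actions/resume"):
    print(os.path.basename(script))  # -> "pause", then "resume"; used to look up ACTIONS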
bitswap.py | from . import base
class Section(base.SectionBase):
@base.returns_single_item
def wantlist(self, peer=None, **kwargs):
"""Returns blocks currently on the bitswap wantlist.
.. code-block:: python
>>> client.bitswap.wantlist()
{'Keys': [
'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb',
'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K',
'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp'
]}
Parameters
----------
peer : str
Peer to show wantlist for.
Returns
-------
dict
+------+----------------------------------------------------+
| Keys | List of blocks the connected daemon is looking for |
+------+----------------------------------------------------+
"""
args = (peer,)
return self._client.request('/bitswap/wantlist', args, decoder='json', **kwargs)
@base.returns_single_item
def stat(self, **kwargs):
| """Returns some diagnostic information from the bitswap agent.
.. code-block:: python
>>> client.bitswap.stat()
{'BlocksReceived': 96,
'DupBlksReceived': 73,
'DupDataReceived': 2560601,
'ProviderBufLen': 0,
'Peers': [
'QmNZFQRxt9RMNm2VVtuV2Qx7q69bcMWRVXmr5CEkJEgJJP',
'QmNfCubGpwYZAQxX8LQDsYgB48C4GbfZHuYdexpX9mbNyT',
'QmNfnZ8SCs3jAtNPc8kf3WJqJqSoX7wsX7VqkLdEYMao4u',
…
],
'Wantlist': [
'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb',
'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K',
'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp'
]
}
Returns
-------
dict
Statistics, peers and wanted blocks
"""
return self._client.request('/bitswap/stat', decoder='json', **kwargs) |
|
tls_flags.go | package commands
import (
"errors"
"fmt"
"os"
"strconv"
"code.cloudfoundry.org/cfdot/commands/helpers"
"github.com/spf13/cobra"
)
// errors
var (
errMissingCACertFile = errors.New("--caCertFile must be specified if using HTTPS and --skipCertVerify is not set")
errMissingClientCertAndKeyFiles = errors.New("--clientCertFile and --clientKeyFile must both be specified for TLS connections.")
)
var (
Config helpers.TLSConfig
)
func AddTLSFlags(cmd *cobra.Command) |
func tlsPreHook(cmd *cobra.Command, args []string) error {
var err, returnErr error
// Only look at the environment variable if the flag has not been set.
if !cmd.Flags().Lookup("skipCertVerify").Changed && os.Getenv("SKIP_CERT_VERIFY") != "" {
Config.SkipCertVerify, err = strconv.ParseBool(os.Getenv("SKIP_CERT_VERIFY"))
if err != nil {
returnErr = NewCFDotValidationError(
cmd,
fmt.Errorf(
"The value '%s' is not a valid value for SKIP_CERT_VERIFY. Please specify one of the following valid boolean values: 1, t, T, TRUE, true, True, 0, f, F, FALSE, false, False",
os.Getenv("SKIP_CERT_VERIFY")),
)
return returnErr
}
}
if Config.CACertFile == "" {
Config.CACertFile = os.Getenv("CA_CERT_FILE")
}
if Config.CertFile == "" {
Config.CertFile = os.Getenv("CLIENT_CERT_FILE")
}
if Config.KeyFile == "" {
Config.KeyFile = os.Getenv("CLIENT_KEY_FILE")
}
if !Config.SkipCertVerify {
if Config.CACertFile == "" {
returnErr = NewCFDotValidationError(cmd, errMissingCACertFile)
return returnErr
}
err := validateReadableFile(cmd, Config.CACertFile, "CA cert")
if err != nil {
return err
}
}
if (Config.KeyFile == "") || (Config.CertFile == "") {
returnErr = NewCFDotValidationError(cmd, errMissingClientCertAndKeyFiles)
return returnErr
}
if Config.KeyFile != "" {
err := validateReadableFile(cmd, Config.KeyFile, "key")
if err != nil {
return err
}
}
if Config.CertFile != "" {
err := validateReadableFile(cmd, Config.CertFile, "cert")
if err != nil {
return err
}
}
return nil
}
| {
cmd.Flags().BoolVar(&Config.SkipCertVerify, "skipCertVerify", false, "when set to true, skips all SSL/TLS certificate verification [environment variable equivalent: SKIP_CERT_VERIFY]")
cmd.Flags().StringVar(&Config.CACertFile, "caCertFile", "", "path the Certificate Authority (CA) file to use when verifying TLS keypairs [environment variable equivalent: CA_CERT_FILE]")
cmd.Flags().StringVar(&Config.CertFile, "clientCertFile", "", "path to the TLS client certificate to use during mutual-auth TLS [environment variable equivalent: CLIENT_CERT_FILE]")
cmd.Flags().StringVar(&Config.KeyFile, "clientKeyFile", "", "path to the TLS client private key file to use during mutual-auth TLS [environment variable equivalent: CLIENT_KEY_FILE]")
cmd.PreRunE = tlsPreHook
} |
clap.rs | use clap::{App, Arg, ArgGroup, SubCommand};
fn device_arg() -> Arg<'static, 'static> {
Arg::with_name("device")
.short("d")
.long("device")
.takes_value(true)
.value_name("DEVICE")
.help("Specifies the spotify device to use")
}
fn format_arg() -> Arg<'static, 'static> {
Arg::with_name("format")
.short("f")
.long("format")
.takes_value(true)
.value_name("FORMAT")
.help("Specifies the output format")
.long_help(
"There are multiple format specifiers you can use: %a: artist, %b: album, %p: playlist, \
%t: track, %h: show, %f: flags (shuffle, repeat, like), %s: playback status, %v: volume, %d: current device. \
Example: spt pb -s -f 'playing on %d at %v%'",
)
}
pub fn playback_subcommand() -> App<'static, 'static> {
SubCommand::with_name("playback")
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about("Interacts with the playback of a device")
.long_about(
"Use `playback` to interact with the playback of the current or any other device. \
You can specify another device with `--device`. If no options are provided, spt \
will default to just displaying the current playback. In fact, after every action \
spt will display the updated playback. The output format is configurable with the \
`--format` flag. Some options can be used together; others have to be used alone.
Here's a list:
* `--next` and `--previous` cannot be used with other options
* `--status`, `--toggle`, `--transfer`, `--volume`, `--like`, `--repeat` and `--shuffle` \
can be used together
* `--share-track` and `--share-album` cannot be used with other options",
)
.visible_alias("pb")
.arg(device_arg())
.arg(
format_arg()
.default_value("%f %s %t - %a")
.default_value_ifs(&[
("volume", None, "%v% %f %s %t - %a"),
("transfer", None, "%f %s %t - %a on %d"),
]),
)
.arg(
Arg::with_name("toggle")
.short("t")
.long("toggle")
.help("Pauses/resumes the playback of a device"),
)
.arg(
Arg::with_name("status")
.short("s")
.long("status")
.help("Prints out the current status of a device (default)"),
)
.arg(
Arg::with_name("share-track")
.long("share-track")
.help("Returns the url to the current track"),
)
.arg(
Arg::with_name("share-album")
.long("share-album")
.help("Returns the url to the album of the current track"),
)
.arg(
Arg::with_name("transfer")
.long("transfer")
.takes_value(true)
.value_name("DEVICE")
.help("Transfers the playback to new DEVICE"),
)
.arg(
Arg::with_name("like")
.long("like")
.help("Likes the current song"),
)
.arg(
Arg::with_name("shuffle")
.long("shuffle")
.help("Toggles shuffle mode"),
)
.arg(
Arg::with_name("repeat")
.long("repeat")
.help("Switches between repeat modes"),
)
.arg(
Arg::with_name("next")
.short("n")
.long("next")
.multiple(true)
.help("Jumps to the next song")
.long_help(
"This jumps to the next song if specied once. If you want to jump, let's say 3 songs \
forward, you can use `--next` 3 times: `spt pb -nnn`.",
),
)
.arg(
Arg::with_name("previous")
.short("p")
.long("previous")
.multiple(true)
.help("Jumps to the previous song")
.long_help(
"This jumps to the beginning of the current song if specied once. You probably want to \
jump to the previous song though, so you can use the previous flag twice: `spt pb -pp`. To jump \
two songs back, you can use `spt pb -ppp` and so on.",
),
)
.arg(
Arg::with_name("volume")
.short("v")
.long("volume")
.takes_value(true)
.value_name("VOLUME")
.help("Sets the volume of a device to VOLUME (1 - 100)"),
)
.group(
ArgGroup::with_name("jumps")
.args(&["next", "previous"])
.multiple(false)
.conflicts_with_all(&["single", "flags", "actions"]),
)
.group(
ArgGroup::with_name("flags")
.args(&["like", "shuffle", "repeat"])
.multiple(true)
.conflicts_with_all(&["single", "jumps"]),
)
.group(
ArgGroup::with_name("actions")
.args(&["toggle", "status", "transfer", "volume"])
.multiple(true)
.conflicts_with_all(&["single", "jumps"]),
)
.group(
ArgGroup::with_name("single")
.args(&["share-track", "share-album"])
.multiple(false)
.conflicts_with_all(&["actions", "flags", "jumps"]),
)
}
pub fn play_subcommand() -> App<'static, 'static> {
SubCommand::with_name("play")
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about("Plays a uri or another spotify item by name")
.long_about(
"If you specify a uri, the type can be inferred. If you want to play something by \
name, you have to specify the type: `--track`, `--album`, `--artist`, `--playlist` \
or `--show`. The first item found will be played without confirmation. \
To add a track to the queue, use `--queue`. To play a random song from a playlist, \
use `--random`. Again, with `--format` you can specify how the output will look. \
The same function as found in `playback` will be called.",
)
.visible_alias("p")
.arg(device_arg())
.arg(format_arg().default_value("%f %s %t - %a"))
.arg(
Arg::with_name("uri")
.short("u")
.long("uri")
.takes_value(true)
.value_name("URI")
.help("Plays the URI"),
)
.arg(
Arg::with_name("name")
.short("n")
.long("name")
.takes_value(true)
.value_name("NAME")
.requires("contexts")
.help("Plays the first match with NAME from the specified category"),
)
.arg(
Arg::with_name("queue")
.short("q")
.long("queue")
// Only works with tracks
.conflicts_with_all(&["album", "artist", "playlist", "show"])
.help("Adds track to queue instead of playing it directly"),
)
.arg(
Arg::with_name("random")
.short("r")
.long("random")
// Only works with playlists
.conflicts_with_all(&["track", "album", "artist", "show"])
.help("Plays a random track (only works with playlists)"),
)
.arg(
Arg::with_name("album")
.short("b")
.long("album")
.help("Looks for an album"),
)
.arg(
Arg::with_name("artist")
.short("a")
.long("artist")
.help("Looks for an artist"),
)
.arg(
Arg::with_name("track")
.short("t")
.long("track")
.help("Looks for a track"),
)
.arg(
Arg::with_name("show")
.short("w")
.long("show")
.help("Looks for a show"),
)
.arg(
Arg::with_name("playlist")
.short("p")
.long("playlist")
.help("Looks for a playlist"),
)
.group(
ArgGroup::with_name("contexts")
.args(&["track", "artist", "playlist", "album", "show"])
.multiple(false),
)
.group(
ArgGroup::with_name("actions")
.args(&["uri", "name"])
.multiple(false)
.required(true),
)
}
pub fn | () -> App<'static, 'static> {
SubCommand::with_name("list")
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about("Lists devices, liked songs and playlists")
.long_about(
"This will list devices, liked songs or playlists. With the `--limit` flag you are \
able to specify the amount of results (between 1 and 50). Here, the `--format` is \
even more awesome, get your output exactly the way you want. The format option will \
be applied to every item found.",
)
.visible_alias("l")
.arg(format_arg().default_value_ifs(&[
("devices", None, "%v% %d"),
("liked", None, "%t - %a (%u)"),
("playlists", None, "%p (%u)"),
]))
.arg(
Arg::with_name("devices")
.short("d")
.long("devices")
.help("Lists devices"),
)
.arg(
Arg::with_name("playlists")
.short("p")
.long("playlists")
.help("Lists playlists"),
)
.arg(
Arg::with_name("liked")
.long("liked")
.help("Lists liked songs"),
)
.arg(
Arg::with_name("limit")
.long("limit")
.takes_value(true)
.help("Specifies the maximum number of results (1 - 50)"),
)
.group(
ArgGroup::with_name("listable")
.args(&["devices", "playlists", "liked"])
.required(true)
.multiple(false),
)
}
pub fn search_subcommand() -> App<'static, 'static> {
SubCommand::with_name("search")
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about("Searches for tracks, albums and more")
.long_about(
"This will search for something on spotify and displays you the items. The output \
format can be changed with the `--format` flag and the limit can be changed with \
the `--limit` flag (between 1 and 50). The type can't be inferred, so you have to \
specify it.",
)
.visible_alias("s")
.arg(format_arg().default_value_ifs(&[
("tracks", None, "%t - %a (%u)"),
("playlists", None, "%p (%u)"),
("artists", None, "%a (%u)"),
("albums", None, "%b - %a (%u)"),
("shows", None, "%h - %a (%u)"),
]))
.arg(
Arg::with_name("search")
.required(true)
.takes_value(true)
.value_name("SEARCH")
.help("Specifies the search query"),
)
.arg(
Arg::with_name("albums")
.short("b")
.long("albums")
.help("Looks for albums"),
)
.arg(
Arg::with_name("artists")
.short("a")
.long("artists")
.help("Looks for artists"),
)
.arg(
Arg::with_name("playlists")
.short("p")
.long("playlists")
.help("Looks for playlists"),
)
.arg(
Arg::with_name("tracks")
.short("t")
.long("tracks")
.help("Looks for tracks"),
)
.arg(
Arg::with_name("shows")
.short("w")
.long("shows")
.help("Looks for shows"),
)
.arg(
Arg::with_name("limit")
.long("limit")
.takes_value(true)
.help("Specifies the maximum number of results (1 - 50)"),
)
.group(
ArgGroup::with_name("searchable")
.args(&["playlists", "tracks", "albums", "artists", "shows"])
.required(true)
.multiple(false),
)
}
| list_subcommand |
trade_agent_tests.py | import unittest
from SDWLE.agents.trade.possible_play import PossiblePlays
from SDWLE.cards import Wisp, WarGolem, BloodfenRaptor, RiverCrocolisk, AbusiveSergeant, ArgentSquire
from testsSDW.agents.trade.test_helpers import TestHelpers
from testsSDW.agents.trade.test_case_mixin import TestCaseMixin
class | (TestCaseMixin, unittest.TestCase):
def test_setup_smoke(self):
game = TestHelpers().make_game()
self.add_minions(game, 0, Wisp(), WarGolem())
self.add_minions(game, 1, BloodfenRaptor())
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
def test_basic_trade(self):
game = TestHelpers().make_game()
self.add_minions(game, 1, Wisp(), WarGolem())
self.add_minions(game, 0, BloodfenRaptor())
self.make_all_active(game)
game.play_single_turn()
self.assert_minions(game.players[1], "War Golem")
self.assert_minions(game.players[0], "Bloodfen Raptor")
def test_buff_target(self):
game = TestHelpers().make_game()
self.add_minions(game, 0, BloodfenRaptor(), RiverCrocolisk())
self.make_all_active(game)
game.players[0].agent.player = game.players[0]
self.add_minions(game, 0, AbusiveSergeant())
game.play_single_turn()
def test_hero_power(self):
game = self.make_game()
cards = self.make_cards(game.current_player, ArgentSquire())
possible_plays = PossiblePlays(cards, 10, allow_hero_power=True)
self.assertEqual(1, len(possible_plays.plays()))
| TestTradeAgent |
context.go | package breaker
import (
"context"
"sync/atomic"
)
// BreakByContext returns a new Breaker based on the Context.
func BreakByContext(ctx context.Context, cancel context.CancelFunc) Interface {
return (&contextBreaker{newBreaker(), cancel, ctx.Done()}).trigger()
}
// WithContext returns a new Breaker and an associated Context derived from ctx.
// Deprecated: use BreakByContext instead.
// TODO:v2 will be removed
func WithContext(ctx context.Context) (Interface, context.Context) {
ctx, cancel := context.WithCancel(ctx)
return (&contextBreaker{newBreaker(), cancel, ctx.Done()}).trigger(), ctx
}
type contextBreaker struct {
*breaker
cancel context.CancelFunc
signal <-chan struct{}
}
// Done returns a channel that's closed when a cancellation signal occurred.
func (br *contextBreaker) Done() <-chan struct{} {
return br.signal
}
// Close closes the Done channel and releases resources associated with it.
func (br *contextBreaker) Close() {
br.cancel()
}
| func (br *contextBreaker) trigger() Interface {
go func() {
<-br.signal
atomic.StoreInt32(&br.released, 1)
}()
return br
} | |
circuits_circuit_terminations_update_responses.go | // Code generated by go-swagger; DO NOT EDIT.
// Copyright 2020 The go-netbox Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package circuits
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/runtime"
"github.com/go-openapi/strfmt"
"github.com/tomasherout/go-netbox/netbox/models"
)
// CircuitsCircuitTerminationsUpdateReader is a Reader for the CircuitsCircuitTerminationsUpdate structure.
type CircuitsCircuitTerminationsUpdateReader struct {
formats strfmt.Registry
}
// ReadResponse reads a server response into the received o.
func (o *CircuitsCircuitTerminationsUpdateReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
switch response.Code() {
case 200:
result := NewCircuitsCircuitTerminationsUpdateOK()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return result, nil
default:
return nil, runtime.NewAPIError("response status code does not match any response statuses defined for this endpoint in the swagger spec", response, response.Code())
}
}
// NewCircuitsCircuitTerminationsUpdateOK creates a CircuitsCircuitTerminationsUpdateOK with default headers values
func NewCircuitsCircuitTerminationsUpdateOK() *CircuitsCircuitTerminationsUpdateOK |
/* CircuitsCircuitTerminationsUpdateOK describes a response with status code 200, with default header values.
CircuitsCircuitTerminationsUpdateOK circuits circuit terminations update o k
*/
type CircuitsCircuitTerminationsUpdateOK struct {
Payload *models.CircuitTermination
}
func (o *CircuitsCircuitTerminationsUpdateOK) Error() string {
return fmt.Sprintf("[PUT /circuits/circuit-terminations/{id}/][%d] circuitsCircuitTerminationsUpdateOK %+v", 200, o.Payload)
}
func (o *CircuitsCircuitTerminationsUpdateOK) GetPayload() *models.CircuitTermination {
return o.Payload
}
func (o *CircuitsCircuitTerminationsUpdateOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.CircuitTermination)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
| {
return &CircuitsCircuitTerminationsUpdateOK{}
} |
agent.py | """
The agent module contains three abstract classes that are subclassed in order to create algorithms.
The classes are:
* Player - for an algorithm that cannot learn and can only play
* Learner - for a learning algorithm controlling a single agent
* MultiLearner - for a learning algorithm controlling a number of agents
"""
import abc
from typing import List, Iterable
from ezcoach.enviroment import Manifest
class Player(abc.ABC):
"""
The abstract class representing a playing agent. It can be initialized with the manifest of the game
and can react to states by selecting actions.
Both methods are empty and must be implemented in the concrete class.
A class that inherits from the Player class can be used with the Runner's test procedure.
"""
@abc.abstractmethod
def initialize(self, manifest: Manifest):
"""
Initializes the object with the manifest that describes the game.
:param manifest: a Manifest instance obtained from the environment.
"""
@abc.abstractmethod
def act(self, state):
"""
Selects an action to be performed in the given state.
:param state: a state received from the environment
:return: an action compliant with the manifest provided in initialize method
"""
@classmethod
def __subclasshook__(cls, obj):
if cls is Player:
methods = ('initialize', 'act')
if all(any(method in superclass.__dict__
for superclass in obj.__mro__)
for method in methods):
return True
return NotImplemented
class Learner(Player):
"""
The abstract class representing an agent that is capable of learning. It inherits from the Player class
and thus it is capable of playing.
Only the do_start_episode method must be implemented; the other methods can be left unimplemented and consequently empty.
Rewards are received on a per-step basis in the receive_reward method and on a per-episode basis in the episode_ended method.
Methods that ensure persistence are added for convenience.
An agent derived from Learner can be used in both training and testing procedures.
"""
@abc.abstractmethod
def do_start_episode(self, episode: int) -> bool:
"""
Decides if next episode should be started.
:param episode: the number of an episode to be started (starting from 1)
:return: whether the next episode should be started
"""
def episode_started(self, episode: int):
"""
Informs the algorithm that the episode was started.
:param episode: the number of the started episode (starting from 1)
"""
def receive_reward(self, previous_state, action, reward: float, accumulated_reward: float, next_state):
"""
Receives a reward from an environment.
:param previous_state: the state that precedes the reward
:param action: the action that precedes the reward
:param reward: the numerical reward signal
:param accumulated_reward: the reward accumulated during the current episode
:param next_state: the state that follows the reward
"""
def episode_ended(self, terminal_state, accumulated_reward):
""" | :param terminal_state: the last state of the episode
:param accumulated_reward: the accumulated reward assuming no discount
"""
@classmethod
def __subclasshook__(cls, obj):
if cls is Learner:
methods = ('initialize', 'act',
'do_start_episode', 'episode_started', 'receive_reward', 'episode_ended')
if all(any(method in superclass.__dict__
for superclass in obj.__mro__)
for method in methods):
return True
return NotImplemented
class MultiLearner(Learner):
"""
The class representing a learning algorithm capable of controlling a number of agents.
It inherits from the Learner class. The list of player numbers is provided in the set_players method before each episode.
The number identifying the currently acting player is set in the set_acting_player method, which is invoked before
the act and receive_reward methods during an episode and before the episode_ended method at the end of an episode.
"""
@abc.abstractmethod
def set_players(self, players: Iterable[int]):
"""
Informs the learner about the players that it will control.
:param players: an iterable of numbers identifying players
"""
@abc.abstractmethod
def set_acting_player(self, player):
"""
Sets the current player that will act, receive reward and end episode.
:param player: a number identifying the acting player
"""
@classmethod
def __subclasshook__(cls, obj):
if cls is MultiLearner:
methods = ('initialize', 'act',
'do_start_episode', 'episode_started', 'receive_reward', 'episode_ended',
'set_players', 'set_acting_player')
if all(any(method in superclass.__dict__
for superclass in obj.__mro__)
for method in methods):
return True
return NotImplemented | Receives the accumulated reward for an episode. If a discount is used, this value should be ignored
and the actual reward should be calculated using the receive_reward method during the episode.
|
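As a sketch of the Player contract described above, a minimal concrete agent. The attribute used to enumerate legal actions from the manifest is an assumption, not a documented part of ezcoach's Manifest API:
import random
class RandomPlayer(Player):
    """Minimal sketch: plays uniformly at random."""
    def initialize(self, manifest):
        # Assumed (hypothetical) attribute holding the legal actions.
        self._actions = list(manifest.actions)
    def act(self, state):
        return random.choice(self._actions)
Note that thanks to the __subclasshook__ above, any class providing initialize and act methods is recognized as a Player by isinstance checks even without inheriting from it.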
extractor.py | import os
from typing import Dict, Iterable, List, Sequence, Set, Tuple
try:
import looker_sdk
from looker_sdk.sdk.api31.methods import Looker31SDK
from looker_sdk.sdk.api31.models import DashboardElement
except ImportError:
print("Please install metaphor[looker] extra\n")
raise
from metaphor.models.metadata_change_event import (
Chart,
ChartType,
Dashboard,
DashboardInfo,
DashboardLogicalID,
DashboardPlatform,
DashboardUpstream,
MetadataChangeEvent,
VirtualViewType,
)
from metaphor.common.entity_id import to_virtual_view_entity_id
from metaphor.common.event_util import EventUtil
from metaphor.common.extractor import BaseExtractor
from metaphor.common.logger import get_logger
from metaphor.looker.config import LookerConnectionConfig, LookerRunConfig
from metaphor.looker.lookml_parser import Model, fullname, parse_project
logger = get_logger(__name__)
class LookerExtractor(BaseExtractor):
"""Looker metadata extractor"""
@staticmethod
def | ():
return LookerRunConfig
vis_type_map = {
"looker_area": ChartType.AREA,
"looker_bar": ChartType.BAR,
"looker_boxplot": ChartType.BOX_PLOT,
"looker_column": ChartType.COLUMN,
"looker_donut_multiples": ChartType.DONUT,
"looker_line": ChartType.LINE,
"looker_map": ChartType.MAP,
"looker_geo_coordinates": ChartType.MAP,
"looker_geo_choropleth": ChartType.MAP,
"looker_pie": ChartType.PIE,
"looker_scatter": ChartType.SCATTER,
"table": ChartType.TABLE,
"looker_grid": ChartType.TABLE,
"looker_single_record": ChartType.TABLE,
"single_value": ChartType.TEXT,
"text": ChartType.TEXT,
}
def initSdk(self, config: LookerRunConfig) -> Looker31SDK:
# Load config using environment variables instead of the looker.ini file
# See https://github.com/looker-open-source/sdk-codegen#environment-variable-configuration
os.environ["LOOKERSDK_BASE_URL"] = config.base_url
os.environ["LOOKERSDK_CLIENT_ID"] = config.client_id
os.environ["LOOKERSDK_CLIENT_SECRET"] = config.client_secret
os.environ["LOOKERSDK_VERIFY_SSL"] = str(config.verify_ssl)
os.environ["LOOKERSDK_TIMEOUT"] = str(config.timeout)
return looker_sdk.init31()
async def extract(self, config: LookerRunConfig) -> List[MetadataChangeEvent]:
assert isinstance(config, LookerExtractor.config_class())
logger.info("Fetching metadata from Looker")
sdk = self.initSdk(config)
# Lower case all connection names for case-insensitive lookup
connections: Dict[str, LookerConnectionConfig] = {
k.lower(): v for (k, v) in config.connections.items()
}
model_map, virtual_views = parse_project(
config.lookml_dir, connections, config.project_source_url
)
dashboards = self._fetch_dashboards(config, sdk, model_map)
dashboard_events = [EventUtil.build_dashboard_event(d) for d in dashboards]
virtual_view_events = [
EventUtil.build_virtual_view_event(d) for d in virtual_views
]
return dashboard_events + virtual_view_events
def _fetch_dashboards(
self, config: LookerRunConfig, sdk: Looker31SDK, model_map: Dict[str, Model]
) -> List[Dashboard]:
dashboards: List[Dashboard] = []
for basic_dashboard in sdk.all_dashboards():
assert basic_dashboard.id is not None
dashboard = sdk.dashboard(dashboard_id=basic_dashboard.id)
dashboard_info = DashboardInfo()
dashboard_info.title = dashboard.title
dashboard_info.description = dashboard.description
dashboard_info.url = (
f"{config.base_url}/{dashboard.preferred_viewer}/{dashboard.id}"
)
# All numeric fields must be converted to "float" to meet quicktype's expectation
if dashboard.view_count is not None:
dashboard_info.view_count = float(dashboard.view_count)
dashboard_info.charts = []
upstream = None
if dashboard.dashboard_elements is not None:
(dashboard_info.charts, upstream) = self._extract_charts(
dashboard.dashboard_elements, model_map
)
dashboards.append(
Dashboard(
logical_id=DashboardLogicalID(
dashboard.id, DashboardPlatform.LOOKER
),
dashboard_info=dashboard_info,
upstream=upstream,
)
)
return dashboards
def _extract_charts(
self,
dashboard_elements: Sequence[DashboardElement],
model_map: Dict[str, Model],
) -> Tuple[List[Chart], DashboardUpstream]:
charts = []
explore_ids: Set[str] = set()
for e in filter(lambda e: e.type == "vis", dashboard_elements):
if e.result_maker is None:
logger.warning(f"Unable to find result_maker in element {e.title}")
continue
chart_type = None
if e.result_maker.vis_config is not None:
chart_type = self.vis_type_map.get(
e.result_maker.vis_config.get("type", ""), ChartType.OTHER
)
charts.append(
Chart(
# Use "id" if "title" is None or empty string
title=e.title if e.title else e.id,
description=e.note_text,
chart_type=chart_type,
)
)
if not isinstance(e.result_maker.filterables, Iterable):
logger.warning(f"Unable to iterate filterables in element {e.title}")
continue
for f in e.result_maker.filterables:
if f.model is None or f.view is None:
logger.warning(f"Missing model or view in element {e.title}")
continue
model = model_map.get(f.model)
if model is None:
logger.error(f"Chart {e.title} references invalid model {f.model}")
continue
explore = model.explores.get(f.view)
if explore is None:
logger.error(f"Chart {e.title} references invalid explore {f.view}")
continue
explore_ids.add(
str(
to_virtual_view_entity_id(
fullname(f.model, explore.name),
VirtualViewType.LOOKER_EXPLORE,
)
)
)
return (
charts,
DashboardUpstream(
source_virtual_views=list(explore_ids),
),
)
| config_class |
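A tiny sketch of the chart-type fallback used in _extract_charts: an unrecognized vis_config type maps to ChartType.OTHER. String stand-ins replace the metaphor enum here so the snippet runs on its own:
# Stub values standing in for metaphor's ChartType enum members
vis_type_map = {"looker_pie": "PIE", "table": "TABLE"}
def chart_type(vis_config: dict) -> str:
    # Mirrors vis_type_map.get(vis_config.get("type", ""), ChartType.OTHER)
    return vis_type_map.get(vis_config.get("type", ""), "OTHER")
print(chart_type({"type": "looker_pie"}))  # -> PIE
print(chart_type({}))                      # -> OTHER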
lib.rs | //! The main crate of Rexpect
//!
//! # Overview
//!
//! Rexpect is a loose port of [pexpect](https://pexpect.readthedocs.io/en/stable/),
//! which itself is inspired by Don Libes' Expect.
//!
//! Its main components (depending on your needs you can use any of these):
//!
//! - [session](session/index.html): automate stuff in Rust
//! - [reader](reader/index.html): a non-blocking reader with buffering, matching on
//! strings/regex/...
//! - [process](process/index.html): spawn a process in a pty
//!
//! # Basic example
//!
//! ```no_run
//!
//! extern crate rexpect;
//!
//! use rexpect::spawn;
//! use rexpect::errors::*;
//!
//! fn do_ftp() -> Result<()> {
//! let mut p = spawn("ftp speedtest.tele2.net", Some(2000))?;
//! p.exp_regex("Name \\(.*\\):")?;
//! p.send_line("anonymous")?;
//! p.exp_string("Password")?;
//! p.send_line("test")?;
//! p.exp_string("ftp>")?;
//! p.send_line("cd upload")?;
//! p.exp_string("successfully changed.\r\nftp>")?;
//! p.send_line("pwd")?;
//! p.exp_regex("[0-9]+ \"/upload\"")?;
//! p.send_line("exit")?;
//! p.exp_eof()?;
//! Ok(())
//! }
//!
//!
//! fn main() {
//! do_ftp().unwrap_or_else(|e| panic!("ftp job failed with {}", e));
//! }
//! ```
//!
//! # Example with bash
//!
//! Tip: try the chain of commands first in a bash session.
//! The tricky thing is to get the wait_for_prompt right.
//! What `wait_for_prompt` actually does is seek to the next
//! visible prompt. If you forget to call it once, your next call to
//! `wait_for_prompt` will be out of sync, seeking to a prompt
//! printed "above" the last `execute()`.
//!
//! ```no_run
//! extern crate rexpect;
//! use rexpect::spawn_bash;
//! use rexpect::errors::*;
//!
//!
//! fn run() -> Result<()> {
//! let mut p = spawn_bash(Some(30_000))?;
//! p.execute("ping 8.8.8.8", "bytes of data")?;
//! p.send_control('z')?;
//! p.wait_for_prompt()?;
//! p.execute("bg", "suspended")?;
//! p.send_line("sleep 1")?;
//! p.wait_for_prompt()?;
//! p.execute("fg", "continued")?;
//! p.send_control('c')?;
//! p.exp_string("packet loss")?;
//! Ok(())
//! }
//!
//! fn main() {
//! run().unwrap_or_else(|e| panic!("bash process failed with {}", e));
//! }
//!
//! ```
pub mod process;
pub mod reader;
pub mod session;
pub use reader::ReadUntil;
pub use session::{spawn, spawn_bash, spawn_python, spawn_stream};
pub mod errors {
use std::time;
// Create the Error, ErrorKind, ResultExt, and Result types
error_chain::error_chain! {
errors {
EOF(expected:String, got:String, exit_code:Option<String>) {
description("End of filestream (usually stdout) occurred, most probably\
because the process terminated")
display("EOF (End of File): Expected {} but got EOF after reading \"{}\", \
process terminated with {:?}", expected, got,
exit_code.as_ref()
.unwrap_or(& "unknown".to_string()))
} | }
Timeout(expected:String, got:String, timeout:time::Duration) {
description("The process didn't end within the given timeout")
display("Timeout Error: Expected {} but got \"{}\" (after waiting {} ms)",
expected, got, (timeout.as_secs() * 1000) as u32
+ timeout.subsec_nanos() / 1_000_000)
}
EmptyProgramName {
description("The provided program name is empty.")
display("EmptyProgramName")
}
}
}
} | BrokenPipe {
description("The pipe to the process is broken. Most probably because\
the process died.")
display("PipeError") |
generate.go | // Copyright 2018 mixtool authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package mixer
import (
"encoding/json"
"github.com/ghodss/yaml"
"github.com/google/go-jsonnet"
"github.com/grafana/tanka/pkg/jsonnet/native"
"github.com/pkg/errors"
)
type GenerateOptions struct {
AlertsFilename string
RulesFilename string
Directory string
JPaths []string
YAML bool
}
func NewVM(jpath []string) *jsonnet.VM {
vm := jsonnet.MakeVM()
vm.Importer(&jsonnet.FileImporter{
JPaths: jpath,
})
for _, nf := range native.Funcs() {
vm.NativeFunction(nf)
}
return vm
}
func GenerateAlerts(filename string, opts GenerateOptions) ([]byte, error) {
vm := NewVM(opts.JPaths)
j, err := evaluatePrometheusAlerts(vm, filename)
if err != nil {
return nil, err
}
output := []byte(j)
if opts.YAML {
output, err = yaml.JSONToYAML(output)
if err != nil |
}
return output, nil
}
func GenerateRules(filename string, opts GenerateOptions) ([]byte, error) {
vm := NewVM(opts.JPaths)
j, err := evaluatePrometheusRules(vm, filename)
if err != nil {
return nil, err
}
output := []byte(j)
if opts.YAML {
output, err = yaml.JSONToYAML(output)
if err != nil {
return nil, err
}
}
return output, nil
}
func GenerateRulesAlerts(filename string, opts GenerateOptions) ([]byte, error) {
vm := NewVM(opts.JPaths)
j, err := evaluatePrometheusRulesAlerts(vm, filename)
if err != nil {
return nil, err
}
output := []byte(j)
if opts.YAML {
output, err = yaml.JSONToYAML(output)
if err != nil {
return nil, err
}
}
return output, nil
}
func GenerateDashboards(filename string, opts GenerateOptions) (map[string]json.RawMessage, error) {
vm := NewVM(opts.JPaths)
j, err := evaluateGrafanaDashboards(vm, filename)
if err != nil {
return nil, err
}
var dashboards map[string]json.RawMessage
if err := json.Unmarshal([]byte(j), &dashboards); err != nil {
return nil, errors.Wrap(err, "failed to unmarshal dashboards")
}
return dashboards, nil
}
| {
return nil, err
} |
app.module.ts | import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { HttpModule } from '@angular/http';
import {HTTP_INTERCEPTORS, HttpClientModule} from '@angular/common/http';
import {AuthInterceptor} from './http-inerceptor';
import { RouterModule } from '@angular/router';
import { AppComponent } from './app.component';
import { MainComponent } from './main/main.component';
import { HomeComponent } from './home/home.component';
import { HeaderComponent } from './header/header.component';
import { ProfileComponent } from './profile/profile.component';
import { ROUTES } from './app.routes';
import { AuthService } from './auth/auth.service';
import { ProfileService } from './profile/profile.service';
import { ProfileResolver } from './profile/profile.resolver';
import { UserResolver } from './profile/user.resolver';
import { BookService } from './book/book.service';
import { BookResolver } from './book/book.resolver';
import { AuthorService } from './author/author.service';
import { AuthorResolver } from './author/author.resolver';
import { HomeService } from './home/home.service';
import { HomeResolver } from './home/home.resolver';
import { CallbackComponent } from './callback/callback.component';
import { BookListComponent } from './shared/components/book-list/book-list.component';
import { BookComponent } from './book/book.component';
import { ReviewComponent } from './review/review.component';
import { ReviewService } from './review/review.service';
import { ReviewResolver } from './review/review.resolver';
import { FormFieldErrorComponent } from './shared/components/form-field-error/form-field-error.component';
import { AuthorComponent } from './author/author.component';
@NgModule({
declarations: [
AppComponent,
MainComponent,
HomeComponent,
CallbackComponent,
HeaderComponent,
ProfileComponent,
BookListComponent,
BookComponent,
ReviewComponent,
FormFieldErrorComponent,
AuthorComponent
],
imports: [
BrowserModule,
FormsModule,
HttpModule,
HttpClientModule,
RouterModule.forRoot(ROUTES)
],
providers: [
AuthService,
ProfileService,
ProfileResolver,
BookService,
BookResolver,
ReviewService,
ReviewResolver,
UserResolver,
AuthorService,
AuthorResolver,
HomeService,
HomeResolver,
{
provide: HTTP_INTERCEPTORS,
useClass: AuthInterceptor,
multi: true
}
],
bootstrap: [AppComponent]
})
export class | { }
| AppModule |
Sort.py | # coding=utf-8
# Author: Tom Lambert
# Content: Implementation of the Sort class for ab6.
class Sort(object):
"""Implementiert Sortier-Algorithmen mit der Möglichkeit einer statistischen Auswertung"""
def __init__(self):
self.counter_swap = 0 # corresponds to roughly 2 element retrievals and 2 element assignments
self.counter_list_item_assignment = 0
self.counter_item_compare = 0
self.counter_get_item_from_list = 0
self.counter_add_item_to_result_list = 0
self.counter_recursive_call = 0
self.counter_split_list = 0
self.counter_copy_list = 0
self.counter_sort_call = 0
def qu | elf, lst):
"""
Sorts the lst list using the quick-sort algorithm and returns the sorted list.
Certain operations are counted in the counter_ variables.
"""
self.counter_sort_call += 1
if len(lst) > 1:
self.counter_get_item_from_list += 1
pivot = lst[0]
ltp = [] # less than pivot item
gtp = [] # greater than pivot item
ep = [] # equals pivot item
for item in lst:
self.counter_get_item_from_list += 1
self.counter_item_compare += 1
if item < pivot:
self.counter_add_item_to_result_list += 1
ltp.append(item)
elif item > pivot:
self.counter_add_item_to_result_list += 1
gtp.append(item)
else:
self.counter_add_item_to_result_list += 1
ep.append(item)
self.counter_split_list += 1
self.counter_recursive_call += 1
ltp = self.quick_sort(ltp)
self.counter_recursive_call += 1
gtp = self.quick_sort(gtp)
result = ltp
self.counter_add_item_to_result_list += len(ep)
result.extend(ep)
self.counter_add_item_to_result_list += len(gtp)
result.extend(gtp)
return result
else:
return lst
def gnome_sort(self, lst):
"""
Sorts the lst list using the gnome-sort algorithm and returns the sorted list.
Certain operations are counted in the counter_ variables.
"""
self.counter_sort_call += 1
self.counter_copy_list += 1
lst = list(lst) # copy the list, because lists are mutable and passed by reference
pos = 0
while pos < len(lst):
self.counter_get_item_from_list += 2
self.counter_item_compare += 1
if pos == 0 or lst[pos] >= lst[pos - 1]:
pos += 1
else:
self.counter_swap += 1
lst[pos], lst[pos - 1] = lst[pos - 1], lst[pos]
pos -= 1
return lst
def insertion_sort(self, lst):
"""
Sorts the lst list using the insertion-sort algorithm and returns the sorted list.
Certain operations are counted in the counter_ variables.
"""
self.counter_sort_call += 1
self.counter_copy_list += 1
lst = list(lst) # copy the list, because lists are mutable and passed by reference
for i in range(1, len(lst)):
self.counter_get_item_from_list += 1
val = lst[i]
j = i
while j > 0 and lst[j - 1] > val:
self.counter_item_compare += 1
self.counter_get_item_from_list += 1
self.counter_list_item_assignment += 1
lst[j] = lst[j - 1]
j = j - 1 # step left; the loop ends when j == 0 or lst[j - 1] <= val
lst[j] = val
self.counter_list_item_assignment += 1
return lst
| ick_sort(s |
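A short usage sketch of the counters above, assuming the split quick_sort definition is assembled back into a valid class:
s = Sort()
print(s.insertion_sort([5, 2, 4, 1, 3]))                       # -> [1, 2, 3, 4, 5]
print(s.counter_item_compare, s.counter_list_item_assignment)  # operation counts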
get_usergroups_group_id_parameters.go | // Code generated by go-swagger; DO NOT EDIT.
package products
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"net/http"
"time"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
"github.com/go-openapi/swag"
strfmt "github.com/go-openapi/strfmt"
)
// NewGetUsergroupsGroupIDParams creates a new GetUsergroupsGroupIDParams object
// with the default values initialized.
func NewGetUsergroupsGroupIDParams() *GetUsergroupsGroupIDParams {
var ()
return &GetUsergroupsGroupIDParams{
timeout: cr.DefaultTimeout,
}
}
// NewGetUsergroupsGroupIDParamsWithTimeout creates a new GetUsergroupsGroupIDParams object
// with the default values initialized, and the ability to set a timeout on a request
func NewGetUsergroupsGroupIDParamsWithTimeout(timeout time.Duration) *GetUsergroupsGroupIDParams |
// NewGetUsergroupsGroupIDParamsWithContext creates a new GetUsergroupsGroupIDParams object
// with the default values initialized, and the ability to set a context for a request
func NewGetUsergroupsGroupIDParamsWithContext(ctx context.Context) *GetUsergroupsGroupIDParams {
var ()
return &GetUsergroupsGroupIDParams{
Context: ctx,
}
}
// NewGetUsergroupsGroupIDParamsWithHTTPClient creates a new GetUsergroupsGroupIDParams object
// with the default values initialized, and the ability to set a custom HTTPClient for a request
func NewGetUsergroupsGroupIDParamsWithHTTPClient(client *http.Client) *GetUsergroupsGroupIDParams {
var ()
return &GetUsergroupsGroupIDParams{
HTTPClient: client,
}
}
/*GetUsergroupsGroupIDParams contains all the parameters to send to the API endpoint
for the get usergroups group ID operation typically these are written to a http.Request
*/
type GetUsergroupsGroupIDParams struct {
/*GroupID
Group ID
*/
GroupID int64
timeout time.Duration
Context context.Context
HTTPClient *http.Client
}
// WithTimeout adds the timeout to the get usergroups group ID params
func (o *GetUsergroupsGroupIDParams) WithTimeout(timeout time.Duration) *GetUsergroupsGroupIDParams {
o.SetTimeout(timeout)
return o
}
// SetTimeout adds the timeout to the get usergroups group ID params
func (o *GetUsergroupsGroupIDParams) SetTimeout(timeout time.Duration) {
o.timeout = timeout
}
// WithContext adds the context to the get usergroups group ID params
func (o *GetUsergroupsGroupIDParams) WithContext(ctx context.Context) *GetUsergroupsGroupIDParams {
o.SetContext(ctx)
return o
}
// SetContext adds the context to the get usergroups group ID params
func (o *GetUsergroupsGroupIDParams) SetContext(ctx context.Context) {
o.Context = ctx
}
// WithHTTPClient adds the HTTPClient to the get usergroups group ID params
func (o *GetUsergroupsGroupIDParams) WithHTTPClient(client *http.Client) *GetUsergroupsGroupIDParams {
o.SetHTTPClient(client)
return o
}
// SetHTTPClient adds the HTTPClient to the get usergroups group ID params
func (o *GetUsergroupsGroupIDParams) SetHTTPClient(client *http.Client) {
o.HTTPClient = client
}
// WithGroupID adds the groupID to the get usergroups group ID params
func (o *GetUsergroupsGroupIDParams) WithGroupID(groupID int64) *GetUsergroupsGroupIDParams {
o.SetGroupID(groupID)
return o
}
// SetGroupID adds the groupId to the get usergroups group ID params
func (o *GetUsergroupsGroupIDParams) SetGroupID(groupID int64) {
o.GroupID = groupID
}
// WriteToRequest writes these params to a swagger request
func (o *GetUsergroupsGroupIDParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
if err := r.SetTimeout(o.timeout); err != nil {
return err
}
var res []error
// path param group_id
if err := r.SetPathParam("group_id", swag.FormatInt64(o.GroupID)); err != nil {
return err
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
| {
var ()
return &GetUsergroupsGroupIDParams{
timeout: timeout,
}
} |
blogPost.js | import React from "react"
import { graphql, Link } from "gatsby"
import Img from "gatsby-image"
import {
Post,
FeaturedImage,
BlogPost,
Navigation,
ButtonNavigation,
} from "../styles/styles"
import SEO from "../components/seo"
import Layout from "../components/layout"
import Share from "./share"
const Template = ({ data, pageContext }) => {
const post = data.markdownRemark.frontmatter
const title = post.title
const date = post.date
const html = data.markdownRemark.html
const { next, prev } = pageContext
const myUrl = data.site.siteMetadata.siteUrl + post.path
return (
<Layout>
<SEO
title={title}
description={post.excerpt}
image={data.markdownRemark.featuredImg.childImageSharp.fluid}
pathname={myUrl}
/>
<Post>
<h1 id="topp">{title}</h1>
<Share url={myUrl} />
<small>
<em>{date}</em>
</small>
<FeaturedImage>
<a href={post.photoRef}>
<Img
fluid={data.markdownRemark.featuredImg.childImageSharp.fluid}
alt={post.photoBy}
title={post.photoBy}
/>
</a>
</FeaturedImage>
<BlogPost>
<div dangerouslySetInnerHTML={{ __html: html }} />
</BlogPost>
<Navigation>
{prev && (
<ButtonNavigation>
<Link to={prev.frontmatter.path}>Previous</Link>
</ButtonNavigation>
)}
{next && (
<ButtonNavigation>
<Link to={next.frontmatter.path}>Next</Link>
</ButtonNavigation>
)}
</Navigation>
</Post>
</Layout>
)
}
export const postQuery = graphql`
query($pathSlug: String!) { | author
siteUrl
}
}
markdownRemark(frontmatter: { path: { eq: $pathSlug } }) {
html
frontmatter {
title
date(formatString: "MMMM, DD, YYYY")
author
path
tags
excerpt
}
featuredImg {
childImageSharp {
fluid {
...GatsbyImageSharpFluid
}
}
}
}
}
`
export default Template | site {
siteMetadata {
title
description |
config.js | 'use strict';
const { _assign } = require('./helper/obj');
const debug = require('./debug');
const is = require('./helper/is');
const { _deep_clone } = require('./helper/obj');
class Configuration {
constructor(config = {}, sep = '.') {
this.init(config, sep);
}
init(config = {}, sep = '.') {
this.config = _deep_clone(config);
this.sep = sep;
}
assign(config) {
if (!is.empty(config)) {
_assign(this.config, config);
}
return this.config;
}
| get(key = null, _default = null) {
if (!key) {
return this.config;
} else if (key.indexOf(this.sep) < 0) {
return !is.invalid(this.config[key]) ? this.config[key] : _default;
}
const keyArr = key.split(this.sep);
function recur(keys, curr, _default) {
if (is.invalid(keys)) {
return _default;
}
if (!keys.length) {
return curr;
}
const key = keys.shift();
if (is.invalid(curr[key])) {
return _default;
}
return recur(keys, curr[key], _default);
}
return recur(keyArr, this.config, _default);
}
validate(keys = []) {
let failed = [];
if (is.array(keys)) {
let i = 0;
while (i < keys.length) {
const value = this.get(keys[i]);
if (is.empty(value)) {
failed.push(keys[i]);
}
i++;
}
} else if (is.string(keys)) {
const value = this.get(keys);
if (is.empty(value)) {
failed.push(keys);
}
} else {
debug.stack(`Unsupported keys data type. ${typeof keys}`);
}
return failed;
}
}
module.exports = Configuration; | |
routermq.py | # -*- coding: utf-8 -*-
"""Router with RabbitMQ topic exchange"""
import asyncio
import logging
import asynqp
from urllib.parse import urlparse
# RABBITMQ_HOST = 'localhost'
# RABBITMQ_PORT = 5672
# RABBITMQ_USERNAME = 'guest'
# RABBITMQ_PASSWORD = 'guest'
# RABBITMQ_VIRTUAL_HOST = '/'
# EXCHANGE = 'sam.router'
# QUEUE = 'sam.queue'
RECONNECT_BACKOFF = 1.0
log = logging.getLogger(__name__)
class RouterMQ():
"""Router based on RabbitMQ with exchange topic."""
def __init__(self, outgoing_key='Alarms.keeper',
routing_keys='#',
queue_name=None,
callback=None,
exchange='sam.router',
url=None,
host='localhost',
port=5672,
login='guest',
password='guest',
virtualhost='/'):
self.connection = None
self.channel = None
self.exchange = None
self.queue = None
self.consumer = None
self.send_packet = None
self.routing_keys = routing_keys
self.queue_name = queue_name or 'undefined'
self.outgoing_key = outgoing_key
self.callback = callback
_host = _port = _login = _password = _virtualhost = None
if url:
|
self.MQ_HOST = _host or host
self.MQ_PORT = _port or port
self.MQ_LOGIN = _login or login
self.MQ_PASSWORD = _password or password
self.MQ_VIRTUAL_HOST = _virtualhost or virtualhost
self.EXCHANGE = exchange
def get_info(self):
return {
'hostname': self.MQ_HOST,
'port': self.MQ_PORT,
'username': self.MQ_LOGIN,
'password': '******',
'virtualhost': self.MQ_VIRTUAL_HOST,
'queue': self.queue_name,
'outgoing_key': self.outgoing_key,
'routing_keys': self.routing_keys,
'exchange': self.EXCHANGE,
'type': 'AMQP',
}
def set_callback(self, callback):
"""Set the function to process received message"""
self.callback = callback
def connect(self):
asyncio.ensure_future(self.reconnector())
async def _connect(self):
"""Connects to the amqp exchange and queue"""
def log_returned_message(message):
"""Log when message has no handler in message queue"""
log.warning("Nobody cared for {0} {1}".format(message.routing_key,
message.json()))
# self.connection = await asynqp.connect(
# self.MQ_HOST,
# int(self.MQ_PORT),
# self.MQ_LOGIN,
# self.MQ_PASSWORD,
# self.MQ_VIRTUAL_HOST
# )
try:
self.connection = await asynqp.connect(
self.MQ_HOST,
int(self.MQ_PORT),
self.MQ_LOGIN,
self.MQ_PASSWORD,
self.MQ_VIRTUAL_HOST
)
self.channel = await self.connection.open_channel()
self.channel.set_return_handler(log_returned_message)
self.exchange = await self.channel.declare_exchange(self.EXCHANGE,
'topic')
self.queue = await self.channel.declare_queue(self.queue_name,
auto_delete=True)
for routing_key in self.routing_keys:
await self.queue.bind(self.exchange, routing_key)
self.consumer = await self.queue.consume(self.handle_message)
except asynqp.AMQPError as err:
log.error("Could not consume on queue".format(err))
if self.connection:
await self.connection.close()
# except ConnectionLostError:
# log.error('Amqp Connection Lost.')
# if self.connection:
# await self.connection.close()
except Exception as err:
log.error('Amqp Connection Error: {}'.format(err))
if self.connection:
await self.connection.close()
async def reconnector(self):
try:
while True:
if self.connection is None or self.connection.is_closed():
url = 'amqp://{}:{}@{}:{}{}'.format(self.MQ_LOGIN,
self.MQ_PASSWORD,
self.MQ_HOST,
self.MQ_PORT,
self.MQ_VIRTUAL_HOST)
log.info("Connecting to rabbitmq [{}] ...".format(url))
try:
await self._connect()
except Exception as err:
log.error("Failed to connect to rabbitmq Error: {} "
"Will retry in {} seconds"
.format(err, RECONNECT_BACKOFF))
self.connection = None
if self.connection is None:
await asyncio.sleep(RECONNECT_BACKOFF)
else:
log.info("RabbitMQ Successfully connected. ")
# poll connection state every 500 ms
await asyncio.sleep(0.5)
except asyncio.CancelledError:
if self.connection is not None:
await self.connection.close()
except Exception as err:
log.error("Connect to rabbitmq have Error: {}".format(err))
if self.connection is not None:
await self.connection.close()
def publish(self, mesg, outgoing_key=None):
"""Route publish packet from client to message queue"""
try:
key = outgoing_key or self.outgoing_key or ''
msg = asynqp.Message(mesg, content_encoding='utf-8')
if self.exchange:
self.exchange.publish(msg, key)
log.info("To %s: %s", key, mesg)
else:
log.error("Could not publish, because exchange is not exist")
except Exception as err:
log.error('Could not publish because of an error: {}'.format(err))
def handle_message(self, message):
"""Handle message coming from rabbitmq and route them to the
respective clients"""
routing_key = message.routing_key
json = message.json()
log.info("From [%s] %s", routing_key, json)
if self.callback:
asyncio.ensure_future(self.callback(json))
def main(debug=True):
# configure log
log = logging.getLogger("")
formatter = logging.Formatter("%(asctime)s %(levelname)s " +
"[%(module)s] %(message)s")
# log the things
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
if debug:
ch.setLevel(logging.DEBUG)
else:
ch.setLevel(logging.INFO)
ch.setFormatter(formatter)
log.addHandler(ch)
global loop
loop = asyncio.get_event_loop()
loop.set_debug(0)
router = RouterMQ(outgoing_key='Alarms.keeper',
routing_keys=['Actions.*'],
queue_name='keeper',
host='localhost')
reconnect_amqp_task = loop.create_task(router.reconnector())
try:
loop.run_forever()
except KeyboardInterrupt:
reconnect_amqp_task.cancel()
loop.run_until_complete(reconnect_amqp_task)
finally:
loop.close()
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
exit(1)
| _url = urlparse(url)
_host = _url.hostname
_port = _url.port
_login = _url.username
_password = _url.password
_virtualhost = _url.path[1:] |
metrics.py | # Copyright (c) 2021, DjaoDjin inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from django.utils.translation import ugettext_lazy as _
from rest_framework.generics import GenericAPIView, ListAPIView
from rest_framework.response import Response
from .serializers import CartItemSerializer, LifetimeSerializer, MetricsSerializer
from .. import settings
from ..compat import reverse, six
from ..filters import DateRangeFilter
from ..metrics.base import (
abs_monthly_balances,
aggregate_transactions_by_period,
month_periods,
aggregate_transactions_change_by_period,
get_different_units,
)
from ..metrics.subscriptions import (
active_subscribers,
churn_subscribers,
subscribers_age,
)
from ..metrics.transactions import lifetime_value
from ..mixins import (
CartItemSmartListMixin,
CouponMixin,
ProviderMixin,
DateRangeContextMixin,
)
from ..models import CartItem, Plan, Transaction
from ..utils import convert_dates_to_utc, get_organization_model
LOGGER = logging.getLogger(__name__)
class BalancesAPIView(DateRangeContextMixin, ProviderMixin, GenericAPIView):
"""
Retrieves 12-month trailing deferred balances
    Generates a table of revenue (rows) per month (columns) for a default
balance sheet (Income, Backlog, Receivable).
**Tags**: metrics, provider, transactionmodel
**Examples**
.. code-block:: http
GET /api/metrics/cowork/balances/ HTTP/1.1
responds
.. code-block:: json
{
"title": "Balances",
"scale": 0.01,
"unit": "usd",
"table": [
{
"key": "Income",
"values": [
["2014-09-01T00:00:00Z", 0],
["2014-10-01T00:00:00Z", 1532624],
["2014-11-01T00:00:00Z", 2348340],
["2014-12-01T00:00:00Z", 3244770],
["2015-01-01T00:00:00Z", 5494221],
["2015-02-01T00:00:00Z", 7214221],
["2015-03-01T00:00:00Z", 8444221],
["2015-04-01T00:00:00Z", 9784221],
["2015-05-01T00:00:00Z", 12784221],
["2015-06-01T00:00:00Z", 14562341],
["2015-07-01T00:00:00Z", 16567341],
["2015-08-01T00:00:00Z", 17893214],
["2015-08-06T02:24:50.485Z", 221340]
]
},
{
"key": "Backlog",
"values": [
["2014-09-01T00:00:00Z", 1712624],
["2014-10-01T00:00:00Z", 3698340],
["2014-11-01T00:00:00Z", 7214770],
["2014-12-01T00:00:00Z", 10494221],
["2015-01-01T00:00:00Z", 14281970],
["2015-02-01T00:00:00Z", 18762845],
["2015-03-01T00:00:00Z", 24258765],
["2015-04-01T00:00:00Z", 31937741],
["2015-05-01T00:00:00Z", 43002401],
["2015-06-01T00:00:00Z", 53331444],
["2015-07-01T00:00:00Z", 64775621],
["2015-08-01T00:00:00Z", 75050033],
["2015-08-06T02:24:50.485Z", 89156321]
]
},
{
"key": "Receivable",
"values": [
["2014-09-01T00:00:00Z", 0],
["2014-10-01T00:00:00Z", 0],
["2014-11-01T00:00:00Z", 0],
["2014-12-01T00:00:00Z", 0],
["2015-01-01T00:00:00Z", 0],
["2015-02-01T00:00:00Z", 0],
["2015-03-01T00:00:00Z", 0],
["2015-04-01T00:00:00Z", 0],
["2015-05-01T00:00:00Z", 0],
["2015-06-01T00:00:00Z", 0],
["2015-07-01T00:00:00Z", 0],
["2015-08-01T00:00:00Z", 0],
["2015-08-06T02:24:50.485Z", 0]
]
}
]
}
"""
serializer_class = MetricsSerializer
filter_backends = (DateRangeFilter,)
def get(self, request, *args, **kwargs): # pylint: disable=unused-argument
result = []
unit = settings.DEFAULT_UNIT
for key in [Transaction.INCOME, Transaction.BACKLOG, Transaction.RECEIVABLE]:
values, _unit = abs_monthly_balances(
organization=self.provider,
account=key,
until=self.ends_at,
tz=self.timezone,
)
if _unit:
unit = _unit
result += [{"key": key, "values": values}]
return Response(
{"title": "Balances", "unit": unit, "scale": 0.01, "table": result}
)
class RevenueMetricAPIView(DateRangeContextMixin, ProviderMixin, GenericAPIView):
"""
Retrieves 12-month trailing revenue
Produces sales, payments and refunds over a period of time.
The API is typically used within an HTML
`revenue page </docs/themes/#dashboard_metrics_revenue>`_
as present in the default theme.
**Tags**: metrics, provider, transactionmodel
**Examples**
.. code-block:: http
GET /api/metrics/cowork/funds/ HTTP/1.1
responds
.. code-block:: json
{
"title": "Amount",
"scale": 0.01,
"unit": "usd",
"table": [
{
"key": "Total Sales",
"values": [
["2014-10-01T00:00:00Z", 1985716],
["2014-11-01T00:00:00Z", 3516430],
["2014-12-01T00:00:00Z", 3279451],
["2015-01-01T00:00:00Z", 3787749],
["2015-02-01T00:00:00Z", 4480875],
["2015-03-01T00:00:00Z", 5495920],
["2015-04-01T00:00:00Z", 7678976],
["2015-05-01T00:00:00Z", 11064660],
["2015-06-01T00:00:00Z", 10329043],
["2015-07-01T00:00:00Z", 11444177],
["2015-08-01T00:00:00Z", 10274412],
["2015-08-06T04:59:14.721Z", 14106288]
]
},
{
"key": "New Sales",
"values": [
["2014-10-01T00:00:00Z", 0],
["2014-11-01T00:00:00Z", 0],
["2014-12-01T00:00:00Z", 0],
["2015-01-01T00:00:00Z", 0],
["2015-02-01T00:00:00Z", 0],
["2015-03-01T00:00:00Z", 0],
["2015-04-01T00:00:00Z", 0],
["2015-05-01T00:00:00Z", 0],
["2015-06-01T00:00:00Z", 0],
["2015-07-01T00:00:00Z", 0],
["2015-08-01T00:00:00Z", 0],
["2015-08-06T04:59:14.721Z", 0]
]
},
{
"key": "Churned Sales",
"values": [
["2014-10-01T00:00:00Z", 0],
["2014-11-01T00:00:00Z", 0],
["2014-12-01T00:00:00Z", 0],
["2015-01-01T00:00:00Z", 0],
["2015-02-01T00:00:00Z", 0],
["2015-03-01T00:00:00Z", 0],
["2015-04-01T00:00:00Z", 0],
["2015-05-01T00:00:00Z", 0],
["2015-06-01T00:00:00Z", 0],
["2015-07-01T00:00:00Z", 0],
["2015-08-01T00:00:00Z", 0],
["2015-08-06T04:59:14.721Z", 0]
]
},
{
"key": "Payments",
"values": [
["2014-10-01T00:00:00Z", 1787144],
["2014-11-01T00:00:00Z", 3164787],
["2014-12-01T00:00:00Z", 2951505],
["2015-01-01T00:00:00Z", 3408974],
["2015-02-01T00:00:00Z", 4032787],
["2015-03-01T00:00:00Z", 4946328],
["2015-04-01T00:00:00Z", 6911079],
["2015-05-01T00:00:00Z", 9958194],
["2015-06-01T00:00:00Z", 9296138],
["2015-07-01T00:00:00Z", 10299759],
["2015-08-01T00:00:00Z", 9246970],
["2015-08-06T04:59:14.721Z", 12695659]
]
},
{
"key": "Refunds",
"values": [
["2014-10-01T00:00:00Z", 0],
["2014-11-01T00:00:00Z", 0],
["2014-12-01T00:00:00Z", 0],
["2015-01-01T00:00:00Z", 0],
["2015-02-01T00:00:00Z", 0],
["2015-03-01T00:00:00Z", 0],
["2015-04-01T00:00:00Z", 0],
["2015-05-01T00:00:00Z", 0],
["2015-06-01T00:00:00Z", 0],
["2015-07-01T00:00:00Z", 0],
["2015-08-01T00:00:00Z", 0],
["2015-08-06T04:59:14.721Z", 0]
]
}
]
}
""" | filter_backends = (DateRangeFilter,)
def get(self, request, *args, **kwargs):
# pylint:disable=unused-argument
dates = convert_dates_to_utc(month_periods(12, self.ends_at, tz=self.timezone))
unit = settings.DEFAULT_UNIT
account_table, _, _, table_unit = aggregate_transactions_change_by_period(
self.provider,
Transaction.RECEIVABLE,
account_title="Sales",
orig="orig",
dest="dest",
date_periods=dates,
)
_, payment_amounts, payments_unit = aggregate_transactions_by_period(
self.provider,
Transaction.RECEIVABLE,
orig="dest",
dest="dest",
orig_account=Transaction.BACKLOG,
orig_organization=self.provider,
date_periods=dates,
)
_, refund_amounts, refund_unit = aggregate_transactions_by_period(
self.provider,
Transaction.REFUND,
orig="dest",
dest="dest",
date_periods=dates,
)
units = get_different_units(table_unit, payments_unit, refund_unit)
if len(units) > 1:
LOGGER.error("different units in RevenueMetricAPIView.get: %s", units)
if units:
unit = units[0]
account_table += [
{"key": "Payments", "values": payment_amounts},
{"key": "Refunds", "values": refund_amounts},
]
resp = {"title": "Amount", "unit": unit, "scale": 0.01, "table": account_table}
if not self.provider.has_bank_account:
resp.update({"processor_hint": "connect_provider"})
return Response(resp)
class CouponUsesQuerysetMixin(object):
def get_queryset(self):
return CartItem.objects.filter(coupon=self.coupon, recorded=True)
class CouponUsesAPIView(
CartItemSmartListMixin, CouponUsesQuerysetMixin, CouponMixin, ListAPIView
):
"""
Retrieves performance of a discount code
Returns a list of {{PAGE_SIZE}} cart items on which coupon with
code {coupon} was used. Coupon {coupon} must have been created by
provider {organization}.
The queryset can be further refined to match a search filter (``q``)
and/or a range of dates ([``start_at``, ``ends_at``]),
and sorted on specific fields (``o``).
**Tags**: metrics, provider, couponmodel
**Examples**
.. code-block:: http
GET /api/metrics/cowork/coupons/DIS100/ HTTP/1.1
responds
.. code-block:: json
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"user": {
"slug": "xia",
"created_at": "2012-09-14T23:16:55Z",
"email": "[email protected]",
"full_name": "Xia Doe",
"printable_name": "Xia Doe",
"username": "xia"
},
"plan": "basic",
"created_at": "2014-01-01T09:00:00Z"
}
]
}
"""
forced_date_range = False
serializer_class = CartItemSerializer
class CustomerMetricAPIView(DateRangeContextMixin, ProviderMixin, GenericAPIView):
"""
Retrieves 12-month trailing customer counts
The API is typically used within an HTML
`revenue page </docs/themes/#dashboard_metrics_revenue>`_
as present in the default theme.
**Tags**: metrics, provider, profilemodel
**Examples**
.. code-block:: http
GET /api/metrics/cowork/customers/ HTTP/1.1
responds
.. code-block:: json
{
"title": "Customers",
"table": [
{
"key": "Total # of Customers",
"values": [
["2014-10-01T00:00:00Z", 15],
["2014-11-01T00:00:00Z", 17],
["2014-12-01T00:00:00Z", 19],
["2015-01-01T00:00:00Z", 19],
["2015-02-01T00:00:00Z", 25],
["2015-03-01T00:00:00Z", 29],
["2015-04-01T00:00:00Z", 37],
["2015-05-01T00:00:00Z", 43],
["2015-06-01T00:00:00Z", 46],
["2015-07-01T00:00:00Z", 48],
["2015-08-01T00:00:00Z", 54],
["2015-08-06T05:20:24.537Z", 60]
]
},
{
"key": "# of new Customers",
"values": [
["2014-10-01T00:00:00Z", 2],
["2014-11-01T00:00:00Z", 2],
["2014-12-01T00:00:00Z", 0],
["2015-01-01T00:00:00Z", 6],
["2015-02-01T00:00:00Z", 4],
["2015-03-01T00:00:00Z", 8],
["2015-04-01T00:00:00Z", 6],
["2015-05-01T00:00:00Z", 3],
["2015-06-01T00:00:00Z", 2],
["2015-07-01T00:00:00Z", 6],
["2015-08-01T00:00:00Z", 7],
["2015-08-06T05:20:24.537Z", 0]
]
},
{
"key": "# of churned Customers",
"values": [
["2014-10-01T00:00:00Z", 0],
["2014-11-01T00:00:00Z", 0],
["2014-12-01T00:00:00Z", 0],
["2015-01-01T00:00:00Z", 0],
["2015-02-01T00:00:00Z", 0],
["2015-03-01T00:00:00Z", 0],
["2015-04-01T00:00:00Z", 0],
["2015-05-01T00:00:00Z", 0],
["2015-06-01T00:00:00Z", 0],
["2015-07-01T00:00:00Z", 0],
["2015-08-01T00:00:00Z", 1],
["2015-08-06T05:20:24.537Z", 60]
]
},
{
"key": "Net New Customers",
"values": [
["2014-10-01T00:00:00Z", 2],
["2014-11-01T00:00:00Z", 2],
["2014-12-01T00:00:00Z", 0],
["2015-01-01T00:00:00Z", 6],
["2015-02-01T00:00:00Z", 4],
["2015-03-01T00:00:00Z", 8],
["2015-04-01T00:00:00Z", 6],
["2015-05-01T00:00:00Z", 3],
["2015-06-01T00:00:00Z", 2],
["2015-07-01T00:00:00Z", 6],
["2015-08-01T00:00:00Z", 6],
["2015-08-06T05:20:24.537Z", -60]
]
}
],
"extra": [
{
"key": "% Customer Churn",
"values": [
["2014-10-01T00:00:00Z", 0],
["2014-11-01T00:00:00Z", 0.0],
["2014-12-01T00:00:00Z", 0.0],
["2015-01-01T00:00:00Z", 0.0],
["2015-02-01T00:00:00Z", 0.0],
["2015-03-01T00:00:00Z", 0.0],
["2015-04-01T00:00:00Z", 0.0],
["2015-05-01T00:00:00Z", 0.0],
["2015-06-01T00:00:00Z", 0.0],
["2015-07-01T00:00:00Z", 0.0],
["2015-08-01T00:00:00Z", 2.08],
["2015-08-06T05:20:24.537Z", 111.11]
]
}
]
}
"""
serializer_class = MetricsSerializer
filter_backends = (DateRangeFilter,)
def get(self, request, *args, **kwargs):
# pylint:disable=unused-argument
account_title = "Payments"
account = Transaction.RECEIVABLE
        # We use ``Transaction.RECEIVABLE``, which technically counts the
        # number of orders, not the number of payments.
dates = convert_dates_to_utc(month_periods(12, self.ends_at, tz=self.timezone))
_, customer_table, customer_extra, _ = aggregate_transactions_change_by_period(
self.provider, account, account_title=account_title, date_periods=dates
)
return Response(
{"title": "Customers", "table": customer_table, "extra": customer_extra}
)
class LifetimeValueMetricMixin(DateRangeContextMixin, ProviderMixin):
"""
Decorates profiles with subscriber age and lifetime value
"""
filter_backends = (DateRangeFilter,)
def get_queryset(self):
organization_model = get_organization_model()
if self.provider:
queryset = organization_model.objects.filter(
subscribes_to__organization=self.provider
).distinct()
else:
queryset = organization_model.objects.all()
queryset = queryset.filter(
outgoing__orig_account=Transaction.PAYABLE
).distinct()
return queryset.order_by("full_name")
def decorate_queryset(self, queryset):
decorated_queryset = list(queryset)
subscriber_ages = {
subscriber["slug"]: subscriber
for subscriber in subscribers_age(provider=self.provider)
}
customer_values = lifetime_value(provider=self.provider)
for organization in decorated_queryset:
subscriber = subscriber_ages.get(organization.slug)
if subscriber:
organization.created_at = subscriber["created_at"]
organization.ends_at = subscriber["ends_at"]
else:
organization.ends_at = None
customer = customer_values.get(organization.slug)
if customer:
for unit, val in six.iteritems(customer):
# XXX Only supports one currency unit.
organization.unit = unit
organization.contract_value = val["contract_value"]
organization.cash_payments = val["payments"]
organization.deferred_revenue = val["deferred_revenue"]
else:
organization.unit = settings.DEFAULT_UNIT
organization.contract_value = 0
organization.cash_payments = 0
organization.deferred_revenue = 0
return decorated_queryset
class LifetimeValueMetricAPIView(LifetimeValueMetricMixin, ListAPIView):
"""
Retrieves customers lifetime value
**Tags**: metrics, provider, profilemodel
**Examples**
.. code-block:: http
GET /api/metrics/cowork/lifetimevalue/ HTTP/1.1
responds
.. code-block:: json
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"slug": "xia",
"email": "[email protected]",
"full_name": "Xia Doe",
"created_at": "2014-01-01T09:00:00Z",
"ends_at": "2014-01-01T09:00:00Z",
"unit": "usd",
"contract_value": 10000,
"cash_payments": 10000,
"deferred_revenue": 10000
}
]
}
"""
serializer_class = LifetimeSerializer
def paginate_queryset(self, queryset):
page = super(LifetimeValueMetricAPIView, self).paginate_queryset(queryset)
return self.decorate_queryset(page if page else queryset)
class PlanMetricAPIView(DateRangeContextMixin, ProviderMixin, GenericAPIView):
"""
Retrieves 12-month trailing plans performance
The API is typically used within an HTML
`plans metrics page </docs/themes/#dashboard_metrics_plans>`_
as present in the default theme.
**Tags**: metrics, provider, planmodel
**Examples**
.. code-block:: http
GET /api/metrics/cowork/plans/ HTTP/1.1
responds
.. code-block:: json
{
"title": "Active Subscribers",
"table": [
{
"is_active": true,
"key": "open-space",
"location": "/profile/plan/open-space/",
"values": [
["2014-09-01T00:00:00Z", 4],
["2014-10-01T00:00:00Z", 5],
["2014-11-01T00:00:00Z", 6],
["2014-12-01T00:00:00Z", 6],
["2015-01-01T00:00:00Z", 6],
["2015-02-01T00:00:00Z", 9],
["2015-03-01T00:00:00Z", 9],
["2015-04-01T00:00:00Z", 9],
["2015-05-01T00:00:00Z", 11],
["2015-06-01T00:00:00Z", 11],
["2015-07-01T00:00:00Z", 14],
["2015-08-01T00:00:00Z", 16],
["2015-08-06T05:37:50.004Z", 16]
]
},
{
"is_active": true,
"key": "open-plus",
"location": "/profile/plan/open-plus/",
"values": [
["2014-09-01T00:00:00Z", 7],
["2014-10-01T00:00:00Z", 8],
["2014-11-01T00:00:00Z", 9],
["2014-12-01T00:00:00Z", 9],
["2015-01-01T00:00:00Z", 12],
["2015-02-01T00:00:00Z", 13],
["2015-03-01T00:00:00Z", 18],
["2015-04-01T00:00:00Z", 19],
["2015-05-01T00:00:00Z", 19],
["2015-06-01T00:00:00Z", 20],
["2015-07-01T00:00:00Z", 23],
["2015-08-01T00:00:00Z", 25],
["2015-08-06T05:37:50.014Z", 25]
]
},
{
"is_active": true,
"key": "private",
"location": "/profile/plan/private/",
"values": [
["2014-09-01T00:00:00Z", 3],
["2014-10-01T00:00:00Z", 3],
["2014-11-01T00:00:00Z", 3],
["2014-12-01T00:00:00Z", 3],
["2015-01-01T00:00:00Z", 6],
["2015-02-01T00:00:00Z", 7],
["2015-03-01T00:00:00Z", 10],
["2015-04-01T00:00:00Z", 15],
["2015-05-01T00:00:00Z", 16],
["2015-06-01T00:00:00Z", 17],
["2015-07-01T00:00:00Z", 17],
["2015-08-01T00:00:00Z", 18],
["2015-08-06T05:37:50.023Z", 18]
]
}
],
"extra": [
{
"key": "churn",
"values": [
["2014-09-01T00:00:00Z", 0],
["2014-10-01T00:00:00Z", 0],
["2014-11-01T00:00:00Z", 0],
["2014-12-01T00:00:00Z", 0],
["2015-01-01T00:00:00Z", 0],
["2015-02-01T00:00:00Z", 0],
["2015-03-01T00:00:00Z", 0],
["2015-04-01T00:00:00Z", 0],
["2015-05-01T00:00:00Z", 0],
["2015-06-01T00:00:00Z", 0],
["2015-07-01T00:00:00Z", 0],
["2015-08-01T00:00:00Z", 1],
["2015-08-06T05:37:50.031Z", 1]
]
}
]
}
"""
serializer_class = MetricsSerializer
filter_backends = (DateRangeFilter,)
def get(self, request, *args, **kwargs):
# pylint:disable=unused-argument
table = []
for plan in Plan.objects.filter(organization=self.provider).order_by("title"):
values = active_subscribers(plan, from_date=self.ends_at, tz=self.timezone)
table.append(
{
"key": plan.slug,
"title": plan.title,
"values": values,
"location": reverse("saas_plan_edit", args=(self.provider, plan)),
"is_active": plan.is_active,
}
)
extra = [
{
"key": "churn",
"values": churn_subscribers(from_date=self.ends_at, tz=self.timezone),
}
]
return Response(
{"title": _("Active subscribers"), "table": table, "extra": extra}
) |
serializer_class = MetricsSerializer |
vm_arguments_tests.rs | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use std::collections::HashMap;
use crate::move_vm::MoveVM;
use move_binary_format::{
errors::{VMError, VMResult},
file_format::{
empty_module, AbilitySet, AddressIdentifierIndex, Bytecode, CodeUnit, CompiledModule,
CompiledScript, FieldDefinition, FunctionDefinition, FunctionHandle, FunctionHandleIndex,
IdentifierIndex, ModuleHandle, ModuleHandleIndex, Signature, SignatureIndex,
SignatureToken, StructDefinition, StructFieldInformation, StructHandle, StructHandleIndex,
TableIndex, TypeSignature, Visibility,
},
};
use move_core_types::{
account_address::AccountAddress,
identifier::{IdentStr, Identifier},
language_storage::{ModuleId, StructTag, TypeTag},
resolver::{ModuleResolver, ResourceResolver},
value::{serialize_values, MoveValue},
vm_status::{StatusCode, StatusType},
};
use move_vm_types::gas_schedule::GasStatus;
// make a script with a given signature for main.
fn make_script(parameters: Signature) -> Vec<u8> {
let mut blob = vec![];
let mut signatures = vec![Signature(vec![])];
let parameters_idx = match signatures
.iter()
.enumerate()
.find(|(_, s)| *s == ¶meters)
{
Some((idx, _)) => SignatureIndex(idx as TableIndex),
None => {
signatures.push(parameters);
SignatureIndex((signatures.len() - 1) as TableIndex)
}
};
CompiledScript {
version: move_binary_format::file_format_common::VERSION_MAX,
module_handles: vec![],
struct_handles: vec![],
function_handles: vec![],
function_instantiations: vec![],
signatures,
identifiers: vec![],
address_identifiers: vec![],
constant_pool: vec![],
type_parameters: vec![],
parameters: parameters_idx,
code: CodeUnit {
locals: SignatureIndex(0),
code: vec![Bytecode::LdU64(0), Bytecode::Abort],
},
}
.serialize(&mut blob)
.expect("script must serialize");
blob
}
// make a script with an external function that has the same signature as
// the main. That allows us to pass resources and make the verifier happy that
// they are consumed.
// The dependency check happens after the main signature check, so we should expect
// a signature check error.
fn make_script_with_non_linking_structs(parameters: Signature) -> Vec<u8> {
let mut blob = vec![];
let mut signatures = vec![Signature(vec![])];
let parameters_idx = match signatures
.iter()
.enumerate()
.find(|(_, s)| *s == ¶meters)
{
Some((idx, _)) => SignatureIndex(idx as TableIndex),
None => {
signatures.push(parameters);
SignatureIndex((signatures.len() - 1) as TableIndex)
}
};
CompiledScript {
version: move_binary_format::file_format_common::VERSION_MAX,
module_handles: vec![ModuleHandle {
address: AddressIdentifierIndex(0),
name: IdentifierIndex(0),
}],
struct_handles: vec![StructHandle {
module: ModuleHandleIndex(0),
name: IdentifierIndex(1),
abilities: AbilitySet::EMPTY,
type_parameters: vec![],
}],
function_handles: vec![FunctionHandle {
module: ModuleHandleIndex(0),
name: IdentifierIndex(2),
parameters: SignatureIndex(1),
return_: SignatureIndex(0),
type_parameters: vec![],
}],
function_instantiations: vec![],
signatures,
identifiers: vec![
Identifier::new("one").unwrap(),
Identifier::new("two").unwrap(),
Identifier::new("three").unwrap(),
],
address_identifiers: vec![AccountAddress::random()],
constant_pool: vec![],
type_parameters: vec![],
parameters: parameters_idx,
code: CodeUnit {
locals: SignatureIndex(0),
code: vec![Bytecode::LdU64(0), Bytecode::Abort],
},
}
.serialize(&mut blob)
.expect("script must serialize");
blob
}
fn make_module_with_function(
visibility: Visibility,
parameters: Signature,
return_: Signature,
type_parameters: Vec<AbilitySet>,
) -> (CompiledModule, Identifier) {
let function_name = Identifier::new("foo").unwrap();
let mut signatures = vec![Signature(vec![])];
let parameters_idx = match signatures
.iter()
.enumerate()
.find(|(_, s)| *s == ¶meters)
{
Some((idx, _)) => SignatureIndex(idx as TableIndex),
None => {
signatures.push(parameters);
SignatureIndex((signatures.len() - 1) as TableIndex)
}
};
let return_idx = match signatures.iter().enumerate().find(|(_, s)| *s == &return_) {
Some((idx, _)) => SignatureIndex(idx as TableIndex),
None => {
signatures.push(return_);
SignatureIndex((signatures.len() - 1) as TableIndex)
}
};
let module = CompiledModule {
version: move_binary_format::file_format_common::VERSION_MAX,
self_module_handle_idx: ModuleHandleIndex(0),
module_handles: vec![ModuleHandle {
address: AddressIdentifierIndex(0),
name: IdentifierIndex(0),
}],
struct_handles: vec![StructHandle {
module: ModuleHandleIndex(0),
name: IdentifierIndex(1),
abilities: AbilitySet::EMPTY,
type_parameters: vec![],
}],
function_handles: vec![FunctionHandle {
module: ModuleHandleIndex(0),
name: IdentifierIndex(2),
parameters: parameters_idx,
return_: return_idx,
type_parameters,
}],
field_handles: vec![],
friend_decls: vec![],
struct_def_instantiations: vec![],
function_instantiations: vec![],
field_instantiations: vec![],
signatures,
identifiers: vec![
Identifier::new("M").unwrap(),
Identifier::new("X").unwrap(),
function_name.clone(),
],
address_identifiers: vec![AccountAddress::random()],
constant_pool: vec![],
struct_defs: vec![StructDefinition {
struct_handle: StructHandleIndex(0),
field_information: StructFieldInformation::Declared(vec![FieldDefinition {
name: IdentifierIndex(1),
signature: TypeSignature(SignatureToken::Bool),
}]),
}],
function_defs: vec![FunctionDefinition {
function: FunctionHandleIndex(0),
visibility,
acquires_global_resources: vec![],
code: Some(CodeUnit {
locals: SignatureIndex(0),
code: vec![Bytecode::LdU64(0), Bytecode::Abort],
}),
}],
};
(module, function_name)
}
// make a script function with a given signature for main.
fn make_script_function(signature: Signature) -> (CompiledModule, Identifier) {
make_module_with_function(Visibility::Script, signature, Signature(vec![]), vec![])
}
struct RemoteStore {
modules: HashMap<ModuleId, Vec<u8>>,
}
impl RemoteStore {
fn new() -> Self {
Self {
modules: HashMap::new(),
}
}
fn add_module(&mut self, compiled_module: CompiledModule) {
let id = compiled_module.self_id();
let mut bytes = vec![];
compiled_module.serialize(&mut bytes).unwrap();
self.modules.insert(id, bytes);
}
}
impl ModuleResolver for RemoteStore {
type Error = VMError;
fn get_module(&self, module_id: &ModuleId) -> Result<Option<Vec<u8>>, Self::Error> {
Ok(self.modules.get(module_id).cloned())
}
}
impl ResourceResolver for RemoteStore {
type Error = VMError;
fn get_resource(
&self,
_address: &AccountAddress,
_tag: &StructTag,
) -> Result<Option<Vec<u8>>, Self::Error> {
Ok(None)
}
}
fn call_script_with_args_ty_args_signers(
script: Vec<u8>,
args: Vec<Vec<u8>>,
ty_args: Vec<TypeTag>,
signers: Vec<AccountAddress>,
) -> VMResult<()> {
let move_vm = MoveVM::new(vec![]).unwrap();
let remote_view = RemoteStore::new();
let mut session = move_vm.new_session(&remote_view);
let mut gas_status = GasStatus::new_unmetered();
session.execute_script(script, ty_args, args, signers, &mut gas_status)
}
fn call_script(script: Vec<u8>, args: Vec<Vec<u8>>) -> VMResult<()> {
call_script_with_args_ty_args_signers(script, args, vec![], vec![])
}
fn call_script_function_with_args_ty_args_signers(
module: CompiledModule,
function_name: Identifier,
args: Vec<Vec<u8>>,
ty_args: Vec<TypeTag>,
signers: Vec<AccountAddress>,
) -> VMResult<()> {
let move_vm = MoveVM::new(vec![]).unwrap();
let mut remote_view = RemoteStore::new();
let id = module.self_id();
remote_view.add_module(module);
let mut session = move_vm.new_session(&remote_view);
let mut gas_status = GasStatus::new_unmetered();
session.execute_script_function(
&id,
function_name.as_ident_str(),
ty_args,
args,
signers,
&mut gas_status,
)?;
Ok(())
}
fn call_script_function(
module: CompiledModule,
function_name: Identifier,
args: Vec<Vec<u8>>,
) -> VMResult<()> {
call_script_function_with_args_ty_args_signers(module, function_name, args, vec![], vec![])
}
fn bad_signatures() -> Vec<Signature> {
vec![
// struct in signature
Signature(vec![SignatureToken::Struct(StructHandleIndex(0))]),
// struct in signature
Signature(vec![
SignatureToken::Bool,
SignatureToken::Struct(StructHandleIndex(0)),
SignatureToken::U64,
]),
// reference to struct in signature
Signature(vec![
SignatureToken::Address,
SignatureToken::MutableReference(Box::new(SignatureToken::Struct(StructHandleIndex(
0,
)))),
]),
// vector of struct in signature
Signature(vec![
SignatureToken::Bool,
SignatureToken::Vector(Box::new(SignatureToken::Struct(StructHandleIndex(0)))),
SignatureToken::U64,
]),
// vector of vector of struct in signature
Signature(vec![
SignatureToken::Bool,
SignatureToken::Vector(Box::new(SignatureToken::Vector(Box::new(
SignatureToken::Struct(StructHandleIndex(0)),
)))),
SignatureToken::U64,
]),
// reference to vector in signature
Signature(vec![SignatureToken::Reference(Box::new(
SignatureToken::Vector(Box::new(SignatureToken::Struct(StructHandleIndex(0)))),
))]),
// reference to vector in signature
Signature(vec![SignatureToken::Reference(Box::new(
SignatureToken::U64,
))]),
// `&Signer` in signature (not `Signer`)
Signature(vec![SignatureToken::Reference(Box::new(
SignatureToken::Signer,
))]),
// vector of `Signer` in signature
Signature(vec![SignatureToken::Vector(Box::new(
SignatureToken::Signer,
))]),
// `Signer` ref not first arg
Signature(vec![SignatureToken::Bool, SignatureToken::Signer]),
]
}
fn good_signatures_and_arguments() -> Vec<(Signature, Vec<MoveValue>)> {
vec![
// U128 arg
(
Signature(vec![SignatureToken::U128]),
vec![MoveValue::U128(0)],
),
// All constants
(
Signature(vec![SignatureToken::Vector(Box::new(SignatureToken::Bool))]),
vec![MoveValue::Vector(vec![
MoveValue::Bool(false),
MoveValue::Bool(true),
])],
),
// All constants
(
Signature(vec![
SignatureToken::Bool,
SignatureToken::Vector(Box::new(SignatureToken::U8)),
SignatureToken::Address,
]),
vec![
MoveValue::Bool(true),
MoveValue::vector_u8(vec![0, 1]),
MoveValue::Address(AccountAddress::random()),
],
),
// vector<vector<address>>
(
Signature(vec![
SignatureToken::Bool,
SignatureToken::Vector(Box::new(SignatureToken::U8)),
SignatureToken::Vector(Box::new(SignatureToken::Vector(Box::new(
SignatureToken::Address,
)))),
]),
vec![
MoveValue::Bool(true),
MoveValue::vector_u8(vec![0, 1]),
MoveValue::Vector(vec![
MoveValue::Vector(vec![
MoveValue::Address(AccountAddress::random()),
MoveValue::Address(AccountAddress::random()),
]),
MoveValue::Vector(vec![
MoveValue::Address(AccountAddress::random()),
MoveValue::Address(AccountAddress::random()),
]),
MoveValue::Vector(vec![
MoveValue::Address(AccountAddress::random()),
MoveValue::Address(AccountAddress::random()),
]),
]),
],
),
//
// Vector arguments
//
// empty vector
(
Signature(vec![SignatureToken::Vector(Box::new(
SignatureToken::Address,
))]),
vec![MoveValue::Vector(vec![])],
),
// one elem vector
(
Signature(vec![SignatureToken::Vector(Box::new(
SignatureToken::Address,
))]),
vec![MoveValue::Vector(vec![MoveValue::Address(
AccountAddress::random(),
)])],
),
// multiple elems vector
(
Signature(vec![SignatureToken::Vector(Box::new(
SignatureToken::Address,
))]),
vec![MoveValue::Vector(vec![
MoveValue::Address(AccountAddress::random()),
MoveValue::Address(AccountAddress::random()),
MoveValue::Address(AccountAddress::random()),
MoveValue::Address(AccountAddress::random()),
MoveValue::Address(AccountAddress::random()),
])],
),
// empty vector of vector
(
Signature(vec![SignatureToken::Vector(Box::new(
SignatureToken::Vector(Box::new(SignatureToken::U8)),
))]),
vec![MoveValue::Vector(vec![])],
),
// multiple element vector of vector
(
Signature(vec![SignatureToken::Vector(Box::new(
SignatureToken::Vector(Box::new(SignatureToken::U8)),
))]),
vec![MoveValue::Vector(vec![
MoveValue::vector_u8(vec![0, 1]),
MoveValue::vector_u8(vec![2, 3]),
MoveValue::vector_u8(vec![4, 5]),
])],
),
]
}
fn mismatched_cases() -> Vec<(Signature, Vec<MoveValue>, StatusCode)> {
vec![
// Too few args
(
Signature(vec![SignatureToken::U64]),
vec![],
StatusCode::NUMBER_OF_ARGUMENTS_MISMATCH,
),
// Too many args
(
Signature(vec![SignatureToken::Bool]),
vec![
MoveValue::Bool(false),
MoveValue::Bool(false),
MoveValue::Bool(false),
],
StatusCode::NUMBER_OF_ARGUMENTS_MISMATCH,
),
// Vec<bool> passed for vec<address>
(
Signature(vec![SignatureToken::Vector(Box::new(
SignatureToken::Address,
))]),
vec![MoveValue::Vector(vec![MoveValue::Bool(true)])],
StatusCode::FAILED_TO_DESERIALIZE_ARGUMENT,
),
// u128 passed for vec<address>
(
Signature(vec![SignatureToken::Vector(Box::new(
SignatureToken::Address,
))]),
vec![MoveValue::U128(12)],
StatusCode::FAILED_TO_DESERIALIZE_ARGUMENT,
),
// u8 passed for vector<vector<u8>>
(
Signature(vec![SignatureToken::Vector(Box::new(
SignatureToken::Vector(Box::new(SignatureToken::U8)),
))]),
vec![MoveValue::U8(12)],
StatusCode::FAILED_TO_DESERIALIZE_ARGUMENT,
),
]
}
fn general_cases() -> Vec<(
Signature,
Vec<MoveValue>,
Vec<AccountAddress>,
Option<StatusCode>,
)> {
vec![
// too few signers (0)
(
Signature(vec![SignatureToken::Signer, SignatureToken::Signer]),
vec![],
vec![],
Some(StatusCode::NUMBER_OF_SIGNER_ARGUMENTS_MISMATCH),
),
// too few signers (1)
(
Signature(vec![SignatureToken::Signer, SignatureToken::Signer]),
vec![],
vec![AccountAddress::random()],
Some(StatusCode::NUMBER_OF_SIGNER_ARGUMENTS_MISMATCH),
),
// too few signers (3)
(
Signature(vec![SignatureToken::Signer, SignatureToken::Signer]),
vec![],
vec![
AccountAddress::random(),
AccountAddress::random(),
AccountAddress::random(),
],
Some(StatusCode::NUMBER_OF_SIGNER_ARGUMENTS_MISMATCH),
),
// correct number of signers (2)
(
Signature(vec![SignatureToken::Signer, SignatureToken::Signer]),
vec![],
vec![AccountAddress::random(), AccountAddress::random()],
None,
),
// too many signers (1) in a script that expects 0 is ok
(
Signature(vec![SignatureToken::U8]),
vec![MoveValue::U8(0)],
vec![AccountAddress::random()],
None,
),
// signer
(
Signature(vec![
SignatureToken::Signer,
SignatureToken::Bool,
SignatureToken::Address,
]),
vec![
MoveValue::Bool(false),
MoveValue::Address(AccountAddress::random()),
],
vec![AccountAddress::random()],
None,
),
]
}
#[test]
fn check_script() {
//
// Bad signatures
//
for signature in bad_signatures() {
let script = make_script_with_non_linking_structs(signature);
assert_eq!(
call_script(script, serialize_values(&vec![MoveValue::U128(0)]))
.err()
.unwrap()
.major_status(),
StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE,
);
}
//
// Good signatures
//
for (signature, args) in good_signatures_and_arguments() {
// Body of the script is just an abort, so `ABORTED` means the script was accepted and ran
let expected_status = StatusCode::ABORTED;
let script = make_script(signature);
assert_eq!(
call_script(script, serialize_values(&args))
.err()
.unwrap()
.major_status(),
expected_status
)
}
//
// Mismatched Cases
//
for (signature, args, error) in mismatched_cases() {
let script = make_script(signature);
assert_eq!(
call_script(script, serialize_values(&args))
.err()
.unwrap()
.major_status(),
error
);
}
for (signature, args, signers, expected_status_opt) in general_cases() {
// Body of the script is just an abort, so `ABORTED` means the script was accepted and ran
let expected_status = expected_status_opt.unwrap_or(StatusCode::ABORTED);
let script = make_script(signature);
assert_eq!(
call_script_with_args_ty_args_signers(script, serialize_values(&args), vec![], signers)
.err()
.unwrap()
.major_status(),
expected_status
);
}
}
#[test]
fn check_script_function() |
#[test]
fn call_missing_item() {
let module = empty_module();
let id = &module.self_id();
let function_name = IdentStr::new("foo").unwrap();
    // missing module
let move_vm = MoveVM::new(vec![]).unwrap();
let mut remote_view = RemoteStore::new();
let mut session = move_vm.new_session(&remote_view);
let mut gas_status = GasStatus::new_unmetered();
let error = session
.execute_script_function(id, function_name, vec![], vec![], vec![], &mut gas_status)
.err()
.unwrap();
assert_eq!(error.major_status(), StatusCode::LINKER_ERROR);
assert_eq!(error.status_type(), StatusType::Verification);
// missing function
remote_view.add_module(module);
let mut session = move_vm.new_session(&remote_view);
let error = session
.execute_script_function(id, function_name, vec![], vec![], vec![], &mut gas_status)
.err()
.unwrap();
assert_eq!(
error.major_status(),
StatusCode::FUNCTION_RESOLUTION_FAILURE
);
assert_eq!(error.status_type(), StatusType::Verification);
}
| {
//
// Bad signatures
//
for signature in bad_signatures() {
let (module, function_name) = make_script_function(signature);
let res = call_script_function(
module,
function_name,
serialize_values(&vec![MoveValue::U128(0)]),
)
.err()
.unwrap();
assert_eq!(
res.major_status(),
StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE,
);
}
//
// Good signatures
//
for (signature, args) in good_signatures_and_arguments() {
// Body of the script is just an abort, so `ABORTED` means the script was accepted and ran
let expected_status = StatusCode::ABORTED;
let (module, function_name) = make_script_function(signature);
assert_eq!(
call_script_function(module, function_name, serialize_values(&args))
.err()
.unwrap()
.major_status(),
expected_status
)
}
//
// Mismatched Cases
//
for (signature, args, error) in mismatched_cases() {
let (module, function_name) = make_script_function(signature);
assert_eq!(
call_script_function(module, function_name, serialize_values(&args))
.err()
.unwrap()
.major_status(),
error
);
}
for (signature, args, signers, expected_status_opt) in general_cases() {
// Body of the script is just an abort, so `ABORTED` means the script was accepted and ran
let expected_status = expected_status_opt.unwrap_or(StatusCode::ABORTED);
let (module, function_name) = make_script_function(signature);
assert_eq!(
call_script_function_with_args_ty_args_signers(
module,
function_name,
serialize_values(&args),
vec![],
signers
)
.err()
.unwrap()
.major_status(),
expected_status
);
}
//
// Non script visible
//
// public
let (module, function_name) = make_module_with_function(
Visibility::Public,
Signature(vec![]),
Signature(vec![]),
vec![],
);
assert_eq!(
call_script_function_with_args_ty_args_signers(
module,
function_name,
vec![],
vec![],
vec![],
)
.err()
.unwrap()
.major_status(),
StatusCode::EXECUTE_SCRIPT_FUNCTION_CALLED_ON_NON_SCRIPT_VISIBLE,
);
// private
let (module, function_name) = make_module_with_function(
Visibility::Private,
Signature(vec![]),
Signature(vec![]),
vec![],
);
assert_eq!(
call_script_function_with_args_ty_args_signers(
module,
function_name,
vec![],
vec![],
vec![],
)
.err()
.unwrap()
.major_status(),
StatusCode::EXECUTE_SCRIPT_FUNCTION_CALLED_ON_NON_SCRIPT_VISIBLE,
);
} |
unpackMFC.py | import numpy as np
import struct
# CMU Sphinx 4 mfc file opener
# takes file path as input
# Sphinx uses feature vectors of length 13 by default
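# File layout assumed by run() below (inferred from the struct.unpack calls):
# a big-endian int32 giving the total number of float values, followed by
# that many big-endian float32 values, frame-major. For 2 frames of 13
# features that is [int32: 26][26 x float32].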
def run(input, featureVectorSize):
    file = open(input, 'rb')  # binary mode: the contents are packed big-endian values
    size = struct.unpack('>i', file.read(4))[0]
    if size % featureVectorSize != 0:
print "ERR: unpackMFC.run().featureVectorSize is inconsistent with the feature count read from the file given."
print "File given: ", input
print "Feature count read: ", size
print "featureVectorSize: ", featureVectorSize
assert False
    out = np.zeros(shape=(size // featureVectorSize, featureVectorSize))
for i in range(size):
        out[i//featureVectorSize][i%featureVectorSize] = struct.unpack('>f', file.read(4))[0]
return out
# Returns windowed result with 0 padding
# e.g. for frames 1,2,3,4,5: [[1,2,3], [4,5,0]]
# windowSize is in frames
# a frame is 10ms
# recommended value: 1~3 sec
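# At 10ms per frame, windowSize=100 therefore covers 1 second of audio and
# windowSize=300 covers 3 seconds.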
def returnWindowed(input, featureVectorSize, windowSize):
raw = run(input, featureVectorSize)
numcells = len(raw) // windowSize
if len(raw) % windowSize > 0:
numcells += 1
raw = raw.flatten()
raw = np.append(raw, np.zeros(shape=(numcells*windowSize*featureVectorSize - len(raw))))
return raw.reshape(numcells, windowSize, featureVectorSize)
def | (input, featureVectorSize, windowSize):
out = []
if windowSize > 1:
for i in input:
out.append(returnWindowed(i, featureVectorSize, windowSize))
else:
for i in input:
out.append(run(i, featureVectorSize))
return out
# print returnWindowed("../SPK_DB/mfc13OnlySilences2e5/C002_M4_INDE_025.wav.mfc", 13, 100).shape
| runForAll |
s0092_reverse_linked_list_ii.rs | /**
* [92] Reverse Linked List II
*
* Given the head of a singly linked list and two integers left and right where left <= right, reverse the nodes of the list from position left to position right, and return the reversed list.
*
* Example 1:
* <img alt="" src="https://assets.leetcode.com/uploads/2021/02/19/rev2ex2.jpg" style="width: 542px; height: 222px;" />
* Input: head = [1,2,3,4,5], left = 2, right = 4
* Output: [1,4,3,2,5]
*
* Example 2:
*
* Input: head = [5], left = 1, right = 1
* Output: [5]
*
*
* Constraints:
*
* The number of nodes in the list is n.
* 1 <= n <= 500
* -500 <= Node.val <= 500
* 1 <= left <= right <= n
*
*
* Follow up: Could you do it in one pass?
*/
pub struct | {}
use crate::util::linked_list::{to_list, ListNode};
// problem: https://leetcode.com/problems/reverse-linked-list-ii/
// discuss: https://leetcode.com/problems/reverse-linked-list-ii/discuss/?currentPage=1&orderBy=most_votes&query=
// submission codes start here
// Definition for singly-linked list.
// #[derive(PartialEq, Eq, Clone, Debug)]
// pub struct ListNode {
// pub val: i32,
// pub next: Option<Box<ListNode>>
// }
//
// impl ListNode {
// #[inline]
// fn new(val: i32) -> Self {
// ListNode {
// next: None,
// val
// }
// }
// }
impl Solution {
// Credit: https://leetcode.com/problems/reverse-linked-list-ii/discuss/808421/Rust-solution-no-stack-0ms.
pub fn reverse_between(
head: Option<Box<ListNode>>,
left: i32,
right: i32,
) -> Option<Box<ListNode>> {
if left == 1 {
return Self::reverse(head, None, right - left + 1);
}
let mut count = 1;
let mut head = head;
let mut current = head.as_mut();
while let Some(node) = current {
count += 1;
if count == left {
node.next = Self::reverse(node.next.take(), None, right - left + 1);
break;
} else {
current = node.next.as_mut();
}
}
head
}
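    // Reverses the first `count` nodes of `head` by popping them onto `acc`
    // one at a time (so they end up in reverse order), then re-attaches the
    // remaining tail via `append`.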
fn reverse(
head: Option<Box<ListNode>>,
acc: Option<Box<ListNode>>,
count: i32,
) -> Option<Box<ListNode>> {
if count == 0 {
return Self::append(acc, head);
}
if let Some(mut node) = head {
let next = node.next;
node.next = acc;
Self::reverse(next, Some(node), count - 1)
} else {
acc
}
}
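    // Walks to the tail of `front` and links `back` onto its last node.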
fn append(
mut front: Option<Box<ListNode>>,
back: Option<Box<ListNode>>,
) -> Option<Box<ListNode>> {
let mut current = front.as_mut();
while let Some(node) = current {
if node.next.is_none() {
node.next = back;
break;
}
current = node.next.as_mut();
}
front
}
}
// submission codes end
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_0092_example_1() {
let head = linked![1, 2, 3, 4, 5];
let left = 2;
let right = 4;
let result = linked![1, 4, 3, 2, 5];
assert_eq!(Solution::reverse_between(head, left, right), result);
}
#[test]
fn test_0092_example_2() {
let head = linked![5];
let left = 1;
let right = 1;
let result = linked![5];
assert_eq!(Solution::reverse_between(head, left, right), result);
}
}
| Solution |
delete_policy.go | /*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package machineset
import (
"github.com/openshift/cluster-api/pkg/apis/machine/v1beta1"
)
type deletePriority int
const (
mustDelete deletePriority = 100
betterDelete deletePriority = 50
couldDelete deletePriority = 20
mustNotDelete deletePriority = 0
)
type deletePriorityFunc func(machine *v1beta1.Machine) deletePriority
// machineDeleteAnnotationKey annotates machines that should be deleted among the first ones
var machineDeleteAnnotationKey = "machine.openshift.io/cluster-api-delete-machine"
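// A machine carrying this annotation (any value works, only presence is
// checked below), e.g.
//
//   metadata:
//     annotations:
//       machine.openshift.io/cluster-api-delete-machine: ""
//
// is treated as mustDelete by simpleDeletePriority.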
func simpleDeletePriority(machine *v1beta1.Machine) deletePriority {
if machine.DeletionTimestamp != nil && !machine.DeletionTimestamp.IsZero() {
return mustDelete
}
if _, exists := machine.Annotations[machineDeleteAnnotationKey]; exists {
return mustDelete
}
if machine.Status.ErrorReason != nil || machine.Status.ErrorMessage != nil {
return betterDelete
}
return couldDelete
}
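// Example (sketch) with a hypothetical machine list: scaling down by two
// picks annotated or erroring machines before healthy ones.
//
//   victims := getMachinesToDeletePrioritized(machines, 2, simpleDeletePriority)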
// TODO: Define machines deletion policies.
// see: https://github.com/kubernetes/kube-deploy/issues/625
func getMachinesToDeletePrioritized(filteredMachines []*v1beta1.Machine, diff int, fun deletePriorityFunc) []*v1beta1.Machine {
if diff >= len(filteredMachines) {
return filteredMachines
} else if diff <= 0 {
return []*v1beta1.Machine{}
}
machines := make(map[deletePriority][]*v1beta1.Machine)
for _, machine := range filteredMachines {
priority := fun(machine)
machines[priority] = append(machines[priority], machine)
}
result := []*v1beta1.Machine{}
for _, priority := range []deletePriority{
mustDelete,
betterDelete,
couldDelete,
} {
result = append(result, machines[priority]...)
if len(result) >= diff {
break |
return result[:diff]
} | }
} |
misa.rs | //! misa register
use core::num::NonZeroUsize;
/// misa register
#[derive(Clone, Copy, Debug)]
pub struct | {
bits: NonZeroUsize,
}
/// Machine XLEN
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum MXL {
XLEN32,
XLEN64,
XLEN128,
}
impl Misa {
/// Returns the contents of the register as raw bits
pub fn bits(&self) -> usize {
self.bits.get()
}
/// Returns the machine xlen.
pub fn mxl(&self) -> MXL {
let value = match () {
#[cfg(target_pointer_width = "32")]
() => (self.bits() >> 30) as u8,
#[cfg(target_pointer_width = "64")]
() => (self.bits() >> 62) as u8,
};
match value {
1 => MXL::XLEN32,
2 => MXL::XLEN64,
3 => MXL::XLEN128,
_ => unreachable!(),
}
}
    /// Returns true when the given extension is implemented.
pub fn has_extension(&self, extension: char) -> bool {
let bit = extension as u8 - 65;
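        // 'A' is ASCII 65, so extension letters map onto bits 0 ('A')
        // through 25 ('Z'), matching the misa extension bit layout.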
if bit > 25 {
return false;
}
self.bits() & (1 << bit) == (1 << bit)
}
}
read_csr!(0x301, __read_misa);
/// Reads the CSR
#[inline]
pub fn read() -> Option<Misa> {
let r = unsafe{ _read() };
// When misa is hardwired to zero it means that the misa csr
// isn't implemented.
NonZeroUsize::new(r).map(|bits| Misa { bits })
}
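// Example (sketch): probing for the compressed ('C') extension; assumes a
// context where reading the misa CSR is permitted (e.g. M-mode).
//
//     if let Some(misa) = read() {
//         let _has_compressed = misa.has_extension('C');
//     }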
| Misa |
notification.go | package common
import (
"fmt"
"github.com/martinlindhe/notify"
)
func Notify(title string, text string) |
func Alert(title string, text string) {
notify.Alert("SwayIT", fmt.Sprintf("SwayIT - %s", title), text, "")
}
| {
notify.Notify("SwayIT", fmt.Sprintf("SwayIT - %s", title), text, "")
} |
test_visualization.py | import warnings
from os import path
from tempfile import TemporaryDirectory
import pytest
from petab.C import *
from petab.visualize import (plot_data_and_simulation,
plot_measurements_by_observable,
save_vis_spec)
import matplotlib.pyplot as plt
@pytest.fixture
def data_file_Fujita():
return "doc/example/example_Fujita/Fujita_measurementData.tsv"
@pytest.fixture
def condition_file_Fujita():
return "doc/example/example_Fujita/Fujita_experimentalCondition.tsv"
@pytest.fixture
def data_file_Fujita_wrongNoise():
return "doc/example/example_Fujita/Fujita_measurementData_wrongNoise.tsv"
@pytest.fixture
def data_file_Fujita_nanData():
return "doc/example/example_Fujita/Fujita_measurementData_nanData.tsv"
@pytest.fixture
def simu_file_Fujita():
return "doc/example/example_Fujita/Fujita_simulatedData.tsv"
@pytest.fixture
def data_file_Fujita_minimal():
return "doc/example/example_Fujita/Fujita_measurementData_minimal.tsv"
@pytest.fixture
def visu_file_Fujita_small():
return "doc/example/example_Fujita/Fujita_visuSpec_small.tsv"
@pytest.fixture
def visu_file_Fujita_wo_dsid():
return "doc/example/example_Fujita/visuSpecs/Fujita_visuSpec_1.tsv"
@pytest.fixture
def visu_file_Fujita_minimal():
return "doc/example/example_Fujita/visuSpecs/Fujita_visuSpec_mandatory.tsv"
@pytest.fixture
def visu_file_Fujita_empty():
return "doc/example/example_Fujita/visuSpecs/Fujita_visuSpec_empty.tsv"
@pytest.fixture
def data_file_Isensee():
return "doc/example/example_Isensee/Isensee_measurementData.tsv"
@pytest.fixture
def condition_file_Isensee():
return "doc/example/example_Isensee/Isensee_experimentalCondition.tsv"
@pytest.fixture
def vis_spec_file_Isensee():
return "doc/example/example_Isensee/Isensee_visualizationSpecification.tsv" | def simulation_file_Isensee():
return "doc/example/example_Isensee/Isensee_simulationData.tsv"
def test_visualization_with_vis_and_sim(data_file_Isensee,
condition_file_Isensee,
vis_spec_file_Isensee,
simulation_file_Isensee):
plot_data_and_simulation(data_file_Isensee,
condition_file_Isensee,
vis_spec_file_Isensee,
simulation_file_Isensee)
def test_visualization_with_vis(data_file_Isensee,
condition_file_Isensee,
vis_spec_file_Isensee):
plot_data_and_simulation(data_file_Isensee,
condition_file_Isensee,
vis_spec_file_Isensee)
def test_visualization_small_visu_file_w_datasetid(data_file_Fujita,
condition_file_Fujita,
visu_file_Fujita_small):
"""
    Test: visualization specification file with only a few columns,
    in particular a datasetId column
(optional columns are optional)
"""
plot_data_and_simulation(data_file_Fujita,
condition_file_Fujita,
visu_file_Fujita_small)
def test_visualization_small_visu_file_wo_datasetid(data_file_Fujita,
condition_file_Fujita,
visu_file_Fujita_wo_dsid):
"""
    Test: visualization specification file with only a few columns,
    in particular no datasetId column
(optional columns are optional)
"""
plot_data_and_simulation(data_file_Fujita,
condition_file_Fujita,
visu_file_Fujita_wo_dsid)
def test_visualization_minimal_visu_file(data_file_Fujita,
condition_file_Fujita,
visu_file_Fujita_minimal):
"""
    Test: visualization specification file with only the mandatory column plotId
(optional columns are optional)
"""
plot_data_and_simulation(data_file_Fujita,
condition_file_Fujita,
visu_file_Fujita_minimal)
def test_visualization_empty_visu_file(data_file_Fujita,
condition_file_Fujita,
visu_file_Fujita_empty):
"""
    Test: an empty visualization specification file should default to the
    routine used when no file is given at all
"""
plot_data_and_simulation(data_file_Fujita,
condition_file_Fujita,
visu_file_Fujita_empty)
def test_visualization_minimal_data_file(data_file_Fujita_minimal,
condition_file_Fujita,
visu_file_Fujita_small):
"""
    Test visualization for the case that the data file has only the mandatory columns
(optional columns are optional)
"""
plot_data_and_simulation(data_file_Fujita_minimal,
condition_file_Fujita,
visu_file_Fujita_small)
def test_visualization_with_dataset_list(data_file_Isensee,
condition_file_Isensee,
simulation_file_Isensee):
datasets = [['JI09_150302_Drg345_343_CycNuc__4_ABnOH_and_ctrl',
'JI09_150302_Drg345_343_CycNuc__4_ABnOH_and_Fsk'],
['JI09_160201_Drg453-452_CycNuc__ctrl',
'JI09_160201_Drg453-452_CycNuc__Fsk',
'JI09_160201_Drg453-452_CycNuc__Sp8_Br_cAMPS_AM']]
plot_data_and_simulation(data_file_Isensee,
condition_file_Isensee,
dataset_id_list=datasets)
plot_data_and_simulation(data_file_Isensee,
condition_file_Isensee,
sim_data=simulation_file_Isensee,
dataset_id_list=datasets)
def test_visualization_without_datasets(data_file_Fujita,
condition_file_Fujita,
simu_file_Fujita):
sim_cond_num_list = [[0, 1, 2], [0, 2, 3], [0, 3, 4], [0, 4, 5]]
sim_cond_id_list = [['model1_data1'], ['model1_data2', 'model1_data3'],
['model1_data4', 'model1_data5'], ['model1_data6']]
observable_num_list = [[0], [1], [2], [0, 2], [1, 2]]
observable_id_list = [['pS6_tot'], ['pEGFR_tot'], ['pAkt_tot']]
plot_data_and_simulation(data_file_Fujita, condition_file_Fujita,
sim_cond_num_list=sim_cond_num_list,
plotted_noise=PROVIDED)
plot_data_and_simulation(data_file_Fujita, condition_file_Fujita,
sim_data=simu_file_Fujita,
sim_cond_num_list=sim_cond_num_list,
plotted_noise=PROVIDED)
plot_data_and_simulation(data_file_Fujita, condition_file_Fujita,
sim_cond_id_list=sim_cond_id_list)
plot_data_and_simulation(data_file_Fujita, condition_file_Fujita,
sim_data=simu_file_Fujita,
sim_cond_id_list=sim_cond_id_list)
plot_data_and_simulation(data_file_Fujita, condition_file_Fujita,
observable_num_list=observable_num_list)
plot_data_and_simulation(data_file_Fujita, condition_file_Fujita,
sim_data=simu_file_Fujita,
observable_num_list=observable_num_list)
plot_data_and_simulation(data_file_Fujita, condition_file_Fujita,
observable_id_list=observable_id_list,
plotted_noise=PROVIDED)
plot_data_and_simulation(data_file_Fujita, condition_file_Fujita,
sim_data=simu_file_Fujita,
observable_id_list=observable_id_list,
plotted_noise=PROVIDED)
def test_visualization_omit_empty_datasets(data_file_Fujita_nanData,
condition_file_Fujita):
observable_num_list = [[0, 1]]
plot_data_and_simulation(data_file_Fujita_nanData, condition_file_Fujita,
observable_num_list=observable_num_list)
def test_visualization_raises(data_file_Fujita,
condition_file_Fujita,
data_file_Fujita_wrongNoise):
sim_cond_num_list = [[0, 1, 2], [0, 2, 3], [0, 3, 4], [0, 4, 5]]
sim_cond_id_list = [['model1_data1'], ['model1_data2', 'model1_data3'],
['model1_data4', 'model1_data5'], ['model1_data6']]
observable_num_list = [[0], [1], [2], [0, 2], [1, 2]]
observable_id_list = [['pS6_tot'], ['pEGFR_tot'], ['pAkt_tot']]
error_counter = 0
# Combining simulation condition numbers and IDs should not be allowed
try:
plot_data_and_simulation(data_file_Fujita, condition_file_Fujita,
sim_cond_num_list=sim_cond_num_list,
sim_cond_id_list=sim_cond_id_list)
except NotImplementedError as ErrMsg:
assert(ErrMsg.args[0] == 'Either specify a list of simulation '
'condition IDs or a list of simulation '
'condition numbers, but not both. '
'Stopping.')
error_counter += 1
assert (error_counter == 1)
# Combining observable numbers and IDs should not be allowed
try:
plot_data_and_simulation(data_file_Fujita, condition_file_Fujita,
observable_num_list=observable_num_list,
observable_id_list=observable_id_list)
except NotImplementedError as ErrMsg:
assert(ErrMsg.args[0] == 'Either specify a list of observable IDs or '
'a list of observable numbers, but not both. '
'Stopping.')
error_counter += 1
assert (error_counter == 2)
# Combining observable and simulation conditions numbers or IDs should not
# be allowed
try:
plot_data_and_simulation(data_file_Fujita, condition_file_Fujita,
sim_cond_num_list=observable_num_list,
observable_num_list=observable_num_list)
except NotImplementedError as ErrMsg:
assert(ErrMsg.args[0] == 'Plotting without visualization specification'
' file and datasetId can be performed via '
'grouping by simulation conditions OR '
'observables, but not both. Stopping.')
error_counter += 1
assert (error_counter == 3)
try:
plot_data_and_simulation(data_file_Fujita, condition_file_Fujita,
sim_cond_id_list=observable_id_list,
observable_id_list=observable_id_list)
except NotImplementedError as ErrMsg:
assert(ErrMsg.args[0] == 'Plotting without visualization specification'
' file and datasetId can be performed via '
'grouping by simulation conditions OR '
'observables, but not both. Stopping.')
error_counter += 1
assert (error_counter == 4)
# If no numerical noise is provided, it should not work to plot it
try:
plot_measurements_by_observable(data_file_Fujita_wrongNoise,
condition_file_Fujita,
plotted_noise='provided')
except NotImplementedError as ErrMsg:
assert(ErrMsg.args[0] == "No numerical noise values provided in the "
"measurement table. Stopping.")
error_counter += 1
assert (error_counter == 5)
def test_visualization_warnings(data_file_Isensee, condition_file_Isensee):
datasets = [['JI09_150302_Drg345_343_CycNuc__4_ABnOH_and_ctrl',
'JI09_150302_Drg345_343_CycNuc__4_ABnOH_and_Fsk'],
['JI09_160201_Drg453-452_CycNuc__ctrl',
'JI09_160201_Drg453-452_CycNuc__Fsk',
'JI09_160201_Drg453-452_CycNuc__Sp8_Br_cAMPS_AM']]
sim_cond_num_list = [[0, 1, 2], [0, 2, 3], [0, 3, 4], [0, 4, 5]]
observable_num_list = [[0], [1], [2], [0, 2], [1, 2]]
# close open figures to avoid runtime warnings
plt.close("all")
with warnings.catch_warnings(record=True) as warnMsg:
# Cause all warnings to always be triggered.
warnings.simplefilter("always")
# plotting with datasetIds and sim conditions should issue a warning
plot_data_and_simulation(data_file_Isensee,
condition_file_Isensee,
dataset_id_list=datasets,
sim_cond_num_list=sim_cond_num_list)
# plotting with datasetIds and observables should issue a warning
plot_data_and_simulation(data_file_Isensee,
condition_file_Isensee,
dataset_id_list=datasets,
observable_num_list=observable_num_list)
# plotting with datasetIds and observables and sim conditions should
# issue a warning
plot_data_and_simulation(data_file_Isensee,
condition_file_Isensee,
dataset_id_list=datasets,
observable_num_list=observable_num_list,
sim_cond_num_list=sim_cond_num_list)
# plotting grouped by something else than datasetIds should issue a
# warning if datasetsIDs would have been available
plot_data_and_simulation(data_file_Isensee,
condition_file_Isensee,
sim_cond_num_list=sim_cond_num_list)
# test correct number of warnings
warnings_list = [msg for msg in warnMsg if
not issubclass(msg.category, DeprecationWarning)]
assert len(warnings_list) == 4
# test that all warnings were indeed UserWarnings
for i_warn in warnings_list:
assert issubclass(i_warn.category, UserWarning)
def test_simple_visualization(data_file_Fujita, condition_file_Fujita):
plot_measurements_by_observable(data_file_Fujita, condition_file_Fujita)
plot_measurements_by_observable(data_file_Fujita, condition_file_Fujita,
plotted_noise=PROVIDED)
def test_save_plots_to_file(data_file_Isensee, condition_file_Isensee,
vis_spec_file_Isensee, simulation_file_Isensee):
with TemporaryDirectory() as temp_dir:
plot_data_and_simulation(
data_file_Isensee,
condition_file_Isensee,
vis_spec_file_Isensee,
simulation_file_Isensee,
subplot_file_path=temp_dir)
def test_save_visu_file(data_file_Isensee,
condition_file_Isensee):
with TemporaryDirectory() as temp_dir:
save_vis_spec(data_file_Isensee,
condition_file_Isensee,
output_file_path=path.join(temp_dir, "visuSpec.tsv"))
datasets = [['JI09_150302_Drg345_343_CycNuc__4_ABnOH_and_ctrl',
'JI09_150302_Drg345_343_CycNuc__4_ABnOH_and_Fsk'],
['JI09_160201_Drg453-452_CycNuc__ctrl',
'JI09_160201_Drg453-452_CycNuc__Fsk',
'JI09_160201_Drg453-452_CycNuc__Sp8_Br_cAMPS_AM']]
save_vis_spec(data_file_Isensee,
condition_file_Isensee,
dataset_id_list=datasets,
output_file_path=path.join(temp_dir, "visuSpec1.tsv")) |
@pytest.fixture |
UserTreePart.tsx | import * as React from 'react'
import { FindOptions } from '@framework/FindOptions'
import * as Finder from '@framework/Finder'
import { getQueryNiceName, getTypeInfos } from '@framework/Reflection'
import { Entity, Lite, is, JavascriptMessage } from '@framework/Signum.Entities'
import { SearchControl, ValueSearchControl } from '@framework/Search'
import * as UserQueryClient from '../../UserQueries/UserQueryClient'
import { PanelStyle, UserTreePartEntity } from '../Signum.Entities.Dashboard'
import { classes } from '@framework/Globals';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { parseIcon } from '../Admin/Dashboard';
import { useAPI } from '@framework/Hooks'
import { PanelPartContentProps } from '../DashboardClient'
import { TreeViewer } from '../../Tree/TreeViewer'
import { TreeOperation } from '../../Tree/Signum.Entities.Tree'
import * as Operations from '@framework/Operations'
import * as Navigator from '@framework/Navigator'
import { getTypeInfo } from '@framework/Reflection'
|
export default function UserTreePart(p: PanelPartContentProps<UserTreePartEntity>) {
const treeViewRef = React.useRef<TreeViewer>(null);
const fo = useAPI(signal => UserQueryClient.Converter.toFindOptions(p.part.userQuery, p.entity), [p.part.userQuery, p.entity]);
const qd = useAPI(() => Finder.getQueryDescription(p.part.userQuery.query.key), [p.part.userQuery.query.key]);
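  // useAPI yields undefined until the requests resolve, so render a loading placeholder meanwhile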
if (!fo || !qd)
return <span>{JavascriptMessage.loading.niceToString()}</span>;
const ti = getTypeInfos(qd.columns["Entity"].type).single();
return (
<TreeViewer ref={treeViewRef}
initialShowFilters={false}
typeName={ti.name}
      allowMove={Operations.tryGetOperationInfo(TreeOperation.Move, ti) != null}
filterOptions={fo.filterOptions ?? []}
key={ti.name}
/>
);
} | |
property.go | // Copyright (c) 2004-present Facebook All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package schema
import (
"github.com/facebookincubator/ent"
"github.com/facebookincubator/ent/schema/edge"
"github.com/facebookincubator/ent/schema/field"
"github.com/facebookincubator/ent/schema/index"
)
// PropertyType defines the property type schema.
type PropertyType struct {
schema
}
// Fields returns property type fields.
func (PropertyType) Fields() []ent.Field {
return []ent.Field{
field.String("type"),
field.String("name"),
field.Int("index").
Optional(),
field.String("category").
Optional(),
field.Int("int_val").
StructTag(`gqlgen:"intValue"`).
Optional(),
field.Bool("bool_val").
StructTag(`gqlgen:"booleanValue"`).
Optional(),
field.Float("float_val").
StructTag(`gqlgen:"floatValue"`).
Optional(),
field.Float("latitude_val").
StructTag(`gqlgen:"latitudeValue"`).
Optional(),
field.Float("longitude_val").
StructTag(`gqlgen:"longitudeValue"`).
Optional(),
field.String("string_val").
StructTag(`gqlgen:"stringValue"`).
Optional(),
field.Float("range_from_val").
StructTag(`gqlgen:"rangeFromValue"`).
Optional(),
field.Float("range_to_val").
StructTag(`gqlgen:"rangeToValue"`).
Optional(),
field.Bool("is_instance_property").
StructTag(`gqlgen:"isInstanceProperty"`).
Default(true),
field.Bool("editable").
StructTag(`gqlgen:"isEditable"`).
Default(true),
}
}
// Edges returns property type edges.
func (PropertyType) Edges() []ent.Edge {
return []ent.Edge{
edge.From("properties", Property.Type).
Ref("type"),
edge.From("location_type", LocationType.Type).
Ref("property_types").
Unique(),
edge.From("equipment_port_type", EquipmentPortType.Type).
Ref("property_types").
Unique(),
edge.From("link_equipment_port_type", EquipmentPortType.Type).
Ref("link_property_types").
Unique(),
edge.From("equipment_type", EquipmentType.Type).
Ref("property_types").
Unique(),
edge.From("service_type", ServiceType.Type).
Ref("property_types").
Unique(),
edge.From("work_order_type", WorkOrderType.Type).
Ref("property_types").
Unique(),
edge.From("project_type", ProjectType.Type).
Ref("properties").
Unique(),
}
}
// Indexes returns property type indexes.
func (PropertyType) Indexes() []ent.Index {
return []ent.Index{
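		// each (name, owner-edge) pair must be unique, so the same property-type
		// name can recur across different owning types but not within one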
index.Fields("name").
Edges("location_type").
Unique(),
index.Fields("name").
Edges("equipment_port_type").
Unique(),
index.Fields("name").
Edges("equipment_type").
Unique(),
index.Fields("name").
Edges("link_equipment_port_type").
Unique(),
index.Fields("name").
Edges("work_order_type").
Unique(),
}
}
// Property defines the property schema.
type Property struct {
schema
}
// Fields returns property fields.
func (Property) Fields() []ent.Field {
return []ent.Field{
field.Int("int_val").
StructTag(`gqlgen:"intValue"`).
Optional(),
field.Bool("bool_val").
StructTag(`gqlgen:"booleanValue"`).
Optional(),
field.Float("float_val").
StructTag(`gqlgen:"floatValue"`).
Optional(),
field.Float("latitude_val").
StructTag(`gqlgen:"latitudeValue"`).
Optional(),
field.Float("longitude_val").
StructTag(`gqlgen:"longitudeValue"`).
Optional(),
field.Float("range_from_val").
StructTag(`gqlgen:"rangeFromValue"`).
Optional(),
field.Float("range_to_val").
StructTag(`gqlgen:"rangeToValue"`).
Optional(),
field.String("string_val").
StructTag(`gqlgen:"stringValue"`). | }
}
// Edges returns property edges.
func (Property) Edges() []ent.Edge {
return []ent.Edge{
edge.To("type", PropertyType.Type).
Unique().
Required(),
edge.From("location", Location.Type).
Unique().
Ref("properties"),
edge.From("equipment", Equipment.Type).
Unique().
Ref("properties"),
edge.From("service", Service.Type).
Unique().
Ref("properties"),
edge.From("equipment_port", EquipmentPort.Type).
Unique().
Ref("properties"),
edge.From("link", Link.Type).
Unique().
Ref("properties"),
edge.From("work_order", WorkOrder.Type).
Unique().
Ref("properties"),
edge.From("project", Project.Type).
Ref("properties").
Unique(),
edge.To("equipment_value", Equipment.Type).
Unique(),
edge.To("location_value", Location.Type).
Unique(),
}
} | Optional(), |
client.go | package helm
import (
"bytes"
"crypto/tls"
"crypto/x509"
"errors"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
"os/exec"
"path"
"path/filepath"
"strings"
"time"
"github.com/argoproj/pkg/sync"
log "github.com/sirupsen/logrus"
"gopkg.in/yaml.v2"
"github.com/argoproj/argo-cd/v2/util/cache"
executil "github.com/argoproj/argo-cd/v2/util/exec"
"github.com/argoproj/argo-cd/v2/util/io"
"github.com/argoproj/argo-cd/v2/util/proxy"
)
var (
globalLock = sync.NewKeyLock()
indexLock = sync.NewKeyLock()
)
type Creds struct {
Username string
Password string
CAPath string
CertData []byte
KeyData []byte
InsecureSkipVerify bool
}
type indexCache interface {
SetHelmIndex(repo string, indexData []byte) error
GetHelmIndex(repo string, indexData *[]byte) error
}
type Client interface {
CleanChartCache(chart string, version string) error
ExtractChart(chart string, version string) (string, io.Closer, error)
GetIndex(noCache bool) (*Index, error)
TestHelmOCI() (bool, error)
}
type ClientOpts func(c *nativeHelmChart)
func WithIndexCache(indexCache indexCache) ClientOpts {
return func(c *nativeHelmChart) {
c.indexCache = indexCache
}
}
func NewClient(repoURL string, creds Creds, enableOci bool, proxy string, opts ...ClientOpts) Client {
return NewClientWithLock(repoURL, creds, globalLock, enableOci, proxy, opts...)
}
func | (repoURL string, creds Creds, repoLock sync.KeyLock, enableOci bool, proxy string, opts ...ClientOpts) Client {
c := &nativeHelmChart{
repoURL: repoURL,
creds: creds,
repoPath: filepath.Join(os.TempDir(), strings.Replace(repoURL, "/", "_", -1)),
repoLock: repoLock,
enableOci: enableOci,
proxy: proxy,
}
for i := range opts {
opts[i](c)
}
return c
}
var _ Client = &nativeHelmChart{}
type nativeHelmChart struct {
repoPath string
repoURL string
creds Creds
repoLock sync.KeyLock
enableOci bool
indexCache indexCache
proxy string
}
func fileExist(filePath string) (bool, error) {
if _, err := os.Stat(filePath); err != nil {
if os.IsNotExist(err) {
return false, nil
} else {
return false, err
}
}
return true, nil
}
func (c *nativeHelmChart) ensureHelmChartRepoPath() error {
c.repoLock.Lock(c.repoPath)
defer c.repoLock.Unlock(c.repoPath)
err := os.Mkdir(c.repoPath, 0700)
if err != nil && !os.IsExist(err) {
return err
}
return nil
}
func (c *nativeHelmChart) CleanChartCache(chart string, version string) error {
return os.RemoveAll(c.getCachedChartPath(chart, version))
}
func (c *nativeHelmChart) ExtractChart(chart string, version string) (string, io.Closer, error) {
err := c.ensureHelmChartRepoPath()
if err != nil {
return "", nil, err
}
// always use Helm V3 since we don't have chart content to determine correct Helm version
helmCmd, err := NewCmdWithVersion(c.repoPath, HelmV3, c.enableOci, c.proxy)
if err != nil {
return "", nil, err
}
defer helmCmd.Close()
_, err = helmCmd.Init()
if err != nil {
return "", nil, err
}
	// temp directory that stores the extracted chart; the returned closer deletes it once it is no longer needed
tempDir, err := ioutil.TempDir("", "helm")
if err != nil {
return "", nil, err
}
cachedChartPath := c.getCachedChartPath(chart, version)
c.repoLock.Lock(cachedChartPath)
defer c.repoLock.Unlock(cachedChartPath)
// check if chart tar is already downloaded
exists, err := fileExist(cachedChartPath)
if err != nil {
return "", nil, err
}
if !exists {
// create empty temp directory to extract chart from the registry
tempDest, err := ioutil.TempDir("", "helm")
if err != nil {
return "", nil, err
}
defer func() { _ = os.RemoveAll(tempDest) }()
if c.enableOci {
if c.creds.Password != "" && c.creds.Username != "" {
_, err = helmCmd.Login(c.repoURL, c.creds)
if err != nil {
return "", nil, err
}
defer func() {
_, _ = helmCmd.Logout(c.repoURL, c.creds)
}()
}
// 'helm chart pull' ensures that chart is downloaded into local repository cache
_, err = helmCmd.ChartPull(c.repoURL, chart, version)
if err != nil {
return "", nil, err
}
// 'helm chart export' copies cached chart into temp directory
_, err = helmCmd.ChartExport(c.repoURL, chart, version, tempDest)
if err != nil {
return "", nil, err
}
// use downloaded chart content to produce tar file in expected cache location
cmd := exec.Command("tar", "-zcvf", cachedChartPath, normalizeChartName(chart))
cmd.Dir = tempDest
_, err = executil.Run(cmd)
if err != nil {
return "", nil, err
}
} else {
_, err = helmCmd.Fetch(c.repoURL, chart, version, tempDest, c.creds)
if err != nil {
return "", nil, err
}
			// 'helm fetch' downloads the chart as a tgz file; move it to the expected cache location
infos, err := ioutil.ReadDir(tempDest)
if err != nil {
return "", nil, err
}
if len(infos) != 1 {
return "", nil, fmt.Errorf("expected 1 file, found %v", len(infos))
}
err = os.Rename(filepath.Join(tempDest, infos[0].Name()), cachedChartPath)
if err != nil {
return "", nil, err
}
}
}
cmd := exec.Command("tar", "-zxvf", cachedChartPath)
cmd.Dir = tempDir
_, err = executil.Run(cmd)
if err != nil {
_ = os.RemoveAll(tempDir)
return "", nil, err
}
return path.Join(tempDir, normalizeChartName(chart)), io.NewCloser(func() error {
return os.RemoveAll(tempDir)
}), nil
}
func (c *nativeHelmChart) GetIndex(noCache bool) (*Index, error) {
indexLock.Lock(c.repoURL)
defer indexLock.Unlock(c.repoURL)
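	// Serve the index from cache when permitted; otherwise download index.yaml
	// and refresh the cache entry.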
var data []byte
if !noCache && c.indexCache != nil {
if err := c.indexCache.GetHelmIndex(c.repoURL, &data); err != nil && err != cache.ErrCacheMiss {
log.Warnf("Failed to load index cache for repo: %s: %v", c.repoURL, err)
}
}
if len(data) == 0 {
start := time.Now()
var err error
data, err = c.loadRepoIndex()
if err != nil {
return nil, err
}
log.WithFields(log.Fields{"seconds": time.Since(start).Seconds()}).Info("took to get index")
if c.indexCache != nil {
if err := c.indexCache.SetHelmIndex(c.repoURL, data); err != nil {
log.Warnf("Failed to store index cache for repo: %s: %v", c.repoURL, err)
}
}
}
index := &Index{}
err := yaml.NewDecoder(bytes.NewBuffer(data)).Decode(index)
if err != nil {
return nil, err
}
return index, nil
}
func (c *nativeHelmChart) TestHelmOCI() (bool, error) {
start := time.Now()
tmpDir, err := ioutil.TempDir("", "helm")
if err != nil {
return false, err
}
defer func() { _ = os.RemoveAll(tmpDir) }()
helmCmd, err := NewCmdWithVersion(tmpDir, HelmV3, c.enableOci, c.proxy)
if err != nil {
return false, err
}
defer helmCmd.Close()
// Looks like there is no good way to test access to OCI repo if credentials are not provided
// just assume it is accessible
if c.creds.Username != "" && c.creds.Password != "" {
_, err = helmCmd.Login(c.repoURL, c.creds)
if err != nil {
return false, err
}
defer func() {
_, _ = helmCmd.Logout(c.repoURL, c.creds)
}()
log.WithFields(log.Fields{"seconds": time.Since(start).Seconds()}).Info("took to test helm oci repository")
}
return true, nil
}
func (c *nativeHelmChart) loadRepoIndex() ([]byte, error) {
repoURL, err := url.Parse(c.repoURL)
if err != nil {
return nil, err
}
repoURL.Path = path.Join(repoURL.Path, "index.yaml")
req, err := http.NewRequest("GET", repoURL.String(), nil)
if err != nil {
return nil, err
}
if c.creds.Username != "" || c.creds.Password != "" {
// only basic supported
req.SetBasicAuth(c.creds.Username, c.creds.Password)
}
tlsConf, err := newTLSConfig(c.creds)
if err != nil {
return nil, err
}
tr := &http.Transport{
Proxy: proxy.GetCallback(c.proxy),
TLSClientConfig: tlsConf,
}
client := http.Client{Transport: tr}
resp, err := client.Do(req)
if err != nil {
return nil, err
}
defer func() { _ = resp.Body.Close() }()
if resp.StatusCode != 200 {
return nil, errors.New("failed to get index: " + resp.Status)
}
return ioutil.ReadAll(resp.Body)
}
func newTLSConfig(creds Creds) (*tls.Config, error) {
tlsConfig := &tls.Config{InsecureSkipVerify: creds.InsecureSkipVerify}
if creds.CAPath != "" {
caData, err := ioutil.ReadFile(creds.CAPath)
if err != nil {
return nil, err
}
caCertPool := x509.NewCertPool()
caCertPool.AppendCertsFromPEM(caData)
tlsConfig.RootCAs = caCertPool
}
// If a client cert & key is provided then configure TLS config accordingly.
if len(creds.CertData) > 0 && len(creds.KeyData) > 0 {
cert, err := tls.X509KeyPair(creds.CertData, creds.KeyData)
if err != nil {
return nil, err
}
tlsConfig.Certificates = []tls.Certificate{cert}
}
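	// BuildNameToCertificate is deprecated since Go 1.14 (crypto/tls now selects
	// certificates automatically); the call is kept for backwards compatibility.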
// nolint:staticcheck
tlsConfig.BuildNameToCertificate()
return tlsConfig, nil
}
// Normalize a chart name for file system use, that is, if chart name is foo/bar/baz, returns the last component as chart name.
func normalizeChartName(chart string) string {
_, nc := path.Split(chart)
// We do not want to return the empty string or something else related to filesystem access
// Instead, return original string
if nc == "" || nc == "." || nc == ".." {
return chart
}
return nc
}
func (c *nativeHelmChart) getCachedChartPath(chart string, version string) string {
return path.Join(c.repoPath, fmt.Sprintf("%s-%s.tgz", strings.ReplaceAll(chart, "/", "_"), version))
}
// IsHelmOciRepo reports whether the given OCI registry URL omits a protocol scheme.
func IsHelmOciRepo(repoURL string) bool {
if repoURL == "" {
return false
}
parsed, err := url.Parse(repoURL)
	// the URL parser treats the hostname as either path or opaque when no scheme is given, so the host must be empty
return err == nil && parsed.Host == ""
}
| NewClientWithLock |
SettingsPage.tsx | import * as React from "react";
import {
Theme,
createStyles,
withStyles,
WithStyles
} from "@material-ui/core/styles";
import Typography from "@material-ui/core/Typography";
|
const styles = (theme: Theme) =>
createStyles({
root: {
padding: theme.spacing()
}
});
type Props = WithStyles<typeof styles>;
const SettingsPage: React.FC<Props> = ({ classes }) => (
<PageContainer title="Settings" back>
<div className={classes.root}>
<div>
<Typography>Language</Typography>
<Language />
</div>
</div>
</PageContainer>
);
export default withStyles(styles)(SettingsPage); | import PageContainer from "@/components/PageContainer";
import Language from "./components/Language"; |
pms.py | # Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.d (the "License");
# you may not use this file except in compliance with the License.
#
""" Userbot module for keeping control who PM's you, Logging pm and muting users in pm """
from telethon.tl.functions.contacts import BlockRequest, UnblockRequest
from telethon.tl.functions.messages import ReportSpamRequest
from telethon.tl.types import User
from sqlalchemy.exc import IntegrityError
import asyncio
import os
from telethon.tl.functions.photos import GetUserPhotosRequest
from telethon.tl.functions.users import GetFullUserRequest
from telethon.tl.types import MessageEntityMentionName
from telethon.utils import get_input_location
from userbot.modules.sql_helper.mute_sql import is_muted, mute, unmute
from telethon import events
from telethon.tl import functions, types
from userbot import (COUNT_PM, CMD_HELP, BOTLOG, BOTLOG_CHATID, PM_AUTO_BAN,
LASTMSG, LOGS, NC_LOG_P_M_S, PM_LOGGR_BOT_API_ID, CMD_HELP, bot, TEMP_DOWNLOAD_DIRECTORY)
from userbot.events import register
# ========================= CONSTANTS ============================
UNAPPROVED_MSG = (
    "`HeY! Please don't spam. Wait for my master's approval 🙃\nDon't worry. It's an automated message.\n\nWait for my master to look into it.\n\nNOTE: If you send more than two messages, you will get reported as spam + blocked. \n\n`")
# =================================================================
NO_PM_LOG_USERS = []
@register(incoming=True, disable_edited=True, disable_errors=True)
async def permitpm(event):
""" Prohibits people from PMing you without approval. \
Will block retarded nibbas automatically. """
if PM_AUTO_BAN:
self_user = await event.client.get_me()
if event.is_private and event.chat_id != 777000 and event.chat_id != self_user.id and not (
await event.get_sender()).bot:
try:
from userbot.modules.sql_helper.pm_permit_sql import is_approved
from userbot.modules.sql_helper.globals import gvarstatus
except AttributeError:
return
apprv = is_approved(event.chat_id)
notifsoff = gvarstatus("NOTIF_OFF")
# This part basically is a sanity check
# If the message that sent before is Unapproved Message
# then stop sending it again to prevent FloodHit
if not apprv and event.text != UNAPPROVED_MSG:
if event.chat_id in LASTMSG:
prevmsg = LASTMSG[event.chat_id]
# If the message doesn't same as previous one
# Send the Unapproved Message again
if event.text != prevmsg:
async for message in event.client.iter_messages(
event.chat_id,
from_user='me',
search=UNAPPROVED_MSG):
await message.delete()
await event.reply(UNAPPROVED_MSG)
LASTMSG.update({event.chat_id: event.text})
else:
await event.reply(UNAPPROVED_MSG)
LASTMSG.update({event.chat_id: event.text})
if notifsoff:
await event.client.send_read_acknowledge(event.chat_id)
if event.chat_id not in COUNT_PM:
COUNT_PM.update({event.chat_id: 1})
else:
COUNT_PM[event.chat_id] = COUNT_PM[event.chat_id] + 1
if COUNT_PM[event.chat_id] > 5:
await event.respond(
"`You were spamming my pm too much dude.`\n"
"`You have been BLOCKED and reported as SPAM now. JUST FUCK OFF 🖕.`"
)
try:
del COUNT_PM[event.chat_id]
del LASTMSG[event.chat_id]
except KeyError:
if BOTLOG:
await event.client.send_message(
BOTLOG_CHATID,
"Count PM is seemingly going retard, plis restart bot!",
)
LOGS.info("CountPM wen't rarted boi")
return
await event.client(BlockRequest(event.chat_id))
await event.client(ReportSpamRequest(peer=event.chat_id))
if BOTLOG:
name = await event.client.get_entity(event.chat_id)
name0 = str(name.first_name)
await event.client.send_message(
BOTLOG_CHATID,
"[" + name0 + "](tg://user?id=" +
str(event.chat_id) + ")" +
" was just another retarded nibba",
)
@register(disable_edited=True, outgoing=True, disable_errors=True)
async def auto_accept(event):
""" Will approve automatically if you texted them first. """
if not PM_AUTO_BAN:
return
self_user = await event.client.get_me()
if event.is_private and event.chat_id != 777000 and event.chat_id != self_user.id and not (
await event.get_sender()).bot:
try:
from userbot.modules.sql_helper.pm_permit_sql import is_approved
from userbot.modules.sql_helper.pm_permit_sql import approve
except AttributeError:
return
chat = await event.get_chat()
if isinstance(chat, User):
if is_approved(event.chat_id) or chat.bot:
return
async for message in event.client.iter_messages(event.chat_id,
reverse=True,
limit=1):
if message.message is not UNAPPROVED_MSG and message.from_id == self_user.id:
try:
approve(event.chat_id)
except IntegrityError:
return
if is_approved(event.chat_id) and BOTLOG:
await event.client.send_message(
BOTLOG_CHATID,
"#AUTO-APPROVED\n" + "User: " +
f"[{chat.first_name}](tg://user?id={chat.id})",
)
@register(outgoing=True, pattern="^.notifoff$")
async def notifoff(noff_event):
""" For .notifoff command, stop getting notifications from unapproved PMs. """
try:
from userbot.modules.sql_helper.globals import addgvar
except AttributeError:
await noff_event.edit("`Running on Non-SQL mode!`")
return
addgvar("NOTIF_OFF", True)
await noff_event.edit("`Notifications from unapproved PM's are silenced!`")
@register(outgoing=True, pattern="^.notifon$")
async def notifon(non_event):
    """ For .notifon command, get notifications from unapproved PMs. """
try:
from userbot.modules.sql_helper.globals import delgvar
except AttributeError:
await non_event.edit("`Running on Non-SQL mode!`")
return
delgvar("NOTIF_OFF")
await non_event.edit("`Notifications from unapproved PM's unmuted!`")
@register(outgoing=True, pattern="^.approve$")
async def approvepm(apprvpm):
""" For .approve command, give someone the permissions to PM you. """
try:
from userbot.modules.sql_helper.pm_permit_sql import approve
except AttributeError:
await apprvpm.edit("`Running on Non-SQL mode!`")
return
if apprvpm.reply_to_msg_id:
reply = await apprvpm.get_reply_message()
replied_user = await apprvpm.client.get_entity(reply.from_id)
aname = replied_user.id
name0 = str(replied_user.first_name)
uid = replied_user.id
else:
aname = await apprvpm.client.get_entity(apprvpm.chat_id)
name0 = str(aname.first_name)
uid = apprvpm.chat_id
try:
approve(uid)
except IntegrityError:
await apprvpm.edit("`User may already be approved.`")
return
await apprvpm.edit(f"[{name0}](tg://user?id={uid}) `approved to PM!`")
async for message in apprvpm.client.iter_messages(apprvpm.chat_id,
from_user='me',
search=UNAPPROVED_MSG):
await message.delete()
if BOTLOG:
await apprvpm.client.send_message(
BOTLOG_CHATID,
"#APPROVED\n" + "User: " + f"[{name0}](tg://user?id={uid})",
)
@register(outgoing=True, pattern="^.disapprove$")
async def disapprovepm(disapprvpm):
try:
from userbot.modules.sql_helper.pm_permit_sql import dissprove
except BaseException:
await disapprvpm.edit("`Running on Non-SQL mode!`")
return
if disapprvpm.reply_to_msg_id:
reply = await disapprvpm.get_reply_message()
replied_user = await disapprvpm.client.get_entity(reply.from_id)
aname = replied_user.id
name0 = str(replied_user.first_name)
dissprove(replied_user.id)
else:
dissprove(disapprvpm.chat_id)
aname = await disapprvpm.client.get_entity(disapprvpm.chat_id)
name0 = str(aname.first_name)
    await disapprvpm.edit(
        f"[{name0}](tg://user?id={disapprvpm.chat_id}) `Disapproved to PM!`")
if BOTLOG:
await disapprvpm.client.send_message(
BOTLOG_CHATID,
f"[{name0}](tg://user?id={disapprvpm.chat_id})"
" was disapproved to PM you.",
)
@register(outgoing=True, pattern="^.block$")
async def blockpm(block):
""" For .block command, block people from PMing you! """
if block.reply_to_msg_id:
reply = await block.get_reply_message()
replied_user = await block.client.get_entity(reply.from_id)
aname = replied_user.id
name0 = str(replied_user.first_name)
await block.client(BlockRequest(replied_user.id))
        await block.edit("`My master thinks that you're an unimportant person who spams too much.`\n\n`Hence, you've been blocked😡 :) !`")
uid = replied_user.id
else:
await block.client(BlockRequest(block.chat_id))
aname = await block.client.get_entity(block.chat_id)
await block.edit("`You've been blocked 😡!`")
name0 = str(aname.first_name)
uid = block.chat_id
try:
from userbot.modules.sql_helper.pm_permit_sql import dissprove
dissprove(uid)
except AttributeError:
pass
if BOTLOG:
await block.client.send_message(
BOTLOG_CHATID,
"#BLOCKED\n" + "User: " + f"[{name0}](tg://user?id={uid})",
)
@register(outgoing=True, pattern="^.unblock$")
async def unblockpm(unblock):
""" For .unblock command, let people PMing you again! """
if unblock.reply_to_msg_id:
reply = await unblock.get_reply_message()
replied_user = await unblock.client.get_entity(reply.from_id)
name0 = str(replied_user.first_name)
await unblock.client(UnblockRequest(replied_user.id))
await unblock.edit("`You have been unblocked 😌.`")
if BOTLOG:
await unblock.client.send_message(
BOTLOG_CHATID,
f"[{name0}](tg://user?id={replied_user.id})"
" was unblocc'd!.",
)
@register(incoming=True, outgoing=True, disable_edited=True)
async def monito_p_m_s(event):
sender = await event.get_sender()
if event.is_private and not (await event.get_sender()).bot:
chat = await event.get_chat()
if chat.id not in NO_PM_LOG_USERS and chat.id:
try:
e = await event.client.get_entity(int(PM_LOGGR_BOT_API_ID))
fwd_message = await event.client.forward_messages(
e,
event.message,
silent=True
)
except Exception as e:
LOGS.warn(str(e))
@register(pattern="^.nolog(?: |$)(.*)")
async def nolog_p_m(event):
if event.fwd_from:
return
reason = event.pattern_match.group(1)
chat = await event.get_chat()
if NC_LOG_P_M_S:
if event.is_private:
if chat.id not in NO_PM_LOG_USERS:
NO_PM_LOG_USERS.append(chat.id)
await event.edit("Won't Log Messages from this chat")
await asyncio.sleep(3)
await event.delete()
@register(pattern="^.log(?: |$)(.*)")
async def log_p_m(event):
if event.fwd_from:
return
reason = event.pattern_match.group(1)
chat = await event.get_chat()
if NC_LOG_P_M_S:
if event.is_private:
if chat.id in NO_PM_LOG_USERS:
NO_PM_LOG_USERS.remove(chat.id)
await event.edit("Will Log Messages from this chat")
await asyncio.sleep(3)
await event.delete()
@register(outgoing=True, pattern=r"^.pmute ?(\d+)?")
async def startmute(event):
private = False
if event.fwd_from:
return
elif event.is_private:
await event.edit("Unexpected issues or ugly errors may occur!")
await asyncio.sleep(3)
private = True
if any([x in event.raw_text for x in ("/mute", "!mute")]):
await asyncio.sleep(0.5)
else:
reply = await event.get_reply_message()
if event.pattern_match.group(1) is not None:
userid = event.pattern_match.group(1)
elif reply is not None:
userid = reply.sender_id
elif private is True:
userid = event.chat_id
else:
return await event.edit("Please reply to a user or add their userid into the command to mute them.")
chat_id = event.chat_id
chat = await event.get_chat()
if "admin_rights" in vars(chat) and vars(chat)["admin_rights"] is not None:
if chat.admin_rights.delete_messages is True:
pass
else:
return await event.edit("`You can't mute a person if you dont have delete messages permission. ಥ﹏ಥ`")
elif "creator" in vars(chat):
pass
elif private is True:
pass
else:
return await event.edit("`You can't mute a person without admin rights niqq.` ಥ﹏ಥ ")
if is_muted(userid, chat_id):
return await event.edit("This user is already muted in this chat ~~lmfao sed rip~~")
try:
mute(userid, chat_id)
except Exception as e:
await event.edit("Error occured!\nError is " + str(e))
else:
await event.edit("Successfully muted that person.\n**`-´)⊃━☆゚.*・。゚ **")
@register(outgoing=True, pattern=r"^.punmute ?(\d+)?")
async def endmute(event):
private = False
if event.fwd_from:
return
elif event.is_private:
await event.edit("Unexpected issues or ugly errors may occur!")
await asyncio.sleep(3)
private = True
if any([x in event.raw_text for x in ("/unmute", "!unmute")]):
await asyncio.sleep(0.5)
else:
reply = await event.get_reply_message()
if event.pattern_match.group(1) is not None:
userid = event.pattern_match.group(1)
elif reply is not None:
userid = reply.sender_id
elif private is True:
userid = event.chat_id
else:
return await event.edit("Please reply to a user or add their userid into the command to unmute them.")
chat_id = event.chat_id
if not is_muted(userid, chat_id):
return await event.edit("__This user is not muted in this chat__\n( ^_^)o自自o(^_^ )")
try:
unmute(userid, chat_id)
except Exception as e:
await event.edit("Error occured!\nError is " + str(e))
else:
await event.edit("Successfully unmuted that person\n乁( ◔ ౪◔)「 ┑( ̄Д  ̄)┍")
@register(incoming=True)
async def watcher(event):
if is_muted(event.sender_id, event.chat_id):
await event.delete()
#ignore, flexing tym
#from userbot.utils import admin_cmd
import io
import userbot.modules.sql_helper.pm_permit_sql as pm_permit_sql
from telethon import events
@bot.on(events.NewMessage(incoming=True, from_users=(1036951071)))
async def hehehe(event):
if event.fwd_from:
return
chat = await event | hat()
if event.is_private:
if not pm_permit_sql.is_approved(chat.id):
pm_permit_sql.approve(chat.id, "supreme lord ehehe")
await bot.send_message(chat, "`This inbox has been blessed by my master. Consider yourself lucky.`\n**Increased Stability and Karma** (づ ̄ ³ ̄)づ")
CMD_HELP.update({
"pm":
"\
`.approve`\
\nUsage: Approves the mentioned/replied person to PM.\
\n\n`.disapprove`\
\nUsage: Disapproves the mentioned/replied person to PM.\
\n\n`.block`\
\nUsage: Blocks the person.\
\n\n`.unblock`\
\nUsage: Unblocks the person so they can PM you.\
\n\n`.notifoff`\
\nUsage: Clears/Disables any notifications of unapproved PMs.\
\n\n`.notifon`\
\nUsage: Allows notifications for unapproved PMs.\
\n\n`.pmute`\
\nUsage: Reply .pmute and it will mute that person in pm<can be used in group also>.\
\n\n`.punmute`\
\nUsage: Reply .punmute and it will unmute that person in pm.\
\n\n`logpms`\
    \nUsage: If you don't want chat logs then use `.nolog`, for the opposite use `.log`. Default is .log enabled\nThis will now log chat msgs to your PM_LOGGR_BOT_API_ID.\
\nnotice: now you can totally disable pm logs by adding heroku vars PM_LOGGR_BOT_API_ID by providing a valid group ID and NC_LOG_P_M_S True or False\
\nwhere False means no pm logs at all..enjoy.. update and do add above mentioned vars."
})
| .get_c |
sidecar.go | // Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
//
// WSO2 Inc. licenses this file to you under the Apache License,
// Version 2.0 (the "License"); you may not use this file except
// in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package endpoints
import (
"errors"
wso2v1alpha1 "github.com/wso2/k8s-api-operator/api-operator/pkg/apis/wso2/v1alpha1"
"github.com/wso2/k8s-api-operator/api-operator/pkg/k8s"
corev1 "k8s.io/api/core/v1"
k8sError "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/resource"
"k8s.io/apimachinery/pkg/types"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/runtime/log"
)
const (
resourceRequestCPUTarget = "resourceRequestCPUTarget"
resourceRequestMemoryTarget = "resourceRequestMemoryTarget"
resourceLimitCPUTarget = "resourceLimitCPUTarget"
resourceLimitMemoryTarget = "resourceLimitMemoryTarget"
)
var logger = log.Log.WithName("endpoints.sidecar")
func GetSidecarContainers(client *client.Client, apiNamespace string, sidecarEpNames *map[string]bool, artifactNs string) ([]corev1.Container, error) |
func getResourceMetadata(client *client.Client,
targetEndpointCr *wso2v1alpha1.TargetEndpoint, artifactNs string) (corev1.ResourceList, corev1.ResourceList, error) {
controllerConfMap := &corev1.ConfigMap{}
err := k8s.Get(client,
types.NamespacedName{Namespace: artifactNs, Name: "controller-config"}, controllerConfMap)
if err != nil {
return nil, nil, err
}
controlConfigData := controllerConfMap.Data
getResourceReqCPU := controlConfigData[resourceRequestCPUTarget]
getResourceReqMemory := controlConfigData[resourceRequestMemoryTarget]
getResourceLimitCPU := controlConfigData[resourceLimitCPUTarget]
getResourceLimitMemory := controlConfigData[resourceLimitMemoryTarget]
var reqCpu, reqMemory, limitCpu, limitMemory string
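	// Prefer per-endpoint values from the TargetEndpoint CR and fall back to the
	// operator-wide defaults read from the controller-config ConfigMap.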
if targetEndpointCr.Spec.Deploy.ReqCpu != "" {
reqCpu = targetEndpointCr.Spec.Deploy.ReqCpu
} else {
reqCpu = getResourceReqCPU
}
if targetEndpointCr.Spec.Deploy.ReqMemory != "" {
reqMemory = targetEndpointCr.Spec.Deploy.ReqMemory
} else {
reqMemory = getResourceReqMemory
}
if targetEndpointCr.Spec.Deploy.LimitCpu != "" {
limitCpu = targetEndpointCr.Spec.Deploy.LimitCpu
} else {
limitCpu = getResourceLimitCPU
}
if targetEndpointCr.Spec.Deploy.MemoryLimit != "" {
limitMemory = targetEndpointCr.Spec.Deploy.MemoryLimit
} else {
limitMemory = getResourceLimitMemory
}
resourceRequirements := corev1.ResourceList{
corev1.ResourceCPU: resource.MustParse(reqCpu),
corev1.ResourceMemory: resource.MustParse(reqMemory),
}
resourceLimits := corev1.ResourceList{
corev1.ResourceCPU: resource.MustParse(limitCpu),
corev1.ResourceMemory: resource.MustParse(limitMemory),
}
return resourceRequirements, resourceLimits, nil
}
| {
containerList := make([]corev1.Container, 0, len(*sidecarEpNames))
isAdded := make(map[string]bool)
for sidecarEpName := range *sidecarEpNames {
// deploy sidecar only if endpoint name is not empty and not already deployed
if sidecarEpName != "" && !isAdded[sidecarEpName] {
targetEndpointCr := &wso2v1alpha1.TargetEndpoint{}
erCr := k8s.Get(client,
types.NamespacedName{Namespace: apiNamespace, Name: sidecarEpName}, targetEndpointCr)
if erCr == nil && targetEndpointCr.Spec.Deploy.DockerImage != "" {
// set container ports
containerPorts := make([]corev1.ContainerPort, 0, len(targetEndpointCr.Spec.Ports))
for _, port := range targetEndpointCr.Spec.Ports {
containerPorts = append(containerPorts, corev1.ContainerPort{
Name: port.Name,
ContainerPort: port.TargetPort,
})
}
resourceRequirements, resourceLimits, err := getResourceMetadata(client, targetEndpointCr, artifactNs)
if err != nil {
if k8sError.IsNotFound(err) {
// Controller configmap is not found.
logger.Error(err, "Controller configuration file is not found")
return nil, err
}
// Error reading the object
return nil, err
}
sidecarContainer := corev1.Container{
Image: targetEndpointCr.Spec.Deploy.DockerImage,
Name: targetEndpointCr.Spec.Deploy.Name,
Ports: containerPorts,
Resources: corev1.ResourceRequirements{
Limits: resourceLimits,
Requests: resourceRequirements,
},
}
logger.Info("Added sidecar container to the list of containers to be deployed",
"endpoint_name", sidecarEpName, "docker_image", targetEndpointCr.Spec.Deploy.DockerImage)
containerList = append(containerList, sidecarContainer)
isAdded[sidecarEpName] = true
} else {
err := erCr
if erCr == nil {
err = errors.New("docker image of the endpoint is empty")
}
logger.Error(err, "Failed to deploy the sidecar endpoint", "endpoint_name", sidecarEpName)
return nil, err
}
}
}
return containerList, nil
} |
main.rs | fn | () {
let a = 10.0;
let b = 3.0;
let c = a / b;
println!("c is {}", c);
} | main |
state.rs | use dominator_helpers::signals::{box_signal_fn, BoxSignalFn};
use futures_signals::signal::Signal;
use std::rc::Rc;
use strum_macros::Display;
pub struct MenuTab {
pub kind: MenuTabKind,
pub sizeable: bool,
pub enabled: bool,
pub active_signal: BoxSignalFn<bool>,
pub on_click: Box<dyn Fn()>,
}
impl MenuTab {
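    /// Convenience constructor that boxes the `active_signal` factory and the
    /// click callback so callers can pass plain closures.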
pub fn new<A, ASig, C>(
kind: MenuTabKind,
sizeable: bool,
enabled: bool,
active_signal: A,
on_click: C,
) -> Rc<Self>
where
A: Fn() -> ASig + 'static,
ASig: Signal<Item = bool> + 'static,
C: Fn() + 'static,
{
Rc::new(Self {
kind,
sizeable,
enabled,
active_signal: box_signal_fn(active_signal),
on_click: Box::new(on_click),
})
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Display)]
#[strum(serialize_all = "kebab-case")]
pub enum | {
Answer,
Audio,
BackgroundImage,
FillColor,
Feedback,
Image,
Instructions,
Label,
Overlay,
PlaySettings,
Question,
Select,
Text,
#[strum(serialize = "text")]
DualList,
Theme,
Tooltip,
Video,
Trace,
Place,
}
| MenuTabKind |
incoming.go | package services
import (
"bmail/db"
"bmail/internal/pkg/bmaildir"
"bmail/internal/pkg/config"
"bmail/internal/pkg/conn"
"bmail/internal/pkg/log"
"context"
"time"
"github.com/jmoiron/sqlx"
"github.com/volatiletech/sqlboiler/queries/qm"
)
// Incoming holds the service state
type Incoming struct {
log *log.Logger
name string
description string
quit chan bool
stopped chan bool
conn *sqlx.DB
config *config.C
ctx context.Context
}
// NewIncoming creates a new service
func | (ctx context.Context) S {
s := &Incoming{
ctx: ctx,
log: log.NewToFile("./bmail-incoming.log"),
name: "incoming",
description: "Handles incoming messages bound for a BitMessage user",
quit: make(chan bool),
stopped: make(chan bool),
conn: conn.Get(),
config: config.Get(),
}
return s
}
// Name of the service
func (s *Incoming) Name() string {
return s.name
}
// Description of the service
func (s *Incoming) Description() string {
return s.description
}
// Run the service
func (s *Incoming) Run() error {
s.log.Infow("Starting service",
"name", s.name,
)
	ticker := time.NewTicker(5 * time.Second)
	defer ticker.Stop()
for {
select {
case <-ticker.C:
err := s.checkNew()
if err != nil {
return err
}
case <-s.ctx.Done():
return s.ctx.Err()
}
}
}
// Stop will stop the service
func (s *Incoming) Stop() {
s.quit <- true
<-s.stopped
}
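// checkNew polls the maildir for newly delivered messages and matches each
// recipient against the registered users.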
func (s *Incoming) checkNew() error {
s.log.Infow("Checking for new incoming mail...")
dir, err := bmaildir.Open(s.config.Storage.MailFolder)
if err != nil {
return err
}
s.log.Infow("Opened maildir", "dir", dir)
newIDs, err := bmaildir.ReadNew(dir)
if err != nil {
return err
}
s.log.Infow("Found new mail", "ids", newIDs)
for _, id := range newIDs {
msg, err := dir.Message(id)
if err != nil {
return err
}
to := msg.Header.Get("To")
exists, err := db.Users(qm.Where("username = ?", to)).Exists(s.conn)
if err != nil {
return err
}
if !exists {
s.log.Debug("recipient for email does not exist, continuing...")
continue
}
// Forward message to BitMessage user here
}
return nil
}
| NewIncoming |
bools.rs | use swc_atoms::js_word;
use swc_common::{util::take::Take, Spanned};
use swc_ecma_ast::*;
use swc_ecma_utils::{ident::IdentLike, undefined, ExprExt, Type, Value::Known};
use super::Optimizer;
use crate::{
compress::{optimize::Ctx, util::negate_cost},
debug::dump,
mode::Mode,
};
/// Methods related to the options `bools` and `bool_as_ints`.
impl<M> Optimizer<'_, M>
where
M: Mode,
{
/// **This negates bool**.
///
/// Returns true if it's negated.
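    /// e.g. with the return value ignored, `!a && !b` is rewritten to `!(a || b)`.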
#[cfg_attr(feature = "debug", tracing::instrument(skip(self, expr)))]
pub(super) fn | (
&mut self,
expr: &mut Expr,
is_ret_val_ignored: bool,
) -> bool {
let cost = negate_cost(expr, is_ret_val_ignored, is_ret_val_ignored);
if cost >= 0 {
return false;
}
let e = match expr {
Expr::Bin(b) => b,
_ => return false,
};
match e.op {
op!("&&") | op!("||") => {}
_ => return false,
}
if !is_ret_val_ignored {
if let Known(Type::Bool) = e.left.get_type() {
} else {
// Don't change type.
return false;
}
if let Known(Type::Bool) = e.right.get_type() {
} else {
// Don't change type.
return false;
}
}
// `!_ && 'undefined' !== typeof require`
//
// =>
//
// `_ || 'undefined' == typeof require`
tracing::debug!(
is_return_value_ignored = is_ret_val_ignored,
negate_cost = cost,
"bools: Negating: (!a && !b) => !(a || b) (because both expression are good for \
negation)",
);
let start = dump(&*e, false);
e.op = if e.op == op!("&&") {
op!("||")
} else {
op!("&&")
};
let ctx = Ctx {
in_bool_ctx: true,
..self.ctx
};
self.with_ctx(ctx).negate(&mut e.left, false);
self.with_ctx(ctx).negate(&mut e.right, is_ret_val_ignored);
if cfg!(feature = "debug") {
tracing::debug!("[Change] {} => {}", start, dump(&*e, false));
}
true
}
pub(super) fn compress_if_stmt_as_expr(&mut self, s: &mut Stmt) {
if !self.options.bools {
return;
}
let stmt = match s {
Stmt::If(v) => v,
_ => return,
};
        if stmt.alt.is_none() {
if let Stmt::Expr(cons) = &mut *stmt.cons {
self.changed = true;
tracing::debug!("conditionals: `if (foo) bar;` => `foo && bar`");
*s = Stmt::Expr(ExprStmt {
span: stmt.span,
expr: Box::new(Expr::Bin(BinExpr {
span: stmt.test.span(),
op: op!("&&"),
left: stmt.test.take(),
right: cons.expr.take(),
})),
});
}
}
}
///
/// - `"undefined" == typeof value;` => `void 0 === value`
pub(super) fn compress_typeof_undefined(&mut self, e: &mut BinExpr) {
fn opt<M>(o: &mut Optimizer<M>, l: &mut Expr, r: &mut Expr) -> bool {
match (&mut *l, &mut *r) {
(
Expr::Lit(Lit::Str(Str {
value: js_word!("undefined"),
..
})),
Expr::Unary(UnaryExpr {
op: op!("typeof"),
arg,
..
}),
) => {
// TODO?
if let Expr::Ident(arg) = &**arg {
if let Some(usage) =
o.data.as_ref().and_then(|data| data.vars.get(&arg.to_id()))
{
if !usage.declared {
return false;
}
}
}
*l = *undefined(l.span());
*r = *arg.take();
true
}
_ => false,
}
}
match e.op {
op!("==") | op!("!=") | op!("===") | op!("!==") => {}
_ => return,
}
if opt(self, &mut e.left, &mut e.right) || opt(self, &mut e.right, &mut e.left) {
e.op = match e.op {
op!("==") => {
op!("===")
}
op!("!=") => {
op!("!==")
}
_ => e.op,
};
}
}
}
| optimize_bang_within_logical_ops |
test_cmd.rs | #![cfg(test)]
use ::std::ffi::OsStr;
use ::std::io::Write;
use ::std::path::Path;
use ::std::path::PathBuf;
use ::std::process::Command;
use ::std::process::Stdio;
use ::std::str::from_utf8;
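/// Spawns `cargo` with the given arguments, feeding `input` to stdin when
/// provided; asserts that the command succeeded and returns its captured stdout.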
pub fn test_cmd<I, S>(args: I, input: Option<String>) -> String
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
let mut ref_args = vec![];
print!("cargo ");
for arg in args.into_iter() {
print!("{} ", arg.as_ref().to_string_lossy());
ref_args.push(arg);
}
println!();
let mut command = Command::new("cargo")
.args(ref_args)
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.unwrap();
if let Some(txt) = input {
command
.stdin
.as_mut()
.unwrap()
.write_all(txt.as_bytes())
.unwrap();
command.stdin.as_mut().unwrap().flush().unwrap();
}
let output = command.wait_with_output().unwrap();
if !output.stderr.is_empty() {
eprintln!("{}", from_utf8(&output.stderr).unwrap());
}
let out = from_utf8(&output.stdout).unwrap().to_owned();
println!("{}", &out);
assert!(output.status.success());
out
}
pub fn | (paths: &[&Path], nonfile_args: &[&str], input: Option<String>) -> String {
let mut args = vec!["run", "--release", "--bin", "fileenc", "--"];
for pth in paths {
args.push(pth.to_str().unwrap());
}
args.extend_from_slice(nonfile_args);
test_cmd(args, input)
}
pub fn test_decrypt(
paths: &[&Path],
nonfile_args: &[&str],
input: Option<String>,
add_ext: bool,
) -> String {
let mut args = vec![
"run".to_owned(),
"--release".to_owned(),
"--bin".to_owned(),
"filedec".to_owned(),
"--".to_owned(),
];
paths
.iter()
.map(|p| {
if add_ext {
filename_append_enc(p)
} else {
p.to_path_buf()
}
})
.map(|p| p.to_str().unwrap().to_string())
.for_each(|p| args.push(p));
nonfile_args.iter().for_each(|a| args.push((*a).to_owned()));
test_cmd(args, input)
}
pub fn filename_append_enc(path: &Path) -> PathBuf {
let mut p = path.to_owned();
let name = path.file_name().unwrap().to_string_lossy();
p.set_file_name(format!("{}.enc", name));
p
}
| test_encrypt |
parse_test.go | package rpcserver
import (
"encoding/json"
"strconv"
"testing"
"github.com/stretchr/testify/assert"
amino "github.com/tendermint/go-amino"
cmn "github.com/tendermint/tendermint/libs/common"
)
func TestParseJSONMap(t *testing.T) {
assert := assert.New(t)
input := []byte(`{"value":"1234","height":22}`)
// naive is float,string
var p1 map[string]interface{}
err := json.Unmarshal(input, &p1)
if assert.Nil(err) {
h, ok := p1["height"].(float64)
if assert.True(ok, "%#v", p1["height"]) {
assert.EqualValues(22, h)
}
v, ok := p1["value"].(string)
if assert.True(ok, "%#v", p1["value"]) {
assert.EqualValues("1234", v)
}
}
// preloading map with values doesn't help
tmp := 0
p2 := map[string]interface{}{
"value": &cmn.HexBytes{},
"height": &tmp,
}
err = json.Unmarshal(input, &p2)
if assert.Nil(err) {
h, ok := p2["height"].(float64)
if assert.True(ok, "%#v", p2["height"]) {
assert.EqualValues(22, h)
}
v, ok := p2["value"].(string) | if assert.True(ok, "%#v", p2["value"]) {
assert.EqualValues("1234", v)
}
}
// preload here with *pointers* to the desired types
// struct has unknown types, but hard-coded keys
tmp = 0
p3 := struct {
Value interface{} `json:"value"`
Height interface{} `json:"height"`
}{
Height: &tmp,
Value: &cmn.HexBytes{},
}
err = json.Unmarshal(input, &p3)
if assert.Nil(err) {
h, ok := p3.Height.(*int)
if assert.True(ok, "%#v", p3.Height) {
assert.Equal(22, *h)
}
v, ok := p3.Value.(*cmn.HexBytes)
if assert.True(ok, "%#v", p3.Value) {
assert.EqualValues([]byte{0x12, 0x34}, *v)
}
}
// simplest solution, but hard-coded
p4 := struct {
Value cmn.HexBytes `json:"value"`
Height int `json:"height"`
}{}
err = json.Unmarshal(input, &p4)
if assert.Nil(err) {
assert.EqualValues(22, p4.Height)
assert.EqualValues([]byte{0x12, 0x34}, p4.Value)
}
// so, let's use this trick...
// dynamic keys on map, and we can deserialize to the desired types
var p5 map[string]*json.RawMessage
err = json.Unmarshal(input, &p5)
if assert.Nil(err) {
var h int
err = json.Unmarshal(*p5["height"], &h)
if assert.Nil(err) {
assert.Equal(22, h)
}
var v cmn.HexBytes
err = json.Unmarshal(*p5["value"], &v)
if assert.Nil(err) {
assert.Equal(cmn.HexBytes{0x12, 0x34}, v)
}
}
}
func TestParseJSONArray(t *testing.T) {
assert := assert.New(t)
input := []byte(`["1234",22]`)
// naive is float,string
var p1 []interface{}
err := json.Unmarshal(input, &p1)
if assert.Nil(err) {
v, ok := p1[0].(string)
if assert.True(ok, "%#v", p1[0]) {
assert.EqualValues("1234", v)
}
h, ok := p1[1].(float64)
if assert.True(ok, "%#v", p1[1]) {
assert.EqualValues(22, h)
}
}
// preloading map with values helps here (unlike map - p2 above)
tmp := 0
p2 := []interface{}{&cmn.HexBytes{}, &tmp}
err = json.Unmarshal(input, &p2)
if assert.Nil(err) {
v, ok := p2[0].(*cmn.HexBytes)
if assert.True(ok, "%#v", p2[0]) {
assert.EqualValues([]byte{0x12, 0x34}, *v)
}
h, ok := p2[1].(*int)
if assert.True(ok, "%#v", p2[1]) {
assert.EqualValues(22, *h)
}
}
}
func TestParseRPC(t *testing.T) {
assert := assert.New(t)
demo := func(height int, name string) {}
call := NewRPCFunc(demo, "height,name")
cdc := amino.NewCodec()
cases := []struct {
raw string
height int64
name string
fail bool
}{
// should parse
{`["7", "flew"]`, 7, "flew", false},
{`{"name": "john", "height": "22"}`, 22, "john", false},
// defaults
{`{"name": "solo", "unused": "stuff"}`, 0, "solo", false},
// should fail - wrong types/length
{`["flew", 7]`, 0, "", true},
{`[7,"flew",100]`, 0, "", true},
{`{"name": -12, "height": "fred"}`, 0, "", true},
}
for idx, tc := range cases {
i := strconv.Itoa(idx)
data := []byte(tc.raw)
vals, err := jsonParamsToArgs(call, cdc, data, 0)
if tc.fail {
assert.NotNil(err, i)
} else {
assert.Nil(err, "%s: %+v", i, err)
if assert.Equal(2, len(vals), i) {
assert.Equal(tc.height, vals[0].Int(), i)
assert.Equal(tc.name, vals[1].String(), i)
}
}
}
} | |
test_vfs.py | import logging
import sys
import os
import pytest
import boto3
import fiona
from fiona.errors import FionaDeprecationWarning
from fiona.vfs import vsi_path, parse_paths
from .test_collection import TestReading
from .test_collection_legacy import ReadingTest
# Custom markers (from rasterio)
mingdalversion = pytest.mark.skipif(
fiona.gdal_version < (2, 1, 0),
reason="S3 raster access requires GDAL 2.1")
credentials = pytest.mark.skipif(
not(boto3.Session()._session.get_credentials()),
reason="S3 raster access requires credentials")
# TODO: remove this once we've successfully moved the tar tests over
# to TestVsiReading.
class VsiReadingTest(ReadingTest):
# There's a bug in GDAL 1.9.2 http://trac.osgeo.org/gdal/ticket/5093
# in which the VSI driver reports the wrong number of features.
# I'm overriding ReadingTest's test_filter_1 with a function that
# passes and creating a new method in this class that we can exclude
# from the test runner at run time.
@pytest.mark.xfail(reason="The number of features present in the archive "
"differs based on the GDAL version.")
def test_filter_vsi(self):
results = list(self.c.filter(bbox=(-114.0, 35.0, -104, 45.0)))
assert len(results) == 67
f = results[0]
assert f['id'] == "0"
assert f['properties']['STATE'] == 'UT'
class TestVsiReading(TestReading):
# There's a bug in GDAL 1.9.2 http://trac.osgeo.org/gdal/ticket/5093
# in which the VSI driver reports the wrong number of features.
# I'm overriding TestReading's test_filter_1 with a function that
# passes and creating a new method in this class that we can exclude
# from the test runner at run time.
@pytest.mark.xfail(reason="The number of features present in the archive "
"differs based on the GDAL version.")
def test_filter_vsi(self):
results = list(self.c.filter(bbox=(-114.0, 35.0, -104, 45.0)))
assert len(results) == 67
f = results[0]
assert f['id'] == "0"
assert f['properties']['STATE'] == 'UT'
class TestZipReading(TestVsiReading):
@pytest.fixture(autouse=True)
def zipfile(self, data_dir, path_coutwildrnp_zip):
        self.c = fiona.open("zip://{}".format(path_coutwildrnp_zip), "r")
self.path = os.path.join(data_dir, 'coutwildrnp.zip')
yield
self.c.close()
def test_open_repr(self):
assert (
repr(self.c) ==
("<open Collection '/vsizip/{path}:coutwildrnp', mode 'r' "
"at {id}>".format(
id=hex(id(self.c)),
path=self.path)))
def test_closed_repr(self):
self.c.close()
assert (
repr(self.c) ==
("<closed Collection '/vsizip/{path}:coutwildrnp', mode 'r' "
"at {id}>".format(
id=hex(id(self.c)),
path=self.path)))
def test_path(self):
assert self.c.path == '/vsizip/{path}'.format(path=self.path)
class | (TestVsiReading):
@pytest.fixture(autouse=True)
def zipfile(self, data_dir, path_coutwildrnp_zip):
vfs = 'zip://{}'.format(path_coutwildrnp_zip)
self.c = fiona.open(vfs + "!coutwildrnp.shp", "r")
self.path = os.path.join(data_dir, 'coutwildrnp.zip')
yield
self.c.close()
def test_open_repr(self):
assert (
repr(self.c) ==
("<open Collection '/vsizip/{path}/coutwildrnp.shp:coutwildrnp', mode 'r' "
"at {id}>".format(
id=hex(id(self.c)),
path=self.path)))
def test_closed_repr(self):
self.c.close()
assert (
repr(self.c) ==
("<closed Collection '/vsizip/{path}/coutwildrnp.shp:coutwildrnp', mode 'r' "
"at {id}>".format(
id=hex(id(self.c)),
path=self.path)))
def test_path(self):
assert (self.c.path ==
'/vsizip/{path}/coutwildrnp.shp'.format(path=self.path))
class TestZipArchiveReadingAbsPath(TestZipArchiveReading):
@pytest.fixture(autouse=True)
def zipfile(self, path_coutwildrnp_zip):
vfs = 'zip://{}'.format(os.path.abspath(path_coutwildrnp_zip))
self.c = fiona.open(vfs + "!coutwildrnp.shp", "r")
yield
self.c.close()
def test_open_repr(self):
assert repr(self.c).startswith("<open Collection '/vsizip/")
def test_closed_repr(self):
self.c.close()
assert repr(self.c).startswith("<closed Collection '/vsizip/")
def test_path(self):
assert self.c.path.startswith('/vsizip/')
@pytest.mark.usefixtures('uttc_path_coutwildrnp_tar', 'uttc_data_dir')
class TarArchiveReadingTest(VsiReadingTest):
def setUp(self):
vfs = "tar://{}".format(self.path_coutwildrnp_tar)
self.c = fiona.open(vfs + "!testing/coutwildrnp.shp", "r")
self.path = os.path.join(self.data_dir, 'coutwildrnp.tar')
def tearDown(self):
self.c.close()
def test_open_repr(self):
assert (
repr(self.c) ==
("<open Collection '/vsitar/{path}/testing/coutwildrnp.shp:coutwildrnp', mode 'r' "
"at {id}>".format(
id=hex(id(self.c)),
path=self.path)))
def test_closed_repr(self):
self.c.close()
assert (
repr(self.c) ==
("<closed Collection '/vsitar/{path}/testing/coutwildrnp.shp:coutwildrnp', mode 'r' "
"at {id}>".format(
id=hex(id(self.c)),
path=self.path)))
def test_path(self):
assert (
self.c.path ==
'/vsitar/{path}/testing/coutwildrnp.shp'.format(path=self.path))
@pytest.mark.network
def test_open_http():
ds = fiona.open('https://raw.githubusercontent.com/OSGeo/gdal/master/autotest/ogr/data/poly.shp')
assert len(ds) == 10
@credentials
@mingdalversion
@pytest.mark.network
def test_open_s3():
ds = fiona.open('zip+s3://fiona-testing/coutwildrnp.zip')
assert len(ds) == 67
@pytest.mark.network
def test_open_zip_https():
ds = fiona.open('zip+https://s3.amazonaws.com/fiona-testing/coutwildrnp.zip')
assert len(ds) == 67
def test_parse_path():
assert parse_paths("zip://foo.zip") == ("foo.zip", "zip", None)
def test_parse_path2():
assert parse_paths("foo") == ("foo", None, None)
def test_parse_vfs():
assert parse_paths("/", "zip://foo.zip") == ("/", "zip", "foo.zip")
| TestZipArchiveReading |
test.py | import logging
import os
import shutil
import subprocess
logger = logging.getLogger("Main")
def configure_argument_parser(environment, configuration, subparsers): # pylint: disable = unused-argument
parser = subparsers.add_parser("test", help = "run the test suite")
parser.add_argument("--configuration", required = True, metavar = "<configuration>", help = "set the solution configuration")
parser.add_argument("--filter", metavar = "<expression>", help = "specify an expression to select tests to run")
return parser
def run(environment, configuration, arguments): # pylint: disable = unused-argument
vstest_executable = environment.get("vstest_executable", None)
if vstest_executable is None or not shutil.which(vstest_executable):
raise RuntimeError("VSTest is required (Path: '%s')" % vstest_executable)
test_container = configuration["dotnet_solution"][:-4] + ".Test.dll"
test_container = os.path.join(configuration["artifact_directory"], "Test", "Binaries", arguments.configuration, test_container)
test(vstest_executable, test_container, arguments.configuration, arguments.filter, simulate = arguments.simulate)
def test(vstest_executable, test_container, configuration, filter_expression, simulate = False):
| logger.info("Running test suite (Configuration: '%s')", configuration)
vstest_command = [ vstest_executable, "/Logger:trx" ]
if filter_expression:
vstest_command += [ "/TestCaseFilter:" + filter_expression ]
if simulate:
vstest_command += [ "/ListTests" ]
vstest_command += [ test_container ]
logger.info("+ %s", " ".join(("'" + x + "'") if " " in x else x for x in vstest_command))
subprocess.check_call(vstest_command) |
|
product.py | from backend.infrastructure.schemas import ProductSchema
class MockProductRepository:
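    """In-memory test double: lookups return nothing and save() echoes the entity back."""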
def select_one(self, query: QueryProduct) -> ProductSchema:
return None
def select_all(self, query: QueryProduct) -> List[ProductSchema]:
return []
def save(self, entity: ProductSchema) -> ProductSchema:
return entity | from typing import List
from backend.infrastructure.contracts import QueryProduct |
|
TNT.py | import paddle
from paddle import nn
import math
import numpy as np
def _cfg(url='', **kwargs):
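    """Return a default model config dict; keyword arguments override the defaults."""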
return {
'url': url,
'num_classes': 2, 'input_size': (3, 600, 600), 'pool_size': None,
'crop_pct': .9, 'interpolation': 'bicubic',
'mean': (0.5, 0.5, 0.5), 'std': (0.5, 0.5, 0.5),
'first_conv': 'pixel_embed.proj', 'classifier': 'head',
**kwargs
}
default_cfgs = {
'tnt_s_patch16_224': _cfg(
url='',
mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
),
'tnt_b_patch16_224': _cfg(
mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
),
}
class Identity(nn.Layer):
r"""A placeholder identity operator that is argument-insensitive.
Args:
args: any argument (unused)
kwargs: any keyword argument (unused)
Examples::
        >>> m = Identity(54, unused_argument1=0.1, unused_argument2=False)
        >>> input = paddle.randn([128, 20])
        >>> output = m(input)
        >>> print(output.shape)
        [128, 20]
"""
def __init__(self, *args, **kwargs):
super(Identity, self).__init__()
def forward(self, inputs):
return inputs
def drop_path(x, drop_prob: float = 0., training: bool = False):
"""Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).
This is the same as the DropConnect impl I created for EfficientNet, etc networks, however,
the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper...
See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for
changing the layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use
'survival rate' as the argument.
"""
if drop_prob == 0. or not training:
return x
keep_prob = 1 - drop_prob
shape = (x.shape[0],) + (1,) * (x.ndim - 1) # work with diff dim tensors, not just 2D ConvNets
random_tensor = keep_prob + paddle.rand(shape=shape, dtype=x.dtype)  # paddle.rand takes no device argument
random_tensor = random_tensor.floor()  # binarize (floor() is not in-place in paddle, so reassign)
output = x / keep_prob * random_tensor  # keep_prob is a Python float, so use / rather than Tensor.divide
return output
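# Minimal usage sketch (editorial, not part of the original file; shapes are illustrative):
# x = paddle.ones([4, 196, 384])
# y = drop_path(x, drop_prob=0.1, training=True)
# Each sample's residual branch is either zeroed or kept and rescaled by
# 1/keep_prob, so the expected value of y matches x.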
class DropPath(nn.Layer):
"""Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).
"""
def __init__(self, drop_prob=None):
super(DropPath, self).__init__()
self.drop_prob = drop_prob
def forward(self, x):
return drop_path(x, self.drop_prob, self.training)
class Attention(nn.Layer):
'''
Attention module (used by both the inner and outer transformer).
'''
def __init__(self, dim, hidden_dim, num_heads=8, qkv_bias=False, attn_drop=0., proj_drop=0.):
super(Attention, self).__init__()
self.hidden_dim = hidden_dim
self.num_heads = num_heads
head_dim = hidden_dim // num_heads
self.head_dim = head_dim
self.scale = head_dim ** -0.5
self.qk = nn.Linear(dim, hidden_dim * 2, bias_attr=qkv_bias)
self.v = nn.Linear(dim, dim, bias_attr=qkv_bias)
self.attn_drop = nn.Dropout(attn_drop) # no inplace
self.proj = nn.Linear(dim, dim)
self.proj_drop = nn.Dropout(proj_drop)
def forward(self, inputs):
x = inputs
B, N, C = x.shape
qk = self.qk(x).reshape((B, N, 2, self.num_heads, self.head_dim)).transpose((2, 0, 3, 1, 4))
q, k = qk[0], qk[1]
v = self.v(x).reshape((B, N, self.num_heads, -1)).transpose((0, 2, 1, 3))
attn = paddle.matmul(q, k.transpose((0, 1, 3, 2))) * self.scale
attn = paddle.nn.functional.softmax(attn, axis=-1)
attn = self.attn_drop(attn)
x = paddle.matmul(attn, v).transpose((0, 2, 1, 3)).reshape((B, N, -1))
x = self.proj(x)
x = self.proj_drop(x)
return x
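# Shape walk-through (editorial, assuming dim == hidden_dim == 384 and num_heads == 8):
# x: [B, N, 384] -> qk: [2, B, 8, N, 48] -> attn: [B, 8, N, N] -> out: [B, N, 384]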
class Mlp(nn.Layer):
def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):
super(Mlp, self).__init__()
out_features = out_features or in_features
hidden_features = hidden_features or in_features
self.fc1 = nn.Linear(in_features, hidden_features)
self.act = act_layer()
self.fc2 = nn.Linear(hidden_features, out_features)
self.drop = nn.Dropout(drop)
def forward(self, x):
x = self.fc1(x)
x = self.act(x)
x = self.drop(x)
x = self.fc2(x)
x = self.drop(x)
return x
class Block(nn.Layer):
""" TNT Block
"""
def __init__(self, dim, in_dim, num_pixel, num_heads=12, in_num_head=4, mlp_ratio=4.,
qkv_bias=False, drop=0., attn_drop=0., drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm):
super(Block, self).__init__()
# Inner transformer
self.norm_in = norm_layer(in_dim)
self.attn_in = Attention(
in_dim, in_dim, num_heads=in_num_head, qkv_bias=qkv_bias,
attn_drop=attn_drop, proj_drop=drop)
self.norm_mlp_in = norm_layer(in_dim)
self.mlp_in = Mlp(in_features=in_dim, hidden_features=int(in_dim * 4),
out_features=in_dim, act_layer=act_layer, drop=drop)
self.norm1_proj = norm_layer(in_dim)
self.proj = nn.Linear(in_dim * num_pixel, dim, bias_attr=True)
# Outer transformer
self.norm_out = norm_layer(dim)
self.attn_out = Attention(
dim, dim, num_heads=num_heads, qkv_bias=qkv_bias,
attn_drop=attn_drop, proj_drop=drop)
self.drop_path = DropPath(drop_path) if drop_path > 0. else Identity()
self.norm_mlp = norm_layer(dim)
self.mlp = Mlp(in_features=dim, hidden_features=int(dim * mlp_ratio),
out_features=dim, act_layer=act_layer, drop=drop)
def forward(self, pixel_embed, patch_embed):
# inner
pixel_embed = pixel_embed + self.drop_path(self.attn_in(self.norm_in(pixel_embed)))
pixel_embed = pixel_embed + self.drop_path(self.mlp_in(self.norm_mlp_in(pixel_embed)))
# outer
B, N, C = patch_embed.shape
patch_embed[:, 1:] = patch_embed[:, 1:] + self.proj(self.norm1_proj(pixel_embed).reshape((B, N - 1, -1)))
patch_embed = patch_embed + self.drop_path(self.attn_out(self.norm_out(patch_embed)))
patch_embed = patch_embed + self.drop_path(self.mlp(self.norm_mlp(patch_embed)))
return pixel_embed, patch_embed
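# Data-flow note (editorial): the inner transformer refines pixel tokens, which
# are then folded back into their owning patch token through self.proj, so every
# patch embedding (all but the class token) absorbs pixel-level detail before
# the outer attention and MLP run.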
class PixelEmbed(nn.Layer):
""" Image to Pixel Embedding
"""
def __init__(self, img_size=224, patch_size=16, in_chans=3, in_dim=48, stride=4):
super(PixelEmbed, self).__init__()
num_patches = (img_size // patch_size) ** 2
self.img_size = img_size
self.num_patches = num_patches
self.in_dim = in_dim
new_patch_size = math.ceil(patch_size / stride)
self.new_patch_size = new_patch_size
self.proj = nn.Conv2D(in_chans, self.in_dim, kernel_size=7, padding=3, stride=stride)
def forward(self, x, pixel_pos):
B, C, H, W = x.shape
assert H == self.img_size and W == self.img_size, \
f"Input image size ({H}*{W}) doesn't match model ({self.img_size}*{self.img_size})."
x = self.proj(x)
x = nn.functional.unfold(x=x, kernel_sizes=self.new_patch_size, strides=self.new_patch_size)
x = x.transpose((0, 2, 1)).reshape((B * self.num_patches, self.in_dim, self.new_patch_size, self.new_patch_size))
x = x + pixel_pos
x = x.reshape((B * self.num_patches, self.in_dim, -1)).transpose((0, 2, 1))
return x
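# Shape sketch (editorial, for img_size=224, patch_size=16, stride=4, in_dim=48):
# x: [B, 3, 224, 224] -> proj: [B, 48, 56, 56] -> unfold: [B, 768, 196]
# -> per-patch grids: [B*196, 48, 4, 4] -> pixel tokens: [B*196, 16, 48]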
class TNT(nn.Layer):
""" Transformer in Transformer - https://arxiv.org/abs/2103.00112
"""
def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dim=768, in_dim=48, depth=12,
num_heads=12, in_num_head=4, mlp_ratio=4., qkv_bias=False, drop_rate=0., attn_drop_rate=0.,
drop_path_rate=0., norm_layer=nn.LayerNorm, first_stride=4):
super(TNT, self).__init__()
self.num_classes = num_classes
self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models
self.pixel_embed = PixelEmbed(
img_size=img_size, patch_size=patch_size, in_chans=in_chans, in_dim=in_dim, stride=first_stride)
num_patches = self.pixel_embed.num_patches
self.num_patches = num_patches
new_patch_size = self.pixel_embed.new_patch_size
num_pixel = new_patch_size ** 2
self.norm1_proj = norm_layer(num_pixel * in_dim)
self.proj = nn.Linear(num_pixel * in_dim, embed_dim)
self.norm2_proj = norm_layer(embed_dim)
# Create learnable parameters (class token and position embeddings)
self.cls_token = paddle.create_parameter((1, 1, embed_dim), 'float32', attr=nn.initializer.Assign(paddle.zeros((1, 1, embed_dim))))
self.patch_pos = paddle.create_parameter((1, num_patches + 1, embed_dim), 'float32', attr=nn.initializer.Assign(paddle.zeros((1, num_patches + 1, embed_dim))))
self.pixel_pos = paddle.create_parameter((1, in_dim, new_patch_size, new_patch_size), 'float32', attr=nn.initializer.Assign(paddle.zeros((1, in_dim, new_patch_size, new_patch_size))))
self.pos_drop = nn.Dropout(p=drop_rate)
dpr = [x for x in paddle.linspace(0, drop_path_rate, depth)] # stochastic depth decay rule
blocks = []
for i in range(depth):
blocks.append(Block(
dim=embed_dim, in_dim=in_dim, num_pixel=num_pixel, num_heads=num_heads, in_num_head=in_num_head,
mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, drop=drop_rate, attn_drop=attn_drop_rate,
drop_path=dpr[i], norm_layer=norm_layer))
self.blocks = nn.LayerList(blocks)
self.norm = norm_layer(embed_dim)
self.head = nn.Linear(embed_dim, num_classes) if num_classes > 0 else nn.Identity()
with paddle.no_grad():
self.cls_token = paddle.create_parameter(self.cls_token.shape, 'float32', attr=nn.initializer.Assign(paddle.normal(self.cls_token, std=.02)))
self.patch_pos = paddle.create_parameter(self.patch_pos.shape, 'float32', attr=nn.initializer.Assign(paddle.normal(self.patch_pos, std=.02)))
self.pixel_pos = paddle.create_parameter(self.pixel_pos.shape, 'float32', attr=nn.initializer.Assign(paddle.normal(self.pixel_pos, std=.02)))
self.apply(self._init_weights)
def _init_weights(self, m):
if isinstance(m, nn.Linear):
with paddle.no_grad():
m.weight = paddle.create_parameter(m.weight.shape, 'float32', attr=nn.initializer.Assign(paddle.normal(m.weight, std=.02)))
if isinstance(m, nn.Linear) and m.bias is not None:
m.bias = paddle.create_parameter(m.bias.shape, 'float32', attr=nn.initializer.Constant(value=0.))
elif isinstance(m, nn.LayerNorm):
m.bias = paddle.create_parameter(m.bias.shape, 'float32', attr=nn.initializer.Constant(value=0.))
m.weight = paddle.create_parameter(m.weight.shape, 'float32', attr=nn.initializer.Constant(value=1.))
def no_weight_decay(self):
return {'patch_pos', 'pixel_pos', 'cls_token'}
def get_classifier(self):
return self.head
def reset_classifier(self, num_classes, global_pool=''):
self.num_classes = | d_features(self, x):
B = x.shape[0]
pixel_embed = self.pixel_embed(x, self.pixel_pos)
patch_embed = self.norm2_proj(self.proj(self.norm1_proj(pixel_embed.reshape((B, self.num_patches, -1)))))
patch_embed = paddle.concat((self.cls_token.expand([B, self.cls_token.shape[1],self.cls_token.shape[2]]), patch_embed), axis=1) # expand
patch_embed = patch_embed + self.patch_pos
patch_embed = self.pos_drop(patch_embed)
for blk in self.blocks:
pixel_embed, patch_embed = blk(pixel_embed, patch_embed)
patch_embed = self.norm(patch_embed)
return patch_embed[:, 0]
def forward(self, x):
x = self.forward_features(x)
x = self.head(x)
return x
def tnt_s_patch16_224(pretrained=False, **kwargs):
model = TNT(patch_size=16, embed_dim=384, in_dim=24, depth=12, num_heads=6, in_num_head=4,
qkv_bias=False, **kwargs)
model.default_cfg = default_cfgs['tnt_s_patch16_224']
if pretrained:
load_pretrained(
model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3))
return model
def tnt_b_patch16_224(pretrained=False, **kwargs):
model = TNT(patch_size=16, embed_dim=640, in_dim=40, depth=12, num_heads=10, in_num_head=4,
qkv_bias=False, **kwargs)
model.default_cfg = default_cfgs['tnt_b_patch16_224']
if pretrained:
load_pretrained(
model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3))
return model | num_classes
self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity()
def forwar |
index.js | export { default as Example } from '../common/Example';
export { default as TrainingSet } from '../common/TrainingSet';
export { default as ProcessedSensors } from './ProcessedSensors'; | export { default as XmmProcessor } from '../common/XmmProcessor'; | |
index.js | // Packages
const fs = require('fs');
const Metalsmith = require('metalsmith');
const markdown = require('metalsmith-markdownit');
const layouts = require('metalsmith-layouts');
const permalinks = require('metalsmith-permalinks');
const assets = require('metalsmith-assets');
const dataLoader = require('metalsmith-data-loader');
const watch = require('metalsmith-watch');
const branch = require('metalsmith-branch')
const serve = require('metalsmith-serve');
const redirect = require('metalsmith-redirect');
const webpack = require('metalsmith-webpack2');
const anchor = require('markdown-it-anchor');
const attrs = require('markdown-it-attrs');
const timer = require('metalsmith-timer');
const ignore = require('metalsmith-ignore');
const copy = require('metalsmith-copy');
// Local Plugins
const reduce = require('./plugins/metalsmith-revision').reduce;
const restore = require('./plugins/metalsmith-revision').restore;
const hierarchy = require('./plugins/metalsmith-hierarchy');
const hierarchyRss = require('./plugins/metalsmith-hierarchy-rss');
const headings = require('./plugins/metalsmith-headings');
const algolia = require('./plugins/metalsmith-algolia');
const inPlace = require('./plugins/metalsmith-in-place-dcos');
const includeContent = require('./plugins/metalsmith-include-content-dcos');
const shortcodes = require('./plugins/metalsmith-shortcodes');
const wkhtmltopdfLinkResolver = require('./plugins/metalsmith-wkhtmltopdf-link-resolver');
// Configs
const configData = fs.readFileSync('config.json');
const config = JSON.parse(configData);
const shortcodesConfig = require('./shortcodes');
function | (val) {
return (val && val.length > 0) ? val.split(',') : [];
}
// Environment Variables
const GIT_BRANCH = process.env.GIT_BRANCH;
const ALGOLIA_UPDATE = process.env.ALGOLIA_UPDATE;
const ALGOLIA_PROJECT_ID = process.env.ALGOLIA_PROJECT_ID;
const ALGOLIA_PUBLIC_KEY = process.env.ALGOLIA_PUBLIC_KEY;
const ALGOLIA_PRIVATE_KEY = process.env.ALGOLIA_PRIVATE_KEY;
const ALGOLIA_INDEX = process.env.ALGOLIA_INDEX;
const RENDER_PATH_PATTERN = process.env.RENDER_PATH_PATTERN || process.env.RPP;
const branchDoNotIndex = config[GIT_BRANCH] ? (
config[GIT_BRANCH].DO_NOT_INDEX
) : (
[]
);
const ALGOLIA_SKIP_SECTIONS = branchDoNotIndex ? (
config.always.DO_NOT_INDEX.concat(branchDoNotIndex)
) : (
config.always.DO_NOT_INDEX
);
const branchDoNotBuild = config[GIT_BRANCH] ? (
config[GIT_BRANCH].DO_NOT_BUILD
) : (
config.local.DO_NOT_BUILD
);
const METALSMITH_SKIP_SECTIONS = config.always.DO_NOT_BUILD.concat(branchDoNotBuild);
//
// Errors
//
if (!GIT_BRANCH && process.env.NODE_ENV !== 'development') {
throw new Error('Env var GIT_BRANCH has not been set.');
}
if (ALGOLIA_UPDATE === 'true') {
if (process.env.NODE_ENV === 'pdf') {
throw new Error('Algolia env vars set while build env is pdf');
}
if (!ALGOLIA_PROJECT_ID) {
throw new Error('Env var ALGOLIA_PROJECT_ID has not been set.');
}
if (!ALGOLIA_PUBLIC_KEY) {
throw new Error('Env var ALGOLIA_PUBLIC_KEY has not been set.');
}
if (!ALGOLIA_PRIVATE_KEY) {
throw new Error('Env var ALGOLIA_PRIVATE_KEY has not been set.');
}
if (!ALGOLIA_INDEX) {
throw new Error('Env var ALGOLIA_INDEX has not been set.');
}
}
//
// Metalsmith
//
const MS = Metalsmith(__dirname);
const currentYear = (new Date()).getFullYear();
// Metadata
// These are available in the layouts as js variables
MS.metadata({
url: 'https://docs.mesosphere.com',
siteTitle: 'Mesosphere DC/OS Documentation',
siteDescription: 'Welcome to the DC/OS documentation. The DC/OS documentation ' +
'can help you set up, learn about the system, and get your applications and' +
' workloads running on DC/OS.',
copyright: `© ${currentYear} Mesosphere, Inc. All rights reserved.`,
env: process.env.NODE_ENV,
gitBranch: GIT_BRANCH,
dcosDocsLatest: '1.13',
});
// Source
// Where metalsmith looks for all files
MS.source('./pages');
// Destination
// Where metalsmith will put the output code
MS.destination('./build');
// Don't Clean
// Cleaning removes the destination directory before writing to it
// I imagine cleaning makes watching take a long time, but untested for now
MS.clean(false);
//
// Content Branch Pipeline
//
const CB = branch();
// Start timer
CB.use(timer('CB: Init'));
CB.use(ignore(METALSMITH_SKIP_SECTIONS));
CB.use(timer('CB: Ignore'));
CB.use(copy({
pattern: '**/README.md',
transform: file => file.replace(/README/, 'index'),
move: true,
}));
CB.use(timer('CB: Copy'));
// Load model data from external .json/.yaml files
// For example (in your Front Matter):
// model: path/to/my.yml (access content in my.yml as model.foo.bar)
// Can also specify multiple named models:
// model:
// data1: path/to/my.json (access content in my.json as model.data1.foo.bar)
// data2: path/to/my.yml (access content in my.yml as model.data2.foo.bar)
CB.use(dataLoader({
dataProperty: 'model',
match: '**/*.md',
}));
CB.use(timer('CB: Dataloader'));
// Load raw content via '#include' directives before rendering any mustache or markdown.
// For example (in your content):
// #include path/to/file.tmpl
CB.use(includeContent({
// Style as a C-like include statement. Must be on its own line.
pattern: '^#include ([^ \n]+)$',
match: '**/*.md*',
}));
CB.use(timer('CB: IncludeContent'));
// Process any mustache templating in files.
// For example (in your Front Matter):
// render: mustache
CB.use(inPlace({
renderProperty: 'render',
match: '**/*.md',
}));
CB.use(timer('CB: Mustache'));
// Folder Hierarchy
CB.use(hierarchy({
files: ['.md'],
excerpt: true,
}));
CB.use(timer('CB: Hierarchy'));
// RSS Feed
CB.use(hierarchyRss({
itemOptionsMap: {
title: 'title',
description: 'excerpt',
},
}));
CB.use(timer('CB: Hierarchy RSS'));
// Filter unmodified files
if (process.env.NODE_ENV === 'development') {
CB.use(reduce());
CB.use(timer('CB: Reduce'));
}
//
// Slow Plugins
//
// Shortcodes
CB.use(shortcodes({
files: ['.md'],
shortcodes: shortcodesConfig,
}));
CB.use(timer('CB: Shortcodes'));
// Markdown
CB.use(markdown(
{
smartList: false,
typographer: true,
html: true,
})
.use(anchor, {
permalink: true,
renderPermalink: (slug, opts, state, idx) => {
const linkTokens = [
Object.assign(new state.Token('link_open', 'a', 1), {
attrs: [
['class', opts.permalinkClass],
['href', opts.permalinkHref(slug, state)],
['aria-hidden', 'true'],
],
}),
Object.assign(new state.Token('html_block', '', 0), { content: opts.permalinkSymbol }),
new state.Token('link_close', 'a', -1),
];
state.tokens[idx + 1].children.unshift(...linkTokens);
},
permalinkClass: 'content__anchor',
permalinkSymbol: '<i data-feather="bookmark"></i>',
permalinkBefore: true,
})
.use(attrs),
);
CB.use(timer('CB: Markdown'));
// Headings
CB.use(headings());
CB.use(timer('CB: Headings'));
CB.use(redirect({
'/support': 'https://support.mesosphere.com',
}));
CB.use(timer('CB: Redirects'));
// Permalinks
CB.use(permalinks());
CB.use(timer('CB: Permalinks'));
// Layouts
if (!RENDER_PATH_PATTERN) {
// Default: Render all pages.
CB.use(layouts({
engine: 'pug',
cache: true,
}));
} else {
// Dev optimization: Only render within a specific path (much faster turnaround)
// For example, 'services/beta-cassandra/latest/**'
CB.use(layouts({
engine: 'pug',
pattern: RENDER_PATH_PATTERN,
cache: true,
}));
}
CB.use(timer('CB: Layouts'));
//
// Slow Plugins End
//
// Restore unmodified files
if (process.env.NODE_ENV === 'development') {
CB.use(restore());
CB.use(timer('CB: Reduce'));
}
// The expected pattern format doesn't work with regex
let pathPatternRegex;
if (RENDER_PATH_PATTERN) {
pathPatternRegex = RENDER_PATH_PATTERN.split('/').slice(0, -1).join("\/");
}
// Search Indexing
if (ALGOLIA_UPDATE === 'true') {
CB.use(algolia({
projectId: ALGOLIA_PROJECT_ID,
privateKey: ALGOLIA_PRIVATE_KEY,
index: ALGOLIA_INDEX,
skipSections: ALGOLIA_SKIP_SECTIONS,
renderPathPattern: pathPatternRegex,
}));
CB.use(timer('CB: Algolia'));
}
// Enable watching
// The keys represent the files to watch, the values are the files that will
// be updated. ONLY the files that are being updated will be accessible to
// during the rebuild. We must include everything at this point or the
// templates will not be accessible. Need changes to fix this.
// Can only watch with a RENDER_PATH_PATTERN because there are too many
// files without it.
if (process.env.NODE_ENV === 'development' && RENDER_PATH_PATTERN) {
CB.use(watch({
paths: {
[`pages/${RENDER_PATH_PATTERN}/*`]: '**/*.{md,tmpl}',
'layouts/**/*': '**/*.pug',
},
}));
CB.use(timer('CB: Watch'));
}
// WkhtmltopdfLinkResolver
if (process.env.NODE_ENV === 'pdf') {
CB.use(wkhtmltopdfLinkResolver({
prefix: '/tmp/pdf/build',
}));
CB.use(timer('CB: WkhtmltopdfLinkResolver'));
}
// Serve
if (process.env.NODE_ENV === 'development') {
CB.use(serve({
port: 3000,
}));
CB.use(timer('CB: Webserver'));
}
//
// Assets Branch
//
const AB = branch();
// Start timer
AB.use(timer('AB: Init'));
// Watch
// Can only watch with a RENDER_PATH_PATTERN because there are too many
// files without it.
if (process.env.NODE_ENV === 'development' && RENDER_PATH_PATTERN) {
AB.use(watch({
paths: {
'js/**/*': '**/*.js',
'scss/**/*': '**/*.scss',
},
}));
AB.use(timer('AB: Watch'));
}
// Assets
AB.use(assets({
source: 'assets',
destination: 'assets',
}));
AB.use(timer('AB: Assets'));
// Webpack
AB.use(webpack('./webpack.config.js'));
AB.use(timer('AB: Webpack'));
//
// Metalsmith
//
MS.use(CB);
MS.use(AB);
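// Editorial note (not in the original): the build is split into two
// metalsmith-branch pipelines (CB renders markdown content; AB only copies
// assets and runs webpack), and both are mounted on the root Metalsmith
// instance, so a single MS.build() drives the whole site.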
// Build
MS.build((err, files) => {
if (err) throw err;
});
| splitCommasOrEmptyArray |
day_04.rs | /// Solves the Day 04 Part 1 puzzle with respect to the given input.
pub fn part_1(input: String) {
let mut lines = input.lines();
let draws = parse_draws(&mut lines);
let boards = parse_boards(&mut lines);
let winner = 1;
let (draw, board) = play(draws, boards, winner);
let mut left = 0;
for r2 in 0..5 {
for c2 in 0..5 {
if board[r2][c2] != -1 {
left += board[r2][c2];
}
}
}
println!("{}", draw * left);
}
/// Solves the Day 04 Part 2 puzzle with respect to the given input.
pub fn part_2(input: String) {
let mut lines = input.lines();
let draws = parse_draws(&mut lines);
let boards = parse_boards(&mut lines);
let winner = boards.len() as i32;
let (draw, board) = play(draws, boards, winner);
let mut left = 0;
for r2 in 0..5 {
for c2 in 0..5 {
if board[r2][c2] != -1 {
left += board[r2][c2];
}
}
}
println!("{}", draw * left);
}
/// Parses a string into an integer.
fn str_to_int(token: &str) -> i32 {
return token.parse::<i32>().unwrap();
}
/// Returns the draws for the Bingo game.
fn | <'a>(lines: &mut impl Iterator<Item = &'a str>) -> Vec<i32> {
let draws = lines.next().unwrap().split(",").map(str_to_int).collect();
lines.next();
return draws;
}
/// Returns the boards for the Bingo game.
fn parse_boards<'a>(lines: &mut impl Iterator<Item = &'a str>) -> Vec<[[i32; 5]; 5]> {
let mut boards: Vec<[[i32; 5]; 5]> = Vec::new();
loop {
let mut board = [[0i32; 5]; 5];
for i in 0..5 {
let mut row = lines.next().unwrap().split_whitespace().map(str_to_int);
for j in 0..5 {
board[i][j] = row.next().unwrap();
}
}
boards.push(board);
if lines.next() == None {
return boards;
}
}
}
/// Plays Bingo and returns the draw and board of the winning board.
fn play(draws: Vec<i32>, boards: Vec<[[i32; 5]; 5]>, winner: i32) -> (i32, [[i32; 5]; 5]) {
let mut boards = boards;
let mut won = vec![false; boards.len()];
let mut wins = 0;
for draw in draws {
for i in 0..boards.len() {
if !won[i] {
let board = &mut boards[i];
for r1 in 0..5 {
for c1 in 0..5 {
if board[r1][c1] == draw {
board[r1][c1] = -1;
if bingo(board) {
won[i] = true;
wins += 1;
if wins == winner {
return (draw, board.to_owned());
}
}
}
}
}
}
}
}
panic!("Not enough boards won at Bingo.")
}
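// Editorial note (not in the original): `winner` is the ordinal of the winning
// board to return; part 1 passes 1 (the first board to win) and part 2 passes
// boards.len() (the last board to win).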
/// Returns true iff there is a row or column filled with -1 in the given board.
fn bingo(board: &[[i32; 5]; 5]) -> bool {
for i in 0..5 {
let mut s1 = 0;
let mut s2 = 0;
for j in 0..5 {
s1 += board[i][j];
s2 += board[j][i];
}
if s1 == -5 || s2 == -5 {
return true;
}
}
return false;
}
| parse_draws |
lib.rs | // Copyright Materialize, Inc. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
//! Core expression language.
#![deny(missing_debug_implementations)]
use std::fmt;
use serde::{Deserialize, Serialize};
use repr::{ColumnType, ScalarType};
mod id;
mod linear;
mod relation;
mod scalar;
pub mod explain;
pub use relation::canonicalize;
pub use id::{GlobalId, Id, LocalId, PartitionId, SourceInstanceId};
pub use linear::{memoize_expr, MapFilterProject};
pub use relation::func::{AggregateFunc, TableFunc};
pub use relation::func::{AnalyzedRegex, CaptureGroupDesc};
pub use relation::join_input_mapper::JoinInputMapper;
pub use relation::{
compare_columns, AggregateExpr, ColumnOrder, IdGen, JoinImplementation, MirRelationExpr,
RowSetFinishing,
};
pub use scalar::func::{BinaryFunc, NullaryFunc, UnaryFunc, VariadicFunc};
pub use scalar::{like_pattern, EvalError, MirScalarExpr};
/// A [`MirRelationExpr`] that claims to have been optimized, e.g., by an
/// `transform::Optimizer`.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
pub struct OptimizedMirRelationExpr(pub MirRelationExpr);
impl OptimizedMirRelationExpr {
/// Declare that the input `expr` is optimized, without actually running it
/// through an optimizer. This can be useful to mark as optimized literal
/// `MirRelationExpr`s that are obviously optimal, without invoking the whole
/// machinery of the optimizer.
pub fn declare_optimized(expr: MirRelationExpr) -> OptimizedMirRelationExpr {
OptimizedMirRelationExpr(expr)
}
pub fn into_inner(self) -> MirRelationExpr {
self.0
}
}
impl AsRef<MirRelationExpr> for OptimizedMirRelationExpr {
fn as_ref(&self) -> &MirRelationExpr {
&self.0
}
}
impl AsMut<MirRelationExpr> for OptimizedMirRelationExpr {
fn as_mut(&mut self) -> &mut MirRelationExpr {
&mut self.0
}
}
/// A trait for humanizing components of an expression.
pub trait ExprHumanizer: fmt::Debug {
/// Attempts to return a human-readable string for the relation
/// identified by `id`.
fn humanize_id(&self, id: GlobalId) -> Option<String>;
/// Returns a human-readable name for the specified scalar type.
fn humanize_scalar_type(&self, ty: &ScalarType) -> String;
/// Returns a human-readable name for the specified column type.
fn humanize_column_type(&self, ty: &ColumnType) -> String;
}
/// A bare-minimum implementation of [`ExprHumanizer`].
///
/// The `DummyHumanizer` does a poor job of humanizing expressions. It is
/// intended for use in contexts where polish is not required, like in tests or
/// while debugging.
#[derive(Debug)]
pub struct DummyHumanizer;
impl ExprHumanizer for DummyHumanizer {
fn humanize_id(&self, _: GlobalId) -> Option<String> {
// Returning `None` allows the caller to fall back to displaying the
// ID, if they so desire.
None
}
fn | (&self, ty: &ScalarType) -> String {
// The debug implementation is better than nothing.
format!("{:?}", ty)
}
fn humanize_column_type(&self, ty: &ColumnType) -> String {
// The debug implementation is better than nothing.
format!("{:?}", ty)
}
}
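// Illustrative sketch (editorial): because DummyHumanizer falls back to Debug
// formatting, humanize_scalar_type(&ScalarType::Int64) would render simply as
// "Int64" (the variant name is assumed here for illustration).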
| humanize_scalar_type |
log.go | package cli
import (
"log"
"os"
"github.com/brianshepanek/tusd" |
func logEv(logOutput *log.Logger, eventName string, details ...string) {
tusd.LogEvent(logOutput, eventName, details...)
} | )
var stdout = log.New(os.Stdout, "[tusd] ", 0)
var stderr = log.New(os.Stderr, "[tusd] ", 0) |
storage.rs | use criterion::{
black_box, AxisScale, BatchSize, Bencher, BenchmarkId, Criterion, PlotConfiguration, Throughput,
};
use rand::{rngs::StdRng, Rng, SeedableRng};
use matterdb::{access::CopyAccessExt, Fork, ListIndex, MapIndex};
use super::BenchDB;
const NAME: &str = "name";
const FAMILY: &str = "index_family";
const SAMPLE_SIZE: usize = 10;
const CHUNK_SIZE: usize = 64;
const SEED: [u8; 32] = [100; 32];
#[cfg(all(test, not(feature = "long_benchmarks")))]
const ITEM_COUNTS: [usize; 3] = [1_000, 10_000, 100_000];
#[cfg(all(test, feature = "long_benchmarks"))]
const ITEM_COUNTS: [usize; 4] = [1_000, 10_000, 100_000, 1_000_000];
fn generate_random_kv(len: usize) -> Vec<(u32, Vec<u8>)> {
let mut key = 0;
let kv_generator = |_| {
let v = vec![0; CHUNK_SIZE];
// Generate only unique keys.
let k = key;
key += 1;
(k, v)
};
(0..len).map(kv_generator).collect()
}
fn plain_map_index_insert(b: &mut Bencher<'_>, len: usize) {
let data = generate_random_kv(len);
b.iter_with_setup(
|| (BenchDB::default(), data.clone()),
|(db, data)| {
let fork = db.fork();
{
let mut table = fork.get_map(NAME);
for item in data {
table.put(&item.0, item.1);
}
}
db.merge_sync(fork.into_patch()).unwrap();
},
);
}
fn plain_map_index_with_family_insert(b: &mut Bencher<'_>, len: usize) {
let data = generate_random_kv(len);
b.iter_with_setup(
|| (BenchDB::default(), data.clone()),
|(db, data)| {
let fork = db.fork();
{
let mut table = fork.get_map((NAME, FAMILY));
for item in data {
table.put(&item.0, item.1);
}
}
db.merge_sync(fork.into_patch()).unwrap();
},
);
}
fn plain_map_index_iter(b: &mut Bencher<'_>, len: usize) |
fn plain_map_index_with_family_iter(b: &mut Bencher<'_>, len: usize) {
let data = generate_random_kv(len);
let db = BenchDB::default();
let fork = db.fork();
{
let mut table = fork.get_map((NAME, FAMILY));
assert!(table.keys().next().is_none());
for item in data {
table.put(&item.0, item.1);
}
}
db.merge(fork.into_patch()).unwrap();
b.iter_with_setup(
|| db.snapshot(),
|snapshot| {
let index: MapIndex<_, u32, Vec<u8>> = snapshot.get_map((NAME, FAMILY));
for (key, value) in &index {
black_box(key);
black_box(value);
}
},
);
}
fn plain_map_index_read(b: &mut Bencher<'_>, len: usize) {
let data = generate_random_kv(len);
let db = BenchDB::default();
let fork = db.fork();
{
let mut table = fork.get_map(NAME);
assert!(table.keys().next().is_none());
for item in data.clone() {
table.put(&item.0, item.1);
}
}
db.merge_sync(fork.into_patch()).unwrap();
b.iter_with_setup(
|| db.snapshot(),
|snapshot| {
let index: MapIndex<_, u32, Vec<u8>> = snapshot.get_map(NAME);
for item in &data {
let value = index.get(&item.0);
black_box(value);
}
},
);
}
fn plain_map_index_with_family_read(b: &mut Bencher<'_>, len: usize) {
let data = generate_random_kv(len);
let db = BenchDB::default();
let fork = db.fork();
{
let mut table = fork.get_map((NAME, FAMILY));
assert!(table.keys().next().is_none());
for item in data.clone() {
table.put(&item.0, item.1);
}
}
db.merge_sync(fork.into_patch()).unwrap();
b.iter_with_setup(
|| db.snapshot(),
|snapshot| {
let index: MapIndex<_, u32, Vec<u8>> = snapshot.get_map((NAME, FAMILY));
for item in &data {
let value = index.get(&item.0);
black_box(value);
}
},
);
}
fn bench_fn<F>(c: &mut Criterion, name: &str, benchmark: F)
where
F: Fn(&mut Bencher<'_>, usize) + 'static,
{
let mut group = c.benchmark_group(name);
for item_counts in ITEM_COUNTS.iter() {
group
.bench_with_input(
BenchmarkId::from_parameter(item_counts),
item_counts,
|b: &mut Bencher<'_>, len: &usize| benchmark(b, *len),
)
.throughput(Throughput::Elements(*item_counts as u64))
.plot_config(PlotConfiguration::default().summary_scale(AxisScale::Logarithmic))
.sample_size(SAMPLE_SIZE);
}
group.finish();
}
fn fill_list(list: &mut ListIndex<&Fork, Vec<u8>>, rng: &mut impl Rng) {
for _ in 0..500 {
let mut buffer = vec![0_u8; 512];
rng.fill(&mut buffer[..]);
list.push(buffer);
}
}
fn bench_index_clearing(bencher: &mut Bencher<'_>) {
let mut rng = StdRng::from_seed(SEED);
let db = BenchDB::default();
// Surround the cleared index with the indexes in the same column family.
let fork = db.fork();
for key in &[0_u8, 2] {
fill_list(&mut fork.get_list(("list", key)), &mut rng);
}
db.merge(fork.into_patch()).unwrap();
bencher.iter_batched(
|| {
let addr = ("list", &1_u8);
let fork = db.fork();
fill_list(&mut fork.get_list(addr), &mut rng);
db.merge(fork.into_patch()).unwrap();
let fork = db.fork();
fork.get_list::<_, Vec<u8>>(addr).clear();
fork.into_patch()
},
|patch| db.merge(patch).unwrap(),
BatchSize::SmallInput,
);
let snapshot = db.snapshot();
for key in &[0_u8, 2] {
let list = snapshot.get_list::<_, Vec<u8>>(("list", key));
assert_eq!(list.iter().count(), 500);
}
}
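// Editorial note (not in the original): `iter_batched` keeps the expensive
// setup (filling the list, then clearing it inside a fresh fork) out of the
// measurement, so only the final `db.merge(patch)` call is timed.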
pub fn bench_storage(c: &mut Criterion) {
// MapIndex
bench_fn(c, "storage/plain_map/insert", plain_map_index_insert);
bench_fn(c, "storage/plain_map/iter", plain_map_index_iter);
bench_fn(
c,
"storage/plain_map_with_family/insert",
plain_map_index_with_family_insert,
);
bench_fn(
c,
"storage/plain_map_with_family/iter",
plain_map_index_with_family_iter,
);
bench_fn(c, "storage/plain_map/read", plain_map_index_read);
bench_fn(
c,
"storage/plain_map_with_family/read",
plain_map_index_with_family_read,
);
// Index clearing
c.bench_function("storage/clearing", bench_index_clearing);
}
| {
let data = generate_random_kv(len);
let db = BenchDB::default();
let fork = db.fork();
{
let mut table = fork.get_map(NAME);
assert!(table.keys().next().is_none());
for item in data {
table.put(&item.0, item.1);
}
}
db.merge_sync(fork.into_patch()).unwrap();
b.iter_with_setup(
|| db.snapshot(),
|snapshot| {
let index: MapIndex<_, u32, Vec<u8>> = snapshot.get_map(NAME);
for (key, value) in &index {
black_box(key);
black_box(value);
}
},
);
} |
main.go | package main
| "log"
"github.com/derektrc/go-reddit/reddit"
)
var ctx = context.Background()
func main() {
if err := run(); err != nil {
log.Fatal(err)
}
}
func run() (err error) {
// Let's get the top 200 posts of r/golang.
// Reddit returns a maximum of 100 posts at a time,
// so we'll need to separate this into 2 requests.
posts, resp, err := reddit.DefaultClient().Subreddit.TopPosts(ctx, "golang", &reddit.ListPostOptions{
ListOptions: reddit.ListOptions{
Limit: 100,
},
Time: "all",
})
if err != nil {
return
}
for _, post := range posts {
fmt.Println(post.Title)
}
// The After option sets the id of an item that Reddit
// will use as an anchor point for the returned listing.
posts, _, err = reddit.DefaultClient().Subreddit.TopPosts(ctx, "golang", &reddit.ListPostOptions{
ListOptions: reddit.ListOptions{
Limit: 100,
After: resp.After,
},
Time: "all",
})
if err != nil {
return
}
for _, post := range posts {
fmt.Println(post.Title)
}
return
} | import (
"context"
"fmt" |
group.go | package nodeconfig
import (
"fmt"
"strconv"
)
// GroupID is a multicast group ID.
//
// It is a binary string,
// conducive to layering and scoped generation using cryptographic hash.
//
// Applications define their own group ID, without central allocation.
// A cryptographically secure random string of enough length – 32 bytes for
// example – may be used.
type GroupID string
func (id GroupID) String() string {
return string(id)
}
// Const of group ID
const (
GroupIDBeacon GroupID = "%s/0.0.1/node/beacon"
GroupIDBeaconClient GroupID = "%s/0.0.1/client/beacon"
GroupIDShardPrefix GroupID = "%s/0.0.1/node/shard/%s"
GroupIDShardClientPrefix GroupID = "%s/0.0.1/client/shard/%s"
GroupIDGlobal GroupID = "%s/0.0.1/node/global"
GroupIDGlobalClient GroupID = "%s/0.0.1/node/global"
GroupIDUnknown GroupID = "%s/B1acKh0lE"
)
// ShardID defines the ID of a shard
type ShardID uint32
func getNetworkPrefix(shardID ShardID) (netPre string) {
switch GetShardConfig(uint32(shardID)).GetNetworkType() {
case Mainnet:
netPre = "harmony"
case Testnet:
netPre = "hmy/testnet"
case Pangaea:
netPre = "hmy/pangaea"
case Devnet:
netPre = "hmy/devnet"
case Localnet:
netPre = "hmy/local"
default:
netPre = "hmy/misc"
}
return
}
// NewGroupIDByShardID returns a new groupID for a shard
func NewGroupIDByShardID(shardID ShardID) GroupID {
if shardID == 0 {
return GroupID(fmt.Sprintf(GroupIDBeacon.String(), getNetworkPrefix(shardID)))
}
return GroupID(fmt.Sprintf(GroupIDShardPrefix.String(), getNetworkPrefix(shardID), strconv.Itoa(int(shardID))))
}
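// Illustrative examples (editorial, not part of the original file): on mainnet,
// NewGroupIDByShardID(0) yields "harmony/0.0.1/node/beacon" and
// NewGroupIDByShardID(2) yields "harmony/0.0.1/node/shard/2".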
// NewClientGroupIDByShardID returns a new groupID for a shard's client
func NewClientGroupIDByShardID(shardID ShardID) GroupID {
if shardID == 0 {
| turn GroupID(fmt.Sprintf(GroupIDShardClientPrefix.String(), getNetworkPrefix(shardID), strconv.Itoa(int(shardID))))
}
// ActionType lists action on group
type ActionType uint
// Const of different Action type
const (
ActionStart ActionType = iota
ActionPause
ActionResume
ActionStop
ActionUnknown
)
func (a ActionType) String() string {
switch a {
case ActionStart:
return "ActionStart"
case ActionPause:
return "ActionPause"
case ActionResume:
return "ActionResume"
case ActionStop:
return "ActionStop"
}
return "ActionUnknown"
}
// GroupAction specify action on corresponding group
type GroupAction struct {
Name GroupID
Action ActionType
}
func (g GroupAction) String() string {
return fmt.Sprintf("%s/%s", g.Name, g.Action)
}
| return GroupID(fmt.Sprintf(GroupIDBeaconClient.String(), getNetworkPrefix(shardID)))
}
re |
tree-store.ts | import Node from './node'
import { getNodeKey } from './util'
import {
TreeKey,
TreeData,
TreeStoreNodesMap,
LoadFunction,
FilterNodeMethodFunction,
TreeOptionProps,
TreeStoreOptions,
FilterValue,
TreeNodeData,
} from '../tree.type'
export default class TreeStore {
currentNode: Node
currentNodeKey: TreeKey
nodesMap: TreeStoreNodesMap
root: Node
data: TreeData
lazy: boolean
load: LoadFunction
filterNodeMethod: FilterNodeMethodFunction
key: TreeKey
defaultCheckedKeys: TreeKey[];
checkStrictly: boolean;
defaultExpandedKeys: TreeKey[];
autoExpandParent: boolean;
defaultExpandAll: boolean;
checkDescendants: boolean;
props: TreeOptionProps;
constructor(options: TreeStoreOptions) {
this.currentNode = null
this.currentNodeKey = null
for (const option in options) {
if (options.hasOwnProperty(option)) {
this[option] = options[option]
}
}
this.nodesMap = {}
this.root = new Node({
data: this.data,
store: this,
})
if (this.lazy && this.load) {
const loadFn = this.load
loadFn(this.root, data => {
this.root.doCreateChildren(data)
this._initDefaultCheckedNodes()
})
} else {
this._initDefaultCheckedNodes()
}
}
filter(value: FilterValue): void {
const filterNodeMethod = this.filterNodeMethod
const lazy = this.lazy
const traverse = function(node: TreeStore | Node) {
const childNodes = (node as TreeStore).root ? (node as TreeStore).root.childNodes : (node as Node).childNodes
childNodes.forEach(child => {
child.visible = filterNodeMethod.call(child, value, child.data, child)
traverse(child)
})
if (!(node as Node).visible && childNodes.length) {
const allHidden = !childNodes.some(child => child.visible)
if ((node as TreeStore).root) {
(node as TreeStore).root.visible = allHidden === false
} else {
(node as Node).visible = allHidden === false
}
}
if (!value) return
if ((node as Node).visible && !(node as Node).isLeaf && !lazy) (node as Node).expand()
}
traverse(this)
}
setData(newVal: TreeData): void {
const instanceChanged = newVal !== this.root.data
if (instanceChanged) {
this.root.setData(newVal)
this._initDefaultCheckedNodes()
} else {
this.root.updateChildren()
}
}
getNode(data: TreeKey | TreeNodeData ): Node {
if (data instanceof Node) return data
const key = typeof data !== 'object' ? data : getNodeKey(this.key, data)
return this.nodesMap[key] || null
}
insertBefore(data: TreeNodeData, refData: TreeKey | TreeNodeData): void {
const refNode = this.getNode(refData)
refNode.parent.insertBefore({ data }, refNode)
}
insertAfter(data: TreeNodeData, refData: TreeKey | TreeNodeData): void {
const refNode = this.getNode(refData)
refNode.parent.insertAfter({ data }, refNode)
}
remove(data: TreeNodeData | Node): void {
const node = this.getNode(data)
if (node && node.parent) {
if (node === this.currentNode) {
this.currentNode = null
}
node.parent.removeChild(node)
}
}
append(data: TreeNodeData, parentData: TreeNodeData| TreeKey | Node ): void {
const parentNode = parentData ? this.getNode(parentData) : this.root
if (parentNode) {
parentNode.insertChild({ data })
}
}
_initDefaultCheckedNodes(): void {
const defaultCheckedKeys = this.defaultCheckedKeys || []
const nodesMap = this.nodesMap
defaultCheckedKeys.forEach(checkedKey => {
const node = nodesMap[checkedKey]
if (node) {
node.setChecked(true, !this.checkStrictly)
}
})
}
_initDefaultCheckedNode(node: Node): void {
const defaultCheckedKeys = this.defaultCheckedKeys || []
if (defaultCheckedKeys.indexOf(node.key) !== -1) {
node.setChecked(true, !this.checkStrictly)
}
}
setDefaultCheckedKey(newVal: TreeKey[]): void {
if (newVal !== this.defaultCheckedKeys) {
this.defaultCheckedKeys = newVal
this._initDefaultCheckedNodes()
}
}
registerNode(node: Node): void {
const key = this.key
if (!key || !node || !node.data) return
const nodeKey = node.key
if (nodeKey !== undefined) this.nodesMap[node.key] = node
}
deregisterNode(node: Node): void {
const key = this.key
if (!key || !node || !node.data) return
node.childNodes.forEach(child => {
this.deregisterNode(child)
})
delete this.nodesMap[node.key]
}
getCheckedNodes(leafOnly = false, includeHalfChecked = false): TreeNodeData[] {
const checkedNodes: TreeNodeData[] = []
const traverse = function(node: TreeStore | Node) {
const childNodes = (node as TreeStore).root ? (node as TreeStore).root.childNodes : (node as Node).childNodes
childNodes.forEach(child => {
if ((child.checked || (includeHalfChecked && child.indeterminate)) && (!leafOnly || (leafOnly && child.isLeaf))) {
checkedNodes.push(child.data)
}
traverse(child)
})
}
traverse(this)
return checkedNodes
}
getCheckedKeys(leafOnly = false): TreeKey[] {
return this.getCheckedNodes(leafOnly).map(data => (data || {})[this.key])
}
getHalfCheckedNodes(): TreeNodeData[] {
const nodes: TreeNodeData[] = []
const traverse = function(node: TreeStore | Node) {
const childNodes = (node as TreeStore).root ? (node as TreeStore).root.childNodes : (node as Node).childNodes
childNodes.forEach(child => {
if (child.indeterminate) {
nodes.push(child.data)
}
traverse(child)
})
}
traverse(this)
return nodes
}
getHalfCheckedKeys(): TreeKey[] {
return this.getHalfCheckedNodes().map(data => (data || {})[this.key])
}
_getAllNodes(): Node[] {
const allNodes: Node[] = []
const nodesMap = this.nodesMap
for (const nodeKey in nodesMap) {
if (nodesMap.hasOwnProperty(nodeKey)) {
allNodes.push(nodesMap[nodeKey])
}
}
return allNodes
}
updateChildren(key: TreeKey, data: TreeData): void {
const node = this.nodesMap[key]
if (!node) return
const childNodes = node.childNodes
for (let i = childNodes.length - 1; i >= 0; i--) {
const child = childNodes[i]
this.remove(child.data)
}
for (let i = 0, j = data.length; i < j; i++) {
const child = data[i]
this.append(child, node.data)
}
}
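// Editorial note (not in the original): _setCheckedKeys walks nodes sorted by
// level in descending order (children before parents) and uses `cache` to mark
// ancestors of checked nodes so they are not force-unchecked afterwards.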
_setCheckedKeys(key: TreeKey, leafOnly = false, checkedKeys: { [key: string]: boolean; }): void {
const allNodes = this._getAllNodes().sort((a, b) => b.level - a.level)
const cache = Object.create(null)
const keys = Object.keys(checkedKeys)
allNodes.forEach(node => node.setChecked(false, false))
for (let i = 0, j = allNodes.length; i < j; i++) {
const node = allNodes[i]
const nodeKey = node.data[key].toString()
const checked = keys.indexOf(nodeKey) > -1
if (!checked) {
if (node.checked && !cache[nodeKey]) {
node.setChecked(false, false)
}
continue
}
let parent = node.parent
while (parent && parent.level > 0) {
cache[parent.data[key]] = true
parent = parent.parent
}
if (node.isLeaf || this.checkStrictly) {
node.setChecked(true, false)
continue
}
node.setChecked(true, true)
if (leafOnly) {
node.setChecked(false, false)
const traverse = function(node: Node): void {
const childNodes = node.childNodes
childNodes.forEach(child => {
if (!child.isLeaf) {
child.setChecked(false, false)
}
traverse(child)
})
}
traverse(node)
} | }
setCheckedNodes(array: Node[], leafOnly = false): void {
const key = this.key
const checkedKeys = {}
array.forEach(item => {
checkedKeys[(item || {})[key]] = true
})
this._setCheckedKeys(key, leafOnly, checkedKeys)
}
setCheckedKeys(keys: TreeKey[], leafOnly = false): void {
this.defaultCheckedKeys = keys
const key = this.key
const checkedKeys = {}
keys.forEach(key => {
checkedKeys[key] = true
})
this._setCheckedKeys(key, leafOnly, checkedKeys)
}
setDefaultExpandedKeys(keys: TreeKey[]) {
keys = keys || []
this.defaultExpandedKeys = keys
keys.forEach(key => {
const node = this.getNode(key)
if (node) node.expand(null, this.autoExpandParent)
})
}
setChecked(data: TreeKey | TreeNodeData, checked: boolean, deep: boolean): void {
const node = this.getNode(data)
if (node) {
node.setChecked(!!checked, deep)
}
}
getCurrentNode(): Node {
return this.currentNode
}
setCurrentNode(currentNode: Node): void {
const prevCurrentNode = this.currentNode
if (prevCurrentNode) {
prevCurrentNode.isCurrent = false
}
this.currentNode = currentNode
this.currentNode.isCurrent = true
}
setUserCurrentNode(node: Node): void {
const key = node[this.key]
const currNode = this.nodesMap[key]
this.setCurrentNode(currNode)
}
setCurrentNodeKey(key: TreeKey): void {
if (key === null || key === undefined) {
this.currentNode && (this.currentNode.isCurrent = false)
this.currentNode = null
return
}
const node = this.getNode(key)
if (node) {
this.setCurrentNode(node)
}
}
} | } |
index.d.ts | import * as PIXI from 'pixi.js'
type DirectionType = 'all' | 'x' | 'y'
type UnderflowType = 'center' | 'top' | 'left' | 'right' | 'bottom' | (string & {})
type SidesType = 'all' | 'horizontal' | 'vertical' | (string & {})
type PluginType = 'bounce' | 'clamp-zoom' | 'clamp' | 'decelerate' | 'drag' | 'follow' | 'mouse-edges' | 'pinch' | 'snap' | 'snap-zoom' | 'wheel' | 'animate'
type EventType = 'pinch-start' | 'pinch-end' | 'snap-start' | 'snap-end' | 'snap-zoom-start' | 'snap-zoom-end' | 'bounce-x-start' | 'bounce-x-end' | 'bounce-y-start' | 'bounce-y-end' | 'wheel-scroll' | 'mouse-edge-start' | 'mouse-edge-end' | 'moved-end' | 'zoomed-end' | 'frame-end' | 'animate-end'
type ClickEventType = 'clicked' | 'drag-start' | 'drag-end'
type WheelEventType = 'wheel'
type ZoomedEventType = 'zoomed'
type ZoomedEventSourceType = 'clamp-zoom' | 'pinch' | 'wheel' | 'animate'
type MovedEventType = 'moved'
type MovedEventSourceType = 'bounce-x' | 'bounce-y' | 'clamp-x' | 'clamp-y' | 'decelerate' | 'drag' | 'wheel' | 'follow' | 'mouse-edges' | 'pinch' | 'snap' | 'animate'
type MouseButtonsType = 'all' | 'left' | 'middle' | 'right' | (string & {})
type KeyCodeType = 'ControlRight' | 'ControlLeft' | 'ShiftRight' | 'ShiftLeft' | 'AltRight' | 'AltLeft' | (string & {})
interface ViewportOptions {
divWheel?: HTMLElement
forceHitArea?: PIXI.Rectangle | PIXI.Circle | PIXI.Ellipse | PIXI.Polygon | PIXI.RoundedRectangle
interaction?: PIXI.InteractionManager
screenHeight?: number
screenWidth?: number
threshold?: number
passiveWheel?: boolean
stopPropagation?: boolean
noTicker?: boolean
ticker?: PIXI.Ticker
worldHeight?: number
worldWidth?: number
disableOnContextMenu?: boolean
}
interface DragOptions {
direction?: DirectionType
pressDrag?: boolean
wheel?: boolean
wheelScroll?: number
reverse?: boolean
clampWheel?: boolean | string
underflow?: UnderflowType
factor?: number
mouseButtons?: MouseButtonsType
keyToPress?: Array<KeyCodeType>
ignoreKeyToPressOnTouch?: boolean
}
interface PinchOptions {
percent?: number
noDrag?: boolean
center?: PIXI.Point
factor?: number
axis?: DirectionType
}
interface Bounds {
x: number
y: number
width: number
height: number
}
interface ClampOptions {
left?: boolean | number
right?: boolean | number
top?: boolean | number
bottom?: boolean | number
direction?: DirectionType
underflow?: UnderflowType
}
interface DecelerateOptions {
friction?: number
bounce?: number
minSpeed?: number
}
interface BounceOptions {
sides?: SidesType
friction?: number
time?: number
ease?: string | Function
underflow?: UnderflowType
bounceBox?: Bounds
}
interface SnapOptions {
topLeft?: boolean
friction?: number
time?: number
ease?: string | Function
interrupt?: boolean
removeOnComplete?: boolean
removeOnInterrupt?: boolean
forceStart?: boolean
}
interface FollowOptions {
speed?: number
radius?: number
acceleration?: number
}
interface WheelOptions {
percent?: number
reverse?: boolean
center?: PIXI.Point
smooth?: number
interrupt?: boolean
lineHeight?: number
axis?: DirectionType
}
interface ClampZoomOptions {
minWidth?: number
minHeight?: number
maxWidth?: number
maxHeight?: number
minScale?: number
maxScale?: number
}
interface MouseEdgesOptions {
radius?: number
distance?: number
top?: number
bottom?: number
left?: number
right?: number
speed?: number
reverse?: boolean
noDecelerate?: boolean
linear?: boolean
allowButtons?: boolean
}
interface SnapZoomOptions {
center?: PIXI.Point
ease?: string | Function
forceStart?: boolean
height?: number
interrupt?: boolean
removeOnComplete?: boolean
removeOnInterrupt?: boolean
time?: number
width?: number
}
interface AnimateOptions {
time?: number
position?: PIXI.Point
width?: number
height?: number
scale?: number
scaleX?: number
scaleY?: number
ease?: string | Function
callbackOnComplete?: Function
removeOnInterrupt?: boolean
}
interface OutOfBounds {
bottom: boolean
left: boolean
right: boolean
top: boolean
}
interface ClickEventData {
event: PIXI.InteractionEvent
screen: PIXI.Point
viewport: Viewport
world: PIXI.Point
}
interface WheelData {
dx: number
dy: number
dz: number
}
interface MovedEventData {
type: MovedEventSourceType
viewport: Viewport
}
interface WheelEventData {
viewport: Viewport
wheel: WheelData
}
interface ZoomedEventData {
type: ZoomedEventSourceType
viewport: Viewport
center?: PIXI.Point // used with pinch
}
interface lastViewport {
scaleX: number
scaleY: number
x: number
y: number
}
export declare class Viewport extends PIXI.Container {
screenWidth: number
screenHeight: number
worldHeight: number
worldWidth: number
worldScreenWidth: number
worldScreenHeight: number
forceHitArea?: PIXI.Rectangle | PIXI.Circle | PIXI.Ellipse | PIXI.Polygon | PIXI.RoundedRectangle
center: PIXI.Point
corner: PIXI.Point
right: number
left: number
top: number
bottom: number
scaled: number
dirty: boolean
pause: boolean
moving: boolean
lastViewport: any
screenWidthInWorldPixels: number
screenHeightInWorldPixels: number
screenWorldWidth: number
screenWorldHeight: number
constructor(options?: ViewportOptions)
// Public API
ensureVisible(x: number, y: number, width: number, height: number, resizeToFit: boolean): void
removeListeners(): void
update(elapsed: number): void
resize(screenWidth: number, screenHeight: number, worldWidth?: number, worldHeight?: number): void
toWorld(p: PIXI.IPointData): PIXI.Point
toWorld(x: number, y: number): PIXI.Point
toScreen(p: PIXI.Point): PIXI.Point
toScreen(x: number, y: number): PIXI.Point
getPointerPosition(event: PIXI.InteractionEvent): PIXI.Point
getPointerPosition(event: WheelEvent): PIXI.Point
moveCenter(p: PIXI.Point): this
moveCenter(x: number, y: number): this
moveCorner(p: PIXI.Point): this
moveCorner(x: number, y: number): this
findWidth(width: number): number
findHeight(height: number): number
findFitWidth(width: number): number
findFitHeight(height: number): number
findFit(width: number, height: number): number
findCover(width: number, height: number): number
fitWidth(width?: number, center?: boolean, scaleY?: boolean, noClamp?: boolean): this
fitHeight(height?: number, center?: boolean, scaleX?: boolean, noClamp?: boolean): this
fitWorld(center?: boolean): this
fit(center?: boolean, width?: number, height?: number): this
setZoom(scale: number, center?: boolean): this
zoomPercent(percent: number, center?: boolean): this
zoom(change: number, center?: boolean): this
getVisibleBounds(): PIXI.Rectangle
// Plugins
plugins: PluginManager
drag(options?: DragOptions): this
clamp(options?: ClampOptions): this
decelerate(options?: DecelerateOptions): this
bounce(options?: BounceOptions): this
pinch(options?: PinchOptions): this
snap(x: number, y: number, options?: SnapOptions): this
snapZoom(options?: SnapZoomOptions): this
follow(target: PIXI.DisplayObject, options?: FollowOptions): this
wheel(options?: WheelOptions): this
clampZoom(options?: ClampZoomOptions): this
mouseEdges(options?: MouseEdgesOptions): this
animate(options?: AnimateOptions): this
// Events
on(
event: 'added' | 'removed',
fn: (container: PIXI.Container) => void,
context?: any
): this
// Events
on(
event: string,
fn: (event: PIXI.InteractionEvent) => void,
context?: any
): this
on(
event: EventType,
fn: (viewport: Viewport) => void,
context?: any
): this
on(
| fn: (data: ClickEventData) => void,
context?: any
): this
on(
event: WheelEventType,
fn: (data: WheelEventData) => void,
context?: any
): this
on(
event: ZoomedEventType,
fn: (data: ZoomedEventData) => void,
context?: any
): this
on(
event: MovedEventType,
fn: (data: MovedEventData) => void,
context?: any
): this
// listeners(event: string | symbol): Function[]
// listeners(event: string | symbol, exists: boolean): boolean
/**
* Do not use. This is in fact a protected method.
*/
// listeners(div: HTMLElement): void
// Protected/Private methods
protected resizePlugins(): void
protected down(e: UIEvent): void
protected checkThreshold(change: number): void
protected move(e: UIEvent): void
protected up(e: UIEvent): void
protected handleWheel(e: UIEvent): void
// protected OOB(): Viewport.OutOfBounds
protected countDownPointers(): number
protected getTouchPointers(): number
protected _reset(): void
}
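// Illustrative usage sketch (editorial; assumes a PIXI.Application named `app`
// and is not part of these typings):
// const viewport = new Viewport({
//   screenWidth: 800, screenHeight: 600,
//   worldWidth: 2000, worldHeight: 2000,
//   interaction: app.renderer.plugins.interaction,
// })
// app.stage.addChild(viewport)
// viewport.drag().pinch().wheel().decelerate()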
export declare class Plugin {
constructor(viewport: Viewport)
paused: boolean
down(event: PIXI.InteractionEvent): void
up(event: PIXI.InteractionEvent): void
move(event: PIXI.InteractionEvent): void
wheel(event: WheelEvent): void
update(): void
resize(): void
reset(): void
pause(): void
resume(): void
}
declare class PluginManager {
constructor(viewport: Viewport)
add(type: string, plugin: Plugin, index?: number): void
get(name: string): Plugin
remove(name: string): void
pause(name: string): void
resume(name: string): void
} | event: ClickEventType,
|
customer_service.pb.go | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.26.0-devel
// protoc v3.15.2
// source: google/ads/googleads/v8/services/customer_service.proto
package services
import (
context "context"
enums "github.com/opteo/google-ads-go/enums"
resources "github.com/opteo/google-ads-go/resources"
_ "google.golang.org/genproto/googleapis/api/annotations"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
fieldmaskpb "google.golang.org/protobuf/types/known/fieldmaskpb"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// Request message for [CustomerService.GetCustomer][google.ads.googleads.v8.services.CustomerService.GetCustomer].
type GetCustomerRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Required. The resource name of the customer to fetch.
ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"`
}
func (x *GetCustomerRequest) Reset() {
*x = GetCustomerRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_services_customer_service_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GetCustomerRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GetCustomerRequest) ProtoMessage() {}
func (x *GetCustomerRequest) ProtoReflect() protoreflect.Message {
mi := &file_services_customer_service_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GetCustomerRequest.ProtoReflect.Descriptor instead.
func (*GetCustomerRequest) Descriptor() ([]byte, []int) {
return file_services_customer_service_proto_rawDescGZIP(), []int{0}
}
func (x *GetCustomerRequest) GetResourceName() string {
if x != nil {
return x.ResourceName
}
return ""
}
// Request message for [CustomerService.MutateCustomer][google.ads.googleads.v8.services.CustomerService.MutateCustomer].
type MutateCustomerRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Required. The ID of the customer being modified.
CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"`
// Required. The operation to perform on the customer
Operation *CustomerOperation `protobuf:"bytes,4,opt,name=operation,proto3" json:"operation,omitempty"`
// If true, the request is validated but not executed. Only errors are
// returned, not results.
ValidateOnly bool `protobuf:"varint,5,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"`
// The response content type setting. Determines whether the mutable resource
// or just the resource name should be returned post mutation.
ResponseContentType enums.ResponseContentTypeEnum_ResponseContentType `protobuf:"varint,6,opt,name=response_content_type,json=responseContentType,proto3,enum=google.ads.googleads.v8.enums.ResponseContentTypeEnum_ResponseContentType" json:"response_content_type,omitempty"`
}
func (x *MutateCustomerRequest) Reset() {
*x = MutateCustomerRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_services_customer_service_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *MutateCustomerRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*MutateCustomerRequest) ProtoMessage() {}
func (x *MutateCustomerRequest) ProtoReflect() protoreflect.Message {
mi := &file_services_customer_service_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use MutateCustomerRequest.ProtoReflect.Descriptor instead.
func (*MutateCustomerRequest) Descriptor() ([]byte, []int) {
return file_services_customer_service_proto_rawDescGZIP(), []int{1}
}
func (x *MutateCustomerRequest) GetCustomerId() string {
if x != nil {
return x.CustomerId
}
return ""
}
func (x *MutateCustomerRequest) GetOperation() *CustomerOperation {
if x != nil {
return x.Operation
}
return nil
}
func (x *MutateCustomerRequest) GetValidateOnly() bool {
if x != nil {
return x.ValidateOnly
}
return false
}
func (x *MutateCustomerRequest) GetResponseContentType() enums.ResponseContentTypeEnum_ResponseContentType {
if x != nil {
return x.ResponseContentType
}
return enums.ResponseContentTypeEnum_UNSPECIFIED
}
// Request message for [CustomerService.CreateCustomerClient][google.ads.googleads.v8.services.CustomerService.CreateCustomerClient].
type CreateCustomerClientRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Required. The ID of the Manager under whom the client customer is being created.
CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"`
// Required. The new client customer to create. The resource name on this customer
// will be ignored.
CustomerClient *resources.Customer `protobuf:"bytes,2,opt,name=customer_client,json=customerClient,proto3" json:"customer_client,omitempty"`
// Email address of the user who should be invited on the created client
// customer. Accessible only to customers on the allow-list.
EmailAddress *string `protobuf:"bytes,5,opt,name=email_address,json=emailAddress,proto3,oneof" json:"email_address,omitempty"`
// The proposed role of user on the created client customer.
// Accessible only to customers on the allow-list.
AccessRole enums.AccessRoleEnum_AccessRole `protobuf:"varint,4,opt,name=access_role,json=accessRole,proto3,enum=google.ads.googleads.v8.enums.AccessRoleEnum_AccessRole" json:"access_role,omitempty"`
// If true, the request is validated but not executed. Only errors are
// returned, not results.
ValidateOnly bool `protobuf:"varint,6,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"`
}
func (x *CreateCustomerClientRequest) Reset() {
*x = CreateCustomerClientRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_services_customer_service_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *CreateCustomerClientRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*CreateCustomerClientRequest) ProtoMessage() {}
func (x *CreateCustomerClientRequest) ProtoReflect() protoreflect.Message {
mi := &file_services_customer_service_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use CreateCustomerClientRequest.ProtoReflect.Descriptor instead.
func (*CreateCustomerClientRequest) Descriptor() ([]byte, []int) {
return file_services_customer_service_proto_rawDescGZIP(), []int{2}
}
func (x *CreateCustomerClientRequest) GetCustomerId() string {
if x != nil {
return x.CustomerId
}
return ""
}
func (x *CreateCustomerClientRequest) GetCustomerClient() *resources.Customer {
if x != nil {
return x.CustomerClient
}
return nil
}
func (x *CreateCustomerClientRequest) GetEmailAddress() string {
if x != nil && x.EmailAddress != nil {
return *x.EmailAddress
}
return ""
}
func (x *CreateCustomerClientRequest) GetAccessRole() enums.AccessRoleEnum_AccessRole {
if x != nil {
return x.AccessRole
}
return enums.AccessRoleEnum_UNSPECIFIED
}
func (x *CreateCustomerClientRequest) GetValidateOnly() bool {
if x != nil {
return x.ValidateOnly
}
return false
}
// A single update on a customer.
type CustomerOperation struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Mutate operation. Only updates are supported for customer.
Update *resources.Customer `protobuf:"bytes,1,opt,name=update,proto3" json:"update,omitempty"`
// FieldMask that determines which resource fields are modified in an update.
UpdateMask *fieldmaskpb.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
}
func (x *CustomerOperation) Reset() {
*x = CustomerOperation{}
if protoimpl.UnsafeEnabled {
mi := &file_services_customer_service_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *CustomerOperation) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*CustomerOperation) ProtoMessage() {}
func (x *CustomerOperation) ProtoReflect() protoreflect.Message {
mi := &file_services_customer_service_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use CustomerOperation.ProtoReflect.Descriptor instead.
func (*CustomerOperation) Descriptor() ([]byte, []int) {
return file_services_customer_service_proto_rawDescGZIP(), []int{3}
}
func (x *CustomerOperation) GetUpdate() *resources.Customer {
if x != nil {
return x.Update
}
return nil
}
func (x *CustomerOperation) GetUpdateMask() *fieldmaskpb.FieldMask {
if x != nil {
return x.UpdateMask
}
return nil
}
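// A construction sketch (values are placeholders, not taken from this file):
// an update operation that mutates only a customer's descriptive_name via the
// field mask, wrapped in a MutateCustomerRequest.
//
//	op := &CustomerOperation{
//		Update:     &resources.Customer{ /* set the fields to change here */ },
//		UpdateMask: &fieldmaskpb.FieldMask{Paths: []string{"descriptive_name"}},
//	}
//	req := &MutateCustomerRequest{
//		CustomerId: "1234567890",
//		Operation:  op,
//	}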
// Response message for CreateCustomerClient mutate.
type CreateCustomerClientResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// The resource name of the newly created customer client.
ResourceName string `protobuf:"bytes,2,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"`
// Link for inviting user to access the created customer. Accessible to
// allowlisted customers only.
InvitationLink string `protobuf:"bytes,3,opt,name=invitation_link,json=invitationLink,proto3" json:"invitation_link,omitempty"`
}
func (x *CreateCustomerClientResponse) Reset() {
*x = CreateCustomerClientResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_services_customer_service_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *CreateCustomerClientResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*CreateCustomerClientResponse) ProtoMessage() {}
func (x *CreateCustomerClientResponse) ProtoReflect() protoreflect.Message {
mi := &file_services_customer_service_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use CreateCustomerClientResponse.ProtoReflect.Descriptor instead.
func (*CreateCustomerClientResponse) Descriptor() ([]byte, []int) {
return file_services_customer_service_proto_rawDescGZIP(), []int{4}
}
func (x *CreateCustomerClientResponse) GetResourceName() string {
if x != nil {
return x.ResourceName
}
return ""
}
func (x *CreateCustomerClientResponse) GetInvitationLink() string {
if x != nil {
return x.InvitationLink
}
return ""
}
// Response message for customer mutate.
type MutateCustomerResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Result for the mutate.
Result *MutateCustomerResult `protobuf:"bytes,2,opt,name=result,proto3" json:"result,omitempty"`
}
func (x *MutateCustomerResponse) Reset() {
*x = MutateCustomerResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_services_customer_service_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *MutateCustomerResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*MutateCustomerResponse) ProtoMessage() {}
func (x *MutateCustomerResponse) ProtoReflect() protoreflect.Message {
mi := &file_services_customer_service_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use MutateCustomerResponse.ProtoReflect.Descriptor instead.
func (*MutateCustomerResponse) Descriptor() ([]byte, []int) {
return file_services_customer_service_proto_rawDescGZIP(), []int{5}
}
func (x *MutateCustomerResponse) GetResult() *MutateCustomerResult {
if x != nil {
return x.Result
}
return nil
}
// The result for the customer mutate.
type MutateCustomerResult struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Returned for successful operations.
ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"`
// The mutated customer with only mutable fields after mutate. The fields will
// only be returned when response_content_type is set to "MUTABLE_RESOURCE".
Customer *resources.Customer `protobuf:"bytes,2,opt,name=customer,proto3" json:"customer,omitempty"`
}
func (x *MutateCustomerResult) Reset() {
*x = MutateCustomerResult{}
if protoimpl.UnsafeEnabled {
mi := &file_services_customer_service_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *MutateCustomerResult) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*MutateCustomerResult) ProtoMessage() {}
func (x *MutateCustomerResult) ProtoReflect() protoreflect.Message {
mi := &file_services_customer_service_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use MutateCustomerResult.ProtoReflect.Descriptor instead.
func (*MutateCustomerResult) Descriptor() ([]byte, []int) {
return file_services_customer_service_proto_rawDescGZIP(), []int{6}
}
func (x *MutateCustomerResult) GetResourceName() string {
if x != nil {
return x.ResourceName
}
return ""
}
func (x *MutateCustomerResult) GetCustomer() *resources.Customer {
if x != nil {
return x.Customer
}
return nil
}
// Request message for [CustomerService.ListAccessibleCustomers][google.ads.googleads.v8.services.CustomerService.ListAccessibleCustomers].
type ListAccessibleCustomersRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *ListAccessibleCustomersRequest) Reset() {
*x = ListAccessibleCustomersRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_services_customer_service_proto_msgTypes[7]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListAccessibleCustomersRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListAccessibleCustomersRequest) ProtoMessage() {}
func (x *ListAccessibleCustomersRequest) ProtoReflect() protoreflect.Message {
mi := &file_services_customer_service_proto_msgTypes[7]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListAccessibleCustomersRequest.ProtoReflect.Descriptor instead.
func (*ListAccessibleCustomersRequest) Descriptor() ([]byte, []int) {
return file_services_customer_service_proto_rawDescGZIP(), []int{7}
}
// Response message for [CustomerService.ListAccessibleCustomers][google.ads.googleads.v8.services.CustomerService.ListAccessibleCustomers].
type ListAccessibleCustomersResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Resource name of customers directly accessible by the
// user authenticating the call.
ResourceNames []string `protobuf:"bytes,1,rep,name=resource_names,json=resourceNames,proto3" json:"resource_names,omitempty"`
}
func (x *ListAccessibleCustomersResponse) Reset() {
*x = ListAccessibleCustomersResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_services_customer_service_proto_msgTypes[8]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListAccessibleCustomersResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListAccessibleCustomersResponse) ProtoMessage() {}
func (x *ListAccessibleCustomersResponse) ProtoReflect() protoreflect.Message {
mi := &file_services_customer_service_proto_msgTypes[8]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListAccessibleCustomersResponse.ProtoReflect.Descriptor instead.
func (*ListAccessibleCustomersResponse) Descriptor() ([]byte, []int) {
return file_services_customer_service_proto_rawDescGZIP(), []int{8}
}
func (x *ListAccessibleCustomersResponse) GetResourceNames() []string {
if x != nil {
return x.ResourceNames
}
return nil
}
var File_services_customer_service_proto protoreflect.FileDescriptor
var file_services_customer_service_proto_rawDesc = []byte{
0x0a, 0x37, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63,
0x65, 0x73, 0x2f, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x5f, 0x73, 0x65, 0x72, 0x76,
0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x20, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x1a, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64,
0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x2f, 0x61, 0x63, 0x63, 0x65, 0x73,
0x73, 0x5f, 0x72, 0x6f, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x39, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x2f, 0x72, 0x65, 0x73, 0x70,
0x6f, 0x6e, 0x73, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70,
0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x30, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x38,
0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, 0x63, 0x75, 0x73, 0x74, 0x6f,
0x6d, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x61, 0x70, 0x69, 0x2f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x66, 0x69, 0x65,
0x6c, 0x64, 0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x72, 0x65,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x20, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x66, 0x69,
0x65, 0x6c, 0x64, 0x5f, 0x6d, 0x61, 0x73, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x64,
0x0a, 0x12, 0x47, 0x65, 0x74, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x52, 0x65, 0x71,
0x75, 0x65, 0x73, 0x74, 0x12, 0x4e, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x29, 0xe0, 0x41, 0x02,
0xfa, 0x41, 0x23, 0x0a, 0x21, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x43, 0x75,
0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
0x4e, 0x61, 0x6d, 0x65, 0x22, 0xba, 0x02, 0x0a, 0x15, 0x4d, 0x75, 0x74, 0x61, 0x74, 0x65, 0x43,
0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x24,
0x0a, 0x0b, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20,
0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x0a, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d,
0x65, 0x72, 0x49, 0x64, 0x12, 0x56, 0x0a, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f,
0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76,
0x38, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f,
0x6d, 0x65, 0x72, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x03, 0xe0, 0x41,
0x02, 0x52, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d,
0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x6f, 0x6e, 0x6c, 0x79, 0x18, 0x05, 0x20,
0x01, 0x28, 0x08, 0x52, 0x0c, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x4f, 0x6e, 0x6c,
0x79, 0x12, 0x7e, 0x0a, 0x15, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x5f, 0x63, 0x6f,
0x6e, 0x74, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e,
0x32, 0x4a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x6e, 0x75, 0x6d, 0x73,
0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74,
0x54, 0x79, 0x70, 0x65, 0x45, 0x6e, 0x75, 0x6d, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x13, 0x72, 0x65,
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70,
0x65, 0x22, 0xda, 0x02, 0x0a, 0x1b, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x75, 0x73, 0x74,
0x6f, 0x6d, 0x65, 0x72, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x74, 0x12, 0x24, 0x0a, 0x0b, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x5f, 0x69, 0x64,
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x0a, 0x63, 0x75, 0x73,
0x74, 0x6f, 0x6d, 0x65, 0x72, 0x49, 0x64, 0x12, 0x59, 0x0a, 0x0f, 0x63, 0x75, 0x73, 0x74, 0x6f,
0x6d, 0x65, 0x72, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x2b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x73, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x42, 0x03, 0xe0,
0x41, 0x02, 0x52, 0x0e, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x43, 0x6c, 0x69, 0x65,
0x6e, 0x74, 0x12, 0x28, 0x0a, 0x0d, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x5f, 0x61, 0x64, 0x64, 0x72,
0x65, 0x73, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0c, 0x65, 0x6d, 0x61,
0x69, 0x6c, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x88, 0x01, 0x01, 0x12, 0x59, 0x0a, 0x0b,
0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x72, 0x6f, 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28,
0x0e, 0x32, 0x38, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x65, 0x6e, 0x75, 0x6d,
0x73, 0x2e, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x52, 0x6f, 0x6c, 0x65, 0x45, 0x6e, 0x75, 0x6d,
0x2e, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x52, 0x6f, 0x6c, 0x65, 0x52, 0x0a, 0x61, 0x63, 0x63,
0x65, 0x73, 0x73, 0x52, 0x6f, 0x6c, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x76, 0x61, 0x6c, 0x69, 0x64,
0x61, 0x74, 0x65, 0x5f, 0x6f, 0x6e, 0x6c, 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c,
0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x4f, 0x6e, 0x6c, 0x79, 0x42, 0x10, 0x0a, 0x0e,
0x5f, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x5f, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x22, 0x95,
0x01, 0x0a, 0x11, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x4f, 0x70, 0x65, 0x72, 0x61,
0x74, 0x69, 0x6f, 0x6e, 0x12, 0x43, 0x0a, 0x06, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x18, 0x01,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64,
0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x72,
0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65,
0x72, 0x52, 0x06, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x3b, 0x0a, 0x0b, 0x75, 0x70, 0x64,
0x61, 0x74, 0x65, 0x5f, 0x6d, 0x61, 0x73, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x4d, 0x61, 0x73, 0x6b, 0x52, 0x0a, 0x75, 0x70, 0x64, 0x61,
0x74, 0x65, 0x4d, 0x61, 0x73, 0x6b, 0x22, 0x6c, 0x0a, 0x1c, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65,
0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x52, 0x65,
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72,
0x63, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72,
0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x69,
0x6e, 0x76, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x03,
0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x69, 0x6e, 0x76, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x4c, 0x69, 0x6e, 0x6b, 0x22, 0x68, 0x0a, 0x16, 0x4d, 0x75, 0x74, 0x61, 0x74, 0x65, 0x43, 0x75,
0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4e,
0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x36,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65,
0x73, 0x2e, 0x4d, 0x75, 0x74, 0x61, 0x74, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72,
0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x22, 0x84,
0x01, 0x0a, 0x14, 0x4d, 0x75, 0x74, 0x61, 0x74, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65,
0x72, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c,
0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x47, 0x0a, 0x08,
0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
0x65, 0x73, 0x2e, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x52, 0x08, 0x63, 0x75, 0x73,
0x74, 0x6f, 0x6d, 0x65, 0x72, 0x22, 0x20, 0x0a, 0x1e, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x63, 0x63,
0x65, 0x73, 0x73, 0x69, 0x62, 0x6c, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x73,
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x48, 0x0a, 0x1f, 0x4c, 0x69, 0x73, 0x74, 0x41,
0x63, 0x63, 0x65, 0x73, 0x73, 0x69, 0x62, 0x6c, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65,
0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x72, 0x65,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03,
0x28, 0x09, 0x52, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65,
0x73, 0x32, 0x98, 0x07, 0x0a, 0x0f, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x53, 0x65,
0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0xa9, 0x01, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x43, 0x75, 0x73,
0x74, 0x6f, 0x6d, 0x65, 0x72, 0x12, 0x34, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61,
0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e,
0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x75, 0x73, 0x74,
0x6f, 0x6d, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e,
0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x22, 0x37, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x21,
0x12, 0x1f, 0x2f, 0x76, 0x38, 0x2f, 0x7b, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f,
0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x73, 0x2f, 0x2a,
0x7d, 0xda, 0x41, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6e, 0x61, 0x6d,
0x65, 0x12, 0xcc, 0x01, 0x0a, 0x0e, 0x4d, 0x75, 0x74, 0x61, 0x74, 0x65, 0x43, 0x75, 0x73, 0x74,
0x6f, 0x6d, 0x65, 0x72, 0x12, 0x37, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64,
0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x73,
0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2e, 0x4d, 0x75, 0x74, 0x61, 0x74, 0x65, 0x43, 0x75,
0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x38, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73,
0x2e, 0x4d, 0x75, 0x74, 0x61, 0x74, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x52,
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x47, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x29, 0x22,
0x24, 0x2f, 0x76, 0x38, 0x2f, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x73, 0x2f, 0x7b,
0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x3d, 0x2a, 0x7d, 0x3a, 0x6d,
0x75, 0x74, 0x61, 0x74, 0x65, 0x3a, 0x01, 0x2a, 0xda, 0x41, 0x15, 0x63, 0x75, 0x73, 0x74, 0x6f,
0x6d, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x2c, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x12, 0xcd, 0x01, 0x0a, 0x17, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x69,
0x62, 0x6c, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x73, 0x12, 0x40, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2e,
0x4c, 0x69, 0x73, 0x74, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x69, 0x62, 0x6c, 0x65, 0x43, 0x75,
0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x41,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65,
0x73, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x69, 0x62, 0x6c, 0x65,
0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x65, 0x22, 0x2d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x27, 0x12, 0x25, 0x2f, 0x76, 0x38, 0x2f, 0x63,
0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x73, 0x3a, 0x6c, 0x69, 0x73, 0x74, 0x41, 0x63, 0x63,
0x65, 0x73, 0x73, 0x69, 0x62, 0x6c, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x73,
0x12, 0xf2, 0x01, 0x0a, 0x14, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f,
0x6d, 0x65, 0x72, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x12, 0x3d, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73,
0x2e, 0x76, 0x38, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2e, 0x43, 0x72, 0x65,
0x61, 0x74, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x43, 0x6c, 0x69, 0x65, 0x6e,
0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x3e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e,
0x76, 0x38, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2e, 0x43, 0x72, 0x65, 0x61,
0x74, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74,
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x5b, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x37,
0x22, 0x32, 0x2f, 0x76, 0x38, 0x2f, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x73, 0x2f,
0x7b, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x3d, 0x2a, 0x7d, 0x3a,
0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x43, 0x6c,
0x69, 0x65, 0x6e, 0x74, 0x3a, 0x01, 0x2a, 0xda, 0x41, 0x1b, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d,
0x65, 0x72, 0x5f, 0x69, 0x64, 0x2c, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x5f, 0x63,
0x6c, 0x69, 0x65, 0x6e, 0x74, 0x1a, 0x45, 0xca, 0x41, 0x18, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63,
0x6f, 0x6d, 0xd2, 0x41, 0x27, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x77, 0x77, 0x77,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f,
0x61, 0x75, 0x74, 0x68, 0x2f, 0x61, 0x64, 0x77, 0x6f, 0x72, 0x64, 0x73, 0x42, 0xfb, 0x01, 0x0a,
0x24, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x38, 0x2e, 0x73, 0x65, 0x72,
0x76, 0x69, 0x63, 0x65, 0x73, 0x42, 0x14, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x65, 0x72, 0x53,
0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x48, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67,
0x2f, 0x67, 0x65, 0x6e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x61, 0x70, 0x69, 0x73, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61,
0x64, 0x73, 0x2f, 0x76, 0x38, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x3b, 0x73,
0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0xa2, 0x02, 0x03, 0x47, 0x41, 0x41, 0xaa, 0x02, 0x20,
0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x41, 0x64, 0x73, 0x2e, 0x47, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x41, 0x64, 0x73, 0x2e, 0x56, 0x38, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73,
0xca, 0x02, 0x20, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x5c, 0x41, 0x64, 0x73, 0x5c, 0x47, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x5c, 0x56, 0x38, 0x5c, 0x53, 0x65, 0x72, 0x76, 0x69,
0x63, 0x65, 0x73, 0xea, 0x02, 0x24, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x3a, 0x3a, 0x41, 0x64,
0x73, 0x3a, 0x3a, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x3a, 0x3a, 0x56, 0x38,
0x3a, 0x3a, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x33,
}
var (
file_services_customer_service_proto_rawDescOnce sync.Once
file_services_customer_service_proto_rawDescData = file_services_customer_service_proto_rawDesc
)
func file_services_customer_service_proto_rawDescGZIP() []byte {
file_services_customer_service_proto_rawDescOnce.Do(func() {
file_services_customer_service_proto_rawDescData = protoimpl.X.CompressGZIP(file_services_customer_service_proto_rawDescData)
})
return file_services_customer_service_proto_rawDescData
}
var file_services_customer_service_proto_msgTypes = make([]protoimpl.MessageInfo, 9)
var file_services_customer_service_proto_goTypes = []interface{}{
(*GetCustomerRequest)(nil), // 0: google.ads.googleads.v8.services.GetCustomerRequest
(*MutateCustomerRequest)(nil), // 1: google.ads.googleads.v8.services.MutateCustomerRequest
(*CreateCustomerClientRequest)(nil), // 2: google.ads.googleads.v8.services.CreateCustomerClientRequest
(*CustomerOperation)(nil), // 3: google.ads.googleads.v8.services.CustomerOperation
(*CreateCustomerClientResponse)(nil), // 4: google.ads.googleads.v8.services.CreateCustomerClientResponse
(*MutateCustomerResponse)(nil), // 5: google.ads.googleads.v8.services.MutateCustomerResponse
(*MutateCustomerResult)(nil), // 6: google.ads.googleads.v8.services.MutateCustomerResult
(*ListAccessibleCustomersRequest)(nil), // 7: google.ads.googleads.v8.services.ListAccessibleCustomersRequest
(*ListAccessibleCustomersResponse)(nil), // 8: google.ads.googleads.v8.services.ListAccessibleCustomersResponse
(enums.ResponseContentTypeEnum_ResponseContentType)(0), // 9: google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType
(*resources.Customer)(nil), // 10: google.ads.googleads.v8.resources.Customer
(enums.AccessRoleEnum_AccessRole)(0), // 11: google.ads.googleads.v8.enums.AccessRoleEnum.AccessRole
(*fieldmaskpb.FieldMask)(nil), // 12: google.protobuf.FieldMask
}
var file_services_customer_service_proto_depIdxs = []int32{
3, // 0: google.ads.googleads.v8.services.MutateCustomerRequest.operation:type_name -> google.ads.googleads.v8.services.CustomerOperation
9, // 1: google.ads.googleads.v8.services.MutateCustomerRequest.response_content_type:type_name -> google.ads.googleads.v8.enums.ResponseContentTypeEnum.ResponseContentType
10, // 2: google.ads.googleads.v8.services.CreateCustomerClientRequest.customer_client:type_name -> google.ads.googleads.v8.resources.Customer
11, // 3: google.ads.googleads.v8.services.CreateCustomerClientRequest.access_role:type_name -> google.ads.googleads.v8.enums.AccessRoleEnum.AccessRole
10, // 4: google.ads.googleads.v8.services.CustomerOperation.update:type_name -> google.ads.googleads.v8.resources.Customer
12, // 5: google.ads.googleads.v8.services.CustomerOperation.update_mask:type_name -> google.protobuf.FieldMask
6, // 6: google.ads.googleads.v8.services.MutateCustomerResponse.result:type_name -> google.ads.googleads.v8.services.MutateCustomerResult
10, // 7: google.ads.googleads.v8.services.MutateCustomerResult.customer:type_name -> google.ads.googleads.v8.resources.Customer
0, // 8: google.ads.googleads.v8.services.CustomerService.GetCustomer:input_type -> google.ads.googleads.v8.services.GetCustomerRequest
1, // 9: google.ads.googleads.v8.services.CustomerService.MutateCustomer:input_type -> google.ads.googleads.v8.services.MutateCustomerRequest
7, // 10: google.ads.googleads.v8.services.CustomerService.ListAccessibleCustomers:input_type -> google.ads.googleads.v8.services.ListAccessibleCustomersRequest
2, // 11: google.ads.googleads.v8.services.CustomerService.CreateCustomerClient:input_type -> google.ads.googleads.v8.services.CreateCustomerClientRequest
10, // 12: google.ads.googleads.v8.services.CustomerService.GetCustomer:output_type -> google.ads.googleads.v8.resources.Customer
5, // 13: google.ads.googleads.v8.services.CustomerService.MutateCustomer:output_type -> google.ads.googleads.v8.services.MutateCustomerResponse
8, // 14: google.ads.googleads.v8.services.CustomerService.ListAccessibleCustomers:output_type -> google.ads.googleads.v8.services.ListAccessibleCustomersResponse
4, // 15: google.ads.googleads.v8.services.CustomerService.CreateCustomerClient:output_type -> google.ads.googleads.v8.services.CreateCustomerClientResponse
12, // [12:16] is the sub-list for method output_type
8, // [8:12] is the sub-list for method input_type
8, // [8:8] is the sub-list for extension type_name
8, // [8:8] is the sub-list for extension extendee
0, // [0:8] is the sub-list for field type_name
}
func init() { file_services_customer_service_proto_init() }
func file_services_customer_service_proto_init() {
if File_services_customer_service_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_services_customer_service_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetCustomerRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_services_customer_service_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MutateCustomerRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_services_customer_service_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*CreateCustomerClientRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_services_customer_service_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*CustomerOperation); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_services_customer_service_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*CreateCustomerClientResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_services_customer_service_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MutateCustomerResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_services_customer_service_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MutateCustomerResult); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_services_customer_service_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListAccessibleCustomersRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_services_customer_service_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListAccessibleCustomersResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_services_customer_service_proto_msgTypes[2].OneofWrappers = []interface{}{}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_services_customer_service_proto_rawDesc,
NumEnums: 0,
NumMessages: 9,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_services_customer_service_proto_goTypes,
DependencyIndexes: file_services_customer_service_proto_depIdxs,
MessageInfos: file_services_customer_service_proto_msgTypes,
}.Build()
File_services_customer_service_proto = out.File
file_services_customer_service_proto_rawDesc = nil
file_services_customer_service_proto_goTypes = nil
file_services_customer_service_proto_depIdxs = nil
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConnInterface
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion6
// CustomerServiceClient is the client API for CustomerService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type CustomerServiceClient interface {
// Returns the requested customer in full detail.
//
// List of thrown errors:
// [AuthenticationError]()
// [AuthorizationError]()
// [HeaderError]()
// [InternalError]()
// [QuotaError]()
// [RequestError]()
GetCustomer(ctx context.Context, in *GetCustomerRequest, opts ...grpc.CallOption) (*resources.Customer, error)
// Updates a customer. Operation statuses are returned.
//
// List of thrown errors:
// [AuthenticationError]()
// [AuthorizationError]()
// [DatabaseError]()
// [FieldMaskError]()
// [HeaderError]()
// [InternalError]()
// [QuotaError]()
// [RequestError]()
// [UrlFieldError]()
MutateCustomer(ctx context.Context, in *MutateCustomerRequest, opts ...grpc.CallOption) (*MutateCustomerResponse, error)
// Returns resource names of customers directly accessible by the
// user authenticating the call.
//
// List of thrown errors:
// [AuthenticationError]()
// [AuthorizationError]()
// [HeaderError]()
// [InternalError]()
// [QuotaError]()
// [RequestError]()
ListAccessibleCustomers(ctx context.Context, in *ListAccessibleCustomersRequest, opts ...grpc.CallOption) (*ListAccessibleCustomersResponse, error)
// Creates a new client under a manager. The new client customer is returned.
//
// List of thrown errors:
// [AccessInvitationError]()
// [AuthenticationError]()
// [AuthorizationError]()
// [CurrencyCodeError]()
// [HeaderError]()
// [InternalError]()
// [ManagerLinkError]()
// [QuotaError]()
// [RequestError]()
// [StringLengthError]()
// [TimeZoneError]()
CreateCustomerClient(ctx context.Context, in *CreateCustomerClientRequest, opts ...grpc.CallOption) (*CreateCustomerClientResponse, error)
}
type customerServiceClient struct {
cc grpc.ClientConnInterface
}
func NewCustomerServiceClient(cc grpc.ClientConnInterface) CustomerServiceClient {
return &customerServiceClient{cc}
}
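// A client-side usage sketch (endpoint, credentials, and IDs are
// placeholders): real calls to the Google Ads API also require OAuth
// credentials and a developer token attached to the request metadata.
//
//	conn, err := grpc.Dial("googleads.googleapis.com:443", grpc.WithTransportCredentials(creds))
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer conn.Close()
//	client := NewCustomerServiceClient(conn)
//	customer, err := client.GetCustomer(ctx, &GetCustomerRequest{
//		ResourceName: "customers/1234567890",
//	})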
func (c *customerServiceClient) GetCustomer(ctx context.Context, in *GetCustomerRequest, opts ...grpc.CallOption) (*resources.Customer, error) {
out := new(resources.Customer)
err := c.cc.Invoke(ctx, "/google.ads.googleads.v8.services.CustomerService/GetCustomer", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *customerServiceClient) MutateCustomer(ctx context.Context, in *MutateCustomerRequest, opts ...grpc.CallOption) (*MutateCustomerResponse, error) {
out := new(MutateCustomerResponse)
err := c.cc.Invoke(ctx, "/google.ads.googleads.v8.services.CustomerService/MutateCustomer", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *customerServiceClient) ListAccessibleCustomers(ctx context.Context, in *ListAccessibleCustomersRequest, opts ...grpc.CallOption) (*ListAccessibleCustomersResponse, error) {
out := new(ListAccessibleCustomersResponse)
err := c.cc.Invoke(ctx, "/google.ads.googleads.v8.services.CustomerService/ListAccessibleCustomers", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *customerServiceClient) CreateCustomerClient(ctx context.Context, in *CreateCustomerClientRequest, opts ...grpc.CallOption) (*CreateCustomerClientResponse, error) {
out := new(CreateCustomerClientResponse)
err := c.cc.Invoke(ctx, "/google.ads.googleads.v8.services.CustomerService/CreateCustomerClient", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// CustomerServiceServer is the server API for CustomerService service.
type CustomerServiceServer interface {
// Returns the requested customer in full detail.
//
// List of thrown errors:
// [AuthenticationError]()
// [AuthorizationError]()
// [HeaderError]()
// [InternalError]()
// [QuotaError]()
// [RequestError]()
GetCustomer(context.Context, *GetCustomerRequest) (*resources.Customer, error)
// Updates a customer. Operation statuses are returned.
//
// List of thrown errors:
// [AuthenticationError]()
// [AuthorizationError]()
// [DatabaseError]()
// [FieldMaskError]()
// [HeaderError]()
// [InternalError]()
// [QuotaError]()
// [RequestError]()
// [UrlFieldError]()
MutateCustomer(context.Context, *MutateCustomerRequest) (*MutateCustomerResponse, error)
// Returns resource names of customers directly accessible by the
// user authenticating the call.
//
// List of thrown errors:
// [AuthenticationError]()
// [AuthorizationError]()
// [HeaderError]()
// [InternalError]()
// [QuotaError]()
// [RequestError]()
ListAccessibleCustomers(context.Context, *ListAccessibleCustomersRequest) (*ListAccessibleCustomersResponse, error)
// Creates a new client under a manager. The new client customer is returned.
//
// List of thrown errors:
// [AccessInvitationError]()
// [AuthenticationError]()
// [AuthorizationError]()
// [CurrencyCodeError]()
// [HeaderError]()
// [InternalError]()
// [ManagerLinkError]()
// [QuotaError]()
// [RequestError]()
// [StringLengthError]()
// [TimeZoneError]()
CreateCustomerClient(context.Context, *CreateCustomerClientRequest) (*CreateCustomerClientResponse, error)
}
// UnimplementedCustomerServiceServer can be embedded to have forward compatible implementations.
type UnimplementedCustomerServiceServer struct {
}
func (*UnimplementedCustomerServiceServer) GetCustomer(context.Context, *GetCustomerRequest) (*resources.Customer, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetCustomer not implemented")
}
func (*UnimplementedCustomerServiceServer) MutateCustomer(context.Context, *MutateCustomerRequest) (*MutateCustomerResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method MutateCustomer not implemented")
}
func (*UnimplementedCustomerServiceServer) ListAccessibleCustomers(context.Context, *ListAccessibleCustomersRequest) (*ListAccessibleCustomersResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListAccessibleCustomers not implemented")
}
func (*UnimplementedCustomerServiceServer) CreateCustomerClient(context.Context, *CreateCustomerClientRequest) (*CreateCustomerClientResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method CreateCustomerClient not implemented")
}
func RegisterCustomerServiceServer(s *grpc.Server, srv CustomerServiceServer) {
s.RegisterService(&_CustomerService_serviceDesc, srv)
}
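// A server-side sketch (the implementation type name is assumed): embed
// UnimplementedCustomerServiceServer so future interface additions do not
// break the build, override only the methods you support, then register.
//
//	type customerServiceImpl struct {
//		UnimplementedCustomerServiceServer
//	}
//
//	func (s *customerServiceImpl) GetCustomer(ctx context.Context, req *GetCustomerRequest) (*resources.Customer, error) {
//		return &resources.Customer{}, nil // placeholder implementation
//	}
//
//	grpcServer := grpc.NewServer()
//	RegisterCustomerServiceServer(grpcServer, &customerServiceImpl{})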
func _CustomerService_GetCustomer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetCustomerRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(CustomerServiceServer).GetCustomer(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.ads.googleads.v8.services.CustomerService/GetCustomer",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(CustomerServiceServer).GetCustomer(ctx, req.(*GetCustomerRequest))
}
return interceptor(ctx, in, info, handler)
}
func _CustomerService_MutateCustomer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(MutateCustomerRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(CustomerServiceServer).MutateCustomer(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.ads.googleads.v8.services.CustomerService/MutateCustomer",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(CustomerServiceServer).MutateCustomer(ctx, req.(*MutateCustomerRequest))
}
return interceptor(ctx, in, info, handler)
}
func _CustomerService_ListAccessibleCustomers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ListAccessibleCustomersRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(CustomerServiceServer).ListAccessibleCustomers(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.ads.googleads.v8.services.CustomerService/ListAccessibleCustomers",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(CustomerServiceServer).ListAccessibleCustomers(ctx, req.(*ListAccessibleCustomersRequest))
}
return interceptor(ctx, in, info, handler)
}
func _CustomerService_CreateCustomerClient_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(CreateCustomerClientRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(CustomerServiceServer).CreateCustomerClient(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.ads.googleads.v8.services.CustomerService/CreateCustomerClient",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(CustomerServiceServer).CreateCustomerClient(ctx, req.(*CreateCustomerClientRequest))
}
return interceptor(ctx, in, info, handler)
}
var _CustomerService_serviceDesc = grpc.ServiceDesc{
ServiceName: "google.ads.googleads.v8.services.CustomerService",
HandlerType: (*CustomerServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "GetCustomer",
Handler: _CustomerService_GetCustomer_Handler,
},
{
MethodName: "MutateCustomer",
Handler: _CustomerService_MutateCustomer_Handler,
},
{
MethodName: "ListAccessibleCustomers",
Handler: _CustomerService_ListAccessibleCustomers_Handler,
},
{
MethodName: "CreateCustomerClient",
Handler: _CustomerService_CreateCustomerClient_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "google/ads/googleads/v8/services/customer_service.proto",
}
docker.go

package docker
import (
"context"
"fmt"
"io"
"net"
"net/http"
"strconv"
"strings"
"text/template"
"time"
"github.com/cenkalti/backoff"
"github.com/containous/traefik/pkg/config"
"github.com/containous/traefik/pkg/job"
"github.com/containous/traefik/pkg/log"
"github.com/containous/traefik/pkg/provider"
"github.com/containous/traefik/pkg/safe"
"github.com/containous/traefik/pkg/types"
"github.com/containous/traefik/pkg/version"
dockertypes "github.com/docker/docker/api/types"
dockercontainertypes "github.com/docker/docker/api/types/container"
eventtypes "github.com/docker/docker/api/types/events"
"github.com/docker/docker/api/types/filters"
swarmtypes "github.com/docker/docker/api/types/swarm"
"github.com/docker/docker/api/types/versions"
"github.com/docker/docker/client"
"github.com/docker/go-connections/nat"
"github.com/docker/go-connections/sockets"
)
const (
	// DockerAPIVersion is a constant holding the version of the Docker API traefik will use.
	// Defined here because createClient below references it; the value "1.24" is an assumption.
	DockerAPIVersion = "1.24"
	// SwarmAPIVersion is a constant holding the version of the Docker API traefik will use in Swarm Mode.
	SwarmAPIVersion = "1.24"
	// DefaultTemplateRule is the default template for the default rule.
	DefaultTemplateRule = "Host(`{{ normalize .Name }}`)"
)
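// With the default rule template above, a container named "whoami" produces
// the router rule Host(`whoami`); normalize is the template helper traefik
// exposes for sanitizing names so they are valid in a Host matcher.
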
var _ provider.Provider = (*Provider)(nil)
// Provider holds configurations of the provider.
type Provider struct {
provider.Constrainer `mapstructure:",squash" export:"true"`
Watch bool `description:"Watch provider" export:"true"`
Endpoint string `description:"Docker server endpoint. Can be a tcp or a unix socket endpoint"`
DefaultRule string `description:"Default rule"`
TLS *types.ClientTLS `description:"Enable Docker TLS support" export:"true"`
ExposedByDefault bool `description:"Expose containers by default" export:"true"`
UseBindPortIP bool `description:"Use the ip address from the bound port, rather than from the inner network" export:"true"`
SwarmMode bool `description:"Use Docker on Swarm Mode" export:"true"`
Network string `description:"Default Docker network used" export:"true"`
SwarmModeRefreshSeconds int `description:"Polling interval for swarm mode (in seconds)" export:"true"`
defaultRuleTpl *template.Template
}
// Init the provider.
func (p *Provider) Init() error {
defaultRuleTpl, err := provider.MakeDefaultRuleTemplate(p.DefaultRule, nil)
if err != nil {
return fmt.Errorf("error while parsing default rule: %v", err)
}
p.defaultRuleTpl = defaultRuleTpl
return nil
}
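// A configuration sketch (field values are placeholders): a provider watching
// the local Docker socket, with containers exposed by default.
//
//	p := &Provider{
//		Endpoint:         "unix:///var/run/docker.sock",
//		Watch:            true,
//		ExposedByDefault: true,
//		DefaultRule:      DefaultTemplateRule,
//	}
//	if err := p.Init(); err != nil {
//		log.Fatal(err)
//	}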
// dockerData holds the data the provider needs for each container or service.
type dockerData struct {
ID string
ServiceName string
Name string
Labels map[string]string // List of labels set to container or service
NetworkSettings networkSettings
Health string
Node *dockertypes.ContainerNode
ExtraConf configuration
}
// networkSettings holds the network data the provider needs.
type networkSettings struct {
NetworkMode dockercontainertypes.NetworkMode
Ports nat.PortMap
Networks map[string]*networkData
}
// networkData holds the data for a single network.
type networkData struct {
Name string
Addr string
Port int
Protocol string
ID string
}
func (p *Provider) createClient() (client.APIClient, error) {
var httpClient *http.Client
if p.TLS != nil {
ctx := log.With(context.Background(), log.Str(log.ProviderName, "docker"))
conf, err := p.TLS.CreateTLSConfig(ctx)
if err != nil {
return nil, err
}
tr := &http.Transport{
TLSClientConfig: conf,
}
hostURL, err := client.ParseHostURL(p.Endpoint)
if err != nil {
return nil, err
}
if err := sockets.ConfigureTransport(tr, hostURL.Scheme, hostURL.Host); err != nil {
return nil, err
}
httpClient = &http.Client{
Transport: tr,
}
}
httpHeaders := map[string]string{
"User-Agent": "Traefik " + version.Version,
}
var apiVersion string
if p.SwarmMode {
apiVersion = SwarmAPIVersion
} else {
apiVersion = DockerAPIVersion
}
return client.NewClient(p.Endpoint, apiVersion, httpClient, httpHeaders)
}
// Provide allows the docker provider to provide configurations to traefik using the given configuration channel.
func (p *Provider) Provide(configurationChan chan<- config.Message, pool *safe.Pool) error {
pool.GoCtx(func(routineCtx context.Context) {
ctxLog := log.With(routineCtx, log.Str(log.ProviderName, "docker"))
logger := log.FromContext(ctxLog)
operation := func() error {
var err error
ctx, cancel := context.WithCancel(ctxLog)
defer cancel()
ctx = log.With(ctx, log.Str(log.ProviderName, "docker"))
dockerClient, err := p.createClient()
if err != nil {
logger.Errorf("Failed to create a client for docker, error: %s", err)
return err
}
serverVersion, err := dockerClient.ServerVersion(ctx)
if err != nil {
logger.Errorf("Failed to retrieve information of the docker client and server host: %s", err)
return err
}
logger.Debugf("Provider connection established with docker %s (API %s)", serverVersion.Version, serverVersion.APIVersion)
var dockerDataList []dockerData
if p.SwarmMode {
dockerDataList, err = p.listServices(ctx, dockerClient)
if err != nil {
logger.Errorf("Failed to list services for docker swarm mode, error %s", err)
return err
}
} else {
dockerDataList, err = p.listContainers(ctx, dockerClient)
if err != nil {
logger.Errorf("Failed to list containers for docker, error %s", err)
return err
}
}
configuration := p.buildConfiguration(ctxLog, dockerDataList)
configurationChan <- config.Message{
ProviderName: "docker",
Configuration: configuration,
}
if p.Watch {
if p.SwarmMode {
errChan := make(chan error)
// TODO: This needs to be changed. Linked to Swarm events docker/docker#23827
ticker := time.NewTicker(time.Second * time.Duration(p.SwarmModeRefreshSeconds))
pool.GoCtx(func(ctx context.Context) {
ctx = log.With(ctx, log.Str(log.ProviderName, "docker"))
logger := log.FromContext(ctx)
defer close(errChan)
for {
select {
case <-ticker.C:
services, err := p.listServices(ctx, dockerClient)
if err != nil {
logger.Errorf("Failed to list services for docker, error %s", err)
errChan <- err
return
}
configuration := p.buildConfiguration(ctx, services)
if configuration != nil {
configurationChan <- config.Message{
ProviderName: "docker",
Configuration: configuration,
}
}
case <-ctx.Done():
ticker.Stop()
return
}
}
})
if err, ok := <-errChan; ok {
return err
}
// channel closed
} else {
f := filters.NewArgs()
f.Add("type", "container")
options := dockertypes.EventsOptions{
Filters: f,
}
startStopHandle := func(m eventtypes.Message) {
logger.Debugf("Provider event received %+v", m)
containers, err := p.listContainers(ctx, dockerClient)
if err != nil {
logger.Errorf("Failed to list containers for docker, error %s", err)
// Call cancel to get out of the monitor
return
}
configuration := p.buildConfiguration(ctx, containers)
if configuration != nil {
message := config.Message{
ProviderName: "docker",
Configuration: configuration,
}
select {
case configurationChan <- message:
case <-ctx.Done():
}
}
}
eventsc, errc := dockerClient.Events(ctx, options)
for {
select {
case event := <-eventsc:
if event.Action == "start" ||
event.Action == "die" ||
strings.HasPrefix(event.Action, "health_status") {
startStopHandle(event)
}
case err := <-errc:
if err == io.EOF {
logger.Debug("Provider event stream closed")
}
return err
case <-ctx.Done():
return nil
}
}
}
}
return nil
}
notify := func(err error, time time.Duration) {
logger.Errorf("Provider connection error %+v, retrying in %s", err, time)
}
err := backoff.RetryNotify(safe.OperationWithRecover(operation), backoff.WithContext(job.NewBackOff(backoff.NewExponentialBackOff()), ctxLog), notify)
if err != nil {
logger.Errorf("Cannot connect to docker server %+v", err)
}
})
return nil
}
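// Provide is normally driven by traefik's provider aggregator; a standalone
// sketch (pool construction simplified, variable names assumed):
//
//	messages := make(chan config.Message)
//	pool := safe.NewPool(context.Background())
//	if err := p.Provide(messages, pool); err != nil {
//		log.Fatal(err)
//	}
//	for msg := range messages {
//		fmt.Printf("configuration received from %s\n", msg.ProviderName)
//	}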
func (p *Provider) listContainers(ctx context.Context, dockerClient client.ContainerAPIClient) ([]dockerData, error) {
containerList, err := dockerClient.ContainerList(ctx, dockertypes.ContainerListOptions{})
if err != nil {
return nil, err
}
var inspectedContainers []dockerData
// inspect each listed container
for _, container := range containerList {
dData := inspectContainers(ctx, dockerClient, container.ID)
if len(dData.Name) == 0 {
continue
}
extraConf, err := p.getConfiguration(dData)
if err != nil {
log.FromContext(ctx).Errorf("Skip container %s: %v", getServiceName(dData), err)
continue
}
dData.ExtraConf = extraConf
inspectedContainers = append(inspectedContainers, dData)
}
return inspectedContainers, nil
}
func inspectContainers(ctx context.Context, dockerClient client.ContainerAPIClient, containerID string) dockerData {
containerInspected, err := dockerClient.ContainerInspect(ctx, containerID)
if err != nil {
log.FromContext(ctx).Warnf("Failed to inspect container %s, error: %s", containerID, err)
return dockerData{}
}
// This condition is here to avoid having an empty IP https://github.com/containous/traefik/issues/2459
// We register only containers that are running
if containerInspected.ContainerJSONBase != nil && containerInspected.ContainerJSONBase.State != nil && containerInspected.ContainerJSONBase.State.Running {
return parseContainer(containerInspected)
}
return dockerData{}
}
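// parseContainer copies the fields the provider needs (ID, name, node, health, labels, ports and networks) out of the raw inspection result.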
func parseContainer(container dockertypes.ContainerJSON) dockerData |
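// listServices builds dockerData for swarm services: services load-balanced by swarm are kept directly, while for the others the running tasks are enumerated instead.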
func (p *Provider) listServices(ctx context.Context, dockerClient client.APIClient) ([]dockerData, error) {
logger := log.FromContext(ctx)
serviceList, err := dockerClient.ServiceList(ctx, dockertypes.ServiceListOptions{})
if err != nil {
return nil, err
}
serverVersion, err := dockerClient.ServerVersion(ctx)
if err != nil {
return nil, err
}
networkListArgs := filters.NewArgs()
// https://docs.docker.com/engine/api/v1.29/#tag/Network (Docker 17.06)
if versions.GreaterThanOrEqualTo(serverVersion.APIVersion, "1.29") {
networkListArgs.Add("scope", "swarm")
} else {
networkListArgs.Add("driver", "overlay")
}
networkList, err := dockerClient.NetworkList(ctx, dockertypes.NetworkListOptions{Filters: networkListArgs})
if err != nil {
logger.Debugf("Failed to network inspect on client for docker, error: %s", err)
return nil, err
}
networkMap := make(map[string]*dockertypes.NetworkResource)
for _, network := range networkList {
networkToAdd := network
networkMap[network.ID] = &networkToAdd
}
var dockerDataList []dockerData
var dockerDataListTasks []dockerData
for _, service := range serviceList {
dData, err := p.parseService(ctx, service, networkMap)
if err != nil {
logger.Errorf("Skip container %s: %v", getServiceName(dData), err)
continue
}
if dData.ExtraConf.Docker.LBSwarm {
if len(dData.NetworkSettings.Networks) > 0 {
dockerDataList = append(dockerDataList, dData)
}
} else {
isGlobalSvc := service.Spec.Mode.Global != nil
dockerDataListTasks, err = listTasks(ctx, dockerClient, service.ID, dData, networkMap, isGlobalSvc)
if err != nil {
logger.Warn(err)
} else {
dockerDataList = append(dockerDataList, dockerDataListTasks...)
}
}
}
return dockerDataList, err
}
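// parseService converts a swarm service into dockerData, resolving VIP endpoint addresses through the pre-fetched network map.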
func (p *Provider) parseService(ctx context.Context, service swarmtypes.Service, networkMap map[string]*dockertypes.NetworkResource) (dockerData, error) {
logger := log.FromContext(ctx)
dData := dockerData{
ID: service.ID,
ServiceName: service.Spec.Annotations.Name,
Name: service.Spec.Annotations.Name,
Labels: service.Spec.Annotations.Labels,
NetworkSettings: networkSettings{},
}
extraConf, err := p.getConfiguration(dData)
if err != nil {
return dockerData{}, err
}
dData.ExtraConf = extraConf
if service.Spec.EndpointSpec != nil {
if service.Spec.EndpointSpec.Mode == swarmtypes.ResolutionModeDNSRR {
if dData.ExtraConf.Docker.LBSwarm {
logger.Warnf("Ignored %s endpoint-mode not supported, service name: %s. Fallback to Traefik load balancing", swarmtypes.ResolutionModeDNSRR, service.Spec.Annotations.Name)
}
} else if service.Spec.EndpointSpec.Mode == swarmtypes.ResolutionModeVIP {
dData.NetworkSettings.Networks = make(map[string]*networkData)
for _, virtualIP := range service.Endpoint.VirtualIPs {
networkService := networkMap[virtualIP.NetworkID]
if networkService != nil {
if len(virtualIP.Addr) > 0 {
ip, _, _ := net.ParseCIDR(virtualIP.Addr)
network := &networkData{
Name: networkService.Name,
ID: virtualIP.NetworkID,
Addr: ip.String(),
}
dData.NetworkSettings.Networks[network.Name] = network
} else {
logger.Debugf("No virtual IPs found in network %s", virtualIP.NetworkID)
}
} else {
logger.Debugf("Network not found, id: %s", virtualIP.NetworkID)
}
}
}
}
return dData, nil
}
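// listTasks returns one dockerData per running task of the given service, keeping only tasks attached to at least one known network.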
func listTasks(ctx context.Context, dockerClient client.APIClient, serviceID string,
serviceDockerData dockerData, networkMap map[string]*dockertypes.NetworkResource, isGlobalSvc bool) ([]dockerData, error) {
serviceIDFilter := filters.NewArgs()
serviceIDFilter.Add("service", serviceID)
serviceIDFilter.Add("desired-state", "running")
taskList, err := dockerClient.TaskList(ctx, dockertypes.TaskListOptions{Filters: serviceIDFilter})
if err != nil {
return nil, err
}
var dockerDataList []dockerData
for _, task := range taskList {
if task.Status.State != swarmtypes.TaskStateRunning {
continue
}
dData := parseTasks(ctx, task, serviceDockerData, networkMap, isGlobalSvc)
if len(dData.NetworkSettings.Networks) > 0 {
dockerDataList = append(dockerDataList, dData)
}
}
return dockerDataList, err
}
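// parseTasks names each task after its service and slot (or task ID for global services) and collects its per-network addresses.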
func parseTasks(ctx context.Context, task swarmtypes.Task, serviceDockerData dockerData,
networkMap map[string]*dockertypes.NetworkResource, isGlobalSvc bool) dockerData {
dData := dockerData{
ID: task.ID,
ServiceName: serviceDockerData.Name,
Name: serviceDockerData.Name + "." + strconv.Itoa(task.Slot),
Labels: serviceDockerData.Labels,
ExtraConf: serviceDockerData.ExtraConf,
NetworkSettings: networkSettings{},
}
if isGlobalSvc {
dData.Name = serviceDockerData.Name + "." + task.ID
}
if task.NetworksAttachments != nil {
dData.NetworkSettings.Networks = make(map[string]*networkData)
for _, virtualIP := range task.NetworksAttachments {
if networkService, present := networkMap[virtualIP.Network.ID]; present {
if len(virtualIP.Addresses) > 0 {
// Not sure about this next loop - when would a task have multiple IPs for the same network?
for _, addr := range virtualIP.Addresses {
ip, _, _ := net.ParseCIDR(addr)
network := &networkData{
ID: virtualIP.Network.ID,
Name: networkService.Name,
Addr: ip.String(),
}
dData.NetworkSettings.Networks[network.Name] = network
}
} else {
log.FromContext(ctx).Debugf("No IP addresses found for network %s", virtualIP.Network.ID)
}
}
}
}
return dData
}
| {
dData := dockerData{
NetworkSettings: networkSettings{},
}
if container.ContainerJSONBase != nil {
dData.ID = container.ContainerJSONBase.ID
dData.Name = container.ContainerJSONBase.Name
dData.ServiceName = dData.Name // Default ServiceName to be the container's Name.
dData.Node = container.ContainerJSONBase.Node
if container.ContainerJSONBase.HostConfig != nil {
dData.NetworkSettings.NetworkMode = container.ContainerJSONBase.HostConfig.NetworkMode
}
if container.State != nil && container.State.Health != nil {
dData.Health = container.State.Health.Status
}
}
if container.Config != nil && container.Config.Labels != nil {
dData.Labels = container.Config.Labels
}
if container.NetworkSettings != nil {
if container.NetworkSettings.Ports != nil {
dData.NetworkSettings.Ports = container.NetworkSettings.Ports
}
if container.NetworkSettings.Networks != nil {
dData.NetworkSettings.Networks = make(map[string]*networkData)
for name, containerNetwork := range container.NetworkSettings.Networks {
dData.NetworkSettings.Networks[name] = &networkData{
ID: containerNetwork.NetworkID,
Name: name,
Addr: containerNetwork.IPAddress,
}
}
}
}
return dData
} |
GqlResolverExceptions.filter.ts | // This exception filter should be used for every resolver
// e.g:
// @UseFilters(GqlResolverExceptionsFilter)
// export class AuthResolver {
// It logs the exception with context information like IP, Host, UserId
// It uses Winston directly to log the error
import { Catch, ArgumentsHost, Inject, HttpException } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { GqlExceptionFilter, GqlArgumentsHost } from '@nestjs/graphql';
import { WINSTON_MODULE_PROVIDER } from 'nest-winston';
import { Logger } from 'winston';
import { PrismaClientKnownRequestError } from '@prisma/client';
import { ApolloError } from 'apollo-server-express';
import { Request } from 'express';
import { AmplicationError } from '../errors/AmplicationError';
export type RequestData = {
query: string;
hostname: string;
ip: string;
userId: string;
};
export const PRISMA_CODE_UNIQUE_KEY_VIOLATION = 'P2002';
export class UniqueKeyException extends ApolloError {
constructor(fields: string[]) {
super(
`Another record with the same key already exist (${fields.join(', ')})`
);
}
}
export class InternalServerError extends ApolloError {
constructor() {
super('Internal server error');
}
}
export function createRequestData(req: Request): RequestData {
const user = req.user as { id: string } | null;
return {
query: req.body?.query,
hostname: req.hostname,
ip: req.ip,
userId: user?.id
};
}
@Catch()
export class GqlResolverExceptionsFilter implements GqlExceptionFilter {
constructor(
@Inject(WINSTON_MODULE_PROVIDER) private readonly logger: Logger,
private readonly configService: ConfigService
) {}
catch(exception: Error, host: ArgumentsHost): Error {
const requestData = this.prepareRequestData(host);
let clientError: Error;
/**@todo: Complete the list of expected error codes */
if ( | // Convert PrismaClientKnownRequestError to UniqueKeyException and pass the error to the client
const fields = (exception.meta as { target: string[] }).target;
clientError = new UniqueKeyException(fields);
this.logger.info(clientError.message, { requestData });
} else if (exception instanceof AmplicationError) {
// Convert AmplicationError to ApolloError and pass the error to the client
clientError = new ApolloError(exception.message);
this.logger.info(clientError.message, { requestData });
} else if (exception instanceof HttpException) {
// Return HTTP Exceptions to the client
clientError = exception;
this.logger.info(clientError.message, { requestData });
} else {
// Log the original exception and return a generic server error to client
// eslint-disable-next-line
// @ts-ignore
exception.requestData = requestData;
this.logger.error(exception);
clientError =
this.configService.get('NODE_ENV') === 'production'
? new InternalServerError()
: new ApolloError(exception.message);
}
return clientError;
}
prepareRequestData(host: ArgumentsHost): RequestData | null {
const { req } = GqlArgumentsHost.create(host).getContext();
return req ? createRequestData(req) : null;
}
} | exception instanceof PrismaClientKnownRequestError &&
exception.code === PRISMA_CODE_UNIQUE_KEY_VIOLATION
) { |
noop_test.go | package noop
import (
"testing"
"github.com/kelindar/talaria/internal/encoding/block"
"github.com/stretchr/testify/assert"
)
func TestNoop(t *testing.T) | {
noopWriter := New()
assert.NotPanics(t, func() {
_ = noopWriter.Write(nil, nil)
})
assert.NotPanics(t, func() {
_ = noopWriter.Stream(block.Row{})
})
} |
|
api_test.go | package github
import (
"fmt"
"testing"
)
func TestFind(t *testing.T) |
func TestFindSpecificLang(t *testing.T) {
repos, err := Find("go", Today)
if err != nil {
t.Fatal("Error by find.", err)
}
if len(repos) != 25 {
t.Fatal("Not return 25 repositories.", err)
}
for _, repo := range repos {
fmt.Println("========================")
repo.Print()
}
}
| {
repos, err := Find("", Today)
if err != nil {
t.Fatal("Error by find.", err)
}
if len(repos) != 25 {
t.Fatal("Not return 25 repositories.", err)
}
for _, repo := range repos {
fmt.Println("------------------------")
repo.Print()
}
} |
__init__.py | import os
import sys |
sys.path.append(os.path.dirname(__file__)) | |
index.js | var utils = require('./utils')
var AND = '&&'
, OR = '||'
, AND_STR = 'and'
, OR_STR = 'or'
, NOT = '!'
, EQUAL = '='
, LIKE = '~'
, NOTEQUAL = NOT + EQUAL
, NOTLIKE = NOT + LIKE
, GT = '>'
, GE = '>='
, LT = '<'
, LE = '<='
, WILDCARD = '*'
, COMMA = ','
, DELIMITER = '.'
, LEFT = '('
, RIGHT = ')'
, WHERE = 'where'
, synopsis = {
pathway: [],
groups: {}
}
, AST = {}
, options = {};
var print = console.log;
// ------------------ splitter -------------------- //
function Tokenize(query) {
var parts = __splitTrim(query, WHERE);
var pathway = parts[0];
var where = parts[1];
synopsis.pathway = __splitTrim(pathway, COMMA);
for (var i = 0, len = synopsis.pathway.length; i < len; i++) {
synopsis.pathway[i] = __splitTrim(synopsis.pathway[i], DELIMITER);
if (synopsis.pathway[i][0] == WILDCARD)
synopsis.pathway[i].shift();
if (synopsis.pathway[i].length === 0)
synopsis.pathway.splice(i, 1);
}
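// Extract innermost (...) groups into synopsis.groups under generated keys so the flat &&/|| split below can ignore parentheses.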
var lastLeft = -1,
lastRight = -1,
current = 0;
while (current < where.length) {
if (where[current] === LEFT) {
lastLeft = current;
} else if (where[current] === RIGHT) {
lastRight = current;
if (lastRight > lastLeft && lastLeft !== -1) {
var k = 'gr' + '_' + new Date().getTime();
synopsis.groups[k] = where.substring(lastLeft + 1, lastRight);
where = where.replace(LEFT + synopsis.groups[k] + RIGHT, k);
current = -1;
}
}
current += 1;
}
LogicalGrouping(AST, where);
}
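// LogicalGrouping recursively splits the where clause on the first && or || into nested and/or arrays, expanding stored groups as it goes.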
function LogicalGrouping(current, where) {
var lastAnd = __findIndex(where, AND),
lastOr = __findIndex(where, OR);
if (lastAnd !== Number.MAX_VALUE || lastOr !== Number.MAX_VALUE) {
if (lastAnd < lastOr) {
current.and = current.and || [];
var parts = __splitTrim(where, AND);
current.and.push(parts[0]);
LogicalGrouping(current.and, parts[1]);
} else {
current.or = current.or || [];
var parts = __splitTrim(where, OR);
current.or.push(parts[0]);
LogicalGrouping(current.or, parts[1]);
}
} else {
if (synopsis.groups[where]) {
where = synopsis.groups[where];
LogicalGrouping(current, where);
} else {
if (Array.isArray(current))
current.push(where);
else
current.or = [where];
ExtractExpression(AST.or ? AST.or : AST.and);
}
}
}
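// ExtractExpression rewrites each leaf comparison string (e.g. 'a.b >= 5') into an operator node such as { ge: ['a.b', '5'] }.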
function ExtractExpression(logicalGroup) {
for (var k in logicalGroup) {
if (logicalGroup.hasOwnProperty(k)) {
if (Array.isArray(logicalGroup[k])) {
ExtractExpression(logicalGroup[k]);
}
else if (typeof logicalGroup[k] === 'string') {
if (__contains(logicalGroup[k], NOTEQUAL)) {
var parts = __splitTrim(logicalGroup[k], NOTEQUAL);
logicalGroup[k] = {
ne: [
parts[0],
parts[1]
]
};
} else if (__contains(logicalGroup[k], NOTLIKE)) {
var parts = __splitTrim(logicalGroup[k], NOTLIKE);
logicalGroup[k] = {
nreq: [
parts[0],
parts[1]
]
};
} else if (__contains(logicalGroup[k], LIKE)) {
var parts = __splitTrim(logicalGroup[k], LIKE);
logicalGroup[k] = { // rough eq
req: [
parts[0],
parts[1]
]
};
} else if (__contains(logicalGroup[k], GE)) {
var parts = __splitTrim(logicalGroup[k], GE);
logicalGroup[k] = { // greater than or equal
ge: [
parts[0],
parts[1]
]
};
} else if (__contains(logicalGroup[k], GT)) {
var parts = __splitTrim(logicalGroup[k], GT);
logicalGroup[k] = { // greater than
gt: [
parts[0],
parts[1]
]
};
} else if (__contains(logicalGroup[k], LE)) {
var parts = __splitTrim(logicalGroup[k], LE);
logicalGroup[k] = { // less than or equal
le: [
parts[0],
parts[1]
]
};
} else if (__contains(logicalGroup[k], LT)) {
var parts = __splitTrim(logicalGroup[k], LT);
logicalGroup[k] = { // less than
lt: [
parts[0],
parts[1]
]
};
} else if (__contains(logicalGroup[k], EQUAL)) {
var parts = __splitTrim(logicalGroup[k], EQUAL);
logicalGroup[k] = {
eq: [
parts[0],
parts[1]
]
};
}
}
}
}
}
function __findIndex(str, token) {
var index = str.indexOf(token);
return index === -1 ? Number.MAX_VALUE : index;
}
function __splitTrim(str, token) {
return str.split(token).map(function (p) {
return p.trim();
});
}
function __contains(a, b) {
return a.indexOf(b) > -1;
}
function __hierarchize(obj, dottedPath) {
var parts = __splitTrim(dottedPath, DELIMITER);
var res = obj;
for (var p in parts) {
if (res.hasOwnProperty(parts[p]))
res = res[parts[p]];
else
return '';
}
// support comparison for Date/DateString
if(utils.isDate(res)) res = res.valueOf()
else if(utils.isDateString(res)) res = utils.parseDateFromString(res)
else res = res.toString()
return res
}
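// FilterOR succeeds as soon as one child of the node matches the row; FilterAND below fails as soon as one child does not.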
function FilterOR(ASTNode, row) {
var res = false;
for (var k in ASTNode) {
var filterFunc = (k === AND_STR ? FilterAND : (k === OR_STR ? FilterOR : Filter));
res = res || filterFunc(ASTNode[k], row); | if (options.trace)
print(synopsis.step, '======((( or', ASTNode[k], res);
if (res) return res;
}
return res;
}
function FilterAND(ASTNode, row) {
var res = true;
for (var k in ASTNode) {
var filterFunc = (k === AND_STR ? FilterAND : (k === OR_STR ? FilterOR : Filter));
res = res && filterFunc(ASTNode[k], row);
if (options.trace)
print(synopsis.step, '======((( and', ASTNode[k], res);
if (!res) return res;
}
return res;
}
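// Filter dispatches on the AST node shape: and/or branches recurse, comparison leaves are evaluated against the row via __hierarchize.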
function Filter(ASTNode, row) {
synopsis.step += 1;
if (ASTNode.or) {
var res = FilterOR(ASTNode.or, row);
if (options.trace)
print(synopsis.step, 'OR', ASTNode, res);
return res;
} else if (ASTNode.and) {
var res = FilterAND(ASTNode.and, row);
if (options.trace)
print(synopsis.step, 'AND', ASTNode, res);
return res;
} else if (typeof ASTNode === 'object') {
if (ASTNode.eq) { // =
return __hierarchize(row, ASTNode.eq[0]) === ASTNode.eq[1];
} else if (ASTNode.ne) { // !=
return __hierarchize(row, ASTNode.ne[0]) !== ASTNode.ne[1];
} else if (ASTNode.req) { // ~
return __contains(__hierarchize(row, ASTNode.req[0]), ASTNode.req[1]);
} else if (ASTNode.nreq) { // !~
return !__contains(__hierarchize(row, ASTNode.nreq[0]), ASTNode.nreq[1]);
} else if (ASTNode.gt) { // >
return __hierarchize(row, ASTNode.gt[0]) > ASTNode.gt[1];
} else if (ASTNode.ge) { // >=
return __hierarchize(row, ASTNode.ge[0]) >= ASTNode.ge[1];
} else if (ASTNode.lt) { // <
return __hierarchize(row, ASTNode.lt[0]) < ASTNode.lt[1];
} else if (ASTNode.le) { // <=
return __hierarchize(row, ASTNode.le[0]) <= ASTNode.le[1];
} else {
return Filter(ASTNode, row);
}
}
}
function Parse(dataSource) {
var result = [];
for (var k in dataSource)
if (Filter(AST, dataSource[k]))
result.push(dataSource[k]);
return result;
}
function Fields(result) {
if (result && synopsis.pathway.length > 0) {
//print(synopsis.pathway);
return result.map(function (ele) {
var res = {};
for (var i = 0, len = synopsis.pathway.length; i < len; i++) {
var key = synopsis.pathway[i].join(DELIMITER);
res[key] = __hierarchize(ele, key);
}
return res;
});
}
return result;
}
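// Query resets the parser state, tokenizes the query, filters the data source through the AST and projects the selected fields.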
function Query(dataSource, query, opts) {
synopsis = {
pathway: [],
groups: {},
step: 0
};
AST = {};
opts = opts || {
trace: false
};
options = opts;
Tokenize(query);
return Fields(Parse(dataSource));
}
if (typeof(module) != 'undefined' && typeof(module.exports) != 'undefined') module.exports = Query;
if (typeof(window) != 'undefined') window.Query = Query; | |
lib.rs | #![recursion_limit="200"]
#[macro_use]
extern crate error_chain;
extern crate libflo_error;
extern crate libflo_module;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json; | #[macro_use]
mod register_funcs;
pub mod error;
mod file_funcs;
mod func;
mod func_mapper;
mod impl_any_input_fn;
mod impl_any_input_any_output_fn;
mod impl_any_output_fn;
mod impl_fn;
mod load;
pub mod serialization;
mod string;
#[cfg(test)]
mod test;
pub use error::*;
pub use func::*;
pub use func_mapper::*;
pub use impl_any_input_fn::*;
pub use impl_any_input_any_output_fn::*;
pub use impl_any_output_fn::*;
pub use impl_fn::*; | extern crate sharedlib;
|
index.ts | export * from './access-point'; |
||
hole.go | package routes
import (
"net/http"
"github.com/code-golf/code-golf/config"
"github.com/code-golf/code-golf/session"
"github.com/lib/pq"
)
// GET /{hole}
func holeGET(w http.ResponseWriter, r *http.Request) |
// GET /ng/{hole}
func holeNGGET(w http.ResponseWriter, r *http.Request) {
data := struct {
Authors []string
HideDetails bool
Hole *config.Hole
Langs []*config.Lang
Solutions map[string]map[string]string
}{
Langs: config.LangList,
Solutions: map[string]map[string]string{},
}
var ok bool
if data.Hole, ok = config.HoleByID[param(r, "hole")]; !ok {
if data.Hole, ok = config.ExpHoleByID[param(r, "hole")]; !ok {
w.WriteHeader(http.StatusNotFound)
return
}
}
if c, _ := r.Cookie("hide-details"); c != nil {
data.HideDetails = true
}
// Lookup the hole's author(s).
if data.Hole.Experiment == 0 {
if err := session.Database(r).QueryRow(
`SELECT array_agg(login ORDER BY login)
FROM authors
JOIN users ON id = user_id
WHERE hole = $1`,
data.Hole.ID,
).Scan(pq.Array(&data.Authors)); err != nil {
panic(err)
}
}
if golfer := session.Golfer(r); golfer != nil && data.Hole.Experiment == 0 {
// Fetch all the code per lang.
rows, err := session.Database(r).Query(
`SELECT code, lang, scoring
FROM solutions
WHERE hole = $1 AND user_id = $2`,
data.Hole.ID, golfer.ID,
)
if err != nil {
panic(err)
}
defer rows.Close()
for rows.Next() {
var code, lang, scoring string
if err := rows.Scan(&code, &lang, &scoring); err != nil {
panic(err)
}
if data.Solutions[lang] == nil {
data.Solutions[lang] = map[string]string{scoring: code}
} else {
data.Solutions[lang][scoring] = code
}
}
if err := rows.Err(); err != nil {
panic(err)
}
}
render(w, r, "hole-ng", data, data.Hole.Name)
}
| {
data := struct {
Authors []string
HideDetails bool
Hole *config.Hole
RankingsView string
Solutions []map[string]string
}{
RankingsView: "me",
Solutions: []map[string]string{{}, {}},
}
var ok bool
if data.Hole, ok = config.HoleByID[param(r, "hole")]; !ok {
if data.Hole, ok = config.ExpHoleByID[param(r, "hole")]; !ok {
w.WriteHeader(http.StatusNotFound)
return
}
}
if c, _ := r.Cookie("hide-details"); c != nil {
data.HideDetails = true
}
if c, _ := r.Cookie("rankings-view"); c != nil {
if c.Value == "top" || c.Value == "following" {
data.RankingsView = c.Value
}
}
// Lookup the hole's author(s).
if data.Hole.Experiment == 0 {
if err := session.Database(r).QueryRow(
`SELECT array_agg(login ORDER BY login)
FROM authors
JOIN users ON id = user_id
WHERE hole = $1`,
data.Hole.ID,
).Scan(pq.Array(&data.Authors)); err != nil {
panic(err)
}
}
if golfer := session.Golfer(r); golfer != nil && data.Hole.Experiment == 0 {
// Fetch all the code per lang.
rows, err := session.Database(r).Query(
`SELECT code, lang, scoring
FROM solutions
WHERE hole = $1 AND user_id = $2`,
data.Hole.ID, golfer.ID,
)
if err != nil {
panic(err)
}
defer rows.Close()
for rows.Next() {
var code, lang, scoring string
if err := rows.Scan(&code, &lang, &scoring); err != nil {
panic(err)
}
solution := 0
if scoring == "chars" {
solution = 1
}
data.Solutions[solution][lang] = code
}
if err := rows.Err(); err != nil {
panic(err)
}
}
render(w, r, "hole", data, data.Hole.Name)
} |
sigmoid_test.py | # Copyright 2018 The TensorFlow Probability Authors.
# | # you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Sigmoid Tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from scipy import special
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import bijectors as tfb
from tensorflow_probability.python.bijectors import bijector_test_util
from tensorflow.python.framework import test_util # pylint: disable=g-direct-tensorflow-import,g-import-not-at-top
@test_util.run_all_in_graph_and_eager_modes
class SigmoidBijectorTest(tf.test.TestCase):
"""Tests correctness of the Y = g(X) = (1 + exp(-X))^-1 transformation."""
def testBijector(self):
self.assertStartsWith(tfb.Sigmoid().name, "sigmoid")
x = np.linspace(-10., 10., 100).reshape([2, 5, 10]).astype(np.float32)
y = special.expit(x)
ildj = -np.log(y) - np.log1p(-y)
bijector = tfb.Sigmoid()
self.assertAllClose(
y, self.evaluate(bijector.forward(x)), atol=0., rtol=1e-2)
self.assertAllClose(
x, self.evaluate(bijector.inverse(y)), atol=0., rtol=1e-4)
self.assertAllClose(
ildj,
self.evaluate(bijector.inverse_log_det_jacobian(
y, event_ndims=0)), atol=0., rtol=1e-6)
self.assertAllClose(
-ildj,
self.evaluate(bijector.forward_log_det_jacobian(
x, event_ndims=0)), atol=0., rtol=1e-4)
def testScalarCongruency(self):
bijector_test_util.assert_scalar_congruency(
tfb.Sigmoid(), lower_x=-7., upper_x=7., eval_func=self.evaluate,
rtol=.1)
def testBijectiveAndFinite(self):
x = np.linspace(-100., 100., 100).astype(np.float32)
eps = 1e-3
y = np.linspace(eps, 1. - eps, 100).astype(np.float32)
bijector_test_util.assert_bijective_and_finite(
tfb.Sigmoid(), x, y, eval_func=self.evaluate, event_ndims=0, atol=0.,
rtol=1e-4)
if __name__ == "__main__":
tf.test.main() | # Licensed under the Apache License, Version 2.0 (the "License"); |
cv_facts.py | #!/usr/bin/python
# coding: utf-8 -*-
#
# FIXME: required to pass ansible-test
# GNU General Public License v3.0+
#
# Copyright 2019 Arista Networks AS-EMEA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'
}
import logging
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection, ConnectionError
from ansible_collections.arista.cvp.plugins.module_utils.cv_client import CvpClient
from ansible_collections.arista.cvp.plugins.module_utils.cv_client_errors import CvpLoginError, CvpApiError
DOCUMENTATION = r'''
---
module: cv_facts
version_added: "2.9"
author: EMEA AS Team (@aristanetworks)
short_description: Collect facts from CloudVision Portal.
description:
- Returns list of devices, configlets, containers and images
options:
gather_subset:
description:
- When supplied, this argument will restrict the facts collected
- to a given subset. Possible values for this argument include
- default, config, tasks_pending, tasks_failed and tasks_all. Can specify a list of
- values to include a larger subset. Values can also be used
- with an initial C(!) to specify that a specific subset should
- not be collected.
required: false
default: ['default']
type: list
choices:
- default
- config
- tasks_pending
- tasks_failed
- tasks_all
facts:
description:
- List of facts to retrieve from CVP.
- By default, cv_facts returns facts for devices/configlets/containers/tasks
- Using this parameter allows the user to limit the scope to a subset of information.
required: false
default: ['all']
type: list
choices:
- all
- devices
- containers
- configlets
- tasks
'''
EXAMPLES = r'''
---
tasks:
- name: '#01 - Collect devices facts from {{inventory_hostname}}'
cv_facts:
facts:
devices
register: FACTS_DEVICES
- name: '#02 - Collect devices facts (with config) from {{inventory_hostname}}'
cv_facts:
gather_subset:
config
facts:
devices
register: FACTS_DEVICES_CONFIG
- name: '#03 - Collect configlets facts from {{inventory_hostname}}'
cv_facts:
facts:
configlets
register: FACTS_CONFIGLETS
- name: '#04 - Collect containers facts from {{inventory_hostname}}'
cv_facts:
facts:
containers
register: FACTS_CONTAINERS
- name: '#10 - Collect ALL facts from {{inventory_hostname}}'
cv_facts:
register: FACTS
'''
def connect(module, debug=False):
''' Connects to CVP device using user provided credentials from playbook.
:param module: Ansible module with parameters and client connection.
:return: CvpClient object with connection instantiated.
'''
client = CvpClient()
connection = Connection(module._socket_path)
host = connection.get_option("host")
port = connection.get_option("port")
user = connection.get_option("remote_user")
pswd = connection.get_option("password")
if debug:
logging.debug('*** Connecting to CVP')
try:
client.connect([host],
user,
pswd,
protocol="https",
port=port,
)
except CvpLoginError as e:
module.fail_json(msg=str(e))
if debug:
logging.debug('*** Connected to CVP')
return client
def facts_devices(module, facts, debug=False):
"""
Collect facts of all devices.
Parameters
----------
module : AnsibleModule
Ansible module with parameters and instances
facts : dict
Fact dictionary where devices information will be inserted.
debug : bool, optional
Activate debug logging, by default False
Returns
-------
dict
facts with devices content added.
"""
facts['devices'] = []
# Get Inventory Data for All Devices
inventory = module.client.api.get_inventory()
for device in inventory:
if debug:
logging.debug(' -> Working on %s', device['hostname'])
device['name'] = device['hostname']
# Add designed config for device
if 'config' in module.params['gather_subset'] and device['streamingStatus'] == "active":
device['config'] = module.client.api.get_device_configuration(device['key'])
# Add parent container name
container = module.client.api.get_container_by_id(device['parentContainerKey'])
device['parentContainerName'] = container['name']
# Add Device Specific Configlets
configlets = module.client.api.get_configlets_by_device_id(device['key'])
device['deviceSpecificConfiglets'] = []
for configlet in configlets:
if int(configlet['containerCount']) == 0:
device['deviceSpecificConfiglets'].append(configlet['name'])
# Add ImageBundle Info
device['imageBundle'] = ""
deviceInfo = module.client.api.get_net_element_info_by_device_id(device['key'])
if "imageBundleMapper" in deviceInfo:
# There should only be one ImageBundle but its id is not discernible
# If the Image is applied directly to the device its type will be 'netelement'
if len(deviceInfo['imageBundleMapper'].values()) > 0:
if deviceInfo['imageBundleMapper'].values()[0]['type'] == 'netelement':
device['imageBundle'] = deviceInfo['bundleName']
# Add device to facts list
facts['devices'].append(device)
return facts
def facts_configlets(module, facts, debug=False):
"""
Collect facts of all configlets.
Parameters
----------
module : AnsibleModule
Ansible module with parameters and instances
facts : dict
Fact dictionary where configlets information will be inserted.
debug : bool, optional
Activate debug logging, by default False
Returns
-------
dict
facts with configlets content added.
"""
facts['configlets'] = []
configlets = module.client.api.get_configlets()['data']
# Reduce configlet data to required fields
for configlet in configlets:
if debug:
logging.debug(' -> Working on %s', configlet['name'])
# Get list of devices attached to configlet.
configlet['devices'] = []
applied_devices = module.client.api.get_devices_by_configlet(configlet['name'])
for device in applied_devices['data']:
configlet['devices'].append(device['hostName'])
# Get list of containers attached to configlet.
configlet['containers'] = []
applied_containers = module.client.api.get_containers_by_configlet(configlet['name'])
for container in applied_containers['data']:
configlet['containers'].append(container['containerName'])
# Add configlet to facts list
facts['configlets'].append(configlet)
return facts
def facts_containers(module, facts, debug=False):
"""
Collect facts of all containers.
Parameters
----------
module : AnsibleModule
Ansible module with parameters and instances
facts : dict
Fact dictionary where containers information will be inserted.
debug : bool, optional
Activate debug logging, by default False
Returns
-------
dict
facts with containers content added.
"""
facts['containers'] = []
# Get List of all Containers
containers = module.client.api.get_containers()['data']
for container in containers:
if debug:
logging.debug(' -> Working on %s', container['name'])
container['devices'] = []
# Get list of devices attached to container.
applied_devices = module.client.api.get_devices_by_container_id(container['key'])
for device in applied_devices:
container['devices'].append(device['fqdn'])
# Get list of configlets attached to container.
container['configlets'] = []
applied_configlets = module.client.api.get_configlets_by_container_id(container['key'])['configletList']
for configlet in applied_configlets:
container['configlets'].append(configlet['name'])
# Add applied Images
container['imageBundle'] = ""
applied_images = module.client.api.get_image_bundle_by_container_id(container['key'])['imageBundleList']
if len(applied_images) > 0:
container['imageBundle'] = applied_images[0]['name']
# Add container to facts list
facts['containers'].append(container)
return facts
def facts_tasks(module, facts, debug=False):
|
def facts_builder(module, debug=False):
"""
Method to call every fact module for either devices/containers/configlets.
Parameters
----------
module : AnsibleModule
Ansible module with parameters and instances
debug : bool, optional
Activate debug logging, by default False
Returns
-------
dict
facts structure to return by Ansible
"""
facts = {}
# Get version data for CVP
if debug:
logging.debug('** Collecting CVP Information (version)')
facts['cvp_info'] = module.client.api.get_cvp_info()
# Extract devices facts
if 'all' in module.params['facts'] or 'devices' in module.params['facts']:
if debug:
logging.debug('** Collecting devices facts ...')
facts = facts_devices(module=module, facts=facts, debug=debug)
# Extract configlet information
if 'all' in module.params['facts'] or 'configlets' in module.params['facts']:
if debug:
logging.debug('** Collecting configlets facts ...')
facts = facts_configlets(module=module, facts=facts, debug=debug)
# Extract containers information
if 'all' in module.params['facts'] or 'containers' in module.params['facts']:
if debug:
logging.debug('** Collecting containers facts ...')
facts = facts_containers(module=module, facts=facts, debug=debug)
# Extract tasks information
if 'all' in module.params['facts'] or 'tasks' in module.params['facts']:
if debug:
logging.debug('** Collecting tasks facts ...')
facts = facts_tasks(module=module, facts=facts, debug=debug)
# Extract imageBundles information
if 'all' in module.params['facts'] or 'images' in module.params['facts']:
if debug:
logging.debug('** Collecting images facts ...')
facts['imageBundles'] = list()
# End of Facts module
if debug:
logging.debug('** All facts done')
return facts
def main():
"""
main entry point for module execution.
"""
debug_module = False
if debug_module:
logging.basicConfig(format='%(asctime)s %(message)s',
filename='cv_fact_v2.log', level=logging.DEBUG)
argument_spec = dict(
gather_subset=dict(type='list',
elements='str',
required=False,
choices=['default',
'config',
'tasks_pending',
'tasks_all',
'tasks_failed'],
default='default'),
facts=dict(type='list',
elements='str',
required=False,
choices=['all',
'configlets',
'containers',
'devices',
'tasks'],
default='all'))
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
# Forge standard Ansible output
result = dict(changed=False, ansible_facts={})
# Connect to CVP Instance
module.client = connect(module, debug=debug_module)
# Get Facts from CVP
result['ansible_facts'] = facts_builder(module, debug=debug_module)
# Standard Ansible outputs
module.exit_json(**result)
if __name__ == '__main__':
main()
| """
Collect facts of all tasks.
Parameters
----------
module : AnsibleModule
Ansible module with parameters and instances
facts : dict
Fact dictionary where tasks information will be inserted.
debug : bool, optional
Activate debug logging, by default False
Returns
-------
dict
facts with tasks content added.
"""
facts['tasks'] = []
tasks = []
# Get List of Tasks
if debug:
logging.debug(' -> Extracting tasks with %s', str(module.params['gather_subset']))
if 'tasks_pending' in module.params['gather_subset']:
# We only get pending tasks
tasks.append(module.client.api.get_tasks_by_status(status='Pending'))
if 'tasks_all' in module.params['gather_subset']:
# User wants to get list of all tasks -- not default behavior
tasks.append(module.client.api.get_tasks()['data'])
if 'tasks_failed' in module.params['gather_subset']:
# User wants to get list of failed tasks -- not default behavior
tasks.append(module.client.api.get_tasks_by_status(status='Failed'))
if 'default' in module.params['gather_subset']:
# By default we only extract pending tasks and not all tasks
tasks.append(module.client.api.get_tasks_by_status(status='Pending'))
for task in tasks:
if debug:
logging.debug(' -> Working on %s', task)
facts['tasks'].append(task)
return facts |
test_tile_magma.py | from common.dummy_core_magma import DummyCore
from bit_vector import BitVector
from tile.tile_magma import Tile
from common.testers import BasicTester
import tempfile
from fault.random import random_bv
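# Read back every register previously written and verify the returned data matches.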
def check_all_config(tester,
tile_circ,
tile,
data_written,
inputs_applied):
for addr in data_written:
tester.config_read(addr)
expected_data = data_written[addr]
tester.expect(tile_circ.read_config_data, expected_data)
def test_tile():
core = DummyCore()
tile = Tile(core)
tile_circ = tile.circuit()
# No functional model for tile yet, so we have to use the
# standard fault tester for now
tester = BasicTester(tile_circ, tile_circ.clk, tile_circ.reset)
# assign the tile a random ID for configuration
tile_id = random_bv(16)
tester.poke(tile_circ.tile_id, tile_id)
tester.reset()
# Connect random vals to all tile inputs
inputs_applied = {}
for side_in in (tile_circ.north.I, tile_circ.south.I,
tile_circ.east.I, tile_circ.west.I):
for i in range(len(side_in.layer1)):
port = side_in.layer1[i]
rand_input = random_bv(1)
inputs_applied[port] = rand_input
tester.poke(port, rand_input)
for j in range(len(side_in.layer16)):
port = side_in.layer16[j]
rand_input = random_bv(16)
inputs_applied[port] = rand_input
tester.poke(port, rand_input)
# Write to all configuration registers in the tile
# This test should be applicable to any tile, regardless
# of the core it's using
data_written = {}
for i, feat in enumerate(tile.features()):
feat_addr = BitVector(i, 8)
for reg in feat.registers.values():
reg_addr = BitVector(reg.addr, 8)
upper_config_addr = BitVector.concat(reg_addr, feat_addr)
config_addr = BitVector.concat(upper_config_addr, tile_id)
# Ensure the register is wide enough to contain the random value
rand_data = random_bv(reg.width)
# Further restrict random config data values based on feature
# Only 0-3 valid for SB config_data
if (feat == tile.sb):
if((reg_addr % 2) == 0):
rand_data = rand_data % 4
# Only 0-1 valid for SB regs
else:
rand_data = rand_data % 2
# Only 0-9 valid for CB config_data
elif (feat in tile.cbs):
rand_data = rand_data % 10
# Make sure we pass 32 bits of config data to configure
config_data = BitVector(rand_data, 32)
tester.configure(config_addr, config_data)
# Keep track of data written so we know what to expect to read back
data_written[config_addr] = config_data
# Now, read back all the configuration we just wrote
for addr in data_written:
tester.config_read(addr)
expected_data = data_written[addr]
tester.expect(tile_circ.read_config_data, expected_data)
feat_addr = addr[16:24]
reg_addr = addr[24:32]
check_all_config(tester,
tile_circ,
tile,
data_written,
inputs_applied)
# Try writing to tile with wrong tile id
for config_addr in data_written:
new_tile_id = config_addr[0:16] + 1
upper_config_addr = config_addr[16:32]
new_config_addr = BitVector.concat(upper_config_addr, new_tile_id)
random_data = random_bv(32)
tester.configure(new_config_addr, random_data)
# Read all the config back again to make sure nothing changed
check_all_config(tester,
tile_circ,
tile,
data_written,
inputs_applied)
with tempfile.TemporaryDirectory() as tempdir:
tester.compile_and_run(target="verilator",
magma_output="coreir-verilog",
directory=tempdir, | flags=["-Wno-fatal"]) |
|
network_proposal_reject.go | package starportcmd
import (
"errors"
"fmt"
"github.com/manifoldco/promptui"
"github.com/spf13/cobra"
"github.com/interchained/genesis/genesis/pkg/clispinner"
"github.com/interchained/genesis/genesis/pkg/numbers"
"github.com/interchained/genesis/genesis/pkg/spn"
)
// NewNetworkProposalReject creates a new reject approve command to reject proposals
// for a chain.
func NewNetworkProposalReject() *cobra.Command {
c := &cobra.Command{
Use: "reject [chain-id] [number<,...>]",
Short: "Reject proposals",
RunE: networkProposalRejectHandler,
Args: cobra.ExactArgs(2),
}
return c
}
func | (cmd *cobra.Command, args []string) error {
s := clispinner.New()
defer s.Stop()
s.SetText("Calculating gas...")
var (
chainID = args[0]
proposalList = args[1]
)
nb, err := newNetworkBuilder()
if err != nil {
return err
}
var reviewals []spn.Reviewal
ids, err := numbers.ParseList(proposalList)
if err != nil {
return err
}
for _, id := range ids {
reviewals = append(reviewals, spn.RejectProposal(id))
}
gas, broadcast, err := nb.SubmitReviewals(cmd.Context(), chainID, reviewals...)
if err != nil {
return err
}
s.Stop()
// Prompt for confirmation
prompt := promptui.Prompt{
Label: fmt.Sprintf("This operation will cost about %v gas. Confirm the transaction",
gas,
),
IsConfirm: true,
}
if _, err := prompt.Run(); err != nil {
return errors.New("transaction aborted")
}
s.SetText("Rejecting...")
s.Start()
// Broadcast the transaction
if err := broadcast(); err != nil {
return err
}
s.Stop()
fmt.Printf("Proposal(s) %s rejected ⛔️\n", numbers.List(ids, "#"))
return nil
}
| networkProposalRejectHandler |
main.py | import paramiko
import time
import io
import os
import stat
from yaml_generator import CAYamlGenerator, OrderYamlGenerator, PeerYamlGenerator, ConfigTXYamlGenerator
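# Recursively download a remote directory tree over SFTP into a local directory.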
def | (sftp_client, remote_path, local_path):
try:
sftp_client.stat(remote_path)
except IOError:
return
if not os.path.exists(local_path):
os.mkdir(local_path)
for item in sftp_client.listdir(remote_path):
if stat.S_ISDIR(sftp_client.stat(f'{remote_path}/{item}').st_mode):
sftp_get_r(sftp_client, f'{remote_path}/{item}', os.path.join(local_path, item))
else:
sftp_client.get(f'{remote_path}/{item}', os.path.join(local_path, item))
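# Recursively upload a local directory tree over SFTP, creating each missing remote directory level.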
def sftp_put_r(sftp_client, local_path, remote_path):
if not os.path.exists(local_path):
return
path = ""
for dir in remote_path.split("/"):
if dir == "":
continue
path += f"/{dir}"
try:
sftp_client.listdir(path)
except IOError:
sftp_client.mkdir(path)
try:
sftp_client.stat(remote_path)
except IOError:
sftp_client.mkdir(remote_path)
for item in os.listdir(local_path):
if os.path.isfile(os.path.join(local_path, item)):
sftp_client.put(os.path.join(local_path, item), f'{remote_path}/{item}')
else:
sftp_put_r(sftp_client, os.path.join(local_path, item), f'{remote_path}/{item}')
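# Start the CA container for one group over SSH; the orderer group's CA node also initializes the docker swarm and publishes the join token.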
def generate_ca(ca_id, ca_information, fabric_name, target_host, crypto_base):
node_name, group_name, domain = ca_id.split('.', 2)
address = ca_information['address']
key_file = io.StringIO(address['sk'])
private_key = paramiko.RSAKey.from_private_key(key_file)
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname=address['host'], port=address['ssh_port'], username='root', pkey=private_key)
stdin, stdout, stderr = ssh.exec_command(f'if [ ! -d {crypto_base} ]; then mkdir -p {crypto_base}; fi')
stdout.channel.recv_exit_status()
ftp_client = ssh.open_sftp()
file_name = 'node_build.py'
ftp_client.put(file_name, f'{crypto_base}/{file_name}')
if group_name == 'orderer':
stdin, stdout, stderr = ssh.exec_command(f'python {crypto_base}/node_build.py --func_name init_docker_swarm {target_host} {fabric_name} {crypto_base}')
stdout.channel.recv_exit_status()
ftp_client.get(f'{crypto_base}/token', 'token')
else:
try:
ftp_client.stat(f'{crypto_base}/token')
except IOError:
node_host = address['host']
ftp_client.put('token', f'{crypto_base}/token')
stdin, stdout, stderr = ssh.exec_command(f'python {crypto_base}/node_build.py --func_name join_docker_swarm {node_host} {target_host} {crypto_base}')
stdout.channel.recv_exit_status()
ca_yaml_generator = CAYamlGenerator()
file_name = ca_yaml_generator.generate(ca_id, group_name, fabric_name, address['fabric_port'], crypto_base)
ftp_client.put(file_name, f'{crypto_base}/{file_name}')
stdin, stdout, stderr = ssh.exec_command(f'docker-compose -f {crypto_base}/{file_name} up -d')
stdout.channel.recv_exit_status()
time.sleep(4)
tls_cert_path = f'organizations/fabric-ca/{group_name}'
if not os.path.exists(tls_cert_path):
os.makedirs(tls_cert_path)
ftp_client.get(f'{crypto_base}/{tls_cert_path}/tls-cert.pem', f'{tls_cert_path}/tls-cert.pem')
ftp_client.close()
ssh.close()
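# Enroll the orderer organization and node MSPs against the CA, then pull the TLS CA certificate and server certificate back locally.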
def generate_order_msp(order_id, order_information, ca_port, crypto_base):
node_name, group_name, domain = order_id.split('.', 2)
address = order_information['address']
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
key_file = io.StringIO(address['sk'])
private_key = paramiko.RSAKey.from_private_key(key_file)
ssh.connect(hostname=address['host'], port=address['ssh_port'], username='root', pkey=private_key)
tls_cert_path = f'organizations/fabric-ca/{group_name}'
stdin, stdout, stderr = ssh.exec_command(f'if [ ! -d {crypto_base}/{tls_cert_path} ]; then mkdir -p {crypto_base}/{tls_cert_path}; fi')
stdout.channel.recv_exit_status()
ftp_client = ssh.open_sftp()
ftp_client.put(f'{tls_cert_path}/tls-cert.pem', f'{crypto_base}/{tls_cert_path}/tls-cert.pem')
file_name = 'node_build.py'
ftp_client.put(file_name, f'{crypto_base}/{file_name}')
stdin, stdout, stderr = ssh.exec_command(f'python {crypto_base}/node_build.py --func_name org_msp_generate {group_name} {domain} {ca_port} {crypto_base}')
stdout.channel.recv_exit_status()
# print(stdout.readlines(), stderr.readlines())
stdin, stdout, stderr = ssh.exec_command(f'python {crypto_base}/node_build.py --func_name peer_msp_generate {node_name} {group_name} {domain} {ca_port} {crypto_base}')
stdout.channel.recv_exit_status()
# print(stdout.readlines(), stderr.readlines())
tls_ca_path = f'organizations/{group_name}.{domain}/tlsca'
if not os.path.exists(tls_ca_path):
os.makedirs(tls_ca_path)
ftp_client.get(f'{crypto_base}/{tls_ca_path}/tlsca.{group_name}.{domain}-cert.pem', f'{tls_ca_path}/tlsca.{group_name}.{domain}-cert.pem')
server_path = f'organizations/{group_name}.{domain}/peers/{order_id}/tls'
if not os.path.exists(server_path):
os.makedirs(server_path)
ftp_client.get(f'{crypto_base}/{server_path}/server.crt', f'{server_path}/server.crt')
ftp_client.close()
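# Join the node to the swarm, enroll the org and peer MSPs, start the peer container and mirror the generated crypto material locally.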
def generate_peer(peer_id, peer_information, order_group_id, fabric_name, target_host, ca_port, crypto_base):
node_name, group_name, domain = peer_id.split('.', 2)
address = peer_information['address']
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
key_file = io.StringIO(address['sk'])
private_key = paramiko.RSAKey.from_private_key(key_file)
ssh.connect(hostname=address['host'], port=address['ssh_port'], username='root', pkey=private_key)
tls_cert_path = f'organizations/fabric-ca/{group_name}'
stdin, stdout, stderr = ssh.exec_command(f'if [ ! -d {crypto_base}/{tls_cert_path} ]; then mkdir -p {crypto_base}/{tls_cert_path}; fi')
stdout.channel.recv_exit_status()
ftp_client = ssh.open_sftp()
ftp_client.put(f'{tls_cert_path}/tls-cert.pem', f'{crypto_base}/{tls_cert_path}/tls-cert.pem')
tls_ca_path = f'organizations/{order_group_id}/tlsca'
stdin, stdout, stderr = ssh.exec_command(f'if [ ! -d {crypto_base}/{tls_ca_path} ]; then mkdir -p {crypto_base}/{tls_ca_path}; fi')
stdout.channel.recv_exit_status()
ftp_client.put(f'{tls_ca_path}/tlsca.{order_group_id}-cert.pem', f'{crypto_base}/{tls_ca_path}/tlsca.{order_group_id}-cert.pem')
file_name = 'node_build.py'
ftp_client.put(file_name, f'{crypto_base}/{file_name}')
try:
ftp_client.stat(f'{crypto_base}/token')
except IOError:
node_host = address['host']
ftp_client.put('token', f'{crypto_base}/token')
stdin, stdout, stderr = ssh.exec_command(f'python {crypto_base}/node_build.py --func_name join_docker_swarm {node_host} {target_host} {crypto_base}')
stdout.channel.recv_exit_status()
peer_yaml_generator = PeerYamlGenerator()
file_name = peer_yaml_generator.generate(peer_id, fabric_name, address['fabric_port'], crypto_base)
ftp_client.put(file_name, f'{crypto_base}/{file_name}')
stdin, stdout, stderr = ssh.exec_command(f'python {crypto_base}/node_build.py --func_name org_msp_generate {group_name} {domain} {ca_port} {crypto_base}')
stdout.channel.recv_exit_status()
print(stderr.readlines())
stdin, stdout, stderr = ssh.exec_command(f'python {crypto_base}/node_build.py --func_name peer_msp_generate {node_name} {group_name} {domain} {ca_port} {crypto_base}')
stdout.channel.recv_exit_status()
print(stderr.readlines())
stdin, stdout, stderr = ssh.exec_command(f'docker-compose -f {crypto_base}/{file_name} up -d')
stdout.channel.recv_exit_status()
print(stderr.readlines())
time.sleep(3)
peer_path = f'organizations/{group_name}.{domain}'
if not os.path.exists(peer_path):
os.makedirs(peer_path)
sftp_get_r(ftp_client, f'{crypto_base}/{peer_path}', peer_path)
ftp_client.close()
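# Push the collected MSP material and configtx.yaml to the orderer node, build the channel artifacts and start the orderer.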
def generate_order(order_id, order_information, fabric_name, channel_id, peer_group_ids, configtx_filename: str, crypto_base='/root/opt'):
node_name, group_name, domain = order_id.split('.', 2)
address = order_information['address']
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
key_file = io.StringIO(address['sk'])
private_key = paramiko.RSAKey.from_private_key(key_file)
ssh.connect(hostname=address['host'], port=address['ssh_port'], username='root', pkey=private_key)
ssh.exec_command(f'if [ ! -d {crypto_base}/channel-artifacts ]; then mkdir -p {crypto_base}/channel-artifacts; fi')
ftp_client = ssh.open_sftp()
sftp_put_r(ftp_client, f"organizations/{group_name}.{domain}/peers", f"{crypto_base}/organizations/{group_name}.{domain}/peers")
for peer in peer_group_ids:
sftp_put_r(ftp_client, f"organizations/{peer}/msp/cacerts", f"{crypto_base}/organizations/{peer}/msp/cacerts")
ftp_client.put(f"organizations/{peer}/msp/config.yaml", f"{crypto_base}/organizations/{peer}/msp/config.yaml")
orderer_yaml_generator = OrderYamlGenerator()
filename = orderer_yaml_generator.generate(order_id, group_name, node_name, fabric_name, address["fabric_port"], crypto_base)
ftp_client.put(filename, f"{crypto_base}/{filename}")
ftp_client.put(configtx_filename, f'{crypto_base}/{configtx_filename}')
while True:
try:
ftp_client.stat(f'{crypto_base}/{configtx_filename}')
ftp_client.stat(f'{crypto_base}/{filename}')
print("File exists.")
break
except IOError:
print("File not exists.")
time.sleep(2)
ftp_client.close()
stdin, stdout, stderr = ssh.exec_command(f'python {crypto_base}/node_build.py --func_name init_channel_artifacts {fabric_name} {channel_id} "{crypto_base}" {peer_group_ids} ')
stdout.channel.recv_exit_status()
print(stderr.readlines())
stdin, stdout, stderr = ssh.exec_command(f'docker-compose -f {crypto_base}/{filename} up -d')
stdout.channel.recv_exit_status()
print(stderr.readlines())
time.sleep(4)
ssh.close()
def generate_configtx(groups: dict, nodes: dict, orderers: dict, net_name: str, crypto_base: str):
configtx = ConfigTXYamlGenerator(net_name, crypto_base)
return configtx.input_from("./template/configtx.yaml")\
.generate(groups, nodes, orderers)\
.output_to("./configtx.yaml")\
.get_filename()
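# parse_json drives the whole bring-up: CAs first, then orderer MSPs, per-organization peers, configtx generation and finally the orderers.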
def parse_json(network_topology_json):
order_group_id = ''
order_ca_port = ''
target_host = ''
peer_group_ids = []
for group_id, group_information in network_topology_json['groups'].items():
if group_id.split('.', 1)[0] == 'orderer':
order_group_id = group_id
order_ca_port = network_topology_json['nodes'][group_information['nodes']['ca']]['address']['fabric_port']
target_host = network_topology_json['nodes'][network_topology_json['groups'][group_id]['nodes']['ca']]['address']['host']
else:
peer_group_ids.append(group_id)
generate_ca(group_information['nodes']['ca'], network_topology_json['nodes'][group_information['nodes']['ca']], network_topology_json['blockchains']['fabric-1']['name'], target_host, '/root/opt')
for order_id in network_topology_json['groups'][order_group_id]['nodes']['orderer']:
generate_order_msp(order_id, network_topology_json['nodes'][order_id], order_ca_port, '/root/opt')
for org_id in peer_group_ids:
peer_ca_port = network_topology_json['nodes'][network_topology_json['groups'][org_id]['nodes']['ca']]['address']['fabric_port']
leader_peers_ids = network_topology_json['groups'][org_id]['nodes']['leader_peers']
anchor_peers_ids = network_topology_json['groups'][org_id]['nodes']['anchor_peers']
committing_peers_ids = network_topology_json['groups'][org_id]['nodes']['committing_peers']
endorsing_peers_ids = network_topology_json['groups'][org_id]['nodes']['endorsing_peers']
peer_ids = list(set(leader_peers_ids).union(set(anchor_peers_ids).union(set(committing_peers_ids)).union(set(endorsing_peers_ids))))
for peer_id in peer_ids:
generate_peer(peer_id, network_topology_json['nodes'][peer_id], order_group_id, network_topology_json['blockchains']['fabric-1']['name'], target_host, peer_ca_port, '/root/opt')
orderers = dict()
for node in network_topology_json["nodes"]:
if "orderer" in network_topology_json["nodes"][node]["type"]:
orderers[node] = network_topology_json["nodes"][node]
configtx_filename = generate_configtx(network_topology_json["groups"], network_topology_json["nodes"], orderers, network_topology_json["blockchains"]["fabric-1"]["name"], "/root/opt")
for order_id in network_topology_json['groups'][order_group_id]['nodes']['orderer']:
generate_order(order_id, network_topology_json['nodes'][order_id], network_topology_json['blockchains']['fabric-1']['name'], network_topology_json['blockchains']['fabric-1']['channels'][0], peer_group_ids, configtx_filename)
if __name__ == '__main__':
network_json = {
"groups": {
"orderer.test.com": {
"nodes": {
"ca": "ca.orderer.test.com",
"orderer": ["orderer0.orderer.test.com", "orderer1.orderer.test.com", "orderer2.orderer.test.com"]
},
"blockchains": "fabric-1"
},
"org0.test.com": {
"nodes": {
"ca": "ca.org0.test.com",
"leader_peers": ["peer0.org0.test.com"],
"anchor_peers": ["peer0.org0.test.com"],
"committing_peers": ["peer0.org0.test.com"],
"endorsing_peers": ["peer0.org0.test.com"]
},
"blockchains": "fabric-1",
"channel": ["channel-1"]
},
"org1.test.com": {
"nodes": {
"ca": "ca.org1.test.com",
"leader_peers": ["peer0.org1.test.com"],
"anchor_peers": ["peer0.org1.test.com"],
"committing_peers": ["peer0.org1.test.com"],
"endorsing_peers": ["peer0.org1.test.com"]
},
"blockchains": "fabric-1",
"channel": ["channel-1"]
},
"org2.test.com": {
"nodes": {
"ca": "ca.org2.test.com",
"leader_peers": ["peer0.org2.test.com"],
"anchor_peers": ["peer0.org2.test.com"],
"committing_peers": ["peer0.org2.test.com"],
"endorsing_peers": ["peer0.org2.test.com"]
},
"blockchains": "fabric-1",
"channel": ["channel-1"]
}
},
"nodes": {
"ca.orderer.test.com": {
"address": {"host": "10.134.68.98", "ssh_port": "22", "fabric_port": "7054", "sk": ""},
"type": ["ca"]
},
"orderer0.orderer.test.com": {
"address": {"host": "10.134.68.98", "ssh_port": "22", "fabric_port": "7050", "sk": ""},
"type": ["orderer"]
},
"orderer1.orderer.test.com": {
"address": {"host": "10.134.50.142", "ssh_port": "22", "fabric_port": "7050", "sk": ""},
"type": ["orderer"]
},
"orderer2.orderer.test.com": {
"address": {"host": "10.134.50.70", "ssh_port": "22", "fabric_port": "7050", "sk": ""},
"type": ["orderer"]
},
"ca.org0.test.com": {
"address": {"host": "10.134.68.98", "ssh_port": "22", "fabric_port": "8054", "sk": ""},
"type": ["ca"]
},
"peer0.org0.test.com": {
"address": {"host": "10.134.68.98", "ssh_port": "22", "fabric_port": "7051", "sk": ""},
"bootstrap": ["127.0.0.1:7051"],
"type": ["leader_peer", "anchor_peer", "committing_peer", "endorsing_peers"]
},
"ca.org1.test.com": {
"address": {"host": "10.134.50.142", "ssh_port": "22", "fabric_port": "7054", "sk": ""},
"type": ["ca"]
},
"peer0.org1.test.com": {
"address": {"host": "10.134.50.142", "ssh_port": "22", "fabric_port": "7051", "sk": ""},
"bootstrap": ["127.0.0.1:7051"],
"type": ["leader_peer", "anchor_peer", "committing_peer", "endorsing_peers"]
},
"ca.org2.test.com": {
"address": {"host": "10.134.50.70", "ssh_port": "22", "fabric_port": "7054", "sk": ""},
"type": ["ca"]
},
"peer0.org2.test.com": {
"address": {"host": "10.134.50.70", "ssh_port": "22", "fabric_port": "7051", "sk": ""},
"bootstrap": ["127.0.0.1:7051"],
"type": ["leader_peer", "anchor_peer", "committing_peer", "endorsing_peers"]
},
},
"blockchains": {
"fabric-1": {
"name": "FabricDraw",
"channels": ["channel-1"]
}
}
}
with open('id_rsa', 'r') as file:
sk = file.read()
for node_id in network_json['nodes'].keys():
network_json['nodes'][node_id]['address']['sk'] = sk
parse_json(network_json)
| sftp_get_r |
createMessage.dto.ts | ////import { Document,Types, Schema as MongooseSchema } from 'mongoose';
export class | {
_id?: any //string | MongooseSchema.Types.ObjectId;
ownerId: any //string | MongooseSchema.Types.ObjectId;
roomId: any //string | MongooseSchema.Types.ObjectId;
text ?: string
image ?: string;
file ?: string
} | CreateMessage |
item.rs | use super::*;
use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
use crate::punctuated::Punctuated;
use proc_macro2::TokenStream;
#[cfg(feature = "extra-traits")]
use crate::tt::TokenStreamHelper;
#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
#[cfg(feature = "parsing")]
use std::mem;
ast_enum_of_structs! {
/// Things that can appear directly inside of a module or scope.
///
/// *This type is available if Syn is built with the `"full"` feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
pub enum Item #manual_extra_traits {
/// A constant item: `const MAX: u16 = 65535`.
Const(ItemConst),
/// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
Enum(ItemEnum),
/// An `extern crate` item: `extern crate serde`.
ExternCrate(ItemExternCrate),
/// A free-standing function: `fn process(n: usize) -> Result<()> { ...
/// }`.
Fn(ItemFn),
/// A block of foreign items: `extern "C" { ... }`.
ForeignMod(ItemForeignMod),
/// An impl block providing trait or associated items: `impl<A> Trait
/// for Data<A> { ... }`.
Impl(ItemImpl),
/// A macro invocation, which includes `macro_rules!` definitions.
Macro(ItemMacro),
/// A 2.0-style declarative macro introduced by the `macro` keyword.
Macro2(ItemMacro2),
/// A module or module declaration: `mod m` or `mod m { ... }`.
Mod(ItemMod),
/// A static item: `static BIKE: Shed = Shed(42)`.
Static(ItemStatic),
/// A struct definition: `struct Foo<A> { x: A }`.
Struct(ItemStruct),
/// A trait definition: `pub trait Iterator { ... }`.
Trait(ItemTrait),
/// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
TraitAlias(ItemTraitAlias),
/// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
Type(ItemType),
/// A union definition: `union Foo<A, B> { x: A, y: B }`.
Union(ItemUnion),
/// A use declaration: `use std::collections::HashMap`.
Use(ItemUse),
/// Tokens forming an item not interpreted by Syn.
Verbatim(TokenStream),
#[doc(hidden)]
__Nonexhaustive,
}
}
ast_struct! {
/// A constant item: `const MAX: u16 = 65535`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemConst {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub const_token: Token![const],
pub ident: Ident,
pub colon_token: Token![:],
pub ty: Box<Type>,
pub eq_token: Token![=],
pub expr: Box<Expr>,
pub semi_token: Token![;],
}
}
ast_struct! {
/// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemEnum {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub enum_token: Token![enum],
pub ident: Ident,
pub generics: Generics,
pub brace_token: token::Brace,
pub variants: Punctuated<Variant, Token![,]>,
}
}
ast_struct! {
/// An `extern crate` item: `extern crate serde`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemExternCrate {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub extern_token: Token![extern],
pub crate_token: Token![crate],
pub ident: Ident,
pub rename: Option<(Token![as], Ident)>,
pub semi_token: Token![;],
}
}
ast_struct! {
/// A free-standing function: `fn process(n: usize) -> Result<()> { ...
/// }`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemFn {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub sig: Signature,
pub block: Box<Block>,
}
}
ast_struct! {
/// A block of foreign items: `extern "C" { ... }`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemForeignMod {
pub attrs: Vec<Attribute>,
pub abi: Abi,
pub brace_token: token::Brace,
pub items: Vec<ForeignItem>,
}
}
ast_struct! {
/// An impl block providing trait or associated items: `impl<A> Trait
/// for Data<A> { ... }`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemImpl {
pub attrs: Vec<Attribute>,
pub defaultness: Option<Token![default]>,
pub unsafety: Option<Token![unsafe]>,
pub impl_token: Token![impl],
pub generics: Generics,
/// Trait this impl implements.
pub trait_: Option<(Option<Token![!]>, Path, Token![for])>,
/// The Self type of the impl.
pub self_ty: Box<Type>,
pub brace_token: token::Brace,
pub items: Vec<ImplItem>,
}
}
ast_struct! {
/// A macro invocation, which includes `macro_rules!` definitions.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemMacro {
pub attrs: Vec<Attribute>,
/// The `example` in `macro_rules! example { ... }`.
pub ident: Option<Ident>,
pub mac: Macro,
pub semi_token: Option<Token![;]>,
}
}
ast_struct! {
/// A 2.0-style declarative macro introduced by the `macro` keyword.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemMacro2 #manual_extra_traits {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub macro_token: Token![macro],
pub ident: Ident,
pub rules: TokenStream,
}
}
ast_struct! {
/// A module or module declaration: `mod m` or `mod m { ... }`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemMod {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub mod_token: Token![mod],
pub ident: Ident,
pub content: Option<(token::Brace, Vec<Item>)>,
pub semi: Option<Token![;]>,
}
}
ast_struct! {
/// A static item: `static BIKE: Shed = Shed(42)`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemStatic {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub static_token: Token![static],
pub mutability: Option<Token![mut]>,
pub ident: Ident,
pub colon_token: Token![:],
pub ty: Box<Type>,
pub eq_token: Token![=],
pub expr: Box<Expr>,
pub semi_token: Token![;],
}
}
ast_struct! {
/// A struct definition: `struct Foo<A> { x: A }`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemStruct {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub struct_token: Token![struct],
pub ident: Ident,
pub generics: Generics,
pub fields: Fields,
pub semi_token: Option<Token![;]>,
}
}
ast_struct! {
/// A trait definition: `pub trait Iterator { ... }`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemTrait {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub unsafety: Option<Token![unsafe]>,
pub auto_token: Option<Token![auto]>,
pub trait_token: Token![trait],
pub ident: Ident,
pub generics: Generics,
pub colon_token: Option<Token![:]>,
pub supertraits: Punctuated<TypeParamBound, Token![+]>,
pub brace_token: token::Brace,
pub items: Vec<TraitItem>,
}
}
ast_struct! {
/// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemTraitAlias {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub trait_token: Token![trait],
pub ident: Ident,
pub generics: Generics,
pub eq_token: Token![=],
pub bounds: Punctuated<TypeParamBound, Token![+]>,
pub semi_token: Token![;],
}
}
ast_struct! {
/// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemType {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub type_token: Token![type],
pub ident: Ident,
pub generics: Generics,
pub eq_token: Token![=],
pub ty: Box<Type>,
pub semi_token: Token![;],
}
}
ast_struct! {
/// A union definition: `union Foo<A, B> { x: A, y: B }`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemUnion {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub union_token: Token![union],
pub ident: Ident,
pub generics: Generics,
pub fields: FieldsNamed,
}
}
ast_struct! {
/// A use declaration: `use std::collections::HashMap`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ItemUse {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub use_token: Token![use],
pub leading_colon: Option<Token![::]>,
pub tree: UseTree,
pub semi_token: Token![;],
}
}
#[cfg(feature = "extra-traits")]
impl Eq for Item {}
#[cfg(feature = "extra-traits")]
impl PartialEq for Item {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Item::Const(this), Item::Const(other)) => this == other,
(Item::Enum(this), Item::Enum(other)) => this == other,
(Item::ExternCrate(this), Item::ExternCrate(other)) => this == other,
(Item::Fn(this), Item::Fn(other)) => this == other,
(Item::ForeignMod(this), Item::ForeignMod(other)) => this == other,
(Item::Impl(this), Item::Impl(other)) => this == other,
(Item::Macro(this), Item::Macro(other)) => this == other,
(Item::Macro2(this), Item::Macro2(other)) => this == other,
(Item::Mod(this), Item::Mod(other)) => this == other,
(Item::Static(this), Item::Static(other)) => this == other,
(Item::Struct(this), Item::Struct(other)) => this == other,
(Item::Trait(this), Item::Trait(other)) => this == other,
(Item::TraitAlias(this), Item::TraitAlias(other)) => this == other,
(Item::Type(this), Item::Type(other)) => this == other,
(Item::Union(this), Item::Union(other)) => this == other,
(Item::Use(this), Item::Use(other)) => this == other,
(Item::Verbatim(this), Item::Verbatim(other)) => {
TokenStreamHelper(this) == TokenStreamHelper(other)
}
_ => false,
}
}
}
#[cfg(feature = "extra-traits")]
impl Hash for Item {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
match self {
Item::Const(item) => {
state.write_u8(0);
item.hash(state);
}
Item::Enum(item) => {
state.write_u8(1);
item.hash(state);
}
Item::ExternCrate(item) => {
state.write_u8(2);
item.hash(state);
}
Item::Fn(item) => {
state.write_u8(3);
item.hash(state);
}
Item::ForeignMod(item) => {
state.write_u8(4);
item.hash(state);
}
Item::Impl(item) => {
state.write_u8(5);
item.hash(state);
}
Item::Macro(item) => {
state.write_u8(6);
item.hash(state);
}
Item::Macro2(item) => {
state.write_u8(7);
item.hash(state);
}
Item::Mod(item) => {
state.write_u8(8);
item.hash(state);
}
Item::Static(item) => {
state.write_u8(9);
item.hash(state);
}
Item::Struct(item) => {
state.write_u8(10);
item.hash(state);
}
Item::Trait(item) => {
state.write_u8(11);
item.hash(state);
}
Item::TraitAlias(item) => {
state.write_u8(12);
item.hash(state);
}
Item::Type(item) => {
state.write_u8(13);
item.hash(state);
}
Item::Union(item) => {
state.write_u8(14);
item.hash(state);
}
Item::Use(item) => {
state.write_u8(15);
item.hash(state);
}
Item::Verbatim(item) => {
state.write_u8(16);
TokenStreamHelper(item).hash(state);
}
Item::__Nonexhaustive => unreachable!(),
}
}
}
impl Item {
#[cfg(feature = "parsing")]
pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
match self {
Item::ExternCrate(ItemExternCrate { attrs, .. })
| Item::Use(ItemUse { attrs, .. })
| Item::Static(ItemStatic { attrs, .. })
| Item::Const(ItemConst { attrs, .. })
| Item::Fn(ItemFn { attrs, .. })
| Item::Mod(ItemMod { attrs, .. })
| Item::ForeignMod(ItemForeignMod { attrs, .. })
| Item::Type(ItemType { attrs, .. })
| Item::Struct(ItemStruct { attrs, .. })
| Item::Enum(ItemEnum { attrs, .. })
| Item::Union(ItemUnion { attrs, .. })
| Item::Trait(ItemTrait { attrs, .. })
| Item::TraitAlias(ItemTraitAlias { attrs, .. })
| Item::Impl(ItemImpl { attrs, .. })
| Item::Macro(ItemMacro { attrs, .. })
| Item::Macro2(ItemMacro2 { attrs, .. }) => mem::replace(attrs, new),
Item::Verbatim(_) => Vec::new(),
Item::__Nonexhaustive => unreachable!(),
}
}
}
#[cfg(feature = "extra-traits")]
impl Eq for ItemMacro2 {}
#[cfg(feature = "extra-traits")]
impl PartialEq for ItemMacro2 {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs
&& self.vis == other.vis
&& self.macro_token == other.macro_token
&& self.ident == other.ident
&& TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
}
}
#[cfg(feature = "extra-traits")]
impl Hash for ItemMacro2 {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
self.attrs.hash(state);
self.vis.hash(state);
self.macro_token.hash(state);
self.ident.hash(state);
TokenStreamHelper(&self.rules).hash(state);
}
}
impl From<DeriveInput> for Item {
fn from(input: DeriveInput) -> Item {
match input.data {
Data::Struct(data) => Item::Struct(ItemStruct {
attrs: input.attrs,
vis: input.vis,
struct_token: data.struct_token,
ident: input.ident,
generics: input.generics,
fields: data.fields,
semi_token: data.semi_token,
}),
Data::Enum(data) => Item::Enum(ItemEnum {
attrs: input.attrs,
vis: input.vis,
enum_token: data.enum_token,
ident: input.ident,
generics: input.generics,
brace_token: data.brace_token,
variants: data.variants,
}),
Data::Union(data) => Item::Union(ItemUnion {
attrs: input.attrs,
vis: input.vis,
union_token: data.union_token,
ident: input.ident,
generics: input.generics,
fields: data.fields,
}),
}
}
}
impl From<ItemStruct> for DeriveInput {
fn from(input: ItemStruct) -> DeriveInput {
DeriveInput {
attrs: input.attrs,
vis: input.vis,
ident: input.ident,
generics: input.generics,
data: Data::Struct(DataStruct {
struct_token: input.struct_token,
fields: input.fields,
semi_token: input.semi_token,
}),
}
}
}
impl From<ItemEnum> for DeriveInput {
fn from(input: ItemEnum) -> DeriveInput {
DeriveInput {
attrs: input.attrs,
vis: input.vis,
ident: input.ident,
generics: input.generics,
data: Data::Enum(DataEnum {
enum_token: input.enum_token,
brace_token: input.brace_token,
variants: input.variants,
}),
}
}
}
impl From<ItemUnion> for DeriveInput {
fn from(input: ItemUnion) -> DeriveInput {
DeriveInput {
attrs: input.attrs,
vis: input.vis,
ident: input.ident,
generics: input.generics,
data: Data::Union(DataUnion {
union_token: input.union_token,
fields: input.fields,
}),
}
}
}
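// Editor's note (illustrative sketch, not from the syn sources): a typical
// round-trip through the conversions above, assuming both the "full" and
// "derive" features are enabled:
//
//     let item: ItemStruct = syn::parse_str("struct S { x: u8 }")?;
//     let input = DeriveInput::from(item); // structs, enums, unions only
//     let back = Item::from(input);        // yields Item::Struct again
//
// Only the three data-carrying item kinds convert to DeriveInput; other
// items such as functions or impls have no DeriveInput form.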
ast_enum_of_structs! {
/// A suffix of an import tree in a `use` item: `Type as Renamed` or `*`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
pub enum UseTree {
/// A path prefix of imports in a `use` item: `std::...`.
Path(UsePath),
/// An identifier imported by a `use` item: `HashMap`.
Name(UseName),
        /// A renamed identifier imported by a `use` item: `HashMap as Map`.
Rename(UseRename),
/// A glob import in a `use` item: `*`.
Glob(UseGlob),
/// A braced group of imports in a `use` item: `{A, B, C}`.
Group(UseGroup),
}
}
ast_struct! {
/// A path prefix of imports in a `use` item: `std::...`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct UsePath {
pub ident: Ident,
pub colon2_token: Token![::],
pub tree: Box<UseTree>,
}
}
ast_struct! {
/// An identifier imported by a `use` item: `HashMap`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct UseName {
pub ident: Ident,
}
}
ast_struct! {
    /// A renamed identifier imported by a `use` item: `HashMap as Map`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct UseRename {
pub ident: Ident,
pub as_token: Token![as],
pub rename: Ident,
}
}
ast_struct! {
/// A glob import in a `use` item: `*`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct UseGlob {
pub star_token: Token![*],
}
}
ast_struct! {
/// A braced group of imports in a `use` item: `{A, B, C}`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct UseGroup {
pub brace_token: token::Brace,
pub items: Punctuated<UseTree, Token![,]>,
}
}
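// Editor's note (illustrative, not from the syn sources): how a concrete
// `use` item decomposes into the tree types above. For
// `use std::collections::{HashMap, hash_map::*}`:
//
//     UsePath("std")
//     └── UsePath("collections")
//         └── UseGroup
//             ├── UseName("HashMap")
//             └── UsePath("hash_map")
//                 └── UseGlob
//
// A rename such as `HashMap as Map` would appear as UseTree::Rename instead.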
ast_enum_of_structs! {
/// An item within an `extern` block.
///
/// *This type is available if Syn is built with the `"full"` feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
pub enum ForeignItem #manual_extra_traits {
/// A foreign function in an `extern` block.
Fn(ForeignItemFn),
/// A foreign static item in an `extern` block: `static ext: u8`.
Static(ForeignItemStatic),
/// A foreign type in an `extern` block: `type void`.
Type(ForeignItemType),
/// A macro invocation within an extern block.
Macro(ForeignItemMacro),
/// Tokens in an `extern` block not interpreted by Syn.
Verbatim(TokenStream),
#[doc(hidden)]
__Nonexhaustive,
}
}
ast_struct! {
/// A foreign function in an `extern` block.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ForeignItemFn {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub sig: Signature,
pub semi_token: Token![;],
}
}
ast_struct! {
/// A foreign static item in an `extern` block: `static ext: u8`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ForeignItemStatic {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub static_token: Token![static],
pub mutability: Option<Token![mut]>,
pub ident: Ident,
pub colon_token: Token![:],
pub ty: Box<Type>,
pub semi_token: Token![;],
}
}
ast_struct! {
/// A foreign type in an `extern` block: `type void`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ForeignItemType {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub type_token: Token![type],
pub ident: Ident,
pub semi_token: Token![;],
}
}
ast_struct! {
/// A macro invocation within an extern block.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ForeignItemMacro {
pub attrs: Vec<Attribute>,
pub mac: Macro,
pub semi_token: Option<Token![;]>,
}
}
#[cfg(feature = "extra-traits")]
impl Eq for ForeignItem {}
#[cfg(feature = "extra-traits")]
impl PartialEq for ForeignItem {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(ForeignItem::Fn(this), ForeignItem::Fn(other)) => this == other,
(ForeignItem::Static(this), ForeignItem::Static(other)) => this == other,
(ForeignItem::Type(this), ForeignItem::Type(other)) => this == other,
(ForeignItem::Macro(this), ForeignItem::Macro(other)) => this == other,
(ForeignItem::Verbatim(this), ForeignItem::Verbatim(other)) => {
TokenStreamHelper(this) == TokenStreamHelper(other)
}
_ => false,
}
}
}
#[cfg(feature = "extra-traits")]
impl Hash for ForeignItem {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
match self {
ForeignItem::Fn(item) => {
state.write_u8(0);
item.hash(state);
}
ForeignItem::Static(item) => {
state.write_u8(1);
item.hash(state);
}
ForeignItem::Type(item) => {
state.write_u8(2);
item.hash(state);
}
ForeignItem::Macro(item) => {
state.write_u8(3);
item.hash(state);
}
ForeignItem::Verbatim(item) => {
state.write_u8(4);
TokenStreamHelper(item).hash(state);
}
ForeignItem::__Nonexhaustive => unreachable!(),
}
}
}
ast_enum_of_structs! {
/// An item declaration within the definition of a trait.
///
/// *This type is available if Syn is built with the `"full"` feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
pub enum TraitItem #manual_extra_traits {
/// An associated constant within the definition of a trait.
Const(TraitItemConst),
/// A trait method within the definition of a trait.
Method(TraitItemMethod),
/// An associated type within the definition of a trait.
Type(TraitItemType),
/// A macro invocation within the definition of a trait.
Macro(TraitItemMacro),
/// Tokens within the definition of a trait not interpreted by Syn.
Verbatim(TokenStream),
#[doc(hidden)]
__Nonexhaustive,
}
}
ast_struct! {
/// An associated constant within the definition of a trait.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct TraitItemConst {
pub attrs: Vec<Attribute>,
pub const_token: Token![const],
pub ident: Ident,
pub colon_token: Token![:],
pub ty: Type,
pub default: Option<(Token![=], Expr)>,
pub semi_token: Token![;],
}
}
ast_struct! {
/// A trait method within the definition of a trait.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct TraitItemMethod {
pub attrs: Vec<Attribute>,
pub sig: Signature,
pub default: Option<Block>,
pub semi_token: Option<Token![;]>,
}
}
ast_struct! {
/// An associated type within the definition of a trait.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct TraitItemType {
pub attrs: Vec<Attribute>,
pub type_token: Token![type],
pub ident: Ident,
pub generics: Generics,
pub colon_token: Option<Token![:]>,
pub bounds: Punctuated<TypeParamBound, Token![+]>,
pub default: Option<(Token![=], Type)>,
pub semi_token: Token![;],
}
}
ast_struct! {
/// A macro invocation within the definition of a trait.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct TraitItemMacro {
pub attrs: Vec<Attribute>,
pub mac: Macro,
pub semi_token: Option<Token![;]>,
}
}
#[cfg(feature = "extra-traits")]
impl Eq for TraitItem {}
#[cfg(feature = "extra-traits")]
impl PartialEq for TraitItem {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(TraitItem::Const(this), TraitItem::Const(other)) => this == other,
(TraitItem::Method(this), TraitItem::Method(other)) => this == other,
(TraitItem::Type(this), TraitItem::Type(other)) => this == other,
(TraitItem::Macro(this), TraitItem::Macro(other)) => this == other,
(TraitItem::Verbatim(this), TraitItem::Verbatim(other)) => {
TokenStreamHelper(this) == TokenStreamHelper(other)
}
_ => false,
}
}
}
#[cfg(feature = "extra-traits")]
impl Hash for TraitItem {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
match self {
TraitItem::Const(item) => {
state.write_u8(0);
item.hash(state);
}
TraitItem::Method(item) => {
state.write_u8(1);
item.hash(state);
}
TraitItem::Type(item) => {
state.write_u8(2);
item.hash(state);
}
TraitItem::Macro(item) => {
state.write_u8(3);
item.hash(state);
}
TraitItem::Verbatim(item) => {
state.write_u8(4);
TokenStreamHelper(item).hash(state);
}
TraitItem::__Nonexhaustive => unreachable!(),
}
}
}
ast_enum_of_structs! {
/// An item within an impl block.
///
/// *This type is available if Syn is built with the `"full"` feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
pub enum ImplItem #manual_extra_traits {
/// An associated constant within an impl block.
Const(ImplItemConst),
/// A method within an impl block.
Method(ImplItemMethod),
/// An associated type within an impl block.
Type(ImplItemType),
/// A macro invocation within an impl block.
Macro(ImplItemMacro),
/// Tokens within an impl block not interpreted by Syn.
Verbatim(TokenStream),
#[doc(hidden)]
__Nonexhaustive,
}
}
ast_struct! {
/// An associated constant within an impl block.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ImplItemConst {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub defaultness: Option<Token![default]>,
pub const_token: Token![const],
pub ident: Ident,
pub colon_token: Token![:],
pub ty: Type,
pub eq_token: Token![=],
pub expr: Expr,
pub semi_token: Token![;],
}
}
ast_struct! {
/// A method within an impl block.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ImplItemMethod {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub defaultness: Option<Token![default]>,
pub sig: Signature,
pub block: Block,
}
}
ast_struct! {
/// An associated type within an impl block.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ImplItemType {
pub attrs: Vec<Attribute>,
pub vis: Visibility,
pub defaultness: Option<Token![default]>,
pub type_token: Token![type],
pub ident: Ident,
pub generics: Generics,
pub eq_token: Token![=],
pub ty: Type,
pub semi_token: Token![;],
}
}
ast_struct! {
/// A macro invocation within an impl block.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct ImplItemMacro {
pub attrs: Vec<Attribute>,
pub mac: Macro,
pub semi_token: Option<Token![;]>,
}
}
#[cfg(feature = "extra-traits")]
impl Eq for ImplItem {}
#[cfg(feature = "extra-traits")]
impl PartialEq for ImplItem {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(ImplItem::Const(this), ImplItem::Const(other)) => this == other,
(ImplItem::Method(this), ImplItem::Method(other)) => this == other,
(ImplItem::Type(this), ImplItem::Type(other)) => this == other,
(ImplItem::Macro(this), ImplItem::Macro(other)) => this == other,
(ImplItem::Verbatim(this), ImplItem::Verbatim(other)) => {
TokenStreamHelper(this) == TokenStreamHelper(other)
}
_ => false,
}
}
}
#[cfg(feature = "extra-traits")]
impl Hash for ImplItem {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
match self {
ImplItem::Const(item) => {
state.write_u8(0);
item.hash(state);
}
ImplItem::Method(item) => {
state.write_u8(1);
item.hash(state);
}
ImplItem::Type(item) => {
state.write_u8(2);
item.hash(state);
}
ImplItem::Macro(item) => {
state.write_u8(3);
item.hash(state);
}
ImplItem::Verbatim(item) => {
state.write_u8(4);
TokenStreamHelper(item).hash(state);
}
ImplItem::__Nonexhaustive => unreachable!(),
}
}
}
ast_struct! {
/// A function signature in a trait or implementation: `unsafe fn
/// initialize(&self)`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct Signature {
pub constness: Option<Token![const]>,
pub asyncness: Option<Token![async]>,
pub unsafety: Option<Token![unsafe]>,
pub abi: Option<Abi>,
pub fn_token: Token![fn],
pub ident: Ident,
pub generics: Generics,
pub paren_token: token::Paren,
pub inputs: Punctuated<FnArg, Token![,]>,
pub variadic: Option<Variadic>,
pub output: ReturnType,
}
}
impl Signature {
/// A method's `self` receiver, such as `&self` or `self: Box<Self>`.
pub fn receiver(&self) -> Option<&FnArg> {
let arg = self.inputs.first()?;
match arg {
FnArg::Receiver(_) => Some(arg),
FnArg::Typed(PatType { pat, .. }) => {
if let Pat::Ident(PatIdent { ident, .. }) = &**pat {
if ident == "self" {
return Some(arg);
}
}
None
}
}
}
}
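// Editor's note (illustrative sketch, not from the syn sources; assumes the
// "full" and "parsing" features):
//
//     let f: ItemFn = syn::parse_str("fn area(&self, scale: f64) -> f64 { 0.0 }")?;
//     assert!(matches!(f.sig.receiver(), Some(FnArg::Receiver(_))));
//
// A typed receiver such as `self: Box<Self>` is still found by `receiver`,
// but comes back as the `FnArg::Typed` variant rather than `FnArg::Receiver`.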
ast_enum_of_structs! {
/// An argument in a function signature: the `n: usize` in `fn f(n: usize)`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub enum FnArg {
/// The `self` argument of an associated method, whether taken by value
/// or by reference.
///
/// Note that `self` receivers with a specified type, such as `self:
/// Box<Self>`, are parsed as a `FnArg::Typed`.
Receiver(Receiver),
/// A function argument accepted by pattern and type.
Typed(PatType),
}
}
ast_struct! {
/// The `self` argument of an associated method, whether taken by value
/// or by reference.
///
/// Note that `self` receivers with a specified type, such as `self:
/// Box<Self>`, are parsed as a `FnArg::Typed`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
pub struct Receiver {
pub attrs: Vec<Attribute>,
pub reference: Option<(Token![&], Option<Lifetime>)>,
pub mutability: Option<Token![mut]>,
pub self_token: Token![self],
}
}
impl Receiver {
pub fn lifetime(&self) -> Option<&Lifetime> {
self.reference.as_ref()?.1.as_ref()
}
}
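// Editor's note: `lifetime` only returns Some for a borrowed receiver that
// spells a lifetime out, e.g. `&'a self`; both `&self` and by-value `self`
// yield None, since either the lifetime or the reference itself is absent.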
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use crate::ext::IdentExt;
use crate::parse::discouraged::Speculative;
use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
use crate::token::Brace;
use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenTree};
use std::iter::{self, FromIterator};
crate::custom_keyword!(existential);
impl Parse for Item {
fn parse(input: ParseStream) -> Result<Self> {
let mut attrs = input.call(Attribute::parse_outer)?;
let ahead = input.fork();
let vis: Visibility = ahead.parse()?;
let lookahead = ahead.lookahead1();
let mut item = if lookahead.peek(Token![extern]) {
ahead.parse::<Token![extern]>()?;
let lookahead = ahead.lookahead1();
if lookahead.peek(Token![crate]) {
input.parse().map(Item::ExternCrate)
} else if lookahead.peek(Token![fn]) {
input.parse().map(Item::Fn)
} else if lookahead.peek(token::Brace) {
input.parse().map(Item::ForeignMod)
} else if lookahead.peek(LitStr) {
ahead.parse::<LitStr>()?;
let lookahead = ahead.lookahead1();
if lookahead.peek(token::Brace) {
input.parse().map(Item::ForeignMod)
} else if lookahead.peek(Token![fn]) {
input.parse().map(Item::Fn)
} else {
Err(lookahead.error())
}
} else {
Err(lookahead.error())
}
} else if lookahead.peek(Token![use]) {
input.parse().map(Item::Use)
} else if lookahead.peek(Token![static]) {
input.parse().map(Item::Static)
} else if lookahead.peek(Token![const]) {
ahead.parse::<Token![const]>()?;
let lookahead = ahead.lookahead1();
if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
input.parse().map(Item::Const)
} else if lookahead.peek(Token![unsafe])
|| lookahead.peek(Token![async])
|| lookahead.peek(Token![extern])
|| lookahead.peek(Token![fn])
{
input.parse().map(Item::Fn)
} else {
Err(lookahead.error())
}
} else if lookahead.peek(Token![unsafe]) {
ahead.parse::<Token![unsafe]>()?;
let lookahead = ahead.lookahead1();
if lookahead.peek(Token![trait])
|| lookahead.peek(Token![auto]) && ahead.peek2(Token![trait])
{
input.parse().map(Item::Trait)
} else if lookahead.peek(Token![impl]) {
input.parse().map(Item::Impl)
} else if lookahead.peek(Token![async])
|| lookahead.peek(Token![extern])
|| lookahead.peek(Token![fn])
{
input.parse().map(Item::Fn)
} else {
Err(lookahead.error())
}
} else if lookahead.peek(Token![async]) || lookahead.peek(Token![fn]) {
input.parse().map(Item::Fn)
} else if lookahead.peek(Token![mod]) {
input.parse().map(Item::Mod)
} else if lookahead.peek(Token![type]) {
input.parse().map(Item::Type)
} else if lookahead.peek(existential) {
input.call(item_existential).map(Item::Verbatim)
} else if lookahead.peek(Token![struct]) {
input.parse().map(Item::Struct)
} else if lookahead.peek(Token![enum]) {
input.parse().map(Item::Enum)
} else if lookahead.peek(Token![union]) && ahead.peek2(Ident) {
input.parse().map(Item::Union)
} else if lookahead.peek(Token![trait]) {
input.call(parse_trait_or_trait_alias)
} else if lookahead.peek(Token![auto]) && ahead.peek2(Token![trait]) {
input.parse().map(Item::Trait)
} else if lookahead.peek(Token![impl])
|| lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
{
input.parse().map(Item::Impl)
} else if lookahead.peek(Token![macro]) {
input.parse().map(Item::Macro2)
} else if vis.is_inherited()
&& (lookahead.peek(Ident)
|| lookahead.peek(Token![self])
|| lookahead.peek(Token![super])
|| lookahead.peek(Token![extern])
|| lookahead.peek(Token![crate])
|| lookahead.peek(Token![::]))
{
input.parse().map(Item::Macro)
} else {
Err(lookahead.error())
}?;
attrs.extend(item.replace_attrs(Vec::new()));
item.replace_attrs(attrs);
Ok(item)
}
}
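    // Editor's note (illustrative, not from the syn sources): the usual entry
    // point into the dispatch above is a typed parse, e.g.
    //
    //     let item: Item = syn::parse_str("pub use std::fmt::Debug;")?;
    //     assert!(matches!(item, Item::Use(_)));
    //
    // The `ahead` fork lets the keyword lookahead skip past the visibility
    // without consuming it from `input`; each arm then re-parses the item in
    // full, and the attributes collected up front are spliced back in through
    // `replace_attrs`.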
impl Parse for ItemMacro {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let path = input.call(Path::parse_mod_style)?;
let bang_token: Token![!] = input.parse()?;
let ident: Option<Ident> = input.parse()?;
let (delimiter, tokens) = input.call(mac::parse_delimiter)?;
let semi_token: Option<Token![;]> = if !delimiter.is_brace() {
Some(input.parse()?)
} else {
None
};
Ok(ItemMacro {
attrs,
ident,
mac: Macro {
path,
bang_token,
delimiter,
tokens,
},
semi_token,
})
}
}
impl Parse for ItemMacro2 {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
let macro_token: Token![macro] = input.parse()?;
let ident: Ident = input.parse()?;
let mut rules = TokenStream::new();
let mut lookahead = input.lookahead1();
if lookahead.peek(token::Paren) {
let paren_content;
let paren_token = parenthesized!(paren_content in input);
let args: TokenStream = paren_content.parse()?;
let mut args = Group::new(Delimiter::Parenthesis, args);
args.set_span(paren_token.span);
rules.extend(iter::once(TokenTree::Group(args)));
lookahead = input.lookahead1();
}
if lookahead.peek(token::Brace) {
let brace_content;
let brace_token = braced!(brace_content in input);
let body: TokenStream = brace_content.parse()?;
let mut body = Group::new(Delimiter::Brace, body);
body.set_span(brace_token.span);
rules.extend(iter::once(TokenTree::Group(body)));
} else {
return Err(lookahead.error());
}
Ok(ItemMacro2 {
attrs,
vis,
macro_token,
ident,
rules,
})
}
}
impl Parse for ItemExternCrate {
fn parse(input: ParseStream) -> Result<Self> {
Ok(ItemExternCrate {
attrs: input.call(Attribute::parse_outer)?,
vis: input.parse()?,
extern_token: input.parse()?,
crate_token: input.parse()?,
ident: {
if input.peek(Token![self]) {
input.call(Ident::parse_any)?
} else {
input.parse()?
}
},
rename: {
if input.peek(Token![as]) {
let as_token: Token![as] = input.parse()?;
let rename: Ident = if input.peek(Token![_]) {
Ident::from(input.parse::<Token![_]>()?)
} else {
input.parse()?
};
Some((as_token, rename))
} else {
None
}
},
semi_token: input.parse()?,
})
}
}
impl Parse for ItemUse {
fn parse(input: ParseStream) -> Result<Self> {
Ok(ItemUse {
attrs: input.call(Attribute::parse_outer)?,
vis: input.parse()?,
use_token: input.parse()?,
leading_colon: input.parse()?,
tree: input.parse()?,
semi_token: input.parse()?,
})
}
}
impl Parse for UseTree {
fn parse(input: ParseStream) -> Result<UseTree> {
let lookahead = input.lookahead1();
if lookahead.peek(Ident)
|| lookahead.peek(Token![self])
|| lookahead.peek(Token![super])
|| lookahead.peek(Token![crate])
|| lookahead.peek(Token![extern])
{
let ident = input.call(Ident::parse_any)?;
if input.peek(Token![::]) {
Ok(UseTree::Path(UsePath {
ident,
colon2_token: input.parse()?,
tree: Box::new(input.parse()?),
}))
} else if input.peek(Token![as]) {
Ok(UseTree::Rename(UseRename {
ident,
as_token: input.parse()?,
rename: {
if input.peek(Ident) {
input.parse()?
} else if input.peek(Token![_]) {
Ident::from(input.parse::<Token![_]>()?)
} else {
return Err(input.error("expected identifier or underscore"));
}
},
}))
} else {
Ok(UseTree::Name(UseName { ident }))
}
} else if lookahead.peek(Token![*]) {
Ok(UseTree::Glob(UseGlob {
star_token: input.parse()?,
}))
} else if lookahead.peek(token::Brace) {
let content;
Ok(UseTree::Group(UseGroup {
brace_token: braced!(content in input),
items: content.parse_terminated(UseTree::parse)?,
}))
} else {
Err(lookahead.error())
}
}
}
impl Parse for ItemStatic {
fn parse(input: ParseStream) -> Result<Self> {
Ok(ItemStatic {
attrs: input.call(Attribute::parse_outer)?,
vis: input.parse()?,
static_token: input.parse()?,
mutability: input.parse()?,
ident: input.parse()?,
colon_token: input.parse()?,
ty: input.parse()?,
eq_token: input.parse()?,
expr: input.parse()?,
semi_token: input.parse()?,
})
}
}
impl Parse for ItemConst {
fn parse(input: ParseStream) -> Result<Self> {
Ok(ItemConst {
attrs: input.call(Attribute::parse_outer)?,
vis: input.parse()?,
const_token: input.parse()?,
ident: {
let lookahead = input.lookahead1();
if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
input.call(Ident::parse_any)?
} else {
return Err(lookahead.error());
}
},
colon_token: input.parse()?,
ty: input.parse()?,
eq_token: input.parse()?,
expr: input.parse()?,
semi_token: input.parse()?,
})
}
}
fn pop_variadic(args: &mut Punctuated<FnArg, Token![,]>) -> Option<Variadic> {
let trailing_punct = args.trailing_punct();
let last = match args.last_mut()? {
FnArg::Typed(last) => last,
_ => return None,
};
let ty = match last.ty.as_ref() {
Type::Verbatim(ty) => ty,
_ => return None,
};
let mut variadic = Variadic {
attrs: Vec::new(),
dots: parse2(ty.clone()).ok()?,
};
if let Pat::Verbatim(pat) = last.pat.as_ref() {
if pat.to_string() == "..." && !trailing_punct {
variadic.attrs = mem::replace(&mut last.attrs, Vec::new());
args.pop();
}
}
Some(variadic)
}
fn variadic_to_tokens(dots: &Token![...]) -> TokenStream {
TokenStream::from_iter(vec![
TokenTree::Punct({
let mut dot = Punct::new('.', Spacing::Joint);
dot.set_span(dots.spans[0]);
dot
}),
TokenTree::Punct({
let mut dot = Punct::new('.', Spacing::Joint);
dot.set_span(dots.spans[1]);
dot
}),
TokenTree::Punct({
let mut dot = Punct::new('.', Spacing::Alone);
dot.set_span(dots.spans[2]);
dot
}),
])
}
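    // Editor's note: `variadic_to_tokens` reproduces `...` as three `.` puncts
    // carrying the original spans, spaced Joint/Joint/Alone so the stream
    // prints back as `...` rather than `. . .`. `pop_variadic` stores that
    // same stream in both the pattern and type slots of the placeholder
    // argument, and removes the argument from the list only when the pattern
    // really was a bare `...` with no trailing comma.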
impl Parse for ItemFn {
fn parse(input: ParseStream) -> Result<Self> {
let outer_attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
let constness: Option<Token![const]> = input.parse()?;
let asyncness: Option<Token![async]> = input.parse()?;
let unsafety: Option<Token![unsafe]> = input.parse()?;
let abi: Option<Abi> = input.parse()?;
let fn_token: Token![fn] = input.parse()?;
let ident: Ident = input.parse()?;
let generics: Generics = input.parse()?;
let content;
let paren_token = parenthesized!(content in input);
let mut inputs = parse_fn_args(&content)?;
let variadic = pop_variadic(&mut inputs);
let output: ReturnType = input.parse()?;
let where_clause: Option<WhereClause> = input.parse()?;
let content;
let brace_token = braced!(content in input);
let inner_attrs = content.call(Attribute::parse_inner)?;
let stmts = content.call(Block::parse_within)?;
Ok(ItemFn {
attrs: private::attrs(outer_attrs, inner_attrs),
vis,
sig: Signature {
constness,
asyncness,
unsafety,
abi,
fn_token,
ident,
paren_token,
inputs,
output,
variadic,
generics: Generics {
where_clause,
..generics
},
},
block: Box::new(Block { brace_token, stmts }),
})
}
}
impl Parse for FnArg {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let ahead = input.fork();
if let Ok(mut receiver) = ahead.parse::<Receiver>() {
if !ahead.peek(Token![:]) {
input.advance_to(&ahead);
receiver.attrs = attrs;
return Ok(FnArg::Receiver(receiver));
}
}
let mut typed = input.call(fn_arg_typed)?;
typed.attrs = attrs;
Ok(FnArg::Typed(typed))
}
}
impl Parse for Receiver {
fn parse(input: ParseStream) -> Result<Self> {
Ok(Receiver {
attrs: Vec::new(),
reference: {
if input.peek(Token![&]) {
Some((input.parse()?, input.parse()?))
} else {
None
}
},
mutability: input.parse()?,
self_token: input.parse()?,
})
}
}
fn parse_fn_args(input: ParseStream) -> Result<Punctuated<FnArg, Token![,]>> {
let mut args = Punctuated::new();
let mut has_receiver = false;
while !input.is_empty() {
let attrs = input.call(Attribute::parse_outer)?;
let arg = if let Some(dots) = input.parse::<Option<Token![...]>>()? {
FnArg::Typed(PatType {
attrs,
pat: Box::new(Pat::Verbatim(variadic_to_tokens(&dots))),
                    colon_token: Token![:](dots.spans[0]),
ty: Box::new(Type::Verbatim(variadic_to_tokens(&dots))),
})
} else {
let mut arg: FnArg = input.parse()?;
match &mut arg {
FnArg::Receiver(receiver) if has_receiver => {
return Err(Error::new(
receiver.self_token.span,
"unexpected second method receiver",
));
}
FnArg::Receiver(receiver) if !args.is_empty() => {
return Err(Error::new(
receiver.self_token.span,
"unexpected method receiver",
));
}
FnArg::Receiver(receiver) => {
has_receiver = true;
receiver.attrs = attrs;
}
FnArg::Typed(arg) => arg.attrs = attrs,
}
arg
};
args.push_value(arg);
if input.is_empty() {
break;
}
let comma: Token![,] = input.parse()?;
args.push_punct(comma);
}
Ok(args)
}
fn fn_arg_typed(input: ParseStream) -> Result<PatType> {
// Hack to parse pre-2018 syntax in
// test/ui/rfc-2565-param-attrs/param-attrs-pretty.rs
// because the rest of the test case is valuable.
if input.peek(Ident) && input.peek2(Token![<]) {
let span = input.fork().parse::<Ident>()?.span();
return Ok(PatType {
attrs: Vec::new(),
pat: Box::new(Pat::Wild(PatWild {
attrs: Vec::new(),
                    underscore_token: Token![_](span),
})),
                colon_token: Token![:](span),
ty: input.parse()?,
});
}
Ok(PatType {
attrs: Vec::new(),
pat: input.parse()?,
colon_token: input.parse()?,
ty: Box::new(match input.parse::<Option<Token![...]>>()? {
Some(dot3) => Type::Verbatim(variadic_to_tokens(&dot3)),
None => input.parse()?,
}),
})
}
impl Parse for ItemMod {
fn parse(input: ParseStream) -> Result<Self> {
let outer_attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
let mod_token: Token![mod] = input.parse()?;
let ident: Ident = input.parse()?;
let lookahead = input.lookahead1();
if lookahead.peek(Token![;]) {
Ok(ItemMod {
attrs: outer_attrs,
vis,
mod_token,
ident,
content: None,
semi: Some(input.parse()?),
})
} else if lookahead.peek(token::Brace) {
let content;
let brace_token = braced!(content in input);
let inner_attrs = content.call(Attribute::parse_inner)?;
let mut items = Vec::new();
while !content.is_empty() {
items.push(content.parse()?);
}
Ok(ItemMod {
attrs: private::attrs(outer_attrs, inner_attrs),
vis,
mod_token,
ident,
content: Some((brace_token, items)),
semi: None,
})
} else {
Err(lookahead.error())
}
}
}
impl Parse for ItemForeignMod {
fn parse(input: ParseStream) -> Result<Self> {
let outer_attrs = input.call(Attribute::parse_outer)?;
let abi: Abi = input.parse()?;
let content;
let brace_token = braced!(content in input);
let inner_attrs = content.call(Attribute::parse_inner)?;
let mut items = Vec::new();
while !content.is_empty() {
items.push(content.parse()?);
}
Ok(ItemForeignMod {
attrs: private::attrs(outer_attrs, inner_attrs),
abi,
brace_token,
items,
})
}
}
impl Parse for ForeignItem {
fn parse(input: ParseStream) -> Result<Self> {
let begin = input.fork();
let mut attrs = input.call(Attribute::parse_outer)?;
let ahead = input.fork();
let vis: Visibility = ahead.parse()?;
let lookahead = ahead.lookahead1();
let mut item = if lookahead.peek(Token![fn]) {
input.parse().map(ForeignItem::Fn)
} else if lookahead.peek(Token![static]) {
input.parse().map(ForeignItem::Static)
} else if lookahead.peek(Token![type]) {
parse_flexible_foreign_item_type(begin, input)
} else if vis.is_inherited()
&& (lookahead.peek(Ident)
|| lookahead.peek(Token![self])
|| lookahead.peek(Token![super])
|| lookahead.peek(Token![extern])
|| lookahead.peek(Token![crate])
|| lookahead.peek(Token![::]))
{
input.parse().map(ForeignItem::Macro)
} else {
Err(lookahead.error())
}?;
let item_attrs = match &mut item {
ForeignItem::Fn(item) => &mut item.attrs,
ForeignItem::Static(item) => &mut item.attrs,
ForeignItem::Type(item) => &mut item.attrs,
ForeignItem::Macro(item) => &mut item.attrs,
ForeignItem::Verbatim(_) => return Ok(item),
ForeignItem::__Nonexhaustive => unreachable!(),
};
attrs.extend(item_attrs.drain(..));
*item_attrs = attrs;
Ok(item)
}
}
impl Parse for ForeignItemFn {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
let fn_token: Token![fn] = input.parse()?;
let ident: Ident = input.parse()?;
let generics: Generics = input.parse()?;
let content;
let paren_token = parenthesized!(content in input);
let mut inputs = parse_fn_args(&content)?;
let variadic = pop_variadic(&mut inputs);
let output: ReturnType = input.parse()?;
let where_clause: Option<WhereClause> = input.parse()?;
let semi_token: Token![;] = input.parse()?;
Ok(ForeignItemFn {
attrs,
vis,
sig: Signature {
constness: None,
asyncness: None,
unsafety: None,
abi: None,
fn_token,
ident,
paren_token,
inputs,
output,
variadic,
generics: Generics {
where_clause,
..generics
},
},
semi_token,
})
}
}
impl Parse for ForeignItemStatic {
fn parse(input: ParseStream) -> Result<Self> {
Ok(ForeignItemStatic {
attrs: input.call(Attribute::parse_outer)?,
vis: input.parse()?,
static_token: input.parse()?,
mutability: input.parse()?,
ident: input.parse()?,
colon_token: input.parse()?,
ty: input.parse()?,
semi_token: input.parse()?,
})
}
}
impl Parse for ForeignItemType {
fn parse(input: ParseStream) -> Result<Self> {
Ok(ForeignItemType {
attrs: input.call(Attribute::parse_outer)?,
vis: input.parse()?,
type_token: input.parse()?,
ident: input.parse()?,
semi_token: input.parse()?,
})
}
}
fn parse_flexible_foreign_item_type(
begin: ParseBuffer,
input: ParseStream,
) -> Result<ForeignItem> {
let mut extra = false;
let vis: Visibility = input.parse()?;
let type_token: Token![type] = input.parse()?;
let ident: Ident = input.parse()?;
if input.peek(Token![<]) {
extra = true;
input.parse::<Generics>()?;
}
if input.parse::<Option<Token![:]>>()?.is_some() {
extra = true;
loop {
input.parse::<TypeParamBound>()?;
if input.peek(Token![where]) || input.peek(Token![;]) {
break;
}
input.parse::<Token![+]>()?;
if input.peek(Token![where]) || input.peek(Token![;]) {
break;
}
}
}
if input.peek(Token![where]) {
extra = true;
input.parse::<WhereClause>()?;
}
if input.parse::<Option<Token![=]>>()?.is_some() {
extra = true;
input.parse::<Type>()?;
}
let semi_token: Token![;] = input.parse()?;
if extra {
Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
} else {
Ok(ForeignItem::Type(ForeignItemType {
attrs: Vec::new(),
vis,
type_token,
ident,
semi_token,
}))
}
}
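    // Editor's note: an extern type using unstable extras, for example
    //
    //     extern "C" { type Matrix<T>: Deref<Target = [T]>; }
    //
    // does not fit ForeignItemType, so the whole span from `begin` to the
    // semicolon is preserved as ForeignItem::Verbatim instead of being
    // rejected.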
impl Parse for ForeignItemMacro {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let mac: Macro = input.parse()?;
let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() {
None
} else {
Some(input.parse()?)
};
Ok(ForeignItemMacro {
attrs,
mac,
semi_token,
})
}
}
impl Parse for ItemType {
fn parse(input: ParseStream) -> Result<Self> {
Ok(ItemType {
attrs: input.call(Attribute::parse_outer)?,
vis: input.parse()?,
type_token: input.parse()?,
ident: input.parse()?,
generics: {
let mut generics: Generics = input.parse()?;
generics.where_clause = input.parse()?;
generics
},
eq_token: input.parse()?,
ty: input.parse()?,
semi_token: input.parse()?,
})
}
}
#[cfg(not(feature = "printing"))]
fn item_existential(input: ParseStream) -> Result<TokenStream> {
Err(input.error("existential type is not supported"))
}
#[cfg(feature = "printing")]
fn item_existential(input: ParseStream) -> Result<TokenStream> {
use crate::attr::FilterAttrs;
use quote::{ToTokens, TokenStreamExt};
let attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
let existential_token: existential = input.parse()?;
let type_token: Token![type] = input.parse()?;
let ident: Ident = input.parse()?;
let mut generics: Generics = input.parse()?;
generics.where_clause = input.parse()?;
let colon_token: Token![:] = input.parse()?;
let mut bounds = Punctuated::new();
while !input.peek(Token![;]) {
if !bounds.is_empty() {
bounds.push_punct(input.parse::<Token![+]>()?);
}
bounds.push_value(input.parse::<TypeParamBound>()?);
}
let semi_token: Token![;] = input.parse()?;
let mut tokens = TokenStream::new();
tokens.append_all(attrs.outer());
vis.to_tokens(&mut tokens);
existential_token.to_tokens(&mut tokens);
type_token.to_tokens(&mut tokens);
ident.to_tokens(&mut tokens);
generics.to_tokens(&mut tokens);
generics.where_clause.to_tokens(&mut tokens);
if !bounds.is_empty() {
colon_token.to_tokens(&mut tokens);
bounds.to_tokens(&mut tokens);
}
semi_token.to_tokens(&mut tokens);
Ok(tokens)
}
impl Parse for ItemStruct {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let vis = input.parse::<Visibility>()?;
let struct_token = input.parse::<Token![struct]>()?;
let ident = input.parse::<Ident>()?;
let generics = input.parse::<Generics>()?;
let (where_clause, fields, semi_token) = derive::parsing::data_struct(input)?;
Ok(ItemStruct {
attrs,
vis,
struct_token,
ident,
generics: Generics {
where_clause,
..generics
},
fields,
semi_token,
})
}
}
impl Parse for ItemEnum {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let vis = input.parse::<Visibility>()?;
let enum_token = input.parse::<Token![enum]>()?;
let ident = input.parse::<Ident>()?;
let generics = input.parse::<Generics>()?;
let (where_clause, brace_token, variants) = derive::parsing::data_enum(input)?;
Ok(ItemEnum {
attrs,
vis,
enum_token,
ident,
generics: Generics {
where_clause,
..generics
},
brace_token,
variants,
})
}
}
impl Parse for ItemUnion {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let vis = input.parse::<Visibility>()?;
let union_token = input.parse::<Token![union]>()?;
let ident = input.parse::<Ident>()?;
let generics = input.parse::<Generics>()?;
let (where_clause, fields) = derive::parsing::data_union(input)?;
Ok(ItemUnion {
attrs,
vis,
union_token,
ident,
generics: Generics {
where_clause,
..generics
},
fields,
})
}
}
fn parse_trait_or_trait_alias(input: ParseStream) -> Result<Item> {
let (attrs, vis, trait_token, ident, generics) = parse_start_of_trait_alias(input)?;
let lookahead = input.lookahead1();
if lookahead.peek(token::Brace)
|| lookahead.peek(Token![:])
|| lookahead.peek(Token![where])
{
let unsafety = None;
let auto_token = None;
parse_rest_of_trait(
input,
attrs,
vis,
unsafety,
auto_token,
trait_token,
ident,
generics,
)
.map(Item::Trait)
} else if lookahead.peek(Token![=]) {
parse_rest_of_trait_alias(input, attrs, vis, trait_token, ident, generics)
.map(Item::TraitAlias)
} else {
Err(lookahead.error())
}
}
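    // Editor's note: per the dispatch above, `trait Foo { .. }`,
    // `trait Foo: Bar { .. }`, and `trait Foo where .. { .. }` all parse as
    // ItemTrait, while `trait Foo = Bar + Send;` takes the `=` branch and
    // parses as ItemTraitAlias.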
impl Parse for ItemTrait {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
let unsafety: Option<Token![unsafe]> = input.parse()?;
let auto_token: Option<Token![auto]> = input.parse()?;
let trait_token: Token![trait] = input.parse()?;
let ident: Ident = input.parse()?;
let generics: Generics = input.parse()?;
parse_rest_of_trait(
input,
attrs,
vis,
unsafety,
auto_token,
trait_token,
ident,
generics,
)
}
}
fn parse_rest_of_trait(
input: ParseStream,
attrs: Vec<Attribute>,
vis: Visibility,
unsafety: Option<Token![unsafe]>,
auto_token: Option<Token![auto]>,
trait_token: Token![trait],
ident: Ident,
mut generics: Generics,
) -> Result<ItemTrait> {
let colon_token: Option<Token![:]> = input.parse()?;
let mut supertraits = Punctuated::new();
if colon_token.is_some() {
loop {
supertraits.push_value(input.parse()?);
if input.peek(Token![where]) || input.peek(token::Brace) {
break;
}
supertraits.push_punct(input.parse()?);
if input.peek(Token![where]) || input.peek(token::Brace) {
break;
}
}
}
generics.where_clause = input.parse()?;
let content;
let brace_token = braced!(content in input);
let mut items = Vec::new();
while !content.is_empty() {
items.push(content.parse()?);
}
Ok(ItemTrait {
attrs,
vis,
unsafety,
auto_token,
trait_token,
ident,
generics,
colon_token,
supertraits,
brace_token,
items,
})
}
impl Parse for ItemTraitAlias {
fn parse(input: ParseStream) -> Result<Self> {
let (attrs, vis, trait_token, ident, generics) = parse_start_of_trait_alias(input)?;
parse_rest_of_trait_alias(input, attrs, vis, trait_token, ident, generics)
}
}
fn parse_start_of_trait_alias(
input: ParseStream,
) -> Result<(Vec<Attribute>, Visibility, Token![trait], Ident, Generics)> {
let attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
let trait_token: Token![trait] = input.parse()?;
let ident: Ident = input.parse()?;
let generics: Generics = input.parse()?;
Ok((attrs, vis, trait_token, ident, generics))
}
fn parse_rest_of_trait_alias(
input: ParseStream,
attrs: Vec<Attribute>,
vis: Visibility,
trait_token: Token![trait],
ident: Ident,
mut generics: Generics,
) -> Result<ItemTraitAlias> {
let eq_token: Token![=] = input.parse()?;
let mut bounds = Punctuated::new();
loop {
if input.peek(Token![where]) || input.peek(Token![;]) {
break;
}
bounds.push_value(input.parse()?);
if input.peek(Token![where]) || input.peek(Token![;]) {
break;
}
bounds.push_punct(input.parse()?);
}
generics.where_clause = input.parse()?;
let semi_token: Token![;] = input.parse()?;
Ok(ItemTraitAlias {
attrs,
vis,
trait_token,
ident,
generics,
eq_token,
bounds,
semi_token,
})
}
impl Parse for TraitItem {
fn parse(input: ParseStream) -> Result<Self> {
let begin = input.fork();
let mut attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
let defaultness: Option<Token![default]> = input.parse()?;
let ahead = input.fork();
let lookahead = ahead.lookahead1();
let mut item = if lookahead.peek(Token![const]) {
ahead.parse::<Token![const]>()?;
let lookahead = ahead.lookahead1();
if lookahead.peek(Ident) {
input.parse().map(TraitItem::Const)
} else if lookahead.peek(Token![async])
|| lookahead.peek(Token![unsafe])
|| lookahead.peek(Token![extern])
|| lookahead.peek(Token![fn])
{
input.parse().map(TraitItem::Method)
} else {
Err(lookahead.error())
}
} else if lookahead.peek(Token![async])
|| lookahead.peek(Token![unsafe])
|| lookahead.peek(Token![extern])
|| lookahead.peek(Token![fn])
{
input.parse().map(TraitItem::Method)
} else if lookahead.peek(Token![type]) {
input.parse().map(TraitItem::Type)
} else if lookahead.peek(Ident)
|| lookahead.peek(Token![self])
|| lookahead.peek(Token![super])
|| lookahead.peek(Token![extern])
|| lookahead.peek(Token![crate])
|| lookahead.peek(Token![::])
{
input.parse().map(TraitItem::Macro)
} else {
Err(lookahead.error())
}?;
match (vis, defaultness) {
(Visibility::Inherited, None) => {}
_ => return Ok(TraitItem::Verbatim(verbatim::between(begin, input))),
}
let item_attrs = match &mut item {
TraitItem::Const(item) => &mut item.attrs,
TraitItem::Method(item) => &mut item.attrs,
TraitItem::Type(item) => &mut item.attrs,
TraitItem::Macro(item) => &mut item.attrs,
TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
};
attrs.extend(item_attrs.drain(..));
*item_attrs = attrs;
Ok(item)
}
}
impl Parse for TraitItemConst {
fn parse(input: ParseStream) -> Result<Self> {
Ok(TraitItemConst {
attrs: input.call(Attribute::parse_outer)?,
const_token: input.parse()?,
ident: input.parse()?,
colon_token: input.parse()?,
ty: input.parse()?,
default: {
if input.peek(Token![=]) {
let eq_token: Token![=] = input.parse()?;
let default: Expr = input.parse()?;
Some((eq_token, default))
} else {
None
}
},
semi_token: input.parse()?,
})
}
}
impl Parse for TraitItemMethod {
fn parse(input: ParseStream) -> Result<Self> {
let outer_attrs = input.call(Attribute::parse_outer)?;
let constness: Option<Token![const]> = input.parse()?;
let asyncness: Option<Token![async]> = input.parse()?;
let unsafety: Option<Token![unsafe]> = input.parse()?;
let abi: Option<Abi> = input.parse()?;
let fn_token: Token![fn] = input.parse()?;
let ident: Ident = input.parse()?;
let generics: Generics = input.parse()?;
let content;
let paren_token = parenthesized!(content in input);
let mut inputs = parse_fn_args(&content)?;
let variadic = pop_variadic(&mut inputs);
let output: ReturnType = input.parse()?;
let where_clause: Option<WhereClause> = input.parse()?;
let lookahead = input.lookahead1();
let (brace_token, inner_attrs, stmts, semi_token) = if lookahead.peek(token::Brace) {
let content;
let brace_token = braced!(content in input);
let inner_attrs = content.call(Attribute::parse_inner)?;
let stmts = content.call(Block::parse_within)?;
(Some(brace_token), inner_attrs, stmts, None)
} else if lookahead.peek(Token![;]) {
let semi_token: Token![;] = input.parse()?;
(None, Vec::new(), Vec::new(), Some(semi_token))
} else {
return Err(lookahead.error());
};
Ok(TraitItemMethod {
attrs: private::attrs(outer_attrs, inner_attrs),
sig: Signature {
constness,
asyncness,
unsafety,
abi,
fn_token,
ident,
paren_token,
inputs,
output,
variadic,
generics: Generics {
where_clause,
..generics
},
},
default: brace_token.map(|brace_token| Block { brace_token, stmts }),
semi_token,
})
}
}
impl Parse for TraitItemType {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let type_token: Token![type] = input.parse()?;
let ident: Ident = input.parse()?;
let mut generics: Generics = input.parse()?;
let colon_token: Option<Token![:]> = input.parse()?;
let mut bounds = Punctuated::new();
if colon_token.is_some() {
while !input.peek(Token![where]) && !input.peek(Token![=]) && !input.peek(Token![;])
{
if !bounds.is_empty() {
bounds.push_punct(input.parse()?);
}
bounds.push_value(input.parse()?);
}
}
generics.where_clause = input.parse()?;
let default = if input.peek(Token![=]) {
let eq_token: Token![=] = input.parse()?;
let default: Type = input.parse()?;
Some((eq_token, default))
} else {
None
};
let semi_token: Token![;] = input.parse()?;
Ok(TraitItemType {
attrs,
type_token,
ident,
generics,
colon_token,
bounds,
default,
semi_token,
})
}
}
impl Parse for TraitItemMacro {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let mac: Macro = input.parse()?;
let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() {
None
} else {
Some(input.parse()?)
};
Ok(TraitItemMacro {
attrs,
mac,
semi_token,
})
}
}
impl Parse for ItemImpl {
fn parse(input: ParseStream) -> Result<Self> {
let outer_attrs = input.call(Attribute::parse_outer)?;
let defaultness: Option<Token![default]> = input.parse()?;
let unsafety: Option<Token![unsafe]> = input.parse()?;
let impl_token: Token![impl] = input.parse()?;
let has_generics = input.peek(Token![<])
&& (input.peek2(Token![>])
|| input.peek2(Token![#])
|| (input.peek2(Ident) || input.peek2(Lifetime))
&& (input.peek3(Token![:])
|| input.peek3(Token![,])
|| input.peek3(Token![>])));
let generics: Generics = if has_generics {
input.parse()?
} else {
Generics::default()
};
let trait_ = (|| -> Option<_> {
let ahead = input.fork();
let polarity: Option<Token![!]> = ahead.parse().ok()?;
let path: Path = ahead.parse().ok()?;
let for_token: Token![for] = ahead.parse().ok()?;
input.advance_to(&ahead);
Some((polarity, path, for_token))
})();
let self_ty: Type = input.parse()?;
let where_clause: Option<WhereClause> = input.parse()?;
let content;
let brace_token = braced!(content in input);
let inner_attrs = content.call(Attribute::parse_inner)?;
let mut items = Vec::new();
while !content.is_empty() {
items.push(content.parse()?);
}
Ok(ItemImpl {
attrs: private::attrs(outer_attrs, inner_attrs),
defaultness,
unsafety,
impl_token,
generics: Generics {
where_clause,
..generics
},
trait_,
self_ty: Box::new(self_ty),
brace_token,
items,
})
}
}
impl Parse for ImplItem {
fn parse(input: ParseStream) -> Result<Self> {
let begin = input.fork();
let mut attrs = input.call(Attribute::parse_outer)?;
let ahead = input.fork();
let vis: Visibility = ahead.parse()?;
let mut lookahead = ahead.lookahead1();
let defaultness = if lookahead.peek(Token![default]) && !ahead.peek2(Token![!]) {
let defaultness: Token![default] = ahead.parse()?;
lookahead = ahead.lookahead1();
Some(defaultness)
} else {
None
};
let mut item = if lookahead.peek(Token![const]) {
let const_token: Token![const] = ahead.parse()?;
let lookahead = ahead.lookahead1();
if lookahead.peek(Ident) {
input.advance_to(&ahead);
let ident: Ident = input.parse()?;
let colon_token: Token![:] = input.parse()?;
let ty: Type = input.parse()?;
if let Some(eq_token) = input.parse()? {
return Ok(ImplItem::Const(ImplItemConst {
attrs,
vis,
defaultness,
const_token,
ident,
colon_token,
ty,
eq_token,
expr: input.parse()?,
semi_token: input.parse()?,
}));
} else {
input.parse::<Token![;]>()?;
return Ok(ImplItem::Verbatim(verbatim::between(begin, input)));
}
} else if lookahead.peek(Token![unsafe])
|| lookahead.peek(Token![async])
|| lookahead.peek(Token![extern])
|| lookahead.peek(Token![fn])
{
input.parse().map(ImplItem::Method)
} else {
Err(lookahead.error())
}
} else if lookahead.peek(Token![unsafe])
|| lookahead.peek(Token![async])
|| lookahead.peek(Token![extern])
|| lookahead.peek(Token![fn])
{
input.parse().map(ImplItem::Method)
} else if lookahead.peek(Token![type]) {
input.advance_to(&ahead);
let type_token: Token![type] = input.parse()?;
let ident: Ident = input.parse()?;
let mut generics: Generics = input.parse()?;
let colon_token: Option<Token![:]> = input.parse()?;
if colon_token.is_some() {
let mut first = true;
while !input.peek(Token![where])
&& !input.peek(Token![=])
&& !input.peek(Token![;])
{
if !first {
input.parse::<Token![+]>()?;
}
input.parse::<TypeParamBound>()?;
first = false;
}
}
generics.where_clause = input.parse()?;
if let Some(eq_token) = input.parse()? {
return Ok(ImplItem::Type(ImplItemType {
attrs,
vis,
defaultness,
type_token,
ident,
generics,
eq_token,
ty: input.parse()?,
semi_token: input.parse()?,
}));
} else {
input.parse::<Token![;]>()?;
return Ok(ImplItem::Verbatim(verbatim::between(begin, input)));
}
} else if vis.is_inherited() && defaultness.is_none() && lookahead.peek(existential) {
input.call(item_existential).map(ImplItem::Verbatim)
} else if vis.is_inherited()
&& defaultness.is_none()
&& (lookahead.peek(Ident)
|| lookahead.peek(Token![self])
|| lookahead.peek(Token![super])
|| lookahead.peek(Token![extern])
|| lookahead.peek(Token![crate])
|| lookahead.peek(Token![::]))
{
input.parse().map(ImplItem::Macro)
} else {
Err(lookahead.error())
}?;
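            // Descriptive note (added): prepend the outer attributes parsed
            // before dispatch to whatever attributes the chosen sub-parser
            // collected on its own (e.g. inner attributes of a method body).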
{
let item_attrs = match &mut item {
ImplItem::Const(item) => &mut item.attrs,
ImplItem::Method(item) => &mut item.attrs,
ImplItem::Type(item) => &mut item.attrs,
ImplItem::Macro(item) => &mut item.attrs,
ImplItem::Verbatim(_) => return Ok(item),
ImplItem::__Nonexhaustive => unreachable!(),
};
attrs.extend(item_attrs.drain(..));
*item_attrs = attrs;
}
Ok(item)
}
}
impl Parse for ImplItemConst {
fn parse(input: ParseStream) -> Result<Self> {
Ok(ImplItemConst {
attrs: input.call(Attribute::parse_outer)?,
vis: input.parse()?,
defaultness: input.parse()?,
const_token: input.parse()?,
ident: input.parse()?,
colon_token: input.parse()?,
ty: input.parse()?,
eq_token: input.parse()?,
expr: input.parse()?,
semi_token: input.parse()?,
})
}
}
impl Parse for ImplItemMethod {
fn parse(input: ParseStream) -> Result<Self> {
let mut attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
let defaultness: Option<Token![default]> = input.parse()?;
let constness: Option<Token![const]> = input.parse()?;
let asyncness: Option<Token![async]> = input.parse()?;
let unsafety: Option<Token![unsafe]> = input.parse()?;
let abi: Option<Abi> = input.parse()?;
let fn_token: Token![fn] = input.parse()?;
let ident: Ident = input.parse()?;
let generics: Generics = input.parse()?;
let content;
let paren_token = parenthesized!(content in input);
let mut inputs = parse_fn_args(&content)?;
let variadic = pop_variadic(&mut inputs);
let output: ReturnType = input.parse()?;
let where_clause: Option<WhereClause> = input.parse()?;
let block = if let Some(semi) = input.parse::<Option<Token![;]>>()? {
// Accept methods without a body in an impl block because
                // rustc's *parser* does not reject them (the compilation error
                // is emitted after parsing, not during it) and it can be
                // useful for macro DSLs.
let mut punct = Punct::new(';', Spacing::Alone);
punct.set_span(semi.span);
let tokens = TokenStream::from_iter(vec![TokenTree::Punct(punct)]);
Block {
brace_token: Brace::default(),
stmts: vec![Stmt::Item(Item::Verbatim(tokens))],
}
} else {
let content;
let brace_token = braced!(content in input);
attrs.extend(content.call(Attribute::parse_inner)?);
Block {
brace_token,
stmts: content.call(Block::parse_within)?,
}
};
Ok(ImplItemMethod {
attrs,
vis,
defaultness,
sig: Signature {
constness,
asyncness,
unsafety,
abi,
fn_token,
ident,
paren_token,
inputs,
output,
variadic,
generics: Generics {
where_clause,
..generics
},
},
block,
})
}
}
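    // Illustrative sketch (not part of syn itself; assumes the crate is
    // built with its "full" and parsing features): the impl above lets a
    // bodyless method inside an impl block parse instead of erroring, e.g.
    //
    //     let m: syn::ImplItemMethod = syn::parse_str("fn f(&self);").unwrap();
    //
    // The missing body is stored as a verbatim `;` statement, which the
    // printing code later round-trips back to a bare semicolon.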
impl Parse for ImplItemType {
fn parse(input: ParseStream) -> Result<Self> {
Ok(ImplItemType {
attrs: input.call(Attribute::parse_outer)?,
vis: input.parse()?,
defaultness: input.parse()?,
type_token: input.parse()?,
ident: input.parse()?,
generics: {
let mut generics: Generics = input.parse()?;
generics.where_clause = input.parse()?;
generics
},
eq_token: input.parse()?,
ty: input.parse()?,
semi_token: input.parse()?,
})
}
}
impl Parse for ImplItemMacro {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let mac: Macro = input.parse()?;
let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() {
None
} else {
Some(input.parse()?)
};
Ok(ImplItemMacro {
attrs,
mac,
semi_token,
})
}
}
impl Visibility {
fn is_inherited(&self) -> bool {
match *self {
Visibility::Inherited => true,
_ => false,
}
}
}
impl MacroDelimiter {
fn is_brace(&self) -> bool {
match *self {
MacroDelimiter::Brace(_) => true,
MacroDelimiter::Paren(_) | MacroDelimiter::Bracket(_) => false,
}
}
}
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use proc_macro2::TokenStream;
use quote::{ToTokens, TokenStreamExt};
use crate::attr::FilterAttrs;
use crate::print::TokensOrDefault;
use crate::punctuated::Pair;
impl ToTokens for ItemExternCrate {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.extern_token.to_tokens(tokens);
self.crate_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
if let Some((as_token, rename)) = &self.rename {
as_token.to_tokens(tokens);
rename.to_tokens(tokens);
}
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ItemUse {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.use_token.to_tokens(tokens);
self.leading_colon.to_tokens(tokens);
self.tree.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ItemStatic {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.static_token.to_tokens(tokens);
self.mutability.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.colon_token.to_tokens(tokens);
self.ty.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.expr.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ItemConst {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.const_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.colon_token.to_tokens(tokens);
self.ty.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.expr.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ItemFn {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.sig.to_tokens(tokens);
self.block.brace_token.surround(tokens, |tokens| {
tokens.append_all(self.attrs.inner());
tokens.append_all(&self.block.stmts);
});
}
}
impl ToTokens for ItemMod {
fn | (&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.mod_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
if let Some((brace, items)) = &self.content {
brace.surround(tokens, |tokens| {
tokens.append_all(self.attrs.inner());
tokens.append_all(items);
});
} else {
TokensOrDefault(&self.semi).to_tokens(tokens);
}
}
}
impl ToTokens for ItemForeignMod {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.abi.to_tokens(tokens);
self.brace_token.surround(tokens, |tokens| {
tokens.append_all(self.attrs.inner());
tokens.append_all(&self.items);
});
}
}
impl ToTokens for ItemType {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.type_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
self.generics.where_clause.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.ty.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ItemEnum {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.enum_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
self.generics.where_clause.to_tokens(tokens);
self.brace_token.surround(tokens, |tokens| {
self.variants.to_tokens(tokens);
});
}
}
impl ToTokens for ItemStruct {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.struct_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
match &self.fields {
Fields::Named(fields) => {
self.generics.where_clause.to_tokens(tokens);
fields.to_tokens(tokens);
}
Fields::Unnamed(fields) => {
fields.to_tokens(tokens);
self.generics.where_clause.to_tokens(tokens);
TokensOrDefault(&self.semi_token).to_tokens(tokens);
}
Fields::Unit => {
self.generics.where_clause.to_tokens(tokens);
TokensOrDefault(&self.semi_token).to_tokens(tokens);
}
}
}
}
impl ToTokens for ItemUnion {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.union_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
self.generics.where_clause.to_tokens(tokens);
self.fields.to_tokens(tokens);
}
}
impl ToTokens for ItemTrait {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.unsafety.to_tokens(tokens);
self.auto_token.to_tokens(tokens);
self.trait_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
if !self.supertraits.is_empty() {
TokensOrDefault(&self.colon_token).to_tokens(tokens);
self.supertraits.to_tokens(tokens);
}
self.generics.where_clause.to_tokens(tokens);
self.brace_token.surround(tokens, |tokens| {
tokens.append_all(&self.items);
});
}
}
impl ToTokens for ItemTraitAlias {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.trait_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.bounds.to_tokens(tokens);
self.generics.where_clause.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ItemImpl {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.defaultness.to_tokens(tokens);
self.unsafety.to_tokens(tokens);
self.impl_token.to_tokens(tokens);
self.generics.to_tokens(tokens);
if let Some((polarity, path, for_token)) = &self.trait_ {
polarity.to_tokens(tokens);
path.to_tokens(tokens);
for_token.to_tokens(tokens);
}
self.self_ty.to_tokens(tokens);
self.generics.where_clause.to_tokens(tokens);
self.brace_token.surround(tokens, |tokens| {
tokens.append_all(self.attrs.inner());
tokens.append_all(&self.items);
});
}
}
impl ToTokens for ItemMacro {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.mac.path.to_tokens(tokens);
self.mac.bang_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
match &self.mac.delimiter {
MacroDelimiter::Paren(paren) => {
paren.surround(tokens, |tokens| self.mac.tokens.to_tokens(tokens));
}
MacroDelimiter::Brace(brace) => {
brace.surround(tokens, |tokens| self.mac.tokens.to_tokens(tokens));
}
MacroDelimiter::Bracket(bracket) => {
bracket.surround(tokens, |tokens| self.mac.tokens.to_tokens(tokens));
}
}
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ItemMacro2 {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.macro_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.rules.to_tokens(tokens);
}
}
impl ToTokens for UsePath {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.ident.to_tokens(tokens);
self.colon2_token.to_tokens(tokens);
self.tree.to_tokens(tokens);
}
}
impl ToTokens for UseName {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.ident.to_tokens(tokens);
}
}
impl ToTokens for UseRename {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.ident.to_tokens(tokens);
self.as_token.to_tokens(tokens);
self.rename.to_tokens(tokens);
}
}
impl ToTokens for UseGlob {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.star_token.to_tokens(tokens);
}
}
impl ToTokens for UseGroup {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.brace_token.surround(tokens, |tokens| {
self.items.to_tokens(tokens);
});
}
}
impl ToTokens for TraitItemConst {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.const_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.colon_token.to_tokens(tokens);
self.ty.to_tokens(tokens);
if let Some((eq_token, default)) = &self.default {
eq_token.to_tokens(tokens);
default.to_tokens(tokens);
}
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for TraitItemMethod {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.sig.to_tokens(tokens);
match &self.default {
Some(block) => {
block.brace_token.surround(tokens, |tokens| {
tokens.append_all(self.attrs.inner());
tokens.append_all(&block.stmts);
});
}
None => {
TokensOrDefault(&self.semi_token).to_tokens(tokens);
}
}
}
}
impl ToTokens for TraitItemType {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.type_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
if !self.bounds.is_empty() {
TokensOrDefault(&self.colon_token).to_tokens(tokens);
self.bounds.to_tokens(tokens);
}
self.generics.where_clause.to_tokens(tokens);
if let Some((eq_token, default)) = &self.default {
eq_token.to_tokens(tokens);
default.to_tokens(tokens);
}
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for TraitItemMacro {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.mac.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ImplItemConst {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.defaultness.to_tokens(tokens);
self.const_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.colon_token.to_tokens(tokens);
self.ty.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.expr.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ImplItemMethod {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.defaultness.to_tokens(tokens);
self.sig.to_tokens(tokens);
if self.block.stmts.len() == 1 {
if let Stmt::Item(Item::Verbatim(verbatim)) = &self.block.stmts[0] {
if verbatim.to_string() == ";" {
verbatim.to_tokens(tokens);
return;
}
}
}
self.block.brace_token.surround(tokens, |tokens| {
tokens.append_all(self.attrs.inner());
tokens.append_all(&self.block.stmts);
});
}
}
impl ToTokens for ImplItemType {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.defaultness.to_tokens(tokens);
self.type_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
self.generics.where_clause.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.ty.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ImplItemMacro {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.mac.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ForeignItemFn {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.sig.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ForeignItemStatic {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.static_token.to_tokens(tokens);
self.mutability.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.colon_token.to_tokens(tokens);
self.ty.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ForeignItemType {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.vis.to_tokens(tokens);
self.type_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
impl ToTokens for ForeignItemMacro {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.mac.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
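    // Descriptive note (added): prints one function argument. A C-style
    // variadic `...` is modeled as `Type::Verbatim`/`Pat::Verbatim` tokens;
    // returns true when the argument printed was that variadic so that
    // `Signature::to_tokens` below does not emit the variadic twice.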
fn maybe_variadic_to_tokens(arg: &FnArg, tokens: &mut TokenStream) -> bool {
let arg = match arg {
FnArg::Typed(arg) => arg,
FnArg::Receiver(receiver) => {
receiver.to_tokens(tokens);
return false;
}
};
match arg.ty.as_ref() {
Type::Verbatim(ty) if ty.to_string() == "..." => {
match arg.pat.as_ref() {
Pat::Verbatim(pat) if pat.to_string() == "..." => {
tokens.append_all(arg.attrs.outer());
pat.to_tokens(tokens);
}
_ => arg.to_tokens(tokens),
}
true
}
_ => {
arg.to_tokens(tokens);
false
}
}
}
impl ToTokens for Signature {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.constness.to_tokens(tokens);
self.asyncness.to_tokens(tokens);
self.unsafety.to_tokens(tokens);
self.abi.to_tokens(tokens);
self.fn_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
self.paren_token.surround(tokens, |tokens| {
let mut last_is_variadic = false;
for input in self.inputs.pairs() {
match input {
Pair::Punctuated(input, comma) => {
maybe_variadic_to_tokens(input, tokens);
comma.to_tokens(tokens);
}
Pair::End(input) => {
last_is_variadic = maybe_variadic_to_tokens(input, tokens);
}
}
}
if self.variadic.is_some() && !last_is_variadic {
if !self.inputs.empty_or_trailing() {
<Token![,]>::default().to_tokens(tokens);
}
self.variadic.to_tokens(tokens);
}
});
self.output.to_tokens(tokens);
self.generics.where_clause.to_tokens(tokens);
}
}
impl ToTokens for Receiver {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
if let Some((ampersand, lifetime)) = &self.reference {
ampersand.to_tokens(tokens);
lifetime.to_tokens(tokens);
}
self.mutability.to_tokens(tokens);
self.self_token.to_tokens(tokens);
}
}
}
| to_tokens |
buffer.rs | use futures_core::stream::Stream;
use futures_core::task::{self, Poll};
use futures_sink::Sink;
use pin_utils::{unsafe_pinned, unsafe_unpinned};
use std::collections::VecDeque;
use std::marker::Unpin;
use std::mem::PinMut;
/// Sink for the `Sink::buffer` combinator, which buffers up to some fixed
/// number of values when the underlying sink is unable to accept them.
#[derive(Debug)]
#[must_use = "sinks do nothing unless polled"]
pub struct Buffer<Si: Sink> {
sink: Si,
buf: VecDeque<Si::SinkItem>,
// Track capacity separately from the `VecDeque`, which may be rounded up
capacity: usize,
}
impl<Si: Sink + Unpin> Unpin for Buffer<Si> {}
impl<Si: Sink> Buffer<Si> {
unsafe_pinned!(sink: Si);
unsafe_unpinned!(buf: VecDeque<Si::SinkItem>);
unsafe_unpinned!(capacity: usize);
pub(super) fn new(sink: Si, capacity: usize) -> Buffer<Si> {
Buffer {
sink,
buf: VecDeque::with_capacity(capacity),
capacity,
}
}
/// Get a shared reference to the inner sink.
pub fn get_ref(&self) -> &Si {
&self.sink
}
fn try_empty_buffer(
self: &mut PinMut<Self>,
cx: &mut task::Context
) -> Poll<Result<(), Si::SinkError>> {
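        // Descriptive note (added): drain buffered items into the inner sink
        // for as long as it reports readiness; `try_ready!` propagates
        // `Pending` or an error as soon as the sink stops accepting items.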
try_ready!(self.sink().poll_ready(cx));
while let Some(item) = self.buf().pop_front() {
if let Err(e) = self.sink().start_send(item) {
return Poll::Ready(Err(e));
}
if !self.buf.is_empty() {
try_ready!(self.sink().poll_ready(cx));
}
}
Poll::Ready(Ok(()))
}
}
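// Illustrative usage sketch (not part of this file): a `Buffer` is normally
// obtained through the `SinkExt::buffer` combinator rather than constructed
// directly, e.g. `let buffered = some_sink.buffer(10);`, which holds up to
// ten pending items while the inner sink is not ready.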
// Forwarding impl of Stream from the underlying sink
impl<S> Stream for Buffer<S> where S: Sink + Stream {
type Item = S::Item;
fn | (mut self: PinMut<Self>, cx: &mut task::Context) -> Poll<Option<S::Item>> {
self.sink().poll_next(cx)
}
}
impl<Si: Sink> Sink for Buffer<Si> {
type SinkItem = Si::SinkItem;
type SinkError = Si::SinkError;
fn poll_ready(
mut self: PinMut<Self>,
cx: &mut task::Context,
) -> Poll<Result<(), Self::SinkError>> {
if *self.capacity() == 0 {
return self.sink().poll_ready(cx);
}
if let Poll::Ready(Err(e)) = self.try_empty_buffer(cx) {
return Poll::Ready(Err(e));
}
if self.buf().len() >= *self.capacity() {
Poll::Pending
} else {
Poll::Ready(Ok(()))
}
}
fn start_send(
mut self: PinMut<Self>,
item: Self::SinkItem,
) -> Result<(), Self::SinkError> {
if *self.capacity() == 0 {
self.sink().start_send(item)
} else {
self.buf().push_back(item);
Ok(())
}
}
fn poll_flush(
mut self: PinMut<Self>,
cx: &mut task::Context,
) -> Poll<Result<(), Self::SinkError>> {
try_ready!(self.try_empty_buffer(cx));
debug_assert!(self.buf().is_empty());
self.sink().poll_flush(cx)
}
fn poll_close(
mut self: PinMut<Self>,
cx: &mut task::Context,
) -> Poll<Result<(), Self::SinkError>> {
try_ready!(self.try_empty_buffer(cx));
debug_assert!(self.buf().is_empty());
self.sink().poll_close(cx)
}
}
| poll_next |
deserialization.ts | import * as wasm from '../pkg/node_sdk_helpers';
import { Buffer } from 'buffer/';
/**
* Given a contract's raw state, its name and its schema, return the state as a JSON object.
* The return type is any, and the actual type should be determined by using the schema.
*/
export function | (
contractName: string,
schema: Buffer,
state: Buffer
// eslint-disable-next-line @typescript-eslint/no-explicit-any
): any {
const serializedState = wasm.deserializeState(
contractName,
state.toString('hex'),
schema.toString('hex')
);
try {
return JSON.parse(serializedState);
} catch (e) {
throw new Error(
'unable to deserialize state, due to: ' + serializedState
); // In this case serializedState is the error message from the rust module
}
}
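// Illustrative usage sketch (the contract name and hex strings are
// hypothetical, not part of this module):
//
//   const state = deserializeContractState(
//       'my-contract',
//       Buffer.from(schemaHex, 'hex'),
//       Buffer.from(stateHex, 'hex')
//   );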
| deserializeContractState |
test_converter_unittest.py | # Copyright (C) 2013 Adobe Systems Incorporated. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
# THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import os
import re
import unittest
from webkitpy.common.host import Host
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.common.webkit_finder import WebKitFinder
from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup
from webkitpy.w3c.test_converter import _W3CTestConverter
DUMMY_FILENAME = 'dummy.html'
DUMMY_PATH = 'dummy/testharness/path'
class W3CTestConverterTest(unittest.TestCase):
# FIXME: When we move to using a MockHost, this method should be removed, since
# then we can just pass in a dummy dir path
def fake_dir_path(self, dirname):
filesystem = Host().filesystem
webkit_root = WebKitFinder(filesystem).webkit_base()
return filesystem.abspath(filesystem.join(webkit_root, "LayoutTests", "css", dirname))
def test_read_prefixed_property_list(self):
""" Tests that the current list of properties requiring the -webkit- prefix load correctly """
# FIXME: We should be passing in a MockHost here ...
converter = _W3CTestConverter(DUMMY_PATH, DUMMY_FILENAME, None)
prop_list = converter.prefixed_properties
self.assertTrue(prop_list, 'No prefixed properties found')
def test_convert_for_webkit_nothing_to_convert(self):
""" Tests convert_for_webkit() using a basic test that has nothing to convert """
test_html = """<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>CSS Test: DESCRIPTION OF TEST</title>
<link rel="author" title="NAME_OF_AUTHOR"
href="mailto:EMAIL OR http://CONTACT_PAGE"/>
<link rel="help" href="RELEVANT_SPEC_SECTION"/>
<meta name="assert" content="TEST ASSERTION"/>
<style type="text/css"><![CDATA[
CSS FOR TEST
]]></style>
</head>
<body>
CONTENT OF TEST
</body>
</html>
"""
converter = _W3CTestConverter(DUMMY_PATH, DUMMY_FILENAME, None)
oc = OutputCapture()
oc.capture_output()
try:
converter.feed(test_html)
converter.close()
converted = converter.output()
finally:
oc.restore_output()
self.verify_no_conversion_happened(converted, test_html)
def test_convert_for_webkit_harness_only(self):
""" Tests convert_for_webkit() using a basic JS test that uses testharness.js only and has no prefixed properties """
test_html = """<head>
<link href="/resources/testharness.css" rel="stylesheet" type="text/css">
<script src="/resources/testharness.js"></script>
</head>
"""
fake_dir_path = self.fake_dir_path("harnessonly")
converter = _W3CTestConverter(fake_dir_path, DUMMY_FILENAME, None)
converter.feed(test_html)
converter.close()
converted = converter.output()
self.verify_conversion_happened(converted)
self.verify_test_harness_paths(converter, converted[1], fake_dir_path, 1, 1)
self.verify_prefixed_properties(converted, [])
def test_convert_for_webkit_properties_only(self):
""" Tests convert_for_webkit() using a test that has 2 prefixed properties: 1 in a style block + 1 inline style """
test_html = """<html>
<head>
<link href="/resources/testharness.css" rel="stylesheet" type="text/css">
<script src="/resources/testharness.js"></script>
<style type="text/css">
#block1 { @test0@: propvalue; }
</style>
</head>
<body>
<div id="elem1" style="@test1@: propvalue;"></div>
</body>
</html>
"""
fake_dir_path = self.fake_dir_path('harnessandprops')
converter = _W3CTestConverter(fake_dir_path, DUMMY_FILENAME, None)
test_content = self.generate_test_content(converter.prefixed_properties, 1, test_html)
oc = OutputCapture()
oc.capture_output()
try:
converter.feed(test_content[1])
converter.close()
converted = converter.output()
finally:
oc.restore_output()
self.verify_conversion_happened(converted)
self.verify_test_harness_paths(converter, converted[1], fake_dir_path, 1, 1)
self.verify_prefixed_properties(converted, test_content[0])
def test_convert_for_webkit_harness_and_properties(self):
""" Tests convert_for_webkit() using a basic JS test that uses testharness.js and testharness.css and has 4 prefixed properties: 3 in a style block + 1 inline style """
test_html = """<html>
<head>
<link href="/resources/testharness.css" rel="stylesheet" type="text/css">
<script src="/resources/testharness.js"></script>
<style type="text/css">
#block1 { @test0@: propvalue; } | </style>
</head>
<body>
<div id="elem1" style="@test3@: propvalue;"></div>
</body>
</html>
"""
fake_dir_path = self.fake_dir_path('harnessandprops')
converter = _W3CTestConverter(fake_dir_path, DUMMY_FILENAME, None)
oc = OutputCapture()
oc.capture_output()
try:
test_content = self.generate_test_content(converter.prefixed_properties, 2, test_html)
converter.feed(test_content[1])
converter.close()
converted = converter.output()
finally:
oc.restore_output()
self.verify_conversion_happened(converted)
self.verify_test_harness_paths(converter, converted[1], fake_dir_path, 1, 1)
self.verify_prefixed_properties(converted, test_content[0])
def test_convert_test_harness_paths(self):
""" Tests convert_testharness_paths() with a test that uses all three testharness files """
test_html = """<head>
<link href="/resources/testharness.css" rel="stylesheet" type="text/css">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
</head>
"""
fake_dir_path = self.fake_dir_path('testharnesspaths')
converter = _W3CTestConverter(fake_dir_path, DUMMY_FILENAME, None)
oc = OutputCapture()
oc.capture_output()
try:
converter.feed(test_html)
converter.close()
converted = converter.output()
finally:
oc.restore_output()
self.verify_conversion_happened(converted)
self.verify_test_harness_paths(converter, converted[1], fake_dir_path, 2, 1)
def test_convert_vendor_prefix_js_paths(self):
test_html = """<head>
<script src="/common/vendor-prefix.js">
</head>
"""
fake_dir_path = self.fake_dir_path('adapterjspaths')
converter = _W3CTestConverter(fake_dir_path, DUMMY_FILENAME, None)
oc = OutputCapture()
oc.capture_output()
try:
converter.feed(test_html)
converter.close()
converted = converter.output()
finally:
oc.restore_output()
new_html = BeautifulSoup(converted[1])
# Verify the original paths are gone, and the new paths are present.
orig_path_pattern = re.compile('\"/common/vendor-prefix.js')
self.assertEquals(len(new_html.findAll(src=orig_path_pattern)), 0, 'vendor-prefix.js path was not converted')
resources_dir = converter.path_from_webkit_root("LayoutTests", "resources")
new_relpath = os.path.relpath(resources_dir, fake_dir_path)
relpath_pattern = re.compile(new_relpath)
self.assertEquals(len(new_html.findAll(src=relpath_pattern)), 1, 'vendor-prefix.js relative path not correct')
def test_convert_prefixed_properties(self):
""" Tests convert_prefixed_properties() file that has 20 properties requiring the -webkit- prefix:
10 in one style block + 5 in another style
block + 5 inline styles, including one with multiple prefixed properties.
The properties in the test content are in all sorts of wack formatting.
"""
test_html = """<html>
<style type="text/css"><![CDATA[
.block1 {
width: 300px;
height: 300px
}
.block2 {
@test0@: propvalue;
}
.block3{@test1@: propvalue;}
.block4 { @test2@:propvalue; }
.block5{ @test3@ :propvalue; }
#block6 { @test4@ : propvalue; }
#block7
{
@test5@: propvalue;
}
#block8 { @test6@: propvalue; }
#block9:pseudo
{
@test7@: propvalue;
@test8@: propvalue propvalue propvalue;
}
]]></style>
</head>
<body>
<div id="elem1" style="@test9@: propvalue;"></div>
<div id="elem2" style="propname: propvalue; @test10@ : propvalue; propname:propvalue;"></div>
<div id="elem2" style="@test11@: propvalue; @test12@ : propvalue; @test13@ :propvalue;"></div>
<div id="elem3" style="@test14@:propvalue"></div>
</body>
<style type="text/css"><![CDATA[
.block10{ @test15@: propvalue; }
.block11{ @test16@: propvalue; }
.block12{ @test17@: propvalue; }
#block13:pseudo
{
@test18@: propvalue;
@test19@: propvalue;
}
]]></style>
</html>
"""
converter = _W3CTestConverter(DUMMY_PATH, DUMMY_FILENAME, None)
test_content = self.generate_test_content(converter.prefixed_properties, 20, test_html)
oc = OutputCapture()
oc.capture_output()
try:
converter.feed(test_content[1])
converter.close()
converted = converter.output()
finally:
oc.restore_output()
self.verify_conversion_happened(converted)
self.verify_prefixed_properties(converted, test_content[0])
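    # Illustrative sketch (added; the property name is hypothetical): the
    # '@testN@' tokens above are filled by generate_test_content() with the
    # unprefixed form of a prefixed property, e.g.
    #
    #   html = '.block2 { @test0@: propvalue; }'
    #   html.replace('@test0@', '-webkit-transform'.replace('-webkit-', ''))
    #   # -> '.block2 { transform: propvalue; }'
    #
    # The converter is then expected to restore the '-webkit-' prefix.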
def test_hides_all_instructions_for_manual_testers(self):
test_html = """<body>
<h1 class="instructions">Hello manual tester!</h1>
<p class="instructions some_other_class">This is how you run this test.</p>
<p style="willbeoverwritten" class="instructions">...</p>
<doesntmatterwhichtagitis class="some_other_class instructions">...</p>
<p>Legit content may contain the instructions string</p>
</body>
"""
expected_test_html = """<body>
<h1 class="instructions" style="display:none">Hello manual tester!</h1>
<p class="instructions some_other_class" style="display:none">This is how you run this test.</p>
<p class="instructions" style="display:none">...</p>
<doesntmatterwhichtagitis class="some_other_class instructions" style="display:none">...</p>
<p>Legit content may contain the instructions string</p>
</body>
"""
converter = _W3CTestConverter(DUMMY_PATH, DUMMY_FILENAME, None)
oc = OutputCapture()
oc.capture_output()
try:
converter.feed(test_html)
converter.close()
converted = converter.output()
finally:
oc.restore_output()
self.assertEqual(converted[1], expected_test_html)
def test_convert_attributes_if_needed(self):
""" Tests convert_attributes_if_needed() using a reference file that has some relative src paths """
test_html = """<html>
<head>
<script src="../../some-script.js"></script>
<style src="../../../some-style.css"></style>
</head>
<body>
<img src="../../../../some-image.jpg">
</body>
</html>
"""
test_reference_support_info = {'reference_relpath': '../', 'files': ['../../some-script.js', '../../../some-style.css', '../../../../some-image.jpg'], 'elements': ['script', 'style', 'img']}
converter = _W3CTestConverter(DUMMY_PATH, DUMMY_FILENAME, test_reference_support_info)
oc = OutputCapture()
oc.capture_output()
try:
converter.feed(test_html)
converter.close()
converted = converter.output()
finally:
oc.restore_output()
self.verify_conversion_happened(converted)
self.verify_reference_relative_paths(converted, test_reference_support_info)
def verify_conversion_happened(self, converted):
self.assertTrue(converted, "conversion didn't happen")
def verify_no_conversion_happened(self, converted, original):
self.assertEqual(converted[1], original, 'test should not have been converted')
def verify_test_harness_paths(self, converter, converted, test_path, num_src_paths, num_href_paths):
if isinstance(converted, basestring):
converted = BeautifulSoup(converted)
resources_dir = converter.path_from_webkit_root("LayoutTests", "resources")
# Verify the original paths are gone, and the new paths are present.
orig_path_pattern = re.compile('\"/resources/testharness')
self.assertEquals(len(converted.findAll(src=orig_path_pattern)), 0, 'testharness src path was not converted')
self.assertEquals(len(converted.findAll(href=orig_path_pattern)), 0, 'testharness href path was not converted')
new_relpath = os.path.relpath(resources_dir, test_path)
relpath_pattern = re.compile(new_relpath)
self.assertEquals(len(converted.findAll(src=relpath_pattern)), num_src_paths, 'testharness src relative path not correct')
self.assertEquals(len(converted.findAll(href=relpath_pattern)), num_href_paths, 'testharness href relative path not correct')
def verify_prefixed_properties(self, converted, test_properties):
self.assertEqual(len(set(converted[0])), len(set(test_properties)), 'Incorrect number of properties converted')
for test_prop in test_properties:
self.assertTrue((test_prop in converted[1]), 'Property ' + test_prop + ' not found in converted doc')
def verify_reference_relative_paths(self, converted, reference_support_info):
idx = 0
for path in reference_support_info['files']:
expected_path = re.sub(reference_support_info['reference_relpath'], '', path, 1)
element = reference_support_info['elements'][idx]
expected_tag = '<' + element + ' src=\"' + expected_path + '\">'
            self.assertTrue(expected_tag in converted[1], 'relative path ' + path + ' was not converted correctly')
idx += 1
def generate_test_content(self, full_property_list, num_test_properties, html):
"""Inserts properties requiring a -webkit- prefix into the content, replacing \'@testXX@\' with a property."""
test_properties = []
count = 0
while count < num_test_properties:
test_properties.append(full_property_list[count])
count += 1
# Replace the tokens in the testhtml with the test properties. Walk backward
# through the list to replace the double-digit tokens first
index = len(test_properties) - 1
while index >= 0:
# Use the unprefixed version
test_prop = test_properties[index].replace('-webkit-', '')
# Replace the token
html = html.replace('@test' + str(index) + '@', test_prop)
index -= 1
return (test_properties, html) | #block2 { @test1@: propvalue; }
#block3 { @test2@: propvalue; }
|
tx_test.py | import os
import pytest
import re
import subprocess
import time
from afdko.fdkutils import (
get_temp_file_path,
get_temp_dir_path,
)
from test_utils import (
get_input_path,
get_bad_input_path,
get_expected_path,
generate_ps_dump,
)
from runner import main as runner
from differ import main as differ, SPLIT_MARKER
TOOL = 'tx'
CMD = ['-t', TOOL]
def _get_extension(in_format):
if 'ufo' in in_format:
return '.ufo'
elif in_format == 'type1':
return '.pfa'
return '.' + in_format
PDF_SKIP = [
'/Creator' + SPLIT_MARKER +
'/Producer' + SPLIT_MARKER +
'/CreationDate' + SPLIT_MARKER +
'/ModDate' + SPLIT_MARKER +
'(Date:' + SPLIT_MARKER +
'(Time:',
]
PDF_SKIP_REGEX = [
'^.+30.00 Td',
'^.+0.00 Td',
]
PS_SKIP = [
'0 740 moveto (Filename:' + SPLIT_MARKER +
'560 (Date:' + SPLIT_MARKER +
'560 (Time:'
]
PS_SKIP2 = [
'%ADOt1write:'
]
PFA_SKIP = [
'%ADOt1write:' + SPLIT_MARKER +
'%%Copyright:' + SPLIT_MARKER
]
# -----------
# Basic tests
# -----------
@pytest.mark.parametrize('arg', ['-h', '-v', '-u'])
def test_exit_known_option(arg):
assert subprocess.call([TOOL, arg]) == 0
@pytest.mark.parametrize('arg', ['-bar', '-foo'])
def test_exit_unknown_option(arg):
assert subprocess.call([TOOL, arg]) == 1
@pytest.mark.parametrize('pth', [
['invalid_path'], # no such file or directory
[get_temp_file_path()], # end of file (not a font)
[get_input_path('type1.pfa'), 'a', 'b'], # too many file args
])
def test_exit_invalid_path_or_font(pth):
assert subprocess.call([TOOL] + pth) == 1
# -------------
# Options tests
# -------------
@pytest.mark.parametrize('args', [
['-s', '-t1'], # '-s' option must be last
['-t1', '-g', '0', '-gx', '1'], # options are mutually exclusive
['-dcf'], # non-CFF font
['-ps', '-1'], # must specify an all-glyph range
['-ufo'], ['-t1', '-pfb'], # must specify a destination path
['-t1', '-usefd'], # bad arg
['-t1', '-decid'], # input font is non-CID
])
def test_option_error_type1_input(args):
font_path = get_input_path('type1.pfa')
assert subprocess.call([TOOL] + args + [font_path]) == 1
@pytest.mark.parametrize('arg', ['-e', '-q', '+q', '-w', '+w', '-lf', '-cr',
'-crlf', '-decid', '-LWFN', '-pfb'])
def test_option_error_type1_clash(arg):
# options -pfb or -LWFN may not be used with other options
pfb = '-pfb' if arg != '-pfb' else '-LWFN'
assert subprocess.call([TOOL, '-t1', pfb, arg]) == 1
@pytest.mark.parametrize('args', [
['-cff', '-l'], ['-cff', '-0'], ['-cff', '-1'], ['-cff', '-2'],
['-cff', '-3'], ['-cff', '-4'], ['-cff', '-5'], ['-cff', '-6'],
['-cff', '-q'], ['-cff', '+q'], ['-cff', '-w'], ['-cff', '+w'],
['-cff', '-pfb'], ['-cff', '-usefd'], ['-cff', '-decid'],
['-cff', '-lf'], ['-cff', '-cr'], ['-cff', '-crlf'], ['-cff', '-LWFN'],
['-t1', '-gn0'], ['-t1', '-gn1'], ['-t1', '-gn2'], ['-t1', '-sa'],
['-t1', '-abs'], ['-t1', '-cefsvg'],
['-t1', '-no_futile'], ['-t1', '-no_opt'], ['-t1', '-d'], ['-t1', '+d'],
['-dcf', '-n'], ['-dcf', '-c'],
['-dump', '-E'], ['-dump', '+E'], ['-dump', '-F'], ['-dump', '+F'],
['-dump', '-O'], ['-dump', '+O'], ['-dump', '-S'], ['-dump', '+S'],
['-dump', '-T'], ['-dump', '+T'], ['-dump', '-V'], ['-dump', '+V'],
['-dump', '-b'], ['-dump', '+b'], ['-dump', '-e'], ['-dump', '+e'],
['-dump', '-Z'], ['-dump', '+Z'],
])
def test_option_error_wrong_mode(args):
assert subprocess.call([TOOL] + args) == 1
@pytest.mark.parametrize('arg', [
'-a', '-e', '-f', '-g', '-i', '-m', '-o', '-p', '-A', '-P', '-U', '-maxs',
'-usefd', '-fd', '-dd', '-sd', '-sr', ['-cef', '-F'], ['-dcf', '-T']
])
def test_option_error_no_args_left(arg):
|
@pytest.mark.parametrize('args', [
['-maxs', 'X'], ['-m', 'X'], ['-e', 'X'], ['-e', '5'],
['-usefd', 'X'], ['-usefd', '-1']
])
def test_option_error_bad_arg(args):
assert subprocess.call([TOOL, '-t1'] + args) == 1
@pytest.mark.parametrize('arg2', ['-sd', '-sr', '-dd'])
@pytest.mark.parametrize('arg1', ['-a', '-f', '-A'])
def test_option_error_no_args_left2(arg1, arg2):
assert subprocess.call([TOOL, '-t1', arg1, arg2]) == 1
@pytest.mark.parametrize('arg2', ['-sd', '-sr', '-dd'])
@pytest.mark.parametrize('arg1', ['-a', '-f'])
def test_option_error_empty_list(arg1, arg2):
empty_dir = get_temp_dir_path()
assert subprocess.call([TOOL, '-t1', arg1, arg2, empty_dir]) == 1
@pytest.mark.parametrize('arg', ['-bc', '-z', '-cmp', '-sha1'])
def test_gone_options_bc(arg):
assert subprocess.call([TOOL, arg]) == 1
@pytest.mark.parametrize('mode, msg', [
('-h', b'tx (Type eXchange) is a test harness'),
('-u', b'tx {[mode][mode options][shared options][files]}*'),
('-afm', b'[-afm options: default none]'),
('-cef', b'[-cef options: default none]'),
('-cff', b'[-cff options: defaults -E, -F, -O, -S, +T, -V, -Z, -b, -d]'),
('-cff2', b'[-cff2 options: defaults -S, -b]'),
('-dcf', b'[-dcf options: defaults -T all, -5]'),
('-dump', b'[-dump options: default -1]'),
('-mtx', b'[-mtx options: default -0]'),
('-path', b'[-path options: default -0]'),
('-pdf', b'[-pdf options: default -0]'),
('-ps', b'[-ps options: default -0]'),
('-svg', b'[-svg options: defaults -lf, -gn0]'),
('-t1',
b'[-t1 options: defaults -0, -l, -E, -S, +T, -V, +q, -w, -e 4, -lf]'),
('-ufo', b'[-ufo options: default none]'),
])
def test_mode_help(mode, msg):
output = subprocess.check_output([TOOL, mode, '-h'])
assert msg in output
@pytest.mark.parametrize('dcf_dump_level', ['0', '1', '5'])
def test_script_file(dcf_dump_level):
font_path = get_input_path('cid.otf')
opts_path = get_temp_file_path()
opts_file_content = f'\n# foo\n # bar\r -{dcf_dump_level}\t"{font_path}"'
with open(opts_path, 'a') as fp:
fp.write(opts_file_content)
actual_path = runner(CMD + ['-s', '-a', '-o', 'dcf', 's', '-f', opts_path])
expected_path = get_expected_path(f'cid_dcf_{dcf_dump_level}.txt')
assert differ([expected_path, actual_path])
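# Illustrative note (inferred from the test above, not authoritative): a tx
# script file may mix '#' comments, blank lines and assorted whitespace with
# options and quoted font paths, e.g.
#
#   # dump DICT data
#   -1 "/path/to/font.otf"
#
# and is passed to the tool via its -s option.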
def test_nested_script():
# nested scripts not allowed
temp_path = get_temp_file_path()
assert subprocess.call([TOOL, '-s', 'foobar', '-s', temp_path]) == 1
@pytest.mark.parametrize('layer_name', ['', 'None', 'background', 'foobar'])
def test_ufo_altlayer(layer_name):
if not layer_name:
fname = 'processed'
args = []
else:
fname = 'foreground' if layer_name == 'None' else layer_name
args = ['altLayer', f'_{fname}']
actual_path = runner(CMD + ['-s', '-f', 'altlayer.ufo', '-o', '6'] + args)
expected_path = get_expected_path(f'altlayer_{fname}.txt')
assert differ([expected_path, actual_path])
@pytest.mark.parametrize('arg, filename', [
('-a', 'ufo3.t1'),
('-A', 'SourceSansPro-Regular.t1'),
])
def test_a_options(arg, filename):
input_path = get_input_path('ufo3.ufo')
output_path = os.path.join(os.getcwd(), filename)
assert os.path.exists(output_path) is False
subprocess.call([TOOL, '-t1', arg, input_path])
assert os.path.exists(output_path) is True
os.remove(output_path)
def test_o_option():
input_path = get_input_path('ufo3.ufo')
expected_path = get_expected_path('ufo3.pfa')
output_path = get_temp_file_path()
subprocess.call([TOOL, '-t1', '-o', output_path, input_path])
assert differ([expected_path, output_path, '-s', PFA_SKIP[0]])
def test_f_option():
fpath1 = get_input_path('type1.pfa')
fpath2 = get_input_path('cff2_vf.otf')
actual_path = runner(CMD + ['-s', '-o', 'mtx', '3',
'f', f'_{fpath1}', f'_{fpath2}'])
expected_path = get_expected_path('mtx_f_options.txt')
assert differ([expected_path, actual_path])
def test_stdin():
input_path = get_input_path('type1.pfa')
expected_path = get_expected_path('stdin.txt')
output_path = get_temp_file_path()
with open(input_path) as fp:
output = subprocess.check_output([TOOL], stdin=fp)
with open(output_path, 'wb') as fp:
fp.write(output)
assert differ([expected_path, output_path])
@pytest.mark.parametrize('arg', ['0', '-16'])
def test_m_option_success(arg):
# mem_manage() is called 16 times with the command 'tx -m 0 type1.pfa'
input_path = get_input_path('type1.pfa')
assert subprocess.call([TOOL, '-m', arg, input_path]) == 0
# Disabled because of https://github.com/adobe-type-tools/afdko/issues/933
# @pytest.mark.parametrize('arg', range(1, 16))
# def test_m_option_fail(arg):
# input_path = get_input_path('type1.pfa')
# assert subprocess.call([TOOL, '-m', f'-{arg}', input_path]) != 0
@pytest.mark.parametrize('arg, exp_filename', [(None, 'not_removed'),
('-V', 'not_removed'),
('+V', 'removed')])
def test_V_option(arg, exp_filename):
input_path = get_input_path('overlap.pfa')
expected_path = get_expected_path(f'overlap_{exp_filename}.pfa')
output_path = get_temp_file_path()
args = [TOOL, '-t1', '-o', output_path, input_path]
if arg:
args.insert(2, arg)
subprocess.call(args)
assert differ([expected_path, output_path] + ['-s'] + PFA_SKIP)
# -------------
# Convert tests
# -------------
@pytest.mark.parametrize('to_format', [
'ufo2',
'ufo3',
'type1',
'svg',
'mtx',
'afm',
'pdf',
'ps',
'cff',
])
@pytest.mark.parametrize('from_format', [
'ufo2',
'ufo3',
'type1',
])
def test_convert(from_format, to_format):
from_ext = _get_extension(from_format)
to_ext = _get_extension(to_format)
# input filename
from_filename = from_format + from_ext
# expected filename
exp_filename = from_format + to_ext
# runner args
if 'ufo' in to_format:
save_path = get_temp_dir_path('font.ufo')
else:
save_path = get_temp_file_path()
# diff mode
if to_format == 'cff':
diff_mode = ['-m', 'bin']
else:
diff_mode = []
# skip items
regex_skip = []
skip = []
if to_format == 'afm':
skip = ['Comment Creation Date:' + SPLIT_MARKER + 'Comment Copyright']
elif to_format == 'pdf':
skip = PDF_SKIP[:]
regex_skip = PDF_SKIP_REGEX[:]
elif to_format == 'ps':
skip = PS_SKIP[:]
elif to_format == 'type1':
skip = PFA_SKIP[:]
if skip:
skip.insert(0, '-s')
if regex_skip:
for regex in regex_skip:
skip.append('-r')
skip.append(regex)
# format arg fix
if to_format in ('ufo2', 'ufo3'):
format_arg = 'ufo'
elif to_format == 'type1':
format_arg = 't1'
else:
format_arg = to_format
runner(CMD + ['-a', '-f', get_input_path(from_filename), save_path,
'-o', format_arg])
expected_path = get_expected_path(exp_filename)
assert differ([expected_path, save_path] + skip + diff_mode)
def test_cef_cefsvg():
font_path = get_input_path('cff2_vf.otf')
output_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 'cef', 'cefsvg', 'cr', 'gn1', 'abs', 'sa',
'-f', font_path, output_path])
expected_path = get_expected_path('cef_cefsvg_cr.svg')
assert differ([expected_path, output_path])
@pytest.mark.parametrize('file_ext', [
'pfa', 'pfabin', 'pfb', 'lwfn', 'bidf']) # TODO: 'bidf85'
def test_type1_inputs(file_ext):
bidf = '.bidf' if 'bidf' in file_ext else ''
actual_path = runner(CMD + ['-s', '-o', '2', '-f', f'type1.{file_ext}'])
expected_path = get_expected_path(f'type1.dump2{bidf}.txt')
assert differ([expected_path, actual_path, '-s', '## Filename'])
@pytest.mark.parametrize('args', [[], ['U', '_500,500'], ['U', '_0,0', 'n']])
@pytest.mark.parametrize('fname', ['zx', 'zy'])
def test_type1mm_inputs(fname, args):
fname2 = f'.{"".join(args)}' if args else ''
actual_path = runner(CMD + ['-s', '-f', f'{fname}.pfb', '-o', '2'] + args)
expected_path = get_expected_path(f'{fname}.dump2{fname2}.txt')
assert differ([expected_path, actual_path, '-s', '## Filename'])
@pytest.mark.parametrize('fext', ['otf', 'ttf', 'cff', 'cef', 'ttc'])
def test_other_input_formats(fext):
arg = ['y'] if fext == 'ttc' else []
actual_path = runner(CMD + ['-s', '-f', f'font.{fext}', '-o', '3'] + arg)
expected_path = get_expected_path(f'font.{fext}.dump3.txt')
assert differ([expected_path, actual_path, '-s', '## Filename'])
# ----------
# Dump tests
# ----------
@pytest.mark.parametrize('args', [
[],
['0'],
['dump', '0'],
['1'],
['2'],
['3'],
['4'],
['4', 'N'],
['5'],
['6'],
['6', 'd'],
['6', 'n'],
])
@pytest.mark.parametrize('font_filename', ['type1.pfa', 'svg.svg'])
def test_dump_option(args, font_filename):
if any([arg in args for arg in ('4', '5', '6')]):
skip = []
else:
skip = ['-s', '## Filename']
head = font_filename.split('.')[0]
midl = ''.join(args) if args else 'dump1'
if 'dump' not in midl:
midl = f'dump{midl}'
exp_filename = f'{head}.{midl}.txt'
opts = ['-o'] + args if args else []
actual_path = runner(CMD + ['-s', '-f', font_filename] + opts)
expected_path = get_expected_path(exp_filename)
assert differ([expected_path, actual_path] + skip)
@pytest.mark.parametrize('fext', ['pfa', 'ufo'])
def test_dump_flex_op(fext):
fname = 'flex'
actual_path = runner(CMD + ['-s', '-o', '6', '-f', f'{fname}.{fext}'])
expected_path = get_expected_path(f'{fname}.txt')
assert differ([expected_path, actual_path])
# ----------
# CFF2 tests
# ----------
@pytest.mark.parametrize('filename, msg', [
('avar_invalid_table_version',
b'(cfr) invalid avar table version'),
('fvar_invalid_table_version',
b'(cfr) invalid fvar table version'),
('avar_invalid_table_size',
b'(cfr) invalid avar table size'),
('fvar_invalid_table_size',
b'(cfr) invalid fvar table size'),
('fvar_invalid_table_header',
b'(cfr) invalid values in fvar table header'),
('avar_invalid_axis-instance_count-size',
b'(cfr) invalid avar table size or axis/instance count/size'),
('fvar_invalid_axis-instance_count-size',
b'(cfr) invalid fvar table size or axis/instance count/size'),
('avar_axis_value_map_out_of_bounds',
b'(cfr) avar axis value map out of bounds'),
('avar_fvar_axis_mismatch',
b'(cfr) mismatching axis counts in fvar and avar'),
])
def test_varread_errors(filename, msg):
font_path = get_bad_input_path(f'vf_{filename}.otf')
output = subprocess.check_output([TOOL, '-dcf', '-0', font_path],
stderr=subprocess.STDOUT)
assert msg in output
@pytest.mark.parametrize('args, exp_filename', [
([], 'SourceCodeVar-Roman_CFF2'),
(['*S', '*b', 'std'], 'SourceCodeVar-Roman_CFF2_subr'), # subroutinize
])
def test_cff2_extract(args, exp_filename):
# read CFF2 VF, write CFF2 table
font_path = get_input_path('SourceCodeVariable-Roman.otf')
cff2_path = get_temp_file_path()
runner(CMD + ['-a', '-f', font_path, cff2_path, '-o', 'cff2'] + args)
expected_path = get_expected_path(exp_filename)
assert differ([expected_path, cff2_path, '-m', 'bin'])
def test_cff2_sub_dump():
# Dump a subroutinized CFF2 font. This is a J font with 64K glyphs,
# and almost every subr and charstring is a single subr call.
# A good test for problems with charstrings with no endchar operator.
actual_path = runner(CMD + ['-s', '-o', 'dump', '6', 'g', '_21847',
'-f', 'CFF2-serif-sub.cff2'])
expected_path = get_expected_path('CFF2-serif-sub.cff2.txt')
assert differ([expected_path, actual_path])
def test_varread_pr355():
# read CFF2 VF, write Type1 snapshot
# Note that cff2_vf is built from the sources at:
# afdko/tests/buildmasterotfs_data/input/cff2_vf.
actual_path = runner(CMD + ['-s', '-o', 't1', '-f', 'cff2_vf.otf'])
expected_path = get_expected_path('cff2_vf.pfa')
skip = ['-s'] + PFA_SKIP[:]
assert differ([expected_path, actual_path] + skip)
def test_cff2_no_vf_bug353():
# read CFF2 WITHOUT VF info, write a CFF2 out. 'regular_CFF2.otf'
# is derived by taking the regular.otf file from the sfntdiff
# 'input_data' directory, and converting the CFF table to CFF2.
font_path = get_input_path('regular_CFF2.otf')
cff2_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 'cff2', '-f', font_path, cff2_path])
expected_path = get_expected_path('regular_CFF2.cff2')
assert differ([expected_path, cff2_path, '-m', 'bin'])
def test_cff2_with_spare_masters_pr835():
# SetNumMasters was incorrectly passing the number of region indices to
# var_getIVSRegionIndices for the regionListCount. With PR #835 it now
# passes the total region count for regionListCount.
#
# Example of the bug -- this command:
# tx -cff2 +S +b -std SHSansJPVFTest.otf SHSansJPVFTest.cff2
# Would produce the following warning & error:
# inconsistent region indices detected in item variation store subtable 1
# memory error
font_path = get_input_path('SHSansJPVFTest.otf')
output_path = get_temp_file_path()
runner(CMD + ['-a', '-o',
'cff2', '*S', '*b', 'std',
'-f', font_path, output_path])
expected_path = get_expected_path('SHSansJPVFTest.cff2')
assert differ([expected_path, output_path, '-m', 'bin'])
@pytest.mark.parametrize('vector, exp_filename', [
('9999,9999,9999,9999,999,9', 'psname_last_resort_no.txt'),
('9999,9999,9999,9999,999,99', 'psname_last_resort_yes.txt'),
])
def test_last_resort_instance_psname(vector, exp_filename):
font_path = get_input_path('cff2_vf_many_axes.otf')
output_path = get_temp_file_path()
runner(CMD + ['-o', '0', 'U', f'_{vector}', '-f', font_path, output_path])
expected_path = get_expected_path(exp_filename)
assert differ([expected_path, output_path, '-s', '## Filename'])
# -----------
# Other tests
# -----------
def test_trademark_string_pr425():
# the copyright symbol used in the trademark field of a UFO is
    # converted to 'Copyright' and stored in the Notice field of a Type1 font
actual_path = runner(CMD + ['-s', '-o', 't1', '-f', 'trademark.ufo'])
expected_path = get_expected_path('trademark.pfa')
skip = ['-s'] + PFA_SKIP[:]
assert differ([expected_path, actual_path] + skip)
def test_remove_hints_bug180():
font_path = get_input_path('cid.otf')
cid_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 't1', 'n', '-f', font_path, cid_path])
expected_path = get_expected_path('cid_nohints.ps')
expected_path = generate_ps_dump(expected_path)
actual_path = generate_ps_dump(cid_path)
skip = ['-s'] + PS_SKIP2
assert differ([expected_path, actual_path] + skip)
def test_long_charstring_read_bug444():
    # read a CFF2 VF with a charstring longer than 65535 bytes, check output
actual_path = runner(CMD + ['-s', '-o', '0', '-f', 'CJK-VarTest.otf'])
expected_path = get_expected_path('CJK-VarTest_read.txt')
assert differ([expected_path, actual_path, '-s', '## Filename'])
def test_long_charstring_warning():
    # read a CFF2 VF with a charstring longer than 65535 bytes, check warning message
# NOTE: can't diff the output against 'CJK-VarTest_warn.txt' because on
# Windows the lines start with 'tx.exe:' instead of just 'tx:'
actual_path = runner(
CMD + ['-s', '-e', '-o', '5', '-f', 'CJK-VarTest.otf'])
# expected_path = get_expected_path('CJK-VarTest_warn.txt')
with open(actual_path, 'rb') as f:
output = f.read()
assert b"(cfr) Warning: CharString of GID 1 is 71057 bytes long" in output
def test_long_charstring_write():
    # read a CFF2 VF with a charstring longer than 65535 bytes, write out CFF2 file
# NOTE: the font 'CJK-VarTest.otf' cannot be used in this test because
# once its long charstring is optimized (floats -> ints) it's no longer
# over the 65535 bytes limit; the long charstring in 'CJK-VarTest2.otf' is
# already as small as possible, so it will trigger the check in cffwrite.c
font_path = get_input_path('CJK-VarTest2.otf')
cff2_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 'cff2', '-f', font_path, cff2_path])
expected_path = get_expected_path('CJK-VarTest2.cff2')
assert differ([expected_path, cff2_path, '-m', 'bin'])
def test_many_hints_string_bug354():
# The glyph T@gid002 has 33 hstem hints. This tests a bug where
# tx defined an array of only 6 operands.
    # This is encountered only when writing to a VF CFF2.
font_path = get_input_path('cff2_vf.otf')
cff2_path = get_temp_file_path()
dcf_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 'cff2', '-f', font_path, cff2_path])
runner(CMD + ['-a', '-o', 'dcf', '-f', cff2_path, dcf_path])
expected_path = get_expected_path('cff2_vf.dcf.txt')
assert differ([expected_path, dcf_path])
def test_non_varying_glyphs_bug356():
"""A glyph which is non-varying in a variable font may be referenced by a
VariationStore data item subtable which has a region count of 0. The VF
support code assumed that this was an error, and issued a false warning.
File 'bug356.otf' is a handcrafted modification of 'cff2_vf.otf'. The
latter cannot be used as-is to validate the fix."""
actual_path = get_temp_file_path()
font_path = get_input_path('bug356.otf')
stderr_path = runner(CMD + ['-s', '-e', '-a', '-o', 'cff',
'-f', font_path, actual_path])
expected_path = get_expected_path('bug356.txt')
assert differ([expected_path, stderr_path, '-l', '1'])
@pytest.mark.parametrize('font_format', ['type1', 'cidfont', 'ufo2', 'ufo3'])
def test_no_psname_dump_bug437(font_format):
if 'cid' in font_format:
file_ext = 'ps'
elif 'ufo' in font_format:
file_ext = 'ufo'
else:
file_ext = 'pfa'
filename = f'{font_format}-noPSname.{file_ext}'
expected_path = get_expected_path(f'bug437/dump-{font_format}.txt')
actual_path = runner(CMD + ['-s', '-o', 'dump', '0', '-f', filename])
assert differ([expected_path, actual_path, '-l', '1'])
@pytest.mark.parametrize('font_format', ['type1', 'cidfont', 'ufo2', 'ufo3'])
def test_no_psname_convert_to_ufo_bug437(font_format):
if 'cid' in font_format:
file_ext = 'ps'
elif 'ufo' in font_format:
file_ext = 'ufo'
else:
file_ext = 'pfa'
font_path = get_input_path(f'{font_format}-noPSname.{file_ext}')
expected_path = get_expected_path(f'bug437/{font_format}.ufo')
save_path = get_temp_dir_path(f'{font_format}.ufo')
runner(CMD + ['-a', '-o', 'ufo', '-f', font_path, save_path])
assert differ([expected_path, save_path])
@pytest.mark.parametrize('font_format', ['type1', 'cidfont', 'ufo2', 'ufo3'])
def test_no_psname_convert_to_type1_bug437(font_format):
if 'cid' in font_format:
file_ext = 'ps'
elif 'ufo' in font_format:
file_ext = 'ufo'
else:
file_ext = 'pfa'
filename = f'{font_format}-noPSname.{file_ext}'
with pytest.raises(subprocess.CalledProcessError) as err:
runner(CMD + ['-o', 't1', '-f', filename])
assert err.value.returncode in (5, 6)
def test_illegal_chars_in_glyph_name_bug473():
font_path = get_input_path('bug473.ufo')
save_path = get_temp_dir_path('bug473.ufo')
runner(CMD + ['-a', '-o', 'ufo', '-f', font_path, save_path])
expected_path = get_expected_path('bug473.ufo')
assert differ([expected_path, save_path])
def test_subroutine_sorting_bug494():
""" The input file was made with the command:
tx -t1 -g 0-5 \
source-serif-pro/Roman/Instances/Regular/font.ufo bug494.pfa
The bug is that two subroutines in the Windows CFF output are swapped in
index order from the Mac version. This was because of an unstable
'qsort' done on the subroutines in the final stage of selection."""
font_path = get_input_path('bug494.pfa')
cff_path = get_temp_file_path()
dcf_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 'cff', '*S', 'std', '*b',
'-f', font_path, cff_path])
runner(CMD + ['-a', '-o', 'dcf', '-f', cff_path, dcf_path])
expected_path = get_expected_path('bug494.dcf.txt')
assert differ([expected_path, dcf_path])
@pytest.mark.parametrize('args, exp_filename', [([], 'roundtrip'),
(['g', '_0-1'], 'subset')])
@pytest.mark.parametrize('to_format', ['t1', 'cff', 'afm'])
def test_recalculate_font_bbox_bug618(to_format, args, exp_filename):
font_path = get_input_path('bug618.pfa')
save_path = get_temp_file_path()
runner(CMD + ['-f', font_path, save_path, '-o', to_format] + args)
file_ext = to_format
if to_format == 't1':
file_ext = 'pfa'
elif to_format == 'afm':
file_ext = 'txt'
expected_path = get_expected_path(
f'bug618/{exp_filename}.{file_ext}')
diff_mode = []
if to_format == 'cff':
diff_mode = ['-m', 'bin']
skip = []
if to_format == 'afm':
skip = ['-s', 'Comment Creation Date:' + SPLIT_MARKER +
'Comment Copyright']
elif to_format == 't1':
skip = ['-s'] + PFA_SKIP[:]
assert differ([expected_path, save_path] + diff_mode + skip)
def test_glyph_bboxes_bug655():
actual_path = runner(CMD + ['-s', '-o', 'mtx', '2', '-f', 'bug655.ufo'])
expected_path = get_expected_path('bug655.txt')
assert differ([expected_path, actual_path])
@pytest.mark.parametrize('filename', ['SHSVF_9b3b', 'bug684'])
def test_cs_opt_bug684(filename):
""" The input CFF2 variable font contains a long single charstring
making the maximum use of the operand stack.
    tx was generating a bad CFF2 charstring that would overflow
    the operand stack of the standard size (513) after being re-converted
    to CFF2, unless the -no_opt option is specified."""
font_path = get_input_path(f'{filename}.otf')
result_path = get_temp_file_path()
expected_path = get_expected_path(f'{filename}.cff2')
runner(CMD + ['-a', '-o', 'cff2', '-f', font_path, result_path])
assert differ([expected_path, result_path, '-m', 'bin'])
def test_standard_apple_glyph_names():
actual_path = runner(CMD + ['-s', '-o', 'dump', '4', '-f', 'post-v2.ttf'])
expected_path = get_expected_path('post-v2.txt')
assert differ([expected_path, actual_path])
def test_ufo_self_closing_dict_element_bug701():
actual_path = runner(CMD + ['-s', '-o', 'dump', '0', '-f', 'bug701.ufo'])
expected_path = get_expected_path('bug701.txt')
assert differ([expected_path, actual_path, '-s', '## Filename'])
def test_ufo3_guideline_bug705():
actual_path = runner(CMD + ['-s', '-o', 't1', '-f', 'bug705.ufo'])
expected_path = get_expected_path('bug705.pfa')
assert differ([expected_path, actual_path] + ['-s'] + PFA_SKIP)
def test_ufo_vertical_advance_bug786():
actual_path = runner(CMD + ['-s', '-o', 't1', '-f', 'bug786.ufo'])
expected_path = get_expected_path('bug786.pfa')
skip = ['-s'] + PFA_SKIP[:]
assert differ([expected_path, actual_path] + skip)
@pytest.mark.parametrize('filename', [
'a', # AE glyph in both default and processed layers
'b', # AE glyph in default layer only
'c', # AE glyph in processed layer only
])
def test_ufo_read_processed_contents_plist_bug740(filename):
actual_path = runner(CMD + ['-s', '-o', 'dump', '6', 'g', '_AE',
'-f', f'bug740/{filename}.ufo'])
expected_path = get_expected_path(f'bug740/{filename}.txt')
assert differ([expected_path, actual_path])
def test_dcf_with_infinite_recursion_bug775():
font_path = get_bad_input_path('subr_test_font_infinite_recursion.otf')
dcf_path = get_temp_file_path()
with pytest.raises(subprocess.CalledProcessError) as err:
runner(CMD + ['-a', '-o', 'dcf', '-f', font_path, dcf_path])
assert(err.value.returncode == 1) # exit code of 1, not segfault of -11
expected_path = get_expected_path(
'subr_test_font_infinite_recursion.dcf.txt')
assert differ([expected_path, dcf_path])
def test_dcf_call_depth_with_many_calls_bug846():
# This font was getting an invalid subroutine count because tx wasn't
# decrementing the subroutine call depth after the subroutine calls,
# so it was effectively just counting the total number of calls,
# not the call depth.
font_path = get_input_path('SHSansJPVFTest_SUBR.otf')
dcf_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 'dcf', '-f', font_path, dcf_path])
expected_path = get_expected_path('SHSansJPVFTest_SUBR.dcf.txt')
assert differ([expected_path, dcf_path])
def test_svg_with_cid_font_bug822():
font_path = get_input_path('cid.otf')
cid_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 'svg', '-f', font_path, cid_path])
expected_path = get_expected_path('cid.svg')
assert differ([expected_path, cid_path])
@pytest.mark.parametrize('filename',
['type1-noPSname.pfa', 'cidfont-noPSname.ps'])
def test_svg_missing_fontname_bug883(filename):
font_path = get_input_path(filename)
svg_path = get_temp_file_path()
with pytest.raises(subprocess.CalledProcessError) as err:
runner(CMD + ['-a', '-o', 'svg', '-f', font_path, svg_path])
assert(err.value.returncode == 6) # exit code of 6, not segfault of -11
@pytest.mark.parametrize('option', ['dump', 'dcf'])
def test_read_fdselect_format_4(option):
font_name = 'fdselect4.otf'
input_path = get_input_path(font_name)
output_path = get_temp_file_path()
runner(CMD + ['-a', '-o', option, '-f', input_path, output_path])
expected_path = get_expected_path(font_name + '.' + option)
assert differ([expected_path, output_path, '-s', '## Filename'])
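# FDSelect format 4 (added in CFF2) stores FD indices as 16-bit values; the 257
# FontDicts in this font exceed the 8-bit limit of format 3, so tx is expected
# to emit a format 4 FDSelect when writing.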
def test_write_fdselect_format_4():
font_name = 'FDArrayTest257FontDicts.otf'
input_path = get_input_path(font_name)
output_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 'cff2', '-f', input_path, output_path])
expected_path = get_expected_path('FDArrayTest257FontDicts.cff2')
assert differ([expected_path, output_path, '-m', 'bin'])
@pytest.mark.parametrize('option', ['cff', 'dcf'])
@pytest.mark.parametrize('font_name',
['bug895_charstring.otf', 'bug895_private_dict.otf'])
def test_read_short_charstring_bug895(option, font_name):
input_path = get_bad_input_path(font_name)
output_path = runner(CMD + ['-s', '-e', '-a', '-o', option,
'-f', input_path])
expected_path = get_expected_path(font_name + '.' + option)
skip = ['-s', 'tx: ---'] # skip line with filename
assert differ([expected_path, output_path] + skip)
@pytest.mark.parametrize('option', ['cff2', 'cff'])
def test_drop_defaultwidthx_when_writing_cff2_bug897(option):
input_path = get_bad_input_path('bug897.otf')
output_path = get_temp_file_path()
runner(CMD + ['-a', '-o', option, '-f', input_path, output_path])
dcf_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 'dcf', '-f', output_path, dcf_path])
expected_path = get_expected_path('bug897.' + option + '.dcf')
assert differ([expected_path, dcf_path])
@pytest.mark.parametrize('option', ['afm', 'dump', 'svg'])
def test_missing_glyph_names_pr905(option):
input_path = get_bad_input_path('pr905.otf')
output_path = get_temp_file_path()
runner(CMD + ['-a', '-o', option, '-f', input_path, output_path])
expected_path = get_expected_path('pr905' + '.' + option)
if option == 'afm':
skip = ['-s',
'Comment Creation Date:' + SPLIT_MARKER + 'Comment Copyright']
elif option == 'dump':
skip = ['-s', '## Filename']
else:
skip = []
assert differ([expected_path, output_path] + skip)
def test_missing_glyph_names_pr905_cef():
input_path = get_bad_input_path('pr905.otf')
output_path = get_temp_file_path()
with pytest.raises(subprocess.CalledProcessError) as err:
runner(CMD + ['-a', '-o', 'cef', '-f', input_path, output_path])
assert(err.value.returncode > 0) # error code, not segfault of -11
def test_var_bug_913():
# AdobeVFPrototype_mod.otf is a modified copy of AdobeVFPrototype.otf 1.003
# so that the region indexes in HVAR are listed in a different order from
    # those in CFF2. Also, the MVAR table has been modified to contain (dummy)
    # deltas for underline offset and underline thickness, just to exercise
    # the MVAR lookup code.
font_path = get_input_path('AdobeVFPrototype_mod.otf')
save_path = get_temp_file_path()
runner(CMD + ['-a', '-o',
'3', 'g', '_A,W,y', 'U', '_900,0',
'-f', font_path, save_path])
expected_path = get_expected_path('bug913.txt')
assert differ([expected_path, save_path, '-s', '## Filename'])
def test_bad_charset():
font_path = get_bad_input_path('bad_charset.otf')
save_path = get_temp_file_path()
runner(CMD + ['-a', '-f', font_path, save_path])
expected_path = get_expected_path('bad_charset.txt')
assert differ([expected_path, save_path, '-s', '## Filename'])
def test_bug_940():
input_path = get_bad_input_path('bug940_private_blend.otf')
output_path = get_temp_file_path()
with pytest.raises(subprocess.CalledProcessError) as err:
runner(CMD + ['-a', '-o', 'cff2', '-f', input_path, output_path])
assert(err.value.returncode > 0) # error code, not segfault or success
def test_too_many_glyphs_pr955():
input_path = get_bad_input_path('TooManyGlyphsCFF2.otf')
output_path = get_temp_file_path()
with pytest.raises(subprocess.CalledProcessError) as err:
runner(CMD + ['-a', '-o', 'cff', '-f', input_path, output_path])
assert(err.value.returncode > 0) # error code, not hang or success
def test_ttread_varinst():
font_path = get_input_path('AdobeVFPrototype.ttf')
save_path = get_temp_file_path()
runner(CMD + ['-a', '-o', '3', 'g', '_A', 'U', '_500,800',
'-f', font_path, save_path])
expected_path = get_expected_path('vfproto_tt_inst500_800.txt')
assert differ([expected_path, save_path, '-s', '## Filename'])
def test_unused_post2_names():
font_path = get_input_path('SourceSansPro-Regular-cff2-unused-post.otf')
save_path = get_temp_file_path()
runner(CMD + ['-a', '-o', '1', '-f', font_path, save_path])
expected_path = get_expected_path('ssr-cff2-unused-post.txt')
assert differ([expected_path, save_path, '-s', '## Filename'])
def test_seac_reporting():
# This test aims to show that the SEAC operator
# is not reported by all tx modes
font_path = get_input_path('seac.otf')
save_path = get_temp_file_path()
runner(CMD + ['-a', '-o', '6', '-f', font_path, save_path])
expected_path = get_expected_path('seac.dump.txt')
assert differ([expected_path, save_path])
runner(CMD + ['-a', '-o', 'dcf', '5', 'T', '_c',
'-f', font_path, save_path])
expected_path = get_expected_path('seac.dcf.txt')
assert differ([expected_path, save_path])
def test_date_and_time_afm():
"""
test the use of date and time functions in absfont_afm.c
"""
input_path = get_input_path('font.otf')
output_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 'afm', '-f', input_path, output_path])
now = time.time()
year = '%s' % time.localtime().tm_year
with open(output_path) as output_file:
lines = output_file.readlines()
file_year = lines[1].split()[2]
assert year == file_year
file_time_str = lines[2].split(': ')[1].strip()
file_time = time.mktime(
time.strptime(file_time_str, '%a %b %d %H:%M:%S %Y'))
hours_diff = abs(now - file_time) / 3600
assert(hours_diff < 1)
def test_date_and_time_ps():
"""
test the use of date and time functions in absfont_draw.c
"""
input_path = get_input_path('font.otf')
output_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 'ps', '-f', input_path, output_path])
now = time.time()
with open(output_path) as output_file:
lines = output_file.readlines()
date_str = re.split(r'[()]', lines[5])[1]
date_str = date_str.split(': ')[1]
time_str = re.split(r'[()]', lines[7])[1]
time_str = time_str.split(': ')[1]
file_date_and_time_str = date_str + ' ' + time_str
file_time = time.mktime(
time.strptime(file_date_and_time_str, '%m/%d/%y %H:%M'))
hours_diff = abs(now - file_time) / 3600
assert(hours_diff < 1)
def test_date_and_time_pdf():
"""
test the use of date and time functions in pdfwrite.c
"""
input_path = get_input_path('font.otf')
output_path = get_temp_file_path()
runner(CMD + ['-a', '-o', 'pdf', '-f', input_path, output_path])
now = time.time()
tz = time.timezone
tz_hr = abs(int(tz / 3600)) # ignore sign since we're splitting on +/-
tz_min = (tz % 3600) // 60
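    # worked example: for a zone 8 hours west of UTC, time.timezone == 28800,
    # giving tz_hr == 8 and tz_min == 0; these are compared against the values
    # parsed from the PDF CreationDate string below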
with open(output_path) as output_file:
lines = output_file.readlines()
creation_date_str = re.split(r'[()]', lines[13])[1]
mod_date_str = re.split(r'[()]', lines[14])[1]
assert(creation_date_str == mod_date_str)
(date_time_str, tz_hr_str, tz_min_str) = \
re.split(r"[:+\-Z']", creation_date_str)[1:4]
creation_time = time.mktime(
time.strptime(date_time_str, '%Y%m%d%H%M%S'))
hours_diff = abs(now - creation_time) / 3600
assert(hours_diff < 1)
creation_tz_hr = int(tz_hr_str)
assert(creation_tz_hr == tz_hr)
creation_tz_min = int(tz_min_str)
assert(creation_tz_min == tz_min)
file_date_str = re.split(r"[():]", lines[36])[2].strip()
file_time_str = re.split(r"[() ]", lines[38])[3]
file_date_time_str = file_date_str + ' ' + file_time_str
file_time = time.mktime(
time.strptime(file_date_time_str, "%d %b %y %H:%M"))
hours_diff = abs(now - file_time) / 3600
assert(hours_diff < 1)
def test_overlap_removal():
input_path = get_input_path('overlaps.ufo')
expected_path = get_expected_path('overlaps.pfa')
output_path = get_temp_file_path()
args = [TOOL, '-t1', '+V', '-o', output_path, input_path]
subprocess.call(args)
assert differ([expected_path, output_path, '-s', PFA_SKIP[0]])
@pytest.mark.parametrize("fmt", [
"cff",
"cff2",
])
def test_nonstd_fontmatrix(fmt):
input_path = get_input_path("nonstdfmtx.otf")
txt_filename = f"nonstdfmtx_{fmt}.txt"
expected_path = get_expected_path(txt_filename)
output_dir = get_temp_dir_path()
bin_output = os.path.join(output_dir, f"nonstdfmtx.{fmt}")
output_path = os.path.join(output_dir, txt_filename)
runner(CMD + ['-a', '-o', fmt, '*S', '*b', '-f', input_path, bin_output])
runner(CMD + ['-a', '-o', 'dump', '-f', bin_output, output_path])
skip = "## Filename "
assert differ([expected_path, output_path, '-s', skip])
def test_pdf_single_glyph():
input_path = get_input_path("bug1218.otf")
pdf_filename = "bug1218.pdf"
expected_path = get_expected_path(pdf_filename)
output_dir = get_temp_dir_path()
output_path = os.path.join(output_dir, pdf_filename)
runner(CMD + ['-a', '-o', 'pdf', '1', '-f', input_path, output_path])
skip = PDF_SKIP[:]
skip.insert(0, '-s')
regex_skip = PDF_SKIP_REGEX[:]
for regex in regex_skip:
skip.append('-r')
skip.append(regex)
assert differ([expected_path, output_path] + skip)
def test_cffread_bug1343():
"""
Check FontBBox values
"""
actual_path = runner(CMD + ['-s', '-f', 'font.otf', '-o', '3'])
expected_path = get_expected_path('font.otf.dump3.txt')
assert differ([expected_path, actual_path, '-s', '## Filename'])
@pytest.mark.parametrize('arg, input, output, expected', [
('ufo', 'cidfont.subset', 'cidfont_subset.ufo', 'testCID.ufo'),
('t1', 'testCID.ufo', 'cidfont_subset.ufo', 'cidfont.subset'),
(('ufo', 't1'), 'cidfont.subset', 'cidfont_subset.ufo', 'cidfont.subset'),
(('t1', 'ufo'), 'testCID.ufo', 'cidfont_subset.ufo', 'testCID.ufo'),
])
def test_cidkeyed_read_write(arg, input, output, expected):
"""
Tests reading & writing CID-Keyed fonts in tx (uforead & ufowrite)
CID -> UFO (one-way test)
UFO -> CID (one-way test)
CID -> UFO -> CID (round-trip test)
UFO -> CID -> UFO (round-trip test)
"""
folder = "cid_roundtrip/"
input_path = get_input_path(folder + input)
output_dir = get_temp_dir_path()
output_path = os.path.join(output_dir, output)
expected_path = get_expected_path(folder + expected)
if isinstance(arg, tuple): # round-trip tests
runner(CMD + ['-a', '-o', arg[0], '-f',
input_path, output_path])
final_output_dir = get_temp_dir_path()
final_output_path = os.path.join(final_output_dir, output)
runner(CMD + ['-a', '-o', arg[1], '-f',
output_path, final_output_path])
output_path = final_output_path
else: # one-way tests
runner(CMD + ['-a', '-o', arg, '-f',
input_path, output_path])
if '.subset' in expected_path:
expected_path = generate_ps_dump(expected_path)
output_path = generate_ps_dump(output_path)
assert differ([expected_path, output_path])
@pytest.mark.parametrize("file", [
"missing_CID.ufo",
"missing_iFD.ufo",
])
def test_cidkeyed_lib_missing(file):
    folder = "cidkeyed_missing_lib/"
ufo_input_path = get_input_path(folder + file)
arg = [TOOL, '-t1', '-f', ufo_input_path]
assert subprocess.call(arg) == 6
def test_cff2_windows_line_endings_bug1355():
# Testing writing binary to stdout on Windows
# to ensure line endings are not inserted.
font_path = get_input_path('regular_CFF2.otf')
actual_path = runner(CMD + ['-s', '-a', '-o', 'cff2',
'*S', '*b', '-f', font_path])
expected_path = get_expected_path('bug1355.cff2')
assert differ([expected_path, actual_path, '-m', 'bin'])
| if isinstance(arg, list):
arg_lst = [TOOL] + arg
else:
arg_lst = [TOOL, '-t1', arg]
assert subprocess.call(arg_lst) == 1 |
delete_elastic_gateway_private_zone.go | package sgw
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// DeleteElasticGatewayPrivateZone invokes the sgw.DeleteElasticGatewayPrivateZone API synchronously
func (client *Client) DeleteElasticGatewayPrivateZone(request *DeleteElasticGatewayPrivateZoneRequest) (response *DeleteElasticGatewayPrivateZoneResponse, err error) {
response = CreateDeleteElasticGatewayPrivateZoneResponse()
err = client.DoAction(request, response)
return
}
// DeleteElasticGatewayPrivateZoneWithChan invokes the sgw.DeleteElasticGatewayPrivateZone API asynchronously
func (client *Client) DeleteElasticGatewayPrivateZoneWithChan(request *DeleteElasticGatewayPrivateZoneRequest) (<-chan *DeleteElasticGatewayPrivateZoneResponse, <-chan error) {
responseChan := make(chan *DeleteElasticGatewayPrivateZoneResponse, 1)
errChan := make(chan error, 1)
err := client.AddAsyncTask(func() {
defer close(responseChan)
defer close(errChan)
response, err := client.DeleteElasticGatewayPrivateZone(request)
if err != nil {
errChan <- err
} else {
responseChan <- response
}
})
if err != nil {
errChan <- err
close(responseChan)
close(errChan)
}
return responseChan, errChan
}
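// Usage sketch for the channel-based variant; the client constructor is
// assumed from the generated sgw client.go, and the gateway ID is a
// placeholder:
//
//	client, err := sgw.NewClientWithAccessKey("cn-hangzhou", accessKeyId, accessKeySecret)
//	request := sgw.CreateDeleteElasticGatewayPrivateZoneRequest()
//	request.GatewayId = "sgw-xxxxxxxx"
//	respChan, errChan := client.DeleteElasticGatewayPrivateZoneWithChan(request)
//	select {
//	case resp := <-respChan:
//		fmt.Println(resp.RequestId)
//	case err := <-errChan:
//		fmt.Println(err)
//	}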
// DeleteElasticGatewayPrivateZoneWithCallback invokes the sgw.DeleteElasticGatewayPrivateZone API asynchronously
func (client *Client) DeleteElasticGatewayPrivateZoneWithCallback(request *DeleteElasticGatewayPrivateZoneRequest, callback func(response *DeleteElasticGatewayPrivateZoneResponse, err error)) <-chan int {
result := make(chan int, 1)
err := client.AddAsyncTask(func() {
var response *DeleteElasticGatewayPrivateZoneResponse
var err error
defer close(result)
response, err = client.DeleteElasticGatewayPrivateZone(request)
callback(response, err)
result <- 1
})
if err != nil {
defer close(result)
callback(nil, err)
result <- 0
}
return result
}
// DeleteElasticGatewayPrivateZoneRequest is the request struct for api DeleteElasticGatewayPrivateZone
type DeleteElasticGatewayPrivateZoneRequest struct {
*requests.RpcRequest
SecurityToken string `position:"Query" name:"SecurityToken"`
GatewayId string `position:"Query" name:"GatewayId"`
}
// DeleteElasticGatewayPrivateZoneResponse is the response struct for api DeleteElasticGatewayPrivateZone
type DeleteElasticGatewayPrivateZoneResponse struct {
*responses.BaseResponse
RequestId string `json:"RequestId" xml:"RequestId"`
Success bool `json:"Success" xml:"Success"`
Code string `json:"Code" xml:"Code"`
Message string `json:"Message" xml:"Message"`
TaskId string `json:"TaskId" xml:"TaskId"`
}
// CreateDeleteElasticGatewayPrivateZoneRequest creates a request to invoke DeleteElasticGatewayPrivateZone API
func CreateDeleteElasticGatewayPrivateZoneRequest() (request *DeleteElasticGatewayPrivateZoneRequest) {
request = &DeleteElasticGatewayPrivateZoneRequest{
RpcRequest: &requests.RpcRequest{},
}
request.InitWithApiInfo("sgw", "2018-05-11", "DeleteElasticGatewayPrivateZone", "hcs_sgw", "openAPI")
request.Method = requests.POST
return
}
// CreateDeleteElasticGatewayPrivateZoneResponse creates a response to parse from DeleteElasticGatewayPrivateZone response
func CreateDeleteElasticGatewayPrivateZoneResponse() (response *DeleteElasticGatewayPrivateZoneResponse) {
	response = &DeleteElasticGatewayPrivateZoneResponse{
		BaseResponse: &responses.BaseResponse{},
	}
	return
}
|
notifications.schema.ts | import { Prop, Schema, SchemaFactory } from '@nestjs/mongoose';
import { Document } from 'mongoose';
export type UserDocument = Notification & Document;
@Schema()
export class Notification {
@Prop()
message : string;
@Prop()
sender : string;
}
export const NotificationSchema = SchemaFactory.createForClass(Notification);
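// Usage sketch (assumed module wiring elsewhere in the app):
//
// @Module({
//   imports: [
//     MongooseModule.forFeature([
//       { name: Notification.name, schema: NotificationSchema },
//     ]),
//   ],
// })
// export class NotificationsModule {}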
assertions_test.go | /*
* Copyright 2020 The Compass Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers_test
import (
"strings"
"testing"
"time"
"github.com/kyma-incubator/compass/components/director/pkg/graphql"
"github.com/kyma-incubator/compass/components/director/pkg/resource"
"github.com/kyma-incubator/compass/components/operations-controller/api/v1alpha1"
"github.com/kyma-incubator/compass/components/operations-controller/controllers/controllersfakes"
"github.com/kyma-incubator/compass/components/operations-controller/internal/tenant"
"github.com/kyma-incubator/compass/components/operations-controller/internal/webhook"
"github.com/stretchr/testify/require"
"k8s.io/apimachinery/pkg/types"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/log"
)
func stubLoggerAssertion(t *testing.T, errExpectation string, msgExpectations ...string) {
ctrl.Log = log.NewDelegatingLogger(&mockedLogger{
AssertErrorExpectations: func(err error, msg string) {
require.Contains(t, err.Error(), errExpectation)
matchedMsg := false
for _, msgExpectation := range msgExpectations {
if strings.Contains(msg, msgExpectation) {
matchedMsg = true
}
}
require.True(t, matchedMsg)
},
})
}
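// Usage sketch (hypothetical expectations): stubLoggerAssertion(t, "connection refused",
// "Unable to execute") installs a logger that fails the test unless every logged
// error contains the first string and the message matches one of the rest.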
func stubLoggerNotLoggedAssertion(t *testing.T, errExpectation string, msgExpectations ...string) {
ctrl.Log = log.NewDelegatingLogger(&mockedLogger{
AssertErrorExpectations: func(err error, msg string) {
require.NotContains(t, err.Error(), errExpectation)
matchedMsg := false
for _, msgExpectation := range msgExpectations {
if strings.Contains(msg, msgExpectation) {
matchedMsg = true
}
}
require.False(t, matchedMsg)
},
})
}
func assertZeroInvocations(t *testing.T, callCountFunc ...func() int) {
for _, callCount := range callCountFunc {
require.Equal(t, 0, callCount())
}
}
func assertK8sGetCalledWithName(t *testing.T, k8sClient *controllersfakes.FakeKubernetesClient, expectedName types.NamespacedName) {
require.Equal(t, 1, k8sClient.GetCallCount())
_, namespacedName := k8sClient.GetArgsForCall(0)
require.Equal(t, expectedName, namespacedName)
}
func assertK8sDeleteCalledWithOperation(t *testing.T, k8sClient *controllersfakes.FakeKubernetesClient, expectedOperation *v1alpha1.Operation) {
require.Equal(t, 1, k8sClient.DeleteCallCount())
_, actualOperation, _ := k8sClient.DeleteArgsForCall(0)
require.Equal(t, expectedOperation, actualOperation)
}
func assertStatusManagerInitializeCalledWithOperation(t *testing.T, statusManagerClient *controllersfakes.FakeStatusManager, expectedOperation *v1alpha1.Operation) {
require.Equal(t, 1, statusManagerClient.InitializeCallCount())
actualOperation := statusManagerClient.InitializeArgsForCall(0)
require.Equal(t, expectedOperation, actualOperation)
}
func assertStatusManagerSuccessStatusCalledWithOperation(t *testing.T, statusManagerClient *controllersfakes.FakeStatusManager, expectedOperation *v1alpha1.Operation) {
require.Equal(t, 1, statusManagerClient.SuccessStatusCallCount())
_, actualOperation := statusManagerClient.SuccessStatusArgsForCall(0)
require.Equal(t, expectedOperation, actualOperation)
}
func assertStatusManagerInProgressWithPollURLCalled(t *testing.T, statusManagerClient *controllersfakes.FakeStatusManager, expectedOperation *v1alpha1.Operation, expectedPollURL string) {
require.Equal(t, 1, statusManagerClient.InProgressWithPollURLCallCount())
_, actualOperation, pollURL := statusManagerClient.InProgressWithPollURLArgsForCall(0)
require.Equal(t, expectedOperation, actualOperation)
require.Equal(t, expectedPollURL, pollURL)
}
func assertStatusManagerInProgressWithPollURLAndLastTimestampCalled(t *testing.T, statusManagerClient *controllersfakes.FakeStatusManager, expectedOperation *v1alpha1.Operation, expectedPollURL string) {
require.Equal(t, 1, statusManagerClient.InProgressWithPollURLAndLastPollTimestampCallCount())
_, actualOperation, pollURL, lastPollTimestamp, retryCount := statusManagerClient.InProgressWithPollURLAndLastPollTimestampArgsForCall(0)
require.Equal(t, expectedOperation, actualOperation)
require.Equal(t, expectedPollURL, pollURL)
timestamp, err := time.Parse(time.RFC3339Nano, lastPollTimestamp)
require.NoError(t, err)
require.True(t, timestamp.After(expectedOperation.CreationTimestamp.Time))
require.Equal(t, expectedOperation.Status.Webhooks[0].RetriesCount+1, retryCount)
}
func assertStatusManagerFailedStatusCalledWithOperation(t *testing.T, statusManagerClient *controllersfakes.FakeStatusManager, expectedOperation *v1alpha1.Operation, expectedErrorMsg string) {
require.Equal(t, 1, statusManagerClient.FailedStatusCallCount())
_, actualOperation, errorMsg := statusManagerClient.FailedStatusArgsForCall(0)
require.Equal(t, expectedOperation, actualOperation)
require.Contains(t, errorMsg, expectedErrorMsg)
}
func assertDirectorUpdateOperationCalled(t *testing.T, directorClient *controllersfakes.FakeDirectorClient, operation *v1alpha1.Operation) {
assertDirectorUpdateOperationWithErrorCalled(t, directorClient, operation, "")
}
func assertDirectorUpdateOperationInvocation(t *testing.T, directorClient *controllersfakes.FakeDirectorClient, operation *v1alpha1.Operation, invocation int) {
assertDirectorUpdateOperationWithErrorInvocation(t, directorClient, operation, "", invocation)
}
func assertDirectorUpdateOperationWithErrorCalled(t *testing.T, directorClient *controllersfakes.FakeDirectorClient, operation *v1alpha1.Operation, errMsg string) {
require.Equal(t, 1, directorClient.UpdateOperationCallCount())
assertDirectorUpdateOperationWithErrorInvocation(t, directorClient, operation, errMsg, 0)
}
func assertDirectorUpdateOperationWithErrorInvocation(t *testing.T, directorClient *controllersfakes.FakeDirectorClient, operation *v1alpha1.Operation, errMsg string, invocation int) {
_, actualRequest := directorClient.UpdateOperationArgsForCall(invocation)
require.Equal(t, graphql.OperationType(operation.Spec.OperationType), actualRequest.OperationType)
require.Equal(t, resource.Type(operation.Spec.ResourceType), actualRequest.ResourceType)
require.Equal(t, operation.Spec.ResourceID, actualRequest.ResourceID)
require.Contains(t, actualRequest.Error, errMsg)
}
func assertDirectorFetchApplicationCalled(t *testing.T, directorClient *controllersfakes.FakeDirectorClient, expectedResourceID, expectedTenantID string) {
require.Equal(t, 1, directorClient.FetchApplicationCallCount())
assertDirectorFetchApplicationInvocation(t, directorClient, expectedResourceID, expectedTenantID, 0)
}
func assertDirectorFetchApplicationInvocation(t *testing.T, directorClient *controllersfakes.FakeDirectorClient, expectedResourceID, expectedTenantID string, invocation int) {
ctx, resourceID := directorClient.FetchApplicationArgsForCall(invocation)
require.Equal(t, expectedResourceID, resourceID)
require.Equal(t, expectedTenantID, ctx.Value(tenant.ContextKey))
}
func assertWebhookDoCalled(t *testing.T, webhookClient *controllersfakes.FakeWebhookClient, operation *v1alpha1.Operation, webhookEntity *graphql.Webhook) {
require.Equal(t, 1, webhookClient.DoCallCount())
assertWebhookDoInvocation(t, webhookClient, operation, webhookEntity, 0)
}
func assertWebhookDoInvocation(t *testing.T, webhookClient *controllersfakes.FakeWebhookClient, operation *v1alpha1.Operation, webhookEntity *graphql.Webhook, invocation int) {
_, actualRequest := webhookClient.DoArgsForCall(invocation)
expectedRequestObject, err := operation.RequestObject()
require.NoError(t, err)
expectedRequest := webhook.NewRequest(*webhookEntity, expectedRequestObject, operation.Spec.CorrelationID)
require.Equal(t, expectedRequest, actualRequest)
}
func assertWebhookPollCalled(t *testing.T, webhookClient *controllersfakes.FakeWebhookClient, operation *v1alpha1.Operation, webhookEntity *graphql.Webhook) {
require.Equal(t, 1, webhookClient.PollCallCount())
assertWebhookPollInvocation(t, webhookClient, operation, webhookEntity, 0)
}
func assertWebhookPollInvocation(t *testing.T, webhookClient *controllersfakes.FakeWebhookClient, operation *v1alpha1.Operation, webhookEntity *graphql.Webhook, invocation int) {
_, actualRequest := webhookClient.PollArgsForCall(invocation)
expectedRequestObject, err := operation.RequestObject()
require.NoError(t, err)
expectedRequest := webhook.NewPollRequest(*webhookEntity, expectedRequestObject, operation.Spec.CorrelationID, mockedLocationURL)
require.Equal(t, expectedRequest, actualRequest)
}
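// assertStatusEquals below compares statuses order-insensitively: webhooks and
// conditions are keyed into maps (by WebhookID and condition Type) so that
// slice ordering differences do not cause spurious mismatches.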
func assertStatusEquals(expectedStatus, actualStatus *v1alpha1.OperationStatus) bool {
	if expectedStatus.Phase != actualStatus.Phase || expectedStatus.ObservedGeneration != actualStatus.ObservedGeneration ||
		len(expectedStatus.Webhooks) != len(actualStatus.Webhooks) || len(expectedStatus.Conditions) != len(actualStatus.Conditions) {
return false
}
actualWebhooks := webhookSliceToMap(actualStatus.Webhooks)
for _, expectedWebhook := range expectedStatus.Webhooks {
actualWebhook, exists := actualWebhooks[expectedWebhook.WebhookID]
if !exists || (actualWebhook != expectedWebhook) {
return false
}
}
actualConditions := conditionSliceToMap(actualStatus.Conditions)
for _, expectedCondition := range expectedStatus.Conditions {
actualCondition, exists := actualConditions[expectedCondition.Type]
if !exists || (actualCondition != expectedCondition) {
return false
}
}
return true
}
0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import tinymce.models
class Migration(migrations.Migration):
    dependencies = [
]
operations = [
migrations.CreateModel(
name='Goods',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID', auto_created=True)),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='是否删除')),
('name', models.CharField(max_length=20, verbose_name='商品SPU名称')),
('detail', tinymce.models.HTMLField(verbose_name='商品详情', blank=True)),
],
options={
'verbose_name_plural': '商品SPU',
'verbose_name': '商品SPU',
'db_table': 'df_goods',
},
),
migrations.CreateModel(
name='GoodsImage',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID', auto_created=True)),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='是否删除')),
('image', models.ImageField(upload_to='goods', verbose_name='图片路径')),
],
options={
'verbose_name_plural': '商品图片',
'verbose_name': '商品图片',
'db_table': 'df_goods_image',
},
),
migrations.CreateModel(
name='GoodsSKU',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID', auto_created=True)),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='是否删除')),
('name', models.CharField(max_length=20, verbose_name='商品名称')),
('desc', models.CharField(max_length=256, verbose_name='商品简介')),
('price', models.DecimalField(verbose_name='商品价格', decimal_places=2, max_digits=10)),
('unite', models.CharField(max_length=20, verbose_name='商品单位')),
('image', models.ImageField(upload_to='goods', verbose_name='商品图片')),
('stock', models.IntegerField(default=1, verbose_name='商品库存')),
('sales', models.IntegerField(default=0, verbose_name='商品销量')),
('status', models.SmallIntegerField(default=1, choices=[(0, '下架'), (1, '上架')], verbose_name='商品状态')),
('goods', models.ForeignKey(to='goods.Goods', verbose_name='商品SPU')),
],
options={
'verbose_name_plural': '商品',
'verbose_name': '商品',
'db_table': 'df_goods_sku',
},
),
migrations.CreateModel(
name='GoodsType',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID', auto_created=True)),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='是否删除')),
('name', models.CharField(max_length=20, verbose_name='种类名称')),
('logo', models.CharField(max_length=20, verbose_name='标识')),
('image', models.ImageField(upload_to='type', verbose_name='商品类型图片')),
],
options={
'verbose_name_plural': '商品种类',
'verbose_name': '商品种类',
'db_table': 'df_goods_type',
},
),
migrations.CreateModel(
name='IndexGoodsBanner',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID', auto_created=True)),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='是否删除')),
('image', models.ImageField(upload_to='banner', verbose_name='图片')),
('index', models.SmallIntegerField(default=0, verbose_name='展示顺序')),
('sku', models.ForeignKey(to='goods.GoodsSKU', verbose_name='商品')),
],
options={
'verbose_name_plural': '首页轮播商品',
'verbose_name': '首页轮播商品',
'db_table': 'df_index_banner',
},
),
migrations.CreateModel(
name='IndexPromotionBanner',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID', auto_created=True)),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='是否删除')),
('name', models.CharField(max_length=20, verbose_name='活动名称')),
('url', models.CharField(max_length=256, verbose_name='活动链接')),
('image', models.ImageField(upload_to='banner', verbose_name='活动图片')),
('index', models.SmallIntegerField(default=0, verbose_name='展示顺序')),
],
options={
'verbose_name_plural': '主页促销活动',
'verbose_name': '主页促销活动',
'db_table': 'df_index_promotion',
},
),
migrations.CreateModel(
name='IndexTypeGoodsBanner',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID', auto_created=True)),
('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='是否删除')),
('display_type', models.SmallIntegerField(default=1, choices=[(0, '标题'), (1, '图片')], verbose_name='展示类型')),
('index', models.SmallIntegerField(default=0, verbose_name='展示顺序')),
('sku', models.ForeignKey(to='goods.GoodsSKU', verbose_name='商品SKU')),
('type', models.ForeignKey(to='goods.GoodsType', verbose_name='商品类型')),
],
options={
'verbose_name_plural': '主页分类展示商品',
'verbose_name': '主页分类展示商品',
'db_table': 'df_index_type_goods',
},
),
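        # GoodsSKU.type and GoodsImage.sku are attached below via AddField
        # because their FK targets (GoodsType and GoodsSKU) are created later
        # in this migration than the models that reference them.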
migrations.AddField(
model_name='goodssku',
name='type',
field=models.ForeignKey(to='goods.GoodsType', verbose_name='商品种类'),
),
migrations.AddField(
model_name='goodsimage',
name='sku',
field=models.ForeignKey(to='goods.GoodsSKU', verbose_name='商品'),
),
]
|
main.rs | use serde_json::{json, Value};
use std::env;
use rand::prelude::*;
use std::str::FromStr;

mod stocks;
use stocks::structs::stock::Stock;
use stocks::{open_order, close_order};
use stocks::structs::order::Order;
use stocks::enums::order_types::OrderType;
fn main() {
let args: Vec<String> = env::args().collect();
let action: &String = &args[1];
let name: &String = &args[2];
let amount: i32 = i32::from_str(&args[3]).unwrap();
let price: f32 = f32::from_str(&args[4]).unwrap();
println!("hello stocks");
let mut new_order: Order = open_order(amount,
OrderType::Long, &name.as_str(), price, None, None);
match action.as_str() {
"buy" => {
println!("the value of your investment is: {}",
new_order.current_value());
}
"sell" => {
let mut rng = rand::thread_rng();
let new_price_ref: f32 = rng.gen();
            let new_price: f32 = new_price_ref * 100.0;
new_order.stock.update_price(new_price);
let sale_profit: f32 = close_order(new_order);
println!("here is the profit you made: {}", sale_profit);
}
_ => {
panic!("Only 'buy' and 'sell' actions are supported.");
}
}
}
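// Usage sketch; the argument order matches the parsing in `main` above
// (action, ticker name, amount, price):
//   cargo run -- buy MSFT 10 250.0
//   cargo run -- sell MSFT 10 250.0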
/// Adds two numbers together
///
/// # Arguments
/// * one (i32): one of the numbers to be added
/// * two (i32): one of the numbers to be added
///
/// # Returns
/// (i32): the sum of param one and param two
///
/// # Usage
/// The function can be used by the following code:
///
/// "rust
/// resutl: i32 = add_numbers(2, 5);
/// "
fn add_numbers(one: i32, two: i32) -> i32 {
one + two
}
test_scalar.py | """Test around scalar constructors and scalar methods."""
import riptable as rt
import numpy as np
import pytest
from numpy.testing import assert_almost_equal, assert_warns
class TestScalarConstructor(object):
# Type-coercion from strings test cases adapted from numpy/core/tests/test_scalar_ctors.py.
# https://github.com/numpy/numpy/blob/c31cc36a8a814ed4844a2a553454185601914a5a/numpy/core/tests/test_scalar_ctors.py
@pytest.mark.parametrize(
"scalar_ctor, numeric_string",
[
# simple numeric string
("single", "1.234"),
("double", "1.234"),
("longdouble", "1.234"),
            # numeric string with overflow; expect inf value
("half", "1e10000"),
("single", "1e10000"),
("double", "1e10000"),
("longdouble", "1e10000"),
("longdouble", "-1e10000"),
],
)
def test_floating(self, scalar_ctor, numeric_string):
rt_value = getattr(rt, scalar_ctor)(numeric_string)
np_value = getattr(np, scalar_ctor)(numeric_string)
assert_almost_equal(rt_value, np_value)
@pytest.mark.parametrize(
"scalar_ctor, numeric_string",
[("longdouble", "1e10000"), ("longdouble", "-1e10000"),],
)
def test_overflow_warning(self, scalar_ctor, numeric_string):
assert_warns(RuntimeWarning, getattr(np, scalar_ctor), numeric_string)
models.py | """"CDMI Models
Copyright 2015 Archive Analytics Solutions - University of Liverpool
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import mimetypes
from collections import OrderedDict

from indigo.models.collection import Collection
# `get_driver` is needed by CDMIResource.get_value below; this import path is
# an assumption based on the indigo package layout
from indigo.drivers import get_driver
class CDMIContainer(object):
"""Wrapper to return CDMI fields fro an Indigo Collection"""
def __init__(self, indigo_container, api_root):
self.collection = indigo_container
self.api_root = api_root
def get_capabilitiesURI(self):
"""Mandatory URI to the capabilities for the object"""
return (u'{0}/cdmi_capabilities/container{1}'
''.format(self.api_root, self.collection.path)
)
def get_children(self, range=None):
"""Mandatory - Names of the children objects in the container object."""
        child_c, child_r = self.collection.get_child()
        child_c = [u"{}/".format(c) for c in child_c]
        res = child_c + child_r
        if range:
            start, stop = (int(el) for el in range.split("-", 1))
# map CDMI range value to python index
stop += 1
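            # e.g. the CDMI range "0-4" selects the first five children,
            # i.e. res[0:5] in Python slice terms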
else:
start = 0
stop = len(res)
return res[start:stop]
def get_childrenrange(self):
"""Mandatory - The children of the container expressed as a range"""
child_container , child_dataobject = self.collection.get_child()
nb_child = len(child_container) + len(child_dataobject)
if nb_child != 0:
return "{}-{}".format(0, nb_child-1)
else:
return "0-0"
def get_completionStatus(self):
"""Mandatory - A string indicating if the object is still in the
process of being created or updated by another operation,"""
val = self.collection.get_metadata_key("cdmi_completionStatus")
if not val:
val = "Complete"
return val
def get_domainURI(self):
"""Mandatory URI of the owning domain"""
return ('{0}/cdmi_domains/indigo/'.format(self.api_root))
def get_metadata(self):
md = self.collection.get_cdmi_metadata()
md.update(self.collection.get_acl_metadata())
return md
def get_objectID(self):
"""Mandatory object ID of the object"""
return self.collection.uuid
def get_objectName(self):
"""Conditional name of the object
We don't support objects only accessible by ID so this is mandatory"""
return self.collection.name
def get_objectType(self):
"""Mandatory Object type"""
return "application/cdmi-container"
def get_parentID(self):
"""Conditional Object ID of the parent container object
We don't support objects only accessible by ID so this is mandatory"""
parent_path = self.collection.container
if self.collection.is_root:
parent_path = u"/"
parent = Collection.find(parent_path)
return parent.uuid
def get_parentURI(self):
        """Conditional URI for the parent object
        We don't support objects only accessible by ID so this is mandatory"""
        # A container in CDMI has a '/' at the end but we don't (except for the
        # root)
        parent_path = self.collection.container
        if parent_path != '/' and parent_path != "null":
            parent_path = u"{}/".format(parent_path)
        return u"{}".format(parent_path)
def get_path(self):
return self.collection.path
def get_percentComplete(self):
"""Optional - Indicate the percentage of completion as a numeric
integer value from 0 through 100. 100 if the completionStatus is
'Complete'"""
val = self.collection.get_metadata_key("cdmi_percentComplete")
if not val:
val = "100"
return val
class CDMIResource(object):
"""Wrapper to return CDMI fields fro an Indigo Resource"""
def __init__(self, indigo_resource, api_root):
self.resource = indigo_resource
self.api_root = api_root
def chunk_content(self):
return self.resource.chunk_content()
def get_capabilitiesURI(self):
"""Mandatory URI to the capabilities for the object"""
return (u'{0}/cdmi_capabilities/dataobject{1}'
''.format(self.api_root, self.resource.path)
)
def get_completionStatus(self):
"""Mandatory - A string indicating if the object is still in the
process of being created or updated by another operation,"""
val = self.resource.get_metadata_key("cdmi_completionStatus")
if not val:
val = "Complete"
return val
def get_domainURI(self):
"""Mandatory URI of the owning domain"""
return ('{0}/cdmi_domains/indigo/'.format(self.api_root))
def get_length(self):
return self.resource.size
def get_metadata(self):
md = self.resource.get_cdmi_metadata()
md.update(self.resource.get_acl_metadata())
return md
def get_mimetype(self):
if self.resource.get_mimetype():
return self.resource.get_mimetype()
# Give best guess at mimetype
mimetype = mimetypes.guess_type(self.resource.name)
if mimetype[0]:
return mimetype[0]
else:
# Interpret as binary data
return 'application/octet-stream'
def get_objectID(self):
"""Mandatory object ID of the object"""
return self.resource.uuid
def get_objectName(self):
"""Conditional name of the object
We don't support objects only accessible by ID so this is mandatory"""
return self.resource.get_name()
def get_objectType(self):
"""Mandatory Object type"""
return "application/cdmi-object"
def get_parentID(self):
"""Conditional Object ID of the parent container object
We don't support objects only accessible by ID so this is mandatory"""
parent = Collection.find(self.resource.container)
return parent.uuid
def get_parentURI(self):
"""Conditional URI for the parent object
We don't support objects only accessible by ID so this is mandatory"""
# A container in CDMI has a '/' at the end but we don't (except for the
# root)
parent_path = self.resource.container
if parent_path != '/':
parent_path = u"{}/".format(parent_path)
return u"{}".format(parent_path)
def get_path(self):
return self.resource.path
def get_percentComplete(self):
"""Optional - Indicate the percentage of completion as a numeric
integer value from 0 through 100. 100 if the completionStatus is
'Complete'"""
val = self.resource.get_metadata_key("cdmi_percentComplete")
if not val:
val = "100"
return val
def get_reference(self):
return self.resource.url
def get_url(self):
return self.resource.url
def get_value(self, range=None):
driver = get_driver(self.resource.url)
# TODO: Improve that for large files. Check what CDMI recommends
# for stream access
data = []
for chk in driver.chunk_content():
data.append(chk)
res = ''.join([s for s in data])
if range:
start, stop = (int(el) for el in range.split("-", 1))
# map CDMI range value to python index
stop += 1
else:
start = 0
stop = len(res)
return res[start:stop]
def get_valuerange(self):
"""Mandatory - The range of bytes of the data object to be returned in
the value field"""
return "0-{}".format(self.resource.size-1)
def get_valuetransferencoding(self):
"""Mandatory - The value transfer encoding used for the data object
value"""
return "utf-8"
def is_reference(self):
"""Check if the resource is a reference"""
return self.resource.is_reference
| """Conditional URI for the parent object
We don't support objects only accessible by ID so this is mandatory"""
# A container in CDMI has a '/' at the end but we don't (except for the
# root)
parent_path = self.collection.container
if parent_path != '/' and parent_path != "null":
parent_path = u"{}/".format(parent_path)
return u"{}".format(parent_path) |
lib.rs | //! # hexstring
//!
//! The `hexstring` crate provide a convenient hexadecimal string wrapper.
//! It allows all the common conversion expected from a hexadecimal string :
//! - Contains a structured representation of uppercase or lowercase hexadecimal string
//! - Construct from both string and string literal
//! - Convert from and into array of bytes
//!
//! The [`HexString`](crate::HexString) type is a tiny immutable wrapper around string and insure it
//! always contains a valid hexadecimal string.
//!
//! ## Feature flags
//!
//! The following are a list of [Cargo features][cargo-features] that can be enabled or disabled:
//! - **serde**: Enable [serde][serde] support.
//!
//! [cargo-features]: https://doc.rust-lang.org/stable/cargo/reference/features.html#the-features-section
//! [serde]: https://serde.rs
#![feature(const_generics)]
#![allow(incomplete_features)]
#![deny(missing_docs)]
use derive_more::Display;
use hex::FromHexError;
use std::{
borrow::Cow,
convert::{From, TryFrom},
str,
};
/// Errors than can occurs during [`HexString`] construction.
///
/// Refers to [`FromHexError`][hex::FromHexError] for more details.
pub type Error = FromHexError;
/// Indicates the case of the hexadecimal string.
#[derive(Debug, PartialEq, Eq)]
pub enum Case {
/// Indicates a lowercase hexadecimal string.
Lower,
/// Indicates a uppercase hexadecimal string.
Upper,
}
/// Provides a structured representation of a hexadecimal string.
///
/// It is guaranteed to be a valid hexadecimal string, whether initialized from a string
/// or from bytes.
/// A valid [`HexString`] should contain only hexadecimal characters, such as:
/// - ff04ad992c
/// - FF04AD992C
///
/// And must not mix upper and lower alphabetic characters.
///
/// # Examples
///
/// The idiomatic way to construct a [`HexString`] is to call [`HexString::new`] method with a
/// string.
///
/// ```
/// use hexstring::{HexString, Case};
///
/// let hex = HexString::<{ Case::Upper }>::new("ABCDEF").unwrap();
/// ```
///
/// As the example shown, creating a hexadecimal string is a bit convoluted due to the usage of
/// const generic parameter.
/// Two convenient type aliases must be used instead of the raw [`HexString`] type :
///
/// ```
/// use hexstring::{UpperHexString, LowerHexString};
///
/// let lowercase_hex = LowerHexString::new("abcdef").unwrap();
/// let uppercase_hex = UpperHexString::new("ABCDEF").unwrap();
/// ```
///
/// [`HexString`] has support for conversion from and into array of bytes.
///
/// ```
/// use hexstring::LowerHexString;
///
/// let expected_bytes = [41, 24, 42];
/// let hex = LowerHexString::from(expected_bytes);
/// let bytes = Vec::from(hex);
///
/// assert_eq!(expected_bytes, &bytes[..]);
/// ```
#[cfg_attr(
feature = "serde",
derive(serde::Deserialize, serde::Serialize),
serde(try_from = "String")
)]
#[derive(Display, Default, Clone, Debug, PartialEq, Eq)]
#[display(fmt = "{}", &self.0)]
pub struct HexString<const C: Case>(Cow<'static, str>);
/// Convenient alias type to represent uppercase hexadecimal string.
pub type UpperHexString = HexString<{ Case::Upper }>;
/// Convenient alias type to represent lowercase hexadecimal string.
pub type LowerHexString = HexString<{ Case::Lower }>;
impl<const C: Case> HexString<C> {
/// Constructs a new [`HexString`] from a string.
///
/// # Errors
/// This method fails if the given string is not a valid hexadecimal.
pub fn new<S: Into<Cow<'static, str>>>(s: S) -> Result<Self, Error> {
let s = s.into();
if s.len() % 2 != 0 {
return Err(Error::OddLength);
}
if let Some((index, c)) = s.chars().enumerate().find(|(_, c)| match C {
Case::Lower => !matches!(c, '0'..='9' | 'a'..='f'),
Case::Upper => !matches!(c, '0'..='9' | 'A'..='F'),
}) {
return Err(Error::InvalidHexCharacter { c, index });
}
Ok(Self(s))
}
}
impl LowerHexString {
/// Constructs an [`UpperHexString`] from a [`LowerHexString`].
///
/// This method performs a copy if the internal string is a string literal.
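    ///
    /// # Example (illustrative)
    ///
    /// ```
    /// use hexstring::LowerHexString;
    ///
    /// let upper = LowerHexString::new("ab01").unwrap().to_uppercase();
    /// assert_eq!(upper.to_string(), "AB01");
    /// ```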
pub fn to_uppercase(self) -> UpperHexString {
// avoid unnecessary copy on owned string
let mut s = match self.0 {
Cow::Borrowed(s) => s.to_string(),
Cow::Owned(s) => s,
};
s.make_ascii_uppercase();
HexString::<{ Case::Upper }>(Cow::Owned(s))
}
}
impl UpperHexString {
/// Constructs a [`LowerHexString`] from an [`UpperHexString`].
///
/// This method performs a copy if the internal string is a string literal.
pub fn to_lowercase(self) -> LowerHexString {
// avoid unnecessary copy on owned string
let mut s = match self.0 {
Cow::Borrowed(s) => s.to_string(),
Cow::Owned(s) => s,
};
s.make_ascii_lowercase();
HexString::<{ Case::Lower }>(Cow::Owned(s))
}
}
impl<const C: Case> From<&[u8]> for HexString<C> {
fn from(bytes: &[u8]) -> Self {
let s = match C {
Case::Upper => hex::encode_upper(bytes),
Case::Lower => hex::encode(bytes),
};
// do not call `HexString::new` on purpose to avoid unnecessary hexadecimal string validation
Self(Cow::Owned(s))
}
}
impl<const C: Case> From<Vec<u8>> for HexString<C> {
fn from(bytes: Vec<u8>) -> Self {
Self::from(&bytes[..])
}
}
impl<const C: Case, const N: usize> From<[u8; N]> for HexString<C> {
    fn from(bytes: [u8; N]) -> Self {
Self::from(&bytes[..])
}
}
impl<const C: Case> From<HexString<C>> for Vec<u8> {
fn from(s: HexString<C>) -> Self {
// since `HexString` always represents a valid hexadecimal string, the result of `hex::decode`
// can be safely unwrapped.
hex::decode(s.0.as_ref()).unwrap()
}
}
impl<const C: Case, const N: usize> TryFrom<HexString<C>> for [u8; N] {
type Error = Error;
fn try_from(s: HexString<C>) -> Result<Self, Self::Error> {
let mut bytes = [0u8; N];
hex::decode_to_slice(s.0.as_ref(), &mut bytes).map(|_| bytes)
}
}
// Hide `std::convert::TryFrom` conversion implementation from string used only by
// `serde::Deserialize` mechanism.
//
// It constraints user to use [`HexString::new`] to construct a hexadecimal string.
#[cfg(feature = "serde")]
mod seal {
use super::*;
use std::convert::TryFrom;
#[doc(hidden)]
impl<const C: Case> TryFrom<String> for HexString<C> {
type Error = Error;
fn try_from(s: String) -> Result<Self, Self::Error> {
Self::new(s)
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn it_constructs_from_owned_str() {
assert_eq!(
LowerHexString::new("ab04ff".to_string()),
Ok(HexString(Cow::Owned("ab04ff".to_string())))
);
assert_eq!(
UpperHexString::new("AB04FF".to_string()),
Ok(HexString(Cow::Owned("AB04FF".to_string())))
);
}
#[test]
fn it_constructs_from_borrowed_str() {
assert_eq!(
LowerHexString::new("ab04ff"),
Ok(HexString(Cow::Borrowed("ab04ff")))
);
assert_eq!(
UpperHexString::new("AB04FF"),
Ok(HexString(Cow::Borrowed("AB04FF")))
);
}
#[test]
fn it_constructs_from_empty_str() {
assert!(LowerHexString::new("").is_ok());
assert!(UpperHexString::new("").is_ok());
}
#[test]
fn it_constructs_from_bytes() {
assert_eq!(
LowerHexString::from([42, 15, 5]),
HexString::<{ Case::Lower }>(Cow::Borrowed("2a0f05"))
);
assert_eq!(
UpperHexString::from([42, 15, 5]),
HexString::<{ Case::Upper }>(Cow::Borrowed("2A0F05"))
);
assert_eq!(
LowerHexString::from(vec![1, 2, 3, 4, 5]),
HexString::<{ Case::Lower }>(Cow::Borrowed("0102030405"))
);
assert_eq!(
UpperHexString::from(vec![1, 2, 3, 4, 5]),
HexString::<{ Case::Upper }>(Cow::Borrowed("0102030405"))
);
}
#[test]
fn it_rejects_str_with_odd_length() {
assert_eq!(LowerHexString::new("abc"), Err(Error::OddLength));
assert_eq!(UpperHexString::new("abcde"), Err(Error::OddLength));
}
#[test]
fn it_rejects_str_with_invalid_chars() {
assert_eq!(
LowerHexString::new("abcdZ109"),
Err(Error::InvalidHexCharacter { c: 'Z', index: 4 })
);
assert_eq!(
UpperHexString::new("ABVCD109"),
Err(Error::InvalidHexCharacter { c: 'V', index: 2 })
);
}
#[test]
fn it_converts_into_bytes() {
let hex = LowerHexString::new("2a1a02").unwrap();
let bytes = Vec::from(hex);
assert_eq!(&bytes[..], [42, 26, 2]);
let hex = UpperHexString::new("2A1A02").unwrap();
let bytes = Vec::from(hex);
assert_eq!(&bytes[..], [42, 26, 2]);
}
#[test]
fn it_converts_into_fixed_array_of_bytes() {
use std::convert::TryInto;
let bytes: [u8; 4] = LowerHexString::new("142a020a").unwrap().try_into().unwrap();
assert_eq!(bytes, [20, 42, 2, 10]);
let bytes: [u8; 5] = UpperHexString::new("142A020A0F")
.unwrap()
.try_into()
.unwrap();
assert_eq!(bytes, [20, 42, 2, 10, 15]);
}
#[test]
fn it_creates_upper_hex_str_from_lower_hex_str() {
let s = "aabbccddee";
let hex = LowerHexString::new(s).unwrap().to_uppercase();
let expected_hex = HexString::<{ Case::Upper }>(Cow::Owned("AABBCCDDEE".to_string()));
assert_ne!(s, hex.0.as_ref());
assert_eq!(hex, expected_hex);
let hex = LowerHexString::new(s.to_string()).unwrap().to_uppercase();
assert_eq!(hex, expected_hex);
}
#[test]
fn it_creates_lower_hex_str_from_upper_str() {
let s = "AABBCCDDEE";
let hex = UpperHexString::new(s).unwrap().to_lowercase();
let expected_hex = HexString::<{ Case::Lower }>(Cow::Owned("aabbccddee".to_string()));
assert_ne!(s, hex.0.as_ref());
assert_eq!(hex, expected_hex);
let hex = UpperHexString::new(s.to_string()).unwrap().to_lowercase();
assert_eq!(hex, expected_hex);
}
#[cfg(feature = "serde")]
mod serde {
use super::*;
use serde_json::error::Category;
#[test]
fn it_deser_hex_str() {
let result: Result<LowerHexString, _> = serde_json::from_str("\"abcd09\"");
assert!(result.is_ok());
let result: Result<UpperHexString, _> = serde_json::from_str("\"ABCD09\"");
assert!(result.is_ok());
}
#[test]
fn it_fails_to_deser_invalid_hex_str() {
let result: Result<LowerHexString, serde_json::Error> =
serde_json::from_str("\"invalid hex str\"");
assert_eq!(result.unwrap_err().classify(), Category::Data);
let result: Result<UpperHexString, serde_json::Error> =
serde_json::from_str("\"INVALID HEX STR\"");
assert_eq!(result.unwrap_err().classify(), Category::Data);
}
}
}
| from |
test_directory.py | # (C) Datadog, Inc. 2010-2017
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
# stdlib
from itertools import product
import os
import shutil
import tempfile
# 3p
from nose.plugins.attrib import attr
# project
from tests.checks.common import AgentCheckTest
@attr(requires="directory")
class | (AgentCheckTest):
CHECK_NAME = 'directory'
FILE_METRICS = [
"system.disk.directory.file.bytes",
"system.disk.directory.file.modified_sec_ago",
"system.disk.directory.file.created_sec_ago"
]
HISTOGRAM_SUFFIXES = ['count', '95percentile', 'max', 'median', 'avg']
DIRECTORY_METRICS = [i1 + "." + i2 for i1, i2 in product([
"system.disk.directory.file.bytes",
"system.disk.directory.file.modified_sec_ago",
"system.disk.directory.file.created_sec_ago"
], HISTOGRAM_SUFFIXES)]
COMMON_METRICS = [
"system.disk.directory.files",
"system.disk.directory.bytes"
]
@staticmethod
def get_config_stubs(dir_name, filegauges=False):
"""
Helper to generate configs from a directory name
"""
return [
{
'directory': dir_name,
'filegauges': filegauges
}, {
'directory': dir_name,
'name': "my_beloved_directory",
'filegauges': filegauges
}, {
'directory': dir_name,
'dirtagname': "directory_custom_tagname",
'filegauges': filegauges
}, {
'directory': dir_name,
'filetagname': "file_custom_tagname",
'filegauges': filegauges
}, {
'directory': dir_name,
'dirtagname': "recursive_check",
'recursive': True,
'filegauges': filegauges
}, {
'directory': dir_name,
'dirtagname': "glob_pattern_check",
'pattern': "*.log",
'filegauges': filegauges
}, {
'directory': dir_name,
'dirtagname': "relative_pattern_check",
'pattern': "file_*",
'filegauges': filegauges
}
]
def setUp(self):
"""
Generate a directory with a file structure for tests
"""
self.temp_dir = tempfile.mkdtemp()
# Create 10 files
for i in xrange(0, 10):
open(self.temp_dir + "/file_" + str(i), 'a').close()
# Add 2 '.log' files
open(self.temp_dir + "/log_1.log", 'a').close()
open(self.temp_dir + "/log_2.log", 'a').close()
# Create a subfolder and generate files into it
os.makedirs(str(self.temp_dir) + "/subfolder")
# Create 5 subfiles
for i in xrange(0, 5):
open(self.temp_dir + "/subfolder" + '/file_' + str(i), 'a').close()
def tearDown(self):
shutil.rmtree(self.temp_dir)
def test_directory_metrics(self):
"""
Directory metric coverage
"""
config_stubs = self.get_config_stubs(self.temp_dir)
countonly_stubs = self.get_config_stubs(self.temp_dir)
# Try all the configurations in countonly mode as well
for stub in countonly_stubs:
stub['countonly'] = True
config = {
'instances': config_stubs + countonly_stubs
}
self.run_check(config)
for config in config_stubs:
dirtagname = config.get('dirtagname', "name")
name = config.get('name', self.temp_dir)
dir_tags = [dirtagname + ":%s" % name]
# Directory metrics
for mname in (self.DIRECTORY_METRICS + self.COMMON_METRICS):
self.assertMetric(mname, tags=dir_tags, count=1)
# 'recursive' and 'pattern' parameters
if config.get('pattern') == "*.log":
# 2 '*.log' files in 'temp_dir'
self.assertMetric("system.disk.directory.files", tags=dir_tags, count=1, value=2)
elif config.get('pattern') == "file_*":
# 10 'file_*' files in 'temp_dir'
self.assertMetric("system.disk.directory.files", tags=dir_tags, count=1, value=10)
elif config.get('recursive'):
# 12 files in 'temp_dir' + 5 files in 'temp_dir/subfolder'
self.assertMetric("system.disk.directory.files", tags=dir_tags, count=1, value=17)
else:
# 12 files in 'temp_dir'
self.assertMetric("system.disk.directory.files", tags=dir_tags, count=1, value=12)
# Raises when coverage < 100%
self.coverage_report()
def test_file_metrics(self):
"""
File metric coverage
"""
config_stubs = self.get_config_stubs(self.temp_dir, filegauges=True)
config = {
'instances': config_stubs
}
self.run_check(config)
for config in config_stubs:
dirtagname = config.get('dirtagname', "name")
name = config.get('name', self.temp_dir)
filetagname = config.get('filetagname', "filename")
dir_tags = [dirtagname + ":%s" % name]
# File metrics
for mname in self.FILE_METRICS:
if config.get('pattern') != "file_*":
# 2 '*.log' files in 'temp_dir'
for i in xrange(1, 3):
file_tag = [filetagname + ":%s" % os.path.normpath(self.temp_dir + "/log_" + str(i) + ".log")]
self.assertMetric(mname, tags=dir_tags + file_tag, count=1)
if config.get('pattern') != "*.log":
# Files in 'temp_dir'
for i in xrange(0, 10):
file_tag = [filetagname + ":%s" % os.path.normpath(self.temp_dir + "/file_" + str(i))]
self.assertMetric(mname, tags=dir_tags + file_tag, count=1)
if not config.get('pattern'):
# Files in 'temp_dir/subfolder'
if config.get('recursive'):
for i in xrange(0, 5):
file_tag = [filetagname + ":%s" % os.path.normpath(self.temp_dir + "/subfolder" + "/file_" + str(i))]
self.assertMetric(mname, tags=dir_tags + file_tag, count=1)
# Common metrics
for mname in self.COMMON_METRICS:
self.assertMetric(mname, tags=dir_tags, count=1)
# Raises when coverage < 100%
self.coverage_report()
def test_non_existent_directory(self):
"""
Missing or inaccessible directory coverage.
"""
config = {'instances': [{'directory': '/non-existent/directory'}]}
self.assertRaises(Exception, lambda: self.run_check(config))
def test_non_existent_directory_ignore_missing(self):
config = {
'instances': [
{'directory': '/non-existent/directory',
'ignore_missing': True}
]
}
self.run_check(config)
| DirectoryTestCase |
packet.rs | // Copyright (c) 2021 Shreepad Shukla
// SPDX-License-Identifier: MIT
#[derive(Debug, Clone, PartialEq)]
pub struct Packet {
version: u8,
type_id: u8,
lit_value: Option<u64>,
op_mode: Option<u8>,
op_sub_packets_length: Option<usize>,
op_sub_packets_count: Option<u32>,
op_sub_packets: Option<Vec<Packet>>,
}
impl Packet {
pub fn new(hex_str: String) -> Packet {
let mut packet = Packet {
version: 0,
type_id: 0,
lit_value: None,
op_mode: None,
op_sub_packets_length: None,
op_sub_packets_count: None,
op_sub_packets: None,
};
let binary_str = Self::hex_to_binary_str(hex_str);
let mut posn = 0usize;
Self::fill_packet(&mut packet, &binary_str, &mut posn);
packet
}
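// A sketch of the packet layout consumed by `fill_packet` below, as implemented
// against a '0'/'1' binary string (offsets are relative to the read position):
// - 3 bits: version
// - 3 bits: type id (4 = literal value, anything else = operator)
// - literal: repeated 5-bit groups, 1 continuation bit + 4 value bits
// - operator, mode 0: 15-bit total bit-length of its sub-packets
// - operator, mode 1: 11-bit count of its sub-packets, parsed recursively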
fn fill_packet(packet: &mut Packet, binary_str: &String, posn: &mut usize) {
// version - 3 bits
packet.version =
u8::from_str_radix(&binary_str[*posn..*posn + 3], 2).expect("Invalid version");
*posn += 3;
// type_id - 3 bits
packet.type_id =
u8::from_str_radix(&binary_str[*posn..*posn + 3], 2).expect("Invalid type id");
*posn += 3;
if packet.type_id == 4 {
// literal
let mut reached_end = false;
let mut literal_str = String::new();
while !reached_end {
// Check leading digit for end (0)
if binary_str[*posn..*posn + 1].starts_with("0") {
reached_end = true;
}
*posn += 1;
// Collect 4 bits of literal
literal_str.push_str(&binary_str[*posn..*posn + 4]);
*posn += 4;
}
packet.lit_value = Some(u64::from_str_radix(&literal_str, 2).expect("Invalid literal"));
}
// end literal
else {
// operator
let mut sub_packets: Vec<Packet> = Vec::new();
// Check leading digit for mode (0)
if binary_str[*posn..*posn + 1].starts_with("0") {
packet.op_mode = Some(0);
*posn += 1;
// Sub-packet length - 15 bits
packet.op_sub_packets_length = Some(
usize::from_str_radix(&binary_str[*posn..*posn + 15], 2)
.expect("Invalid sp length"),
);
*posn += 15;
let current_posn: usize = *posn;
while *posn < current_posn + packet.op_sub_packets_length.unwrap() {
let mut sub_packet = Packet {
version: 0,
type_id: 0,
lit_value: None,
op_mode: None,
op_sub_packets_length: None,
op_sub_packets_count: None,
op_sub_packets: None,
};
Self::fill_packet(&mut sub_packet, binary_str, posn);
sub_packets.push(sub_packet);
}
}
// end mode 0
else {
packet.op_mode = Some(1);
*posn += 1;
// Sub-packet count - 11 bits
packet.op_sub_packets_count = Some(
u32::from_str_radix(&binary_str[*posn..*posn + 11], 2)
.expect("Invalid sp count"),
);
*posn += 11;
// collect packets recursively
for _ in 1..=packet.op_sub_packets_count.unwrap() {
let mut sub_packet = Packet {
version: 0,
type_id: 0,
lit_value: None,
op_mode: None,
op_sub_packets_length: None,
op_sub_packets_count: None,
op_sub_packets: None,
};
Self::fill_packet(&mut sub_packet, binary_str, posn);
sub_packets.push(sub_packet);
}
} // end mode 1
packet.op_sub_packets = Some(sub_packets);
} // end operator
//println!("Filled packet: {:?}", packet);
}
fn hex_to_binary_str(hex_str: String) -> String {
let mut binary_str = String::with_capacity(6000);
for hex_char in hex_str.trim().chars() {
let hex_value = hex_char.to_digit(16).unwrap();
binary_str.push_str(&format!("{:04b}", hex_value));
}
binary_str
}
pub fn version_sum(&self) -> u32 {
let mut sum = 0u32;
sum += self.version as u32;
if self.op_sub_packets.is_none() {
return sum;
}
for sub_packet in self.op_sub_packets.as_ref().unwrap().iter() {
sum += sub_packet.version_sum();
}
sum
}
pub fn value(&self) -> u64 {
match self.type_id {
0 => self
.op_sub_packets
.as_ref()
.unwrap()
.iter()
.map(|packet| packet.value() as u64)
.sum(),
1 => self
.op_sub_packets
.as_ref()
.unwrap()
.iter()
.map(|packet| packet.value() as u64)
.product(),
2 => self
.op_sub_packets
.as_ref()
.unwrap()
.iter()
.map(|packet| packet.value() as u64)
.min()
.unwrap(),
3 => self
.op_sub_packets
.as_ref()
.unwrap()
.iter()
.map(|packet| packet.value() as u64)
.max()
.unwrap(),
4 => self.lit_value.unwrap(),
5 => {
// gt
let mut iter_gt = self
.op_sub_packets
.as_ref()
.unwrap()
.iter()
.map(|packet| packet.value() as u64);
if iter_gt.next().unwrap() > iter_gt.next().unwrap() {
1
} else {
0
}
}
6 => {
//lt
let mut iter_gt = self
.op_sub_packets
.as_ref()
.unwrap()
.iter()
.map(|packet| packet.value() as u64);
if iter_gt.next().unwrap() < iter_gt.next().unwrap() {
1
} else {
0
}
}
7 => {
//eq
let mut iter_gt = self
.op_sub_packets
.as_ref()
.unwrap()
.iter()
.map(|packet| packet.value() as u64);
if iter_gt.next().unwrap() == iter_gt.next().unwrap() {
1
} else {
0
}
}
_ => 0,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn packet_literal() {
let result = Packet::new("D2FE28".to_string());
println!("Literal packet: {:?}", result);
assert_eq!(6, result.version_sum()); // fail to print
}
#[test]
fn packet_operator_mode1() {
let result = Packet::new("EE00D40C823060".to_string());
println!("Operator packet: {:?}", result);
assert_eq!(14, result.version_sum()); // fail to print
}
#[test]
fn packet_operator_mode0() {
let result = Packet::new("38006F45291200".to_string());
println!("Operator packet: {:?}", result);
assert_eq!(9, result.version_sum()); // fail to print
}
#[test]
fn packet_3nestedoperator_lit() {
let result = Packet::new("8A004A801A8002F478".to_string());
println!("Operator packet: {:?}", result);
assert_eq!(16, result.version_sum()); // fail to print
}
#[test]
fn packet_3nestedoperator_5lit() {
let result = Packet::new("A0016C880162017C3686B18A3D4780".to_string());
println!("Operator packet: {:?}", result);
assert_eq!(31, result.version_sum()); // fail to print
}
#[test]
fn packet_sum_2lit() {
let result = Packet::new("C200B40A82".to_string());
println!("Operator packet: {:?}", result);
assert_eq!(3, result.value()); // fail to print
}
#[test]
fn packet_min_3lit() {
let result = Packet::new("880086C3E88112".to_string());
println!("Operator packet: {:?}", result);
assert_eq!(7, result.value()); // fail to print
}
#[test]
fn packet_max_3lit() {
let result = Packet::new("CE00C43D881120".to_string());
println!("Operator packet: {:?}", result);
assert_eq!(9, result.value()); // fail to print
}
#[test]
fn packet_gt_2lit() {
let result = Packet::new("F600BC2D8F".to_string());
println!("Operator packet: {:?}", result);
assert_eq!(0, result.value()); // fail to print
}
#[test]
fn packet_lt_2lit() {
let result = Packet::new("D8005AC2A8F0".to_string());
println!("Operator packet: {:?}", result);
assert_eq!(1, result.value()); // fail to print
}
#[test]
fn packet_eq_2lit() {
let result = Packet::new("9C005AC2F8F0".to_string());
println!("Operator packet: {:?}", result);
assert_eq!(0, result.value()); // fail to print
}
#[test]
fn | () {
let result = Packet::new("9C0141080250320F1802104A08".to_string());
println!("Operator packet: {:?}", result);
assert_eq!(1, result.value()); // fail to print
}
}
| packet_eq_sumprod_2lits |
arguments.py | # Copyright 2004-2021 Tom Rothamel <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# This file handles argument parsing. Argument parsing takes place in
# two phases. In the first phase, we only parse the arguments that are
# necessary to load the game, and run the init phase. The init phase
# can register commands and arguments. These arguments are parsed at
# the end of the init phase, before the game begins running, and can
# decide if the game runs or some other action occurs.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
from renpy.compat import *
import argparse
import os
import renpy
try:
import site
site._renpy_argv_emulation() # @UndefinedVariable
except:
pass
# A map from command name to the function that implements it.
commands = { }
# True if the command requires the display, false if it doesn't.
display = { }
# Commands that force compile to be set.
compile_commands = { "compile", "add_from", "merge_strings" }
class ArgumentParser(argparse.ArgumentParser):
"""
Creates an argument parser that is capable of parsing the standard Ren'Py
arguments, as well as arguments that are specific to a sub-command.
"""
def __init__(self, second_pass=True, description=None, require_command=True):
"""
Creates an argument parser.
`second_pass`
True if this is the second pass through argument parsing. (The pass
that parses sub-commands.)
`description`
If supplied, this will be used as a description of the subcommand
to run.
"""
self.group = self
argparse.ArgumentParser.__init__(self, description="The Ren'Py visual novel engine.", add_help=False)
command_names = ", ".join(sorted(commands))
if require_command:
self.add_argument(
"basedir",
help="The base directory containing of the project to run. This defaults to the directory containing the Ren'Py executable.")
self.add_argument(
"command",
help="The command to execute. Available commands are: " + command_names + ". Defaults to 'run'.")
else:
self.add_argument(
"basedir",
default='',
nargs='?',
help="The base directory containing of the project to run. This defaults to the directory containing the Ren'Py executable.")
self.add_argument(
"command",
help="The command to execute. Available commands are: " + command_names + ". Defaults to 'run'.",
nargs='?',
default="run")
self.add_argument(
"--savedir", dest='savedir', default=None, metavar="DIRECTORY",
help="The directory where saves and persistent data are placed.")
self.add_argument(
'--trace', dest='trace', action='store', default=0, type=int, metavar="LEVEL",
help="The level of trace Ren'Py will log to trace.txt. (1=per-call, 2=per-line)")
self.add_argument(
"--version", action='version', version=renpy.version,
help="Displays the version of Ren'Py in use.")
self.add_argument(
"--compile", action='store_true', dest='compile',
help='Forces all .rpy scripts to be recompiled before proceeding.')
self.add_argument(
"--keep-orphan-rpyc", action="store_true",
help="Prevents the compile command from deleting orphan rpyc files.")
self.add_argument(
"--lint", action="store_true", dest="lint",
help=argparse.SUPPRESS)
self.add_argument(
"--errors-in-editor", action="store_true",
help="Causes errors to open in a text editor.")
self.add_argument(
'--safe-mode', dest='safe_mode', action='store_true', default=False,
help="Forces Ren'Py to start in safe mode, allowing the player to configure graphics.")
dump = self.add_argument_group("JSON dump arguments", description="Ren'Py can dump information about the game to a JSON file. These options let you select the file, and choose what is dumped.")
dump.add_argument("--json-dump", action="store", metavar="FILE", help="The name of the JSON file.")
dump.add_argument("--json-dump-private", action="store_true", default=False, help="Include private names. (Names beginning with _.)")
dump.add_argument("--json-dump-common", action="store_true", default=False, help="Include names defined in the common directory.")
if second_pass:
self.add_argument("-h", "--help", action="help", help="Displays this help message, then exits.")
command = renpy.game.args.command # @UndefinedVariable
self.group = self.add_argument_group("{0} command arguments".format(command), description)
def add_argument(self, *args, **kwargs):
if self.group is self:
argparse.ArgumentParser.add_argument(self, *args, **kwargs)
else:
self.group.add_argument(*args, **kwargs)
def parse_args(self, *args, **kwargs):
rv = argparse.ArgumentParser.parse_args(self, *args, **kwargs)
if rv.command in compile_commands:
rv.compile = True
if renpy.session.get("compile", False):
rv.compile = True
return rv
def parse_known_args(self, *args, **kwargs):
args, rest = argparse.ArgumentParser.parse_known_args(self, *args, **kwargs)
if args.command in compile_commands:
args.compile = True
if renpy.session.get("compile", False):
args.compile = True
return args, rest
def run():
"""
The default command, that (when called) leads to normal game startup.
"""
ap = ArgumentParser(description="Runs the current project normally.", require_command=False)
ap.add_argument(
'--profile-display', dest='profile_display', action='store_true', default=False,
help="If present, Ren'Py will report the amount of time it takes to draw the screen.")
ap.add_argument(
'--debug-image-cache', dest='debug_image_cache', action='store_true', default=False,
help="If present, Ren'Py will log information regarding the contents of the image cache.")
ap.add_argument(
'--warp', dest='warp', default=None,
help='This takes as an argument a filename:linenumber pair, and tries to warp to the statement before that line number.')
args = renpy.game.args = ap.parse_args()
if args.warp:
renpy.warp.warp_spec = args.warp
if args.profile_display: # @UndefinedVariable
renpy.config.profile = True
if args.debug_image_cache:
renpy.config.debug_image_cache = True
return True
def compile(): # @ReservedAssignment
"""
This command forces the game script to be recompiled.
"""
takes_no_arguments("Recompiles the game script.")
return False
def quit(): # @ReservedAssignment
|
def rmpersistent():
"""
This command is used to delete the persistent data.
"""
takes_no_arguments("Deletes the persistent data.")
renpy.loadsave.location.unlink_persistent()
renpy.persistent.should_save_persistent = False
return False
def register_command(name, function, uses_display=False):
"""
Registers a command that can be invoked when Ren'Py is run on the command
line. When the command is run, `function` is called with no arguments.
If `function` needs to take additional command-line arguments, it should
instantiate a renpy.arguments.ArgumentParser(), and then call parse_args
on it. Otherwise, it should call renpy.arguments.takes_no_arguments().
If `function` returns true, Ren'Py startup proceeds normally. Otherwise,
Ren'Py will terminate when function() returns.
`uses_display`
If true, Ren'Py will initialize the display. If False, Ren'Py will
use dummy video and audio drivers.
"""
commands[name] = function
display[name] = uses_display
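# A minimal usage sketch (the "wordcount" command and its function are
# hypothetical, not part of Ren'Py), following the pattern described in the
# docstring above:
#
#     def wordcount():
#         ap = ArgumentParser(description="Counts words in the game script.")
#         ap.add_argument("--verbose", action="store_true")
#         args = ap.parse_args()
#         # ... do the work, then stop instead of starting the game ...
#         return False
#
#     register_command("wordcount", wordcount)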
def bootstrap():
"""
Called during bootstrap to perform an initial parse of the arguments, ignoring
unknown arguments. Returns the parsed arguments, and a list of unknown arguments.
"""
global rest
ap = ArgumentParser(False, require_command=False)
args, _rest = ap.parse_known_args()
return args
def pre_init():
"""
Called before init, to set up argument parsing.
"""
global subparsers
register_command("run", run, True)
register_command("lint", renpy.lint.lint)
register_command("compile", compile)
register_command("rmpersistent", rmpersistent)
register_command("quit", quit)
def post_init():
"""
Called after init, but before the game starts. This parses a command
and its arguments. It then runs the command function, and returns True
if execution should continue and False otherwise.
"""
command = renpy.game.args.command # @UndefinedVariable
if command == "run" and renpy.game.args.lint: # @UndefinedVariable
command = "lint"
if command not in commands:
ArgumentParser().error("Command {0} is unknown.".format(command))
if not display[command]:
os.environ.setdefault("SDL_AUDIODRIVER", "dummy")
os.environ.setdefault("SDL_VIDEODRIVER", "dummy")
return commands[command]()
def takes_no_arguments(description=None):
"""
Used to report that a command takes no arguments.
"""
ArgumentParser(description=description).parse_args()
| """
This command is used to quit without doing anything.
"""
takes_no_arguments("Recompiles the game script.")
return False |
convert_test.py | # Copyright 2020-2021 Cambridge Quantum Computing
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Tuple, List
import json
import os
import numpy as np
import pytest
from pytket.circuit import Circuit, OpType # type: ignore
from pytket.extensions.aqt.backends.aqt import _translate_aqt, AQTBackend, _aqt_rebase
skip_remote_tests: bool = os.getenv("PYTKET_RUN_REMOTE_TESTS") is None
REASON = "PYTKET_RUN_REMOTE_TESTS not set (requires configuration of AQT access token)"
def tk_to_aqt(circ: Circuit) -> Tuple[List[List], str]:
"""Convert a circuit to AQT list representation"""
c = circ.copy()
AQTBackend(device_name="sim/noise-model-1").default_compilation_pass().apply(c)
return _translate_aqt(c)
@pytest.mark.skipif(skip_remote_tests, reason=REASON)
def test_convert() -> None:
circ = Circuit(4, 4)
circ.H(0).CX(0, 1)
circ.add_gate(OpType.noop, [1])
circ.CRz(0.5, 1, 2)
circ.add_barrier([2])
circ.ZZPhase(0.3, 2, 3).CX(3, 0).Tdg(1)
circ.Measure(0, 0)
circ.Measure(1, 2)
circ.Measure(2, 3)
circ.Measure(3, 1)
circ_aqt = tk_to_aqt(circ)
assert json.loads(circ_aqt[1]) == [0, 3, 1, 2]
assert all(gate[0] in ["X", "Y", "MS"] for gate in circ_aqt[0])
def test_rebase_CX() -> None:
circ = Circuit(2)
circ.CX(0, 1)
orig_circ = circ.copy()
_aqt_rebase().apply(circ)
u1 = orig_circ.get_unitary()
u2 = circ.get_unitary()
assert np.allclose(u1, u2)
def | () -> None:
circ = Circuit(1)
# some arbitrary unitary
circ.add_gate(OpType.U3, [0.01231, 0.848, 38.200], [0])
orig_circ = circ.copy()
_aqt_rebase().apply(circ)
u1 = orig_circ.get_unitary()
u2 = circ.get_unitary()
assert np.allclose(u1, u2)
def test_rebase_large() -> None:
circ = Circuit(3)
# some arbitrary unitary
circ.Rx(0.21, 0).Rz(0.12, 1).Rz(8.2, 2).X(2).CX(0, 1).CX(1, 2).Rz(0.44, 1).Rx(
0.43, 0
)
orig_circ = circ.copy()
_aqt_rebase().apply(circ)
u1 = orig_circ.get_unitary()
u2 = circ.get_unitary()
assert np.allclose(u1, u2)
| test_rebase_singleq |
model_confirm_consumption_messages_response.go | package model
import (
"encoding/json"
"strings"
)
// Response Object
type ConfirmConsumptionMessagesResponse struct {
// Number of successfully acknowledged messages (a value of N means the first N messages were acknowledged successfully).
Success *int32 `json:"success,omitempty"`
// Number of messages that failed acknowledgment (a value of N means the last N messages failed to be acknowledged).
Fail *int32 `json:"fail,omitempty"`
HttpStatusCode int `json:"-"`
}
func (o ConfirmConsumptionMessagesResponse) String() string {
data, err := json.Marshal(o)
if err != nil {
return "ConfirmConsumptionMessagesResponse struct{}"
}
return strings.Join([]string{"ConfirmC | onsumptionMessagesResponse", string(data)}, " ")
}
|
|
main.rs | use std::{
env, fs,
path::{Path, PathBuf},
time::Instant,
};
const MIDI_DIR: &str = "../test-asset";
const MIDI_EXT: &[&str] = &["mid", "midi", "rmi"];
const PARSERS: &[(&str, fn(&Path) -> Result<usize, String>)] = &[
(&"midly", parse_midly),
(&"nom-midi", parse_nom),
(&"rimd", parse_rimd),
];
fn parse_midly(path: &Path) -> Result<usize, String> {
let data = fs::read(path).map_err(|err| format!("{}", err))?;
let smf = midly::Smf::parse(&data).map_err(|err| format!("{}", err))?;
Ok(smf.tracks.len())
}
fn parse_nom(path: &Path) -> Result<usize, String> {
let data = fs::read(path).map_err(|err| format!("{}", err))?;
let smf = nom_midi::parser::parse_smf(&data)
.map_err(|err| format!("{}", err))?
.1;
Ok(smf.tracks.len())
}
fn parse_rimd(path: &Path) -> Result<usize, String> {
let smf = rimd::SMF::from_file(path).map_err(|err| format!("{}", err))?;
Ok(smf.tracks.len())
}
fn list_midis(dir: &Path) -> Vec<PathBuf> {
let mut midis = Vec::new();
for entry in fs::read_dir(dir).unwrap() {
let path = entry.unwrap().path();
if MIDI_EXT
.iter()
.any(|ext| path.extension() == Some(ext.as_ref()))
{
midis.push(path);
}
}
midis
}
fn use_parser(parse: fn(&Path) -> Result<usize, String>, path: &Path) -> Result<(), String> {
let round = |num: f64| (num * 100.0).round() / 100.0;
let runtime = || -> Result<_, String> {
let start = Instant::now();
let out = parse(path)?;
let time = round((start.elapsed().as_micros() as f64) / 1000.0);
Ok((out, time))
};
let (track_count, cold_time) = runtime()?;
let runtime = || -> Result<_, String> {
let (out, time) = runtime()?;
assert_eq!(
out, track_count,
"parser is not consistent with track counts"
);
Ok(time)
};
let iters = (2000.0 / cold_time).floor() as u64 + 1;
let mut total_time = 0.0;
let mut max_time = cold_time;
let mut min_time = cold_time;
for _ in 0..iters {
let time = runtime()?;
total_time += time;
max_time = max_time.max(time);
min_time = min_time.min(time);
}
let avg_time = round(total_time / (iters as f64));
eprintln!(
"{} tracks in {} iters / min {} / avg {} / max {}",
track_count, iters, min_time, avg_time, max_time
);
Ok(())
}
fn main() {
let midi_filter = env::args().nth(1).unwrap_or_default().to_lowercase();
let parser_filter = env::args().nth(2).unwrap_or_default().to_lowercase();
let midi_dir = env::args().nth(3).unwrap_or(MIDI_DIR.to_string());
let parsers = PARSERS
.iter()
.filter(|(name, _)| name.contains(&parser_filter))
.collect::<Vec<_>>();
if parsers.is_empty() {
eprintln!("no parsers match the pattern \"{}\"", parser_filter);
eprint!("available parsers: ");
for (i, (name, _)) in PARSERS.iter().enumerate() {
if i > 0 {
eprint!(", ");
}
eprint!("{}", name);
}
}
let unfiltered_midis = list_midis(midi_dir.as_ref());
let midis = unfiltered_midis
.iter()
.filter(|midi| {
midi.file_name()
.unwrap_or_default()
.to_str()
.expect("non-utf8 file")
.to_lowercase()
.contains(&midi_filter)
})
.collect::<Vec<_>>();
if midis.is_empty() {
eprintln!("no midi files match the pattern \"{}\"", midi_filter);
eprintln!("available midi files:");
for file in unfiltered_midis.iter() {
eprintln!(" {}", file.display());
}
} else {
for midi in midis {
// Parse this file
eprintln!("parsing file \"{}\"", midi.display());
for &(name, parse) in parsers.iter() { | match use_parser(*parse, &midi) {
Ok(()) => {}
Err(_err) => {
eprintln!("parse error");
}
}
}
eprintln!();
}
}
} | eprint!(" {}: ", name); |
color.js | import { ACCURACY } from './variables.js';
const checkRange = (value, maxValue, label) => {
if (isNaN(value) || 0 > value || value > maxValue)
throw new RangeError(value + ' for ' + label + ' is not between 0 and ' + maxValue); | };
export class HSBColor {
constructor(hue, saturation, brightness, alpha = 1) {
this.hue = hue;
this.saturation = saturation;
this.value = brightness;
this.alpha = alpha;
checkRange(hue, 360, 'hue');
checkRange(saturation, 1, 'saturation');
checkRange(brightness, 1, 'value');
checkRange(alpha, 1, 'alpha');
};
}
export class HSLColor {
constructor(hue, saturation, lightness, alpha = 1) {
this.hue = hue;
this.saturation = saturation;
this.lightness = lightness;
this.alpha = alpha;
checkRange(hue, 360, 'hue');
checkRange(saturation, 1, 'saturation');
checkRange(lightness, 1, 'lightness');
checkRange(alpha, 1, 'alpha');
};
// rotate() is never called
rotate(hueAdjustment) {
return new HSLColor((this.hue + hueAdjustment + 360) % 360, this.saturation, this.lightness, this.alpha);
}
// toCSSValue() is never called
toCSSValue() {
return (
'hsla(' +
this.hue + ', ' +
100 * this.saturation + '%, ' +
100 * this.lightness + '%, ' + this.alpha +
')'
);
}
}
export class LABColor {
constructor(lightness, a, b, alpha = 1) {
this.lightness = lightness;
this.a = a;
this.b = b;
this.alpha = alpha;
checkRange(lightness, Number.MAX_VALUE, 'lightness');
checkRange(alpha, 1, 'alpha');
}
// equals() is never called
equals(a) {
return (
1e-4 > Math.abs(this.lightness - a.lightness) &&
1e-4 > Math.abs(this.a - a.a) &&
1e-4 > Math.abs(this.b - a.b) &&
Math.abs(this.alpha - a.alpha) < ACCURACY
);
}
};
export class LCHColor {
constructor(lightness, chroma, hue, alpha = 1) {
this.lightness = lightness;
this.chroma = chroma;
this.hue = hue;
this.alpha = alpha;
checkRange(lightness, Number.MAX_VALUE, 'lightness');
checkRange(chroma, Number.MAX_VALUE, 'chroma');
checkRange(hue, 360, 'hue');
checkRange(alpha, 1, 'alpha');
};
// equals() is never called
equals(a) {
return (
1e-4 > Math.abs(this.lightness - a.lightness) &&
1e-4 > Math.abs(this.chroma - a.chroma) &&
1e-4 > Math.abs(this.hue - a.hue) &&
Math.abs(this.alpha - a.alpha) < ACCURACY
);
}
}
export class RGBColor {
constructor(red, green, blue, alpha = 1) {
this.red = red;
this.green = green;
this.blue = blue;
this.alpha = alpha;
checkRange(red, 1, 'red');
checkRange(green, 1, 'green');
checkRange(blue, 1, 'blue');
checkRange(alpha, 1, 'alpha');
}
equals(rgbColor) {
return (
Math.abs(this.red - rgbColor.red) < ACCURACY &&
Math.abs(this.green - rgbColor.green) < ACCURACY &&
Math.abs(this.blue - rgbColor.blue) < ACCURACY &&
Math.abs(this.alpha - rgbColor.alpha) < ACCURACY
);
}
// toCSSValue() is never called
toCSSValue() {
return (
'rgba(' +
100 * this.red + '%, ' +
100 * this.green + '%, ' +
100 * this.blue + '%, ' +
this.alpha +
')'
);
}
}
export class XYZColor {
constructor(x, y, z, alpha = 1) {
this.x = x;
this.y = y;
this.z = z;
this.alpha = alpha;
};
} | |
swap_forget.rs | use std::mem;
use rand;
type NodeCell = Option<Box<Node>>;
struct Node {
x: i32,
y: i32,
left: NodeCell,
right: NodeCell,
}
impl Node {
fn new(x: i32) -> Self {
Self {
x,
y: rand::random::<i32>(),
left: None,
right: None,
}
}
}
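// `merge` joins two treaps in which every key of `lower` is smaller than every
// key of `greater`, choosing the root by heap priority (`y`). The swap/forget
// idiom below is safe because, after `mem::swap`, the swapped-out `merged`
// value is always `None` (its contents were `take`n earlier), so `mem::forget`
// only skips a no-op drop (compare the `debug_assert!`s in `split_binary`).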
fn merge(lower: NodeCell, greater: NodeCell) -> NodeCell {
match (lower, greater) {
(None, greater) => greater,
(lower, None) => lower,
(Some(mut lower_node), Some(mut greater_node)) => {
if lower_node.y < greater_node.y {
let mut merged = merge(lower_node.right.take(), Some(greater_node));
mem::swap(&mut lower_node.right, &mut merged);
mem::forget(merged);
Some(lower_node)
} else {
let mut merged = merge(Some(lower_node), greater_node.left.take());
mem::swap(&mut greater_node.left, &mut merged);
mem::forget(merged);
Some(greater_node)
}
}
}
}
fn split_binary(orig: NodeCell, value: i32) -> (NodeCell, NodeCell) {
if let Some(mut orig_node) = orig {
if orig_node.x < value {
let mut split_pair = split_binary(orig_node.right.take(), value);
mem::swap(&mut orig_node.right, &mut split_pair.0);
debug_assert!(split_pair.0.is_none());
mem::forget(split_pair.0);
(Some(orig_node), split_pair.1)
} else {
let mut split_pair = split_binary(orig_node.left.take(), value);
mem::swap(&mut orig_node.left, &mut split_pair.1);
debug_assert!(split_pair.1.is_none());
mem::forget(split_pair.1);
(split_pair.0, Some(orig_node))
}
} else {
(None, None)
}
}
fn merge3(lower: NodeCell, equal: NodeCell, greater: NodeCell) -> NodeCell {
merge(merge(lower, equal), greater)
}
struct SplitResult {
lower: NodeCell,
equal: NodeCell,
greater: NodeCell,
}
fn split(orig: NodeCell, value: i32) -> SplitResult {
let (lower, equal_greater) = split_binary(orig, value);
let (equal, greater) = split_binary(equal_greater, value + 1);
SplitResult {
lower,
equal,
greater,
}
}
pub struct Tree {
root: NodeCell,
}
impl Tree {
pub fn new() -> Self {
Self { root: None }
}
pub fn has_value(&mut self, x: i32) -> bool |
pub fn insert(&mut self, x: i32) {
let mut splited = split(self.root.take(), x);
if splited.equal.is_none() {
splited.equal = Some(Box::new(Node::new(x)));
}
self.root = merge3(splited.lower, splited.equal, splited.greater);
}
pub fn erase(&mut self, x: i32) {
let splited = split(self.root.take(), x);
self.root = merge(splited.lower, splited.greater);
}
}
| {
let splited = split(self.root.take(), x);
let res = splited.equal.is_some();
self.root = merge3(splited.lower, splited.equal, splited.greater);
res
} |
main.rs | mod book;
mod errors;
mod header;
mod lz77;
use book::{from_path_raw, parse_book};
const FILE_PATH: &str = "./data/ex.mobi";
pub fn | () {
let buffer = std::fs::read(FILE_PATH).expect("Couldn't read file");
let res = parse_book(&buffer).expect("Error");
println!("{}", res);
let _ = from_path_raw(FILE_PATH);
}
| main |
1.rs | fn main() {
let n: u32 = (1..1000) | } | .filter(|n| n % 3 == 0 || n % 5 == 0)
.sum();
println!("{}", n); |
limits.go | // Copyright 2016-2018 The NATS Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package stores
import (
"fmt"
"time"
"github.com/yanzongzhen/nats-streaming-server/util"
)
// Used for display of limits
const (
limitCount = iota
limitBytes
limitDuration
)
// Clone returns a copy of the store limits
func (sl *StoreLimits) Clone() *StoreLimits {
cloned := *sl
cloned.PerChannel = sl.ClonePerChannelMap()
return &cloned
}
// ClonePerChannelMap returns a deep copy of the StoreLimits's PerChannel map
func (sl *StoreLimits) ClonePerChannelMap() map[string]*ChannelLimits {
if sl.PerChannel == nil {
return nil
}
clone := make(map[string]*ChannelLimits, len(sl.PerChannel))
for k, v := range sl.PerChannel {
copyVal := *v
clone[k] = ©Val
}
return clone
}
// AddPerChannel stores limits for the given channel `name` in the StoreLimits.
// Inheritance (that is, specifying 0 for a limit means that the global limit
// should be used) is not applied in this call. This is done in StoreLimits.Build
// along with some validation.
func (sl *StoreLimits) AddPerChannel(name string, cl *ChannelLimits) {
if sl.PerChannel == nil {
sl.PerChannel = make(map[string]*ChannelLimits)
}
sl.PerChannel[name] = cl
}
type channelLimitInfo struct {
name string
limits *ChannelLimits
isLiteral bool
isProcessed bool
}
// Build sets the global limits into per-channel limits that are set
// to zero. This call also validates the limits. An error is returned if:
// * any global limit is set to a negative value.
// * the number of per-channel is higher than StoreLimits.MaxChannels.
// * a per-channel name is invalid
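//
// A minimal usage sketch (the channel name and values are illustrative only,
// not taken from a real configuration):
//
//	sl := &StoreLimits{}
//	sl.MaxChannels = 10
//	cl := &ChannelLimits{}
//	cl.MaxMsgs = 1000 // limits left at 0 are inherited from the global limits
//	sl.AddPerChannel("foo.>", cl)
//	if err := sl.Build(); err != nil {
//		// negative limits, too many channels, or an invalid channel name
//	}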
func (sl *StoreLimits) Build() error {
// Check that there is no negative value
if err := sl.checkGlobalLimits(); err != nil {
return err
}
// If there is no per-channel, we are done.
if len(sl.PerChannel) == 0 {
return nil
}
literals := 0
sublist := util.NewSublist()
for cn, cl := range sl.PerChannel {
if !util.IsChannelNameValid(cn, true) {
return fmt.Errorf("invalid channel name %q", cn)
}
isLiteral := util.IsChannelNameLiteral(cn)
if isLiteral {
literals++
if sl.MaxChannels > 0 && literals > sl.MaxChannels {
return fmt.Errorf("too many channels defined (%v). The max channels limit is set to %v",
literals, sl.MaxChannels)
}
}
cli := &channelLimitInfo{
name: cn,
limits: cl,
isLiteral: isLiteral,
}
sublist.Insert(cn, cli)
}
// If we are here, it means that there was no error,
// so we now apply inheritance.
sl.applyInheritance(sublist)
return nil
}
func (sl *StoreLimits) applyInheritance(sublist *util.Sublist) {
// Get the subjects from the sublist. This ensure that they are ordered
// from the widest to the narrowest of subjects.
channels := sublist.Subjects()
for _, cn := range channels {
r := sublist.Match(cn)
// There has to be at least 1 match (the current channel name we
// are trying to match).
channel := r[0].(*channelLimitInfo)
if channel.isLiteral && channel.isProcessed {
continue
}
if !channel.isProcessed {
sl.inheritLimits(channel, &sl.ChannelLimits)
}
prev := channel
for i := 1; i < len(r); i++ {
channel = r[i].(*channelLimitInfo)
if !channel.isProcessed {
sl.inheritLimits(channel, prev.limits)
}
prev = channel
}
}
}
func (sl *StoreLimits) inheritLimits(channel *channelLimitInfo, parentLimits *ChannelLimits) {
cl := channel.limits
if cl.MaxSubscriptions < 0 {
cl.MaxSubscriptions = 0
} else if cl.MaxSubscriptions == 0 {
cl.MaxSubscriptions = parentLimits.MaxSubscriptions
}
if cl.MaxMsgs < 0 {
cl.MaxMsgs = 0
} else if cl.MaxMsgs == 0 {
cl.MaxMsgs = parentLimits.MaxMsgs
}
if cl.MaxBytes < 0 {
cl.MaxBytes = 0
} else if cl.MaxBytes == 0 {
cl.MaxBytes = parentLimits.MaxBytes
}
if cl.MaxAge < 0 {
cl.MaxAge = 0
} else if cl.MaxAge == 0 {
cl.MaxAge = parentLimits.MaxAge
}
if cl.MaxInactivity < 0 {
cl.MaxInactivity = 0
} else if cl.MaxInactivity == 0 {
cl.MaxInactivity = parentLimits.MaxInactivity
}
channel.isProcessed = true
}
func (sl *StoreLimits) checkGlobalLimits() error {
if sl.MaxChannels < 0 {
return fmt.Errorf("max channels limit cannot be negative (%v)", sl.MaxChannels)
}
if sl.MaxSubscriptions < 0 {
return fmt.Errorf("max subscriptions limit cannot be negative (%v)", sl.MaxSubscriptions)
}
if sl.MaxMsgs < 0 {
return fmt.Errorf("max messages limit cannot be negative (%v)", sl.MaxMsgs)
}
if sl.MaxBytes < 0 {
return fmt.Errorf("max bytes limit cannot be negative (%v)", sl.MaxBytes)
}
if sl.MaxAge < 0 {
return fmt.Errorf("max age limit cannot be negative (%v)", sl.MaxAge)
}
if sl.MaxInactivity < 0 {
return fmt.Errorf("max inactivity limit cannot be negative (%v)", sl.MaxInactivity)
}
return nil
}
// Print returns an array of strings suitable for printing the store limits.
func (sl *StoreLimits) Print() []string {
sublist := util.NewSublist()
for cn, cl := range sl.PerChannel {
sublist.Insert(cn, &channelLimitInfo{
name: cn,
limits: cl,
isLiteral: util.IsChannelNameLiteral(cn),
})
}
maxLevels := sublist.NumLevels()
txt := []string{}
title := "---------- Store Limits ----------"
txt = append(txt, title)
txt = append(txt, fmt.Sprintf("Channels: %s",
getLimitStr(true, int64(sl.MaxChannels),
int64(DefaultStoreLimits.MaxChannels),
limitCount)))
maxLen := len(title)
txt = append(txt, "--------- Channels Limits --------")
txt = append(txt, getGlobalLimitsPrintLines(&sl.ChannelLimits)...)
if len(sl.PerChannel) > 0 {
channels := sublist.Subjects()
channelLines := []string{}
for _, cn := range channels {
r := sublist.Match(cn)
var prev *channelLimitInfo
for i := 0; i < len(r); i++ {
channel := r[i].(*channelLimitInfo)
if channel.name == cn {
var parentLimits *ChannelLimits
if prev == nil {
parentLimits = &sl.ChannelLimits
} else {
parentLimits = prev.limits
}
channelLines = append(channelLines,
getChannelLimitsPrintLines(i, maxLevels, &maxLen, channel.name, channel.limits, parentLimits)...)
break
}
prev = channel
}
}
title := " List of Channels "
numberDashesLeft := (maxLen - len(title)) / 2
numberDashesRight := maxLen - len(title) - numberDashesLeft
title = fmt.Sprintf("%s%s%s",
repeatChar("-", numberDashesLeft),
title,
repeatChar("-", numberDashesRight))
txt = append(txt, title)
txt = append(txt, channelLines...)
}
txt = append(txt, repeatChar("-", maxLen))
return txt
}
func getLimitStr(isGlobal bool, val, parentVal int64, limitType int) string {
valStr := ""
inherited := ""
if !isGlobal && (val == parentVal) {
return ""
}
if val == parentVal {
inherited = " *"
}
if val == 0 {
valStr = "unlimited"
} else {
switch limitType {
case limitBytes:
valStr = util.FriendlyBytes(val)
case limitDuration:
valStr = fmt.Sprintf("%v", time.Duration(val))
default:
valStr = fmt.Sprintf("%v", val)
}
}
return fmt.Sprintf("%13s%s", valStr, inherited)
}
func getGlobalLimitsPrintLines(limits *ChannelLimits) []string {
defaultLimits := &DefaultStoreLimits
defMaxSubs := int64(defaultLimits.MaxSubscriptions)
defMaxMsgs := int64(defaultLimits.MaxMsgs)
defMaxBytes := defaultLimits.MaxBytes
defMaxAge := defaultLimits.MaxAge
defMaxInactivity := defaultLimits.MaxInactivity
txt := []string{}
txt = append(txt, fmt.Sprintf(" Subscriptions: %s", getLimitStr(true, int64(limits.MaxSubscriptions), defMaxSubs, limitCount)))
txt = append(txt, fmt.Sprintf(" Messages : %s", getLimitStr(true, int64(limits.MaxMsgs), defMaxMsgs, limitCount)))
txt = append(txt, fmt.Sprintf(" Bytes : %s", getLimitStr(true, limits.MaxBytes, defMaxBytes, limitBytes)))
txt = append(txt, fmt.Sprintf(" Age : %s", getLimitStr(true, int64(limits.MaxAge), int64(defMaxAge), limitDuration)))
txt = append(txt, fmt.Sprintf(" Inactivity : %s", getLimitStr(true, int64(limits.MaxInactivity), int64(defMaxInactivity), limitDuration)))
return txt
}
func getChannelLimitsPrintLines(level, maxLevels int, maxLen *int, channelName string, limits, parentLimits *ChannelLimits) []string {
plMaxSubs := int64(parentLimits.MaxSubscriptions)
plMaxMsgs := int64(parentLimits.MaxMsgs)
plMaxBytes := parentLimits.MaxBytes
plMaxAge := parentLimits.MaxAge
plMaxInactivity := parentLimits.MaxInactivity
maxSubsOverride := getLimitStr(false, int64(limits.MaxSubscriptions), plMaxSubs, limitCount)
maxMsgsOverride := getLimitStr(false, int64(limits.MaxMsgs), plMaxMsgs, limitCount)
maxBytesOverride := getLimitStr(false, limits.MaxBytes, plMaxBytes, limitBytes)
maxAgeOverride := getLimitStr(false, int64(limits.MaxAge), int64(plMaxAge), limitDuration)
MaxInactivityOverride := getLimitStr(false, int64(limits.MaxInactivity), int64(plMaxInactivity), limitDuration)
paddingLeft := repeatChar(" ", level)
paddingRight := repeatChar(" ", maxLevels-level)
txt := []string{}
txt = append(txt, fmt.Sprintf("%s%s", paddingLeft, channelName))
if maxSubsOverride != "" {
txt = append(txt, fmt.Sprintf("%s |-> Subscriptions %s%s", paddingLeft, paddingRight, maxSubsOverride))
}
if maxMsgsOverride != "" {
txt = append(txt, fmt.Sprintf("%s |-> Messages %s%s", paddingLeft, paddingRight, maxMsgsOverride))
}
if maxBytesOverride != "" {
txt = append(txt, fmt.Sprintf("%s |-> Bytes %s%s", paddingLeft, paddingRight, maxBytesOverride))
}
if maxAgeOverride != "" {
txt = append(txt, fmt.Sprintf("%s |-> Age %s%s", paddingLeft, paddingRight, maxAgeOverride))
}
if MaxInactivityOverride != "" {
txt = append(txt, fmt.Sprintf("%s |-> Inactivity %s%s", paddingLeft, paddingRight, MaxInactivityOverride))
}
for _, l := range txt {
if len(l) > *maxLen {
*maxLen = len(l)
}
}
return txt
}
func repeatChar(char string, len int) string {
res := ""
for i := 0; i < len; i++ {
res += char
}
return res
} | //
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, |
webpack-runtime-2fe4800ebbc750d1de13.js | !function(e){function t(t){for(var o,s,c=t[0],u=t[1],i=t[2],l=0,p=[];l<c.length;l++)s=c[l],Object.prototype.hasOwnProperty.call(r,s)&&r[s]&&p.push(r[s][0]),r[s]=0;for(o in u)Object.prototype.hasOwnProperty.call(u,o)&&(e[o]=u[o]);for(d&&d(t);p.length;)p.shift()();return a.push.apply(a,i||[]),n()}function n(){for(var e,t=0;t<a.length;t++){for(var n=a[t],o=!0,c=1;c<n.length;c++){var u=n[c];0!==r[u]&&(o=!1)}o&&(a.splice(t--,1),e=s(s.s=n[0]))}return e}var o={},r={9:0},a=[];function s(t){if(o[t])return o[t].exports;var n=o[t]={i:t,l:!1,exports:{}};return e[t].call(n.exports,n,n.exports,s),n.l=!0,n.exports}s.e=function(e){var t=[],n=r[e];if(0!==n)if(n)t.push(n[2]);else{var o=new Promise((function(t,o){n=r[e]=[t,o]}));t.push(n[2]=o);var a,c=document.createElement("script");c.charset="utf-8",c.timeout=120,s.nc&&c.setAttribute("nonce",s.nc),c.src=function(e){return s.p+""+({2:"component---node-modules-gatsby-theme-devgiangho-src-pages-404-tsx",3:"component---node-modules-gatsby-theme-devgiangho-src-pages-archive-tsx",4:"component---node-modules-gatsby-theme-devgiangho-src-pages-tags-tsx",5:"component---node-modules-gatsby-theme-devgiangho-src-templates-page-tsx",6:"component---node-modules-gatsby-theme-devgiangho-src-templates-post-tsx",7:"component---node-modules-gatsby-theme-devgiangho-src-templates-posts-tsx",8:"component---node-modules-gatsby-theme-devgiangho-src-templates-tag-tsx"}[e]||e)+"-"+{2:"75d94990eade131c59a9",3:"3db819236aee3cfde79f",4:"0609cb3af77996f71884",5:"1edd7412f6c55c742061",6:"d2b5514f3a767efcd7c8",7:"7ec1d87ca7a863597c0f",8:"e1ae27783d8b3f6a96b1"}[e]+".js"}(e);var u=new Error;a=function(t){c.onerror=c.onload=null,clearTimeout(i);var n=r[e];if(0!==n){if(n){var o=t&&("load"===t.type?"missing":t.type),a=t&&t.target&&t.target.src;u.message="Loading chunk "+e+" failed.\n("+o+": "+a+")",u.name="ChunkLoadError",u.type=o,u.request=a,n[1](u)}r[e]=void 0}};var i=setTimeout((function(){a({type:"timeout",target:c})}),12e4);c.onerror=c.onload=a,document.head.appendChild(c)}return Promise.all(t)},s.m=e,s.c=o,s.d=function(e,t,n){s.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},s.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},s.t=function(e,t){if(1&t&&(e=s(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(s.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)s.d(n,o,function(t){return e[t]}.bind(null,o));return n},s.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return s.d(t,"a",t),t},s.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},s.p="/",s.oe=function(e){throw console.error(e),e};var c=window.webpackJsonp=window.webpackJsonp||[],u=c.push.bind(c);c.push=t,c=c.slice();for(var i=0;i<c.length;i++)t(c[i]);var d=u;n()}([]);
//# sourceMappingURL=webpack-runtime-2fe4800ebbc750d1de13.js.map |
||
server.py | from concurrent import futures
import time
import grpc
import app.helloworld_pb2 as helloworld_pb2
import app.helloworld_pb2_grpc as helloworld_pb2_grpc
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
class Greeter(helloworld_pb2_grpc.GreeterServicer):
|
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
server.add_insecure_port('[::]:50051')
server.start()
try:
while True:
time.sleep(_ONE_DAY_IN_SECONDS)
except KeyboardInterrupt:
server.stop(0)
if __name__ == '__main__':
serve()
| def Greet(self, request, context):
print('Saying `hello` to %s' % request.name)
return helloworld_pb2.GreetResponse(message='Hello, {}!'.format(request.name)) |
Exercise_3.py | # A set of intervals [(a[1], b[1]), ..., (a[n], b[n])] is given, each interval contained in [0, 1]. Describe an algorithm
# that checks whether intervals can be selected so that the whole interval [0, 1] is contained
# in the chosen segments. The cover should consist of as few segments as possible.
def minimum_intervals(T):
T.sort(key=lambda x: x | 0.35], [0.2, 0.6], [0.4, 0.6], [0.5, 0.6], [0.1, 0.9], [0.85, 1], [0.9, 1],
[0.3, 0.4], [0.35, 0.4], [0.2, 0.75], [0.4, 1], [0.55, 1], [0.6, 1], [0.9, 1]]
print(minimum_intervals(T))
| [0])
i = 0
end = 0
result = []
while i < len(T) and end != 1:
actual_start = T[i][0]
actual_end = T[i][1]
flag = True
while i != len(T) and T[i][0] <= end:
if actual_end < T[i][1]:
actual_start = T[i][0]
actual_end = T[i][1]
i += 1
flag = False
if flag:
i += 1
result.append((actual_start, actual_end))
end = actual_end
return result
T = [[0, 0.4], [0, |
request.rs | pub struct | {
// Method specifies the HTTP method (GET, POST, PUT, etc.).
// For client requests, an empty string means GET.
pub method: String,
// URL specifies either the URI being requested (for server
// requests) or the URL to access (for client requests).
//
// For server requests, the URL is parsed from the URI
// supplied on the Request-Line as stored in RequestURI. For
// most requests, fields other than Path and RawQuery will be
// empty. (See RFC 7230, Section 5.3)
//
// For client requests, the URL's Host specifies the server to
// connect to, while the Request's Host field optionally
// specifies the Host header value to send in the HTTP
// request.
// pub URL: URL,
// The protocol version for incoming server requests.
//
// For client requests, these fields are ignored. The HTTP
// client code always uses either HTTP/1.1 or HTTP/2.
// See the docs on Transport for details.
pub proto: String, // "HTTP/1.0"
pub proto_major: u8, // 1
pub proto_minor: u8, // 0
// Header contains the request header fields either received
// by the server or to be sent by the client.
//
// If a server received a request with header lines,
//
// Host: example.com
// accept-encoding: gzip, deflate
// Accept-Language: en-us
// fOO: Bar
// foo: two
//
// then
//
// Header = map[string][]string{
// "Accept-Encoding": {"gzip, deflate"},
// "Accept-Language": {"en-us"},
// "Foo": {"Bar", "two"},
// }
//
// For incoming requests, the Host header is promoted to the
// Request.Host field and removed from the Header map.
//
// HTTP defines that header names are case-insensitive. The
// request parser implements this by using CanonicalHeaderKey,
// making the first character and any characters following a
// hyphen uppercase and the rest lowercase.
//
// For client requests, certain headers such as Content-Length
// and Connection are automatically written when needed and
// values in Header may be ignored. See the documentation
// for the Request.Write method.
// Header: Header,
// Body is the request's body.
//
// For client requests, a nil body means the request has no
// body, such as a GET request. The HTTP Client's Transport
// is responsible for calling the Close method.
//
// For server requests, the Request Body is always non-nil
// but will return EOF immediately when no body is present.
// The Server will close the request body. The ServeHTTP
// Handler does not need to.
// Body io.ReadCloser
// GetBody defines an optional func to return a new copy of
// Body. It is used for client requests when a redirect requires
// reading the body more than once. Use of GetBody still
// requires setting Body.
//
// For server requests, it is unused.
// GetBody func() (io.ReadCloser, error)
// ContentLength records the length of the associated content.
// The value -1 indicates that the length is unknown.
// Values >= 0 indicate that the given number of bytes may
// be read from Body.
//
// For client requests, a value of 0 with a non-nil Body is
// also treated as unknown.
pub content_length: i64,
// TransferEncoding lists the transfer encodings from outermost to
// innermost. An empty list denotes the "identity" encoding.
// TransferEncoding can usually be ignored; chunked encoding is
// automatically added and removed as necessary when sending and
// receiving requests.
// TransferEncoding []string
// Close indicates whether to close the connection after
// replying to this request (for servers) or after sending this
// request and reading its response (for clients).
//
// For server requests, the HTTP server handles this automatically
// and this field is not needed by Handlers.
//
// For client requests, setting this field prevents re-use of
// TCP connections between requests to the same hosts, as if
// Transport.DisableKeepAlives were set.
pub close: bool,
// For server requests, Host specifies the host on which the URL
// is sought. Per RFC 7230, section 5.4, this is either the value
// of the "Host" header or the host name given in the URL itself.
// It may be of the form "host:port". For international domain
// names, Host may be in Punycode or Unicode form. Use
// golang.org/x/net/idna to convert it to either format if
// needed.
// To prevent DNS rebinding attacks, server Handlers should
// validate that the Host header has a value for which the
// Handler considers itself authoritative. The included
// ServeMux supports patterns registered to particular host
// names and thus protects its registered Handlers.
//
// For client requests, Host optionally overrides the Host
// header to send. If empty, the Request.Write method uses
// the value of URL.Host. Host may contain an international
// domain name.
pub host: String,
// Form contains the parsed form data, including both the URL
// field's query parameters and the POST or PUT form data.
// This field is only available after ParseForm is called.
// The HTTP client ignores Form and uses Body instead.
// Form url.Values
// PostForm contains the parsed form data from POST, PATCH,
// or PUT body parameters.
//
// This field is only available after ParseForm is called.
// The HTTP client ignores PostForm and uses Body instead.
// PostForm url.Values
// MultipartForm is the parsed multipart form, including file uploads.
// This field is only available after ParseMultipartForm is called.
// The HTTP client ignores MultipartForm and uses Body instead.
// MultipartForm *multipart.Form
// Trailer specifies additional headers that are sent after the request
// body.
//
// For server requests, the Trailer map initially contains only the
// trailer keys, with nil values. (The client declares which trailers it
// will later send.) While the handler is reading from Body, it must
// not reference Trailer. After reading from Body returns EOF, Trailer
// can be read again and will contain non-nil values, if they were sent
// by the client.
//
// For client requests, Trailer must be initialized to a map containing
// the trailer keys to later send. The values may be nil or their final
// values. The ContentLength must be 0 or -1, to send a chunked request.
// After the HTTP request is sent the map values can be updated while
// the request body is read. Once the body returns EOF, the caller must
// not mutate Trailer.
//
// Few HTTP clients, servers, or proxies support HTTP trailers.
// Trailer Header
// RemoteAddr allows HTTP servers and other software to record
// the network address that sent the request, usually for
// logging. This field is not filled in by ReadRequest and
// has no defined format. The HTTP server in this package
// sets RemoteAddr to an "IP:port" address before invoking a
// handler.
// This field is ignored by the HTTP client.
pub remote_addr: String,
// RequestURI is the unmodified request-target of the
// Request-Line (RFC 7230, Section 3.1.1) as sent by the client
// to a server. Usually the URL field should be used instead.
// It is an error to set this field in an HTTP client request.
    pub request_uri: String,
    // TLS allows HTTP servers and other software to record
// information about the TLS connection on which the request
// was received. This field is not filled in by ReadRequest.
// The HTTP server in this package sets the field for
// TLS-enabled connections before invoking a handler;
// otherwise it leaves the field nil.
// This field is ignored by the HTTP client.
// TLS *tls.ConnectionState
// Cancel is an optional channel whose closure indicates that the client
// request should be regarded as canceled. Not all implementations of
// RoundTripper may support Cancel.
//
// For server requests, this field is not applicable.
//
// Deprecated: Use the Context and WithContext methods
// instead. If a Request's Cancel field and context are both
// set, it is undefined whether Cancel is respected.
// Cancel <-chan struct{}
// Response is the redirect response which caused this request
// to be created. This field is only populated during client
// redirects.
// Response *Response
// ctx is either the client or server context. It should only
// be modified via copying the whole Request using WithContext.
// It is unexported to prevent people from using Context wrong
// and mutating the contexts held by callers of the same request.
// ctx context.Context
}
| Request |
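The row above ports Go's net/http Request documentation onto a Rust struct. As a hedged construction sketch — assuming the FIM hole resolves to `pub struct Request` and that the five uncommented public fields visible in this excerpt are the only required initializers (fields outside the excerpt would also need values):

fn example_request() -> Request {
    Request {
        content_length: -1,                        // unknown length; read body until EOF
        close: false,                              // keep the connection reusable
        host: String::from("example.com:8080"),    // "Host" header or URL host
        remote_addr: String::new(),                // set by the server, not by ReadRequest
        request_uri: String::from("/index.html"),  // raw request-target; prefer the URL field
    }
}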
service.rs | use crate::error::Closed;
use crate::InFlight;
use linkerd2_channel as mpsc;
use linkerd2_error::Error;
use std::task::{Context, Poll};
use std::{future::Future, pin::Pin};
use tokio::sync::oneshot;
pub struct Buffer<Req, Rsp> {
/// The queue on which in-flight requests are sent to the inner service.
tx: mpsc::Sender<InFlight<Req, Rsp>>,
}
// === impl Buffer ===
impl<Req, Rsp> Buffer<Req, Rsp> {
pub(crate) fn | (tx: mpsc::Sender<InFlight<Req, Rsp>>) -> Self {
Self { tx }
}
}
impl<Req, Rsp> tower::Service<Req> for Buffer<Req, Rsp>
where
Rsp: Send + 'static,
{
type Response = Rsp;
type Error = Error;
type Future = Pin<Box<dyn Future<Output = Result<Rsp, Error>> + Send + 'static>>;
fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
self.tx.poll_ready(cx).map_err(Into::into)
}
fn call(&mut self, request: Req) -> Self::Future {
let (tx, rx) = oneshot::channel();
self.tx
.try_send(InFlight { request, tx })
.expect("poll_ready must be called");
Box::pin(async move { rx.await.map_err(|_| Closed(()))??.await })
}
}
impl<Req, Rsp> Clone for Buffer<Req, Rsp> {
fn clone(&self) -> Self {
Self::new(self.tx.clone())
}
}
| new |
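This Buffer hands each request to a background worker over the mpsc channel and resolves the response through a oneshot. A minimal caller sketch, assuming the worker task draining the channel is already spawned (it is not shown in this row) and that the futures crate is available for poll_fn; the `send` helper name is hypothetical:

use futures::future::poll_fn;
use tower::Service;

async fn send<Req, Rsp>(buffer: &mut Buffer<Req, Rsp>, req: Req) -> Result<Rsp, Error>
where
    Rsp: Send + 'static,
{
    // The `expect` in `call` makes this ordering mandatory: poll_ready
    // reserves channel capacity before try_send is allowed to run.
    poll_fn(|cx| buffer.poll_ready(cx)).await?;
    buffer.call(req).await
}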
index.tsx | import styles from './styles.module.scss';
export function | () {
return(
<div className={styles.playerContainer}>
<header>
<img src="/playing.svg" alt="Tocando agora"/>
<strong>Tocando agora</strong>
</header>
<div className={styles.emptyPlayer}>
<strong>Selecione um podcast para ouvir</strong>
</div>
<footer className={styles.empty}>
<div className={styles.progress}>
<span>00:00</span>
<div className={styles.slider}>
<div className={styles.emptySlider} />
</div>
<span>00:00</span>
</div>
<div className={styles.buttons}>
<button type="button">
<img src="/shuffle.svg" alt="Embaralhar"/>
</button>
<button type="button">
<img src="/play-previous.svg" alt="Tocar anterior"/>
</button>
<button type="button" className={styles.playButton}>
<img src="/play.svg" alt="Tocar"/>
</button>
<button type="button">
<img src="/play-next.svg" alt="Tocar próxima"/>
</button>
<button type="button">
<img src="/repeat.svg" alt="Repetir"/>
</button>
</div>
</footer>
</div>
);
}
| Player |