file_name (stringlengths 3-137) | prefix (stringlengths 0-918k) | suffix (stringlengths 0-962k) | middle (stringlengths 0-812k)
---|---|---|---|
privateEndpoint.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20190801
import (
"context"
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
type PrivateEndpoint struct {
pulumi.CustomResourceState
Etag pulumi.StringPtrOutput `pulumi:"etag"`
Location pulumi.StringPtrOutput `pulumi:"location"`
ManualPrivateLinkServiceConnections PrivateLinkServiceConnectionResponseArrayOutput `pulumi:"manualPrivateLinkServiceConnections"`
Name pulumi.StringOutput `pulumi:"name"`
NetworkInterfaces NetworkInterfaceResponseArrayOutput `pulumi:"networkInterfaces"`
PrivateLinkServiceConnections PrivateLinkServiceConnectionResponseArrayOutput `pulumi:"privateLinkServiceConnections"`
ProvisioningState pulumi.StringOutput `pulumi:"provisioningState"`
Subnet SubnetResponsePtrOutput `pulumi:"subnet"`
Tags pulumi.StringMapOutput `pulumi:"tags"`
Type pulumi.StringOutput `pulumi:"type"`
}
// NewPrivateEndpoint registers a new resource with the given unique name, arguments, and options.
func NewPrivateEndpoint(ctx *pulumi.Context,
name string, args *PrivateEndpointArgs, opts ...pulumi.ResourceOption) (*PrivateEndpoint, error) {
if args == nil {
return nil, errors.New("missing one or more required arguments")
}
if args.ResourceGroupName == nil {
return nil, errors.New("invalid value for required argument 'ResourceGroupName'")
}
aliases := pulumi.Aliases([]pulumi.Alias{
{
Type: pulumi.String("azure-nextgen:network/v20190801:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20180801:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20180801:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20181001:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20181001:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20181101:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20181101:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20181201:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20181201:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20190201:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20190201:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20190401:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20190401:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20190601:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20190601:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20190701:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20190701:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20190901:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20190901:PrivateEndpoint"),
}, | {
Type: pulumi.String("azure-native:network/v20191101:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20191101:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20191201:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20191201:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20200301:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200301:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20200401:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200401:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20200501:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200501:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20200601:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200601:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20200701:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200701:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20200801:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200801:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20201101:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20201101:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20210201:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20210201:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-native:network/v20210301:PrivateEndpoint"),
},
{
Type: pulumi.String("azure-nextgen:network/v20210301:PrivateEndpoint"),
},
})
opts = append(opts, aliases)
var resource PrivateEndpoint
err := ctx.RegisterResource("azure-native:network/v20190801:PrivateEndpoint", name, args, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// GetPrivateEndpoint gets an existing PrivateEndpoint resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetPrivateEndpoint(ctx *pulumi.Context,
name string, id pulumi.IDInput, state *PrivateEndpointState, opts ...pulumi.ResourceOption) (*PrivateEndpoint, error) {
var resource PrivateEndpoint
err := ctx.ReadResource("azure-native:network/v20190801:PrivateEndpoint", name, id, state, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// Input properties used for looking up and filtering PrivateEndpoint resources.
type privateEndpointState struct {
}
type PrivateEndpointState struct {
}
func (PrivateEndpointState) ElementType() reflect.Type {
return reflect.TypeOf((*privateEndpointState)(nil)).Elem()
}
type privateEndpointArgs struct {
Etag *string `pulumi:"etag"`
Id *string `pulumi:"id"`
Location *string `pulumi:"location"`
ManualPrivateLinkServiceConnections []PrivateLinkServiceConnection `pulumi:"manualPrivateLinkServiceConnections"`
PrivateEndpointName *string `pulumi:"privateEndpointName"`
PrivateLinkServiceConnections []PrivateLinkServiceConnection `pulumi:"privateLinkServiceConnections"`
ResourceGroupName string `pulumi:"resourceGroupName"`
Subnet *SubnetType `pulumi:"subnet"`
Tags map[string]string `pulumi:"tags"`
}
// The set of arguments for constructing a PrivateEndpoint resource.
type PrivateEndpointArgs struct {
Etag pulumi.StringPtrInput
Id pulumi.StringPtrInput
Location pulumi.StringPtrInput
ManualPrivateLinkServiceConnections PrivateLinkServiceConnectionArrayInput
PrivateEndpointName pulumi.StringPtrInput
PrivateLinkServiceConnections PrivateLinkServiceConnectionArrayInput
ResourceGroupName pulumi.StringInput
Subnet SubnetTypePtrInput
Tags pulumi.StringMapInput
}
func (PrivateEndpointArgs) ElementType() reflect.Type {
return reflect.TypeOf((*privateEndpointArgs)(nil)).Elem()
}
type PrivateEndpointInput interface {
pulumi.Input
ToPrivateEndpointOutput() PrivateEndpointOutput
ToPrivateEndpointOutputWithContext(ctx context.Context) PrivateEndpointOutput
}
func (*PrivateEndpoint) ElementType() reflect.Type {
return reflect.TypeOf((*PrivateEndpoint)(nil))
}
func (i *PrivateEndpoint) ToPrivateEndpointOutput() PrivateEndpointOutput {
return i.ToPrivateEndpointOutputWithContext(context.Background())
}
func (i *PrivateEndpoint) ToPrivateEndpointOutputWithContext(ctx context.Context) PrivateEndpointOutput {
return pulumi.ToOutputWithContext(ctx, i).(PrivateEndpointOutput)
}
type PrivateEndpointOutput struct{ *pulumi.OutputState }
func (PrivateEndpointOutput) ElementType() reflect.Type {
return reflect.TypeOf((*PrivateEndpoint)(nil))
}
func (o PrivateEndpointOutput) ToPrivateEndpointOutput() PrivateEndpointOutput {
return o
}
func (o PrivateEndpointOutput) ToPrivateEndpointOutputWithContext(ctx context.Context) PrivateEndpointOutput {
return o
}
func init() {
pulumi.RegisterOutputType(PrivateEndpointOutput{})
} | |
abi.rs | use super::{FunctionDeclaration, TraitFn};
use crate::{build_config::BuildConfig, error::*, parse_tree::ident, parser::Rule};
use sway_types::{ident::Ident, span::Span};
use pest::iterators::Pair;
/// An `abi` declaration, which declares an interface for a contract
/// to implement or for a caller to use to call a contract.
#[derive(Debug, Clone)]
pub struct AbiDeclaration {
/// The name of the abi trait (also known as a "contract trait")
pub(crate) name: Ident,
/// The methods a contract is required to implement in order to opt in to this interface
pub(crate) interface_surface: Vec<TraitFn>,
/// The methods provided to a contract "for free" upon opting in to this interface | pub(crate) span: Span,
}
impl AbiDeclaration {
pub(crate) fn parse_from_pair(
pair: Pair<Rule>,
config: Option<&BuildConfig>,
) -> CompileResult<Self> {
let span = Span {
span: pair.as_span(),
path: config.map(|c| c.path()),
};
let mut iter = pair.into_inner();
let mut warnings = Vec::new();
let mut errors = Vec::new();
let _abi_keyword = iter.next().expect("guaranteed by grammar");
let name = check!(
ident::parse_from_pair(iter.next().expect("guaranteed by grammar"), config),
return err(warnings, errors),
warnings,
errors
);
let mut interface_surface = vec![];
let mut methods = vec![];
let trait_methods = iter.next().expect("guaranteed by grammar");
for func in trait_methods.into_inner() {
match func.as_rule() {
Rule::fn_signature => {
let fn_sig = check!(
TraitFn::parse_from_pair(func, config),
continue,
warnings,
errors
);
interface_surface.push(fn_sig);
}
Rule::fn_decl => methods.push(check!(
FunctionDeclaration::parse_from_pair(func, config),
continue,
warnings,
errors
)),
x => unreachable!("guaranteed to not be here: {:?}", x),
}
}
ok(
AbiDeclaration {
methods,
interface_surface,
name,
span,
},
warnings,
errors,
)
}
} | pub(crate) methods: Vec<FunctionDeclaration>, |
DetailsTwoTone.js | "use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = void 0;
var React = _interopRequireWildcard(require("react"));
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));
| }), /*#__PURE__*/React.createElement("path", {
d: "M3 4l9 16 9-16H3zm3.38 2h11.25L12 16 6.38 6z"
})), 'DetailsTwoTone');
exports["default"] = _default; | var _default = (0, _createSvgIcon["default"])( /*#__PURE__*/React.createElement(React.Fragment, null, /*#__PURE__*/React.createElement("path", {
d: "M6.38 6L12 16l5.63-10z",
opacity: ".3" |
cli_parser.py | import subprocess
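# Thin wrappers around subprocess.Popen for running shell commands; the helpers
# below return the command's return code, stdout and stderr respectively.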
def | (argument_string):
p = subprocess.Popen(argument_string,shell=True,stdout=subprocess.PIPE)
print str(p.communicate()[0])
p.wait()
rc = p.returncode
return rc
def cli_output(argument_string):
p = subprocess.Popen(argument_string,shell=True,stdout=subprocess.PIPE)
out = str(p.communicate()[0])
p.wait()
return out
def cli_error(argument_string):
p = subprocess.Popen(argument_string,shell=True,stderr=subprocess.PIPE)
err = str(p.communicate()[1])
p.wait()
return err
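# Parses '|'-delimited tabular CLI output and returns the value in the third
# column of the row whose second column equals key_attr.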
def cli_response_parser(cli_resp, key_attr):
arrResp = cli_resp.splitlines()
for j in range(0, len(arrResp)):
arrL = arrResp[j].split("|")
for i in range(0, len(arrL)):
arrL[i] = arrL[i].rstrip()
arrL[i] = arrL[i].lstrip()
if(len(arrL) > 1):
if(arrL[1] == key_attr):
return arrL[2]
| cli_returncode |
client.go | // Code generated by goa v3.2.3, DO NOT EDIT.
//
// health client HTTP transport
//
// Command:
// $ goa gen goa.design/plugins/v3/goakit/examples/fetcher/fetcher/design -o
// $(GOPATH)/src/goa.design/plugins/goakit/examples/fetcher/fetcher
package client
import (
"context"
"net/http"
"github.com/go-kit/kit/endpoint"
goahttp "goa.design/goa/v3/http"
)
// Client lists the health service endpoint HTTP clients.
type Client struct {
// Show Doer is the HTTP client used to make requests to the show endpoint.
ShowDoer goahttp.Doer
// RestoreResponseBody controls whether the response bodies are reset after
// decoding so they can be read again.
RestoreResponseBody bool
scheme string
host string
encoder func(*http.Request) goahttp.Encoder
decoder func(*http.Response) goahttp.Decoder
}
// NewClient instantiates HTTP clients for all the health service servers.
func NewClient(
scheme string,
host string,
doer goahttp.Doer,
enc func(*http.Request) goahttp.Encoder,
dec func(*http.Response) goahttp.Decoder,
restoreBody bool,
) *Client {
return &Client{
ShowDoer: doer,
RestoreResponseBody: restoreBody,
scheme: scheme,
host: host,
decoder: dec,
encoder: enc,
}
}
// Show returns an endpoint that makes HTTP requests to the health service show
// server.
func (c *Client) Show() endpoint.Endpoint {
var (
decodeResponse = DecodeShowResponse(c.decoder, c.RestoreResponseBody)
)
return func(ctx context.Context, v interface{}) (interface{}, error) {
req, err := c.BuildShowRequest(ctx, v)
if err != nil {
return nil, err
}
resp, err := c.ShowDoer.Do(req)
if err != nil {
return nil, goahttp.ErrRequestError("health", "show", err) | }
return decodeResponse(resp)
}
} |
|
command-executor.service.ts | import {Injectable} from '@angular/core';
import {HttpClient} from '@angular/common/http';
import {ErrorService} from './error.service';
import {Observable} from 'rxjs';
import {webSocket} from 'rxjs/webSocket';
import {map, mergeAll, mergeMap, tap} from 'rxjs/operators';
import {getHost} from './url';
@Injectable({providedIn: 'root'})
export class CommandExecutorService {
private static readonly BASE_HTTP_URL = '/api/command-executor';
private static readonly BASE_WS_URL = `ws://${getHost()}`;
private static readonly ALL_EVENTS_URL = `${CommandExecutorService.BASE_HTTP_URL}/event/status`;
private static readonly BASE_COMMAND_URL = `${CommandExecutorService.BASE_HTTP_URL}/command`;
private static readonly BASE_EXECUTION_URL = `${CommandExecutorService.BASE_HTTP_URL}/execution`;
constructor(private httpClient: HttpClient, private errorService: ErrorService) {
}
async getAllCommands(): Promise<Array<Command>> {
try {
return await this.httpClient.get<Array<Command>>(CommandExecutorService.BASE_COMMAND_URL).toPromise();
} catch (e) {
this.errorService.logAndReThrow(e);
}
}
async addCommand(command: Command): Promise<void> {
try {
await this.httpClient.post(CommandExecutorService.BASE_COMMAND_URL, command).toPromise();
} catch (e) {
this.errorService.logAndReThrow(e);
}
}
async deleteCommand(command: Command): Promise<void> {
try {
await this.httpClient.delete(this.commandUrl(command)).toPromise();
} catch (e) {
this.errorService.logAndReThrow(e);
}
}
async execute(command: Command): Promise<void> {
try {
await this.httpClient.post(this.allExecutionsOfCommandUrl(command), null).toPromise();
} catch (e) {
this.errorService.logAndReThrow(e);
}
}
async getAllExecutions(): Promise<Array<Execution>> {
try {
const executions = await this.httpClient.get<Array<Execution>>(CommandExecutorService.BASE_EXECUTION_URL).toPromise();
return executions.map(e => {
return new Execution(e.startTime, e.commandId, e.commandName, e.commandScript, e.exitCode, e.exitSignal, e.errorMessage);
});
} catch (e) {
this.errorService.logAndReThrow(e);
}
}
watchAllExecutions(): Observable<Array<Execution>> {
return webSocket(`${CommandExecutorService.BASE_WS_URL}${CommandExecutorService.ALL_EVENTS_URL}`).pipe(
mergeMap(_ => this.getAllExecutions())
);
}
async terminateExecution(execution: Execution): Promise<void> {
try {
await this.httpClient.post(this.executionUrl(execution, 'terminate'), null).toPromise();
} catch (e) {
this.errorService.logAndReThrow(e);
}
}
async haltExecution(execution: Execution): Promise<void> {
try {
await this.httpClient.post(this.executionUrl(execution, 'halt'), null).toPromise();
} catch (e) {
this.errorService.logAndReThrow(e);
}
}
async deleteExecution(execution: Execution): Promise<void> {
try {
await this.httpClient.delete(this.executionUrl(execution)).toPromise();
} catch (e) {
this.errorService.logAndReThrow(e);
}
}
async deleteAllExecutions(): Promise<void> {
try {
await this.httpClient.delete(CommandExecutorService.BASE_EXECUTION_URL).toPromise();
} catch (e) {
this.errorService.logAndReThrow(e);
}
}
async watchExecution(commandId: string, executionStartTime: number): Promise<ExecutionWithOutput> {
try {
const executions = await this.httpClient.get<Array<Execution>>(this.allExecutionsOfCommandUrl(commandId)).toPromise();
const execution = executions.filter(e => e.startTime === executionStartTime)[0];
if (!execution) {
throw new Error(`Execution with ID '${executionStartTime}' does not exist`);
}
const outputUrl = `${CommandExecutorService.BASE_WS_URL}${this.executionUrl(execution, 'output')}?fromStart=true`;
const output = webSocket<OutputChanges>(outputUrl).pipe(
tap({error: e => this.errorService.log(e)}),
map(c => c.changes),
mergeAll()
);
return new ExecutionWithOutput(new Execution(execution.startTime, execution.commandId, execution.commandName), output);
} catch (e) {
this.errorService.logAndReThrow(e);
}
}
private commandUrl(command: Command | string): string {
return `${CommandExecutorService.BASE_COMMAND_URL}/${typeof command === 'string' ? command : command.id}`;
}
private allExecutionsOfCommandUrl(command: Command | string): string {
return `${this.commandUrl(command)}/execution`;
}
private executionUrl(execution: Execution, action?: string): string {
const urlParts: Array<string> = [
CommandExecutorService.BASE_COMMAND_URL,
execution.commandId,
'execution',
execution.startTime.toString()
];
if (action) {
urlParts.push(action);
}
return urlParts.join('/');
}
}
export class Command {
constructor(readonly id?: string, readonly name?: string, readonly script?: string) {
}
}
export class Execution {
constructor(readonly startTime: number = null, readonly commandId: string = null, readonly commandName: string = null,
readonly commandScript: string = null, readonly exitCode: number = null, readonly exitSignal: string = null,
readonly errorMessage: string = null) {
}
| get isSuccessful(): boolean {
return this.exitCode === 0;
}
get isFailed(): boolean {
return this.exitCode !== 0 && (this.exitCode !== null || this.exitSignal !== null || this.errorMessage !== null);
}
get isComplete(): boolean {
return this.isSuccessful || this.isFailed;
}
}
export class ExecutionWithOutput {
constructor(private execution: Execution, readonly output: Observable<string>) {
}
get name(): string {
return `${this.execution.commandName} ${this.execution.startDate.toLocaleString()}`;
}
}
class OutputChanges {
changes: Array<string>;
} | get startDate(): Date {
return new Date(this.startTime);
}
|
0001_squashed_0004_auto_20170831_0541.py | # Generated by Django 1.9.13 on 2017-08-31 05:44
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import markupfield.fields
class | (migrations.Migration):
replaces = [('community', '0001_initial'), ('community', '0002_auto_20150416_1853'), ('community', '0003_auto_20170831_0358'), ('community', '0004_auto_20170831_0541')]
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Link',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(blank=True, db_index=True, default=django.utils.timezone.now)),
('updated', models.DateTimeField(default=django.utils.timezone.now, blank=True)),
('url', models.URLField(blank=True, max_length=1000, verbose_name='URL')),
('creator', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='community_link_creator', to=settings.AUTH_USER_MODEL)),
('last_modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='community_link_modified', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name_plural': 'Links',
'ordering': ['-created'],
'verbose_name': 'Link',
'get_latest_by': 'created',
},
),
migrations.CreateModel(
name='Photo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(blank=True, db_index=True, default=django.utils.timezone.now)),
('updated', models.DateTimeField(default=django.utils.timezone.now, blank=True)),
('image', models.ImageField(blank=True, upload_to='community/photos/')),
('image_url', models.URLField(blank=True, max_length=1000, verbose_name='Image URL')),
('caption', models.TextField(blank=True)),
('click_through_url', models.URLField(blank=True)),
('creator', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='community_photo_creator', to=settings.AUTH_USER_MODEL)),
('last_modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='community_photo_modified', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name_plural': 'photos',
'ordering': ['-created'],
'verbose_name': 'photo',
'get_latest_by': 'created',
},
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(blank=True, db_index=True, default=django.utils.timezone.now)),
('updated', models.DateTimeField(default=django.utils.timezone.now, blank=True)),
('title', models.CharField(blank=True, max_length=200, null=True)),
('content', markupfield.fields.MarkupField(rendered_field=True)),
('abstract', models.TextField(blank=True, null=True)),
('content_markup_type', models.CharField(choices=[('', '--'), ('html', 'html'), ('plain', 'plain'), ('markdown', 'markdown'), ('restructuredtext', 'restructuredtext')], default='html', max_length=30)),
('_content_rendered', models.TextField(editable=False)),
('media_type', models.IntegerField(choices=[(1, 'text'), (2, 'photo'), (3, 'video'), (4, 'link')], default=1)),
('source_url', models.URLField(blank=True, max_length=1000)),
('meta', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default={})),
('status', models.IntegerField(choices=[(1, 'private'), (2, 'public')], db_index=True, default=1)),
('creator', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='community_post_creator', to=settings.AUTH_USER_MODEL)),
('last_modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='community_post_modified', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name_plural': 'posts',
'ordering': ['-created'],
'verbose_name': 'post',
'get_latest_by': 'created',
},
),
migrations.CreateModel(
name='Video',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(blank=True, db_index=True, default=django.utils.timezone.now)),
('updated', models.DateTimeField(default=django.utils.timezone.now, blank=True)),
('video_embed', models.TextField(blank=True)),
('video_data', models.FileField(blank=True, upload_to='community/videos/')),
('caption', models.TextField(blank=True)),
('click_through_url', models.URLField(blank=True, verbose_name='Click Through URL')),
('creator', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='community_video_creator', to=settings.AUTH_USER_MODEL)),
('last_modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='community_video_modified', to=settings.AUTH_USER_MODEL)),
('post', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_video', to='community.Post')),
],
options={
'verbose_name_plural': 'videos',
'ordering': ['-created'],
'verbose_name': 'video',
'get_latest_by': 'created',
},
),
migrations.AddField(
model_name='photo',
name='post',
field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_photo', to='community.Post'),
),
migrations.AddField(
model_name='link',
name='post',
field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_link', to='community.Post'),
),
migrations.AlterField(
model_name='post',
name='content_markup_type',
field=models.CharField(choices=[('', '--'), ('html', 'HTML'), ('plain', 'Plain'), ('markdown', 'Markdown'), ('restructuredtext', 'Restructured Text')], default='html', max_length=30),
),
migrations.AlterField(
model_name='post',
name='meta',
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, default={}),
),
migrations.AlterField(
model_name='post',
name='meta',
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=dict),
),
]
| Migration |
webhook.go | // Package webhook provides means to interact with webhooks directly and not
// through the bot API.
package webhook
import (
"context"
"mime/multipart"
"net/url"
"strconv"
"github.com/pkg/errors"
"github.com/diamondburned/arikawa/v3/api"
"github.com/diamondburned/arikawa/v3/api/rate"
"github.com/diamondburned/arikawa/v3/discord"
"github.com/diamondburned/arikawa/v3/utils/httputil"
"github.com/diamondburned/arikawa/v3/utils/httputil/httpdriver"
"github.com/diamondburned/arikawa/v3/utils/json/option"
"github.com/diamondburned/arikawa/v3/utils/sendpart"
)
// TODO: if there's ever an Arikawa v3, then a new Client abstraction could be
// made that wraps around Session being an interface. Just food for thought.
// Session keeps a single webhook session. It is referenced by other webhook
// clients using the same session.
type Session struct {
// Limiter is the rate limiter used for the client. This field should not be
// changed, as doing so is potentially racy.
Limiter *rate.Limiter
// ID is the ID of the webhook.
ID discord.WebhookID
// Token is the token of the webhook.
Token string
}
// OnRequest should be called on each client request to inject itself.
func (s *Session) OnRequest(r httpdriver.Request) error {
return s.Limiter.Acquire(r.GetContext(), r.GetPath())
}
// OnResponse should be called after each client request to clean itself up.
func (s *Session) OnResponse(r httpdriver.Request, resp httpdriver.Response) error {
return s.Limiter.Release(r.GetPath(), httpdriver.OptHeader(resp))
}
// Client is the client used to interact with a webhook.
type Client struct {
// Client is the httputil.Client used to call Discord's API.
*httputil.Client
*Session
}
// New creates a new Client using the passed webhook token and ID. It uses its
// own rate limiter.
func New(id discord.WebhookID, token string) *Client {
return NewCustom(id, token, httputil.NewClient())
}
// NewCustom creates a new webhook client using the passed webhook token, ID and
// a copy of the given httputil.Client. The copy will have a new rate limiter
// added in.
func NewCustom(id discord.WebhookID, token string, hcl *httputil.Client) *Client {
ses := Session{
Limiter: rate.NewLimiter(api.Path),
ID: id,
Token: token,
}
hcl = hcl.Copy()
hcl.OnRequest = append(hcl.OnRequest, ses.OnRequest)
hcl.OnResponse = append(hcl.OnResponse, ses.OnResponse)
return &Client{
Client: hcl,
Session: &ses,
}
}
// FromAPI creates a new client that shares the same internal HTTP client with
// the one in the API's. This is often useful for bots that need webhook
// interaction, since the rate limiter is shared.
func FromAPI(id discord.WebhookID, token string, c *api.Client) *Client {
return &Client{
Client: c.Client,
Session: &Session{
Limiter: c.Limiter,
ID: id,
Token: token,
},
}
}
// WithContext returns a shallow copy of Client with the given context. It's
// used for method timeouts and such. This method is thread-safe.
func (c *Client) WithContext(ctx context.Context) *Client {
return &Client{
Client: c.Client.WithContext(ctx),
Session: c.Session,
}
}
// Get gets the webhook.
func (c *Client) Get() (*discord.Webhook, error) {
var w *discord.Webhook
return w, c.RequestJSON(&w, "GET", api.EndpointWebhooks+c.ID.String()+"/"+c.Token)
}
// Modify modifies the webhook.
func (c *Client) Modify(data api.ModifyWebhookData) (*discord.Webhook, error) {
var w *discord.Webhook
return w, c.RequestJSON(
&w, "PATCH",
api.EndpointWebhooks+c.ID.String()+"/"+c.Token,
httputil.WithJSONBody(data),
)
}
// Delete deletes a webhook permanently.
func (c *Client) Delete() error {
return c.FastRequest("DELETE", api.EndpointWebhooks+c.ID.String()+"/"+c.Token)
}
// https://discord.com/developers/docs/resources/webhook#execute-webhook-jsonform-params
type ExecuteData struct {
// Content are the message contents (up to 2000 characters).
//
// Required: one of content, file, embeds
Content string `json:"content,omitempty"`
// ThreadID causes the message to be sent to the specified thread within
// the webhook's channel. The thread will automatically be unarchived.
ThreadID discord.CommandID `json:"-"`
// Username overrides the default username of the webhook
Username string `json:"username,omitempty"`
// AvatarURL overrides the default avatar of the webhook.
AvatarURL discord.URL `json:"avatar_url,omitempty"`
// TTS is true if this is a TTS message.
TTS bool `json:"tts,omitempty"`
// Embeds contains embedded rich content.
//
// Required: one of content, file, embeds
Embeds []discord.Embed `json:"embeds,omitempty"`
// Components is the list of components (such as buttons) to be attached to
// the message.
Components []discord.Component `json:"components,omitempty"`
// Files represents a list of files to upload. This will not be
// JSON-encoded and will only be available through WriteMultipart.
Files []sendpart.File `json:"-"`
// AllowedMentions are the allowed mentions for the message.
AllowedMentions *api.AllowedMentions `json:"allowed_mentions,omitempty"`
}
// NeedsMultipart returns true if the ExecuteWebhookData has files.
func (data ExecuteData) NeedsMultipart() bool {
return len(data.Files) > 0
}
// WriteMultipart writes the webhook data into the given multipart body. It does
// not close body.
func (data ExecuteData) WriteMultipart(body *multipart.Writer) error {
return sendpart.Write(body, data, data.Files)
} | // get created. This is generally faster, but only applicable if no further
// interaction is required.
func (c *Client) Execute(data ExecuteData) (err error) {
_, err = c.execute(data, false)
return
}
// ExecuteAndWait executes the webhook, and waits for the generated
// discord.Message to be returned.
func (c *Client) ExecuteAndWait(data ExecuteData) (*discord.Message, error) {
return c.execute(data, true)
}
func (c *Client) execute(data ExecuteData, wait bool) (*discord.Message, error) {
if data.Content == "" && len(data.Embeds) == 0 && len(data.Files) == 0 {
return nil, api.ErrEmptyMessage
}
if data.AllowedMentions != nil {
if err := data.AllowedMentions.Verify(); err != nil {
return nil, errors.Wrap(err, "allowedMentions error")
}
}
sum := 0
for i, embed := range data.Embeds {
if err := embed.Validate(); err != nil {
return nil, errors.Wrap(err, "embed error at "+strconv.Itoa(i))
}
sum += embed.Length()
if sum > 6000 {
return nil, &discord.OverboundError{sum, 6000, "sum of all text in embeds"}
}
}
param := make(url.Values, 2)
if wait {
param["wait"] = []string{"true"}
}
if data.ThreadID.IsValid() {
param["thread_id"] = []string{data.ThreadID.String()}
}
var URL = api.EndpointWebhooks + c.ID.String() + "/" + c.Token + "?" + param.Encode()
var msg *discord.Message
var ptr interface{}
if wait {
ptr = &msg
}
return msg, sendpart.POST(c.Client, data, ptr, URL)
}
// Message returns a previously-sent webhook message from the same token.
func (c *Client) Message(messageID discord.MessageID) (*discord.Message, error) {
var m *discord.Message
return m, c.RequestJSON(
&m, "GET",
api.EndpointWebhooks+c.ID.String()+"/"+c.Token+"/messages/"+messageID.String())
}
// https://discord.com/developers/docs/resources/webhook#edit-webhook-message-jsonform-params
type EditMessageData struct {
// Content is the new message contents (up to 2000 characters).
Content option.NullableString `json:"content,omitempty"`
// Embeds contains embedded rich content.
Embeds *[]discord.Embed `json:"embeds,omitempty"`
// Components contains the new components to attach.
Components *[]discord.Component `json:"components,omitempty"`
// AllowedMentions are the allowed mentions for a message.
AllowedMentions *api.AllowedMentions `json:"allowed_mentions,omitempty"`
// Attachments are the attached files to keep
Attachments *[]discord.Attachment `json:"attachments,omitempty"`
Files []sendpart.File `json:"-"`
}
// EditMessage edits a previously-sent webhook message from the same webhook.
func (c *Client) EditMessage(messageID discord.MessageID, data EditMessageData) (*discord.Message, error) {
if data.AllowedMentions != nil {
if err := data.AllowedMentions.Verify(); err != nil {
return nil, errors.Wrap(err, "allowedMentions error")
}
}
if data.Embeds != nil {
sum := 0
for _, e := range *data.Embeds {
if err := e.Validate(); err != nil {
return nil, errors.Wrap(err, "embed error")
}
sum += e.Length()
if sum > 6000 {
return nil, &discord.OverboundError{sum, 6000, "sum of text in embeds"}
}
}
}
var msg *discord.Message
return msg, sendpart.PATCH(c.Client, data, &msg,
api.EndpointWebhooks+c.ID.String()+"/"+c.Token+"/messages/"+messageID.String())
}
// NeedsMultipart returns true if the SendMessageData has files.
func (data EditMessageData) NeedsMultipart() bool {
return len(data.Files) > 0
}
func (data EditMessageData) WriteMultipart(body *multipart.Writer) error {
return sendpart.Write(body, data, data.Files)
}
// DeleteMessage deletes a message that was previously created by the same
// webhook.
func (c *Client) DeleteMessage(messageID discord.MessageID) error {
return c.FastRequest("DELETE",
api.EndpointWebhooks+c.ID.String()+"/"+c.Token+"/messages/"+messageID.String())
} |
// Execute sends a message to the webhook, but doesn't wait for the message to |
yolo3_resnet50.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""YOLO_v3 ResNet50 Model Defined in Keras."""
from tensorflow.keras.layers import UpSampling2D, Concatenate
from tensorflow.keras.models import Model
from tensorflow.keras.applications.resnet import ResNet50
from yolo3.models.layers import yolo3_predictions, yolo3lite_predictions, tiny_yolo3_predictions, tiny_yolo3lite_predictions
def yolo3_resnet50_body(inputs, num_anchors, num_classes):
"""Create YOLO_V3 ResNet50 model CNN body in Keras."""
resnet50 = ResNet50(input_tensor=inputs, weights='imagenet', include_top=False)
print('backbone layers number: {}'.format(len(resnet50.layers)))
# input: 416 x 416 x 3
# conv5_block3_out: 13 x 13 x 2048
# conv4_block6_out: 26 x 26 x 1024
# conv3_block4_out: 52 x 52 x 512
# f1 :13 x 13 x 2048
f1 = resnet50.get_layer('conv5_block3_out').output
# f2: 26 x 26 x 1024
f2 = resnet50.get_layer('conv4_block6_out').output
# f3 : 52 x 52 x 512
f3 = resnet50.get_layer('conv3_block4_out').output
f1_channel_num = 1024
f2_channel_num = 512
f3_channel_num = 256
y1, y2, y3 = yolo3_predictions((f1, f2, f3), (f1_channel_num, f2_channel_num, f3_channel_num), num_anchors, num_classes)
return Model(inputs = inputs, outputs=[y1,y2,y3])
def yolo3lite_resnet50_body(inputs, num_anchors, num_classes):
'''Create YOLO_v3 Lite ResNet50 model CNN body in keras.'''
resnet50 = ResNet50(input_tensor=inputs, weights='imagenet', include_top=False)
print('backbone layers number: {}'.format(len(resnet50.layers)))
# input: 416 x 416 x 3
# conv5_block3_out: 13 x 13 x 2048
# conv4_block6_out: 26 x 26 x 1024
# conv3_block4_out: 52 x 52 x 512
# f1 :13 x 13 x 2048
f1 = resnet50.get_layer('conv5_block3_out').output
# f2: 26 x 26 x 1024
f2 = resnet50.get_layer('conv4_block6_out').output
# f3 : 52 x 52 x 512
f3 = resnet50.get_layer('conv3_block4_out').output
f1_channel_num = 1024
f2_channel_num = 512
f3_channel_num = 256
y1, y2, y3 = yolo3lite_predictions((f1, f2, f3), (f1_channel_num, f2_channel_num, f3_channel_num), num_anchors, num_classes)
return Model(inputs = inputs, outputs=[y1,y2,y3])
def yolo3lite_spp_resnet50_body(inputs, num_anchors, num_classes):
'''Create YOLO_v3 Lite SPP ResNet50 model CNN body in keras.'''
resnet50 = ResNet50(input_tensor=inputs, weights='imagenet', include_top=False)
print('backbone layers number: {}'.format(len(resnet50.layers)))
# input: 416 x 416 x 3
# conv5_block3_out: 13 x 13 x 2048
# conv4_block6_out: 26 x 26 x 1024
# conv3_block4_out: 52 x 52 x 512
# f1 :13 x 13 x 2048
f1 = resnet50.get_layer('conv5_block3_out').output
# f2: 26 x 26 x 1024
f2 = resnet50.get_layer('conv4_block6_out').output
# f3 : 52 x 52 x 512
f3 = resnet50.get_layer('conv3_block4_out').output
f1_channel_num = 1024
f2_channel_num = 512
f3_channel_num = 256
y1, y2, y3 = yolo3lite_predictions((f1, f2, f3), (f1_channel_num, f2_channel_num, f3_channel_num), num_anchors, num_classes, use_spp=True)
return Model(inputs = inputs, outputs=[y1,y2,y3])
def tiny_yolo3_resnet50_body(inputs, num_anchors, num_classes):
'''Create Tiny YOLO_v3 ResNet50 model CNN body in keras.'''
resnet50 = ResNet50(input_tensor=inputs, weights='imagenet', include_top=False)
print('backbone layers number: {}'.format(len(resnet50.layers)))
# input: 416 x 416 x 3
# conv5_block3_out: 13 x 13 x 2048
# conv4_block6_out: 26 x 26 x 1024
# conv3_block4_out: 52 x 52 x 512
# f1 :13 x 13 x 2048
f1 = resnet50.get_layer('conv5_block3_out').output
# f2: 26 x 26 x 1024
f2 = resnet50.get_layer('conv4_block6_out').output
f1_channel_num = 1024
f2_channel_num = 512
y1, y2 = tiny_yolo3_predictions((f1, f2), (f1_channel_num, f2_channel_num), num_anchors, num_classes)
return Model(inputs, [y1,y2])
def tiny_yolo3lite_resnet50_body(inputs, num_anchors, num_classes):
| '''Create Tiny YOLO_v3 Lite ResNet50 model CNN body in keras.'''
resnet50 = ResNet50(input_tensor=inputs, weights='imagenet', include_top=False)
print('backbone layers number: {}'.format(len(resnet50.layers)))
# input: 416 x 416 x 3
# conv5_block3_out: 13 x 13 x 2048
# conv4_block6_out: 26 x 26 x 1024
# conv3_block4_out: 52 x 52 x 512
# f1 :13 x 13 x 2048
f1 = resnet50.get_layer('conv5_block3_out').output
# f2: 26 x 26 x 1024
f2 = resnet50.get_layer('conv4_block6_out').output
f1_channel_num = 1024
f2_channel_num = 512
y1, y2 = tiny_yolo3lite_predictions((f1, f2), (f1_channel_num, f2_channel_num), num_anchors, num_classes)
return Model(inputs, [y1,y2]) |
|
abstract_simplex_token.rs | use crate::AbstractSimplexToken;
use super::DynamicAbstractToken;
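// Blanket implementation: any `DynamicAbstractToken` can be used as an
// `AbstractSimplexToken` whose simplex type is the token's name type.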
impl<T, U> AbstractSimplexToken for U
where
U: DynamicAbstractToken<Name = T> {
type Simplex = T;
fn name(&self) -> &Self::Simplex { DynamicAbstractToken::name(self) }
fn consume(self) -> Self::Simplex |
}
| { DynamicAbstractToken::consume_concept(self) } |
libvirt.go | // Package libvirt collects libvirt-specific configuration.
package libvirt
import (
survey "github.com/AlecAivazis/survey/v2"
"github.com/pkg/errors"
"github.com/openshift/installer/pkg/types/libvirt"
libvirtdefaults "github.com/openshift/installer/pkg/types/libvirt/defaults"
"github.com/openshift/installer/pkg/validate"
)
// Platform collects libvirt-specific configuration.
func Platform() (*libvirt.Platform, error) |
// uriValidator validates if the answer provided in prompt is a valid
// url and has non-empty scheme.
func uriValidator(ans interface{}) error {
return validate.URI(ans.(string))
}
| {
var uri string
err := survey.Ask([]*survey.Question{
{
Prompt: &survey.Input{
Message: "Libvirt Connection URI",
Help: "The libvirt connection URI to be used. This must be accessible from the running cluster.",
Default: libvirtdefaults.DefaultURI,
},
Validate: survey.ComposeValidators(survey.Required, uriValidator),
},
}, &uri)
if err != nil {
return nil, errors.Wrap(err, "failed UserInput")
}
return &libvirt.Platform{
URI: uri,
}, nil
} |
invvect.go | // Copyright (c) 2013-2016 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package wire
import (
"fmt"
"io"
"github.com/fichain/filechain/chaincfg/chainhash"
)
const (
// MaxInvPerMsg is the maximum number of inventory vectors that can be in a
// single bitcoin inv message.
MaxInvPerMsg = 50000
// Maximum payload size for an inventory vector.
maxInvVectPayload = 4 + chainhash.HashSize
// InvWitnessFlag denotes that the inventory vector type is requesting,
// or sending a version which includes witness data.
InvWitnessFlag = 1 << 30
)
// InvType represents the allowed types of inventory vectors. See InvVect.
type InvType uint32
// These constants define the various supported inventory vector types.
const (
InvTypeError InvType = 0
InvTypeTx InvType = 1
InvTypeBlock InvType = 2
InvTypeFilteredBlock InvType = 3
InvTypeWitnessBlock InvType = InvTypeBlock | InvWitnessFlag
InvTypeWitnessTx InvType = InvTypeTx | InvWitnessFlag
InvTypeFilteredWitnessBlock InvType = InvTypeFilteredBlock | InvWitnessFlag
)
// Map of service flags back to their constant names for pretty printing.
var ivStrings = map[InvType]string{
InvTypeError: "ERROR",
InvTypeTx: "MSG_TX",
InvTypeBlock: "MSG_BLOCK",
InvTypeFilteredBlock: "MSG_FILTERED_BLOCK",
InvTypeWitnessBlock: "MSG_WITNESS_BLOCK",
InvTypeWitnessTx: "MSG_WITNESS_TX",
InvTypeFilteredWitnessBlock: "MSG_FILTERED_WITNESS_BLOCK",
}
// String returns the InvType in human-readable form.
func (invtype InvType) String() string {
if s, ok := ivStrings[invtype]; ok {
return s
}
return fmt.Sprintf("Unknown InvType (%d)", uint32(invtype))
}
// InvVect defines a bitcoin inventory vector which is used to describe data,
// as specified by the Type field, that a peer wants, has, or does not have to
// another peer.
type InvVect struct {
Type InvType // Type of data
Hash chainhash.Hash // Hash of the data
}
// NewInvVect returns a new InvVect using the provided type and hash.
func NewInvVect(typ InvType, hash *chainhash.Hash) *InvVect {
return &InvVect{
Type: typ,
Hash: *hash,
}
}
// readInvVect reads an encoded InvVect from r depending on the protocol | return readElements(r, &iv.Type, &iv.Hash)
}
// writeInvVect serializes an InvVect to w depending on the protocol version.
func writeInvVect(w io.Writer, pver uint32, iv *InvVect) error {
return writeElements(w, iv.Type, &iv.Hash)
} | // version.
func readInvVect(r io.Reader, pver uint32, iv *InvVect) error { |
ESRB.py | import numpy as np
import matplotlib.pyplot as plt
import pint
# Use the same registry
from main import ureg
ureg.setup_matplotlib(True)
from uncertainties import ufloat, umath, unumpy
import pandas as pd
from scipy.signal import find_peaks
from scipy.integrate import simpson
from scipy.optimize import curve_fit
plt.rcParams['text.usetex'] = True
amp = 700*ureg.mV
R=ufloat(0.82, 0.82*0.1)*ureg.ohm
df = pd.read_csv("./ESRB.csv")
# The I0_modulation signal is horrible, the system was too noisy, so instead:
#
# I0_modulation = (unumpy.uarray(
# df['V_modulation_raw'].values,
# df['V_modulation_err'].values
# )*ureg.mV/R).to('ampere')
#
# we regenerate it, assuming it should be linear, just as V_DC is.
I0_modulation = (unumpy.uarray(np.linspace(
df['V_modulation_raw'].min(),
df['V_modulation_raw'].max(),
len(df)
), df['V_modulation_err'].mean())*ureg.mV/R).to('ampere')
ptp_Y = unumpy.uarray(
df['ptp_Y_raw'].values*df['phase_sign'].values,
df['ptp_Y_err'].values
)*ureg.mV
ptp_X_modulation = ufloat(3.09, 0.01)*ureg.mV
fig, ax = plt.subplots()
I0_modulation_err = np.array([val.m.s for val in I0_modulation])
I0_modulation_raw = np.array([val.m.n for val in I0_modulation])
ptp_ratio = ptp_Y/ptp_X_modulation
absorption_deriviative = ptp_ratio/max(ptp_ratio)
absorption_deriviative_raw = np.array([val.m.n for val in absorption_deriviative])
absorption_deriviative_err = np.array([val.m.s for val in absorption_deriviative])
ax.errorbar(
I0_modulation_raw*ureg.ampere,
absorption_deriviative_raw, # Dimensionless
fmt='.',
yerr=absorption_deriviative_err,
# TODO: Mention in report that error is too big to be drafted
#xerr=I_modulation_err,
# TODO: Is this the correct label?
label='Absorption Derivative'
)
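# Fit models: the derivative of a Lorentzian line centred at I0 with half-width
# gamma, and the corresponding line shape used for the frequency-axis plot.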
def | (I, I0, gamma, amplitude):
return amplitude*(-2*(gamma**2)*(I - I0))/ \
(gamma**2 + (I - I0)**2)**2
def lorentzian_fit(I, I0, gamma, amplitude):
return amplitude*gamma**2/\
(gamma**2 + (I - I0)**2)**2
##### By MATLAB:
# Goodness of fit:
# SSE: 0.197
# R-square: 0.9845
# Adjusted R-square: 0.9838
# RMSE: 0.06769
# I0 gamma amplitude
matlab_p0 = [0.5479, 0.03847, 0.05554]
matlab_bounds=((0.547, 0.03672, 0.05304),
(0.5488, 0.04021, 0.05805))
I_rf = ufloat(matlab_p0[0], abs(matlab_bounds[0][0] - matlab_p0[0]))*ureg.ampere
I_hwhm = ufloat(matlab_p0[1], abs(matlab_bounds[0][1] - matlab_p0[1]))*ureg.ampere
from main import g_times_bohr
# TODO: Take this value from Itamar & Tomer
H_RF = ufloat(34.914, 0.009)*ureg.gauss
k = H_RF/I_rf
# Converts current I To frequency f using all of the constants
def I2f(I):
return (I*k*g_times_bohr/ureg.planck_constant).to('megahertz')
f0_modulation = I2f(I0_modulation)
f_rf = I2f(I_rf)
f_hwhm = I2f(I_hwhm)
T2 = (1/f_hwhm).to('nanosecond')
##### A failing Python fit attempt - I consider it as a failure because it hits
##### the bounds :/
# popt, pcov = curve_fit(
# lorentzian_dif_fit, absorption_deriviative_raw, I0_modulation_raw,
# p0=matlab_p0, bounds=matlab_bounds
# )
# lorentzian_dif_fit_points = lorentzian_dif_fit(I0_modulation_raw, *popt)
# ax.plot(
# I0_modulation_raw*ureg.ampere,
# lorentzian_dif_fit_points,
# label="Python fit"
# )
I0_modulation_seq = np.linspace(
I0_modulation.min().m.n,
I0_modulation.max().m.n,
len(I0_modulation)*100
)
ax.plot(
I0_modulation_seq*ureg.ampere,
lorentzian_dif_fit(I0_modulation_seq, I_rf.m.n, I_hwhm.m.n, matlab_p0[2]),
label="Matlab fit"
)
ax.set_yticks([])
axt = ax.twiny()
axt.grid(linestyle='--')
axt.set_yticks([])
f0_modulation_seq = np.linspace(
f0_modulation.min().m.n,
f0_modulation.max().m.n,
len(f0_modulation)*100
)
def lorentzian_wrapper(f0):
# For some reason this needs to be amplified by a factor of 800 so it will
# look good.
return lorentzian_fit(f0, f_rf.m.n, f_hwhm.m.n, matlab_p0[2]*800)
axt.plot(
f0_modulation_seq*ureg.megahertz,
lorentzian_wrapper(f0_modulation_seq),
label = "Lorenzian fit", color='green'
)
axt.set_xticks(
[(f_rf - f_hwhm).m.n, f_rf.m.n, (f_rf + f_hwhm).m.n],
['', '$f_{rf}$', '']
)
axt.set_xlabel('')
axt.arrow(
length_includes_head = True,
x = (f_rf - f_hwhm).m.n*ureg.megahertz,
y = lorentzian_wrapper((f_rf - f_hwhm).m.n),
dx = 2*f_hwhm.m.n*ureg.megahertz,
dy = 0,
head_length = f_hwhm.m.n/10,
head_width = matlab_p0[2],
label="Full Width Half Max",
)
axt.arrow(
length_includes_head = True,
x = (f_rf + f_hwhm).m.n*ureg.megahertz,
y = lorentzian_wrapper((f_rf + f_hwhm).m.n),
dx = -2*f_hwhm.m.n*ureg.megahertz,
head_length = f_hwhm.m.n/10,
head_width = matlab_p0[2],
dy = 0,
)
axt.text(
0.5, 0.63,
# (f_hwhm.m.n/10),
# lorentzian_wrapper((f0 - f_hwhm).m.n)*2,
"FWHM",
transform=ax.transAxes,
# fontsize=00
)
ax.legend(loc='upper right')
# axt.legend(loc='upper left')
plt.show()
fig.savefig("ESRB.pgf")
fig.savefig("ESRB.png")
# TODO: Integrate numerically / or fit to a Lorentzian's derivative
# TODO: Scale the x axis to frequency and find the width of the Lorentzian in
# frequency scale
| lorentzian_dif_fit |
image.go | package db
// An Image row represents a Docker image that should be built by the Quilt
// masters.
type Image struct {
ID int
// The desired name for the image.
Name string
// The Dockerfile with which to build the image.
Dockerfile string
// The ID of the built image.
DockerID string
// The build status of the image.
Status string
}
const (
// Building is the status string for when the image is being built.
Building = "building"
// Built is the status string for when the image has been built.
Built = "built"
)
// InsertImage creates a new image row and inserts it into the database.
func (db Database) InsertImage() Image {
result := Image{ID: db.nextID()}
db.insert(result)
return result
}
// SelectFromImage gets all images in the database that satisfy 'check'.
func (db Database) SelectFromImage(check func(Image) bool) []Image {
var result []Image
for _, row := range db.selectRows(ImageTable) {
if check == nil || check(row.(Image)) {
result = append(result, row.(Image))
}
}
return result
}
// SelectFromImage gets all images in the database connection that satisfy 'check'.
func (conn Conn) SelectFromImage(check func(Image) bool) []Image {
var result []Image
conn.Txn(ImageTable).Run(func(view Database) error {
result = view.SelectFromImage(check)
return nil
})
return result
}
func (image Image) getID() int {
return image.ID
}
func (image Image) tt() TableType {
return ImageTable
}
func (image Image) String() string {
return defaultString(image)
}
| return image.ID < r.(Image).ID
}
// ImageSlice is an alias for []Image to allow for joins
type ImageSlice []Image
// Get returns the value contained at the given index
func (slc ImageSlice) Get(ii int) interface{} {
return slc[ii]
}
// Len returns the number of items in the slice.
func (slc ImageSlice) Len() int {
return len(slc)
} | func (image Image) less(r row) bool { |
utils.rs | // Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
//==============================================================================
// Imports
//==============================================================================
use super::DPDKRuntime;
use ::rand::{
distributions::{
Distribution,
Standard,
},
seq::SliceRandom,
Rng,
};
use ::runtime::utils::UtilsRuntime;
//==============================================================================
// Trait Implementations
//==============================================================================
/// Utils Runtime Trait Implementation for DPDK Runtime
impl UtilsRuntime for DPDKRuntime {
/// Returns a random value supporting the [Standard] distribution.
fn rng_gen<T>(&self) -> T
where
Standard: Distribution<T>,
{
self.rng.borrow_mut().gen()
}
/// Shuffles a mutable slice in place.
fn | <T>(&self, slice: &mut [T]) {
let rng = self.rng.borrow_mut();
slice.shuffle(&mut rng.to_owned());
}
}
| rng_shuffle |
ch3_impurity.py | import numpy as np
import matplotlib.pyplot as plt
blue = (0, 0, 1.0)
red = (1.0, 0, 0)
gray = (0.7, 0.7, 0.7)
# Criterion
def impurity_error(p1, p2):
return min(p1, p2)
def impurity_entropy(p1, p2):
|
def impurity_gini(p1, p2):
return p1 * (1 - p1) + p2 * (1 - p2)
# Split
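# Impurity decrease of a split s at node t:
#   Delta i(s, t) = i(t) - p_L * i(t_L) - p_R * i(t_R)
# where p_L, p_R are the fractions of samples sent to the left/right child.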
def p(y_t):
return 1.0 * y_t / np.sum(y_t)
impurity = impurity_gini
y_t = np.array([2, 8], dtype=np.float)
y_t_L = np.array([1, 5], dtype=np.float)
y_t_R = y_t - y_t_L
p_y_t = p(y_t)
p_y_t_L = p(y_t_L)
p_y_t_R = p(y_t_R)
p_L = y_t_L.sum() / y_t.sum()
p_R = y_t_R.sum() / y_t.sum()
i_t = impurity(*p_y_t)
i_t_L = impurity(*p_y_t_L)
i_t_R = impurity(*p_y_t_R)
print "Delta i(s, t) = i(t) - p_L * i(t_L) - p_R * i (t_R)"
print " = %f - %f * %f - %f * %f" % (i_t, p_L, i_t_L, p_R, i_t_R)
print " = %f" % (i_t - p_L * i_t_L - p_R * i_t_R, )
fig = plt.figure()
ax = fig.add_subplot(111)
x = np.linspace(0.0, 1.0, num=300)
# ax.plot(x, map(impurity, x, 1-x), label="entropy", color=blue)
ax.plot(x, map(impurity_error, x, 1-x), label="$i_E(t)$", color=gray)
ax.plot(x, map(impurity_entropy, x, 1-x), label="$i_H(t)$", color=blue)
ax.plot(x, map(impurity_gini, x, 1-x), label="$i_G(t)$", color=red)
ax.legend(loc="best")
plt.show()
ax.plot(p_y_t[0], i_t, marker="o", color=red)
ax.plot(p_y_t_L[0], i_t_L, marker="o", color=red)
ax.plot(p_y_t_R[0], i_t_R, marker="o", color=red)
ax.plot((p_y_t[0], p_y_t[0]), (0, i_t), ":", color=gray)
ax.plot((0, p_y_t[0]), (i_t, i_t), ":", color=gray)
ax.annotate("$i(t)$", xy=(0, i_t), xytext=(0+0.01, i_t), va="center")
ax.annotate("$p(c_1|t)$", xy=(p_y_t[0], 0), xytext=(p_y_t[0], 0+0.025), ha="center")
ax.plot((p_y_t_L[0], p_y_t_L[0]), (0, i_t_L), ":", color=gray)
ax.plot((0, p_y_t_L[0]), (i_t_L, i_t_L), ":", color=gray)
ax.annotate("$i(t_L)$", xy=(0, i_t_L), xytext=(0+0.01, i_t_L), va="center")
ax.annotate("$p(c_1|t_L)$", xy=(p_y_t_L[0], 0), xytext=(p_y_t_L[0], 0+0.025), ha="center")
ax.plot((p_y_t_R[0], p_y_t_R[0]), (0, i_t_R), ":", color=gray)
ax.plot((0, p_y_t_R[0]), (i_t_R, i_t_R), ":", color=gray)
ax.annotate("$i(t_R)$", xy=(0, i_t_R), xytext=(0+0.01, i_t_R), va="center")
ax.annotate("$p(c_1|t_R)$", xy=(p_y_t_R[0], 0), xytext=(p_y_t_R[0], 0+0.025), ha="center")
ax.plot((p_y_t_L[0], p_y_t_R[0]), (i_t_L, i_t_R), "-", color=gray)
ax.plot((p_y_t[0], p_y_t[0]), (i_t, p_L * i_t_L + p_R * i_t_R), "-", color=red)
ax.plot(p_y_t[0], p_L * i_t_L + p_R * i_t_R, marker="o", color=gray)
ax.annotate("$\Delta i(s, t) = %.3f$" % abs(i_t - p_L * i_t_L - p_R * i_t_R), xy=(p_y_t[0], i_t - 0.5*(i_t - p_L * i_t_L - p_R * i_t_R)), xytext=(p_y_t[0]+0.05, i_t - 0.5*(i_t - p_L * i_t_L - p_R * i_t_R)), arrowprops=dict(arrowstyle="->"), va="center")
#ax.legend(loc="best")
plt.show()
| if p1 == 0.0 or p1 == 1.0 or p2 == 0.0 or p2 == 1.0:
return 0.0
else:
return -(p1 * np.log2(p1) + p2 * np.log2(p2)) |
index.tsx | import * as React from 'react'
import Link from 'gatsby-link'
import Helmet from "react-helmet";
import { PostListViewItem } from "../components/ui/molecules/PostListViewItem";
declare const graphql: any;
export interface IIndexPageProps{
data: any;
}
export default class IndexPage extends React.Component<IIndexPageProps, {}> {
constructor(props) {
super(props);
}
render(){
return (
<div>
<Helmet title={`sharkDeveloper - home`} />
<div className={"row"}>
{
this.props.data.allMarkdownRemark.edges.map((post, i) =>
<div className="col-xs-12 col-sm-8 col-sm-offset-2" key={i}>
<PostListViewItem
url={post.node.frontmatter.path}
title={post.node.frontmatter.title}
date={post.node.frontmatter.date}
description={post.node.frontmatter.description}
/>
</div>
)
}
</div>
</div>
);
}
}
export const pageQuery = graphql`
query getAllPosts {
allMarkdownRemark(limit: 10) {
edges {
node {
frontmatter {
date(formatString: "MMMM DD, YYYY")
path
title
description
}
} | }
}
}
`; |
|
dhcp.rs | use crate::bus::Bus;
use crate::host::Host;
use crate::MacAddress;
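/// Placeholder DHCP-based host configuration; actual negotiation with a DHCP
/// server is not implemented yet (see the TODOs in `refresh`).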
pub struct Dhcp {
// settings: HostConfig,
// current: HostConfig,
}
impl Dhcp {
pub fn new(_mac: MacAddress) -> Self {
// let settings = HostConfig {
// mac,
// ..HostConfig::default()
// };
Self {
// settings,
// current: HostConfig::default(),
}
}
}
impl Host for Dhcp {
/// Gets (if necessary) and sets the host settings on the chip
fn refresh<SpiBus: Bus>(&mut self, _bus: &mut SpiBus) -> Result<(), SpiBus::Error> {
// TODO actually negotiate settings from DHCP
// TODO figure out how should receive socket for DHCP negotiations
Ok(()) | } | } |
special.rs | use crate::mem::AgcMemType;
use crate::consts::special::*;
use heapless::spsc::Producer;
use log::{error, warn};
// =============================================================================
// Public Structures
// =============================================================================
#[derive(Clone)]
pub struct AgcSpecialRegs {
pub cdu: (u16, u16, u16),
pub opt: (u16, u16),
pub pipa: (u16, u16, u16),
// LM only
pub rch: (u16, u16, u16), // Pitch, Yaw, Roll
// Uplink from Ground station comes here. Then UPRUPT interrupt
// occurs. Values can be:
// - 0 - Error Recovery
// - cccccCCCCCccccc - Triply Redundant bit pattern
pub inlink: u16,
}
// =============================================================================
// Implementations
// =============================================================================
impl AgcSpecialRegs {
pub fn new(_rupt_tx: Producer<u8, 8>) -> Self {
Self {
cdu: (0, 0, 0),
inlink: 0,
opt: (0, 0),
pipa: (0, 0, 0),
rch: (0, 0, 0),
}
}
#[allow(dead_code)]
pub fn reset(&mut self) {}
}
impl AgcMemType for AgcSpecialRegs {
fn read(&self, bank_idx: usize, bank_offset: usize) -> u16 {
if bank_idx != 0 {
error!("Accessing SpecialRegs on a non-zero bank. 0x{:x}", bank_idx);
return 0;
}
match bank_offset {
SG_CDUX => self.cdu.0,
SG_CDUY => self.cdu.1,
SG_CDUZ => self.cdu.2,
SG_OPTX => self.opt.0,
SG_OPTY => self.opt.1,
SG_PIPAX => self.pipa.0,
SG_PIPAY => self.pipa.1,
SG_PIPAZ => self.pipa.2,
// Inlink and Outlink Registers
SG_INLINK => self.inlink,
SG_OUTLINK => {
error!("Reading from outlink, which is known to not be used!");
0
}
SG_CDUXCMD | SG_CDUYCMD | SG_CDUZCMD => 0,
_ => {
error!(
"Accessing invalid SpecialRegister value: 0o{:o}",
bank_offset
);
0
}
}
}
fn write(&mut self, _bank_idx: usize, bank_offset: usize, value: u16) {
match bank_offset {
// Block of Read Only Registers. Send a warning mentioning how the
// Execution is trying to write to special read only registers
SG_CDUX | SG_CDUY | SG_CDUZ | SG_OPTX | SG_OPTY | SG_PIPAX | SG_PIPAY | SG_PIPAZ => {
warn!(
"Attempting to write to Read-Only Special Registers Address: {:o}",
bank_offset
);
}
// Inlink and Outlink Registers
SG_INLINK => {
self.inlink = value & 0x7FFF;
}
SG_OUTLINK => {
error!("Writing to outlink, which is known to not be used!");
}
_ => {
error!("Unimplemented Special Write: {:o}", bank_offset); | } | }
}
} |
losses.py | import numpy as np
import torch
import torch.nn as nn
def calc_iou(a, b):
area = (b[:, 2] - b[:, 0]) * (b[:, 3] - b[:, 1])
iw = torch.min(torch.unsqueeze(a[:, 2], dim=1), b[:, 2]) - torch.max(torch.unsqueeze(a[:, 0], 1), b[:, 0])
ih = torch.min(torch.unsqueeze(a[:, 3], dim=1), b[:, 3]) - torch.max(torch.unsqueeze(a[:, 1], 1), b[:, 1])
iw = torch.clamp(iw, min=0)
ih = torch.clamp(ih, min=0)
ua = torch.unsqueeze((a[:, 2] - a[:, 0]) * (a[:, 3] - a[:, 1]), dim=1) + area - iw * ih
ua = torch.clamp(ua, min=1e-8)
intersection = iw * ih
IoU = intersection / ua
return IoU
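# Hedged worked example (not in the original file): with a = [[0., 0., 2., 2.]]
# and b = [[1., 1., 3., 3.]], the intersection is 1 * 1 = 1 and the union is
# 4 + 4 - 1 = 7, so calc_iou returns a 1x1 tensor containing roughly 0.1429.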
class FocalLoss(nn.Module):
# def __init__(self):
def forward(self, classifications, regressions, anchors, annotations):
| alpha = 0.25
gamma = 2.0
batch_size = classifications.shape[0]
classification_losses = []
regression_losses = []
anchor = anchors[0, :, :]
anchor_widths = anchor[:, 2] - anchor[:, 0]
anchor_heights = anchor[:, 3] - anchor[:, 1]
anchor_ctr_x = anchor[:, 0] + 0.5 * anchor_widths
anchor_ctr_y = anchor[:, 1] + 0.5 * anchor_heights
for j in range(batch_size):
classification = classifications[j, :, :]
regression = regressions[j, :, :]
bbox_annotation = annotations[j, :, :]
bbox_annotation = bbox_annotation[bbox_annotation[:, 4] != -1]
if bbox_annotation.shape[0] == 0:
regression_losses.append(torch.tensor(0).float().cuda())
classification_losses.append(torch.tensor(0).float().cuda())
continue
classification = torch.clamp(classification, 1e-4, 1.0 - 1e-4)
IoU = calc_iou(anchors[0, :, :], bbox_annotation[:, :4]) # num_anchors x num_annotations
IoU_max, IoU_argmax = torch.max(IoU, dim=1) # num_anchors x 1
# import pdb
# pdb.set_trace()
# compute the loss for classification
targets = torch.ones(classification.shape) * -1
targets = targets.cuda()
targets[torch.lt(IoU_max, 0.4), :] = 0
positive_indices = torch.ge(IoU_max, 0.5)
num_positive_anchors = positive_indices.sum()
assigned_annotations = bbox_annotation[IoU_argmax, :]
targets[positive_indices, :] = 0
targets[positive_indices, assigned_annotations[positive_indices, 4].long()] = 1
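# The lines below implement the focal loss of Lin et al. (2017):
#   FL(p_t) = -alpha_t * (1 - p_t)^gamma * log(p_t),
# where p_t is `classification` for positive targets and 1 - `classification`
# otherwise; alpha_factor plays the role of alpha_t, and focal_weight ends up
# holding alpha_t * (1 - p_t)^gamma before it multiplies the BCE term.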
alpha_factor = torch.ones(targets.shape).cuda() * alpha
alpha_factor = torch.where(torch.eq(targets, 1.), alpha_factor, 1. - alpha_factor)
focal_weight = torch.where(torch.eq(targets, 1.), 1. - classification, classification)
focal_weight = alpha_factor * torch.pow(focal_weight, gamma)
bce = -(targets * torch.log(classification) + (1.0 - targets) * torch.log(1.0 - classification))
# cls_loss = focal_weight * torch.pow(bce, gamma)
cls_loss = focal_weight * bce
cls_loss = torch.where(torch.ne(targets, -1.0), cls_loss, torch.zeros(cls_loss.shape).cuda())
classification_losses.append(cls_loss.sum() / torch.clamp(num_positive_anchors.float(), min=1.0))
# compute the loss for regression
if positive_indices.sum() > 0:
assigned_annotations = assigned_annotations[positive_indices, :]
anchor_widths_pi = anchor_widths[positive_indices]
anchor_heights_pi = anchor_heights[positive_indices]
anchor_ctr_x_pi = anchor_ctr_x[positive_indices]
anchor_ctr_y_pi = anchor_ctr_y[positive_indices]
gt_widths = assigned_annotations[:, 2] - assigned_annotations[:, 0]
gt_heights = assigned_annotations[:, 3] - assigned_annotations[:, 1]
gt_ctr_x = assigned_annotations[:, 0] + 0.5 * gt_widths
gt_ctr_y = assigned_annotations[:, 1] + 0.5 * gt_heights
# clip widths to 1
gt_widths = torch.clamp(gt_widths, min=1)
gt_heights = torch.clamp(gt_heights, min=1)
targets_dx = (gt_ctr_x - anchor_ctr_x_pi) / anchor_widths_pi
targets_dy = (gt_ctr_y - anchor_ctr_y_pi) / anchor_heights_pi
targets_dw = torch.log(gt_widths / anchor_widths_pi)
targets_dh = torch.log(gt_heights / anchor_heights_pi)
targets = torch.stack((targets_dx, targets_dy, targets_dw, targets_dh))
targets = targets.t()
targets = targets / torch.Tensor([[0.1, 0.1, 0.2, 0.2]]).cuda()
negative_indices = 1 - positive_indices
regression_diff = torch.abs(targets - regression[positive_indices, :])
regression_loss = torch.where(
torch.le(regression_diff, 1.0 / 9.0),
0.5 * 9.0 * torch.pow(regression_diff, 2),
regression_diff - 0.5 / 9.0
)
regression_losses.append(regression_loss.mean())
else:
regression_losses.append(torch.tensor(0).float().cuda())
return torch.stack(classification_losses).mean(dim=0, keepdim=True), torch.stack(regression_losses).mean(dim=0,
keepdim=True) |
|
compare_method.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::free_region::FreeRegionMap;
use middle::infer;
use middle::traits;
use middle::ty::{self};
use middle::subst::{self, Subst, Substs, VecPerParamSpace};
use syntax::ast;
use syntax::codemap::Span;
use syntax::parse::token;
use super::assoc;
/// Checks that a method from an impl conforms to the signature of
/// the same method as declared in the trait.
///
/// # Parameters
///
/// - impl_m: type of the method we are checking
/// - impl_m_span: span to use for reporting errors
/// - impl_m_body_id: id of the method body
/// - trait_m: the method in the trait
/// - impl_trait_ref: the TraitRef corresponding to the trait implementation
pub fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>,
impl_m: &ty::Method<'tcx>,
impl_m_span: Span,
impl_m_body_id: ast::NodeId,
trait_m: &ty::Method<'tcx>,
impl_trait_ref: &ty::TraitRef<'tcx>) |
pub fn compare_const_impl<'tcx>(tcx: &ty::ctxt<'tcx>,
impl_c: &ty::AssociatedConst<'tcx>,
impl_c_span: Span,
trait_c: &ty::AssociatedConst<'tcx>,
impl_trait_ref: &ty::TraitRef<'tcx>) {
debug!("compare_const_impl(impl_trait_ref={:?})",
impl_trait_ref);
let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, true);
let mut fulfillment_cx = infcx.fulfillment_cx.borrow_mut();
// The below is for the most part highly similar to the procedure
// for methods above. It is simpler in many respects, especially
// because we shouldn't really have to deal with lifetimes or
// predicates. In fact some of this should probably be put into
// shared functions because of DRY violations...
let trait_to_impl_substs = &impl_trait_ref.substs;
// Create a parameter environment that represents the implementation's
// method.
let impl_param_env =
ty::ParameterEnvironment::for_item(tcx, impl_c.def_id.node);
// Create mapping from impl to skolemized.
let impl_to_skol_substs = &impl_param_env.free_substs;
// Create mapping from trait to skolemized.
let trait_to_skol_substs =
trait_to_impl_substs
.subst(tcx, impl_to_skol_substs)
.with_method(impl_to_skol_substs.types.get_slice(subst::FnSpace).to_vec(),
impl_to_skol_substs.regions().get_slice(subst::FnSpace).to_vec());
debug!("compare_const_impl: trait_to_skol_substs={:?}",
trait_to_skol_substs);
// Compute skolemized form of impl and trait const tys.
let impl_ty = impl_c.ty.subst(tcx, impl_to_skol_substs);
let trait_ty = trait_c.ty.subst(tcx, &trait_to_skol_substs);
let err = infcx.commit_if_ok(|_| {
let origin = infer::Misc(impl_c_span);
// There is no "body" here, so just pass dummy id.
let impl_ty =
assoc::normalize_associated_types_in(&infcx,
&mut fulfillment_cx,
impl_c_span,
0,
&impl_ty);
debug!("compare_const_impl: impl_ty={:?}",
impl_ty);
let trait_ty =
assoc::normalize_associated_types_in(&infcx,
&mut fulfillment_cx,
impl_c_span,
0,
&trait_ty);
debug!("compare_const_impl: trait_ty={:?}",
trait_ty);
infer::mk_subty(&infcx, false, origin, impl_ty, trait_ty)
});
match err {
Ok(()) => { }
Err(terr) => {
debug!("checking associated const for compatibility: impl ty {:?}, trait ty {:?}",
impl_ty,
trait_ty);
span_err!(tcx.sess, impl_c_span, E0326,
"implemented const `{}` has an incompatible type for \
trait: {}",
token::get_name(trait_c.name),
terr);
return;
}
}
}
| {
debug!("compare_impl_method(impl_trait_ref={:?})",
impl_trait_ref);
debug!("compare_impl_method: impl_trait_ref (liberated) = {:?}",
impl_trait_ref);
let mut infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, true);
let mut fulfillment_cx = infcx.fulfillment_cx.borrow_mut();
let trait_to_impl_substs = &impl_trait_ref.substs;
// Try to give more informative error messages about self typing
// mismatches. Note that any mismatch will also be detected
// below, where we construct a canonical function type that
// includes the self parameter as a normal parameter. It's just
// that the error messages you get out of this code are a bit more
// inscrutable, particularly for cases where one method has no
// self.
match (&trait_m.explicit_self, &impl_m.explicit_self) {
(&ty::StaticExplicitSelfCategory,
&ty::StaticExplicitSelfCategory) => {}
(&ty::StaticExplicitSelfCategory, _) => {
span_err!(tcx.sess, impl_m_span, E0185,
"method `{}` has a `{}` declaration in the impl, \
but not in the trait",
trait_m.name,
impl_m.explicit_self);
return;
}
(_, &ty::StaticExplicitSelfCategory) => {
span_err!(tcx.sess, impl_m_span, E0186,
"method `{}` has a `{}` declaration in the trait, \
but not in the impl",
trait_m.name,
trait_m.explicit_self);
return;
}
_ => {
// Let the type checker catch other errors below
}
}
let num_impl_m_type_params = impl_m.generics.types.len(subst::FnSpace);
let num_trait_m_type_params = trait_m.generics.types.len(subst::FnSpace);
if num_impl_m_type_params != num_trait_m_type_params {
span_err!(tcx.sess, impl_m_span, E0049,
"method `{}` has {} type parameter{} \
but its trait declaration has {} type parameter{}",
token::get_name(trait_m.name),
num_impl_m_type_params,
if num_impl_m_type_params == 1 {""} else {"s"},
num_trait_m_type_params,
if num_trait_m_type_params == 1 {""} else {"s"});
return;
}
if impl_m.fty.sig.0.inputs.len() != trait_m.fty.sig.0.inputs.len() {
span_err!(tcx.sess, impl_m_span, E0050,
"method `{}` has {} parameter{} \
but the declaration in trait `{}` has {}",
token::get_name(trait_m.name),
impl_m.fty.sig.0.inputs.len(),
if impl_m.fty.sig.0.inputs.len() == 1 {""} else {"s"},
tcx.item_path_str(trait_m.def_id),
trait_m.fty.sig.0.inputs.len());
return;
}
// This code is best explained by example. Consider a trait:
//
// trait Trait<'t,T> {
// fn method<'a,M>(t: &'t T, m: &'a M) -> Self;
// }
//
// And an impl:
//
// impl<'i, 'j, U> Trait<'j, &'i U> for Foo {
// fn method<'b,N>(t: &'j &'i U, m: &'b N) -> Foo;
// }
//
// We wish to decide if those two method types are compatible.
//
// We start out with trait_to_impl_substs, that maps the trait
// type parameters to impl type parameters. This is taken from the
// impl trait reference:
//
// trait_to_impl_substs = {'t => 'j, T => &'i U, Self => Foo}
//
// We create a mapping `dummy_substs` that maps from the impl type
// parameters to fresh types and regions. For type parameters,
// this is the identity transform, but we could as well use any
// skolemized types. For regions, we convert from bound to free
// regions (Note: but only early-bound regions, i.e., those
// declared on the impl or used in type parameter bounds).
//
// impl_to_skol_substs = {'i => 'i0, U => U0, N => N0 }
//
// Now we can apply skol_substs to the type of the impl method
// to yield a new function type in terms of our fresh, skolemized
// types:
//
// <'b> fn(t: &'i0 U0, m: &'b) -> Foo
//
// We now want to extract and substitute the type of the *trait*
// method and compare it. To do so, we must create a compound
// substitution by combining trait_to_impl_substs and
// impl_to_skol_substs, and also adding a mapping for the method
// type parameters. We extend the mapping to also include
// the method parameters.
//
// trait_to_skol_substs = { T => &'i0 U0, Self => Foo, M => N0 }
//
// Applying this to the trait method type yields:
//
// <'a> fn(t: &'i0 U0, m: &'a) -> Foo
//
// This type is also the same but the name of the bound region ('a
// vs 'b). However, the normal subtyping rules on fn types handle
// this kind of equivalency just fine.
//
// We now use these substitutions to ensure that all declared bounds are
// satisfied by the implementation's method.
//
// We do this by creating a parameter environment which contains a
// substitution corresponding to impl_to_skol_substs. We then build
// trait_to_skol_substs and use it to convert the predicates contained
// in the trait_m.generics to the skolemized form.
//
// Finally we register each of these predicates as an obligation in
// a fresh FulfillmentCtxt, and invoke select_all_or_error.
// Create a parameter environment that represents the implementation's
// method.
let impl_param_env =
ty::ParameterEnvironment::for_item(tcx, impl_m.def_id.node);
// Create mapping from impl to skolemized.
let impl_to_skol_substs = &impl_param_env.free_substs;
// Create mapping from trait to skolemized.
let trait_to_skol_substs =
trait_to_impl_substs
.subst(tcx, impl_to_skol_substs)
.with_method(impl_to_skol_substs.types.get_slice(subst::FnSpace).to_vec(),
impl_to_skol_substs.regions().get_slice(subst::FnSpace).to_vec());
debug!("compare_impl_method: trait_to_skol_substs={:?}",
trait_to_skol_substs);
// Check region bounds. FIXME(@jroesch) refactor this away when removing
// ParamBounds.
if !check_region_bounds_on_impl_method(tcx,
impl_m_span,
impl_m,
&trait_m.generics,
&impl_m.generics,
&trait_to_skol_substs,
impl_to_skol_substs) {
return;
}
// Create obligations for each predicate declared by the impl
// definition in the context of the trait's parameter
// environment. We can't just use `impl_env.caller_bounds`,
// however, because we want to replace all late-bound regions with
// region variables.
let impl_bounds =
impl_m.predicates.instantiate(tcx, impl_to_skol_substs);
let (impl_bounds, _) =
infcx.replace_late_bound_regions_with_fresh_var(
impl_m_span,
infer::HigherRankedType,
&ty::Binder(impl_bounds));
debug!("compare_impl_method: impl_bounds={:?}",
impl_bounds);
// Normalize the associated types in the trait_bounds.
let trait_bounds = trait_m.predicates.instantiate(tcx, &trait_to_skol_substs);
// Obtain the predicate split predicate sets for each.
let trait_pred = trait_bounds.predicates.split();
let impl_pred = impl_bounds.predicates.split();
// This is the only tricky bit of the new way we check implementation methods
// We need to build a set of predicates where only the FnSpace bounds
// are from the trait and we assume all other bounds from the implementation
// to be previously satisfied.
//
// We then register the obligations from the impl_m and check to see
// if all constraints hold.
let hybrid_preds = VecPerParamSpace::new(
impl_pred.types,
impl_pred.selfs,
trait_pred.fns
);
// Construct trait parameter environment and then shift it into the skolemized viewpoint.
// The key step here is to update the caller_bounds's predicates to be
// the new hybrid bounds we computed.
let normalize_cause = traits::ObligationCause::misc(impl_m_span, impl_m_body_id);
let trait_param_env = impl_param_env.with_caller_bounds(hybrid_preds.into_vec());
let trait_param_env = traits::normalize_param_env_or_error(trait_param_env,
normalize_cause.clone());
// FIXME(@jroesch) this seems ugly, but is a temporary change
infcx.parameter_environment = trait_param_env;
debug!("compare_impl_method: trait_bounds={:?}",
infcx.parameter_environment.caller_bounds);
let mut selcx = traits::SelectionContext::new(&infcx);
for predicate in impl_pred.fns {
let traits::Normalized { value: predicate, .. } =
traits::normalize(&mut selcx, normalize_cause.clone(), &predicate);
let cause = traits::ObligationCause {
span: impl_m_span,
body_id: impl_m_body_id,
code: traits::ObligationCauseCode::CompareImplMethodObligation
};
fulfillment_cx.register_predicate_obligation(
&infcx,
traits::Obligation::new(cause, predicate));
}
// We now need to check that the signature of the impl method is
// compatible with that of the trait method. We do this by
// checking that `impl_fty <: trait_fty`.
//
// FIXME. Unfortunately, this doesn't quite work right now because
// associated type normalization is not integrated into subtype
// checks. For the comparison to be valid, we need to
// normalize the associated types in the impl/trait methods
// first. However, because function types bind regions, just
// calling `normalize_associated_types_in` would have no effect on
// any associated types appearing in the fn arguments or return
// type.
// Compute skolemized form of impl and trait method tys.
let impl_fty = tcx.mk_fn(None, tcx.mk_bare_fn(impl_m.fty.clone()));
let impl_fty = impl_fty.subst(tcx, impl_to_skol_substs);
let trait_fty = tcx.mk_fn(None, tcx.mk_bare_fn(trait_m.fty.clone()));
let trait_fty = trait_fty.subst(tcx, &trait_to_skol_substs);
let err = infcx.commit_if_ok(|snapshot| {
let origin = infer::MethodCompatCheck(impl_m_span);
let (impl_sig, _) =
infcx.replace_late_bound_regions_with_fresh_var(impl_m_span,
infer::HigherRankedType,
&impl_m.fty.sig);
let impl_sig =
impl_sig.subst(tcx, impl_to_skol_substs);
let impl_sig =
assoc::normalize_associated_types_in(&infcx,
&mut fulfillment_cx,
impl_m_span,
impl_m_body_id,
&impl_sig);
let impl_fty = tcx.mk_fn(None, tcx.mk_bare_fn(ty::BareFnTy {
unsafety: impl_m.fty.unsafety,
abi: impl_m.fty.abi,
sig: ty::Binder(impl_sig)
}));
debug!("compare_impl_method: impl_fty={:?}",
impl_fty);
let (trait_sig, skol_map) =
infcx.skolemize_late_bound_regions(&trait_m.fty.sig, snapshot);
let trait_sig =
trait_sig.subst(tcx, &trait_to_skol_substs);
let trait_sig =
assoc::normalize_associated_types_in(&infcx,
&mut fulfillment_cx,
impl_m_span,
impl_m_body_id,
&trait_sig);
let trait_fty = tcx.mk_fn(None, tcx.mk_bare_fn(ty::BareFnTy {
unsafety: trait_m.fty.unsafety,
abi: trait_m.fty.abi,
sig: ty::Binder(trait_sig)
}));
debug!("compare_impl_method: trait_fty={:?}",
trait_fty);
try!(infer::mk_subty(&infcx, false, origin, impl_fty, trait_fty));
infcx.leak_check(&skol_map, snapshot)
});
match err {
Ok(()) => { }
Err(terr) => {
debug!("checking trait method for compatibility: impl ty {:?}, trait ty {:?}",
impl_fty,
trait_fty);
span_err!(tcx.sess, impl_m_span, E0053,
"method `{}` has an incompatible type for trait: {}",
token::get_name(trait_m.name),
terr);
return;
}
}
// Check that all obligations are satisfied by the implementation's
// version.
match fulfillment_cx.select_all_or_error(&infcx) {
Err(ref errors) => { traits::report_fulfillment_errors(&infcx, errors) }
Ok(_) => {}
}
// Finally, resolve all regions. This catches wily misuses of
// lifetime parameters. We have to build up a plausible lifetime
// environment based on what we find in the trait. We could also
// include the obligations derived from the method argument types,
// but I don't think it's necessary -- after all, those are still
// in effect when type-checking the body, and all the
// where-clauses in the header etc should be implied by the trait
// anyway, so it shouldn't be needed there either. Anyway, we can
// always add more relations later (it's backwards compat).
let mut free_regions = FreeRegionMap::new();
free_regions.relate_free_regions_from_predicates(tcx,
&infcx.parameter_environment.caller_bounds);
infcx.resolve_regions_and_report_errors(&free_regions, impl_m_body_id);
fn check_region_bounds_on_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>,
span: Span,
impl_m: &ty::Method<'tcx>,
trait_generics: &ty::Generics<'tcx>,
impl_generics: &ty::Generics<'tcx>,
trait_to_skol_substs: &Substs<'tcx>,
impl_to_skol_substs: &Substs<'tcx>)
-> bool
{
let trait_params = trait_generics.regions.get_slice(subst::FnSpace);
let impl_params = impl_generics.regions.get_slice(subst::FnSpace);
debug!("check_region_bounds_on_impl_method: \
trait_generics={:?} \
impl_generics={:?} \
trait_to_skol_substs={:?} \
impl_to_skol_substs={:?}",
trait_generics,
impl_generics,
trait_to_skol_substs,
impl_to_skol_substs);
// Must have same number of early-bound lifetime parameters.
// Unfortunately, if the user screws up the bounds, then this
// will change classification between early and late. E.g.,
// if in trait we have `<'a,'b:'a>`, and in impl we just have
// `<'a,'b>`, then we have 2 early-bound lifetime parameters
// in trait but 0 in the impl. But if we report "expected 2
// but found 0" it's confusing, because it looks like there
// are zero. Since I don't quite know how to phrase things at
// the moment, give a kind of vague error message.
if trait_params.len() != impl_params.len() {
span_err!(tcx.sess, span, E0195,
"lifetime parameters or bounds on method `{}` do \
not match the trait declaration",
token::get_name(impl_m.name));
return false;
}
return true;
}
} |
model_virtual_machine_to_alternative_restore_options.go | /*
* Veeam Backup for AWS public API 1.0
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* API version: 1.0-rev0
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package client
import (
"encoding/json"
)
// VirtualMachineToAlternativeRestoreOptions struct for VirtualMachineToAlternativeRestoreOptions
type VirtualMachineToAlternativeRestoreOptions struct {
Name string `json:"name"`
RegionId string `json:"regionId"`
VmType string `json:"vmType"`
SubnetId string `json:"subnetId"`
NetworkSecurityGroupId string `json:"networkSecurityGroupId"`
PreserveEncryptionForVolumes bool `json:"preserveEncryptionForVolumes"`
EncryptionKeyId *string `json:"encryptionKeyId,omitempty"`
}
// NewVirtualMachineToAlternativeRestoreOptions instantiates a new VirtualMachineToAlternativeRestoreOptions object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewVirtualMachineToAlternativeRestoreOptions(name string, regionId string, vmType string, subnetId string, networkSecurityGroupId string, preserveEncryptionForVolumes bool) *VirtualMachineToAlternativeRestoreOptions {
this := VirtualMachineToAlternativeRestoreOptions{}
this.Name = name
this.RegionId = regionId
this.VmType = vmType
this.SubnetId = subnetId
this.NetworkSecurityGroupId = networkSecurityGroupId
this.PreserveEncryptionForVolumes = preserveEncryptionForVolumes
return &this
}
// NewVirtualMachineToAlternativeRestoreOptionsWithDefaults instantiates a new VirtualMachineToAlternativeRestoreOptions object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewVirtualMachineToAlternativeRestoreOptionsWithDefaults() *VirtualMachineToAlternativeRestoreOptions {
this := VirtualMachineToAlternativeRestoreOptions{}
return &this
}
// GetName returns the Name field value
func (o *VirtualMachineToAlternativeRestoreOptions) GetName() string {
if o == nil {
var ret string
return ret
}
return o.Name
}
// GetNameOk returns a tuple with the Name field value
// and a boolean to check if the value has been set.
func (o *VirtualMachineToAlternativeRestoreOptions) GetNameOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Name, true
}
// SetName sets field value
func (o *VirtualMachineToAlternativeRestoreOptions) SetName(v string) {
o.Name = v
}
// GetRegionId returns the RegionId field value
func (o *VirtualMachineToAlternativeRestoreOptions) GetRegionId() string {
if o == nil {
var ret string
return ret
}
return o.RegionId
}
// GetRegionIdOk returns a tuple with the RegionId field value
// and a boolean to check if the value has been set.
func (o *VirtualMachineToAlternativeRestoreOptions) GetRegionIdOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.RegionId, true
}
// SetRegionId sets field value
func (o *VirtualMachineToAlternativeRestoreOptions) SetRegionId(v string) {
o.RegionId = v
}
// GetVmType returns the VmType field value
func (o *VirtualMachineToAlternativeRestoreOptions) GetVmType() string {
if o == nil {
var ret string
return ret
}
return o.VmType
}
// GetVmTypeOk returns a tuple with the VmType field value
// and a boolean to check if the value has been set.
func (o *VirtualMachineToAlternativeRestoreOptions) GetVmTypeOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.VmType, true
}
// SetVmType sets field value
func (o *VirtualMachineToAlternativeRestoreOptions) SetVmType(v string) {
o.VmType = v
}
// GetSubnetId returns the SubnetId field value
func (o *VirtualMachineToAlternativeRestoreOptions) GetSubnetId() string {
if o == nil {
var ret string
return ret
}
return o.SubnetId
}
// GetSubnetIdOk returns a tuple with the SubnetId field value
// and a boolean to check if the value has been set.
func (o *VirtualMachineToAlternativeRestoreOptions) GetSubnetIdOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.SubnetId, true
}
// SetSubnetId sets field value
func (o *VirtualMachineToAlternativeRestoreOptions) SetSubnetId(v string) {
o.SubnetId = v
}
// GetNetworkSecurityGroupId returns the NetworkSecurityGroupId field value
func (o *VirtualMachineToAlternativeRestoreOptions) GetNetworkSecurityGroupId() string {
if o == nil {
var ret string
return ret
}
return o.NetworkSecurityGroupId
}
// GetNetworkSecurityGroupIdOk returns a tuple with the NetworkSecurityGroupId field value
// and a boolean to check if the value has been set.
func (o *VirtualMachineToAlternativeRestoreOptions) GetNetworkSecurityGroupIdOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.NetworkSecurityGroupId, true
}
// SetNetworkSecurityGroupId sets field value
func (o *VirtualMachineToAlternativeRestoreOptions) SetNetworkSecurityGroupId(v string) {
o.NetworkSecurityGroupId = v
}
// GetPreserveEncryptionForVolumes returns the PreserveEncryptionForVolumes field value
func (o *VirtualMachineToAlternativeRestoreOptions) GetPreserveEncryptionForVolumes() bool {
if o == nil {
var ret bool
return ret
}
return o.PreserveEncryptionForVolumes
}
// GetPreserveEncryptionForVolumesOk returns a tuple with the PreserveEncryptionForVolumes field value
// and a boolean to check if the value has been set.
func (o *VirtualMachineToAlternativeRestoreOptions) GetPreserveEncryptionForVolumesOk() (*bool, bool) {
if o == nil {
return nil, false
}
return &o.PreserveEncryptionForVolumes, true
}
// SetPreserveEncryptionForVolumes sets field value
func (o *VirtualMachineToAlternativeRestoreOptions) SetPreserveEncryptionForVolumes(v bool) {
o.PreserveEncryptionForVolumes = v
}
// GetEncryptionKeyId returns the EncryptionKeyId field value if set, zero value otherwise.
func (o *VirtualMachineToAlternativeRestoreOptions) GetEncryptionKeyId() string {
if o == nil || o.EncryptionKeyId == nil {
var ret string
return ret
}
return *o.EncryptionKeyId
}
// GetEncryptionKeyIdOk returns a tuple with the EncryptionKeyId field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *VirtualMachineToAlternativeRestoreOptions) GetEncryptionKeyIdOk() (*string, bool) {
if o == nil || o.EncryptionKeyId == nil {
return nil, false
}
return o.EncryptionKeyId, true
}
// HasEncryptionKeyId returns a boolean if a field has been set.
func (o *VirtualMachineToAlternativeRestoreOptions) HasEncryptionKeyId() bool {
if o != nil && o.EncryptionKeyId != nil {
return true
}
return false
}
// SetEncryptionKeyId gets a reference to the given string and assigns it to the EncryptionKeyId field.
func (o *VirtualMachineToAlternativeRestoreOptions) SetEncryptionKeyId(v string) {
o.EncryptionKeyId = &v
}
func (o VirtualMachineToAlternativeRestoreOptions) MarshalJSON() ([]byte, error) {
toSerialize := map[string]interface{}{}
if true {
toSerialize["name"] = o.Name
}
if true {
toSerialize["regionId"] = o.RegionId
}
if true {
toSerialize["vmType"] = o.VmType
}
if true {
toSerialize["subnetId"] = o.SubnetId
}
if true {
toSerialize["networkSecurityGroupId"] = o.NetworkSecurityGroupId
}
if true {
toSerialize["preserveEncryptionForVolumes"] = o.PreserveEncryptionForVolumes
}
if o.EncryptionKeyId != nil {
toSerialize["encryptionKeyId"] = o.EncryptionKeyId
}
return json.Marshal(toSerialize)
} | type NullableVirtualMachineToAlternativeRestoreOptions struct {
value *VirtualMachineToAlternativeRestoreOptions
isSet bool
}
func (v NullableVirtualMachineToAlternativeRestoreOptions) Get() *VirtualMachineToAlternativeRestoreOptions {
return v.value
}
func (v *NullableVirtualMachineToAlternativeRestoreOptions) Set(val *VirtualMachineToAlternativeRestoreOptions) {
v.value = val
v.isSet = true
}
func (v NullableVirtualMachineToAlternativeRestoreOptions) IsSet() bool {
return v.isSet
}
func (v *NullableVirtualMachineToAlternativeRestoreOptions) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableVirtualMachineToAlternativeRestoreOptions(val *VirtualMachineToAlternativeRestoreOptions) *NullableVirtualMachineToAlternativeRestoreOptions {
return &NullableVirtualMachineToAlternativeRestoreOptions{value: val, isSet: true}
}
func (v NullableVirtualMachineToAlternativeRestoreOptions) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableVirtualMachineToAlternativeRestoreOptions) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | |
cnn_bin_file_to_csv_converter.py | import glob, struct, random, csv
from tensorflow.core.example import example_pb2
# <s> and </s> are used in the data files to segment the abstracts into sentences. They don't receive vocab ids.
SENTENCE_START = '<s>'
SENTENCE_END = '</s>'
PAD_TOKEN = '[PAD]' # This has a vocab id, which is used to pad the encoder input, decoder input and target sequence
UNKNOWN_TOKEN = '[UNK]' # This has a vocab id, which is used to represent out-of-vocabulary words
START_DECODING = '[START]' # This has a vocab id, which is used at the start of every decoder input sequence
STOP_DECODING = '[STOP]' # This has a vocab id, which is used at the end of untruncated target sequences
# Note: none of <s>, </s>, [PAD], [UNK], [START], [STOP] should appear in the vocab file.
def example_generator(data_path, single_pass):
"""Generates tf.Examples from data files.
Binary data format: <length><blob>. <length> represents the byte size
of <blob>. <blob> is serialized tf.Example proto. The tf.Example contains
the tokenized article text and summary.
Args:
data_path:
Path to tf.Example data files. Can include wildcards, e.g. if you have several training data chunk files train_001.bin, train_002.bin, etc, then pass data_path=train_* to access them all.
single_pass:
Boolean. If True, go through the dataset exactly once, generating examples in the order they appear, then return. Otherwise, generate random examples indefinitely.
Yields:
Deserialized tf.Example.
"""
while True:
filelist = glob.glob(data_path) # get the list of datafiles
assert filelist, ('Error: Empty filelist at %s' % data_path) # check filelist isn't empty
if single_pass:
filelist = sorted(filelist)
else:
random.shuffle(filelist)
for f in filelist:
reader = open(f, 'rb')
while True:
len_bytes = reader.read(8)
if not len_bytes: break # finished reading this file
str_len = struct.unpack('q', len_bytes)[0]
example_str = struct.unpack('%ds' % str_len, reader.read(str_len))[0]
yield example_pb2.Example.FromString(example_str)
if single_pass:
print "example_generator completed reading all datafiles. No more data."
break
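# Hedged sketch (not part of the original script): writing a single record in the
# <length><blob> format that example_generator expects. write_example is an
# illustrative helper, not used elsewhere; article and abstract are assumed to be
# byte strings, and the feature keys match those read by text_generator below.
def write_example(writer, article, abstract):
    example = example_pb2.Example()
    example.features.feature['article'].bytes_list.value.extend([article])
    example.features.feature['abstract'].bytes_list.value.extend([abstract])
    blob = example.SerializeToString()
    writer.write(struct.pack('q', len(blob)))
    writer.write(struct.pack('%ds' % len(blob), blob))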
def abstract2sents(abstract):
|
def text_generator(example_generator):
"""Generates article and abstract text from tf.Example.
Args:
example_generator: a generator of tf.Examples from file. See data.example_generator"""
while True:
e = next(example_generator) # e is a tf.Example
try:
article_text = e.features.feature['article'].bytes_list.value[
0] # the article text was saved under the key 'article' in the data files
abstract_text = e.features.feature['abstract'].bytes_list.value[
0] # the abstract text was saved under the key 'abstract' in the data files
except ValueError:
# tf.logging.error('Failed to get article or abstract from example')
continue
else:
yield (article_text, abstract_text)
def read_bin_files(input_bin_path, output_csv_path,single_pass):
"""Reads data from file and processes into Examples which are then placed into the example queue."""
input_gen = text_generator(example_generator(input_bin_path, single_pass))
with open(output_csv_path, mode='w') as output_file:
output_writer = csv.writer(output_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
while True:
try:
(article,
abstract) = next(input_gen) # read the next example from file. article and abstract are both strings.
except StopIteration: # if there are no more examples:
# tf.logging.info("The example generator for this example queue filling thread has exhausted data.")
if single_pass:
# tf.logging.info("single_pass mode is on, so we've finished reading dataset. This thread is stopping.")
# self._finished_reading = True
break
else:
raise Exception("single_pass mode is off but the example generator is out of data; error.")
# Use the <s> and </s> tags in abstract to get a list of sentences.
abstract_sentences = [sent.strip() for sent in abstract2sents(abstract)]
output_writer.writerow(['. '.join(abstract_sentences), article])
if __name__ == "__main__":
input_bin_path = '/home/sampanna/Study/BDTS/modified-keras-text-summarization/files/cnn/finished_files/chunked/train_*.bin'
output_csv_path = 'cnn_summary_dataset.csv'
read_bin_files(input_bin_path, output_csv_path,True)
| """Splits abstract text from datafile into list of sentences.
Args:
abstract: string containing <s> and </s> tags for starts and ends of sentences
Returns:
sents: List of sentence strings (no tags)"""
cur = 0
sents = []
while True:
try:
start_p = abstract.index(SENTENCE_START, cur)
end_p = abstract.index(SENTENCE_END, start_p + 1)
cur = end_p + len(SENTENCE_END)
sents.append(abstract[start_p + len(SENTENCE_START):end_p])
except ValueError as e: # no more sentences
return sents |
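# Hedged worked example (not in the original file):
# abstract2sents("<s> police arrest suspect . </s> <s> trial begins monday . </s>")
# returns [' police arrest suspect . ', ' trial begins monday . '], which
# read_bin_files strips sentence by sentence before joining.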
ma.py | #!/usr/bin/python3
import sys
import fcntl
import logging
import time
import io
import datetime
import decimal
import statistics
from astm_bidirectional_common import my_sql , file_mgmt, print_to_log
#For mysql password
sys.path.append('/var/gmcs_config')
import astm_var
####Settings section start#####
logfile_name='/var/log/ma.log'
log=1
n_size=50
####Settings section end#####
'''
select sample_id,result,avg(result)
over (ROWS BETWEEN 10 PRECEDING AND CURRENT ROW)
from result where result>0 and examination_id=5031 order by sample_id desc limit 40
'''
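# The code below computes this kind of moving average in Python instead of SQL:
# for each examination_id it takes the latest n_size positive results, and stores
# statistics.mean() of them, together with the newest sample id and value, in the
# moving_average table.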
last_sample_id_dict={}
logging.basicConfig(filename=logfile_name,level=logging.DEBUG,format='%(asctime)s %(message)s')
if(log==0):
logging.disable(logging.DEBUG)
print_to_log("Moving Average Logging Test","[OK]")
def check_if_new_result_arrived(ms,examination_id):
|
def calculate_moving_average(ms,examination_id):
chk=check_if_new_result_arrived(ms,examination_id)
if(chk==False):
print_to_log("Last sample id is not changed.. nothing to do for:",examination_id)
return
#prepared_sql='select avg(result) from result where examination_id=%s and result>0 order by sample_id desc limit %s'
prepared_sql='select result from result where examination_id=%s and result>0 order by sample_id desc limit %s'
data_tpl=(examination_id,n_size)
cur=ms.run_query_with_log(prepared_sql,data_tpl)
r_tuple=()
if(cur!=None):
r=ms.get_single_row(cur)
while(r!=None):
r_tuple=r_tuple+(decimal.Decimal(r[0]),)
r=ms.get_single_row(cur)
ms.close_cursor(cur)
r_avg=statistics.mean(r_tuple)
dt=datetime.datetime.now()
print_to_log("datetime",dt.strftime("%Y-%m-%d-%H-%M-%S"))
prepared_sql_insert='insert into moving_average (examination_id,date_time,avg_value,sample_id,value) values(%s,%s,%s,%s,%s)'
data_tpl_insert=(examination_id,dt,r_avg,chk[0],chk[1])
curi=ms.run_query_with_log(prepared_sql_insert,data_tpl_insert)
ms=my_sql()
ms.get_link(astm_var.my_host,astm_var.my_user,astm_var.my_pass,astm_var.my_db)
while True:
calculate_moving_average(ms,5031)
time.sleep(10)
ms.close_link()
| global last_sample_id_dict
prepared_sql='select max(sample_id) from result where examination_id=%s and result>0'
data_tpl=(examination_id,)
cur=ms.run_query_with_log(prepared_sql,data_tpl)
if(cur!=None):
r=ms.get_single_row(cur)
print_to_log("max sample_id for {}".format(examination_id),r[0])
ms.close_cursor(cur)
if(examination_id in last_sample_id_dict):
if(last_sample_id_dict[examination_id]==r[0]):
print_to_log("Last sample id is not changed {}".format(last_sample_id_dict),"{}:{}".format(examination_id,r[0]))
return False
else:
print_to_log("Last sample id is changed {}".format(last_sample_id_dict),"{}:{}".format(examination_id,r[0]))
last_sample_id_dict.update({examination_id:r[0]})
print_to_log("updated dictionary",format(last_sample_id_dict))
prepared_sql_sample_data='select * from result where examination_id=%s and sample_id=%s'
data_tpl_sample_data=(examination_id,r[0])
cur_sample_data=ms.run_query_with_log(prepared_sql_sample_data,data_tpl_sample_data)
r_sample_data=ms.get_single_row(cur_sample_data)
return r_sample_data[0],r_sample_data[2] #sample id and result
else:
print_to_log("Examination not in dict:{}".format(last_sample_id_dict),examination_id)
last_sample_id_dict.update({examination_id:r[0]})
print_to_log("updated dictionary",format(last_sample_id_dict))
return 0,0 # first time this examination is seen; no sample/result to report yet
routes.py | import random
import application.BChain_WordList as bwords
import pandas as pd
from flask import Flask, redirect, request, url_for,render_template, Response, jsonify
from application import app
@app.route('/mnemonic_generator', methods=['GET'])
def mnemonic_generator():
| seedphrase_words = []
while len(seedphrase_words) < 12:
seedphrase_words.append(random.choice(bwords.wordlist))
series = pd.Series(seedphrase_words, name="sp_words").reset_index().drop(columns='index')
return series.to_json() |
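# Hedged example of the response shape (word values depend on BChain_WordList and
# the random draw): {"sp_words": {"0": "abandon", "1": "ability", ..., "11": "able"}}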
|
gopathfs.go | package gopathfs
import (
"log"
"path/filepath"
"github.com/hanwen/go-fuse/fuse"
"github.com/hanwen/go-fuse/fuse/pathfs"
"github.com/rjeczalik/notify"
cli "github.com/urfave/cli/v2"
"github.com/linuxerwang/goplz/conf"
"github.com/linuxerwang/goplz/mapping"
"github.com/linuxerwang/goplz/vfs"
)
var (
verbose bool
)
// Init initialize the gopathfs package.
func Init(ctx *cli.Context) {
verbose = ctx.Bool("verbose")
}
type changeCallbackFunc func(notify.EventInfo)
// GoPathFs implements a virtual tree for src folder of GOPATH.
type GoPathFs struct {
pathfs.FileSystem
cfg *conf.Config
vfs vfs.FileSystem
mapper mapping.SourceMapper
changeCallback changeCallbackFunc
notifyCh chan notify.EventInfo
}
// Access overrides the parent's Access method.
func (gpf *GoPathFs) Access(name string, mode uint32, context *fuse.Context) fuse.Status {
return fuse.OK
}
// GetAttr overrides the parent's GetAttr method.
func (gpf *GoPathFs) GetAttr(name string, context *fuse.Context) (*fuse.Attr, fuse.Status) {
entry, relpath := gpf.vfs.MatchPath(name)
if len(relpath) != 0 {
return nil, fuse.ENOENT
}
attr, err := entry.Attr()
if err != nil {
return nil, fuse.ENOENT
}
return attr, fuse.OK
}
// OnMount overrides the parent's OnMount method.
func (gpf *GoPathFs) OnMount(nodeFs *pathfs.PathNodeFs) {
root := filepath.Join(gpf.cfg.Workspace, "...")
if verbose {
log.Printf("Watching directory %s for changes.", root)
}
if err := notify.Watch(root, gpf.notifyCh, notify.Create|notify.Remove|notify.Rename); err != nil {
log.Fatal(err)
}
go func() {
for ei := range gpf.notifyCh {
gpf.changeCallback(ei)
}
}()
}
// OnUnmount overwrites the parent's OnUnmount method.
func (gpf *GoPathFs) OnUnmount() {
notify.Stop(gpf.notifyCh)
}
// NewGoPathFs returns a new GoPathFs.
func | (cfg *conf.Config, fs vfs.FileSystem, mapper mapping.SourceMapper, changeCallback changeCallbackFunc) *GoPathFs {
gpfs := GoPathFs{
FileSystem: pathfs.NewDefaultFileSystem(),
cfg: cfg,
vfs: fs,
mapper: mapper,
changeCallback: changeCallback,
notifyCh: make(chan notify.EventInfo, 1000),
}
gpfs.SetDebug(true)
return &gpfs
}
| NewGoPathFs |
stub_test.go | package mode
import (
"testing"
"time"
"src.elv.sh/pkg/cli"
. "src.elv.sh/pkg/cli/clitest"
"src.elv.sh/pkg/cli/term"
"src.elv.sh/pkg/cli/tk"
)
func | (t *testing.T) {
f := Setup()
defer f.Stop()
startStub(f.App, StubSpec{Name: " STUB "})
f.TestTTY(t,
"", term.DotHere, "\n",
" STUB ", Styles,
"******",
)
}
func TestStub_Handling(t *testing.T) {
f := Setup()
defer f.Stop()
bindingCalled := make(chan bool)
startStub(f.App, StubSpec{
Bindings: tk.MapBindings{
term.K('a'): func(tk.Widget) { bindingCalled <- true }},
})
f.TTY.Inject(term.K('a'))
select {
case <-bindingCalled:
// OK
case <-time.After(time.Second):
t.Errorf("Handler not called after 1s")
}
}
func startStub(app cli.App, spec StubSpec) {
w := NewStub(spec)
app.SetAddon(w, false)
}
| TestStub_Rendering |
client.go | // Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v20191118
import (
"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common"
tchttp "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common/http"
"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common/profile"
)
const APIVersion = "2019-11-18"
type Client struct {
common.Client
}
// Deprecated
func NewClientWithSecretId(secretId, secretKey, region string) (client *Client, err error) {
cpf := profile.NewClientProfile()
client = &Client{}
client.Init(region).WithSecretId(secretId, secretKey).WithProfile(cpf)
return
}
func NewClient(credential *common.Credential, region string, clientProfile *profile.ClientProfile) (client *Client, err error) {
client = &Client{}
client.Init(region).
WithCredential(credential).
WithProfile(clientProfile)
return
}
func NewCreateSessionRequest() (request *CreateSessionRequest) {
request = &CreateSessionRequest{
BaseRequest: &tchttp.BaseRequest{},
}
request.Init().WithApiInfo("gs", APIVersion, "CreateSession")
return
}
func NewCreateSessionResponse() (response *CreateSessionResponse) {
response = &CreateSessionResponse{
BaseResponse: &tchttp.BaseResponse{},
}
return
}
// Creates a session
func (c *Client) CreateSession(request *CreateSessionRequest) (response *CreateSessionResponse, err error) {
if request == nil {
request = NewCreateSessionRequest()
}
response = NewCreateSessionResponse()
err = c.Send(request, response)
return
}
func NewDescribeWorkersRequest() (request *DescribeWorkersRequest) {
request = &DescribeWorkersRequest{
BaseRequest: &tchttp.BaseRequest{},
}
request.Init().WithApiInfo("gs", APIVersion, "DescribeWorkers")
return
}
func NewDescribeWorkersResponse() (response *DescribeWorkersResponse) {
response = &DescribeWorkersResponse{
BaseResponse: &tchttp.BaseResponse{},
}
return
}
// Queries the number of idle workers
func (c *Client) DescribeWorkers(request *DescribeWorkersRequest) (response *DescribeWorkersResponse, err error) {
if request == nil {
request = NewDescribeWorkersRequest()
}
response = NewDescribeWorkersResponse()
err = c.Send(request, response)
return
}
func NewDescribeWorkersInfoRequest() (request *DescribeWorkersInfoRequest) {
request = &DescribeWorkersInfoRequest{
BaseRequest: &tchttp.BaseRequest{},
}
request.Init().WithApiInfo("gs", APIVersion, "DescribeWorkersInfo")
return
}
func NewDescribeWorkersInfoRe | rkersInfoResponse) {
response = &DescribeWorkersInfoResponse{
BaseResponse: &tchttp.BaseResponse{},
}
return
}
// Gets worker information
func (c *Client) DescribeWorkersInfo(request *DescribeWorkersInfoRequest) (response *DescribeWorkersInfoResponse, err error) {
if request == nil {
request = NewDescribeWorkersInfoRequest()
}
response = NewDescribeWorkersInfoResponse()
err = c.Send(request, response)
return
}
func NewModifyWorkersRequest() (request *ModifyWorkersRequest) {
request = &ModifyWorkersRequest{
BaseRequest: &tchttp.BaseRequest{},
}
request.Init().WithApiInfo("gs", APIVersion, "ModifyWorkers")
return
}
func NewModifyWorkersResponse() (response *ModifyWorkersResponse) {
response = &ModifyWorkersResponse{
BaseResponse: &tchttp.BaseResponse{},
}
return
}
// Modifies worker information
func (c *Client) ModifyWorkers(request *ModifyWorkersRequest) (response *ModifyWorkersResponse, err error) {
if request == nil {
request = NewModifyWorkersRequest()
}
response = NewModifyWorkersResponse()
err = c.Send(request, response)
return
}
func NewStopGameRequest() (request *StopGameRequest) {
request = &StopGameRequest{
BaseRequest: &tchttp.BaseRequest{},
}
request.Init().WithApiInfo("gs", APIVersion, "StopGame")
return
}
func NewStopGameResponse() (response *StopGameResponse) {
response = &StopGameResponse{
BaseResponse: &tchttp.BaseResponse{},
}
return
}
// Forces the game to stop
func (c *Client) StopGame(request *StopGameRequest) (response *StopGameResponse, err error) {
if request == nil {
request = NewStopGameRequest()
}
response = NewStopGameResponse()
err = c.Send(request, response)
return
}
func NewTrylockWorkerRequest() (request *TrylockWorkerRequest) {
request = &TrylockWorkerRequest{
BaseRequest: &tchttp.BaseRequest{},
}
request.Init().WithApiInfo("gs", APIVersion, "TrylockWorker")
return
}
func NewTrylockWorkerResponse() (response *TrylockWorkerResponse) {
response = &TrylockWorkerResponse{
BaseResponse: &tchttp.BaseResponse{},
}
return
}
// Tries to lock a worker
func (c *Client) TrylockWorker(request *TrylockWorkerRequest) (response *TrylockWorkerResponse, err error) {
if request == nil {
request = NewTrylockWorkerRequest()
}
response = NewTrylockWorkerResponse()
err = c.Send(request, response)
return
}
| sponse() (response *DescribeWo |
_virtual_hub_bgp_connection_operations.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualHubBgpConnectionOperations:
"""VirtualHubBgpConnectionOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def | (
self,
resource_group_name: str,
virtual_hub_name: str,
connection_name: str,
**kwargs: Any
) -> "_models.BgpConnection":
"""Retrieves the details of a Virtual Hub Bgp Connection.
:param resource_group_name: The resource group name of the VirtualHub.
:type resource_group_name: str
:param virtual_hub_name: The name of the VirtualHub.
:type virtual_hub_name: str
:param connection_name: The name of the connection.
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BgpConnection, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_06_01.models.BgpConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BgpConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('BgpConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/bgpConnections/{connectionName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
virtual_hub_name: str,
connection_name: str,
parameters: "_models.BgpConnection",
**kwargs: Any
) -> "_models.BgpConnection":
cls = kwargs.pop('cls', None) # type: ClsType["_models.BgpConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'BgpConnection')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('BgpConnection', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('BgpConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/bgpConnections/{connectionName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
virtual_hub_name: str,
connection_name: str,
parameters: "_models.BgpConnection",
**kwargs: Any
) -> AsyncLROPoller["_models.BgpConnection"]:
"""Creates a VirtualHubBgpConnection resource if it doesn't exist else updates the existing
VirtualHubBgpConnection.
:param resource_group_name: The resource group name of the VirtualHub.
:type resource_group_name: str
:param virtual_hub_name: The name of the VirtualHub.
:type virtual_hub_name: str
:param connection_name: The name of the connection.
:type connection_name: str
:param parameters: Parameters of Bgp connection.
:type parameters: ~azure.mgmt.network.v2020_06_01.models.BgpConnection
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either BgpConnection or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.BgpConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.BgpConnection"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_hub_name=virtual_hub_name,
connection_name=connection_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('BgpConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/bgpConnections/{connectionName}'} # type: ignore
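# Hedged usage sketch (client and attribute names assumed, not defined in this
# file): with an async NetworkManagementClient `client`, the long-running
# operation is typically driven as
#   poller = await client.virtual_hub_bgp_connection.begin_create_or_update(
#       "my-rg", "my-hub", "my-conn", bgp_connection_parameters)
#   result = await poller.result()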
async def _delete_initial(
self,
resource_group_name: str,
virtual_hub_name: str,
connection_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/bgpConnections/{connectionName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
virtual_hub_name: str,
connection_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes a VirtualHubBgpConnection.
:param resource_group_name: The resource group name of the VirtualHubBgpConnection.
:type resource_group_name: str
:param virtual_hub_name: The name of the VirtualHub.
:type virtual_hub_name: str
:param connection_name: The name of the connection.
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
virtual_hub_name=virtual_hub_name,
connection_name=connection_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/bgpConnections/{connectionName}'} # type: ignore
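# --- Hypothetical usage sketch (editor's addition, not part of the generated client) ---
# The begin_* operations above return an AsyncLROPoller; a caller starts the operation and
# then awaits poller.result() to wait for the long-running operation to finish. The client
# attribute name `virtual_hub_bgp_connection` and the resource names below are assumptions
# for illustration only.
#
#     poller = await client.virtual_hub_bgp_connection.begin_create_or_update(
#         resource_group_name="my-rg",
#         virtual_hub_name="my-hub",
#         connection_name="my-bgp-conn",
#         parameters=bgp_connection_parameters,
#     )
#     bgp_connection = await poller.result()   # BgpConnection model
#
#     delete_poller = await client.virtual_hub_bgp_connection.begin_delete(
#         resource_group_name="my-rg",
#         virtual_hub_name="my-hub",
#         connection_name="my-bgp-conn",
#     )
#     await delete_poller.result()              # returns None on success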
| get |
config.rs | use std::collections::HashMap;
#[derive(Debug, Clone, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[serde(deny_unknown_fields)]
#[serde(default)]
#[serde(rename_all = "kebab-case")]
pub struct Config {
pub files: Walk,
pub default: EngineConfig,
#[serde(rename = "type")]
pub type_: TypeEngineConfig,
#[serde(skip)]
pub overrides: EngineConfig,
}
impl Config {
pub fn from_dir(cwd: &std::path::Path) -> Result<Option<Self>, anyhow::Error> {
let config = if let Some(path) =
find_project_file(cwd, &["typos.toml", "_typos.toml", ".typos.toml"])
{
log::debug!("Loading {}", path.display());
Some(Self::from_file(&path)?)
} else {
None
};
Ok(config)
}
pub fn from_file(path: &std::path::Path) -> Result<Self, anyhow::Error> {
let s = std::fs::read_to_string(path)?;
Self::from_toml(&s)
}
pub fn from_toml(data: &str) -> Result<Self, anyhow::Error> {
let content = toml_edit::easy::from_str(data)?;
Ok(content)
}
pub fn from_defaults() -> Self {
Self {
files: Walk::from_defaults(),
default: EngineConfig::from_defaults(),
type_: TypeEngineConfig::from_defaults(),
overrides: EngineConfig::default(),
}
}
pub fn update(&mut self, source: &Config) {
self.files.update(&source.files);
self.default.update(&source.default);
self.type_.update(&source.type_);
self.overrides.update(&source.overrides);
}
}
#[derive(Debug, Clone, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[serde(deny_unknown_fields)]
#[serde(default)]
#[serde(rename_all = "kebab-case")]
pub struct Walk {
pub extend_exclude: Vec<String>,
/// Skip hidden files and directories.
pub ignore_hidden: Option<bool>,
/// Respect ignore files.
pub ignore_files: Option<bool>,
/// Respect .ignore files.
pub ignore_dot: Option<bool>,
/// Respect ignore files in vcs directories.
pub ignore_vcs: Option<bool>,
/// Respect global ignore files.
pub ignore_global: Option<bool>,
/// Respect ignore files in parent directories.
pub ignore_parent: Option<bool>,
}
impl Walk {
pub fn from_defaults() -> Self {
let empty = Self::default();
Self {
extend_exclude: empty.extend_exclude.clone(),
ignore_hidden: Some(empty.ignore_hidden()),
ignore_files: Some(true),
ignore_dot: Some(empty.ignore_dot()),
ignore_vcs: Some(empty.ignore_vcs()),
ignore_global: Some(empty.ignore_global()),
ignore_parent: Some(empty.ignore_parent()),
}
}
pub fn update(&mut self, source: &Walk) {
self.extend_exclude
.extend(source.extend_exclude.iter().cloned());
if let Some(source) = source.ignore_hidden {
self.ignore_hidden = Some(source);
}
if let Some(source) = source.ignore_files {
self.ignore_files = Some(source);
self.ignore_dot = None;
self.ignore_vcs = None;
self.ignore_global = None;
self.ignore_parent = None;
}
if let Some(source) = source.ignore_dot {
self.ignore_dot = Some(source);
}
if let Some(source) = source.ignore_vcs {
self.ignore_vcs = Some(source);
self.ignore_global = None;
}
if let Some(source) = source.ignore_global {
self.ignore_global = Some(source);
}
if let Some(source) = source.ignore_parent {
self.ignore_parent = Some(source);
}
}
pub fn extend_exclude(&self) -> &[String] {
&self.extend_exclude
}
pub fn ignore_hidden(&self) -> bool {
self.ignore_hidden.unwrap_or(true)
}
pub fn ignore_dot(&self) -> bool {
self.ignore_dot.or(self.ignore_files).unwrap_or(true)
}
pub fn ignore_vcs(&self) -> bool {
self.ignore_vcs.or(self.ignore_files).unwrap_or(true)
}
pub fn ignore_global(&self) -> bool {
self.ignore_global
.or(self.ignore_vcs)
.or(self.ignore_files)
.unwrap_or(true)
}
pub fn ignore_parent(&self) -> bool {
self.ignore_parent.or(self.ignore_files).unwrap_or(true)
}
}
#[derive(Debug, Clone, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[serde(deny_unknown_fields)]
#[serde(default)]
#[serde(transparent)]
pub struct TypeEngineConfig {
pub patterns: std::collections::HashMap<kstring::KString, GlobEngineConfig>,
}
impl TypeEngineConfig {
pub fn from_defaults() -> Self {
let empty = Self::default();
Self {
patterns: empty.patterns().collect(),
}
}
pub fn update(&mut self, source: &Self) {
for (type_name, engine) in source.patterns.iter() {
self.patterns
.entry(type_name.to_owned())
.or_insert_with(GlobEngineConfig::default)
.update(engine);
}
}
pub fn patterns(&self) -> impl Iterator<Item = (kstring::KString, GlobEngineConfig)> {
let mut patterns = self.patterns.clone();
patterns
.entry("lock".into())
.or_insert_with(|| GlobEngineConfig {
extend_glob: Vec::new(),
engine: EngineConfig {
check_file: Some(false),
..Default::default()
},
});
patterns
.entry("rust".into())
.or_insert_with(|| GlobEngineConfig {
// From a spell-check perspective, these are more closely related to Rust than Toml
extend_glob: vec!["Cargo.toml".into()],
engine: EngineConfig {
dict: Some(DictConfig {
extend_words: maplit::hashmap! {
"flate".into() => "flate".into(),
"ser".into() => "ser".into(),
},
..Default::default()
}),
..Default::default()
},
});
patterns
.entry("python".into())
.or_insert_with(|| GlobEngineConfig {
// From a spell-check perspective, these are more closely related to Python than Toml
extend_glob: vec!["pyproject.toml".into()],
engine: EngineConfig {
..Default::default()
},
});
patterns.entry("cert".into()).or_insert_with(|| {
GlobEngineConfig {
extend_glob: vec![
// Certificate files:
"*.crt".into(),
"*.cer".into(),
"*.ca-bundle".into(),
"*.p7b".into(),
"*.p7c".into(),
"*.p7s".into(),
"*.pem".into(),
// Keystore Files:
"*.key".into(),
"*.keystore".into(),
"*.jks".into(),
// Combined certificate and key files:
"*.p12".into(),
"*.pfx".into(),
"*.pem".into(),
],
engine: EngineConfig {
check_file: Some(false),
..Default::default()
},
}
});
patterns.into_iter()
}
}
#[derive(Debug, Clone, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
//#[serde(deny_unknown_fields)] // Doesn't work with `flatten`
#[serde(default)]
#[serde(rename_all = "kebab-case")]
pub struct GlobEngineConfig {
pub extend_glob: Vec<kstring::KString>,
#[serde(flatten)]
pub engine: EngineConfig,
}
| self.extend_glob.extend(source.extend_glob.iter().cloned());
self.engine.update(&source.engine);
}
}
#[derive(Debug, Clone, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
//#[serde(deny_unknown_fields)] // Doesn't work with `flatten`
#[serde(default)]
#[serde(rename_all = "kebab-case")]
pub struct EngineConfig {
/// Check binary files.
pub binary: Option<bool>,
/// Verifying spelling in file names.
pub check_filename: Option<bool>,
/// Verifying spelling in files.
pub check_file: Option<bool>,
#[serde(flatten)]
pub tokenizer: Option<TokenizerConfig>,
#[serde(flatten)]
pub dict: Option<DictConfig>,
}
impl EngineConfig {
pub fn from_defaults() -> Self {
let empty = Self::default();
EngineConfig {
binary: Some(empty.binary()),
check_filename: Some(empty.check_filename()),
check_file: Some(empty.check_file()),
tokenizer: Some(
empty
.tokenizer
.unwrap_or_else(TokenizerConfig::from_defaults),
),
dict: Some(empty.dict.unwrap_or_else(DictConfig::from_defaults)),
}
}
pub fn update(&mut self, source: &EngineConfig) {
if let Some(source) = source.binary {
self.binary = Some(source);
}
if let Some(source) = source.check_filename {
self.check_filename = Some(source);
}
if let Some(source) = source.check_file {
self.check_file = Some(source);
}
if let Some(source) = source.tokenizer.as_ref() {
let mut tokenizer = None;
std::mem::swap(&mut tokenizer, &mut self.tokenizer);
let mut tokenizer = tokenizer.unwrap_or_default();
tokenizer.update(source);
let mut tokenizer = Some(tokenizer);
std::mem::swap(&mut tokenizer, &mut self.tokenizer);
}
if let Some(source) = source.dict.as_ref() {
let mut dict = None;
std::mem::swap(&mut dict, &mut self.dict);
let mut dict = dict.unwrap_or_default();
dict.update(source);
let mut dict = Some(dict);
std::mem::swap(&mut dict, &mut self.dict);
}
}
pub fn binary(&self) -> bool {
self.binary.unwrap_or(false)
}
pub fn check_filename(&self) -> bool {
self.check_filename.unwrap_or(true)
}
pub fn check_file(&self) -> bool {
self.check_file.unwrap_or(true)
}
}
#[derive(Debug, Clone, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[serde(deny_unknown_fields)]
#[serde(default)]
#[serde(rename_all = "kebab-case")]
pub struct TokenizerConfig {
/// Allow unicode characters in identifiers (and not just ASCII)
pub unicode: Option<bool>,
/// Do not check identifiers that appear to be hexadecimal values.
pub ignore_hex: Option<bool>,
/// Allow identifiers to start with digits, in addition to letters.
pub identifier_leading_digits: Option<bool>,
}
impl TokenizerConfig {
pub fn from_defaults() -> Self {
let empty = Self::default();
Self {
unicode: Some(empty.unicode()),
ignore_hex: Some(empty.ignore_hex()),
identifier_leading_digits: Some(empty.identifier_leading_digits()),
}
}
pub fn update(&mut self, source: &TokenizerConfig) {
if let Some(source) = source.unicode {
self.unicode = Some(source);
}
if let Some(source) = source.ignore_hex {
self.ignore_hex = Some(source);
}
if let Some(source) = source.identifier_leading_digits {
self.identifier_leading_digits = Some(source);
}
}
pub fn unicode(&self) -> bool {
self.unicode.unwrap_or(true)
}
pub fn ignore_hex(&self) -> bool {
self.ignore_hex.unwrap_or(true)
}
pub fn identifier_leading_digits(&self) -> bool {
self.identifier_leading_digits.unwrap_or(false)
}
}
#[derive(Debug, Clone, Default, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[serde(deny_unknown_fields)]
#[serde(default)]
#[serde(rename_all = "kebab-case")]
pub struct DictConfig {
pub locale: Option<Locale>,
pub extend_identifiers: HashMap<kstring::KString, kstring::KString>,
pub extend_words: HashMap<kstring::KString, kstring::KString>,
}
impl DictConfig {
pub fn from_defaults() -> Self {
let empty = Self::default();
Self {
locale: Some(empty.locale()),
extend_identifiers: Default::default(),
extend_words: Default::default(),
}
}
pub fn update(&mut self, source: &DictConfig) {
if let Some(source) = source.locale {
self.locale = Some(source);
}
self.extend_identifiers.extend(
source
.extend_identifiers
.iter()
.map(|(key, value)| (key.clone(), value.clone())),
);
self.extend_words.extend(
source
.extend_words
.iter()
.map(|(key, value)| (key.clone(), value.clone())),
);
}
pub fn locale(&self) -> Locale {
self.locale.unwrap_or_default()
}
pub fn extend_identifiers(&self) -> Box<dyn Iterator<Item = (&str, &str)> + '_> {
Box::new(
self.extend_identifiers
.iter()
.map(|(k, v)| (k.as_str(), v.as_str())),
)
}
pub fn extend_words(&self) -> Box<dyn Iterator<Item = (&str, &str)> + '_> {
Box::new(
self.extend_words
.iter()
.map(|(k, v)| (k.as_str(), v.as_str())),
)
}
}
fn find_project_file(dir: &std::path::Path, names: &[&str]) -> Option<std::path::PathBuf> {
let mut file_path = dir.join("placeholder");
for name in names {
file_path.set_file_name(name);
if file_path.exists() {
return Some(file_path);
}
}
None
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum Locale {
En,
EnUs,
EnGb,
EnCa,
EnAu,
}
impl Locale {
pub const fn category(self) -> Option<varcon_core::Category> {
match self {
Locale::En => None,
Locale::EnUs => Some(varcon_core::Category::American),
Locale::EnGb => Some(varcon_core::Category::BritishIse),
Locale::EnCa => Some(varcon_core::Category::Canadian),
Locale::EnAu => Some(varcon_core::Category::Australian),
}
}
pub const fn variants() -> [&'static str; 5] {
["en", "en-us", "en-gb", "en-ca", "en-au"]
}
}
impl Default for Locale {
fn default() -> Self {
Locale::En
}
}
impl std::str::FromStr for Locale {
type Err = String;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
match s {
"en" => Ok(Locale::En),
"en-us" => Ok(Locale::EnUs),
"en-gb" => Ok(Locale::EnGb),
"en-ca" => Ok(Locale::EnCa),
"en-au" => Ok(Locale::EnAu),
_ => Err("valid values: en, en-us, en-gb, en-ca, en-au".to_owned()),
}
}
}
impl std::fmt::Display for Locale {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
Locale::En => write!(f, "en"),
Locale::EnUs => write!(f, "en-us"),
Locale::EnGb => write!(f, "en-gb"),
Locale::EnCa => write!(f, "en-ca"),
Locale::EnAu => write!(f, "en-au"),
}
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_from_defaults() {
let null = Config::default();
let defaulted = Config::from_defaults();
assert_ne!(defaulted, null);
assert_ne!(defaulted.files, null.files);
assert_ne!(defaulted.default, null.default);
assert_ne!(defaulted.default.tokenizer, null.default.tokenizer);
assert_ne!(defaulted.default.dict, null.default.dict);
}
#[test]
fn test_update_from_nothing() {
let null = Config::default();
let defaulted = Config::from_defaults();
let mut actual = defaulted.clone();
actual.update(&null);
assert_eq!(actual, defaulted);
}
#[test]
fn test_update_from_defaults() {
let null = Config::default();
let defaulted = Config::from_defaults();
let mut actual = null;
actual.update(&defaulted);
assert_eq!(actual, defaulted);
}
#[test]
fn test_extend_glob_updates() {
let null = GlobEngineConfig::default();
let extended = GlobEngineConfig {
extend_glob: vec!["*.foo".into()],
..Default::default()
};
let mut actual = null;
actual.update(&extended);
assert_eq!(actual, extended);
}
#[test]
fn test_extend_glob_extends() {
let base = GlobEngineConfig {
extend_glob: vec!["*.foo".into()],
..Default::default()
};
let extended = GlobEngineConfig {
extend_glob: vec!["*.bar".into()],
..Default::default()
};
let mut actual = base;
actual.update(&extended);
let expected: Vec<kstring::KString> = vec!["*.foo".into(), "*.bar".into()];
assert_eq!(actual.extend_glob, expected);
}
#[test]
fn parse_extend_globs() {
let input = r#"[type.po]
extend-glob = ["*.po"]
check-file = true
"#;
let mut expected = Config::default();
expected.type_.patterns.insert(
"po".into(),
GlobEngineConfig {
extend_glob: vec!["*.po".into()],
engine: EngineConfig {
tokenizer: Some(TokenizerConfig::default()),
dict: Some(DictConfig::default()),
check_file: Some(true),
..Default::default()
},
},
);
let actual = Config::from_toml(input).unwrap();
assert_eq!(actual, expected);
}
#[test]
fn parse_extend_words() {
let input = r#"[type.shaders]
extend-glob = [
'*.shader',
'*.cginc',
]
[type.shaders.extend-words]
inout = "inout"
"#;
let mut expected = Config::default();
expected.type_.patterns.insert(
"shaders".into(),
GlobEngineConfig {
extend_glob: vec!["*.shader".into(), "*.cginc".into()],
engine: EngineConfig {
tokenizer: Some(TokenizerConfig::default()),
dict: Some(DictConfig {
extend_words: maplit::hashmap! {
"inout".into() => "inout".into(),
},
..Default::default()
}),
..Default::default()
},
},
);
let actual = Config::from_toml(input).unwrap();
assert_eq!(actual, expected);
}
} | impl GlobEngineConfig {
pub fn update(&mut self, source: &GlobEngineConfig) { |
share.go | package bolt
import (
"github.com/asdine/storm"
"github.com/asdine/storm/q"
"github.com/filebrowser/filebrowser/v2/errors"
"github.com/filebrowser/filebrowser/v2/share"
)
type shareBackend struct {
db *storm.DB
}
func (s shareBackend) GetByHash(hash string) (*share.Link, error) {
var v share.Link
err := s.db.One("Hash", hash, &v)
if err == storm.ErrNotFound {
return nil, errors.ErrNotExist
}
return &v, err
}
func (s shareBackend) GetPermanent(path string, id uint) (*share.Link, error) {
var v share.Link
err := s.db.Select(q.Eq("Path", path), q.Eq("Expire", 0), q.Eq("UserID", id)).First(&v)
if err == storm.ErrNotFound {
return nil, errors.ErrNotExist
}
return &v, err
}
func (s shareBackend) Gets(path string, id uint) ([]*share.Link, error) {
var v []*share.Link
err := s.db.Select(q.Eq("Path", path), q.Eq("UserID", id)).Find(&v)
if err == storm.ErrNotFound {
return v, errors.ErrNotExist
}
return v, err
}
func (s shareBackend) Save(l *share.Link) error {
return s.db.Save(l)
}
func (s shareBackend) Delete(hash string) error {
err := s.db.DeleteStruct(&share.Link{Hash: hash})
if err == storm.ErrNotFound |
return err
}
| {
return nil
} |
sketch.js | const Engine = Matter.Engine;
const World = Matter.World;
const Bodies = Matter.Bodies;
const Constraint = Matter.Constraint;
var engine, world;
var canvas;
var player, playerBase, playerArcher;
var baseimage;
function preload() {
backgroundImg = loadImage("./background.png");
baseimage = loadImage("./base.png");
playerimage = loadImage("./player.png");
}
function setup() {
canvas = createCanvas(windowWidth, windowHeight);
engine = Engine.create();
world = engine.world;
angleMode(DEGREES);
var options = {
isStatic:true
}
//create player base body
playerBase = Bodies.rectangle(200,350,180,150,options)
World.add(world,playerBase)
//create player body
player = Bodies.rectangle(250,playerBase.position.y-160,50,180,options)
World.add(world,player)
}
function draw() {
background(backgroundImg);
| //show the player base image using the image() function
image(baseimage,playerBase.position.x,playerBase.position.y,180,150)
//show the player image using the image() function
image(playerimage,player.position.x,player.position.y,50,180)
Engine.update(engine);
// Title
fill("#FFFF");
textAlign("center");
textSize(40);
text("EPIC ARCHERY", width / 2, 100);
} | |
LightBulbIcon.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = LightBulbIcon;
var _react = require("react");
var _react2 = _interopRequireDefault(_react);
var _Icon = require("./Icon");
var _Icon2 = _interopRequireDefault(_Icon);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function LightBulbIcon(props) {
return _react2.default.createElement(
_Icon2.default,
props, | _react2.default.createElement("path", { d: "M12 4C15.3137 4 18 6.68629 18 10C18 11.8202 17.1895 13.4511 15.9097 14.5514L14.9806 19.1961C14.8949 19.6246 14.5429 19.943 14.1176 19.9931L14 20H10C9.56304 20 9.18177 19.7173 9.04925 19.3101L9.01942 19.1961L8.0903 14.5514C6.81051 13.4511 6 11.8202 6 10C6 6.68629 8.68629 4 12 4ZM12 6C9.79086 6 8 7.79086 8 10C8 11.8636 9.2744 13.4295 10.9992 13.8738L11 11C9.71264 11 9.66825 9.13555 10.8668 9.00697L11 9H13C13.5523 9 14 9.44772 14 10C14 10.5128 13.614 10.9355 13.1166 10.9933L13 11L12.9998 13.874C14.7251 13.4301 16 11.8639 16 10C16 7.79086 14.2091 6 12 6Z" })
);
} |
|
settings.go | package internal
import (
"time"
)
| Debug bool
Timeout time.Duration
} | type DialSettings struct { |
orm.py | # Copyright 2017 Pilosa Corp.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
#
import json
from .exceptions import PilosaError
from .validator import validate_index_name, validate_frame_name, validate_label
__all__ = ("TimeQuantum", "CacheType", "Schema", "Index", "PQLQuery", "PQLBatchQuery")
_TIME_FORMAT = "%Y-%m-%dT%H:%M"
class TimeQuantum:
"""Valid time quantum values for frames having support for that.
* See: `Data Model <https://www.pilosa.com/docs/data-model/>`_
"""
NONE = None
YEAR = None
MONTH = None
DAY = None
HOUR = None
YEAR_MONTH = None
MONTH_DAY = None
DAY_HOUR = None
YEAR_MONTH_DAY = None
MONTH_DAY_HOUR = None
YEAR_MONTH_DAY_HOUR = None
def __init__(self, value):
self.value = value
def __str__(self):
return self.value
def __eq__(self, other):
if isinstance(other, TimeQuantum):
return self.value == other.value
return False
TimeQuantum.NONE = TimeQuantum("")
TimeQuantum.YEAR = TimeQuantum("Y")
TimeQuantum.MONTH = TimeQuantum("M")
TimeQuantum.DAY = TimeQuantum("D")
TimeQuantum.HOUR = TimeQuantum("H")
TimeQuantum.YEAR_MONTH = TimeQuantum("YM")
TimeQuantum.MONTH_DAY = TimeQuantum("MD")
TimeQuantum.DAY_HOUR = TimeQuantum("DH")
TimeQuantum.YEAR_MONTH_DAY = TimeQuantum("YMD")
TimeQuantum.MONTH_DAY_HOUR = TimeQuantum("MDH")
TimeQuantum.YEAR_MONTH_DAY_HOUR = TimeQuantum("YMDH")
class CacheType:
DEFAULT = None
LRU = None
RANKED = None
def __init__(self, value):
self.value = value
def __str__(self):
return self.value
def __eq__(self, other):
if isinstance(other, CacheType):
return self.value == other.value
return False
CacheType.DEFAULT = CacheType("")
CacheType.LRU = CacheType("lru")
CacheType.RANKED = CacheType("ranked")
class Schema:
"""Schema is a container for index objects"""
def __init__(self):
self._indexes = {}
def __eq__(self, other):
if id(self) == id(other):
return True
if not isinstance(other, self.__class__):
return False
return self._indexes == other._indexes
def __ne__(self, other):
return not self.__eq__(other)
def index(self, name, column_label="columnID", time_quantum=TimeQuantum.NONE):
"""Returns an index object with the given name and options.
If the index does not exist in the schema, it is added to the schema.
:param str name: index name
:param str column_label: a valid column label
:param pilosa.TimeQuantum time_quantum: Sets the time quantum
:return: Index object
* See `Data Model <https://www.pilosa.com/docs/data-model/>`_
* See `Query Language <https://www.pilosa.com/docs/query-language/>`_
"""
index = self._indexes.get(name)
if index is None:
index = Index(name, column_label, time_quantum)
self._indexes[name] = index
return index
def _diff(self, other):
result = Schema()
for index_name, index in self._indexes.items():
if index_name not in other._indexes:
# if the index doesn't exist in the other schema, simply copy it
result._indexes[index_name] = index.copy()
else:
# the index exists in the other schema; check the frames
result_index = index.copy(frames=False)
for frame_name, frame in index._frames.items():
# copy the frame if it doesn't exist in the other schema
if frame_name not in other._indexes[index_name]._frames:
    result_index._frames[frame_name] = frame.copy()
# check whether we modified result index
if len(result_index._frames) > 0:
result._indexes[index_name] = result_index
return result
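# A minimal usage sketch of the schema container above (editor's addition; the index name is
# arbitrary): Schema.index() both creates and memoizes Index objects, so repeated calls with
# the same name return the same instance.
#
#     schema = Schema()
#     repository = schema.index("repository")
#     assert repository is schema.index("repository")   # the cached object is returned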
class Index:
"""The purpose of the Index is to represent a data namespace.
You cannot perform cross-index queries. Column-level attributes are global to the Index.
:param str name: index name
:param str column_label: a valid column label
:param pilosa.TimeQuantum time_quantum: Sets the time quantum
* See `Data Model <https://www.pilosa.com/docs/data-model/>`_
* See `Query Language <https://www.pilosa.com/docs/query-language/>`_
"""
def __init__(self, name, column_label="columnID", time_quantum=TimeQuantum.NONE):
validate_index_name(name)
validate_label(column_label)
self.name = name
self.column_label = column_label
self.time_quantum = time_quantum
self._frames = {}
def __eq__(self, other):
if id(self) == id(other):
return True
if not isinstance(other, self.__class__):
return False
return self._meta_eq(other) and \
self._frames == other._frames
def __ne__(self, other):
return not self.__eq__(other)
def _meta_eq(self, other):
return self.name == other.name and \
self.column_label == other.column_label and \
self.time_quantum == other.time_quantum
def copy(self, frames=True):
index = Index(self.name, column_label=self.column_label, time_quantum=self.time_quantum)
if frames:
index._frames = dict((name, frame.copy()) for name, frame in self._frames.items())
return index
def frame(self, name, row_label="rowID", time_quantum=TimeQuantum.NONE,
inverse_enabled=False, cache_type=CacheType.DEFAULT, cache_size=0):
"""Creates a frame object with the specified name and defaults.
:param str name: frame name
:param str row_label: a valid row label
:param pilosa.TimeQuantum time_quantum: Sets the time quantum for the frame. If a Frame has a time quantum, then Views are generated for each of the defined time segments.
:param bool inverse_enabled: enables the inverse view for the frame
:param pilosa.CacheType cache_type: ``CacheType.DEFAULT``, ``CacheType.LRU`` or ``CacheType.RANKED``
:param int cache_size: Values greater than 0 sets the cache size. Otherwise uses the default cache size
:return: Pilosa frame
:rtype: pilosa.Frame
"""
frame = self._frames.get(name)
if frame is None:
frame = Frame(self, name, row_label, time_quantum,
inverse_enabled, cache_type, cache_size)
self._frames[name] = frame
return frame
def raw_query(self, query):
"""Creates a raw query.
Note that the query is not validated before sending to the server.
:param str query:
:return: Pilosa query
:rtype: pilosa.PQLQuery
"""
return PQLQuery(query, self)
def batch_query(self, *queries):
"""Creates a batch query.
:param pilosa.PQLQuery queries: the queries in the batch
:return: Pilosa batch query
:rtype: pilosa.PQLBatchQuery
"""
q = PQLBatchQuery(self)
q.add(*queries)
return q
def union(self, *bitmaps):
"""Creates a ``Union`` query.
``Union`` performs a logical OR on the results of each BITMAP_CALL query passed to it.
:param pilosa.PQLBitmapQuery bitmaps: 0 or more bitmap queries to union
:return: Pilosa bitmap query
:rtype: pilosa.PQLBitmapQuery
"""
return self._bitmap_op("Union", bitmaps)
def | (self, *bitmaps):
"""Creates an ``Intersect`` query.
``Intersect`` performs a logical AND on the results of each BITMAP_CALL query passed to it.
:param pilosa.PQLBitmapQuery bitmaps: 1 or more bitmap queries to intersect
:return: Pilosa bitmap query
:rtype: pilosa.PQLBitmapQuery
:raise PilosaError: if the number of bitmaps is less than 1
"""
if len(bitmaps) < 1:
raise PilosaError("Number of bitmap queries should be greater or equal to 1")
return self._bitmap_op("Intersect", bitmaps)
def difference(self, *bitmaps):
"""Creates a ``Difference`` query.
``Difference`` returns all of the bits from the first BITMAP_CALL argument passed to it,
without the bits from each subsequent BITMAP_CALL.
:param pilosa.PQLBitmapQuery bitmaps: 1 or more bitmap queries to differentiate
:return: Pilosa bitmap query
:rtype: pilosa.PQLBitmapQuery
:raise PilosaError: if the number of bitmaps is less than 1
"""
if len(bitmaps) < 1:
raise PilosaError("Number of bitmap queries should be greater or equal to 1")
return self._bitmap_op("Difference", bitmaps)
def count(self, bitmap):
"""Creates a Count query.
``Count`` returns the number of set bits in the BITMAP_CALL passed in.
:param pilosa.PQLQuery bitmap: the bitmap query
:return: Pilosa query
:rtype: pilosa.PQLQuery
"""
return PQLQuery(u"Count(%s)" % bitmap.serialize(), self)
def set_column_attrs(self, column_id, attrs):
"""Creates a SetColumnAttrs query.
``SetColumnAttrs`` associates arbitrary key/value pairs with a column in an index.
The following object types are accepted:
* int
* str
* bool
* float
:param int column_id:
:param dict attrs: column attributes
:return: Pilosa query
:rtype: pilosa.PQLQuery
"""
attrs_str = _create_attributes_str(attrs)
return PQLQuery(u"SetColumnAttrs(%s=%d, %s)" %
(self.column_label, column_id, attrs_str), self)
def _bitmap_op(self, name, bitmaps):
return PQLQuery(u"%s(%s)" % (name, u", ".join(b.serialize() for b in bitmaps)), self)
class Frame:
"""Frames are used to segment and define different functional characteristics within your entire index.
You can think of a Frame as a table-like data partition within your Index.
Row-level attributes are namespaced at the Frame level.
Do not create a Frame object directly. Instead, use ``pilosa.Index.frame`` method.
* See `Data Model <https://www.pilosa.com/docs/data-model/>`_
* See `Query Language <https://www.pilosa.com/docs/query-language/>`_
"""
def __init__(self, index, name, row_label, time_quantum, inverse_enabled,
cache_type, cache_size):
validate_frame_name(name)
validate_label(row_label)
self.index = index
self.name = name
self.time_quantum = time_quantum
self.inverse_enabled = inverse_enabled
self.cache_type = cache_type
self.cache_size = cache_size
self.row_label = row_label
self.column_label = index.column_label
def __eq__(self, other):
if id(self) == id(other):
return True
if not isinstance(other, self.__class__):
return False
# Note that we skip comparing the frames of the indexes by using index._meta_eq
# in order to avoid a call cycle
return self.name == other.name and \
self.index._meta_eq(other.index) and \
self.row_label == other.row_label and \
self.time_quantum == other.time_quantum and \
self.inverse_enabled == other.inverse_enabled and \
self.cache_type == other.cache_type and \
self.cache_size == other.cache_size
def __ne__(self, other):
return not self.__eq__(other)
def copy(self):
return Frame(self.index, self.name, self.row_label, self.time_quantum,
self.inverse_enabled, self.cache_type, self.cache_size)
def bitmap(self, row_id):
"""Creates a Bitmap query.
Bitmap retrieves the indices of all the set bits in a row or column based on whether the row label or column label is given in the query. It also retrieves any attributes set on that row or column.
This variant of Bitmap query uses the row label.
:param int row_id:
:return: Pilosa bitmap query
:rtype: pilosa.PQLBitmapQuery
"""
return PQLQuery(u"Bitmap(%s=%d, frame='%s')" % (self.row_label, row_id, self.name),
self.index)
def inverse_bitmap(self, column_id):
"""Creates a Bitmap query.
``Bitmap`` retrieves the indices of all the set bits in a row or column based on whether the row label or column label is given in the query. It also retrieves any attributes set on that row or column.
This variant of Bitmap query uses the column label.
:param int column_id:
:return: Pilosa bitmap query
:rtype: pilosa.PQLBitmapQuery
"""
return PQLQuery(u"Bitmap(%s=%d, frame='%s')" % (self.column_label, column_id, self.name),
self.index)
def setbit(self, row_id, column_id, timestamp=None):
"""Creates a SetBit query.
``SetBit`` assigns a value of 1 to a bit in the binary matrix, thus associating the given row in the given frame with the given column.
:param int row_id:
:param int column_id:
:param datetime.datetime timestamp: optional timestamp for the bit
:return: Pilosa query
:rtype: pilosa.PQLQuery
"""
ts = ", timestamp='%s'" % timestamp.strftime(_TIME_FORMAT) if timestamp else ''
return PQLQuery(u"SetBit(%s=%d, frame='%s', %s=%d%s)" % \
(self.row_label, row_id, self.name, self.column_label, column_id, ts),
self.index)
def clearbit(self, row_id, column_id):
"""Creates a ClearBit query.
``ClearBit`` assigns a value of 0 to a bit in the binary matrix, thus disassociating the given row in the given frame from the given column.
:param int row_id:
:param int column_id:
:return: Pilosa query
:rtype: pilosa.PQLQuery
"""
return PQLQuery(u"ClearBit(%s=%d, frame='%s', %s=%d)" % \
(self.row_label, row_id, self.name, self.column_label, column_id),
self.index)
def topn(self, n, bitmap=None, field="", *values):
"""Creates a TopN query.
``TopN`` returns the id and count of the top n bitmaps (by count of bits) in the frame.
* see: `TopN Query <https://www.pilosa.com/docs/query-language/#topn>`_
:param int n: number of items to return
:param pilosa.PQLBitmapQuery bitmap: a PQL Bitmap query
:param str field: field name
:param object values: filter values to be matched against the field
"""
return self._topn(n, bitmap, field, False, *values)
def inverse_topn(self, n, bitmap=None, field="", *values):
"""Creates a TopN query.
``TopN`` returns the id and count of the top n bitmaps (by count of bits) in the frame.
This version sets `inverse=true`.
* see: `TopN Query <https://www.pilosa.com/docs/query-language/#topn>`_
:param int n: number of items to return
:param pilosa.PQLBitmapQuery bitmap: a PQL Bitmap query
:param str field: field name
:param object values: filter values to be matched against the field
"""
return self._topn(n, bitmap, field, True, *values)
def _topn(self, n, bitmap=None, field="", inverse=False, *values):
parts = ["frame='%s'" % self.name, "n=%d" % n, "inverse=%s" % ('true' if inverse else 'false')]
if bitmap:
parts.insert(0, bitmap.serialize())
if field:
validate_label(field)
values_str = json.dumps(values, separators=(',', ': '))
parts.extend(["field='%s'" % field, "filters=%s" % values_str])
qry = u"TopN(%s)" % ", ".join(parts)
return PQLQuery(qry, self.index)
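# Example of the PQL emitted by the TopN builders above (comment-only sketch; the frame name
# "stargazer" and field "language" are assumptions):
#     frame.topn(5).serialize()
#     # "TopN(frame='stargazer', n=5, inverse=false)"
#     frame.topn(5, frame.bitmap(3), "language", 1, 2).serialize()
#     # "TopN(Bitmap(rowID=3, frame='stargazer'), frame='stargazer', n=5, inverse=false, field='language', filters=[1,2])"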
def range(self, row_id, start, end):
"""Creates a Range query.
Similar to ``Bitmap``, but only returns bits which were set with timestamps between the given start and end timestamps.
* see: `Range Query <https://www.pilosa.com/docs/query-language/#range>`_
:param int row_id:
:param datetime.datetime start: start timestamp
:param datetime.datetime end: end timestamp
"""
return self._range(self.row_label, row_id, start, end)
def inverse_range(self, column_id, start, end):
"""Creates a Range query.
Similar to ``Bitmap``, but only returns bits which were set with timestamps between the given start and end timestamps.
:param int column_id:
:param datetime.datetime start: start timestamp
:param datetime.datetime end: end timestamp
"""
return self._range(self.column_label, column_id, start, end)
def _range(self, label, rowcol_id, start, end):
start_str = start.strftime(_TIME_FORMAT)
end_str = end.strftime(_TIME_FORMAT)
return PQLQuery(u"Range(%s=%d, frame='%s', start='%s', end='%s')" %
(label, rowcol_id, self.name, start_str, end_str),
self.index)
def set_row_attrs(self, row_id, attrs):
"""Creates a SetRowAttrs query.
``SetRowAttrs`` associates arbitrary key/value pairs with a row in a frame.
The following object types are accepted:
* int
* str
* bool
* float
:param int row_id:
:param dict attrs: row attributes
:return: Pilosa query
:rtype: pilosa.PQLQuery
"""
attrs_str = _create_attributes_str(attrs)
return PQLQuery(u"SetRowAttrs(%s=%d, frame='%s', %s)" %
(self.row_label, row_id, self.name, attrs_str),
self.index)
def _get_options_string(self):
data = {"rowLabel": self.row_label}
if self.inverse_enabled:
data["inverseEnabled"] = True
if self.time_quantum != TimeQuantum.NONE:
data["timeQuantum"] = str(self.time_quantum)
if self.cache_type != CacheType.DEFAULT:
data["cacheType"] = str(self.cache_type)
if self.cache_size > 0:
data["cacheSize"] = self.cache_size
return json.dumps({"options": data}, sort_keys=True)
class PQLQuery:
def __init__(self, pql, index):
self.pql = pql
self.index = index
def serialize(self):
return self.pql
def _create_attributes_str(attrs):
kvs = []
try:
for k, v in attrs.items():
# TODO: make key use its own validator
validate_label(k)
kvs.append("%s=%s" % (k, json.dumps(v)))
return ", ".join(sorted(kvs))
except TypeError:
raise PilosaError("Error while converting values")
class PQLBatchQuery:
def __init__(self, index):
self.index = index
self.queries = []
def add(self, *queries):
self.queries.extend(queries)
def serialize(self):
return u''.join(q.serialize() for q in self.queries)
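# Editor's sketch (not part of the original module): a tiny demo of the ORM above, guarded by
# __main__ so importing the module is unaffected. Because of the relative imports at the top it
# must be run as a module (e.g. `python -m pilosa.orm`, module path assumed), and it only prints
# the generated PQL without contacting a server.
if __name__ == "__main__":
    schema = Schema()
    repo = schema.index("repository")
    stargazer = repo.frame("stargazer")
    batch = repo.batch_query(
        stargazer.setbit(5, 10),
        stargazer.bitmap(5),
        repo.count(stargazer.bitmap(5)),
    )
    # Prints the concatenated queries:
    # SetBit(rowID=5, frame='stargazer', columnID=10)Bitmap(rowID=5, frame='stargazer')Count(Bitmap(rowID=5, frame='stargazer'))
    print(batch.serialize())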
| intersect |
cmfsm.py | # -*- coding: utf-8 -*-
# @Author: yulidong
# @Date: 2018-07-17 10:44:43
# @Last Modified by: yulidong
# @Last Modified time: 2019-03-01 14:12:35
# -*- coding: utf-8 -*-
# @Author: lidong
# @Date: 2018-03-20 18:01:52
# @Last Modified by: yulidong
# @Last Modified time: 2018-07-16 22:16:14
import time
import torch
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
import math
from math import ceil
from torch.autograd import Variable
from torch.nn.functional import cosine_similarity as cosine_s
from cmf import caffe_pb2
from cmf.models.utils import *
rsn_specs = {
'scene':
{
'n_classes': 9,
'input_size': (540, 960),
'block_config': [3, 4, 23, 3],
},
}
group_dim=32
pramid_dim=8
group_norm_group_num = 32
def convbn(in_planes, out_planes, kernel_size, stride, pad, dilation):
return nn.Sequential(
nn.Conv2d(
in_planes,
out_planes,
kernel_size=kernel_size,
stride=stride,
padding=dilation if dilation > 1 else pad,
dilation=dilation,
bias=False), nn.GroupNorm(group_norm_group_num, out_planes))
def convbn_3d(in_planes, out_planes, kernel_size, stride, pad):
return nn.Sequential(
nn.Conv3d(
in_planes,
out_planes,
kernel_size=kernel_size,
padding=pad,
stride=stride,
bias=False), nn.GroupNorm(group_norm_group_num, out_planes))
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride, downsample, pad, dilation):
super(BasicBlock, self).__init__()
self.conv1 = nn.Sequential(
convbn(inplanes, planes, 3, stride, pad, dilation),
nn.ReLU(inplace=True))
self.conv2 = convbn(planes, planes, 3, 1, pad, dilation)
self.downsample = downsample
self.stride = stride
def forward(self, x):
out = self.conv1(x)
out = self.conv2(out)
if self.downsample is not None:
x = self.downsample(x)
out += x
return out
class matchshifted(nn.Module):
def __init__(self):
super(matchshifted, self).__init__()
def | (self, left, right, shift):
batch, filters, height, width = left.size()
shifted_left = F.pad(
torch.index_select(
left, 3,
Variable(torch.LongTensor(
[i for i in range(shift, width)])).cuda()),
(shift, 0, 0, 0))
shifted_right = F.pad(
torch.index_select(
right, 3,
Variable(torch.LongTensor(
[i for i in range(width - shift)])).cuda()),
(shift, 0, 0, 0))
out = torch.cat((shifted_left, shifted_right), 1).view(
batch, filters * 2, 1, height, width)
return out
class disparityregression(nn.Module):
def __init__(self, maxdisp):
super().__init__()
self.disp = Variable(
torch.Tensor(
np.reshape(np.array(range(maxdisp)),
[1, maxdisp, 1, 1])).cuda(),
requires_grad=False)
def forward(self, x):
disp = self.disp.repeat(x.size()[0], 1, x.size()[2], x.size()[3])
out = torch.sum(x * disp, 1)
return out
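# Editor's note: the module above implements the soft-argmin ("disparity regression") used in
# PSMNet-style stereo networks. Given per-pixel softmax probabilities over maxdisp candidate
# disparities, it returns the probability-weighted expected disparity. Worked example with
# maxdisp=3 and probabilities [0.1, 0.7, 0.2] at one pixel:
#     0*0.1 + 1*0.7 + 2*0.2 = 1.1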
class feature_extraction(nn.Module):
def __init__(self):
super(feature_extraction, self).__init__()
self.inplanes = 32
self.firstconv = nn.Sequential(
convbn(3, 32, 3, 1, 1, 1),
# nn.GroupNorm(group_dim, 32),
nn.ReLU(inplace=True),
convbn(32, 32, 3, 1, 1, 1),
nn.ReLU(inplace=True),
convbn(32, 32, 3, 1, 1, 1),
nn.ReLU(inplace=True),
nn.Conv2d(32, 32, kernel_size=3, padding=1, stride=1, bias=False))
self.secondconv = nn.Sequential(
nn.GroupNorm(group_dim, 32),
nn.ReLU(inplace=True),
convbn(32, 32, 3, 2, 1, 1),
nn.ReLU(inplace=True),
convbn(32, 32, 3, 1, 1, 1),
nn.ReLU(inplace=True))
self.layer1 = self._make_layer(BasicBlock, 32, 3, 1, 1, 1)
self.layer2 = self._make_layer(BasicBlock, 64, 16, 2, 1, 1)
self.layer3 = self._make_layer(BasicBlock, 128, 3, 1, 1, 1)
self.layer4 = self._make_layer(BasicBlock, 128, 3, 1, 1, 2)
self.branch1 = nn.Sequential(
nn.AvgPool2d((64, 64), stride=(64, 64)),
convbn(128, 32, 1, 1, 0, 1),
nn.ReLU(inplace=True))
self.branch2 = nn.Sequential(
nn.AvgPool2d((32, 32), stride=(32, 32)),
convbn(128, 32, 1, 1, 0, 1),
nn.ReLU(inplace=True))
self.branch3 = nn.Sequential(
nn.AvgPool2d((16, 16), stride=(16, 16)),
convbn(128, 32, 1, 1, 0, 1),
nn.ReLU(inplace=True))
self.branch4 = nn.Sequential(
nn.AvgPool2d((8, 8), stride=(8, 8)),
convbn(128, 32, 1, 1, 0, 1),
nn.ReLU(inplace=True))
self.lastconv = nn.Sequential(
convbn(320, 128, 3, 1, 1, 1),
nn.ReLU(inplace=True),
nn.Conv2d(128, 32, kernel_size=1, padding=0, stride=1, bias=False))
def _make_layer(self, block, planes, blocks, stride, pad, dilation):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(
self.inplanes,
planes * block.expansion,
kernel_size=1,
stride=stride,
bias=False),
nn.GroupNorm(group_norm_group_num, planes * block.expansion),
)
layers = []
layers.append(
block(self.inplanes, planes, stride, downsample, pad, dilation))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes, 1, None, pad, dilation))
return nn.Sequential(*layers)
def forward(self, x):
output_all = self.firstconv(x)
output=self.secondconv(output_all)
output_rt = self.layer1(output)
output_raw = self.layer2(output_rt)
output = self.layer3(output_raw)
output_skip = self.layer4(output)
output_branch1 = self.branch1(output_skip)
output_branch1 = F.interpolate(
output_branch1, (output_skip.size()[2], output_skip.size()[3]),
mode='bilinear',
align_corners=False)
output_branch2 = self.branch2(output_skip)
output_branch2 = F.interpolate(
output_branch2, (output_skip.size()[2], output_skip.size()[3]),
mode='bilinear',
align_corners=False)
output_branch3 = self.branch3(output_skip)
output_branch3 = F.interpolate(
output_branch3, (output_skip.size()[2], output_skip.size()[3]),
mode='bilinear',
align_corners=False)
output_branch4 = self.branch4(output_skip)
output_branch4 = F.interpolate(
output_branch4, (output_skip.size()[2], output_skip.size()[3]),
mode='bilinear',
align_corners=False)
output_feature = torch.cat(
(output_raw, output_skip, output_branch4, output_branch3,
output_branch2, output_branch1), 1)
output_feature = self.lastconv(output_feature)
return output_feature, output_rt,output_all
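# Editor's note (shape sketch, inferred from the layers above): for an input of size
# (B, 3, H, W), forward returns
#     output_feature: (B, 32, H/4, W/4)  - SPP-fused features (320 -> 32 channels via lastconv)
#     output_rt:      (B, 32, H/2, W/2)  - features after layer1 at half resolution
#     output_all:     (B, 32, H,   W)    - full-resolution features from firstconv
# The four average-pooling branches implement spatial pyramid pooling and are interpolated back
# to the 1/4-resolution grid before concatenation; H and W are assumed divisible by 4.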
class hourglass(nn.Module):
def __init__(self, inplanes):
super().__init__()
self.conv1 = nn.Sequential(
convbn_3d(inplanes, inplanes * 2, kernel_size=3, stride=2, pad=1),
nn.ReLU(inplace=True))
self.conv2 = convbn_3d(
inplanes * 2, inplanes * 2, kernel_size=3, stride=1, pad=1)
self.conv3 = nn.Sequential(
convbn_3d(
inplanes * 2, inplanes * 2, kernel_size=3, stride=2, pad=1),
nn.ReLU(inplace=True))
self.conv4 = nn.Sequential(
convbn_3d(
inplanes * 2, inplanes * 2, kernel_size=3, stride=1, pad=1),
nn.ReLU(inplace=True))
self.conv5 = nn.Sequential(
nn.ConvTranspose3d(
inplanes * 2,
inplanes * 2,
kernel_size=3,
padding=1,
output_padding=1,
stride=2,
bias=False), nn.GroupNorm(group_norm_group_num,
inplanes * 2)) # +conv2
self.conv6 = nn.Sequential(
nn.ConvTranspose3d(
inplanes * 2,
inplanes,
kernel_size=3,
padding=1,
output_padding=(1,1,1),
stride=2,
bias=False), nn.GroupNorm(group_norm_group_num,
inplanes)) # +x
def forward(self, x, presqu, postsqu):
out = self.conv1(x) # in:1/4 out:1/8
pre = self.conv2(out) # in:1/8 out:1/8
if postsqu is not None:
pre = F.relu(pre + postsqu, inplace=True)
else:
pre = F.relu(pre, inplace=True)
out = self.conv3(pre) # in:1/8 out:1/16
out = self.conv4(out) # in:1/16 out:1/16
if presqu is not None:
post = F.relu(
self.conv5(out) + presqu, inplace=True) # in:1/16 out:1/8
else:
post = F.relu(self.conv5(out) + pre, inplace=True)
out = self.conv6(post) # in:1/8 out:1/4
return out, pre, post
class similarity_measure1(nn.Module):
def __init__(self):
super(similarity_measure1, self).__init__()
self.inplanes = 32
self.conv0 = nn.Conv2d(66, 32, kernel_size=1, stride=1, padding=0,
bias=False,dilation=1)
self.relu0 = nn.LeakyReLU(inplace=True)
self.conv1 = nn.Conv2d(32, 16, kernel_size=1, stride=1, padding=0,
bias=False,dilation=1)
self.relu1 = nn.LeakyReLU(inplace=True)
self.conv2 = nn.Conv2d(16, 8, kernel_size=1, stride=1, padding=0,
bias=False,dilation=1)
self.relu2 = nn.LeakyReLU(inplace=True)
self.conv3 = nn.Conv2d(8, 1, kernel_size=1, stride=1, padding=0,
bias=False,dilation=1)
#self.relu3 = nn.Sigmoid()
# self.conv4 = nn.Conv2d(16, 8, kernel_size=1, stride=1, padding=0,
# bias=False,dilation=1)
# self.relu4 = nn.LeakyReLU(inplace=True)
# self.conv5 = nn.Conv2d(8, 1, kernel_size=1, stride=1, padding=0,
# bias=False,dilation=1)
# self.relu5 = nn.ReLU(inplace=True)
#self.s1=nn.Parameter(torch.ones(1)).float()*0.5
for m in self.modules():
if isinstance(m,nn.Conv2d):
nn.init.kaiming_normal_(m.weight,mode='fan_out',nonlinearity='relu')
elif isinstance(m, nn.GroupNorm):
nn.init.constant_(m.weight,1)
nn.init.constant_(m.bias,0)
def forward(self, x):
output = self.conv0(x)
output = self.relu0(output)
output = self.conv1(output)
output = self.relu1(output)
output = self.conv2(output)
output = self.relu2(output)
output = self.conv3(output)
#output = self.relu3(output)
# output = self.conv4(output)
# output = self.relu4(output)
# output = self.conv5(output)
# #output = torch.abs(output)
# output = self.relu5(output)
# print(output.shape)
# print(torch.mean(output).item(),torch.max(output).item(),torch.min(output).item())
# output = output/torch.max(output)
# output = output-torch.min(output)
# output = 1-output
# output = torch.exp(-output)
#print(torch.mean(output).item(),torch.max(output).item(),torch.min(output).item())
return output
class similarity_measure2(nn.Module):
def __init__(self):
super(similarity_measure2, self).__init__()
self.inplanes = 32
self.conv0 = nn.Conv2d(3, 3, kernel_size=1, stride=1, padding=0,
bias=False,dilation=1)
self.relu0 = nn.LeakyReLU(inplace=True)
self.conv1 = nn.Conv2d(3, 2, kernel_size=1, stride=1, padding=0,
bias=False,dilation=1)
self.relu1 = nn.LeakyReLU(inplace=True)
self.conv2 = nn.Conv2d(2, 1, kernel_size=1, stride=1, padding=0,
bias=False,dilation=1)
self.relu2 = nn.LeakyReLU(inplace=True)
#self.s2=nn.Parameter(torch.ones(1)).float()*0.5
for m in self.modules():
if isinstance(m,nn.Conv2d):
nn.init.kaiming_normal_(m.weight,mode='fan_out',nonlinearity='relu')
elif isinstance(m, nn.GroupNorm):
nn.init.constant_(m.weight,1)
nn.init.constant_(m.bias,0)
def forward(self, x):
output = self.conv0(x)
output = self.relu0(output)
output = self.conv1(output)
output = self.relu1(output)
output = self.conv2(output)
output = self.relu2(output)
return output
def matrix_generation():
scale=4
x=torch.arange(-scale//2,scale//2+1).float()
x=torch.cat([x[:x.shape[0]//2],x[x.shape[0]//2+1:]]).unsqueeze(0)
distance_matrix=x.expand(scale,scale).unsqueeze(0)
distance_matrix=torch.cat([distance_matrix,distance_matrix.transpose(2,1)],0)
distance_matrix=distance_matrix.unsqueeze(0)
distance_matrix1=distance_matrix+0
distance_matrix2=distance_matrix+0
distance_matrix3=distance_matrix+0
distance_matrix4=distance_matrix+0
distance_matrix5=distance_matrix+0
distance_matrix6=distance_matrix+0
distance_matrix7=distance_matrix+0
distance_matrix8=distance_matrix+0
x=torch.arange(1,scale+1).float()
x=x.expand(scale,scale).unsqueeze(0)
#x=x.repeat(hr_feature.shape[0],hr_feature.shape[-2]//scale,hr_feature.shape[-1]//scale).float().cuda()
distance_matrix1[:,0,:,:]=scale-x+1
distance_matrix2[:,0,:,:]=x
distance_matrix5[:,0,:,:]=distance_matrix2[:,0,:,:]
distance_matrix6[:,0,:,:]=distance_matrix1[:,0,:,:]
distance_matrix7[:,0,:,:]=distance_matrix2[:,0,:,:]
distance_matrix8[:,0,:,:]=distance_matrix1[:,0,:,:]
x=torch.arange(1,scale+1).float()
x=x.expand(scale,scale).unsqueeze(0).transpose(2,1)
distance_matrix3[:,1,:,:]=(scale-x+1)
distance_matrix4[:,1,:,:]=x
distance_matrix5[:,1,:,:]=distance_matrix3[:,1,:,:]
distance_matrix6[:,1,:,:]=distance_matrix3[:,1,:,:]
distance_matrix7[:,1,:,:]=distance_matrix4[:,1,:,:]
distance_matrix8[:,1,:,:]=distance_matrix4[:,1,:,:]
# print(distance_matrix3)
return distance_matrix.cuda(),distance_matrix1.cuda(),distance_matrix2.cuda(),distance_matrix3.cuda(),distance_matrix4.cuda(), \
distance_matrix5.cuda(),distance_matrix6.cuda(),distance_matrix7.cuda(),distance_matrix8.cuda()
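# Editor's note (behaviour sketch for matrix_generation above): each returned tensor has shape
# (1, 2, scale, scale) with scale fixed to 4 here; channel 0 holds horizontal offsets and
# channel 1 vertical offsets of every high-resolution pixel within its low-resolution cell.
# The first tensor encodes offsets relative to the cell centre, while the remaining eight are
# the variants fed to the left/right/top/bottom and diagonal neighbour representations in the
# mapping module below. They are later tiled with .repeat() to cover the full feature map.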
class eight_related_context_mapping(nn.Module):
def __init__(self):
super(eight_related_context_mapping,self).__init__()
self.similarity1=similarity_measure1()
#need to remove
#self.similarity2=similarity_measure2()
# self.fuse=nn.Sequential(nn.Conv2d(2, 1, kernel_size=1, stride=1, padding=0,
# bias=False,dilation=1),nn.LeakyReLU(inplace=True))
#self.fuse.weight.data.fill_(1)
self.sigmoid=nn.Sigmoid()
self.distance_matrix,self.distance_matrix1,self.distance_matrix2,self.distance_matrix3,self.distance_matrix4, \
self.distance_matrix5,self.distance_matrix6,self.distance_matrix7,self.distance_matrix8=matrix_generation()
def forward(self, lr_feature, hr_feature,lr_feature_r, hr_feature_r):
#self.fuse.weight.data=torch.abs(self.fuse.weight.data)
with torch.no_grad():
scale=hr_feature.shape[-1]//lr_feature.shape[-1]
if scale%2!=0:
exit()
padding1=hr_feature[:,:1,:,:scale]*0-100
padding2=hr_feature[:,:1,:scale,:]*0-100
distance_matrix=self.distance_matrix.repeat(hr_feature.shape[0],1,hr_feature.shape[-2]//scale,hr_feature.shape[-1]//scale).float()
distance_matrix1=self.distance_matrix1.repeat(hr_feature.shape[0],1,hr_feature.shape[-2]//scale,hr_feature.shape[-1]//scale).float()
distance_matrix2=self.distance_matrix2.repeat(hr_feature.shape[0],1,hr_feature.shape[-2]//scale,hr_feature.shape[-1]//scale).float()
distance_matrix3=self.distance_matrix3.repeat(hr_feature.shape[0],1,hr_feature.shape[-2]//scale,hr_feature.shape[-1]//scale).float()
distance_matrix4=self.distance_matrix4.repeat(hr_feature.shape[0],1,hr_feature.shape[-2]//scale,hr_feature.shape[-1]//scale).float()
distance_matrix5=self.distance_matrix1.repeat(hr_feature.shape[0],1,hr_feature.shape[-2]//scale,hr_feature.shape[-1]//scale).float()
distance_matrix6=self.distance_matrix2.repeat(hr_feature.shape[0],1,hr_feature.shape[-2]//scale,hr_feature.shape[-1]//scale).float()
distance_matrix7=self.distance_matrix3.repeat(hr_feature.shape[0],1,hr_feature.shape[-2]//scale,hr_feature.shape[-1]//scale).float()
distance_matrix8=self.distance_matrix4.repeat(hr_feature.shape[0],1,hr_feature.shape[-2]//scale,hr_feature.shape[-1]//scale).float()
#center
#reference image
lr_feature=lr_feature.unsqueeze(-1).expand(lr_feature.shape[0],lr_feature.shape[1],lr_feature.shape[2],lr_feature.shape[3],scale) \
.contiguous().view(lr_feature.shape[0],lr_feature.shape[1],lr_feature.shape[2],lr_feature.shape[3]*scale) \
.unsqueeze(-2).expand(lr_feature.shape[0],lr_feature.shape[1],lr_feature.shape[2],scale,lr_feature.shape[3]*scale) \
.contiguous().view(lr_feature.shape[0],lr_feature.shape[1],lr_feature.shape[2]*scale,lr_feature.shape[3]*scale)
representation=torch.cat([lr_feature,hr_feature,distance_matrix],1)
weight=self.similarity1(representation)
#target image
# lr_feature_r=lr_feature_r.unsqueeze(-1).expand(lr_feature_r.shape[0],lr_feature_r.shape[1],lr_feature_r.shape[2],lr_feature_r.shape[3],scale) \
# .contiguous().view(lr_feature_r.shape[0],lr_feature_r.shape[1],lr_feature_r.shape[2],lr_feature_r.shape[3]*scale) \
# .unsqueeze(-2).expand(lr_feature_r.shape[0],lr_feature_r.shape[1],lr_feature_r.shape[2],scale,lr_feature_r.shape[3]*scale) \
# .contiguous().view(lr_feature_r.shape[0],lr_feature_r.shape[1],lr_feature_r.shape[2]*scale,lr_feature_r.shape[3]*scale)
# representation_target=torch.cat([lr_feature_r,hr_feature_r,distance_matrix],1)
# weight_target=self.similarity1(representation_target)
#left
#reference
representation_l=torch.cat([lr_feature[:,:,:,:-scale],hr_feature[:,:,:,scale:],distance_matrix1[:,:,:,:-scale]],1)
weight_l=self.similarity1(representation_l)
weight_l=torch.cat([padding1,weight_l],-1)
#target
# representation_l_target=torch.cat([lr_feature_r[:,:,:,:-scale],hr_feature_r[:,:,:,scale:],distance_matrix2[:,:,:,:-scale]],1)
# weight_l_target=self.similarity1(representation_l_target)
# weight_l_target=torch.cat([padding1,weight_l_target],-1)
#right
#reference
representation_r=torch.cat([lr_feature[:,:,:,scale:],hr_feature[:,:,:,:-scale],distance_matrix2[:,:,:,scale:]],1)
weight_r=self.similarity1(representation_r)
weight_r=torch.cat([weight_r,padding1],-1)
#target image
# representation_r_target=torch.cat([lr_feature_r[:,:,:,scale:],hr_feature_r[:,:,:,:-scale],distance_matrix1[:,:,:,scale:]],1)
# weight_r_target=self.similarity1(representation_r_target)
# weight_r_target=torch.cat([weight_r_target,padding1],-1)
#top
#reference
representation_t=torch.cat([lr_feature[:,:,:-scale,:],hr_feature[:,:,scale:,:],distance_matrix3[:,:,:-scale,:]],1)
weight_t=self.similarity1(representation_t)
weight_t=torch.cat([padding2,weight_t],-2)
#target
# representation_t_target=torch.cat([lr_feature_r[:,:,:-scale,:],hr_feature_r[:,:,scale:,:],distance_matrix3[:,:,:-scale,:]],1)
# weight_t_target=self.similarity1(representation_t_target)
# weight_t_target=torch.cat([padding2,weight_t_target],-2)
#bottom
#reference
representation_b=torch.cat([lr_feature[:,:,scale:,:],hr_feature[:,:,:-scale,:],distance_matrix4[:,:,scale:,:]],1)
weight_b=self.similarity1(representation_b)
weight_b=torch.cat([weight_b,padding2],-2)
#left-top
#reference
representation_lt=torch.cat([lr_feature[:,:,:-scale,:-scale],hr_feature[:,:,scale:,scale:],distance_matrix5[:,:,:-scale,:-scale]],1)
weight_lt=self.similarity1(representation_lt)
weight_lt=torch.cat([padding2,torch.cat([padding1[...,scale:,:],weight_lt],-1)],-2)
#target
# representation_l_target=torch.cat([lr_feature_r[:,:,:,:-scale],hr_feature_r[:,:,:,scale:],distance_matrix2[:,:,:,:-scale]],1)
# weight_l_target=self.similarity1(representation_l_target)
# weight_l_target=torch.cat([padding1,weight_l_target],-1)
#right-top
#reference
representation_rt=torch.cat([lr_feature[:,:,:-scale,scale:],hr_feature[:,:,scale:,:-scale],distance_matrix6[:,:,:-scale,scale:]],1)
weight_rt=self.similarity1(representation_rt)
weight_rt=torch.cat([padding2,torch.cat([weight_rt,padding1[...,scale:,:]],-1)],-2)
#target image
# representation_r_target=torch.cat([lr_feature_r[:,:,:,scale:],hr_feature_r[:,:,:,:-scale],distance_matrix1[:,:,:,scale:]],1)
# weight_r_target=self.similarity1(representation_r_target)
# weight_r_target=torch.cat([weight_r_target,padding1],-1)
#left-bottom
#reference
        representation_lb=torch.cat([lr_feature[:,:,scale:,:-scale],hr_feature[:,:,:-scale,scale:],distance_matrix7[:,:,scale:,:-scale]],1)
weight_lb=self.similarity1(representation_lb)
weight_lb=torch.cat([torch.cat([padding1[...,scale:,:],weight_lb],-1),padding2],-2)
#target
# representation_t_target=torch.cat([lr_feature_r[:,:,:-scale,:],hr_feature_r[:,:,scale:,:],distance_matrix3[:,:,:-scale,:]],1)
# weight_t_target=self.similarity1(representation_t_target)
# weight_t_target=torch.cat([padding2,weight_t_target],-2)
#right-bottom
#reference
representation_rb=torch.cat([lr_feature[:,:,scale:,scale:],hr_feature[:,:,:-scale,:-scale],distance_matrix8[:,:,scale:,scale:]],1)
weight_rb=self.similarity1(representation_rb)
weight_rb=torch.cat([torch.cat([weight_rb,padding1[...,:-scale,:]],-1),padding2],-2)
weight_all=torch.cat([weight,weight_l,weight_r,weight_t,weight_b,weight_lt,weight_rt,weight_lb,weight_rb],dim=1)
weight_norm=F.softmax(weight_all, dim=1)
#weight_fuse=F.softmax(weight_norm*weight_all)
#target
# representation_b_target=torch.cat([lr_feature_r[:,:,scale:,:],hr_feature_r[:,:,:-scale,:],distance_matrix4[:,:,scale:,:]],1)
# weight_b_target=self.similarity1(representation_b_target)
# weight_b_target=torch.cat([weight_b_target,padding2],-2)
# weight_all=torch.cat([weight,weight_r,weight_l,weight_t,weight_b],dim=1)
# weight_norm=F.softmax(weight_all, dim=1)
# weight_all_target=torch.cat([weight_target,weight_r_target,weight_l_target,weight_t_target,weight_b_target],dim=1)
# weight_norm_target=F.softmax(weight_all_target, dim=1)
# return weight*weight_norm[:,0:1,:,:],weight_target*weight_norm_target[:,0:1,:,:], \
# weight_r*weight_norm[:,1:2,:,:],weight_r_target*weight_norm_target[:,1:2,:,:], \
# weight_l*weight_norm[:,2:3,:,:],weight_l_target*weight_norm_target[:,2:3,:,:], \
# weight_t*weight_norm[:,3:4,:,:],weight_t_target*weight_norm_target[:,3:4,:,:], \
# weight_b*weight_norm[:,4:5,:,:],weight_b_target*weight_norm_target[:,4:5,:,:]
# return self.sigmoid(weight)*weight_norm[:,0:1,...], \
# self.sigmoid(weight_l)*weight_norm[:,1:2,...], \
# self.sigmoid(weight_r)*weight_norm[:,2:3,...], \
# self.sigmoid(weight_t)*weight_norm[:,3:4,...], \
# self.sigmoid(weight_b)*weight_norm[:,4:5,...],\
# self.sigmoid(weight_lt)*weight_norm[:,5:6,...], \
# self.sigmoid(weight_rt)*weight_norm[:,6:7,...], \
# self.sigmoid(weight_lb)*weight_norm[:,7:8,...], \
# self.sigmoid(weight_rb)*weight_norm[:,8:9,...]
#print(torch.mean(torch.max(weight_norm,dim=1)[0]),torch.max(weight_all,dim=1)[0])
#print(torch.mean(torch.topk(weight_all,3,dim=1)[0].float()),torch.mean(torch.topk(weight_all,3,dim=1)[1].float()))
#print(torch.mean(torch.topk(weight_all,1,dim=1)[0].float()),torch.mean(torch.topk(weight_all,1,dim=1)[1].float()))
        # debug check: report top-k statistics when even the best similarity logit is negative on average
        if torch.mean(torch.topk(weight_all,1,dim=1)[0].float())<0:
print(torch.mean(torch.topk(weight_all,3,dim=1)[0].float()),torch.mean(torch.topk(weight_all,3,dim=1)[1].float()))
print(torch.mean(torch.topk(weight_all,1,dim=1)[0].float()),torch.mean(torch.topk(weight_all,1,dim=1)[1].float()))
#print(torch.mean(torch.min(weight_norm,dim=1)[0]),torch.min(weight_all,dim=1)[0])
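        # The nine channels of weight_norm are a per-pixel softmax over the centre
        # similarity and its eight neighbour similarities (l, r, t, b, lt, rt, lb, rb),
        # so the slices returned below sum to 1 at every location.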
return weight_norm[:,0:1,...], \
weight_norm[:,1:2,...], \
weight_norm[:,2:3,...], \
weight_norm[:,3:4,...], \
weight_norm[:,4:5,...],\
weight_norm[:,5:6,...], \
weight_norm[:,6:7,...], \
weight_norm[:,7:8,...], \
weight_norm[:,8:9,...]
class cmfsm(nn.Module):
def __init__(self,
maxdisp=192):
super(cmfsm, self).__init__()
self.maxdisp = maxdisp
self.feature_extraction = feature_extraction()
self.dres0 = nn.Sequential(
convbn_3d(64, 32, 3, 1, 1),
nn.ReLU(inplace=True),
convbn_3d(32, 32, 3, 1, 1),
nn.ReLU(inplace=True))
self.dres1 = nn.Sequential(
convbn_3d(32, 32, 3, 1, 1),
nn.ReLU(inplace=True),
convbn_3d(32, 32, 3, 1, 1))
self.dres2 = hourglass(32)
self.dres3 = hourglass(32)
self.dres4 = hourglass(32)
self.classif1 = nn.Sequential(
convbn_3d(32, 32, 3, 1, 1),
nn.ReLU(inplace=True),
nn.Conv3d(32, 1, kernel_size=3, padding=1, stride=1, bias=False))
self.classif2 = nn.Sequential(
convbn_3d(32, 32, 3, 1, 1),
nn.ReLU(inplace=True),
nn.Conv3d(32, 1, kernel_size=3, padding=1, stride=1, bias=False))
self.classif3 = nn.Sequential(
convbn_3d(32, 32, 3, 1, 1),
nn.ReLU(inplace=True),
nn.Conv3d(32, 1, kernel_size=3, padding=1, stride=1, bias=False))
self.mapping_matrix=eight_related_context_mapping()
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.Conv3d):
n = m.kernel_size[0] * m.kernel_size[1] * \
m.kernel_size[2] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm3d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.Linear):
m.bias.data.zero_()
def forward(self, left, right):
start=time.time()
refimg_fea, half,all_feature= self.feature_extraction(left)
targetimg_fea, _ ,all_feature_right= self.feature_extraction(right)
scale=all_feature.shape[-1]//refimg_fea.shape[-1]
#mapping,mapping_r,mapping_l,mapping_t,mapping_b=self.mapping_matrix(refimg_fea,all_feature)
#target
#[mapping,mapping_r,mapping_l,mapping_t,mapping_b],[mapping_target,mapping_target_r,mapping_target_l]=self.mapping_matrix(refimg_fea,all_feature,targetimg_fea,all_feature_right)
#time=0.1s
weight,weight_l,weight_r,weight_t,weight_b,weight_lt,weight_rt,weight_lb,weight_rb=self.mapping_matrix(refimg_fea,all_feature,targetimg_fea,all_feature_right)
#mapping,mapping_target=self.mapping_matrix(refimg_fea,all_feature,targetimg_fea,all_feature_right)
        # matching: build a concatenation-based cost volume of shape (B, 2*C, maxdisp//scale, H, W)
cost = Variable(
torch.FloatTensor(refimg_fea.size()[0],
refimg_fea.size()[1] * 2, self.maxdisp // scale,
refimg_fea.size()[2],
refimg_fea.size()[3]).zero_()).cuda()
for i in range(self.maxdisp // scale):
if i > 0:
cost[:, :refimg_fea.size()[1], i, :, i:] = refimg_fea[:, :, :,
i:]
cost[:, refimg_fea.size()[1]:, i, :,
i:] = targetimg_fea[:, :, :, :-i]
else:
cost[:, :refimg_fea.size()[1], i, :, :] = refimg_fea
cost[:, refimg_fea.size()[1]:, i, :, :] = targetimg_fea
cost = cost.contiguous()
cost0 = self.dres0(cost)
cost0 = self.dres1(cost0) + cost0
out1, pre1, post1 = self.dres2(cost0, None, None)
out1 = out1 + cost0
out2, pre2, post2 = self.dres3(out1, pre1, post1)
out2 = out2 + cost0
out3, pre3, post3 = self.dres4(out2, pre1, post2)
out3 = out3 + cost0
cost1 = self.classif1(out1)
#cost2 = self.classif2(out2) + cost1
#cost3 = self.classif3(out3) + cost2
#torch.Size([1, 1, 256, 512])
# weight_all=torch.cat([weight,weight_r,weight_l,weight_t,weight_b],dim=1)
# weight_norm=F.softmax(weight_all, dim=1)
# t=time.time()
cost1 = torch.squeeze(cost1, 1)
pred1 = F.softmax(cost1, dim=1)
pred1 = disparityregression(self.maxdisp//scale)(pred1)
#torch.Size([1, 64, 128])
pred1=scale*pred1.unsqueeze(-1).expand(pred1.shape[0],pred1.shape[1],pred1.shape[2],scale) \
.contiguous().view(pred1.shape[0],pred1.shape[1],pred1.shape[2]*scale) \
.unsqueeze(-2).expand(pred1.shape[0],pred1.shape[1],scale,pred1.shape[2]*scale) \
.contiguous().view(pred1.shape[0],pred1.shape[1]*scale,pred1.shape[2]*scale)
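        # Context-mapping fusion: each full-resolution disparity is rebuilt as a
        # weighted sum of the (nearest-neighbour upsampled) prediction at the pixel
        # itself and at its eight neighbours shifted by `scale`, using the normalized
        # weights from eight_related_context_mapping; the same fusion is repeated
        # for pred2 and pred3 below.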
pred1_map=pred1*weight
pred1_map[...,scale:]+=pred1[...,:-scale]*weight_l[...,scale:]
pred1_map[...,:-scale]+=pred1[...,scale:]*weight_r[...,:-scale]
pred1_map[...,scale:,:]+=pred1[...,:-scale,:]*weight_t[...,scale:,:]
pred1_map[...,:-scale,:]+=pred1[...,scale:,:]*weight_b[...,:-scale,:]
pred1_map[...,scale:,scale:]+=pred1[...,:-scale,:-scale]*weight_lt[...,scale:,scale:]
pred1_map[...,scale:,:-scale]+=pred1[...,:-scale,scale:]*weight_rt[...,scale:,:-scale]
pred1_map[...,:-scale,scale:]+=pred1[...,scale:,:-scale]*weight_lb[...,:-scale,scale:]
pred1_map[...,:-scale,:-scale]+=pred1[...,scale:,scale:]*weight_rb[...,:-scale,:-scale]
cost2 = self.classif2(out2)
cost2 = torch.squeeze(cost2, 1)+cost1
pred2 = F.softmax(cost2, dim=1)
pred2 = disparityregression(self.maxdisp//scale)(pred2)
pred2=scale*pred2.unsqueeze(-1).expand(pred2.shape[0],pred2.shape[1],pred2.shape[2],scale) \
.contiguous().view(pred2.shape[0],pred2.shape[1],pred2.shape[2]*scale) \
.unsqueeze(-2).expand(pred2.shape[0],pred2.shape[1],scale,pred2.shape[2]*scale) \
.contiguous().view(pred2.shape[0],pred2.shape[1]*scale,pred2.shape[2]*scale)
pred2_map=pred2*weight
pred2_map[...,scale:]+=pred2[...,:-scale]*weight_l[...,scale:]
pred2_map[...,:-scale]+=pred2[...,scale:]*weight_r[...,:-scale]
pred2_map[...,scale:,:]+=pred2[...,:-scale,:]*weight_t[...,scale:,:]
pred2_map[...,:-scale,:]+=pred2[...,scale:,:]*weight_b[...,:-scale,:]
pred2_map[...,scale:,scale:]+=pred2[...,:-scale,:-scale]*weight_lt[...,scale:,scale:]
pred2_map[...,scale:,:-scale]+=pred2[...,:-scale,scale:]*weight_rt[...,scale:,:-scale]
pred2_map[...,:-scale,scale:]+=pred2[...,scale:,:-scale]*weight_lb[...,:-scale,scale:]
pred2_map[...,:-scale,:-scale]+=pred2[...,scale:,scale:]*weight_rb[...,:-scale,:-scale]
cost3 = self.classif3(out3)
cost3 = torch.squeeze(cost3, 1)+cost2
pred3 = F.softmax(cost3, dim=1)
# print(torch.max(pred3,dim=1)[0])
# print(torch.min(pred3,dim=1)[0])
pred3 = disparityregression(self.maxdisp//scale)(pred3)
pred3=scale*pred3.unsqueeze(-1).expand(pred3.shape[0],pred3.shape[1],pred3.shape[2],scale) \
.contiguous().view(pred3.shape[0],pred3.shape[1],pred3.shape[2]*scale) \
.unsqueeze(-2).expand(pred3.shape[0],pred3.shape[1],scale,pred3.shape[2]*scale) \
.contiguous().view(pred3.shape[0],pred3.shape[1]*scale,pred3.shape[2]*scale)
pred3_map=pred3*weight
pred3_map[...,scale:]+=pred3[...,:-scale]*weight_l[...,scale:]
pred3_map[...,:-scale]+=pred3[...,scale:]*weight_r[...,:-scale]
pred3_map[...,scale:,:]+=pred3[...,:-scale,:]*weight_t[...,scale:,:]
pred3_map[...,:-scale,:]+=pred3[...,scale:,:]*weight_b[...,:-scale,:]
pred3_map[...,scale:,scale:]+=pred3[...,:-scale,:-scale]*weight_lt[...,scale:,scale:]
pred3_map[...,scale:,:-scale]+=pred3[...,:-scale,scale:]*weight_rt[...,scale:,:-scale]
pred3_map[...,:-scale,scale:]+=pred3[...,scale:,:-scale]*weight_lb[...,:-scale,scale:]
pred3_map[...,:-scale,:-scale]+=pred3[...,scale:,scale:]*weight_rb[...,:-scale,:-scale]
#pred3 = self.srr(pred3, left, refimg_fea, half)
#print(time.time()-start)
return pred1_map, pred2_map, pred3_map
#return pred3
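# Minimal smoke-test sketch (an assumption, not part of the original training code):
# cmfsm needs CUDA because the cost volume is allocated with .cuda(), and the input
# size should match the crop size the feature extractor was designed for.
if __name__ == "__main__":
    net = cmfsm(maxdisp=192).cuda().eval()
    left = torch.rand(1, 3, 256, 512).cuda()
    right = torch.rand(1, 3, 256, 512).cuda()
    with torch.no_grad():
        d1, d2, d3 = net(left, right)
    # each output is a disparity map at the spatial resolution of `all_feature`
    print(d1.shape, d2.shape, d3.shape)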
| forward |
deep-web-scanner.py | import threading
import ipaddress
import socket
import time
from typing import Optional, Union
import requests
requests.packages.urllib3.disable_warnings() # type: ignore
from concurrent.futures import ThreadPoolExecutor
import colorama
colorama.init(autoreset=True)
import os
import bs4
import argparse
folder = os.path.dirname(__file__)
output_strings: list[str] = []
ports = [80, 443, 8080, 8081, 8443, 4434]
keywords = ["cam", "rasp", " hp ", "system", "index of", "dashboard"]
output_tmp = ""
last_write = time.time()
global_lock = threading.Lock()
banner_targets: list[dict[str, Union[str, int]]] = []
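# Expected input file format (inferred from the parsing in main() below): one IPv4
# range per line, either as "start-end" or in CIDR notation, e.g.
#   2.56.20.0-2.56.23.255
#   192.0.2.0/24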
def main():
print("----------------------------")
print(" Deep Web Scanner! ")
print("----------------------------\n")
print("Every active webserver url will be logged in the output file.")
print("This terminal will only show urls/metadata with the following keywords: " + ", ".join(keywords))
if indexof.lower() == "true":
        print("'Index of /' filenames will be logged!")
print("Scan will start...")
with open(input_file, "r") as myfile:
content = myfile.readlines()
for line in content:
# split ip range 2.56.20.0-2.56.23.255
if "-" in line:
ip_range_array = line.split("-")
ip_range_start = ip_range_array[0].strip()
ip_range_end = ip_range_array[1].strip()
print(f"Start scan from range: {ip_range_start} - {ip_range_end}")
current_ip = ipaddress.IPv4Address(ip_range_start)
end_ip = ipaddress.IPv4Address(ip_range_end)
with ThreadPoolExecutor(max_workers=100) as executor_portcheck:
while current_ip < end_ip:
executor_portcheck.submit(start_portcheck, current_ip.exploded)
current_ip += 1
elif "/" in line:
ip_range = ipaddress.ip_network(line.strip())
with ThreadPoolExecutor(max_workers=100) as executor_portcheck:
for ip in ip_range.hosts():
executor_portcheck.submit(start_portcheck, ip.exploded)
else:
print("No valid input file! Should be something like 2.56.20.0-2.56.23.255 per line!")
global banner_targets
print(f"{len(banner_targets)} responses")
for target in banner_targets:
start_request(target["ip"], target["port"]) # type: ignore
banner_targets.clear()
write_line("", True)
def start_portcheck(ip: str) -> None:
global banner_targets
# fast webserver port checking
for port in ports:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.settimeout(3)
            result = sock.connect_ex((ip, port))
            if result == 0:  # connect_ex returns 0 on success, an errno value otherwise
# queue normal browser request
banner_targets.append({"ip": ip, "port": port})
def start_request(ip: str, port: int) -> None:
# check for running websites
|
def request_url(url: str) -> Union[tuple[requests.Response, bs4.BeautifulSoup], bool]:
# request url and return the response
try:
session = requests.session()
session.headers[
"User-Agent"
] = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36"
header = session.head(url=url, timeout=20, verify=False)
# check content type
one_allowed_content_type = False
content_type_header = header.headers.get("content-type")
if content_type_header is not None:
for allowed_content_type in ["html", "plain", "xml", "text", "json"]:
if allowed_content_type in content_type_header.lower():
one_allowed_content_type = True
if not one_allowed_content_type:
return False
else:
return False
response = session.get(url=url, timeout=30, verify=False)
session.close()
soup = bs4.BeautifulSoup(response.text, "html.parser")
return (response, soup)
except Exception:
return False
def get_banner(request: requests.Response, soup: bs4.BeautifulSoup):
# get banner information, show console output and save them to file
banner_array: list[str] = []
banner_array.append(request.url)
server_header = request.headers.get("Server")
if isinstance(server_header, str):
banner_array.append(server_header)
title = soup.find("title")
if isinstance(title, bs4.Tag):
title = title.get_text().strip().replace("\n", "")
banner_array.append(title)
meta_tags: bs4.element.ResultSet[bs4.Tag] = soup.find_all("meta", attrs={"name": "generator"})
    if len(meta_tags) > 0:
        for meta_tag in meta_tags:
            # Tag has no ".attr" attribute; read the generator string straight from the tag
            generator = meta_tag.get("content")
            if isinstance(generator, str):
                banner_array.append(generator)
# has this site a password field?
password_fields = soup.find_all(attrs={"type": "password"})
if len(password_fields) > 0:
banner_array.append("login required")
# check for "index of" websites and show root files/folders
global indexof
if indexof.lower() == "true" and "index of" in request.text.lower():
a_array: list[bs4.Tag] = soup.find_all("a")
for a in a_array:
href = a.attrs.get("href")
if isinstance(href, str):
if href.find("?") != 0:
banner_array.append(href)
banner_array.append(f"{str(len(request.content))} content size")
fullstring = ", ".join(banner_array)
if fullstring not in output_strings:
output_strings.append(fullstring)
for keyword in keywords:
if keyword in fullstring.lower():
if "login required" in fullstring:
print(colorama.Fore.RED + fullstring)
elif "Index of /" in fullstring:
print(colorama.Fore.YELLOW + fullstring)
else:
print(colorama.Fore.GREEN + fullstring)
write_line(fullstring)
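# A logged entry is the comma-joined banner list, e.g. (illustrative only, not real output):
#   "http://203.0.113.7:8080, nginx, Dashboard, login required, 5123 content size"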
def write_line(line: str, force: Optional[bool] = False):
# buffers and writes output to file
global output_tmp, last_write
output_tmp += line + "\n"
if last_write + 30 < time.time() or force:
last_write = time.time()
        # acquire() blocks until the lock is free, so the busy-wait is unnecessary;
        # a "with" block also guarantees the lock is released on errors
        with global_lock:
            lines_to_write = output_tmp.count("\n")
            with open(output_file, "a") as output_1:
                output_1.write(output_tmp)
            output_tmp = ""
            if lines_to_write > 1:
                print(f"{lines_to_write} webservers found and written to file")
            else:
                print(f"{lines_to_write} webserver found and written to file")
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Check if domain has an active website and grab banner."
)
parser.add_argument(
"-i", type=str, default="./asn-country-ipv4.csv", help="Path to input file"
)
parser.add_argument(
"-o", type=str, default="./deep-web.txt", help="Path to output file"
)
parser.add_argument(
"-indexof", type=str, default="no", help="Show files from index of sites"
)
args = parser.parse_args()
input_file = args.i
output_file = args.o
indexof = args.indexof
main()
| try:
url = "https://" + ip + ":" + str(port)
if port == 80:
url = "http://" + ip
elif port == 8080:
url = "http://" + ip + ":8080"
elif port == 8081:
url = "http://" + ip + ":8081"
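        # ports without an explicit branch above (443, 8443, 4434) keep the default
        # "https://<ip>:<port>" URL built at the top of this block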
site_result = request_url(url)
if not isinstance(site_result, bool) and site_result is not False:
# if the site is reachable get some information
get_banner(site_result[0], site_result[1])
except Exception as e:
print(e) |
basic.py | """Base class for all the objects in SymPy"""
from collections import defaultdict
from itertools import chain, zip_longest
from .assumptions import BasicMeta, ManagedProperties
from .cache import cacheit
from .sympify import _sympify, sympify, SympifyError
from .compatibility import iterable, ordered, Mapping
from .singleton import S
from inspect import getmro
def as_Basic(expr):
"""Return expr as a Basic instance using strict sympify
or raise a TypeError; this is just a wrapper to _sympify,
raising a TypeError instead of a SympifyError."""
from sympy.utilities.misc import func_name
try:
return _sympify(expr)
except SympifyError:
raise TypeError(
'Argument must be a Basic object, not `%s`' % func_name(
expr))
class Basic(metaclass=ManagedProperties):
"""
Base class for all objects in SymPy.
Conventions:
1) Always use ``.args``, when accessing parameters of some instance:
>>> from sympy import cot
>>> from sympy.abc import x, y
>>> cot(x).args
(x,)
>>> cot(x).args[0]
x
>>> (x*y).args
(x, y)
>>> (x*y).args[1]
y
2) Never use internal methods or variables (the ones prefixed with ``_``):
>>> cot(x)._args # do not use this, use cot(x).args instead
(x,)
"""
__slots__ = ('_mhash', # hash value
'_args', # arguments
'_assumptions'
)
# To be overridden with True in the appropriate subclasses
is_number = False
is_Atom = False
is_Symbol = False
is_symbol = False
is_Indexed = False
is_Dummy = False
is_Wild = False
is_Function = False
is_Add = False
is_Mul = False
is_Pow = False
is_Number = False
is_Float = False
is_Rational = False
is_Integer = False
is_NumberSymbol = False
is_Order = False
is_Derivative = False
is_Piecewise = False
is_Poly = False
is_AlgebraicNumber = False
is_Relational = False
is_Equality = False
is_Boolean = False
is_Not = False
is_Matrix = False
is_Vector = False
is_Point = False
is_MatAdd = False
is_MatMul = False
def __new__(cls, *args):
obj = object.__new__(cls)
obj._assumptions = cls.default_assumptions
obj._mhash = None # will be set by __hash__ method.
obj._args = args # all items in args must be Basic objects
return obj
def copy(self):
return self.func(*self.args)
def __reduce_ex__(self, proto):
""" Pickling support."""
return type(self), self.__getnewargs__(), self.__getstate__()
def __getnewargs__(self):
return self.args
def __getstate__(self):
return {}
def __setstate__(self, state):
for k, v in state.items():
setattr(self, k, v)
def __hash__(self):
# hash cannot be cached using cache_it because infinite recurrence
# occurs as hash is needed for setting cache dictionary keys
h = self._mhash
if h is None:
h = hash((type(self).__name__,) + self._hashable_content())
self._mhash = h
return h
def _hashable_content(self):
"""Return a tuple of information about self that can be used to
compute the hash. If a class defines additional attributes,
like ``name`` in Symbol, then this method should be updated
accordingly to return such relevant attributes.
Defining more than _hashable_content is necessary if __eq__ has
been defined by a class. See note about this in Basic.__eq__."""
return self._args
@property
def assumptions0(self):
"""
Return object `type` assumptions.
For example:
Symbol('x', real=True)
Symbol('x', integer=True)
are different objects. In other words, besides Python type (Symbol in
this case), the initial assumptions are also forming their typeinfo.
Examples
========
>>> from sympy import Symbol
>>> from sympy.abc import x
>>> x.assumptions0
{'commutative': True}
>>> x = Symbol("x", positive=True)
>>> x.assumptions0
{'commutative': True, 'complex': True, 'extended_negative': False,
'extended_nonnegative': True, 'extended_nonpositive': False,
'extended_nonzero': True, 'extended_positive': True, 'extended_real':
True, 'finite': True, 'hermitian': True, 'imaginary': False,
'infinite': False, 'negative': False, 'nonnegative': True,
'nonpositive': False, 'nonzero': True, 'positive': True, 'real':
True, 'zero': False}
"""
return {}
def compare(self, other):
"""
Return -1, 0, 1 if the object is smaller, equal, or greater than other.
Not in the mathematical sense. If the object is of a different type
from the "other" then their classes are ordered according to
the sorted_classes list.
Examples
========
>>> from sympy.abc import x, y
>>> x.compare(y)
-1
>>> x.compare(x)
0
>>> y.compare(x)
1
"""
# all redefinitions of __cmp__ method should start with the
# following lines:
if self is other:
return 0
n1 = self.__class__
n2 = other.__class__
c = (n1 > n2) - (n1 < n2)
if c:
return c
#
st = self._hashable_content()
ot = other._hashable_content()
c = (len(st) > len(ot)) - (len(st) < len(ot))
if c:
return c
for l, r in zip(st, ot):
l = Basic(*l) if isinstance(l, frozenset) else l
r = Basic(*r) if isinstance(r, frozenset) else r
if isinstance(l, Basic):
c = l.compare(r)
else:
c = (l > r) - (l < r)
if c:
return c
return 0
@staticmethod
def _compare_pretty(a, b):
from sympy.series.order import Order
if isinstance(a, Order) and not isinstance(b, Order):
return 1
if not isinstance(a, Order) and isinstance(b, Order):
return -1
if a.is_Rational and b.is_Rational:
l = a.p * b.q
r = b.p * a.q
return (l > r) - (l < r)
else:
from sympy.core.symbol import Wild
p1, p2, p3 = Wild("p1"), Wild("p2"), Wild("p3")
r_a = a.match(p1 * p2**p3)
if r_a and p3 in r_a:
a3 = r_a[p3]
r_b = b.match(p1 * p2**p3)
if r_b and p3 in r_b:
b3 = r_b[p3]
c = Basic.compare(a3, b3)
if c != 0:
return c
return Basic.compare(a, b)
@classmethod
def fromiter(cls, args, **assumptions):
"""
Create a new object from an iterable.
This is a convenience function that allows one to create objects from
any iterable, without having to convert to a list or tuple first.
Examples
========
>>> from sympy import Tuple
>>> Tuple.fromiter(i for i in range(5))
(0, 1, 2, 3, 4)
"""
return cls(*tuple(args), **assumptions)
@classmethod
def class_key(cls):
"""Nice order of classes. """
return 5, 0, cls.__name__
@cacheit
def sort_key(self, order=None):
"""
Return a sort key.
Examples
========
>>> from sympy.core import S, I
>>> sorted([S(1)/2, I, -I], key=lambda x: x.sort_key())
[1/2, -I, I]
>>> S("[x, 1/x, 1/x**2, x**2, x**(1/2), x**(1/4), x**(3/2)]")
[x, 1/x, x**(-2), x**2, sqrt(x), x**(1/4), x**(3/2)]
>>> sorted(_, key=lambda x: x.sort_key())
[x**(-2), 1/x, x**(1/4), sqrt(x), x, x**(3/2), x**2]
"""
# XXX: remove this when issue 5169 is fixed
def inner_key(arg):
if isinstance(arg, Basic):
return arg.sort_key(order)
else:
return arg
args = self._sorted_args
args = len(args), tuple([inner_key(arg) for arg in args])
return self.class_key(), args, S.One.sort_key(), S.One
def __eq__(self, other):
"""Return a boolean indicating whether a == b on the basis of
their symbolic trees.
This is the same as a.compare(b) == 0 but faster.
Notes
=====
If a class that overrides __eq__() needs to retain the
implementation of __hash__() from a parent class, the
interpreter must be told this explicitly by setting __hash__ =
<ParentClass>.__hash__. Otherwise the inheritance of __hash__()
will be blocked, just as if __hash__ had been explicitly set to
None.
References
==========
from http://docs.python.org/dev/reference/datamodel.html#object.__hash__
"""
if self is other:
return True
tself = type(self)
tother = type(other)
if tself is not tother:
try:
other = _sympify(other)
tother = type(other)
except SympifyError:
return NotImplemented
# As long as we have the ordering of classes (sympy.core),
# comparing types will be slow in Python 2, because it uses
# __cmp__. Until we can remove it
# (https://github.com/sympy/sympy/issues/4269), we only compare
# types in Python 2 directly if they actually have __ne__.
if type(tself).__ne__ is not type.__ne__:
if tself != tother:
return False
elif tself is not tother:
return False
return self._hashable_content() == other._hashable_content()
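    # Illustration of the __hash__ note in the docstring above (an assumption, not
    # SymPy code): a subclass overriding __eq__ must restore hashing explicitly.
    #
    #     class MyBasic(Basic):
    #         def __eq__(self, other):
    #             return super().__eq__(other)
    #         __hash__ = Basic.__hash__  # otherwise Python sets __hash__ to None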
def __ne__(self, other):
"""``a != b`` -> Compare two symbolic trees and see whether they are different
this is the same as:
``a.compare(b) != 0``
but faster
"""
return not self == other
def dummy_eq(self, other, symbol=None):
"""
Compare two expressions and handle dummy symbols.
Examples
========
>>> from sympy import Dummy
>>> from sympy.abc import x, y
>>> u = Dummy('u')
>>> (u**2 + 1).dummy_eq(x**2 + 1)
True
>>> (u**2 + 1) == (x**2 + 1)
False
>>> (u**2 + y).dummy_eq(x**2 + y, x)
True
>>> (u**2 + y).dummy_eq(x**2 + y, y)
False
"""
s = self.as_dummy()
o = _sympify(other)
o = o.as_dummy()
dummy_symbols = [i for i in s.free_symbols if i.is_Dummy]
if len(dummy_symbols) == 1:
dummy = dummy_symbols.pop()
else:
return s == o
if symbol is None:
symbols = o.free_symbols
if len(symbols) == 1:
symbol = symbols.pop()
else:
return s == o
tmp = dummy.__class__()
return s.subs(dummy, tmp) == o.subs(symbol, tmp)
# Note, we always use the default ordering (lex) in __str__ and __repr__,
# regardless of the global setting. See issue 5487.
def __repr__(self):
"""Method to return the string representation.
Return the expression as a string.
"""
from sympy.printing import sstr
return sstr(self, order=None)
def __str__(self):
from sympy.printing import sstr
return sstr(self, order=None)
# We don't define _repr_png_ here because it would add a large amount of
# data to any notebook containing SymPy expressions, without adding
    # anything useful to the notebook. It can still be enabled manually, e.g.,
# for the qtconsole, with init_printing().
def _repr_latex_(self):
"""
IPython/Jupyter LaTeX printing
To change the behavior of this (e.g., pass in some settings to LaTeX),
use init_printing(). init_printing() will also enable LaTeX printing
for built in numeric types like ints and container types that contain
SymPy objects, like lists and dictionaries of expressions.
"""
from sympy.printing.latex import latex
s = latex(self, mode='plain')
return "$\\displaystyle %s$" % s
_repr_latex_orig = _repr_latex_
def atoms(self, *types):
"""Returns the atoms that form the current object.
By default, only objects that are truly atomic and can't
be divided into smaller pieces are returned: symbols, numbers,
and number symbols like I and pi. It is possible to request
atoms of any type, however, as demonstrated below.
Examples
========
>>> from sympy import I, pi, sin
>>> from sympy.abc import x, y
>>> (1 + x + 2*sin(y + I*pi)).atoms()
{1, 2, I, pi, x, y}
If one or more types are given, the results will contain only
those types of atoms.
>>> from sympy import Number, NumberSymbol, Symbol
>>> (1 + x + 2*sin(y + I*pi)).atoms(Symbol)
{x, y}
>>> (1 + x + 2*sin(y + I*pi)).atoms(Number)
{1, 2}
>>> (1 + x + 2*sin(y + I*pi)).atoms(Number, NumberSymbol)
{1, 2, pi}
>>> (1 + x + 2*sin(y + I*pi)).atoms(Number, NumberSymbol, I)
{1, 2, I, pi}
Note that I (imaginary unit) and zoo (complex infinity) are special
types of number symbols and are not part of the NumberSymbol class.
The type can be given implicitly, too:
>>> (1 + x + 2*sin(y + I*pi)).atoms(x) # x is a Symbol
{x, y}
Be careful to check your assumptions when using the implicit option
since ``S(1).is_Integer = True`` but ``type(S(1))`` is ``One``, a special type
of sympy atom, while ``type(S(2))`` is type ``Integer`` and will find all
integers in an expression:
>>> from sympy import S
>>> (1 + x + 2*sin(y + I*pi)).atoms(S(1))
{1}
>>> (1 + x + 2*sin(y + I*pi)).atoms(S(2))
{1, 2}
Finally, arguments to atoms() can select more than atomic atoms: any
sympy type (loaded in core/__init__.py) can be listed as an argument
and those types of "atoms" as found in scanning the arguments of the
expression recursively:
>>> from sympy import Function, Mul
>>> from sympy.core.function import AppliedUndef
>>> f = Function('f')
>>> (1 + f(x) + 2*sin(y + I*pi)).atoms(Function)
{f(x), sin(y + I*pi)}
>>> (1 + f(x) + 2*sin(y + I*pi)).atoms(AppliedUndef)
{f(x)}
>>> (1 + x + 2*sin(y + I*pi)).atoms(Mul)
{I*pi, 2*sin(y + I*pi)}
"""
if types:
types = tuple(
[t if isinstance(t, type) else type(t) for t in types])
nodes = preorder_traversal(self)
if types:
result = {node for node in nodes if isinstance(node, types)}
else:
result = {node for node in nodes if not node.args}
return result
@property
def free_symbols(self):
"""Return from the atoms of self those which are free symbols.
For most expressions, all symbols are free symbols. For some classes
this is not true. e.g. Integrals use Symbols for the dummy variables
which are bound variables, so Integral has a method to return all
symbols except those. Derivative keeps track of symbols with respect
to which it will perform a derivative; those are
bound variables, too, so it has its own free_symbols method.
Any other method that uses bound variables should implement a
free_symbols method."""
return set().union(*[a.free_symbols for a in self.args])
@property
def expr_free_symbols(self):
return set()
def as_dummy(self):
"""Return the expression with any objects having structurally
bound symbols replaced with unique, canonical symbols within
the object in which they appear and having only the default
assumption for commutativity being True.
Examples
========
>>> from sympy import Integral, Symbol
>>> from sympy.abc import x, y
>>> r = Symbol('r', real=True)
>>> Integral(r, (r, x)).as_dummy()
Integral(_0, (_0, x))
>>> _.variables[0].is_real is None
True
Notes
=====
Any object that has structural dummy variables should have
a property, `bound_symbols` that returns a list of structural
dummy symbols of the object itself.
Lambda and Subs have bound symbols, but because of how they
are cached, they already compare the same regardless of their
bound symbols:
>>> from sympy import Lambda
>>> Lambda(x, x + 1) == Lambda(y, y + 1)
True
"""
def can(x):
d = {i: i.as_dummy() for i in x.bound_symbols}
# mask free that shadow bound
x = x.subs(d)
c = x.canonical_variables
# replace bound
x = x.xreplace(c)
# undo masking
x = x.xreplace({v: k for k, v in d.items()})
return x
return self.replace(
lambda x: hasattr(x, 'bound_symbols'),
lambda x: can(x))
@property
def canonical_variables(self):
"""Return a dictionary mapping any variable defined in
``self.bound_symbols`` to Symbols that do not clash
with any existing symbol in the expression.
Examples
========
>>> from sympy import Lambda
>>> from sympy.abc import x
>>> Lambda(x, 2*x).canonical_variables
{x: _0}
"""
from sympy.core.symbol import Symbol
from sympy.utilities.iterables import numbered_symbols
if not hasattr(self, 'bound_symbols'):
return {}
dums = numbered_symbols('_')
reps = {}
v = self.bound_symbols
# this free will include bound symbols that are not part of
# self's bound symbols
free = {i.name for i in self.atoms(Symbol) - set(v)}
for v in v:
d = next(dums)
if v.is_Symbol:
while v.name == d.name or d.name in free:
d = next(dums)
reps[v] = d
return reps
def rcall(self, *args):
"""Apply on the argument recursively through the expression tree.
This method is used to simulate a common abuse of notation for
        operators. For instance in SymPy the following will not work:
``(x+Lambda(y, 2*y))(z) == x+2*z``,
however you can use
>>> from sympy import Lambda
>>> from sympy.abc import x, y, z
>>> (x + Lambda(y, 2*y)).rcall(z)
x + 2*z
"""
return Basic._recursive_call(self, args)
@staticmethod
def _recursive_call(expr_to_call, on_args):
"""Helper for rcall method."""
from sympy import Symbol
def the_call_method_is_overridden(expr):
for cls in getmro(type(expr)):
if '__call__' in cls.__dict__:
return cls != Basic
if callable(expr_to_call) and the_call_method_is_overridden(expr_to_call):
if isinstance(expr_to_call, Symbol): # XXX When you call a Symbol it is
return expr_to_call # transformed into an UndefFunction
else:
return expr_to_call(*on_args)
elif expr_to_call.args:
args = [Basic._recursive_call(
sub, on_args) for sub in expr_to_call.args]
return type(expr_to_call)(*args)
else:
return expr_to_call
def is_hypergeometric(self, k):
from sympy.simplify import hypersimp
return hypersimp(self, k) is not None
@property
def is_comparable(self):
"""Return True if self can be computed to a real number
(or already is a real number) with precision, else False.
Examples
========
>>> from sympy import exp_polar, pi, I
>>> (I*exp_polar(I*pi/2)).is_comparable
True
>>> (I*exp_polar(I*pi*2)).is_comparable
False
A False result does not mean that `self` cannot be rewritten
into a form that would be comparable. For example, the
difference computed below is zero but without simplification
it does not evaluate to a zero with precision:
>>> e = 2**pi*(1 + 2**pi)
>>> dif = e - e.expand()
>>> dif.is_comparable
False
>>> dif.n(2)._prec
1
"""
is_extended_real = self.is_extended_real
if is_extended_real is False:
return False
if not self.is_number:
return False
# don't re-eval numbers that are already evaluated since
# this will create spurious precision
n, i = [p.evalf(2) if not p.is_Number else p
for p in self.as_real_imag()]
if not (i.is_Number and n.is_Number):
return False
if i:
# if _prec = 1 we can't decide and if not,
# the answer is False because numbers with
# imaginary parts can't be compared
# so return False
return False
else:
return n._prec != 1
@property
def func(self):
"""
The top-level function in an expression.
The following should hold for all objects::
>> x == x.func(*x.args)
Examples
========
>>> from sympy.abc import x
>>> a = 2*x
>>> a.func
<class 'sympy.core.mul.Mul'>
>>> a.args
(2, x)
>>> a.func(*a.args)
2*x
>>> a == a.func(*a.args)
True
"""
return self.__class__
@property
def args(self):
"""Returns a tuple of arguments of 'self'.
Examples
========
>>> from sympy import cot
>>> from sympy.abc import x, y
>>> cot(x).args
(x,)
>>> cot(x).args[0]
x
>>> (x*y).args
(x, y)
>>> (x*y).args[1]
y
Notes
=====
Never use self._args, always use self.args.
Only use _args in __new__ when creating a new function.
Don't override .args() from Basic (so that it's easy to
change the interface in the future if needed).
"""
return self._args
@property
def _sorted_args(self):
"""
The same as ``args``. Derived classes which don't fix an
order on their arguments should override this method to
produce the sorted representation.
"""
return self.args
def as_content_primitive(self, radical=False, clear=True):
"""A stub to allow Basic args (like Tuple) to be skipped when computing
the content and primitive components of an expression.
See Also
========
sympy.core.expr.Expr.as_content_primitive
"""
return S.One, self
def subs(self, *args, **kwargs):
"""
Substitutes old for new in an expression after sympifying args.
`args` is either:
- two arguments, e.g. foo.subs(old, new)
- one iterable argument, e.g. foo.subs(iterable). The iterable may be
o an iterable container with (old, new) pairs. In this case the
replacements are processed in the order given with successive
patterns possibly affecting replacements already made.
o a dict or set whose key/value items correspond to old/new pairs.
In this case the old/new pairs will be sorted by op count and in
case of a tie, by number of args and the default_sort_key. The
resulting sorted list is then processed as an iterable container
(see previous).
If the keyword ``simultaneous`` is True, the subexpressions will not be
evaluated until all the substitutions have been made.
Examples
========
>>> from sympy import pi, exp, limit, oo
>>> from sympy.abc import x, y
>>> (1 + x*y).subs(x, pi)
pi*y + 1
>>> (1 + x*y).subs({x:pi, y:2})
1 + 2*pi
>>> (1 + x*y).subs([(x, pi), (y, 2)])
1 + 2*pi
>>> reps = [(y, x**2), (x, 2)]
>>> (x + y).subs(reps)
6
>>> (x + y).subs(reversed(reps))
x**2 + 2
>>> (x**2 + x**4).subs(x**2, y)
y**2 + y
To replace only the x**2 but not the x**4, use xreplace:
>>> (x**2 + x**4).xreplace({x**2: y})
x**4 + y
To delay evaluation until all substitutions have been made,
set the keyword ``simultaneous`` to True:
>>> (x/y).subs([(x, 0), (y, 0)])
0
>>> (x/y).subs([(x, 0), (y, 0)], simultaneous=True)
nan
This has the added feature of not allowing subsequent substitutions
to affect those already made:
>>> ((x + y)/y).subs({x + y: y, y: x + y})
1
>>> ((x + y)/y).subs({x + y: y, y: x + y}, simultaneous=True)
y/(x + y)
In order to obtain a canonical result, unordered iterables are
sorted by count_op length, number of arguments and by the
default_sort_key to break any ties. All other iterables are left
unsorted.
>>> from sympy import sqrt, sin, cos
>>> from sympy.abc import a, b, c, d, e
>>> A = (sqrt(sin(2*x)), a)
>>> B = (sin(2*x), b)
>>> C = (cos(2*x), c)
>>> D = (x, d)
>>> E = (exp(x), e)
>>> expr = sqrt(sin(2*x))*sin(exp(x)*x)*cos(2*x) + sin(2*x)
>>> expr.subs(dict([A, B, C, D, E]))
a*c*sin(d*e) + b
The resulting expression represents a literal replacement of the
old arguments with the new arguments. This may not reflect the
limiting behavior of the expression:
>>> (x**3 - 3*x).subs({x: oo})
nan
>>> limit(x**3 - 3*x, x, oo)
oo
If the substitution will be followed by numerical
evaluation, it is better to pass the substitution to
evalf as
>>> (1/x).evalf(subs={x: 3.0}, n=21)
0.333333333333333333333
rather than
>>> (1/x).subs({x: 3.0}).evalf(21)
0.333333333333333314830
as the former will ensure that the desired level of precision is
obtained.
See Also
========
replace: replacement capable of doing wildcard-like matching,
parsing of match, and conditional replacements
xreplace: exact node replacement in expr tree; also capable of
using matching rules
sympy.core.evalf.EvalfMixin.evalf: calculates the given formula to a desired level of precision
"""
from sympy.core.containers import Dict
from sympy.utilities.iterables import sift
from sympy import Dummy, Symbol
unordered = False
if len(args) == 1:
sequence = args[0]
if isinstance(sequence, set):
unordered = True
elif isinstance(sequence, (Dict, Mapping)):
unordered = True
sequence = sequence.items()
elif not iterable(sequence):
from sympy.utilities.misc import filldedent
raise ValueError(filldedent("""
When a single argument is passed to subs
it should be a dictionary of old: new pairs or an iterable
of (old, new) tuples."""))
elif len(args) == 2:
sequence = [args]
else:
raise ValueError("subs accepts either 1 or 2 arguments")
sequence = list(sequence)
for i, s in enumerate(sequence):
if isinstance(s[0], str):
# when old is a string we prefer Symbol
s = Symbol(s[0]), s[1]
try:
s = [sympify(_, strict=not isinstance(_, str))
for _ in s]
except SympifyError:
# if it can't be sympified, skip it
sequence[i] = None
continue
# skip if there is no change
sequence[i] = None if _aresame(*s) else tuple(s)
sequence = list(filter(None, sequence))
if unordered:
sequence = dict(sequence)
atoms, nonatoms = sift(list(sequence),
lambda x: x.is_Atom, binary=True)
sequence = [(k, sequence[k]) for k in
list(reversed(list(ordered(nonatoms)))) + list(ordered(atoms))]
if kwargs.pop('simultaneous', False): # XXX should this be the default for dict subs?
reps = {}
rv = self
kwargs['hack2'] = True
m = Dummy('subs_m')
for old, new in sequence:
com = new.is_commutative
if com is None:
com = True
d = Dummy('subs_d', commutative=com)
# using d*m so Subs will be used on dummy variables
# in things like Derivative(f(x, y), x) in which x
# is both free and bound
rv = rv._subs(old, d*m, **kwargs)
if not isinstance(rv, Basic):
break
reps[d] = new
reps[m] = S.One # get rid of m
return rv.xreplace(reps)
else:
rv = self
for old, new in sequence:
rv = rv._subs(old, new, **kwargs)
if not isinstance(rv, Basic):
break
return rv
@cacheit
def _subs(self, old, new, **hints):
"""Substitutes an expression old -> new.
If self is not equal to old then _eval_subs is called.
If _eval_subs doesn't want to make any special replacement
then a None is received which indicates that the fallback
should be applied wherein a search for replacements is made
amongst the arguments of self.
>>> from sympy import Add
>>> from sympy.abc import x, y, z
Examples
========
Add's _eval_subs knows how to target x + y in the following
so it makes the change:
>>> (x + y + z).subs(x + y, 1)
z + 1
Add's _eval_subs doesn't need to know how to find x + y in
the following:
>>> Add._eval_subs(z*(x + y) + 3, x + y, 1) is None
True
The returned None will cause the fallback routine to traverse the args and
pass the z*(x + y) arg to Mul where the change will take place and the
substitution will succeed:
>>> (z*(x + y) + 3).subs(x + y, 1)
z + 3
** Developers Notes **
An _eval_subs routine for a class should be written if:
1) any arguments are not instances of Basic (e.g. bool, tuple);
2) some arguments should not be targeted (as in integration
variables);
3) if there is something other than a literal replacement
that should be attempted (as in Piecewise where the condition
may be updated without doing a replacement).
If it is overridden, here are some special cases that might arise:
1) If it turns out that no special change was made and all
the original sub-arguments should be checked for
replacements then None should be returned.
2) If it is necessary to do substitutions on a portion of
the expression then _subs should be called. _subs will
handle the case of any sub-expression being equal to old
(which usually would not be the case) while its fallback
will handle the recursion into the sub-arguments. For
example, after Add's _eval_subs removes some matching terms
it must process the remaining terms so it calls _subs
on each of the un-matched terms and then adds them
onto the terms previously obtained.
3) If the initial expression should remain unchanged then
the original expression should be returned. (Whenever an
expression is returned, modified or not, no further
substitution of old -> new is attempted.) Sum's _eval_subs
routine uses this strategy when a substitution is attempted
on any of its summation variables.
"""
def fallback(self, old, new):
"""
Try to replace old with new in any of self's arguments.
"""
hit = False
args = list(self.args)
for i, arg in enumerate(args):
if not hasattr(arg, '_eval_subs'):
continue
arg = arg._subs(old, new, **hints)
if not _aresame(arg, args[i]):
hit = True
args[i] = arg
if hit:
rv = self.func(*args)
hack2 = hints.get('hack2', False)
if hack2 and self.is_Mul and not rv.is_Mul: # 2-arg hack
coeff = S.One
nonnumber = []
for i in args:
if i.is_Number:
coeff *= i
else:
nonnumber.append(i)
nonnumber = self.func(*nonnumber)
if coeff is S.One:
return nonnumber
else:
return self.func(coeff, nonnumber, evaluate=False)
return rv
return self
if _aresame(self, old):
return new
rv = self._eval_subs(old, new)
if rv is None:
rv = fallback(self, old, new)
return rv
def _eval_subs(self, old, new):
"""Override this stub if you want to do anything more than
attempt a replacement of old with new in the arguments of self.
See also
========
_subs
"""
return None
def xreplace(self, rule):
"""
Replace occurrences of objects within the expression.
Parameters
==========
rule : dict-like
Expresses a replacement rule
Returns
=======
xreplace : the result of the replacement
Examples
========
>>> from sympy import symbols, pi, exp
>>> x, y, z = symbols('x y z')
>>> (1 + x*y).xreplace({x: pi})
pi*y + 1
>>> (1 + x*y).xreplace({x: pi, y: 2})
1 + 2*pi
Replacements occur only if an entire node in the expression tree is
matched:
>>> (x*y + z).xreplace({x*y: pi})
z + pi
>>> (x*y*z).xreplace({x*y: pi})
x*y*z
>>> (2*x).xreplace({2*x: y, x: z})
y
>>> (2*2*x).xreplace({2*x: y, x: z})
4*z
>>> (x + y + 2).xreplace({x + y: 2})
x + y + 2
>>> (x + 2 + exp(x + 2)).xreplace({x + 2: y})
x + exp(y) + 2
xreplace doesn't differentiate between free and bound symbols. In the
following, subs(x, y) would not change x since it is a bound symbol,
but xreplace does:
>>> from sympy import Integral
>>> Integral(x, (x, 1, 2*x)).xreplace({x: y})
Integral(y, (y, 1, 2*y))
Trying to replace x with an expression raises an error:
>>> Integral(x, (x, 1, 2*x)).xreplace({x: 2*y}) # doctest: +SKIP
ValueError: Invalid limits given: ((2*y, 1, 4*y),)
See Also
========
replace: replacement capable of doing wildcard-like matching,
parsing of match, and conditional replacements
subs: substitution of subexpressions as defined by the objects
themselves.
"""
value, _ = self._xreplace(rule)
return value
def _xreplace(self, rule):
"""
Helper for xreplace. Tracks whether a replacement actually occurred.
"""
if self in rule:
return rule[self], True
elif rule:
args = []
changed = False
for a in self.args:
_xreplace = getattr(a, '_xreplace', None)
if _xreplace is not None:
a_xr = _xreplace(rule)
args.append(a_xr[0])
changed |= a_xr[1]
else:
args.append(a)
args = tuple(args)
if changed:
return self.func(*args), True
return self, False
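    # For illustration (an assumption, not a SymPy doctest): _xreplace returns a
    # (new_expr, changed) pair, e.g. (x + y)._xreplace({x: 2}) -> (y + 2, True)
    # while (x + y)._xreplace({z: 2}) -> (x + y, False).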
@cacheit
def has(self, *patterns):
"""
Test whether any subexpression matches any of the patterns.
Examples
========
>>> from sympy import sin
>>> from sympy.abc import x, y, z
>>> (x**2 + sin(x*y)).has(z)
False
>>> (x**2 + sin(x*y)).has(x, y, z)
True
>>> x.has(x)
True
Note ``has`` is a structural algorithm with no knowledge of
mathematics. Consider the following half-open interval:
>>> from sympy.sets import Interval
>>> i = Interval.Lopen(0, 5); i
Interval.Lopen(0, 5)
>>> i.args
(0, 5, True, False)
>>> i.has(4) # there is no "4" in the arguments
False
>>> i.has(0) # there *is* a "0" in the arguments
True
Instead, use ``contains`` to determine whether a number is in the
interval or not:
>>> i.contains(4)
True
>>> i.contains(0)
False
Note that ``expr.has(*patterns)`` is exactly equivalent to
``any(expr.has(p) for p in patterns)``. In particular, ``False`` is
returned when the list of patterns is empty.
>>> x.has()
False
"""
return any(self._has(pattern) for pattern in patterns)
def _has(self, pattern):
"""Helper for .has()"""
from sympy.core.function import UndefinedFunction, Function
if isinstance(pattern, UndefinedFunction):
return any(f.func == pattern or f == pattern
for f in self.atoms(Function, UndefinedFunction))
pattern = sympify(pattern)
if isinstance(pattern, BasicMeta):
return any(isinstance(arg, pattern)
for arg in preorder_traversal(self))
_has_matcher = getattr(pattern, '_has_matcher', None)
if _has_matcher is not None:
match = _has_matcher()
return any(match(arg) for arg in preorder_traversal(self))
else:
return any(arg == pattern for arg in preorder_traversal(self))
def _has_matcher(self):
"""Helper for .has()"""
return lambda other: self == other
def replace(self, query, value, map=False, simultaneous=True, exact=None):
"""
Replace matching subexpressions of ``self`` with ``value``.
If ``map = True`` then also return the mapping {old: new} where ``old``
was a sub-expression found with query and ``new`` is the replacement
value for it. If the expression itself doesn't match the query, then
the returned value will be ``self.xreplace(map)`` otherwise it should
be ``self.subs(ordered(map.items()))``.
Traverses an expression tree and performs replacement of matching
subexpressions from the bottom to the top of the tree. The default
approach is to do the replacement in a simultaneous fashion so
changes made are targeted only once. If this is not desired or causes
problems, ``simultaneous`` can be set to False.
In addition, if an expression containing more than one Wild symbol
is being used to match subexpressions and the ``exact`` flag is None
it will be set to True so the match will only succeed if all non-zero
values are received for each Wild that appears in the match pattern.
Setting this to False accepts a match of 0; while setting it True
        rejects all matches that have a 0 in them. See example below for
cautions.
The list of possible combinations of queries and replacement values
is listed below:
Examples
========
Initial setup
>>> from sympy import log, sin, cos, tan, Wild, Mul, Add
>>> from sympy.abc import x, y
>>> f = log(sin(x)) + tan(sin(x**2))
1.1. type -> type
obj.replace(type, newtype)
When object of type ``type`` is found, replace it with the
result of passing its argument(s) to ``newtype``.
>>> f.replace(sin, cos)
log(cos(x)) + tan(cos(x**2))
>>> sin(x).replace(sin, cos, map=True)
(cos(x), {sin(x): cos(x)})
>>> (x*y).replace(Mul, Add)
x + y
1.2. type -> func
obj.replace(type, func)
When object of type ``type`` is found, apply ``func`` to its
argument(s). ``func`` must be written to handle the number
of arguments of ``type``.
>>> f.replace(sin, lambda arg: sin(2*arg))
log(sin(2*x)) + tan(sin(2*x**2))
>>> (x*y).replace(Mul, lambda *args: sin(2*Mul(*args)))
sin(2*x*y)
2.1. pattern -> expr
obj.replace(pattern(wild), expr(wild))
Replace subexpressions matching ``pattern`` with the expression
written in terms of the Wild symbols in ``pattern``.
>>> a, b = map(Wild, 'ab')
>>> f.replace(sin(a), tan(a))
log(tan(x)) + tan(tan(x**2))
>>> f.replace(sin(a), tan(a/2))
log(tan(x/2)) + tan(tan(x**2/2))
>>> f.replace(sin(a), a)
log(x) + tan(x**2)
>>> (x*y).replace(a*x, a)
y
Matching is exact by default when more than one Wild symbol
is used: matching fails unless the match gives non-zero
values for all Wild symbols:
>>> (2*x + y).replace(a*x + b, b - a)
y - 2
>>> (2*x).replace(a*x + b, b - a)
2*x
When set to False, the results may be non-intuitive:
>>> (2*x).replace(a*x + b, b - a, exact=False)
2/x
2.2. pattern -> func
obj.replace(pattern(wild), lambda wild: expr(wild))
All behavior is the same as in 2.1 but now a function in terms of
pattern variables is used rather than an expression:
>>> f.replace(sin(a), lambda a: sin(2*a))
log(sin(2*x)) + tan(sin(2*x**2))
3.1. func -> func
obj.replace(filter, func)
Replace subexpression ``e`` with ``func(e)`` if ``filter(e)``
is True.
>>> g = 2*sin(x**3)
>>> g.replace(lambda expr: expr.is_Number, lambda expr: expr**2)
4*sin(x**9)
The expression itself is also targeted by the query but is done in
such a fashion that changes are not made twice.
>>> e = x*(x*y + 1)
>>> e.replace(lambda x: x.is_Mul, lambda x: 2*x)
2*x*(2*x*y + 1)
When matching a single symbol, `exact` will default to True, but
this may or may not be the behavior that is desired:
Here, we want `exact=False`:
>>> from sympy import Function
>>> f = Function('f')
>>> e = f(1) + f(0)
>>> q = f(a), lambda a: f(a + 1)
>>> e.replace(*q, exact=False)
f(1) + f(2)
>>> e.replace(*q, exact=True)
f(0) + f(2)
But here, the nature of matching makes selecting
the right setting tricky:
>>> e = x**(1 + y)
>>> (x**(1 + y)).replace(x**(1 + a), lambda a: x**-a, exact=False)
1
>>> (x**(1 + y)).replace(x**(1 + a), lambda a: x**-a, exact=True)
x**(-x - y + 1)
>>> (x**y).replace(x**(1 + a), lambda a: x**-a, exact=False)
1
>>> (x**y).replace(x**(1 + a), lambda a: x**-a, exact=True)
x**(1 - y)
It is probably better to use a different form of the query
that describes the target expression more precisely:
>>> (1 + x**(1 + y)).replace(
... lambda x: x.is_Pow and x.exp.is_Add and x.exp.args[0] == 1,
... lambda x: x.base**(1 - (x.exp - 1)))
...
x**(1 - y) + 1
See Also
========
subs: substitution of subexpressions as defined by the objects
themselves.
xreplace: exact node replacement in expr tree; also capable of
using matching rules
"""
from sympy.core.symbol import Dummy, Wild
from sympy.simplify.simplify import bottom_up
try:
query = _sympify(query)
except SympifyError:
pass
try:
value = _sympify(value)
except SympifyError:
pass
if isinstance(query, type):
_query = lambda expr: isinstance(expr, query)
if isinstance(value, type):
_value = lambda expr, result: value(*expr.args)
elif callable(value):
_value = lambda expr, result: value(*expr.args)
else:
raise TypeError(
"given a type, replace() expects another "
"type or a callable")
elif isinstance(query, Basic):
_query = lambda expr: expr.match(query)
if exact is None:
exact = (len(query.atoms(Wild)) > 1)
if isinstance(value, Basic):
if exact:
_value = lambda expr, result: (value.subs(result)
if all(result.values()) else expr)
else:
_value = lambda expr, result: value.subs(result)
elif callable(value):
# match dictionary keys get the trailing underscore stripped
# from them and are then passed as keywords to the callable;
# if ``exact`` is True, only accept match if there are no null
# values amongst those matched.
if exact:
_value = lambda expr, result: (value(**
{str(k)[:-1]: v for k, v in result.items()})
if all(val for val in result.values()) else expr)
else:
_value = lambda expr, result: value(**
{str(k)[:-1]: v for k, v in result.items()})
else:
raise TypeError(
"given an expression, replace() expects "
"another expression or a callable")
elif callable(query):
_query = query
if callable(value):
_value = lambda expr, result: value(expr)
else:
raise TypeError(
"given a callable, replace() expects "
"another callable")
else:
raise TypeError(
"first argument to replace() must be a "
"type, an expression or a callable")
mapping = {} # changes that took place
mask = [] # the dummies that were used as change placeholders
def rec_replace(expr):
result = _query(expr)
if result or result == {}:
new = _value(expr, result)
if new is not None and new != expr:
mapping[expr] = new
if simultaneous:
# don't let this change during rebuilding;
# XXX this may fail if the object being replaced
# cannot be represented as a Dummy in the expression
# tree, e.g. an ExprConditionPair in Piecewise
# cannot be represented with a Dummy
com = getattr(new, 'is_commutative', True)
if com is None:
com = True
d = Dummy('rec_replace', commutative=com)
mask.append((d, new))
expr = d
else:
expr = new
return expr
rv = bottom_up(self, rec_replace, atoms=True)
# restore original expressions for Dummy symbols
if simultaneous:
mask = list(reversed(mask))
for o, n in mask:
r = {o: n}
# if a sub-expression could not be replaced with
# a Dummy then this will fail; either filter
# against such sub-expressions or figure out a
# way to carry out simultaneous replacement
# in this situation.
rv = rv.xreplace(r) # if this fails, see above
if not map:
return rv
else:
if simultaneous:
# restore subexpressions in mapping
for o, n in mask:
r = {o: n}
mapping = {k.xreplace(r): v.xreplace(r)
for k, v in mapping.items()}
return rv, mapping
def find(self, query, group=False):
"""Find all subexpressions matching a query. """
query = _make_find_query(query)
results = list(filter(query, preorder_traversal(self)))
if not group:
return set(results)
else:
groups = {}
for result in results:
if result in groups:
groups[result] += 1
else:
groups[result] = 1
return groups
def count(self, query):
"""Count the number of matching subexpressions. """
query = _make_find_query(query)
return sum(bool(query(sub)) for sub in preorder_traversal(self))
def matches(self, expr, repl_dict={}, old=False):
"""
Helper method for match() that looks for a match between Wild symbols
in self and expressions in expr.
Examples
========
>>> from sympy import symbols, Wild, Basic
>>> a, b, c = symbols('a b c')
>>> x = Wild('x')
>>> Basic(a + x, x).matches(Basic(a + b, c)) is None
True
>>> Basic(a + x, x).matches(Basic(a + b + c, b + c))
{x_: b + c}
"""
expr = sympify(expr)
if not isinstance(expr, self.__class__):
return None
if self == expr:
return repl_dict
if len(self.args) != len(expr.args):
return None
d = repl_dict.copy()
for arg, other_arg in zip(self.args, expr.args):
if arg == other_arg:
continue
d = arg.xreplace(d).matches(other_arg, d, old=old)
if d is None:
return None
return d
def match(self, pattern, old=False):
"""
Pattern matching.
Wild symbols match all.
Return ``None`` when expression (self) does not match
with pattern. Otherwise return a dictionary such that::
pattern.xreplace(self.match(pattern)) == self
Examples
========
>>> from sympy import Wild
>>> from sympy.abc import x, y
>>> p = Wild("p")
>>> q = Wild("q")
>>> r = Wild("r")
>>> e = (x+y)**(x+y)
>>> e.match(p**p)
{p_: x + y}
>>> e.match(p**q)
{p_: x + y, q_: x + y}
>>> e = (2*x)**2
>>> e.match(p*q**r)
{p_: 4, q_: x, r_: 2}
>>> (p*q**r).xreplace(e.match(p*q**r))
4*x**2
The ``old`` flag will give the old-style pattern matching where
expressions and patterns are essentially solved to give the
match. Both of the following give None unless ``old=True``:
>>> (x - 2).match(p - x, old=True)
{p_: 2*x - 2}
>>> (2/x).match(p*x, old=True)
{p_: 2/x**2}
"""
pattern = sympify(pattern)
return pattern.matches(self, old=old)
def count_ops(self, visual=None):
"""wrapper for count_ops that returns the operation count."""
from sympy import count_ops
return count_ops(self, visual)
def doit(self, **hints):
"""Evaluate objects that are not evaluated by default like limits,
integrals, sums and products. All objects of this kind will be
evaluated recursively, unless some species were excluded via 'hints'
or unless the 'deep' hint was set to 'False'.
>>> from sympy import Integral
>>> from sympy.abc import x
>>> 2*Integral(x, x)
2*Integral(x, x)
>>> (2*Integral(x, x)).doit()
x**2
>>> (2*Integral(x, x)).doit(deep=False)
2*Integral(x, x)
"""
if hints.get('deep', True):
terms = [term.doit(**hints) if isinstance(term, Basic) else term
for term in self.args]
return self.func(*terms)
else:
return self
def simplify(self, **kwargs):
"""See the simplify function in sympy.simplify"""
from sympy.simplify import simplify
return simplify(self, **kwargs)
def _eval_rewrite(self, pattern, rule, **hints):
if self.is_Atom:
if hasattr(self, rule):
return getattr(self, rule)()
return self
if hints.get('deep', True):
args = [a._eval_rewrite(pattern, rule, **hints)
if isinstance(a, Basic) else a
for a in self.args]
else:
args = self.args
if pattern is None or isinstance(self, pattern):
if hasattr(self, rule):
rewritten = getattr(self, rule)(*args, **hints)
if rewritten is not None:
return rewritten
return self.func(*args) if hints.get('evaluate', True) else self
def _accept_eval_derivative(self, s):
# This method needs to be overridden by array-like objects
return s._visit_eval_derivative_scalar(self)
def _visit_eval_derivative_scalar(self, base):
# Base is a scalar
# Types are (base: scalar, self: scalar)
return base._eval_derivative(self)
def _visit_eval_derivative_array(self, base):
# Types are (base: array/matrix, self: scalar)
# Base is some kind of array/matrix,
        # it should have `.applyfunc(lambda x: x.diff(self))` implemented:
return base._eval_derivative_array(self)
def _eval_derivative_n_times(self, s, n):
# This is the default evaluator for derivatives (as called by `diff`
# and `Derivative`), it will attempt a loop to derive the expression
# `n` times by calling the corresponding `_eval_derivative` method,
# while leaving the derivative unevaluated if `n` is symbolic. This
# method should be overridden if the object has a closed form for its
# symbolic n-th derivative.
from sympy import Integer
if isinstance(n, (int, Integer)):
obj = self
for i in range(n):
obj2 = obj._accept_eval_derivative(s)
if obj == obj2 or obj2 is None:
break
obj = obj2
return obj2
else:
return None
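    # Illustrative sketch (added for clarity, not in the original source): with an
    # integer count the loop above applies _eval_derivative repeatedly, e.g.
    #     (x**3).diff(x, 2)  ->  6*x
    # while a symbolic count falls through to the ``return None`` branch, so
    # something like Derivative(f(x), (x, m)) is simply left unevaluated.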
def rewrite(self, *args, **hints):
""" Rewrite functions in terms of other functions.
Rewrites expression containing applications of functions
of one kind in terms of functions of different kind. For
example you can rewrite trigonometric functions as complex
exponentials or combinatorial functions as gamma function.
    As a pattern this function accepts a list of functions to
    rewrite (instances of DefinedFunction class). As a rule you
    can use a string or a destination function instance (in
    this case rewrite() will use the str() function).
    There is also the possibility to pass hints on how to rewrite
    the given expressions. For now there is only one such hint
    defined, called 'deep'. When 'deep' is set to False it will
    prevent functions from rewriting their contents.
Examples
========
>>> from sympy import sin, exp
>>> from sympy.abc import x
Unspecified pattern:
>>> sin(x).rewrite(exp)
-I*(exp(I*x) - exp(-I*x))/2
Pattern as a single function:
>>> sin(x).rewrite(sin, exp)
-I*(exp(I*x) - exp(-I*x))/2
Pattern as a list of functions:
>>> sin(x).rewrite([sin, ], exp)
-I*(exp(I*x) - exp(-I*x))/2
"""
if not args:
return self
else:
pattern = args[:-1]
if isinstance(args[-1], str):
rule = '_eval_rewrite_as_' + args[-1]
else:
# rewrite arg is usually a class but can also be a
# singleton (e.g. GoldenRatio) so we check
# __name__ or __class__.__name__
clsname = getattr(args[-1], "__name__", None)
if clsname is None:
clsname = args[-1].__class__.__name__
rule = '_eval_rewrite_as_' + clsname
if not pattern:
return self._eval_rewrite(None, rule, **hints)
else:
if iterable(pattern[0]):
pattern = pattern[0]
pattern = [p for p in pattern if self.has(p)]
if pattern:
return self._eval_rewrite(tuple(pattern), rule, **hints)
else:
return self
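    # Illustrative note (added for clarity, not in the original source): the rule
    # name is derived from the last argument, so e.g. sin(x).rewrite(exp) resolves
    # to the method name '_eval_rewrite_as_exp' and calls it on each matching
    # node, as the docstring examples above show.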
_constructor_postprocessor_mapping = {} # type: ignore
@classmethod
def _exec_constructor_postprocessors(cls, obj):
# WARNING: This API is experimental.
# This is an experimental API that introduces constructor
        # postprocessors for SymPy Core elements. If an argument of a SymPy
# expression has a `_constructor_postprocessor_mapping` attribute, it will
# be interpreted as a dictionary containing lists of postprocessing
# functions for matching expression node names.
clsname = obj.__class__.__name__
postprocessors = defaultdict(list)
for i in obj.args:
try:
postprocessor_mappings = (
Basic._constructor_postprocessor_mapping[cls].items()
for cls in type(i).mro()
if cls in Basic._constructor_postprocessor_mapping
)
for k, v in chain.from_iterable(postprocessor_mappings):
postprocessors[k].extend([j for j in v if j not in postprocessors[k]])
except TypeError:
pass
for f in postprocessors.get(clsname, []):
obj = f(obj)
return obj
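# Illustrative sketch (an assumption added for clarity, not part of this module):
# a downstream class could opt in to the experimental postprocessor hook roughly
# like this, so that every Add built from its instances is passed through the
# listed functions by _exec_constructor_postprocessors above:
#
#     class MyExpr(Symbol):
#         pass
#
#     Basic._constructor_postprocessor_mapping[MyExpr] = {
#         "Add": [lambda expr: expr],  # replace the identity with a real hook
#     }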
class Atom(Basic):
"""
A parent class for atomic things. An atom is an expression with no subexpressions.
Examples
========
Symbol, Number, Rational, Integer, ...
But not: Add, Mul, Pow, ...
"""
is_Atom = True
__slots__ = ()
def matches(self, expr, repl_dict={}, old=False):
if self == expr:
return repl_dict
def | (self, rule, hack2=False):
return rule.get(self, self)
def doit(self, **hints):
return self
@classmethod
def class_key(cls):
return 2, 0, cls.__name__
@cacheit
def sort_key(self, order=None):
return self.class_key(), (1, (str(self),)), S.One.sort_key(), S.One
def _eval_simplify(self, **kwargs):
return self
@property
def _sorted_args(self):
# this is here as a safeguard against accidentally using _sorted_args
# on Atoms -- they cannot be rebuilt as atom.func(*atom._sorted_args)
# since there are no args. So the calling routine should be checking
# to see that this property is not called for Atoms.
raise AttributeError('Atoms have no args. It might be necessary'
' to make a check for Atoms in the calling code.')
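# Illustrative check (added for clarity, not in the original module): atoms have
# empty .args, which is what distinguishes them from compound expressions:
#
#     >>> from sympy import Symbol, Integer
#     >>> Symbol('x').args
#     ()
#     >>> Integer(2).is_Atom
#     True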
def _aresame(a, b):
"""Return True if a and b are structurally the same, else False.
Examples
========
In SymPy (as in Python) two numbers compare the same if they
have the same underlying base-2 representation even though
they may not be the same type:
>>> from sympy import S
>>> 2.0 == S(2)
True
>>> 0.5 == S.Half
True
This routine was written to provide a query for such cases that
would give false when the types do not match:
>>> from sympy.core.basic import _aresame
>>> _aresame(S(2.0), S(2))
False
"""
from .numbers import Number
from .function import AppliedUndef, UndefinedFunction as UndefFunc
if isinstance(a, Number) and isinstance(b, Number):
return a == b and a.__class__ == b.__class__
for i, j in zip_longest(preorder_traversal(a), preorder_traversal(b)):
if i != j or type(i) != type(j):
if ((isinstance(i, UndefFunc) and isinstance(j, UndefFunc)) or
(isinstance(i, AppliedUndef) and isinstance(j, AppliedUndef))):
if i.class_key() != j.class_key():
return False
else:
return False
return True
def _atomic(e, recursive=False):
"""Return atom-like quantities as far as substitution is
    concerned: Derivatives, Functions and Symbols. Don't
    return any 'atoms' that are inside such quantities unless
    they also appear outside them, or unless ``recursive`` is True.
Examples
========
>>> from sympy import Derivative, Function, cos
>>> from sympy.abc import x, y
>>> from sympy.core.basic import _atomic
>>> f = Function('f')
>>> _atomic(x + y)
{x, y}
>>> _atomic(x + f(y))
{x, f(y)}
>>> _atomic(Derivative(f(x), x) + cos(x) + y)
{y, cos(x), Derivative(f(x), x)}
"""
from sympy import Derivative, Function, Symbol
pot = preorder_traversal(e)
seen = set()
if isinstance(e, Basic):
free = getattr(e, "free_symbols", None)
if free is None:
return {e}
else:
return set()
atoms = set()
for p in pot:
if p in seen:
pot.skip()
continue
seen.add(p)
if isinstance(p, Symbol) and p in free:
atoms.add(p)
elif isinstance(p, (Derivative, Function)):
if not recursive:
pot.skip()
atoms.add(p)
return atoms
class preorder_traversal:
"""
Do a pre-order traversal of a tree.
This iterator recursively yields nodes that it has visited in a pre-order
    fashion. That is, it yields the current node and then descends depth-first
    through the tree, yielding each child's pre-order traversal in turn.
For an expression, the order of the traversal depends on the order of
.args, which in many cases can be arbitrary.
Parameters
==========
node : sympy expression
The expression to traverse.
keys : (default None) sort key(s)
        The key(s) used to sort args of Basic objects. When None, args of Basic
        objects are processed in arbitrary order. If ``keys`` is defined, it will
        be passed along to ordered() as the only key(s) to use to sort the
        arguments; if ``keys`` is simply True then the default keys of ordered
        will be used.
Yields
======
subtree : sympy expression
All of the subtrees in the tree.
Examples
========
>>> from sympy import symbols
>>> from sympy.core.basic import preorder_traversal
>>> x, y, z = symbols('x y z')
    The nodes are returned in the order that they are encountered unless ``keys``
    is given; simply passing ``keys=True`` will guarantee that the traversal is
    unique.
>>> list(preorder_traversal((x + y)*z, keys=None)) # doctest: +SKIP
[z*(x + y), z, x + y, y, x]
>>> list(preorder_traversal((x + y)*z, keys=True))
[z*(x + y), z, x + y, x, y]
"""
def __init__(self, node, keys=None):
self._skip_flag = False
self._pt = self._preorder_traversal(node, keys)
def _preorder_traversal(self, node, keys):
yield node
if self._skip_flag:
self._skip_flag = False
return
if isinstance(node, Basic):
if not keys and hasattr(node, '_argset'):
# LatticeOp keeps args as a set. We should use this if we
# don't care about the order, to prevent unnecessary sorting.
args = node._argset
else:
args = node.args
if keys:
if keys != True:
args = ordered(args, keys, default=False)
else:
args = ordered(args)
for arg in args:
yield from self._preorder_traversal(arg, keys)
elif iterable(node):
for item in node:
yield from self._preorder_traversal(item, keys)
def skip(self):
"""
Skip yielding current node's (last yielded node's) subtrees.
Examples
========
>>> from sympy.core import symbols
>>> from sympy.core.basic import preorder_traversal
>>> x, y, z = symbols('x y z')
>>> pt = preorder_traversal((x+y*z)*z)
>>> for i in pt:
... print(i)
... if i == x+y*z:
... pt.skip()
z*(x + y*z)
z
x + y*z
"""
self._skip_flag = True
def __next__(self):
return next(self._pt)
def __iter__(self):
return self
def _make_find_query(query):
"""Convert the argument of Basic.find() into a callable"""
try:
query = _sympify(query)
except SympifyError:
pass
if isinstance(query, type):
return lambda expr: isinstance(expr, query)
elif isinstance(query, Basic):
return lambda expr: expr.match(query) is not None
return query
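# Illustrative usage (added for clarity, not in the original module): the three
# query forms accepted by Basic.find()/count() after _make_find_query
# normalization:
#
#     >>> from sympy import Symbol, Wild, cos
#     >>> from sympy.abc import x, y
#     >>> (cos(x) + y).find(Symbol)                     # query by type
#     {x, y}
#     >>> (cos(x) + y).find(cos(Wild('w')))             # query by pattern
#     {cos(x)}
#     >>> (cos(x) + y).count(lambda e: e.is_Function)   # query by callable
#     1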
| xreplace |
converters.js | import { parseUrl } from './parser';
export function modelToViewUrlAttributeConverter( registry, options, domain ) {
return dispatcher => {
dispatcher.on( 'attribute:url:preview', converter );
};
function | ( evt, data, conversionApi ) {
if ( !conversionApi.consumable.consume( data.item, evt.name ) ) {
return;
}
const url = data.attributeNewValue;
const information = parseUrl( domain, url );
const viewWriter = conversionApi.writer;
const figure = conversionApi.mapper.toViewElement( data.item );
viewWriter.remove( viewWriter.createRangeIn( figure ) );
const linkViewElement = registry.getLinkViewElement( viewWriter, url, options, information );
viewWriter.insert( viewWriter.createPositionAt( figure, 0 ), linkViewElement );
}
}
| converter |
Main.tsx | // Copyright 2019-2022 @polkadot/extension-ui authors & contributors
// SPDX-License-Identifier: Apache-2.0 | import styled from 'styled-components';
interface Props {
children: React.ReactNode;
className?: string;
}
function Main ({ children, className }: Props): React.ReactElement<Props> {
return (
<main className={className}>
{children}
</main>
);
}
export default styled(Main)(({ theme }: ThemeProps) => `
display: flex;
flex-direction: column;
height: 100%;
background: ${theme.background};
color: ${theme.textColor};
font-size: ${theme.fontSize};
line-height: ${theme.lineHeight};
border: 1px solid ${theme.extensionBorder};
* {
font-family: ${theme.fontFamily};
::-webkit-scrollbar-thumb {
background: ${theme.scrollBarThumb};
}
::-webkit-scrollbar-thumb:window-inactive {
background: ${theme.scrollBarThumbInactive};
}
::-webkit-scrollbar-thumb:hover {
background: ${theme.scrollBarThumbHover};
}
}
`); |
import type { ThemeProps } from '../types';
import React from 'react'; |
helpers.d.ts | import Servient from "./servient";
export default class Helpers {
private srv;
constructor(srv: Servient);
private static staticAddress;
static extractScheme(uri: string): string;
static setStaticAddress(address: string): void;
static getAddresses(): Array<string>;
static toUriLiteral(address: string): string;
static generateUniqueName(name: string): string;
| fetch(uri: string): Promise<WoT.ThingDescription>;
static extend<T, U>(first: T, second: U): T & U;
} |
|
fake_trigger.go | /*
Copyright 2019 The Tekton Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package fake
import (
"context"
v1alpha1 "github.com/dongwenjuan/triggers/pkg/apis/triggers/v1alpha1"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
labels "k8s.io/apimachinery/pkg/labels"
schema "k8s.io/apimachinery/pkg/runtime/schema"
types "k8s.io/apimachinery/pkg/types"
watch "k8s.io/apimachinery/pkg/watch"
testing "k8s.io/client-go/testing"
)
// FakeTriggers implements TriggerInterface
type FakeTriggers struct {
Fake *FakeTriggersV1alpha1
ns string
}
var triggersResource = schema.GroupVersionResource{Group: "triggers.tekton.dev", Version: "v1alpha1", Resource: "triggers"}
var triggersKind = schema.GroupVersionKind{Group: "triggers.tekton.dev", Version: "v1alpha1", Kind: "Trigger"}
// Get takes name of the trigger, and returns the corresponding trigger object, and an error if there is any.
func (c *FakeTriggers) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.Trigger, err error) {
obj, err := c.Fake.
Invokes(testing.NewGetAction(triggersResource, c.ns, name), &v1alpha1.Trigger{})
if obj == nil {
return nil, err
}
return obj.(*v1alpha1.Trigger), err
}
// List takes label and field selectors, and returns the list of Triggers that match those selectors.
func (c *FakeTriggers) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.TriggerList, err error) {
obj, err := c.Fake.
Invokes(testing.NewListAction(triggersResource, triggersKind, c.ns, opts), &v1alpha1.TriggerList{})
if obj == nil {
return nil, err
}
label, _, _ := testing.ExtractFromListOptions(opts)
if label == nil {
label = labels.Everything()
}
list := &v1alpha1.TriggerList{ListMeta: obj.(*v1alpha1.TriggerList).ListMeta}
for _, item := range obj.(*v1alpha1.TriggerList).Items {
if label.Matches(labels.Set(item.Labels)) {
list.Items = append(list.Items, item)
}
}
return list, err
}
// Watch returns a watch.Interface that watches the requested triggers.
func (c *FakeTriggers) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) {
return c.Fake.
InvokesWatch(testing.NewWatchAction(triggersResource, c.ns, opts))
}
// Create takes the representation of a trigger and creates it. Returns the server's representation of the trigger, and an error, if there is any.
func (c *FakeTriggers) Create(ctx context.Context, trigger *v1alpha1.Trigger, opts v1.CreateOptions) (result *v1alpha1.Trigger, err error) {
obj, err := c.Fake.
Invokes(testing.NewCreateAction(triggersResource, c.ns, trigger), &v1alpha1.Trigger{})
if obj == nil {
return nil, err
}
return obj.(*v1alpha1.Trigger), err
}
// Update takes the representation of a trigger and updates it. Returns the server's representation of the trigger, and an error, if there is any.
func (c *FakeTriggers) Update(ctx context.Context, trigger *v1alpha1.Trigger, opts v1.UpdateOptions) (result *v1alpha1.Trigger, err error) {
obj, err := c.Fake.
Invokes(testing.NewUpdateAction(triggersResource, c.ns, trigger), &v1alpha1.Trigger{})
if obj == nil {
return nil, err
}
return obj.(*v1alpha1.Trigger), err
}
// Delete takes name of the trigger and deletes it. Returns an error if one occurs.
func (c *FakeTriggers) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error {
_, err := c.Fake.
Invokes(testing.NewDeleteAction(triggersResource, c.ns, name), &v1alpha1.Trigger{})
return err
}
// DeleteCollection deletes a collection of objects.
func (c *FakeTriggers) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {
action := testing.NewDeleteCollectionAction(triggersResource, c.ns, listOpts)
_, err := c.Fake.Invokes(action, &v1alpha1.TriggerList{})
return err
}
// Patch applies the patch and returns the patched trigger.
func (c *FakeTriggers) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.Trigger, err error) {
obj, err := c.Fake.
Invokes(testing.NewPatchSubresourceAction(triggersResource, c.ns, name, pt, data, subresources...), &v1alpha1.Trigger{})
| return obj.(*v1alpha1.Trigger), err
} | if obj == nil {
return nil, err
} |
homepage.tsx | import React from 'react';
import { Link } from "react-router-dom";
import { makeStyles, useTheme } from '@material-ui/core/styles';
import { Typography, Container, Paper, CircularProgress } from "@material-ui/core";
import axios from 'axios';
// Import jquery datatables.net
import 'datatables.net-dt/css/jquery.dataTables.min.css'
const $ = require('jquery');
$.DataTable = require('datatables.net');
import Yasgui from "@triply/yasgui";
import "@triply/yasgui/build/yasgui.min.css";
import { Graph, drawLine } from "perfect-graph";
import { ApplicationProvider } from 'unitx-ui';
import CytoscapeComponent from 'react-cytoscapejs';
import Cytoscape from 'cytoscape';
import Cola from 'cytoscape-cola';
import LinkDescribe from '../components/LinkDescribe';
import About from './About';
// import { data } from "@solid/query-ldflex";
// import data from "@solid/query-ldflex";
// import { LoggedIn, LoggedOut, Value, useWebId } from '@solid/react';
// import { Like } from '@solid/react';
// import SolidStar from "./SolidStar";
// import {newEngine} from '@comunica/actor-init-sparql';
// import {ActorInitSparql} from '@comunica/actor-init-sparql/lib/ActorInitSparql-browser';
// import {IQueryOptions, newEngineDynamicArged} from "@comunica/actor-init-sparql/lib/QueryDynamic";
Cytoscape.use(Cola);
const useStyles = makeStyles(theme => ({
margin: {
marginTop: theme.spacing(2),
marginBottom: theme.spacing(2),
// textAlign: 'center',
},
paperSearch: {
padding: '2px 4px',
display: 'flex',
alignItems: 'center',
width: '30%',
},
searchInput: {
marginLeft: theme.spacing(1),
width: '50%',
fontSize: '14px',
flex: 1,
},
link: {
textDecoration: 'none',
textTransform: 'none',
color: theme.palette.primary.main,
'&:hover': {
color: theme.palette.secondary.main,
textDecoration: 'none',
},
},
paperPadding: {
padding: theme.spacing(2, 2),
margin: theme.spacing(2, 0),
},
cardSubtitle: {
fontSize: 14,
marginTop: 8,
},
loadSpinner: {
padding: theme.spacing(10, 10)
},
}))
export default function Homepage() {
const classes = useStyles();
const theme = useTheme();
const [state, setState] = React.useState({
isLoading: true,
describe_endpoint: '',
webid: '',
projects_list: [],
search: '',
get_all_graphs_results: [],
hcls_overview_results: [],
entities_relations_overview_results: [],
graph_data: {nodes: [], edges: []},
cytoscape_elements: [],
repositories_hash: {},
category_pie: {}
});
const stateRef = React.useRef(state);
  // Avoid conflicts when async calls update the state
  // Could also be done with another lib (cf. Turgay)
const updateState = React.useCallback((update) => {
stateRef.current = {...stateRef.current, ...update};
setState(stateRef.current);
}, [setState]);
// Check SOLID pod for a user preferences file
// https://github.com/solid/react-components/blob/master/demo/app.jsx
// https://solid.github.io/react-components/
// useLocation hook to get SOLID WebID
// let solid_webid = useWebId();
// function createEmptyDocument() {
// // const location = "/public/into-the-graph/preferences.ttl";
// const webId = useWebId();
// // console.log("webId!!");
// // console.log(webId);
// // return data[webId + location].put();
// }
// async function WebIdStatus() {
// updateState({webid: useWebId()})
// // const webId = useWebId();
// // .replace("profile/card#me", "public/into-the-graph/preferences.ttl");
// // const location = webId.replace("profile/card#me", "public/into-the-graph/preferences.ttl");
// // return data[webId + location].put();
// return <span>Preferences stored at {webId}.</span>;
// }
// TODO: fix to use webid hook
// async function createEmptyDocument(location: any) {
// // webId.replace("profile/card#me", "public/into-the-graph/preferences.ttl");
// return data[location].put();
// }
function displayTableCell(stringToDisplay: any) {
if (stringToDisplay) {
return stringToDisplay.value;
} else {
return 'Not computed';
}
}
// Run at start of the page
React.useEffect(() => {
let describe_endpoint = '';
    // Get sparql_endpoint from the localStorage entry intothegraphSettings
if (!describe_endpoint) {
const localStorageConfig = localStorage.getItem("intothegraphSettings");
if (localStorageConfig) {
let configState: any = JSON.parse(localStorageConfig);
describe_endpoint = configState.sparql_endpoint;
}
}
if (!describe_endpoint) {
// If no endpoint found in localStorage
describe_endpoint = 'https://graphdb.dumontierlab.com/repositories/ncats-red-kg';
// describe_endpoint = 'https://bio2rdf.org/sparql';
}
updateState({ describe_endpoint: describe_endpoint });
Yasgui.defaults.requestConfig.endpoint = describe_endpoint;
// @ts-ignore If endpoint and query provided
let yasgui: any = new Yasgui(document.getElementById('yasguiDiv'), {
requestConfig: { endpoint: describe_endpoint },
copyEndpointOnNewTab: true,
});
// yasgui.addTab(
// true, // set as active tab
// { ...Yasgui.Tab.getDefaults(), yasqe: { value: props.query }}
// );
axios.get(describe_endpoint + `?query=` + encodeURIComponent(get_all_graphs_query))
.then((res: any) => {
console.log('after get all graphs');
console.log(res);
if (res.data.results){
updateState( { get_all_graphs_results: res.data.results.bindings } );
// updateState({ graphsLoading: false });
// $(this.refs.graphsOverview).DataTable();
$('#datatableAllGraphs').DataTable({
"autoWidth": false
});
}
})
.catch((error: any) => {
console.log('Query to get all graphs failed');
console.log(error);
});
axios.get(describe_endpoint + `?query=` + encodeURIComponent(hcls_overview_query))
.then((res: any) => {
if (res.data.results){
updateState( { hcls_overview_results: res.data.results.bindings } );
// updateState({ graphsLoading: false });
// $(this.refs.graphsOverview).DataTable();
$('#datatableHclsOverview').DataTable({
"autoWidth": false
});
}
})
.catch((error: any) => {
console.log('Query to get HCLS stats overview failed');
console.log(error);
});
axios.get(describe_endpoint + `?query=` + encodeURIComponent(entities_relations_query))
.then((res: any) => {
if (res.data.results){
updateState( { entities_relations_overview_results: res.data.results.bindings } );
// updateState({ graphsLoading: false });
// $(this.refs.graphsOverview).DataTable();
$('#datatableEntitiesRelationOverview').DataTable({
"autoWidth": false
});
let graph_nodes: any = {}
let graph_edges: any = {}
let cytoscape_elements: any = []
let node_count = 1;
let edge_count = 0;
const edge_max = 100;
// Prepare perfect graph and cytoscape data
res.data.results.bindings.forEach((result_row: any) => {
let subject_count = 1;
if (result_row.subjectCount) {
subject_count = result_row.subjectCount.value;
}
// Add subject node to hash if not present
if (!(result_row.subject.value in graph_nodes)) {
// If not already in array
graph_nodes[result_row.subject.value] = {
id: result_row.subject.value,
position: { x: node_count * 80, y: node_count * 100 },
data: { uri: result_row.subject.value, color: 'red', size: subject_count },
};
// cytoscape_elements.push({ data: {
// id: result_row.subject.value,
// label: result_row.subject.value,
// size: result_row.subjectCount.value
// } })
node_count += 1;
} else {
graph_nodes[result_row.subject.value].data.size += subject_count;
}
let object_count = 1;
if (result_row.objectCount) {
object_count = result_row.objectCount.value;
}
// Add object node
if (result_row.object) {
if (!(result_row.object.value in graph_nodes)) {
// If not already in array
graph_nodes[result_row.object.value] = {
id: result_row.object.value,
position: { x: node_count * 80, y: node_count * 40 },
data: { uri: result_row.object.value, color: 'green', size: object_count },
};
// cytoscape_elements.push({ data: {
// id: result_row.object.value,
// label: result_row.object.value,
// size: result_row.objectCount.value
// } })
node_count += 1;
} else {
graph_nodes[result_row.object.value].data.size += object_count;
}
}
// Add edge between the 2 nodes
if (result_row.object && edge_count < edge_max) {
const edge_id = result_row.subject.value + result_row.predicate.value + result_row.object.value;
if (!(edge_id in graph_edges)) {
if (!(result_row.object.value === result_row.subject.value)) {
// Prevents link to itself (too confusing currently)
graph_edges[edge_id] = {
id: edge_id,
source: result_row.subject.value,
target: result_row.object.value,
data: { uri: result_row.predicate.value, color: 'green' }
};
cytoscape_elements.push({ data: {
source: result_row.subject.value,
target: result_row.object.value,
label: result_row.predicate.value
} })
}
edge_count += 1
}
}
})
// Convert graph nodes and edges objects to arrays
const graph_nodes_array = Object.keys(graph_nodes).map(function(node_id){
cytoscape_elements.push({ data: {
id: node_id,
label: node_id,
size: graph_nodes[node_id].data.size
} })
return graph_nodes[node_id];
});
const graph_edges_array = Object.keys(graph_edges).map(function(edge_id){
// cytoscape_elements.push({ data: {
// source: graph_edges[edge_id].source,
// target: graph_edges[edge_id].target,
// label: graph_edges[edge_id].data.uri
// } })
return graph_edges[edge_id];
});
console.log('Graph nodes and edges data');
console.log(graph_nodes_array);
console.log(graph_edges);
updateState({
graph_data: { nodes: graph_nodes_array, edges: graph_edges_array },
cytoscape_elements: cytoscape_elements,
isLoading: false
})
}
})
.catch((error: any) => {
console.log('Query to get all HCLS entities-relations infos FAILED:');
console.log(error);
});
}, [])
  // This empty dependency array needs to be added for React to understand it needs to use the state inside...
// }, [solid_webid])
// Trying out the SOLID webId hook
const get_all_graphs_query = `SELECT DISTINCT ?graph WHERE { GRAPH ?graph {?s ?p ?o} }`;
  // TODO: For Bio2RDF the documented queries fail
// https://github.com/bio2rdf/bio2rdf-scripts/wiki/Bio2RDF-Dataset-Summary-Statistics
const hcls_overview_query = `PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX dct: <http://purl.org/dc/terms/>
PREFIX dctypes: <http://purl.org/dc/dcmitype/>
PREFIX dcat: <http://www.w3.org/ns/dcat#>
PREFIX void: <http://rdfs.org/ns/void#>
PREFIX dc: <http://purl.org/dc/elements/1.1/>
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
SELECT DISTINCT ?graph ?name ?description ?homepage ?dateGenerated ?statements ?entities ?properties ?classes
WHERE {
GRAPH ?metadataGraph {
?graph a void:Dataset .
OPTIONAL {
?dataset a dctypes:Dataset ;
dct:title ?name ;
dct:description ?description ;
foaf:page ?homepage .
?version dct:isVersionOf ?dataset ;
dcat:distribution ?graph .
}
OPTIONAL {
?graph void:triples ?statements ;
void:entities ?entities ;
void:properties ?properties .
}
OPTIONAL {
?graph dct:created ?dateGenerated .
}
OPTIONAL {
?graph void:classPartition [
void:class rdfs:Class ;
void:distinctSubjects ?classes
] .
}
}
} ORDER BY DESC(?statements)`;
const entities_relations_query = `PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX dct: <http://purl.org/dc/terms/>
PREFIX bl: <http://w3id.org/biolink/vocab/>
PREFIX dctypes: <http://purl.org/dc/dcmitype/>
PREFIX idot: <http://identifiers.org/idot/>
PREFIX dcat: <http://www.w3.org/ns/dcat#>
PREFIX void: <http://rdfs.org/ns/void#>
PREFIX dc: <http://purl.org/dc/elements/1.1/>
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
PREFIX void-ext: <http://ldf.fi/void-ext#>
SELECT DISTINCT ?metadataGraph ?graph ?subjectCount ?subject ?predicate ?objectCount ?object
WHERE {
GRAPH ?metadataGraph {
# ?graph a void:Dataset .
?graph void:propertyPartition [
void:property ?predicate ;
void:classPartition [
void:class ?subject ;
void:distinctSubjects ?subjectCount ;
];
void-ext:objectClassPartition [
void:class ?object ;
void:distinctObjects ?objectCount ;
]
] .
}
} ORDER BY DESC(?subjectCount)`;
// Change Cytoscape layout
// https://js.cytoscape.org/#layouts
const cytoscape_layout = {
name: 'cola',
nodeSpacing: 400,
edgeLengthVal: 1500,
animate: false,
randomize: false,
maxSimulationTime: 1500
}
// const cytoscape_layout = {
// name: 'concentric',
// minNodeSpacing: 200
// };
// const cytoscape_layout = { name: 'breadthfirst' };
// const cytoscape_layout = {
// name: 'cose',
// animate: 'end',
// fit: true,
// componentSpacing: 1000,
// nodeOverlap: 10,
// nodeRepulsion: function( node: any ){ return 4092; },
// idealEdgeLength: function( edge: any ){ return 300; },
// };
return(
<Container className='mainContainer'>
<About />
{/* <Paper elevation={4} className={classes.paperPadding}>
<Typography variant="body1" className={classes.margin}>
Provide the <b>URI to describe</b>, and the <b>SPARQL endpoint</b> queried in the URL parameters, such as:
</Typography>
<Typography variant="h5" className={classes.margin}>
<Link to={{
pathname: '/describe',
search: '?uri=https://identifiers.org/drugbank:DB00002&endpoint=https://graphdb.dumontierlab.com/repositories/ncats-red-kg',
// search: '?uri=http://bio2rdf.org/clinicaltrials:NCT00209495&endpoint=https://bio2rdf.org/sparql',
}} className={classes.link}>
/describe?uri=https://identifiers.org/drugbank:DB00002&endpoint=https://graphdb.dumontierlab.com/repositories/ncats-red-kg
</Link>
</Typography>
</Paper>
<Typography variant="body1" className={classes.margin} style={{textAlign: 'left', marginTop: theme.spacing(5) }}>
      <b>Into the Graph</b> provides a simple RDF web browser that just needs a SPARQL endpoint URL to resolve URIs and explore the available linked data.
</Typography>
<Typography variant="body1" className={classes.margin} style={{ textAlign: 'left' }}>
This linked data browser features:
      <br/>🔎 A web-based UI to browse any SPARQL endpoint's content easily
<br/>🕸️ Native support for graphs (nquads)
<br/>🏗️ Work in progress: visualize and browse concepts using <a href='https://perfectgraph-5c619.web.app' target='_blank' rel="noopener noreferrer"><code>perfect-graph</code></a>
<br/>🚧 Work in progress: insights about the content of the triplestore and its different graphs, using precomputed HCLS descriptives statistics
</Typography>
<Typography variant="body1" className={classes.margin} style={{textAlign: 'left'}}>
Other relevant libraries:
</Typography>
<ul style={{textAlign: 'left'}}>
<li><Typography variant="body1">
<a href='https://github.com/micheldumontier/torres-api-platform/' className={classes.link} target='_blank' rel="noopener noreferrer">TORRES API platform</a> to store HCLS descriptive metadata for your dataset
</Typography></li>
<li><Typography variant="body1">
<a href='https://github.com/MaastrichtU-IDS/fair-metadata' className={classes.link} target='_blank' rel="noopener noreferrer">FAIR metadata</a> python lib: to generate HCLS descriptive metadata for your dataset
</Typography></li>
<li><Typography variant="body1">
<a href='https://github.com/MaastrichtU-IDS/d2s-project-template/tree/master/datasets/preppi' className={classes.link} target='_blank' rel="noopener noreferrer">Data2Services workflows</a> to generate RDF knowledge graphs from structured data using RML (RDF Mapping Language)
</Typography></li>
</ul> */}
{/* Display YASGUI */}
<Paper elevation={4} className={classes.paperPadding} style={{ textAlign: 'left', marginTop: theme.spacing(4) }}>
<div id="yasguiDiv"></div>
</Paper>
{/* Display a datatable with subject, predicate, object, graph retrieved */}
{Object.keys(state.get_all_graphs_results).length > 0 && (<>
<Typography variant="h5" className={classes.margin} style={{ marginTop: theme.spacing(6) }}>
<a href={state.describe_endpoint} className={classes.link} >{state.describe_endpoint}</a> endpoint overview
</Typography>
<Paper elevation={4} className={classes.paperPadding}>
<table id='datatableAllGraphs' style={{ wordBreak: 'break-all' }}>
<thead>
<tr>
<th>Graphs</th>
</tr>
</thead>
<tbody>
{/* Iterate Describe query results array */}
{state.get_all_graphs_results.map((row: any, key: number) => {
// return <Tooltip title={displayDescription(row.name, row.description)} key={key}>
return <tr key={key}>
<td><LinkDescribe variant='body2' uri={row.graph.value}/></td>
</tr>
{/* </Tooltip>; */}
})}
</tbody>
</table>
</Paper>
</>)}
{Object.keys(state.hcls_overview_results).length > 0 && (<>
<Typography variant="h5" className={classes.margin} style={{ marginTop: theme.spacing(6) }}>
Endpoint <b>descriptive metadata</b> (<a href={state.describe_endpoint} className={classes.link}>HCLS</a>)
</Typography>
<Paper elevation={4} className={classes.paperPadding}>
<table id='datatableHclsOverview' style={{ wordBreak: 'break-all' }}>
<thead>
<tr>
<th>Graph</th>
<th>Date generated</th>
<th># of triples</th>
<th># of entities</th>
<th># of properties</th>
<th># of classes</th>
</tr>
</thead>
<tbody>
{/* Iterate Describe query results array */}
{state.hcls_overview_results.map((row: any, key: number) => {
// return <Tooltip title={displayDescription(row.name, row.description)} key={key}>
return <tr key={key}>
<td><LinkDescribe variant='body2' uri={row.graph.value}/></td>
<td><Typography variant="body2">{displayTableCell(row.dateGenerated)}</Typography></td>
<td><Typography variant="body2">{displayTableCell(row.statements)}</Typography></td>
<td><Typography variant="body2">{displayTableCell(row.entities)}</Typography></td>
<td><Typography variant="body2">{displayTableCell(row.properties)}</Typography></td>
<td><Typography variant="body2">{displayTableCell(row.classes)}</Typography></td>
</tr>
{/* </Tooltip>; */}
})}
</tbody>
</table>
</Paper>
</>)}
<Paper elevation={4} className={classes.paperPadding}>
{state.isLoading && (
<CircularProgress className={classes.loadSpinner} />
)}
{state.graph_data.nodes.length > 0 && (<>
<Typography variant="h5" className={classes.margin} style={{ marginTop: theme.spacing(6) }}>
<b>Entities-relations</b> metadata (<a href={state.describe_endpoint} className={classes.link}>HCLS</a>)
</Typography>
<Typography variant="body1" className={classes.margin} style={{ marginTop: theme.spacing(6) }}>
<a href='https://perfectgraph-5c619.web.app/' className={classes.link} target='_blank' rel="noopener noreferrer">
<b>Perfect Graph</b>
</a> visualization
</Typography>
<Paper elevation={4} className={classes.paperPadding}>
<ApplicationProvider>
<Graph
style={{ width: '100%', height: 800 }}
config={{ layout: Graph.Layouts.euler }}
nodes={state.graph_data.nodes}
edges={state.graph_data.edges}
// nodes={[
// {
// id: '1',
// position: { x: 10, y: 10 },
// data: { city: 'Amsterdam', color: 'red' },
// },
// {
// id: '2',
// position: { x: 300, y: 10 },
// data: { city: 'Maastricht', color: 'blue' },
// },
// ]}
// edges={[
// { id: '51', source: '1', target: '2' },
// ]}
// drawLine={({ graphics, to, from }) => {
// drawLine({
// graphics,
// to,
// from,
// directed: true
// // type: 'bezier'
// })
// }}
renderNode={({ item: { data } }: any) => (
<Graph.View
style={{ width: 100, height: 100, backgroundColor: data.color }}
>
<Graph.Text style={{ fontSize: 16 }}>
{data.uri.substring(data.uri.lastIndexOf('/') + 1)}
</Graph.Text>
{/* <LinkDescribe variant='body2' uri={data.uri}/> */}
</Graph.View>
)}
/>
</ApplicationProvider>
</Paper>
</> )}
{state.graph_data.nodes.length > 0 && (<>
<Typography variant="body1" className={classes.margin} style={{ marginTop: theme.spacing(6) }}>
<a href='https://github.com/plotly/react-cytoscapejs' className={classes.link} target='_blank' rel="noopener noreferrer">
<b>Cytoscape JS</b>
</a> visualization
</Typography>
<Paper elevation={4} className={classes.paperPadding} style={{ height: '80vh', textAlign: 'left' }}>
<CytoscapeComponent elements={state.cytoscape_elements} layout={cytoscape_layout}
style={{ width: '100%', height: '100%', }}
stylesheet={[
{
selector: 'edge',
style: {
'label': 'data(label)',
'color': '#546e7a', // Grey
'text-wrap': 'wrap',
'font-size': '18px',
'text-opacity': 0.9,
'target-arrow-shape': 'triangle',
// 'line-color': '#ccc',
// 'target-arrow-color': '#ccc',
// Control multi edge on 2 nodes:
'curve-style': 'bezier',
'control-point-step-size': 300,
}
},
{
selector: 'node',
style: {
'label': 'data(label)',
'text-wrap': 'wrap',
'font-size': '30px',
// width: 15,
// 'width': 'data(size)',
// 'height': 'data(size)',
// shape: 'rectangle'
}
}
]}
/>
</Paper>
</> )}
{Object.keys(state.entities_relations_overview_results).length > 0 && (<>
<Typography variant="body1" className={classes.margin} style={{ marginTop: theme.spacing(6) }}>
<a href='https://datatables.net' className={classes.link} target='_blank' rel="noopener noreferrer">
Datatable
</a>
</Typography>
<Paper elevation={4} className={classes.paperPadding}>
<table id='datatableEntitiesRelationOverview' style={{ wordBreak: 'break-all' }}>
<thead>
<tr>
<th>Graph</th>
<th># of instance of subject</th>
<th>Subject class</th>
<th>Have relation</th>
<th>With Object class</th>
<th># of instance of object</th>
</tr> | {state.entities_relations_overview_results.map((row: any, key: number) => {
return <tr key={key}>
<td><LinkDescribe uri={row.graph.value} variant='body2'/></td>
<td><Typography variant="body2">{displayTableCell(row.subjectCount)}</Typography></td>
<td><LinkDescribe uri={row.subject.value} variant='body2'/></td>
<td><LinkDescribe uri={row.predicate.value} variant='body2'/></td>
{row.object && (
<td><LinkDescribe uri={row.object.value} variant='body2'/></td>
)}
{!row.object && (
<td><Typography variant="body2">Not found</Typography></td>
)}
<td><Typography variant="body2">{displayTableCell(row.objectCount)}</Typography></td>
</tr>
})}
</tbody>
</table>
</Paper>
</>)}
</Paper>
{/* <LoggedIn>
<Typography style={{textAlign: 'center', marginBottom: '20px'}}>
Welcome <Value src="user.name"/>!
</Typography>
<Typography style={{textAlign: 'center', marginBottom: '20px'}}>
Soon you will be able to use your SOLID account!
</Typography>
</LoggedIn>
<LoggedOut>
<Typography style={{textAlign: 'center', marginBottom: '20px'}}>
Welcome
</Typography>
</LoggedOut> */}
</Container>
)
} | </thead>
<tbody>
{/* Iterate Describe query results array */} |
__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
| """
@File : __init__.py.py
@Time : 2020/3/27 22:36
@Author : Empty Chan
@Contact : [email protected]
@Description:
@License : (C) Copyright 2016-2020, iFuture Corporation Limited.
"""
from . import * | |
types.rs | use crate::{
encode_functype, encode_section, ComponentSection, ComponentSectionId, Encode, EntityType,
ValType,
};
/// Represents a module type.
#[derive(Debug, Clone, Default)]
pub struct ModuleType {
bytes: Vec<u8>,
num_added: u32,
types_added: u32,
}
impl ModuleType {
/// Creates a new module type.
pub fn new() -> Self {
Self::default()
}
/// Define a function in this module type.
pub fn function<P, R>(&mut self, params: P, results: R) -> &mut Self
where
P: IntoIterator<Item = ValType>,
P::IntoIter: ExactSizeIterator,
R: IntoIterator<Item = ValType>,
R::IntoIter: ExactSizeIterator,
{
self.bytes.push(0x01);
encode_functype(&mut self.bytes, params, results);
self.num_added += 1;
self.types_added += 1;
self
}
/// Defines an import in this module type.
pub fn import(&mut self, module: &str, name: &str, ty: EntityType) -> &mut Self {
self.bytes.push(0x02);
module.encode(&mut self.bytes);
name.encode(&mut self.bytes);
ty.encode(&mut self.bytes);
self.num_added += 1;
self
}
/// Defines an export in this module type.
pub fn export(&mut self, name: &str, ty: EntityType) -> &mut Self {
self.bytes.push(0x07);
name.encode(&mut self.bytes);
ty.encode(&mut self.bytes);
self.num_added += 1;
self
}
/// Gets the number of types that have been added to this module type.
pub fn type_count(&self) -> u32 {
self.types_added
}
}
impl Encode for ModuleType {
fn encode(&self, sink: &mut Vec<u8>) {
self.num_added.encode(sink);
sink.extend(&self.bytes);
}
}
/// Represents a component type.
#[derive(Debug, Clone, Default)]
pub struct ComponentType {
bytes: Vec<u8>,
num_added: u32,
types_added: u32,
}
impl ComponentType {
/// Creates a new component type.
pub fn new() -> Self {
Self::default()
}
/// Define a type in this component type.
///
/// The returned encoder must be finished before adding another definition.
#[must_use = "the encoder must be used to encode the type"]
pub fn ty(&mut self) -> TypeEncoder {
self.bytes.push(0x01);
self.num_added += 1;
self.types_added += 1;
TypeEncoder(&mut self.bytes)
}
/// Defines an import in this component type.
///
/// The type is expected to be an index to a previously defined or aliased type.
pub fn import(&mut self, name: &str, ty: u32) -> &mut Self {
self.bytes.push(0x02);
name.encode(&mut self.bytes);
ty.encode(&mut self.bytes);
self.num_added += 1;
self
}
/// Defines an export in this component type.
///
/// The type is expected to be an index to a previously defined or aliased type.
pub fn export(&mut self, name: &str, ty: u32) -> &mut Self {
self.bytes.push(0x07);
name.encode(&mut self.bytes);
ty.encode(&mut self.bytes);
self.num_added += 1;
self
}
/// Defines an alias to an outer type in this component type.
pub fn alias_outer_type(&mut self, count: u32, index: u32) -> &mut Self {
self.bytes.push(0x09);
self.bytes.push(0x02);
self.bytes.push(0x05);
count.encode(&mut self.bytes);
index.encode(&mut self.bytes);
self.num_added += 1;
self.types_added += 1;
self
}
/// Gets the number of types that have been added or aliased in this component type.
pub fn type_count(&self) -> u32 {
self.types_added
}
}
impl Encode for ComponentType {
fn encode(&self, sink: &mut Vec<u8>) {
self.num_added.encode(sink);
sink.extend(&self.bytes);
}
}
/// Represents an instance type.
#[derive(Debug, Clone, Default)]
pub struct InstanceType {
bytes: Vec<u8>,
num_added: u32,
types_added: u32,
}
impl InstanceType {
/// Creates a new instance type.
pub fn new() -> Self |
/// Define a type in this instance type.
///
/// The returned encoder must be finished before adding another definition.
#[must_use = "the encoder must be used to encode the type"]
pub fn ty(&mut self) -> TypeEncoder {
self.bytes.push(0x01);
self.num_added += 1;
self.types_added += 1;
TypeEncoder(&mut self.bytes)
}
/// Defines an export in this instance type.
///
/// The type is expected to be an index to a previously defined or aliased type.
pub fn export(&mut self, name: &str, ty: u32) -> &mut Self {
self.bytes.push(0x07);
name.encode(&mut self.bytes);
ty.encode(&mut self.bytes);
self.num_added += 1;
self
}
/// Defines an alias to an outer type in this instance type.
pub fn alias_outer_type(&mut self, count: u32, index: u32) -> &mut Self {
self.bytes.push(0x09);
self.bytes.push(0x02);
self.bytes.push(0x05);
count.encode(&mut self.bytes);
index.encode(&mut self.bytes);
self.num_added += 1;
self.types_added += 1;
self
}
/// Gets the number of types that have been added or aliased in this instance type.
pub fn type_count(&self) -> u32 {
self.types_added
}
}
impl Encode for InstanceType {
fn encode(&self, sink: &mut Vec<u8>) {
self.num_added.encode(sink);
sink.extend(&self.bytes);
}
}
/// Used to encode types.
#[derive(Debug)]
pub struct TypeEncoder<'a>(&'a mut Vec<u8>);
impl<'a> TypeEncoder<'a> {
/// Define a module type.
pub fn module(self, ty: &ModuleType) {
self.0.push(0x4f);
ty.encode(self.0);
}
/// Define a component type.
pub fn component(self, ty: &ComponentType) {
self.0.push(0x4e);
ty.encode(self.0);
}
/// Define an instance type.
pub fn instance(self, ty: &InstanceType) {
self.0.push(0x4d);
ty.encode(self.0);
}
/// Define a function type.
pub fn function<'b, P, T>(self, params: P, result: impl Into<InterfaceTypeRef>)
where
P: IntoIterator<Item = (Option<&'b str>, T)>,
P::IntoIter: ExactSizeIterator,
T: Into<InterfaceTypeRef>,
{
let params = params.into_iter();
self.0.push(0x4c);
params.len().encode(self.0);
for (name, ty) in params {
match name {
Some(name) => {
self.0.push(0x01);
name.encode(self.0);
}
None => self.0.push(0x00),
}
ty.into().encode(self.0);
}
result.into().encode(self.0);
}
/// Define a value type.
pub fn value(self, ty: impl Into<InterfaceTypeRef>) {
self.0.push(0x4b);
ty.into().encode(self.0);
}
/// Define an interface type.
///
/// The returned encoder must be finished before adding another type.
#[must_use = "the encoder must be used to encode the type"]
pub fn interface_type(self) -> InterfaceTypeEncoder<'a> {
InterfaceTypeEncoder(self.0)
}
}
/// Represents a primitive interface type.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum PrimitiveInterfaceType {
/// The type is the unit type.
Unit,
/// The type is a boolean.
Bool,
/// The type is a signed 8-bit integer.
S8,
/// The type is an unsigned 8-bit integer.
U8,
/// The type is a signed 16-bit integer.
S16,
/// The type is an unsigned 16-bit integer.
U16,
/// The type is a signed 32-bit integer.
S32,
/// The type is an unsigned 32-bit integer.
U32,
/// The type is a signed 64-bit integer.
S64,
/// The type is an unsigned 64-bit integer.
U64,
/// The type is a 32-bit floating point number.
Float32,
/// The type is a 64-bit floating point number.
Float64,
/// The type is a Unicode character.
Char,
/// The type is a string.
String,
}
impl Encode for PrimitiveInterfaceType {
fn encode(&self, sink: &mut Vec<u8>) {
sink.push(match self {
Self::Unit => 0x7f,
Self::Bool => 0x7e,
Self::S8 => 0x7d,
Self::U8 => 0x7c,
Self::S16 => 0x7b,
Self::U16 => 0x7a,
Self::S32 => 0x79,
Self::U32 => 0x78,
Self::S64 => 0x77,
Self::U64 => 0x76,
Self::Float32 => 0x75,
Self::Float64 => 0x74,
Self::Char => 0x73,
Self::String => 0x72,
});
}
}
/// Represents a reference to an interface type.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum InterfaceTypeRef {
/// The reference is to a primitive type.
Primitive(PrimitiveInterfaceType),
/// The reference is to a type index.
///
/// The type index must be to an interface type.
Type(u32),
}
impl Encode for InterfaceTypeRef {
fn encode(&self, sink: &mut Vec<u8>) {
match self {
Self::Primitive(ty) => ty.encode(sink),
Self::Type(index) => (*index as i64).encode(sink),
}
}
}
impl From<PrimitiveInterfaceType> for InterfaceTypeRef {
fn from(ty: PrimitiveInterfaceType) -> Self {
Self::Primitive(ty)
}
}
/// Used for encoding interface types.
#[derive(Debug)]
pub struct InterfaceTypeEncoder<'a>(&'a mut Vec<u8>);
impl InterfaceTypeEncoder<'_> {
/// Define a primitive interface type.
pub fn primitive(self, ty: PrimitiveInterfaceType) {
ty.encode(self.0);
}
/// Define a record type.
pub fn record<'a, F, T>(self, fields: F)
where
F: IntoIterator<Item = (&'a str, T)>,
F::IntoIter: ExactSizeIterator,
T: Into<InterfaceTypeRef>,
{
let fields = fields.into_iter();
self.0.push(0x71);
fields.len().encode(self.0);
for (name, ty) in fields {
name.encode(self.0);
ty.into().encode(self.0);
}
}
/// Define a variant type.
pub fn variant<'a, C, T>(self, cases: C)
where
C: IntoIterator<Item = (&'a str, T, Option<u32>)>,
C::IntoIter: ExactSizeIterator,
T: Into<InterfaceTypeRef>,
{
let cases = cases.into_iter();
self.0.push(0x70);
cases.len().encode(self.0);
for (name, ty, default_to) in cases {
name.encode(self.0);
ty.into().encode(self.0);
if let Some(default) = default_to {
self.0.push(0x01);
default.encode(self.0);
} else {
self.0.push(0x00);
}
}
}
/// Define a list type.
pub fn list(self, ty: impl Into<InterfaceTypeRef>) {
self.0.push(0x6f);
ty.into().encode(self.0);
}
/// Define a tuple type.
pub fn tuple<I, T>(self, types: I)
where
I: IntoIterator<Item = T>,
I::IntoIter: ExactSizeIterator,
T: Into<InterfaceTypeRef>,
{
let types = types.into_iter();
self.0.push(0x6E);
types.len().encode(self.0);
for ty in types {
ty.into().encode(self.0);
}
}
/// Define a flags type.
pub fn flags<'a, I>(self, names: I)
where
I: IntoIterator<Item = &'a str>,
I::IntoIter: ExactSizeIterator,
{
let names = names.into_iter();
self.0.push(0x6D);
names.len().encode(self.0);
for name in names {
name.encode(self.0);
}
}
/// Define an enum type.
pub fn enum_type<'a, I>(self, tags: I)
where
I: IntoIterator<Item = &'a str>,
I::IntoIter: ExactSizeIterator,
{
let tags = tags.into_iter();
self.0.push(0x6C);
tags.len().encode(self.0);
for tag in tags {
tag.encode(self.0);
}
}
/// Define a union type.
pub fn union<I, T>(self, types: I)
where
I: IntoIterator<Item = T>,
I::IntoIter: ExactSizeIterator,
T: Into<InterfaceTypeRef>,
{
let types = types.into_iter();
self.0.push(0x6B);
types.len().encode(self.0);
for ty in types {
ty.into().encode(self.0);
}
}
/// Define an option type.
pub fn option(self, ty: impl Into<InterfaceTypeRef>) {
self.0.push(0x6A);
ty.into().encode(self.0);
}
/// Define an expected type.
pub fn expected(self, ok: impl Into<InterfaceTypeRef>, error: impl Into<InterfaceTypeRef>) {
self.0.push(0x69);
ok.into().encode(self.0);
error.into().encode(self.0);
}
}
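// Illustrative sketch (an assumed usage example, not part of the original crate
// docs): encoding a `record { a: string, b: u32 }` interface type through the
// component type section encoder defined below.
//
//     let mut types = ComponentTypeSection::new();
//     types.interface_type().record([
//         ("a", PrimitiveInterfaceType::String),
//         ("b", PrimitiveInterfaceType::U32),
//     ]);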
/// An encoder for the type section of WebAssembly components.
///
/// # Example
///
/// ```rust
/// use wasm_encoder::{Component, ComponentTypeSection, PrimitiveInterfaceType};
///
/// let mut types = ComponentTypeSection::new();
///
/// types.function(
/// [
/// (Some("a"), PrimitiveInterfaceType::String),
/// (Some("b"), PrimitiveInterfaceType::String)
/// ],
/// PrimitiveInterfaceType::String
/// );
///
/// let mut component = Component::new();
/// component.section(&types);
///
/// let bytes = component.finish();
/// ```
#[derive(Clone, Debug, Default)]
pub struct ComponentTypeSection {
bytes: Vec<u8>,
num_added: u32,
}
impl ComponentTypeSection {
/// Create a new component type section encoder.
pub fn new() -> Self {
Self::default()
}
/// The number of types in the section.
pub fn len(&self) -> u32 {
self.num_added
}
/// Determines if the section is empty.
pub fn is_empty(&self) -> bool {
self.num_added == 0
}
/// Encode a type into this section.
///
/// The returned encoder must be finished before adding another type.
#[must_use = "the encoder must be used to encode the type"]
pub fn ty(&mut self) -> TypeEncoder<'_> {
self.num_added += 1;
TypeEncoder(&mut self.bytes)
}
/// Define a module type in this type section.
pub fn module(&mut self, ty: &ModuleType) -> &mut Self {
self.ty().module(ty);
self
}
/// Define a component type in this type section.
pub fn component(&mut self, ty: &ComponentType) -> &mut Self {
self.ty().component(ty);
self
}
/// Define an instance type in this type section.
pub fn instance(&mut self, ty: &InstanceType) -> &mut Self {
self.ty().instance(ty);
self
}
/// Define a function type in this type section.
pub fn function<'a, P, T>(
&mut self,
params: P,
result: impl Into<InterfaceTypeRef>,
) -> &mut Self
where
P: IntoIterator<Item = (Option<&'a str>, T)>,
P::IntoIter: ExactSizeIterator,
T: Into<InterfaceTypeRef>,
{
self.ty().function(params, result);
self
}
/// Define a value type in this type section.
pub fn value(&mut self, ty: impl Into<InterfaceTypeRef>) -> &mut Self {
self.ty().value(ty);
self
}
/// Define an interface type in this type section.
///
/// The returned encoder must be finished before adding another type.
#[must_use = "the encoder must be used to encode the type"]
pub fn interface_type(&mut self) -> InterfaceTypeEncoder<'_> {
self.ty().interface_type()
}
}
impl Encode for ComponentTypeSection {
fn encode(&self, sink: &mut Vec<u8>) {
encode_section(sink, ComponentSectionId::Type, self.num_added, &self.bytes);
}
}
impl ComponentSection for ComponentTypeSection {}
| {
Self::default()
} |
test_minimize.py | import unittest
from policy_sentry.writing.minimize import minimize_statement_actions
from policy_sentry.querying.all import get_all_actions
class MinimizeWildcardActionsTestCase(unittest.TestCase):
def test_minimize_statement_actions(self):
actions_to_minimize = [
"kms:CreateGrant",
"kms:CreateCustomKeyStore",
"ec2:AuthorizeSecurityGroupEgress",
"ec2:AuthorizeSecurityGroupIngress",
]
desired_result = ["ec2:authorizes*", "kms:createc*", "kms:createg*"]
all_actions = get_all_actions(lowercase=True)
minchars = None
self.maxDiff = None
# minimized_actions_list = minimize_statement_actions(desired_actions, all_actions, minchars)
self.assertListEqual(
sorted(
minimize_statement_actions(actions_to_minimize, all_actions, minchars)
),
sorted(desired_result),
)
def test_minimize_statement_actions_funky_case(self):
| actions_to_minimize = [
"kms:creategrant",
"kms:createcustomkeystore",
"ec2:authorizesecuritygroupegress",
"ec2:authorizesecuritygroupingress",
]
desired_result = ["ec2:authorizes*", "kms:createc*", "kms:createg*"]
all_actions = get_all_actions(lowercase=True)
minchars = None
self.maxDiff = None
# minimized_actions_list = minimize_statement_actions(desired_actions, all_actions, minchars)
self.assertListEqual(
sorted(
minimize_statement_actions(actions_to_minimize, all_actions, minchars)
),
sorted(desired_result),
) |
|
guild.py | import typing
import datetime
from typing import Mapping, Any, Optional, Iterable, List
from .model_abc import JsonAPIModel
from .snowflake import Snowflake
from .user import User
from .enums import PermissionFlags
from .permissions import Role
from serpcord.utils.model import _init_model_from_mapping_json_data
if typing.TYPE_CHECKING:
|
class GuildMember(JsonAPIModel[Mapping[str, Any]]): # TODO: Optional[Guild] - make sure the guild itself adds itself
def __init__(self, client: "BotClient", user: User, # TODO: docs + slots
*, nick: Optional[str] = None, guild_avatar_hash: Optional[str] = None,
role_ids: Iterable[Snowflake], roles: Iterable[Role], joined_at: datetime.datetime,
premium_since: Optional[datetime.datetime] = None,
is_deaf: bool, is_muted: bool, is_pending: bool = False,
permissions: Optional[PermissionFlags] = None,
communication_disabled_until: Optional[datetime.datetime] = None):
self.client: "BotClient" = client
self.user: User = user # NOTE: Must be injected in MESSAGE_CREATE / MESSAGE_UPDATE events (not provided by API)
self.nick: Optional[str] = str(nick) if nick is not None else None
self.guild_avatar_hash: Optional[str] = str(guild_avatar_hash) if guild_avatar_hash is not None else None
self.role_ids: List[Snowflake] = list(role_ids)
self.joined_at: datetime.datetime = joined_at
self.premium_since: Optional[datetime.datetime] = premium_since
self.is_deaf = bool(is_deaf)
self.is_muted = bool(is_muted)
self.is_pending = bool(is_pending)
self.permissions = PermissionFlags(permissions) if permissions is not None else None
self.communication_disabled_until: Optional[datetime.datetime] = communication_disabled_until
@property
def id(self) -> Snowflake:
return self.user.id
@property
def username(self) -> str:
return self.user.username
@property
def display_name(self) -> str:
return self.nick or self.username
@classmethod
def _from_json_data(cls, client: "BotClient", json_data: Mapping[str, Any]):
return _init_model_from_mapping_json_data(cls, client, json_data, rename=dict(
avatar="guild_avatar_hash", roles="role_ids", deaf="is_deaf", muted="is_muted", pending="is_pending"
), type_check_types=True)
| from serpcord.botclient import BotClient |
securityGroup.go | // *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package rds
import (
"context"
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// Provides an RDS security group resource. This is only for DB instances in the
// EC2-Classic Platform. For instances inside a VPC, use the
// `aws_db_instance.vpc_security_group_ids`
// attribute instead.
//
// ## Example Usage
//
// ```go
// package main
//
// import (
// "github.com/pulumi/pulumi-aws/sdk/v4/go/aws/rds"
// "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
// )
//
// func main() {
// pulumi.Run(func(ctx *pulumi.Context) error {
// _, err := rds.NewSecurityGroup(ctx, "default", &rds.SecurityGroupArgs{
// Ingress: rds.SecurityGroupIngressArray{
// &rds.SecurityGroupIngressArgs{
// Cidr: pulumi.String("10.0.0.0/24"),
// },
// },
// })
// if err != nil {
// return err
// }
// return nil
// })
// }
// ```
//
// ## Import
//
// DB Security groups can be imported using the `name`, e.g.,
//
// ```sh
// $ pulumi import aws:rds/securityGroup:SecurityGroup default aws_rds_sg-1
// ```
type SecurityGroup struct {
pulumi.CustomResourceState
// The arn of the DB security group.
Arn pulumi.StringOutput `pulumi:"arn"`
// The description of the DB security group. Defaults to "Managed by Pulumi".
Description pulumi.StringOutput `pulumi:"description"`
// A list of ingress rules.
Ingress SecurityGroupIngressArrayOutput `pulumi:"ingress"`
// The name of the DB security group.
Name pulumi.StringOutput `pulumi:"name"`
	// A map of tags to assign to the resource. If configured with a provider `defaultTags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
Tags pulumi.StringMapOutput `pulumi:"tags"`
	// A map of tags assigned to the resource, including those inherited from the provider.
TagsAll pulumi.StringMapOutput `pulumi:"tagsAll"`
}
// NewSecurityGroup registers a new resource with the given unique name, arguments, and options.
func NewSecurityGroup(ctx *pulumi.Context,
name string, args *SecurityGroupArgs, opts ...pulumi.ResourceOption) (*SecurityGroup, error) {
if args == nil {
return nil, errors.New("missing one or more required arguments")
}
if args.Ingress == nil {
return nil, errors.New("invalid value for required argument 'Ingress'")
}
if isZero(args.Description) {
args.Description = pulumi.StringPtr("Managed by Pulumi")
}
var resource SecurityGroup
err := ctx.RegisterResource("aws:rds/securityGroup:SecurityGroup", name, args, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// GetSecurityGroup gets an existing SecurityGroup resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetSecurityGroup(ctx *pulumi.Context,
name string, id pulumi.IDInput, state *SecurityGroupState, opts ...pulumi.ResourceOption) (*SecurityGroup, error) |
// Input properties used for looking up and filtering SecurityGroup resources.
type securityGroupState struct {
// The arn of the DB security group.
Arn *string `pulumi:"arn"`
// The description of the DB security group. Defaults to "Managed by Pulumi".
Description *string `pulumi:"description"`
// A list of ingress rules.
Ingress []SecurityGroupIngress `pulumi:"ingress"`
// The name of the DB security group.
Name *string `pulumi:"name"`
	// A map of tags to assign to the resource. If configured with a provider `defaultTags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
Tags map[string]string `pulumi:"tags"`
	// A map of tags assigned to the resource, including those inherited from the provider.
TagsAll map[string]string `pulumi:"tagsAll"`
}
type SecurityGroupState struct {
// The arn of the DB security group.
Arn pulumi.StringPtrInput
// The description of the DB security group. Defaults to "Managed by Pulumi".
Description pulumi.StringPtrInput
// A list of ingress rules.
Ingress SecurityGroupIngressArrayInput
// The name of the DB security group.
Name pulumi.StringPtrInput
	// A map of tags to assign to the resource. If configured with a provider `defaultTags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
Tags pulumi.StringMapInput
	// A map of tags assigned to the resource, including those inherited from the provider.
TagsAll pulumi.StringMapInput
}
func (SecurityGroupState) ElementType() reflect.Type {
return reflect.TypeOf((*securityGroupState)(nil)).Elem()
}
type securityGroupArgs struct {
// The description of the DB security group. Defaults to "Managed by Pulumi".
Description *string `pulumi:"description"`
// A list of ingress rules.
Ingress []SecurityGroupIngress `pulumi:"ingress"`
// The name of the DB security group.
Name *string `pulumi:"name"`
	// A map of tags to assign to the resource. If configured with a provider `defaultTags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
Tags map[string]string `pulumi:"tags"`
}
// The set of arguments for constructing a SecurityGroup resource.
type SecurityGroupArgs struct {
// The description of the DB security group. Defaults to "Managed by Pulumi".
Description pulumi.StringPtrInput
// A list of ingress rules.
Ingress SecurityGroupIngressArrayInput
// The name of the DB security group.
Name pulumi.StringPtrInput
	// A map of tags to assign to the resource. If configured with a provider `defaultTags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
Tags pulumi.StringMapInput
}
func (SecurityGroupArgs) ElementType() reflect.Type {
return reflect.TypeOf((*securityGroupArgs)(nil)).Elem()
}
type SecurityGroupInput interface {
pulumi.Input
ToSecurityGroupOutput() SecurityGroupOutput
ToSecurityGroupOutputWithContext(ctx context.Context) SecurityGroupOutput
}
func (*SecurityGroup) ElementType() reflect.Type {
return reflect.TypeOf((**SecurityGroup)(nil)).Elem()
}
func (i *SecurityGroup) ToSecurityGroupOutput() SecurityGroupOutput {
return i.ToSecurityGroupOutputWithContext(context.Background())
}
func (i *SecurityGroup) ToSecurityGroupOutputWithContext(ctx context.Context) SecurityGroupOutput {
return pulumi.ToOutputWithContext(ctx, i).(SecurityGroupOutput)
}
// SecurityGroupArrayInput is an input type that accepts SecurityGroupArray and SecurityGroupArrayOutput values.
// You can construct a concrete instance of `SecurityGroupArrayInput` via:
//
// SecurityGroupArray{ SecurityGroupArgs{...} }
type SecurityGroupArrayInput interface {
pulumi.Input
ToSecurityGroupArrayOutput() SecurityGroupArrayOutput
ToSecurityGroupArrayOutputWithContext(context.Context) SecurityGroupArrayOutput
}
type SecurityGroupArray []SecurityGroupInput
func (SecurityGroupArray) ElementType() reflect.Type {
return reflect.TypeOf((*[]*SecurityGroup)(nil)).Elem()
}
func (i SecurityGroupArray) ToSecurityGroupArrayOutput() SecurityGroupArrayOutput {
return i.ToSecurityGroupArrayOutputWithContext(context.Background())
}
func (i SecurityGroupArray) ToSecurityGroupArrayOutputWithContext(ctx context.Context) SecurityGroupArrayOutput {
return pulumi.ToOutputWithContext(ctx, i).(SecurityGroupArrayOutput)
}
// SecurityGroupMapInput is an input type that accepts SecurityGroupMap and SecurityGroupMapOutput values.
// You can construct a concrete instance of `SecurityGroupMapInput` via:
//
// SecurityGroupMap{ "key": SecurityGroupArgs{...} }
type SecurityGroupMapInput interface {
pulumi.Input
ToSecurityGroupMapOutput() SecurityGroupMapOutput
ToSecurityGroupMapOutputWithContext(context.Context) SecurityGroupMapOutput
}
type SecurityGroupMap map[string]SecurityGroupInput
func (SecurityGroupMap) ElementType() reflect.Type {
return reflect.TypeOf((*map[string]*SecurityGroup)(nil)).Elem()
}
func (i SecurityGroupMap) ToSecurityGroupMapOutput() SecurityGroupMapOutput {
return i.ToSecurityGroupMapOutputWithContext(context.Background())
}
func (i SecurityGroupMap) ToSecurityGroupMapOutputWithContext(ctx context.Context) SecurityGroupMapOutput {
return pulumi.ToOutputWithContext(ctx, i).(SecurityGroupMapOutput)
}
type SecurityGroupOutput struct{ *pulumi.OutputState }
func (SecurityGroupOutput) ElementType() reflect.Type {
return reflect.TypeOf((**SecurityGroup)(nil)).Elem()
}
func (o SecurityGroupOutput) ToSecurityGroupOutput() SecurityGroupOutput {
return o
}
func (o SecurityGroupOutput) ToSecurityGroupOutputWithContext(ctx context.Context) SecurityGroupOutput {
return o
}
type SecurityGroupArrayOutput struct{ *pulumi.OutputState }
func (SecurityGroupArrayOutput) ElementType() reflect.Type {
return reflect.TypeOf((*[]*SecurityGroup)(nil)).Elem()
}
func (o SecurityGroupArrayOutput) ToSecurityGroupArrayOutput() SecurityGroupArrayOutput {
return o
}
func (o SecurityGroupArrayOutput) ToSecurityGroupArrayOutputWithContext(ctx context.Context) SecurityGroupArrayOutput {
return o
}
func (o SecurityGroupArrayOutput) Index(i pulumi.IntInput) SecurityGroupOutput {
return pulumi.All(o, i).ApplyT(func(vs []interface{}) *SecurityGroup {
return vs[0].([]*SecurityGroup)[vs[1].(int)]
}).(SecurityGroupOutput)
}
type SecurityGroupMapOutput struct{ *pulumi.OutputState }
func (SecurityGroupMapOutput) ElementType() reflect.Type {
return reflect.TypeOf((*map[string]*SecurityGroup)(nil)).Elem()
}
func (o SecurityGroupMapOutput) ToSecurityGroupMapOutput() SecurityGroupMapOutput {
return o
}
func (o SecurityGroupMapOutput) ToSecurityGroupMapOutputWithContext(ctx context.Context) SecurityGroupMapOutput {
return o
}
func (o SecurityGroupMapOutput) MapIndex(k pulumi.StringInput) SecurityGroupOutput {
return pulumi.All(o, k).ApplyT(func(vs []interface{}) *SecurityGroup {
return vs[0].(map[string]*SecurityGroup)[vs[1].(string)]
}).(SecurityGroupOutput)
}
func init() {
pulumi.RegisterInputType(reflect.TypeOf((*SecurityGroupInput)(nil)).Elem(), &SecurityGroup{})
pulumi.RegisterInputType(reflect.TypeOf((*SecurityGroupArrayInput)(nil)).Elem(), SecurityGroupArray{})
pulumi.RegisterInputType(reflect.TypeOf((*SecurityGroupMapInput)(nil)).Elem(), SecurityGroupMap{})
pulumi.RegisterOutputType(SecurityGroupOutput{})
pulumi.RegisterOutputType(SecurityGroupArrayOutput{})
pulumi.RegisterOutputType(SecurityGroupMapOutput{})
}
| {
var resource SecurityGroup
err := ctx.ReadResource("aws:rds/securityGroup:SecurityGroup", name, id, state, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
} |
utils.py | # -*- coding: utf-8 -*-
"""
flask_login.utils
-----------------
General utilities.
"""
import hmac
from hashlib import sha512
from functools import wraps
from werkzeug.local import LocalProxy
from werkzeug.security import safe_str_cmp
from werkzeug.urls import url_decode, url_encode
from flask import (
_request_ctx_stack,
current_app,
request,
session,
url_for,
has_request_context,
)
from ._compat import text_type, urlparse, urlunparse
from .config import COOKIE_NAME, EXEMPT_METHODS
from .signals import user_logged_in, user_logged_out, user_login_confirmed
#: A proxy for the current user. If no user is logged in, this will be an
#: anonymous user
current_user = LocalProxy(lambda: _get_user())
def encode_cookie(payload):
"""
This will encode a ``unicode`` value into a cookie, and sign that cookie
with the app's secret key.
:param payload: The value to encode, as `unicode`.
:type payload: unicode
"""
return u"{0}|{1}".format(payload, _cookie_digest(payload))
def decode_cookie(cookie):
"""
This decodes a cookie given by `encode_cookie`. If verification of the
cookie fails, ``None`` will be implicitly returned.
:param cookie: An encoded cookie.
:type cookie: str
"""
try:
payload, digest = cookie.rsplit(u"|", 1)
if hasattr(digest, "decode"):
digest = digest.decode("ascii") # pragma: no cover
except ValueError:
return
if safe_str_cmp(_cookie_digest(payload), digest):
return payload
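# Illustrative round trip (example only, assumes app.config["SECRET_KEY"] is set):
#   encode_cookie(u"42")  ->  u"42|<hmac-sha512 hex digest>"
#   decode_cookie(u"42|<same digest>")  ->  u"42"
# A tampered payload or digest fails the safe_str_cmp() check and decode_cookie() returns None.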
def make_next_param(login_url, current_url):
"""
Reduces the scheme and host from a given URL so it can be passed to
the given `login` URL more efficiently.
:param login_url: The login URL being redirected to.
:type login_url: str
:param current_url: The URL to reduce.
:type current_url: str
"""
l = urlparse(login_url)
c = urlparse(current_url)
if (not l.scheme or l.scheme == c.scheme) and (
not l.netloc or l.netloc == c.netloc
):
return urlunparse(("", "", c.path, c.params, c.query, ""))
return current_url
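# Illustrative behaviour (example only): when login_url and current_url share scheme and
# host, make_next_param("https://example.com/login", "https://example.com/account?tab=1")
# returns "/account?tab=1"; if the hosts differ, current_url is returned unchanged.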
def expand_login_view(login_view):
"""
Returns the url for the login view, expanding the view name to a url if
needed.
:param login_view: The name of the login view or a URL for the login view.
:type login_view: str
"""
if login_view.startswith(("https://", "http://", "/")):
return login_view
else:
return url_for(login_view)
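# Illustrative behaviour (example only): expand_login_view("/login") is returned as-is,
# while expand_login_view("auth.login") is resolved through url_for() to the view's URL.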
| """
Creates a URL for redirecting to a login page. If only `login_view` is
provided, this will just return the URL for it. If `next_url` is provided,
however, this will append a ``next=URL`` parameter to the query string
so that the login view can redirect back to that URL. Flask-Login's default
unauthorized handler uses this function when redirecting to your login url.
To force the host name used, set `FORCE_HOST_FOR_REDIRECTS` to a host. This
    prevents redirects to external sites if the request headers Host or
X-Forwarded-For are present.
:param login_view: The name of the login view. (Alternately, the actual
URL to the login view.)
:type login_view: str
:param next_url: The URL to give the login view for redirection.
:type next_url: str
:param next_field: What field to store the next URL in. (It defaults to
``next``.)
:type next_field: str
"""
base = expand_login_view(login_view)
if next_url is None:
return base
parsed_result = urlparse(base)
md = url_decode(parsed_result.query)
md[next_field] = make_next_param(base, next_url)
netloc = current_app.config.get("FORCE_HOST_FOR_REDIRECTS") or parsed_result.netloc
parsed_result = parsed_result._replace(
netloc=netloc, query=url_encode(md, sort=True)
)
return urlunparse(parsed_result)
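# Illustrative result (example only, endpoint name is hypothetical): if url_for("auth.login")
# resolves to "/login", then login_url("auth.login", next_url="/settings") returns
# "/login?next=%2Fsettings" (the next value is URL-encoded by url_encode()).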
def login_fresh():
"""
This returns ``True`` if the current login is fresh.
"""
return session.get("_fresh", False)
def login_user(user, remember=False, duration=None, force=False, fresh=True):
"""
Logs a user in. You should pass the actual user object to this. If the
user's `is_active` property is ``False``, they will not be logged in
unless `force` is ``True``.
This will return ``True`` if the log in attempt succeeds, and ``False`` if
it fails (i.e. because the user is inactive).
:param user: The user object to log in.
:type user: object
:param remember: Whether to remember the user after their session expires.
Defaults to ``False``.
:type remember: bool
:param duration: The amount of time before the remember cookie expires. If
``None`` the value set in the settings is used. Defaults to ``None``.
:type duration: :class:`datetime.timedelta`
:param force: If the user is inactive, setting this to ``True`` will log
them in regardless. Defaults to ``False``.
:type force: bool
:param fresh: setting this to ``False`` will log in the user with a session
marked as not "fresh". Defaults to ``True``.
:type fresh: bool
"""
if not force and not user.is_active:
return False
user_id = getattr(user, current_app.login_manager.id_attribute)()
session["user_id"] = user_id
session["_fresh"] = fresh
session["_id"] = current_app.login_manager._session_identifier_generator()
if remember:
session["remember"] = "set"
if duration is not None:
try:
# equal to timedelta.total_seconds() but works with Python 2.6
session["remember_seconds"] = (
duration.microseconds
+ (duration.seconds + duration.days * 24 * 3600) * 10 ** 6
) / 10.0 ** 6
except AttributeError:
raise Exception(
"duration must be a datetime.timedelta, "
"instead got: {0}".format(duration)
)
_request_ctx_stack.top.user = user
user_logged_in.send(current_app._get_current_object(), user=_get_user())
return True
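# Illustrative usage (example only): inside a view, after validating credentials,
#   login_user(user, remember=True, duration=datetime.timedelta(days=7))
# stores the user id, freshness flag and session identifier in the session and
# schedules a remember cookie lasting seven days.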
def logout_user():
"""
Logs a user out. (You do not need to pass the actual user.) This will
also clean up the remember me cookie if it exists.
"""
user = _get_user()
if "user_id" in session:
session.pop("user_id")
if "_fresh" in session:
session.pop("_fresh")
cookie_name = current_app.config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME)
if cookie_name in request.cookies:
session["remember"] = "clear"
if "remember_seconds" in session:
session.pop("remember_seconds")
user_logged_out.send(current_app._get_current_object(), user=user)
current_app.login_manager.reload_user()
return True
def confirm_login():
"""
This sets the current session as fresh. Sessions become stale when they
are reloaded from a cookie.
"""
session["_fresh"] = True
session["_id"] = current_app.login_manager._session_identifier_generator()
user_login_confirmed.send(current_app._get_current_object())
def login_required(func):
"""
If you decorate a view with this, it will ensure that the current user is
logged in and authenticated before calling the actual view. (If they are
not, it calls the :attr:`LoginManager.unauthorized` callback.) For
example::
@app.route('/post')
@login_required
def post():
pass
If there are only certain times you need to require that your user is
logged in, you can do so with::
if not current_user.is_authenticated:
return current_app.login_manager.unauthorized()
...which is essentially the code that this function adds to your views.
It can be convenient to globally turn off authentication when unit testing.
To enable this, if the application configuration variable `LOGIN_DISABLED`
is set to `True`, this decorator will be ignored.
.. Note ::
Per `W3 guidelines for CORS preflight requests
<http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_,
HTTP ``OPTIONS`` requests are exempt from login checks.
:param func: The view function to decorate.
:type func: function
"""
@wraps(func)
def decorated_view(*args, **kwargs):
if request.method in EXEMPT_METHODS:
return func(*args, **kwargs)
elif current_app.login_manager._login_disabled:
return func(*args, **kwargs)
elif not current_user.is_authenticated:
return current_app.login_manager.unauthorized()
return func(*args, **kwargs)
return decorated_view
def fresh_login_required(func):
"""
If you decorate a view with this, it will ensure that the current user's
login is fresh - i.e. their session was not restored from a 'remember me'
cookie. Sensitive operations, like changing a password or e-mail, should
be protected with this, to impede the efforts of cookie thieves.
If the user is not authenticated, :meth:`LoginManager.unauthorized` is
called as normal. If they are authenticated, but their session is not
fresh, it will call :meth:`LoginManager.needs_refresh` instead. (In that
case, you will need to provide a :attr:`LoginManager.refresh_view`.)
Behaves identically to the :func:`login_required` decorator with respect
    to configuration variables.
.. Note ::
Per `W3 guidelines for CORS preflight requests
<http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_,
HTTP ``OPTIONS`` requests are exempt from login checks.
:param func: The view function to decorate.
:type func: function
"""
@wraps(func)
def decorated_view(*args, **kwargs):
if request.method in EXEMPT_METHODS:
return func(*args, **kwargs)
elif current_app.login_manager._login_disabled:
return func(*args, **kwargs)
elif not current_user.is_authenticated:
return current_app.login_manager.unauthorized()
elif not login_fresh():
return current_app.login_manager.needs_refresh()
return func(*args, **kwargs)
return decorated_view
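# Illustrative usage (example only): protect a sensitive view so that a session restored
# from a remember-me cookie triggers LoginManager.needs_refresh() instead of running it:
#
#   @app.route('/change-password', methods=['GET', 'POST'])
#   @fresh_login_required
#   def change_password():
#       ...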
def set_login_view(login_view, blueprint=None):
"""
Sets the login view for the app or blueprint. If a blueprint is passed,
the login view is set for this blueprint on ``blueprint_login_views``.
    :param login_view: The name of the login view or a URL for the login view.
:type login_view: str
:param blueprint: The blueprint which this login view should be set on.
Defaults to ``None``.
:type blueprint: object
"""
num_login_views = len(current_app.login_manager.blueprint_login_views)
if blueprint is not None or num_login_views != 0:
(current_app.login_manager.blueprint_login_views[blueprint.name]) = login_view
if (
current_app.login_manager.login_view is not None
and None not in current_app.login_manager.blueprint_login_views
):
(
current_app.login_manager.blueprint_login_views[None]
) = current_app.login_manager.login_view
current_app.login_manager.login_view = None
else:
current_app.login_manager.login_view = login_view
def _get_user():
if has_request_context() and not hasattr(_request_ctx_stack.top, "user"):
current_app.login_manager._load_user()
return getattr(_request_ctx_stack.top, "user", None)
def _cookie_digest(payload, key=None):
key = _secret_key(key)
return hmac.new(key, payload.encode("utf-8"), sha512).hexdigest()
def _get_remote_addr():
address = request.headers.get("X-Forwarded-For", request.remote_addr)
if address is not None:
# An 'X-Forwarded-For' header includes a comma separated list of the
# addresses, the first address being the actual remote address.
address = address.encode("utf-8").split(b",")[0].strip()
return address
def _create_identifier():
user_agent = request.headers.get("User-Agent")
if user_agent is not None:
user_agent = user_agent.encode("utf-8")
base = "{0}|{1}".format(_get_remote_addr(), user_agent)
if str is bytes:
base = text_type(base, "utf-8", errors="replace") # pragma: no cover
h = sha512()
h.update(base.encode("utf8"))
return h.hexdigest()
def _user_context_processor():
return dict(current_user=_get_user())
def _secret_key(key=None):
if key is None:
key = current_app.config["SECRET_KEY"]
if isinstance(key, text_type): # pragma: no cover
key = key.encode("latin1") # ensure bytes
return key |
def login_url(login_view, next_url=None, next_field="next"): |
shorts.rs | #[doc = "Register `SHORTS` reader"]
pub struct R(crate::R<SHORTS_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<SHORTS_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<SHORTS_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<SHORTS_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `SHORTS` writer"]
pub struct W(crate::W<SHORTS_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<SHORTS_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<SHORTS_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<SHORTS_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Shortcut between event READY and task SAMPLE\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum READY_SAMPLE_A {
#[doc = "0: Disable shortcut"]
DISABLED = 0,
#[doc = "1: Enable shortcut"]
ENABLED = 1,
}
impl From<READY_SAMPLE_A> for bool {
#[inline(always)]
fn from(variant: READY_SAMPLE_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `READY_SAMPLE` reader - Shortcut between event READY and task SAMPLE"]
pub struct READY_SAMPLE_R(crate::FieldReader<bool, READY_SAMPLE_A>);
impl READY_SAMPLE_R {
pub(crate) fn new(bits: bool) -> Self {
READY_SAMPLE_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> READY_SAMPLE_A {
match self.bits {
false => READY_SAMPLE_A::DISABLED,
true => READY_SAMPLE_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == READY_SAMPLE_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == READY_SAMPLE_A::ENABLED
}
}
impl core::ops::Deref for READY_SAMPLE_R {
type Target = crate::FieldReader<bool, READY_SAMPLE_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `READY_SAMPLE` writer - Shortcut between event READY and task SAMPLE"]
pub struct READY_SAMPLE_W<'a> {
w: &'a mut W,
}
impl<'a> READY_SAMPLE_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: READY_SAMPLE_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable shortcut"]
#[inline(always)]
pub fn disabled(self) -> &'a mut W {
self.variant(READY_SAMPLE_A::DISABLED)
}
#[doc = "Enable shortcut"]
#[inline(always)]
pub fn enabled(self) -> &'a mut W {
self.variant(READY_SAMPLE_A::ENABLED)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01);
self.w
}
}
#[doc = "Shortcut between event READY and task STOP\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum READY_STOP_A {
#[doc = "0: Disable shortcut"]
DISABLED = 0,
#[doc = "1: Enable shortcut"]
ENABLED = 1,
}
impl From<READY_STOP_A> for bool {
#[inline(always)]
fn from(variant: READY_STOP_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `READY_STOP` reader - Shortcut between event READY and task STOP"]
pub struct READY_STOP_R(crate::FieldReader<bool, READY_STOP_A>);
impl READY_STOP_R {
pub(crate) fn new(bits: bool) -> Self {
READY_STOP_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> READY_STOP_A {
match self.bits {
false => READY_STOP_A::DISABLED,
true => READY_STOP_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == READY_STOP_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == READY_STOP_A::ENABLED
}
}
impl core::ops::Deref for READY_STOP_R {
type Target = crate::FieldReader<bool, READY_STOP_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `READY_STOP` writer - Shortcut between event READY and task STOP"]
pub struct READY_STOP_W<'a> {
w: &'a mut W,
}
impl<'a> READY_STOP_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: READY_STOP_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable shortcut"]
#[inline(always)]
pub fn disabled(self) -> &'a mut W {
self.variant(READY_STOP_A::DISABLED)
}
#[doc = "Enable shortcut"]
#[inline(always)]
pub fn enabled(self) -> &'a mut W {
self.variant(READY_STOP_A::ENABLED)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1);
self.w
}
}
#[doc = "Shortcut between event DOWN and task STOP\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DOWN_STOP_A {
#[doc = "0: Disable shortcut"]
DISABLED = 0,
#[doc = "1: Enable shortcut"]
ENABLED = 1,
}
impl From<DOWN_STOP_A> for bool {
#[inline(always)]
fn from(variant: DOWN_STOP_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `DOWN_STOP` reader - Shortcut between event DOWN and task STOP"]
pub struct DOWN_STOP_R(crate::FieldReader<bool, DOWN_STOP_A>);
impl DOWN_STOP_R {
pub(crate) fn new(bits: bool) -> Self {
DOWN_STOP_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> DOWN_STOP_A {
match self.bits {
false => DOWN_STOP_A::DISABLED,
true => DOWN_STOP_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == DOWN_STOP_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == DOWN_STOP_A::ENABLED
}
}
impl core::ops::Deref for DOWN_STOP_R {
type Target = crate::FieldReader<bool, DOWN_STOP_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `DOWN_STOP` writer - Shortcut between event DOWN and task STOP"]
pub struct DOWN_STOP_W<'a> {
w: &'a mut W,
}
impl<'a> DOWN_STOP_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: DOWN_STOP_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable shortcut"]
#[inline(always)]
pub fn disabled(self) -> &'a mut W {
self.variant(DOWN_STOP_A::DISABLED)
}
#[doc = "Enable shortcut"]
#[inline(always)]
pub fn enabled(self) -> &'a mut W {
self.variant(DOWN_STOP_A::ENABLED)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u32 & 0x01) << 2);
self.w
}
}
#[doc = "Shortcut between event UP and task STOP\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum UP_STOP_A {
#[doc = "0: Disable shortcut"]
DISABLED = 0,
#[doc = "1: Enable shortcut"]
ENABLED = 1,
}
impl From<UP_STOP_A> for bool {
#[inline(always)]
fn from(variant: UP_STOP_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `UP_STOP` reader - Shortcut between event UP and task STOP"]
pub struct UP_STOP_R(crate::FieldReader<bool, UP_STOP_A>);
impl UP_STOP_R {
pub(crate) fn new(bits: bool) -> Self {
UP_STOP_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> UP_STOP_A {
match self.bits {
false => UP_STOP_A::DISABLED,
true => UP_STOP_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == UP_STOP_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == UP_STOP_A::ENABLED
}
}
impl core::ops::Deref for UP_STOP_R {
type Target = crate::FieldReader<bool, UP_STOP_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `UP_STOP` writer - Shortcut between event UP and task STOP"]
pub struct UP_STOP_W<'a> {
w: &'a mut W,
}
impl<'a> UP_STOP_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: UP_STOP_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable shortcut"]
#[inline(always)]
pub fn disabled(self) -> &'a mut W {
self.variant(UP_STOP_A::DISABLED)
}
#[doc = "Enable shortcut"]
#[inline(always)]
pub fn enabled(self) -> &'a mut W {
self.variant(UP_STOP_A::ENABLED)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | ((value as u32 & 0x01) << 3);
self.w
}
}
#[doc = "Shortcut between event CROSS and task STOP\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CROSS_STOP_A {
#[doc = "0: Disable shortcut"]
DISABLED = 0,
#[doc = "1: Enable shortcut"]
ENABLED = 1,
}
impl From<CROSS_STOP_A> for bool {
#[inline(always)]
fn from(variant: CROSS_STOP_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `CROSS_STOP` reader - Shortcut between event CROSS and task STOP"]
pub struct CROSS_STOP_R(crate::FieldReader<bool, CROSS_STOP_A>);
impl CROSS_STOP_R {
pub(crate) fn new(bits: bool) -> Self {
CROSS_STOP_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CROSS_STOP_A {
match self.bits {
false => CROSS_STOP_A::DISABLED,
true => CROSS_STOP_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == CROSS_STOP_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == CROSS_STOP_A::ENABLED
}
}
impl core::ops::Deref for CROSS_STOP_R {
type Target = crate::FieldReader<bool, CROSS_STOP_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CROSS_STOP` writer - Shortcut between event CROSS and task STOP"]
pub struct CROSS_STOP_W<'a> {
w: &'a mut W,
}
impl<'a> CROSS_STOP_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CROSS_STOP_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable shortcut"]
#[inline(always)]
pub fn disabled(self) -> &'a mut W {
self.variant(CROSS_STOP_A::DISABLED)
}
#[doc = "Enable shortcut"]
#[inline(always)]
pub fn enabled(self) -> &'a mut W {
self.variant(CROSS_STOP_A::ENABLED)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | ((value as u32 & 0x01) << 4);
self.w
}
}
impl R {
#[doc = "Bit 0 - Shortcut between event READY and task SAMPLE"]
#[inline(always)]
pub fn ready_sample(&self) -> READY_SAMPLE_R {
READY_SAMPLE_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Shortcut between event READY and task STOP"]
#[inline(always)]
pub fn ready_stop(&self) -> READY_STOP_R {
READY_STOP_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - Shortcut between event DOWN and task STOP"]
#[inline(always)]
pub fn down_stop(&self) -> DOWN_STOP_R {
DOWN_STOP_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - Shortcut between event UP and task STOP"]
#[inline(always)]
pub fn up_stop(&self) -> UP_STOP_R {
UP_STOP_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - Shortcut between event CROSS and task STOP"]
#[inline(always)]
pub fn cross_stop(&self) -> CROSS_STOP_R {
CROSS_STOP_R::new(((self.bits >> 4) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 0 - Shortcut between event READY and task SAMPLE"]
#[inline(always)]
pub fn ready_sample(&mut self) -> READY_SAMPLE_W {
READY_SAMPLE_W { w: self }
}
#[doc = "Bit 1 - Shortcut between event READY and task STOP"]
#[inline(always)]
pub fn ready_stop(&mut self) -> READY_STOP_W {
READY_STOP_W { w: self }
}
#[doc = "Bit 2 - Shortcut between event DOWN and task STOP"]
#[inline(always)]
pub fn down_stop(&mut self) -> DOWN_STOP_W {
DOWN_STOP_W { w: self }
}
#[doc = "Bit 3 - Shortcut between event UP and task STOP"]
#[inline(always)]
pub fn up_stop(&mut self) -> UP_STOP_W {
UP_STOP_W { w: self }
}
#[doc = "Bit 4 - Shortcut between event CROSS and task STOP"]
#[inline(always)]
pub fn cross_stop(&mut self) -> CROSS_STOP_W {
CROSS_STOP_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Shortcuts between local events and tasks\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [shorts](index.html) module"] | pub struct SHORTS_SPEC;
impl crate::RegisterSpec for SHORTS_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [shorts::R](R) reader structure"]
impl crate::Readable for SHORTS_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [shorts::W](W) writer structure"]
impl crate::Writable for SHORTS_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets SHORTS to value 0"]
impl crate::Resettable for SHORTS_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
} | |
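// Illustrative usage (peripheral and binding names are hypothetical, not part of this file):
// a peripheral exposing this register as `shorts` can be driven through the generated API:
//   periph.shorts.write(|w| w.ready_sample().enabled().ready_stop().disabled());
//   let cross_enabled = periph.shorts.read().cross_stop().is_enabled();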
app.e2e-spec.ts | import { AngularPresentationPage } from './app.po';
describe('angular-presentation App', () => {
let page: AngularPresentationPage;
beforeEach(() => {
page = new AngularPresentationPage();
});
| it('should display message saying app works', () => {
page.navigateTo();
expect(1).toEqual(1);
});
}); | |
p2p_zpos_fakestake_accepted.py | #!/usr/bin/env python3
# Copyright (c) 2019 The redspace Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Performs the same check as in Test_02 verifying that zPoS forked blocks that stake a zerocoin which is spent on the main chain at a higher block are still accepted.
'''
from test_framework.authproxy import JSONRPCException
from fake_stake.base_test import redspace_FakeStakeTest
from time import sleep
class zPoSFakeStakeAccepted(redspace_FakeStakeTest):
def | (self):
''' Setup test environment
:param:
:return:
'''
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [['-staking=1', '-debug=net', '-zrscstake']] * self.num_nodes
def run_test(self):
self.description = "Performs the same check as in Test_02 verifying that zPoS forked blocks that stake a zerocoin which is spent on mainchain on an higher block are still accepted."
self.init_test()
DENOM_TO_USE = 1000 # zc denomination
INITAL_MINED_BLOCKS = 321
MORE_MINED_BLOCKS = 301
FORK_DEPTH = 75
self.NUM_BLOCKS = 2
# 1) Starting mining blocks
self.log.info("Mining %d blocks to get to zPOS activation...." % INITAL_MINED_BLOCKS)
self.node.generate(INITAL_MINED_BLOCKS)
sleep(2)
# 2) Collect the possible prevouts and mint zerocoins with those
self.log.info("Collecting all unspent coins which we generated from mining...")
balance = self.node.getbalance("*", 100)
self.log.info("Minting zerocoins...")
initial_mints = 0
while balance > DENOM_TO_USE:
try:
self.node.mintzerocoin(DENOM_TO_USE)
except JSONRPCException:
break
sleep(1)
initial_mints += 1
self.node.generate(1)
sleep(1)
if initial_mints % 5 == 0:
self.log.info("Minted %d coins" % initial_mints)
if initial_mints >= 20:
break
balance = self.node.getbalance("*", 100)
self.log.info("Minted %d coins in the %d-denom, remaining balance %d", initial_mints, DENOM_TO_USE, balance)
sleep(2)
# 3) mine more blocks
self.log.info("Mining %d more blocks ... and getting spendable zerocoins" % MORE_MINED_BLOCKS)
self.node.generate(MORE_MINED_BLOCKS)
sleep(2)
mints = self.node.listmintedzerocoins(True, True)
sleep(1)
mints_hashes = [x["serial hash"] for x in mints]
        # These mints are not all spendable yet; only a few of them are.
self.log.info("Got %d confirmed mints" % len(mints_hashes))
# 4) Start mining again so that spends get confirmed in a block.
self.log.info("Mining 200 more blocks...")
self.node.generate(200)
sleep(2)
# 5) spend mints
self.log.info("Spending mints in block %d..." % self.node.getblockcount())
spends = 0
for mint in mints_hashes:
# create a single element list to pass to RPC spendzerocoinmints
mint_arg = []
mint_arg.append(mint)
try:
self.node.spendzerocoinmints(mint_arg)
sleep(1)
spends += 1
except JSONRPCException as e:
self.log.warning(str(e))
continue
sleep(1)
self.log.info("Successfully spent %d mints" % spends)
self.log.info("Mining 6 more blocks...")
self.node.generate(6)
sleep(2)
# 6) Collect some prevouts for random txes
self.log.info("Collecting inputs for txes...")
utxo_list = self.node.listunspent()
sleep(1)
# 7) Create valid forked zPoS blocks and send them
self.log.info("Creating stake zPoS blocks...")
err_msgs = self.test_spam("Fork", mints, spending_utxo_list=utxo_list, fZPoS=True, fRandomHeight=True, randomRange=FORK_DEPTH, randomRange2=50, fMustPass=True)
if not len(err_msgs) == 0:
self.log.error("result: " + " | ".join(err_msgs))
raise AssertionError("TEST FAILED")
self.log.info("%s PASSED" % self.__class__.__name__)
if __name__ == '__main__':
zPoSFakeStakeAccepted().main()
| set_test_params |
client_test.go | // Copyright 2016 LINE Corporation
//
// LINE Corporation licenses this file to you under the Apache License,
// version 2.0 (the "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
package linebot
import (
"context"
"crypto/tls"
"net/http"
"net/http/httptest"
"net/url"
"reflect"
"testing"
)
func mockClient(server *httptest.Server) (*Client, error) {
client, err := New(
"testsecret",
"testtoken",
WithHTTPClient(&http.Client{
Transport: &http.Transport{
TLSClientConfig: &tls.Config{
InsecureSkipVerify: true,
},
},
}),
WithEndpointBase(server.URL),
)
if err != nil {
return nil, err
}
return client, nil
}
func TestNewClient(t *testing.T) {
secret := "testsecret"
token := "testtoken"
wantURL, _ := url.Parse(APIEndpointBase)
client, err := New(secret, token)
if err != nil {
t.Fatal(err)
}
if client.channelSecret != secret {
t.Errorf("channelSecret %s; want %s", client.channelSecret, secret)
}
if client.channelToken != token {
t.Errorf("channelToken %s; want %s", client.channelToken, token)
}
if !reflect.DeepEqual(client.endpointBase, wantURL) {
t.Errorf("endpointBase %v; want %v", client.endpointBase, wantURL)
}
if client.httpClient != http.DefaultClient {
t.Errorf("httpClient %p; want %p", client.httpClient, http.DefaultClient)
}
}
func TestNewClientWithOptions(t *testing.T) {
secret := "testsecret"
token := "testtoken"
endpoint := "https://example.test/"
httpClient := http.Client{}
wantURL, _ := url.Parse(endpoint)
client, err := New(
secret,
token,
WithHTTPClient(&httpClient),
WithEndpointBase(endpoint),
)
if err != nil {
t.Fatal(err)
}
if !reflect.DeepEqual(client.endpointBase, wantURL) {
t.Errorf("endpointBase %v; want %v", client.endpointBase, wantURL)
}
if client.httpClient != &httpClient {
t.Errorf("httpClient %p; want %p", client.httpClient, &httpClient)
}
}
func | (ctx context.Context, err error, t *testing.T) {
if err == nil || ctx.Err() != context.DeadlineExceeded {
t.Errorf("err %v; want %v", err, context.DeadlineExceeded)
}
}
| expectCtxDeadlineExceed |
findopt_replaceone_test.go | package findopt
import (
"testing"
"reflect"
"github.com/mongodb/mongo-go-driver/core/option"
"github.com/mongodb/mongo-go-driver/internal/testutil/helpers"
"github.com/mongodb/mongo-go-driver/mongo/mongoopt"
)
func createNestedReplaceOneBundle1(t *testing.T) *ReplaceOneBundle {
nestedBundle := BundleReplaceOne(Upsert(false))
testhelpers.RequireNotNil(t, nestedBundle, "nested bundle was nil")
outerBundle := BundleReplaceOne(Upsert(true), MaxTime(500), nestedBundle, MaxTime(1000))
testhelpers.RequireNotNil(t, outerBundle, "outer bundle was nil")
return outerBundle
}
// Test doubly nested bundle
func createNestedReplaceOneBundle2(t *testing.T) *ReplaceOneBundle {
b1 := BundleReplaceOne(Upsert(false))
testhelpers.RequireNotNil(t, b1, "nested bundle was nil")
b2 := BundleReplaceOne(MaxTime(100), b1)
testhelpers.RequireNotNil(t, b2, "nested bundle was nil")
outerBundle := BundleReplaceOne(Upsert(true), MaxTime(500), b2, MaxTime(1000))
testhelpers.RequireNotNil(t, outerBundle, "outer bundle was nil")
return outerBundle
}
// Test two top level nested bundles
func createNestedReplaceOneBundle3(t *testing.T) *ReplaceOneBundle {
b1 := BundleReplaceOne(Upsert(false))
testhelpers.RequireNotNil(t, b1, "nested bundle was nil")
b2 := BundleReplaceOne(MaxTime(100), b1)
testhelpers.RequireNotNil(t, b2, "nested bundle was nil")
b3 := BundleReplaceOne(Upsert(true))
testhelpers.RequireNotNil(t, b3, "nested bundle was nil")
b4 := BundleReplaceOne(MaxTime(100), b3)
testhelpers.RequireNotNil(t, b4, "nested bundle was nil")
outerBundle := BundleReplaceOne(b4, MaxTime(500), b2, MaxTime(1000))
testhelpers.RequireNotNil(t, outerBundle, "outer bundle was nil")
return outerBundle
}
func TestFindAndReplaceOneOpt(t *testing.T) {
var bundle1 *ReplaceOneBundle
bundle1 = bundle1.Upsert(true).BypassDocumentValidation(false)
testhelpers.RequireNotNil(t, bundle1, "created bundle was nil")
bundle1Opts := []option.Optioner{
OptUpsert(true).ConvertReplaceOneOption(),
OptBypassDocumentValidation(false).ConvertReplaceOneOption(),
}
bundle1DedupOpts := []option.Optioner{
OptUpsert(true).ConvertReplaceOneOption(),
OptBypassDocumentValidation(false).ConvertReplaceOneOption(),
}
bundle2 := BundleReplaceOne(MaxTime(1))
bundle2Opts := []option.Optioner{
OptMaxTime(1).ConvertReplaceOneOption(),
}
bundle3 := BundleReplaceOne().
MaxTime(1).
MaxTime(2).
Upsert(false).
Upsert(true)
bundle3Opts := []option.Optioner{
OptMaxTime(1).ConvertReplaceOneOption(),
OptMaxTime(2).ConvertReplaceOneOption(),
OptUpsert(false).ConvertReplaceOneOption(),
OptUpsert(true).ConvertReplaceOneOption(),
}
bundle3DedupOpts := []option.Optioner{
OptMaxTime(2).ConvertReplaceOneOption(),
OptUpsert(true).ConvertReplaceOneOption(),
}
nilBundle := BundleReplaceOne()
var nilBundleOpts []option.Optioner
nestedBundle1 := createNestedReplaceOneBundle1(t)
nestedBundleOpts1 := []option.Optioner{
OptUpsert(true).ConvertReplaceOneOption(),
OptMaxTime(500).ConvertReplaceOneOption(),
OptUpsert(false).ConvertReplaceOneOption(),
OptMaxTime(1000).ConvertReplaceOneOption(),
}
nestedBundleDedupOpts1 := []option.Optioner{
OptUpsert(false).ConvertReplaceOneOption(),
OptMaxTime(1000).ConvertReplaceOneOption(),
}
nestedBundle2 := createNestedReplaceOneBundle2(t)
nestedBundleOpts2 := []option.Optioner{
OptUpsert(true).ConvertReplaceOneOption(),
OptMaxTime(500).ConvertReplaceOneOption(),
OptMaxTime(100).ConvertReplaceOneOption(),
OptUpsert(false).ConvertReplaceOneOption(),
OptMaxTime(1000).ConvertReplaceOneOption(),
}
nestedBundleDedupOpts2 := []option.Optioner{
OptUpsert(false).ConvertReplaceOneOption(),
OptMaxTime(1000).ConvertReplaceOneOption(),
}
nestedBundle3 := createNestedReplaceOneBundle3(t)
nestedBundleOpts3 := []option.Optioner{
OptMaxTime(100).ConvertReplaceOneOption(),
OptUpsert(true).ConvertReplaceOneOption(),
OptMaxTime(500).ConvertReplaceOneOption(),
OptMaxTime(100).ConvertReplaceOneOption(),
OptUpsert(false).ConvertReplaceOneOption(),
OptMaxTime(1000).ConvertReplaceOneOption(),
}
nestedBundleDedupOpts3 := []option.Optioner{
OptUpsert(false).ConvertReplaceOneOption(),
OptMaxTime(1000).ConvertReplaceOneOption(),
}
t.Run("TestAll", func(t *testing.T) {
c := &mongoopt.Collation{
Locale: "string locale",
}
proj := Projection(true)
sort := Sort(true)
opts := []ReplaceOneOption{
Collation(c),
MaxTime(5),
Projection(proj),
ReturnDocument(mongoopt.After),
Sort(sort),
Upsert(true),
}
params := make([]ReplaceOne, len(opts))
for i := range opts {
params[i] = opts[i]
}
bundle := BundleReplaceOne(params...)
deleteOpts, _, err := bundle.Unbundle(true)
testhelpers.RequireNil(t, err, "got non-nill error from unbundle: %s", err)
if len(deleteOpts) != len(opts) {
t.Errorf("expected unbundled opts len %d. got %d", len(opts), len(deleteOpts))
}
for i, opt := range opts {
if !reflect.DeepEqual(opt.ConvertReplaceOneOption(), deleteOpts[i]) {
t.Errorf("opt mismatch. expected %#v, got %#v", opt, deleteOpts[i])
}
}
})
t.Run("Nil Option Bundle", func(t *testing.T) {
sess := FindSessionOpt{}
opts, _, err := BundleReplaceOne(Upsert(true), BundleReplaceOne(nil), sess, nil).unbundle()
testhelpers.RequireNil(t, err, "got non-nil error from unbundle: %s", err)
if len(opts) != 1 {
t.Errorf("expected bundle length 1. got: %d", len(opts))
}
opts, _, err = BundleReplaceOne(nil, sess, BundleReplaceOne(nil), Upsert(true)).unbundle() | if len(opts) != 1 {
t.Errorf("expected bundle length 1. got: %d", len(opts))
}
})
t.Run("MakeOptions", func(t *testing.T) {
head := bundle1
bundleLen := 0
for head != nil && head.option != nil {
bundleLen++
head = head.next
}
if bundleLen != len(bundle1Opts) {
t.Errorf("expected bundle length %d. got: %d", len(bundle1Opts), bundleLen)
}
})
t.Run("Unbundle", func(t *testing.T) {
var cases = []struct {
name string
dedup bool
bundle *ReplaceOneBundle
expectedOpts []option.Optioner
}{
{"NilBundle", false, nilBundle, nilBundleOpts},
{"Bundle1", false, bundle1, bundle1Opts},
{"Bundle1Dedup", true, bundle1, bundle1DedupOpts},
{"Bundle2", false, bundle2, bundle2Opts},
{"Bundle2Dedup", true, bundle2, bundle2Opts},
{"Bundle3", false, bundle3, bundle3Opts},
{"Bundle3Dedup", true, bundle3, bundle3DedupOpts},
{"NestedBundle1_DedupFalse", false, nestedBundle1, nestedBundleOpts1},
{"NestedBundle1_DedupTrue", true, nestedBundle1, nestedBundleDedupOpts1},
{"NestedBundle2_DedupFalse", false, nestedBundle2, nestedBundleOpts2},
{"NestedBundle2_DedupTrue", true, nestedBundle2, nestedBundleDedupOpts2},
{"NestedBundle3_DedupFalse", false, nestedBundle3, nestedBundleOpts3},
{"NestedBundle3_DedupTrue", true, nestedBundle3, nestedBundleDedupOpts3},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
options, _, err := tc.bundle.Unbundle(tc.dedup)
testhelpers.RequireNil(t, err, "got non-nill error from unbundle: %s", err)
if len(options) != len(tc.expectedOpts) {
t.Errorf("options length does not match expected length. got %d expected %d", len(options),
len(tc.expectedOpts))
} else {
for i, opt := range options {
if !reflect.DeepEqual(opt, tc.expectedOpts[i]) {
t.Errorf("expected: %s\nreceived: %s", opt, tc.expectedOpts[i])
}
}
}
})
}
})
} | testhelpers.RequireNil(t, err, "got non-nil error from unbundle: %s", err)
|
emerge.py | import dnf
import dnf.cli
from glob import glob
import logging
import threading
import tempfile
import subprocess
import shutil
import os
logger = logging.getLogger('dnf')
class ErrorThread(threading.Thread):
_my_exception = None
def run(self, *args):
try:
self._run(*self._args)
except Exception as ex:
self._my_exception = ex
class | (ErrorThread):
@property
def branch(self):
return 'master'
@property
def template_mock_config(self):
return '/etc/mock/fedora-rawhide-x86_64.cfg'
def _run(self, workdir, pkg):
pkgdir = os.path.join(workdir, pkg)
# Grab sources
logger.info('Grabbing sources')
subprocess.run(['fedpkg', 'clone', '--anonymous', '--branch', self.branch, 'rpms/%s' % pkg, pkgdir],
check=True)
# Generate mockconfig
logger.info('Generating mock config')
mock_config = os.path.join(workdir, '_mockconfig', 'emerge-%s.cfg' % pkg)
with open(self.template_mock_config, 'r') as template:
with open(mock_config, 'w') as out:
out.write("config_opts['basedir'] = '%s'\n" % (os.path.join(workdir, '_mockroots')))
for line in template.readlines():
if "config_opts['root']" in line:
out.write("config_opts['root'] = 'emerge-%s'\n" % pkg)
else:
out.write(line)
# Run mockbuild
logger.info('Building')
subprocess.run(['fedpkg', 'mockbuild', '--root', mock_config, '--no-clean-all'], check=True, cwd=pkgdir)
@dnf.plugin.register_command
class EmergeCommand(dnf.cli.Command):
aliases = ['emerge']
workdir = None
def configure(self):
self.cli.demands.available_repos = True
self.cli.demands.sack_activation = True
self.cli.demands.root_user = True
self.cli.demands.resolving = True
@staticmethod
def set_argparser(parser):
parser.add_argument('package', nargs='+', metavar='package',
help='Package to emerge')
parser.add_argument('--workdir')
parser.add_argument('--skip-build', action='store_true')
parser.add_argument('--skip-clean', action='store_true')
def run_transaction(self):
self._rmworkdir()
def _rmworkdir(self):
if self.workdir and not self.opts.workdir and not self.opts.skip_clean:
shutil.rmtree(self.workdir)
def run(self):
try:
self._run()
except:
self._rmworkdir()
raise
def _run(self):
q = self.base.sack.query()
pkgs = self.base.sack.query().available().filter(name=self.opts.package).latest().run()
if not pkgs:
raise dnf.exceptions.Error('no package matched')
to_build_install = {}
for pkg in pkgs:
if pkg.source_name in to_build_install:
to_build_install[pkg.source_name].add(pkg.name)
else:
to_build_install[pkg.source_name] = set([pkg.name])
logger.info('Building/installing: %s' % to_build_install)
if self.opts.workdir:
self.workdir = self.opts.workdir
else:
            # mkdtemp keeps the directory around until we remove it ourselves in _rmworkdir()
            self.workdir = tempfile.mkdtemp(prefix='dnf-emerge-')
logger.debug('Workdir: %s', self.workdir)
self._build(self.workdir, to_build_install)
pkgs = self._find_packages(self.workdir, to_build_install)
err_pkgs = []
for pkg in self.base.add_remote_rpms(pkgs):
try:
self.base.package_install(pkg)
except dnf.exceptions.MarkingError:
logger.info('Unable to install %s' % self.base.output.term.bold(pkg.location))
err_pkgs.append(pkg)
        # NOTE: "strict" was undefined here; assuming the dnf main configuration option is meant
        if len(err_pkgs) != 0 and self.base.conf.strict:
raise dnf.exceptions.PackagesNotAvailableError(
'Unable to find a match', packages=err_pkgs)
@staticmethod
def _is_wanted_file(fname, haystack):
for needle in haystack:
if fname.endswith('.src.rpm'):
continue
if not fname.startswith(needle + '-'):
continue
rest = fname[len(needle)+1:].split('-')
if len(rest) > 2:
continue
if not rest[0][0].isdigit():
continue
return True
return False
def _find_packages(self, workdir, to_build_install):
to_install = []
for source, binaries in to_build_install.items():
sourcedir = os.path.join(workdir, source, 'results_%s' % source, '*', '*', '*.rpm')
for fpath in glob(sourcedir):
fname = os.path.basename(fpath)
if self._is_wanted_file(fname, binaries):
to_install.append(fpath)
logger.info('Marking for installation: %s', to_install)
return to_install
def _build(self, workdir, to_build_install):
if self.opts.skip_build:
logger.error('Skipping build per request')
return
os.makedirs(os.path.join(workdir, '_mockconfig'))
os.makedirs(os.path.join(workdir, '_mockroots'))
buildthreads = []
for pkg in to_build_install.keys():
bthread = BuildThread(name='emerge-build-%s' % pkg, args=(workdir, pkg))
buildthreads.append(bthread)
bthread.start()
logger.info('All builds started, waiting for them to finish...')
for bthread in buildthreads:
bthread.join()
if bthread._my_exception:
raise bthread._my_exception
logger.info('All builds finished') | BuildThread |
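# Illustrative invocation (package name is hypothetical):
#   dnf emerge somepackage --workdir /var/tmp/emerge --skip-clean
# clones the package's dist-git repo, builds it in mock against rawhide, and installs
# the resulting binary RPMs in the same transaction.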
api.ts | import axios from 'axios';
const api = axios.create({
baseURL: 'http://192.168.15.10:3333' | });
export default api; |
|
getTimeDist3D.py | from veroviz._common import *
from veroviz._validation import *
from veroviz._buildFlightProfile import buildNoLoiteringFlight
from veroviz._buildFlightProfile import getTimeDistFromFlight
from veroviz._utilities import privConvertDistance
from veroviz._utilities import privConvertTime
def getTimeDist3D(nodes=None, matrixType='all2all', fromNodeID=None, toNodeID=None, takeoffSpeedMPS=None, cruiseSpeedMPS=None, landSpeedMPS=None, cruiseAltMetersAGL=None,
routeType='square', climbRateMPS=None, descentRateMPS=None, outputDistUnits='meters', outputTimeUnits='seconds'):
| """
	This function calculates travel time and distance for vehicles that travel in 3-dimensional space (e.g., drones). The function returns three dictionaries: one for time, one for ground distance, and one for overall (3D) travel distance.
Parameters
----------
nodes: :ref:`Nodes`, Required, default as None
This :ref:`Nodes` dataframe contains the locations between which the travel time and distance will be calculated.
matrixType: string, Optional, default as 'all2all'
Specifies the structure of the travel matrices. Valid options include 'all2all', 'many2one', and 'one2many'. The default 'all2all' option will return square matrices (one for time, one for distance) describing the directed travel time and travel distance between all pairs of nodes. The 'one2many' option will return vectors describing the directed travel from one node to all other nodes. Similarly, the 'many2one' option will return vectors describing the directed travel from all nodes to a given node. See the table in the note below for details.
fromNodeID: int, Optional, default as None
Specifies the node ID (from the `id` column of the input `nodes` dataframe) of the origin node. This parameter is required for the 'one2many' matrix type; it is ignored by all other matrix types. See the table in the note below for details.
toNodeID: int, Optional, default as None
Specifies the node ID (from the `id` column of the input `nodes` dataframe) of the destination node. This parameter is required for the 'many2one' matrix type; it is ignored for all other matrix types. See the table in the note below for details.
takeoffSpeedMPS: float, Conditional, default as None
The speed of the aircraft, in meters per second, during the "takeoff" phase. This will apply only to 'square' and 'trapezoidal' route types. The takeoff phase is the first component of these route types, and is associated with an increase in altitude. The takeoff speed is assumed to be constant, and ignores acceleration. See :ref:`Flight Profile and Flight Path` for additional information.
cruiseSpeedMPS: float, Conditional, default as None
The speed of the aircraft, in meters per second, during the "cruising" phase. This will apply to all of the route options. Typically, the cruising phase occurs at a constant altitude, as specified by `cruiseAltMetersAGL`. However, for the 'triangular' route type, cruiseSpeedMPS specifies the constant travel speed during both the ascent to, and immediate descent from, the cruise altitude. In the 'triangular' route type, the aircraft has no horizontal travel at the cruise altitude. In all cases, the cruise speed is assumed to be constant, and ignores acceleration. See :ref:`Flight Profile and Flight Path` for additional information.
landSpeedMPS: float, Conditional, default as None
The speed of the aircraft, in meters per second, during the "landing" phase. This will apply to only the 'square' and 'trapezoidal' route types. The landing phase is the last component of these route types, and is associated with a decrease in altitude. The landing speed is assumed to be constant, and ignores deceleration. See :ref:`Flight Profile and Flight Path` for additional information.
cruiseAltMetersAGL: float, Conditional, default as None
The altitude, in meters above ground level, at which the aircraft is in the "cruise" phase. This phase is typically associated with horizontal movement at a fixed altitude. The exception is for the 'triangular' route type, in which case the aircraft instantaneously transitions from ascent to descent at the cruise altitude (i.e., there is no horizontal travel at this altitude). All but the 'straight' route type require/use the cruise altitude. See :ref:`Flight Profile and Flight Path` for additional details.
routeType: string, Optional, default as 'square'
Specifies the basic shape of the flight profile. Valid options include 'square', 'triangular', 'trapezoidal', and 'straight'. The square profile involves a vertical takeoff to a cruising altitude, horizontal travel at the cruising altitude, and a vertical landing. The trapezoidal profile describes a takeoff phase in which the aircraft increases altitude and travels horizontally towards the destination until reaching the cruising altitude, horizontal travel at the cruising altitude, and a landing phase in which the aircraft decreases altitude and travels horizontally until reaching the destination. For the trapezoidal profile, the horizontal movement during the takeoff and landing phases is a function of the `climbRateMPS` and `descentRateMPS`, respectively. The triangular profile describes an ascent to the cruising altitude followed immediately by a descent to the destination. Finally, the straight profile describes straight-line flight directly from the starting location to the ending location; the altitudes of these two locations may differ. See :ref:`Flight Profile and Flight Path` for a description of these flight profiles.
climbRateMPS: float, Conditional, default as None
This parameter is used only for the 'trapezoidal' route type, and is in units of meters per second. It describes the rate at which the aircraft increases its altitude, relative to the value of `takeoffSpeedMPS`. If `climbRateMPS == takeoffSpeedMPS`, then the takeoff phase will be purely vertical. If `climbRateMPS` is close to zero, then the takeoff phase will be characterized by a slow increase in altitude (and longer horizontal flight). The aircraft's actual travel speed during the climb will be `takeoffSpeedMPS`. See :ref:`Flight Profile and Flight Path` for additional details.
descentRateMPS: float, Conditional, default as None
This parameter is used only for the 'trapezoidal' route type, and is in units of meters per second. It describes the rate at which the aircraft decreases its altitude, relative to the value of `landSpeedMPS`. If `descentRateMPS == landSpeedMPS`, then the landing phase will be purely vertical. If `descentRateMPS` is close to zero, then the landing phase will be characterized by a slow decrease in altitude (and longer horizontal flight). The aircraft's actual travel speed during the descent will be `landSpeedMPS`. See :ref:`Flight Profile and Flight Path` for additional details.
outputDistUnits: string, Optional, default as 'meters'
Specifies the desired distance units for the function's output. Valid values are 'meters', 'm', 'kilometers', 'km', 'miles', 'mi', 'feet', 'ft', 'nm', and 'nmi' (nautical miles). See :ref:`Units` for options and abbreviations.
outputTimeUnits: string, Optional, default as 'seconds'
Specifies the desired time units for the function's output. Valid values are 'seconds', 'hours', and 'minutes'. See :ref:`Units` for options and abbreviations.
Returns
-------
totalTime: dictionary
A Python dictionary containing travel times. Time units are defined by `outputTimeUnits`. The format of key values is: `(fromID, toID)`. The travel time from ID 1 to ID 2 is provided by `totalTime[1, 2]`.
totalGroundDistance: dictionary
A Python dictionary containing ground travel distances (i.e., ignoring any vertical distances). Distance units are defined by `outputDistUnits`. The format of key values is: `(fromID, toID)`. The horizontal-only travel distance from ID 1 to ID 2 is provided by `totalGroundDistance[1, 2]`.
totalFlightDistance: dictionary
A Python dictionary containing total travel distances (i.e., including both the horizontal and vertical components of flight). Distance units are defined by `outputDistUnits`. The format of key values is: `(fromID, toID)`. The total travel distance from ID 1 to ID 2 is provided by `totalFlightDistance[1, 2]`.
Note
----
For `matrixType`, the options are 'all2all', 'one2many', and 'many2one'.
+----------------------+--------------+------------+------------------+
| `matrixType` options | `fromNodeID` | `toNodeID` | Return type |
+======================+==============+============+==================+
| 'all2all' | ignored | ignored | Square matrices |
+----------------------+--------------+------------+------------------+
| 'one2many' | required | ignored | Row vectors |
+----------------------+--------------+------------+------------------+
| 'many2one' | ignored | required | Column vectors |
+----------------------+--------------+------------+------------------+
In 'all2all', square matrices will be generated for all node pairs in the
provided `nodes` dataframe.
In 'one2many', a node `id` will be assigned in the `fromNodeID` field, which
comes from the `id` column in the provided `nodes` dataframe.
Row vectors will be returned for the time and distance from that node
to all the nodes in the provided `nodes` dataframe.
In 'many2one', column vectors will be returned for the time and distance
from all nodes in the provided `nodes` dataframe to the node indicated
by `toNodeID`.
Examples
--------
Import veroviz and check if the version is up-to-date
>>> import veroviz as vrv
>>> vrv.checkVersion()
Generate a :ref:`Nodes` dataframe from a list of coordinates. See :meth:`~veroviz.generateNodes.generateNodes` for other methods to generate "nodes" dataframes.
>>> locs = [
... [42.1538, -78.4253],
... [42.3465, -78.6234],
... [42.6343, -78.1146]]
>>> exampleNodes = vrv.createNodesFromLocs(locs=locs)
Example 1 - Calculate 'all2all' travel matrices for a drone with a 'square' flight profile. There are 3 nodes, so the matrices will be 3x3.
>>> [totalTime, totalGroundDistance, totalFlightDistance] = vrv.getTimeDist3D(
... nodes = exampleNodes,
... routeType = 'square',
... cruiseAltMetersAGL = 120,
... takeoffSpeedMPS = 5,
... cruiseSpeedMPS = 12,
... landSpeedMPS = 2,
... outputDistUnits = 'meters',
... outputTimeUnits = 'seconds')
>>> print("Travel time from node 2 to node 3 is %.2f seconds" % (totalTime[2, 3]))
>>> print("Ground distance from node 2 to node 3 is %.2f meters" % (totalGroundDistance[2, 3]))
>>> print("Total flight distance from node 2 to node 3 is %.2f meters" % (totalFlightDistance[2, 3]))
Example 2 - Calculate 'one2many' travel matrices for a drone with a 'trapezoidal' flight profile, starting from node 2. All functional arguments are included in this example.
>>> [timeSec, groundDist, totalDist] = vrv.getTimeDist3D(
... nodes = exampleNodes,
... matrixType = 'one2many',
... fromNodeID = 2,
... toNodeID = None,
... takeoffSpeedMPS = 5,
... cruiseSpeedMPS = 12,
... landSpeedMPS = 5,
... cruiseAltMetersAGL = 120,
... routeType = 'trapezoidal',
... climbRateMPS = 1,
... descentRateMPS = 1,
... outputDistUnits = 'meters',
... outputTimeUnits = 'seconds')
>>> print("Travel time from node 2 to node 3 is %.2f seconds" % (timeSec[2, 3]))
>>> print("Ground distance from node 2 to node 3 is %.2f meters" % (groundDist[2, 3]))
>>> print("Total flight distance from node 2 to node 3 is %.2f meters" % (totalDist[2, 3]))
"""
# validation
[valFlag, errorMsg, warningMsg] = valGetTimeDist3D(nodes, matrixType, fromNodeID, toNodeID, outputDistUnits, outputTimeUnits, routeType, takeoffSpeedMPS, climbRateMPS, cruiseSpeedMPS, cruiseAltMetersAGL, landSpeedMPS, descentRateMPS)
if (not valFlag):
print (errorMsg)
return [None, None, None]
elif (config['VRV_SETTING_SHOWWARNINGMESSAGE'] and warningMsg != ""):
print (warningMsg)
try:
matrixType = matrixType.lower()
except:
pass
# Specify the node IDs forming the rows and columns of the output dictionaries
fromIDs = []
toIDs = []
if (matrixType == "all2all"):
fromIDs = nodes['id'].tolist()
toIDs = nodes['id'].tolist()
elif (matrixType == "one2many"):
fromIDs = [fromNodeID]
toIDs = nodes['id'].tolist()
elif (matrixType == "many2one"):
fromIDs = nodes['id'].tolist()
toIDs = [toNodeID]
else:
return [None, None, None]
# Specify the list of coordinates; each coordinate is in [lat, lon, alt] format
fromLocs = []
toLocs = []
for i in range(0, len(fromIDs)):
fromLocs.append([
float(nodes.loc[nodes['id'] == fromIDs[i], 'lat']),
float(nodes.loc[nodes['id'] == fromIDs[i], 'lon']),
float(nodes.loc[nodes['id'] == fromIDs[i], 'altMeters'])])
for i in range(0, len(toIDs)):
toLocs.append([
float(nodes.loc[nodes['id'] == toIDs[i], 'lat']),
float(nodes.loc[nodes['id'] == toIDs[i], 'lon']),
float(nodes.loc[nodes['id'] == toIDs[i], 'altMeters'])])
# Run the flight-profile queries to build dictionaries of time and distance matrices
totalTimeSec = {}
totalGroundDistMeters = {}
totalFlightDistMeters = {}
for i in range(len(fromLocs)):
for j in range(i, len(toLocs)):
# Prepare for fields to generate flight
startLoc = fromLocs[i]
endLoc = toLocs[j]
if (i != j):
# The flight has no loitering
flight = buildNoLoiteringFlight(routeType, startLoc, cruiseAltMetersAGL, endLoc, takeoffSpeedMPS, climbRateMPS, cruiseSpeedMPS, landSpeedMPS, descentRateMPS)
# Time and ground/flight distance; the matrices are treated as symmetric, so fill both (i, j) and (j, i)
[time, groundDistance, flightDistance] = getTimeDistFromFlight(flight.copy())
totalTimeSec[i, j] = time
totalTimeSec[j, i] = time
totalGroundDistMeters[i, j] = groundDistance
totalGroundDistMeters[j, i] = groundDistance
totalFlightDistMeters[i, j] = flightDistance
totalFlightDistMeters[j, i] = flightDistance
else:
totalTimeSec[i, j] = 0
totalGroundDistMeters[i, j] = 0
totalFlightDistMeters[i, j] = 0
# Re-key the dictionaries by the actual from/to node IDs and convert to the requested output units
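# Note: the conversion helpers are used as multiplicative factors; privConvertTime(1.0, 's', outputTimeUnits)
# presumably returns the number of output units in one second (e.g., roughly 1/60 for 'minutes'), so
# multiplying the raw values in seconds/meters by these factors converts them in a single step.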
totalTime = {}
totalGroundDistance = {}
totalFlightDistance = {}
for i in range(len(fromIDs)):
for j in range(len(toIDs)):
totalTime[fromIDs[i], toIDs[j]] = totalTimeSec[i, j] * privConvertTime(1.0, 's', outputTimeUnits)
totalGroundDistance[fromIDs[i], toIDs[j]] = totalGroundDistMeters[i, j] * privConvertDistance(1.0, 'm', outputDistUnits)
totalFlightDistance[fromIDs[i], toIDs[j]] = totalFlightDistMeters[i, j] * privConvertDistance(1.0, 'm', outputDistUnits)
return [totalTime, totalGroundDistance, totalFlightDistance] |
|
lib.rs | #![recursion_limit = "128"]
#[macro_use]
extern crate stdweb;
// Own services implementation
pub mod ccxt;
pub mod gravatar;
use failure::Error;
use yew::services::fetch::FetchTask;
use yew::{html, Callback, Component, ComponentLink, Html, ShouldRender};
use ccxt::CcxtService;
use gravatar::{GravatarService, Profile};
pub struct Model {
link: ComponentLink<Self>,
gravatar: GravatarService,
ccxt: CcxtService,
callback: Callback<Result<Profile, Error>>,
profile: Option<Profile>,
exchanges: Vec<String>,
task: Option<FetchTask>,
}
pub enum | {
Gravatar,
GravatarReady(Result<Profile, Error>),
Exchanges,
}
impl Component for Model {
type Message = Msg;
type Properties = ();
fn create(_: Self::Properties, link: ComponentLink<Self>) -> Self {
Model {
link: link.clone(),
gravatar: GravatarService::new(),
ccxt: CcxtService::new(),
callback: link.callback(Msg::GravatarReady),
profile: None,
exchanges: Vec::new(),
task: None,
}
}
fn update(&mut self, msg: Self::Message) -> ShouldRender {
match msg {
Msg::Gravatar => {
let task = self
.gravatar
.profile("205e460b479e2e5b48aec07710c08d50", self.callback.clone());
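// Keep the handle alive for the duration of the request; in yew, dropping a `FetchTask`
// typically aborts the in-flight fetch.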
self.task = Some(task);
}
Msg::GravatarReady(Ok(profile)) => {
self.profile = Some(profile);
}
Msg::GravatarReady(Err(_)) => {
// Can't load gravatar profile
}
Msg::Exchanges => {
self.exchanges = self.ccxt.exchanges();
}
}
true
}
fn view(&self) -> Html {
let view_exchange = |exchange| {
html! {
<li>{ exchange }</li>
}
};
html! {
<div>
<button onclick=self.link.callback(|_| Msg::Exchanges)>{ "Get Exchanges" }</button>
<button onclick=self.link.callback(|_| Msg::Gravatar)>{ "Get Gravatar" }</button>
<ul>
{ for self.exchanges.iter().map(view_exchange) }
</ul>
</div>
}
}
}
| Msg |
fs_js.go | // Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build js,wasm
package syscall
import (
"errors"
"io"
"sync"
"syscall/js"
)
// Provided by package runtime.
func now() (sec int64, nsec int32)
var jsProcess = js.Global().Get("process")
var jsFS = js.Global().Get("fs")
var constants = jsFS.Get("constants")
var uint8Array = js.Global().Get("Uint8Array")
var (
nodeWRONLY = constants.Get("O_WRONLY").Int()
nodeRDWR = constants.Get("O_RDWR").Int()
nodeCREATE = constants.Get("O_CREAT").Int()
nodeTRUNC = constants.Get("O_TRUNC").Int()
nodeAPPEND = constants.Get("O_APPEND").Int()
nodeEXCL = constants.Get("O_EXCL").Int()
)
type jsFile struct {
path string
entries []string
pos int64
seeked bool
}
var filesMu sync.Mutex
var files = map[int]*jsFile{
0: {},
1: {},
2: {},
}
func fdToFile(fd int) (*jsFile, error) {
filesMu.Lock()
f, ok := files[fd]
filesMu.Unlock()
if !ok {
return nil, EBADF
}
return f, nil
}
func Open(path string, openmode int, perm uint32) (int, error) {
if err := checkPath(path); err != nil {
return 0, err
}
flags := 0
if openmode&O_WRONLY != 0 {
flags |= nodeWRONLY
}
if openmode&O_RDWR != 0 {
flags |= nodeRDWR
}
if openmode&O_CREATE != 0 {
flags |= nodeCREATE
}
if openmode&O_TRUNC != 0 {
flags |= nodeTRUNC
}
if openmode&O_APPEND != 0 {
flags |= nodeAPPEND
}
if openmode&O_EXCL != 0 {
flags |= nodeEXCL
}
if openmode&O_SYNC != 0 {
return 0, errors.New("syscall.Open: O_SYNC is not supported by js/wasm")
}
jsFD, err := fsCall("open", path, flags, perm)
if err != nil {
return 0, err
}
fd := jsFD.Int()
var entries []string
if stat, err := fsCall("fstat", fd); err == nil && stat.Call("isDirectory").Bool() {
dir, err := fsCall("readdir", path)
if err != nil {
return 0, err
}
entries = make([]string, dir.Length())
for i := range entries {
entries[i] = dir.Index(i).String()
}
}
f := &jsFile{
path: path,
entries: entries,
}
filesMu.Lock()
files[fd] = f
filesMu.Unlock()
return fd, nil
}
func Close(fd int) error {
filesMu.Lock()
delete(files, fd)
filesMu.Unlock()
_, err := fsCall("close", fd)
return err
}
func CloseOnExec(fd int) {
// nothing to do - no exec
}
func Mkdir(path string, perm uint32) error {
if err := checkPath(path); err != nil {
return err
}
_, err := fsCall("mkdir", path, perm)
return err
}
func ReadDirent(fd int, buf []byte) (int, error) {
f, err := fdToFile(fd)
if err != nil {
return 0, err
}
if f.entries == nil {
return 0, EINVAL
}
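// Pack each entry as a small dirent-like record: a 2-byte little-endian record length
// (2 + len(name)) followed by the name bytes. Entries that no longer fit in buf are
// left in f.entries for the next call.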
n := 0
for len(f.entries) > 0 {
entry := f.entries[0]
l := 2 + len(entry)
if l > len(buf) {
break
}
buf[0] = byte(l)
buf[1] = byte(l >> 8)
copy(buf[2:], entry)
buf = buf[l:]
n += l
f.entries = f.entries[1:]
}
return n, nil
}
func setStat(st *Stat_t, jsSt js.Value) {
st.Dev = int64(jsSt.Get("dev").Int())
st.Ino = uint64(jsSt.Get("ino").Int())
st.Mode = uint32(jsSt.Get("mode").Int())
st.Nlink = uint32(jsSt.Get("nlink").Int())
st.Uid = uint32(jsSt.Get("uid").Int())
st.Gid = uint32(jsSt.Get("gid").Int())
st.Rdev = int64(jsSt.Get("rdev").Int())
st.Size = int64(jsSt.Get("size").Int())
st.Blksize = int32(jsSt.Get("blksize").Int())
st.Blocks = int32(jsSt.Get("blocks").Int())
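// Node reports timestamps in milliseconds; split each into whole seconds and nanoseconds.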
atime := int64(jsSt.Get("atimeMs").Int())
st.Atime = atime / 1000
st.AtimeNsec = (atime % 1000) * 1000000
mtime := int64(jsSt.Get("mtimeMs").Int())
st.Mtime = mtime / 1000
st.MtimeNsec = (mtime % 1000) * 1000000
ctime := int64(jsSt.Get("ctimeMs").Int())
st.Ctime = ctime / 1000
st.CtimeNsec = (ctime % 1000) * 1000000
}
func Stat(path string, st *Stat_t) error {
if err := checkPath(path); err != nil {
return err
}
jsSt, err := fsCall("stat", path)
if err != nil {
return err
}
setStat(st, jsSt)
return nil
}
func Lstat(path string, st *Stat_t) error {
if err := checkPath(path); err != nil {
return err
}
jsSt, err := fsCall("lstat", path)
if err != nil {
return err
}
setStat(st, jsSt)
return nil
}
func Fstat(fd int, st *Stat_t) error {
jsSt, err := fsCall("fstat", fd)
if err != nil {
return err
}
setStat(st, jsSt)
return nil
}
func Unlink(path string) error {
if err := checkPath(path); err != nil {
return err
}
_, err := fsCall("unlink", path)
return err
}
func Rmdir(path string) error {
if err := checkPath(path); err != nil {
return err
}
_, err := fsCall("rmdir", path)
return err
}
func Chmod(path string, mode uint32) error {
if err := checkPath(path); err != nil {
return err
}
_, err := fsCall("chmod", path, mode)
return err
}
func Fchmod(fd int, mode uint32) error {
_, err := fsCall("fchmod", fd, mode)
return err
}
func Chown(path string, uid, gid int) error {
if err := checkPath(path); err != nil {
return err
}
_, err := fsCall("chown", path, uint32(uid), uint32(gid))
return err
}
func Fchown(fd int, uid, gid int) error {
_, err := fsCall("fchown", fd, uint32(uid), uint32(gid))
return err
}
func Lchown(path string, uid, gid int) error {
if err := checkPath(path); err != nil {
return err
}
if jsFS.Get("lchown").IsUndefined() {
// fs.lchown is unavailable on Linux until Node.js 10.6.0
// TODO(neelance): remove when we require at least this Node.js version
return ENOSYS
}
_, err := fsCall("lchown", path, uint32(uid), uint32(gid))
return err
}
func UtimesNano(path string, ts []Timespec) error {
if err := checkPath(path); err != nil {
return err
}
if len(ts) != 2 {
return EINVAL
}
atime := ts[0].Sec
mtime := ts[1].Sec
_, err := fsCall("utimes", path, atime, mtime)
return err
}
func Rename(from, to string) error {
if err := checkPath(from); err != nil {
return err
}
if err := checkPath(to); err != nil {
return err
}
_, err := fsCall("rename", from, to)
return err
}
func Truncate(path string, length int64) error {
if err := checkPath(path); err != nil {
return err
}
_, err := fsCall("truncate", path, length)
return err
}
func Ftruncate(fd int, length int64) error {
_, err := fsCall("ftruncate", fd, length)
return err
}
func Getcwd(buf []byte) (n int, err error) {
defer recoverErr(&err)
cwd := jsProcess.Call("cwd").String()
n = copy(buf, cwd)
return
}
func Chdir(path string) (err error) {
if err := checkPath(path); err != nil {
return err
}
defer recoverErr(&err)
jsProcess.Call("chdir", path)
return
}
func Fchdir(fd int) error {
f, err := fdToFile(fd)
if err != nil {
return err
}
return Chdir(f.path)
}
func Readlink(path string, buf []byte) (n int, err error) {
if err := checkPath(path); err != nil {
return 0, err
}
dst, err := fsCall("readlink", path)
if err != nil {
return 0, err
}
n = copy(buf, dst.String())
return n, nil
}
func Link(path, link string) error {
if err := checkPath(path); err != nil {
return err
}
if err := checkPath(link); err != nil {
return err
}
_, err := fsCall("link", path, link)
return err
}
func Symlink(path, link string) error {
if err := checkPath(path); err != nil {
return err
}
if err := checkPath(link); err != nil {
return err
}
_, err := fsCall("symlink", path, link)
return err
}
func Fsync(fd int) error {
_, err := fsCall("fsync", fd)
return err
}
func Read(fd int, b []byte) (int, error) {
f, err := fdToFile(fd)
if err != nil {
return 0, err
}
if f.seeked {
n, err := Pread(fd, b, f.pos)
f.pos += int64(n)
return n, err
}
buf := uint8Array.New(len(b))
n, err := fsCall("read", fd, buf, 0, len(b), nil)
if err != nil {
return 0, err
}
js.CopyBytesToGo(b, buf)
n2 := n.Int()
f.pos += int64(n2)
return n2, err
}
func Write(fd int, b []byte) (int, error) {
f, err := fdToFile(fd)
if err != nil {
return 0, err
}
if f.seeked {
n, err := Pwrite(fd, b, f.pos)
f.pos += int64(n)
return n, err
}
if faketime && (fd == 1 || fd == 2) {
n := faketimeWrite(fd, b)
if n < 0 {
return 0, errnoErr(Errno(-n))
}
return n, nil
}
buf := uint8Array.New(len(b))
js.CopyBytesToJS(buf, b)
n, err := fsCall("write", fd, buf, 0, len(b), nil)
if err != nil {
return 0, err
}
n2 := n.Int()
f.pos += int64(n2)
return n2, err
}
func Pread(fd int, b []byte, offset int64) (int, error) {
buf := uint8Array.New(len(b))
n, err := fsCall("read", fd, buf, 0, len(b), offset)
if err != nil {
return 0, err
}
js.CopyBytesToGo(b, buf)
return n.Int(), nil
}
func Pwrite(fd int, b []byte, offset int64) (int, error) {
buf := uint8Array.New(len(b))
js.CopyBytesToJS(buf, b)
n, err := fsCall("write", fd, buf, 0, len(b), offset)
if err != nil {
return 0, err
}
return n.Int(), nil
} |
func Seek(fd int, offset int64, whence int) (int64, error) {
f, err := fdToFile(fd)
if err != nil {
return 0, err
}
var newPos int64
switch whence {
case io.SeekStart:
newPos = offset
case io.SeekCurrent:
newPos = f.pos + offset
case io.SeekEnd:
var st Stat_t
if err := Fstat(fd, &st); err != nil {
return 0, err
}
newPos = st.Size + offset
default:
return 0, errnoErr(EINVAL)
}
if newPos < 0 {
return 0, errnoErr(EINVAL)
}
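// Record the logical position. Once seeked is set, Read and Write go through
// Pread/Pwrite at f.pos instead of relying on the implicit file offset.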
f.seeked = true
f.pos = newPos
return newPos, nil
}
func Dup(fd int) (int, error) {
return 0, ENOSYS
}
func Dup2(fd, newfd int) error {
return ENOSYS
}
func Pipe(fd []int) error {
return ENOSYS
}
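// fsCall invokes a callback-style method on Node's fs module and blocks until the callback
// fires, bridging the (error, value) pair back through a channel so the call looks synchronous.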
func fsCall(name string, args ...interface{}) (js.Value, error) {
type callResult struct {
val js.Value
err error
}
c := make(chan callResult, 1)
jsFS.Call(name, append(args, js.FuncOf(func(this js.Value, args []js.Value) interface{} {
var res callResult
if len(args) >= 1 { // on Node.js 8, fs.utimes calls the callback without any arguments
if jsErr := args[0]; !jsErr.IsNull() {
res.err = mapJSError(jsErr)
}
}
res.val = js.Undefined()
if len(args) >= 2 {
res.val = args[1]
}
c <- res
return nil
}))...)
res := <-c
return res.val, res.err
}
// checkPath checks that the path is not empty and that it contains no null characters.
func checkPath(path string) error {
if path == "" {
return EINVAL
}
for i := 0; i < len(path); i++ {
if path[i] == '\x00' {
return EINVAL
}
}
return nil
}
func recoverErr(errPtr *error) {
if err := recover(); err != nil {
jsErr, ok := err.(js.Error)
if !ok {
panic(err)
}
*errPtr = mapJSError(jsErr.Value)
}
}
// mapJSError maps an error given by Node.js to the appropriate Go error
func mapJSError(jsErr js.Value) error {
errno, ok := errnoByCode[jsErr.Get("code").String()]
if !ok {
panic(jsErr)
}
return errnoErr(Errno(errno))
} | |
vector.module.ts | import { Module } from "@nestjs/common";
import { VectorController } from "./vector.controller";
import { VectorService } from "./vector.service";
@Module({
controllers: [VectorController],
providers: [VectorService],
})
export class | {}
| VectorModule |
strict.rs | //! A strict query engine implementation. It always expects a strict match of query terms
use std::collections::BTreeMap;
use std::ops::RangeInclusive;
use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::{debug, instrument, trace};
use crate::search::{Matches, Search, SearchOptions};
/// Implements strict query processing.
#[derive(Clone)]
pub struct StrictEngine {
// A BTreeMap will keep the records in a predictable order, which makes the
// search results predictable. This greatly simplifies the process of doing offsets
// and limits.
index: Arc<RwLock<BTreeMap<String, crate::Invoice>>>,
}
impl Default for StrictEngine {
fn default() -> Self {
StrictEngine {
index: Arc::new(RwLock::new(BTreeMap::new())),
}
}
}
#[async_trait::async_trait]
impl Search for StrictEngine {
#[instrument(level = "trace", skip(self))]
async fn | (
&self,
term: &str,
filter: &str,
options: SearchOptions,
) -> anyhow::Result<Matches> {
trace!("beginning search");
let mut found: Vec<crate::Invoice> = self
.index
.read()
.await
.iter()
.filter(|(_, i)| {
// Per the spec:
// - if `term` is present, then it must be contained within the name field of the bindle.
// - if a version filter is present, then the version of the bindle must abide by the filter.
debug!(term, filter, "comparing term and filter");
i.bindle.id.name().contains(term)
&& (filter.is_empty() || i.version_in_range(filter))
})
.map(|(_, v)| (*v).clone())
.collect();
debug!(total_matches = found.len(), "Found matches");
let mut matches = Matches::new(&options, term.to_owned());
matches.strict = true;
matches.yanked = false;
matches.total = found.len() as u64;
if matches.offset >= matches.total {
// We're past the end of the search results. Return an empty matches object.
matches.more = false;
return Ok(matches);
}
// Apply offset and limit
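// Worked example (illustrative numbers only): with total = 10, offset = 3 and limit = 5,
// last_index becomes 7, `more` is true, and the invoices at indices 3..=7 are returned.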
let mut last_index = matches.offset + matches.limit as u64 - 1;
if last_index >= matches.total {
last_index = matches.total - 1;
}
matches.more = matches.total > last_index + 1;
trace!(last_index, matches.more, "Getting next page of results");
let range = RangeInclusive::new(matches.offset as usize, last_index as usize);
matches.invoices = found.drain(range).collect();
trace!("Returning {} found invoices", matches.invoices.len());
Ok(matches)
}
async fn index(&self, invoice: &crate::Invoice) -> anyhow::Result<()> {
self.index
.write()
.await
.insert(invoice.name(), invoice.clone());
Ok(())
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::Invoice;
#[tokio::test]
async fn strict_engine_should_index() {
let inv = invoice_fixture("my/bindle".to_owned(), "1.2.3".to_owned());
let inv2 = invoice_fixture("my/bindle".to_owned(), "1.3.0".to_owned());
let searcher = StrictEngine::default();
searcher
.index(&inv)
.await
.expect("successfully indexed my/bindle/1.2.3");
searcher
.index(&inv2)
.await
.expect("successfully indexed my/bindle/1.3.0");
assert_eq!(2, searcher.index.read().await.len());
// Search for one result
let matches = searcher
.query("my/bindle", "1.2.3", SearchOptions::default())
.await
.expect("found some matches");
assert_eq!(1, matches.invoices.len());
// Search for two results
let matches = searcher
.query("my/bindle", "^1.2.3", SearchOptions::default())
.await
.expect("found some matches");
assert_eq!(2, matches.invoices.len());
// Search for non-existent bindle
let matches = searcher
.query("my/bindle2", "1.2.3", SearchOptions::default())
.await
.expect("found some matches");
assert!(matches.invoices.is_empty());
// Search for non-existent version
let matches = searcher
.query("my/bindle", "1.2.99", SearchOptions::default())
.await
.expect("found some matches");
assert!(matches.invoices.is_empty());
// TODO: Need to test yanked bindles
}
fn invoice_fixture(name: String, version: String) -> Invoice {
let labels = vec![
crate::Label {
sha256: "abcdef1234567890987654321".to_owned(),
media_type: "text/toml".to_owned(),
name: "foo.toml".to_owned(),
size: 101,
..Default::default()
},
crate::Label {
sha256: "bbcdef1234567890987654321".to_owned(),
media_type: "text/toml".to_owned(),
name: "foo2.toml".to_owned(),
size: 101,
..Default::default()
},
crate::Label {
sha256: "cbcdef1234567890987654321".to_owned(),
media_type: "text/toml".to_owned(),
name: "foo3.toml".to_owned(),
size: 101,
..Default::default()
},
];
Invoice {
bindle_version: crate::BINDLE_VERSION_1.to_owned(),
yanked: None,
yanked_signature: None,
annotations: None,
bindle: crate::BindleSpec {
id: format!("{}/{}", name, version).parse().unwrap(),
description: Some("bar".to_owned()),
authors: Some(vec!["m butcher".to_owned()]),
},
parcel: Some(
labels
.iter()
.map(|l| crate::Parcel {
label: l.clone(),
conditions: None,
})
.collect(),
),
group: None,
signature: None,
}
}
}
| query |
base.py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import typing
import pkg_resources
import google.auth # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.ads.googleads.v7.resources.types import combined_audience
from google.ads.googleads.v7.services.types import combined_audience_service
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
'google-ads',
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class CombinedAudienceServiceTransport(metaclass=abc.ABCMeta):
"""Abstract transport class for CombinedAudienceService."""
AUTH_SCOPES = (
'https://www.googleapis.com/auth/adwords',
)
def __init__(
self, *,
host: str = 'googleads.googleapis.com',
credentials: ga_credentials.Credentials = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ':' not in host:
host += ':443'
self._host = host
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)
# Save the credentials.
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
self._prep_wrapped_messages(client_info)
def _prep_wrapped_messages(self, client_info):
# Precomputed wrapped methods
|
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def get_combined_audience(self) -> typing.Callable[
[combined_audience_service.GetCombinedAudienceRequest],
combined_audience.CombinedAudience]:
raise NotImplementedError
__all__ = (
'CombinedAudienceServiceTransport',
)
| self._wrapped_methods = {
self.get_combined_audience: gapic_v1.method.wrap_method(
self.get_combined_audience,
default_timeout=None,
client_info=client_info,
),
} |
geometry_collection.rs | use crate::{CoordinateType, Geometry};
use std::iter::FromIterator;
use std::ops::{Index, IndexMut};
/// A collection of [`Geometry`](enum.Geometry.html) types.
///
/// It can be created from a `Vec` of Geometries, or from an Iterator which yields Geometries.
///
/// Looping over this object yields its component **Geometry
/// enum members** (_not_ the underlying geometry
/// primitives), and it supports iteration and indexing as
/// well as the various
/// [`MapCoords`](algorithm/map_coords/index.html)
/// functions, which _are_ directly applied to the
/// underlying geometry primitives.
///
/// # Examples
/// ## Looping
///
/// ```
/// use std::convert::TryFrom;
/// use geo_types::{Point, point, Geometry, GeometryCollection};
/// let p = point!(x: 1.0, y: 1.0);
/// let pe = Geometry::Point(p);
/// let gc = GeometryCollection(vec![pe]);
/// for geom in gc {
/// println!("{:?}", Point::try_from(geom).unwrap().x());
/// }
/// ```
/// ## Implements `iter()`
///
/// ```
/// use std::convert::TryFrom;
/// use geo_types::{Point, point, Geometry, GeometryCollection};
/// let p = point!(x: 1.0, y: 1.0);
/// let pe = Geometry::Point(p);
/// let gc = GeometryCollection(vec![pe]);
/// gc.iter().for_each(|geom| println!("{:?}", geom));
/// ```
///
/// ## Mutable Iteration
///
/// ```
/// use std::convert::TryFrom;
/// use geo_types::{Point, point, Geometry, GeometryCollection};
/// let p = point!(x: 1.0, y: 1.0);
/// let pe = Geometry::Point(p);
/// let mut gc = GeometryCollection(vec![pe]);
/// gc.iter_mut().for_each(|geom| {
/// if let Geometry::Point(p) = geom {
/// p.set_x(0.2);
/// }
/// });
/// let updated = gc[0].clone();
/// assert_eq!(Point::try_from(updated).unwrap().x(), 0.2);
/// ```
///
/// ## Indexing
///
/// ```
/// use std::convert::TryFrom;
/// use geo_types::{Point, point, Geometry, GeometryCollection};
/// let p = point!(x: 1.0, y: 1.0);
/// let pe = Geometry::Point(p);
/// let gc = GeometryCollection(vec![pe]);
/// println!("{:?}", gc[0]);
/// ```
///
#[derive(Eq, PartialEq, Clone, Debug, Hash)]
pub struct GeometryCollection<T>(pub Vec<Geometry<T>>)
where
T: CoordinateType;
impl<T: CoordinateType> GeometryCollection<T> {
/// Return an empty GeometryCollection
pub fn new() -> GeometryCollection<T> {
GeometryCollection(Vec::new())
}
/// Number of geometries in this GeometryCollection
pub fn len(&self) -> usize {
self.0.len()
}
/// Is this GeometryCollection empty
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
/// Convert any Geometry (or anything that can be converted to a Geometry) into a
/// GeometryCollection
impl<T: CoordinateType, IG: Into<Geometry<T>>> From<IG> for GeometryCollection<T> {
fn from(x: IG) -> Self {
GeometryCollection(vec![x.into()])
}
}
/// Collect Geometries (or what can be converted to a Geometry) into a GeometryCollection
impl<T: CoordinateType, IG: Into<Geometry<T>>> FromIterator<IG> for GeometryCollection<T> {
fn from_iter<I: IntoIterator<Item = IG>>(iter: I) -> Self {
GeometryCollection(iter.into_iter().map(|g| g.into()).collect())
}
}
impl<T: CoordinateType> Index<usize> for GeometryCollection<T> {
type Output = Geometry<T>;
fn index(&self, index: usize) -> &Geometry<T> {
self.0.index(index)
}
}
impl<T: CoordinateType> IndexMut<usize> for GeometryCollection<T> {
fn index_mut(&mut self, index: usize) -> &mut Geometry<T> {
self.0.index_mut(index)
}
}
// structure helper for consuming iterator
pub struct IntoIteratorHelper<T: CoordinateType> {
iter: ::std::vec::IntoIter<Geometry<T>>,
}
// implement the IntoIterator trait for a consuming iterator. Iteration will
// consume the GeometryCollection
impl<T: CoordinateType> IntoIterator for GeometryCollection<T> {
type Item = Geometry<T>;
type IntoIter = IntoIteratorHelper<T>;
// note that into_iter() is consuming self
fn into_iter(self) -> Self::IntoIter {
IntoIteratorHelper {
iter: self.0.into_iter(),
}
}
}
// implement Iterator trait for the helper struct, to be used by adapters
impl<T: CoordinateType> Iterator for IntoIteratorHelper<T> {
type Item = Geometry<T>;
// just return the reference
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
}
// structure helper for non-consuming iterator
pub struct IterHelper<'a, T: CoordinateType> {
iter: ::std::slice::Iter<'a, Geometry<T>>,
}
// implement the IntoIterator trait for a non-consuming iterator. Iteration will
// borrow the GeometryCollection
impl<'a, T: CoordinateType> IntoIterator for &'a GeometryCollection<T> {
type Item = &'a Geometry<T>;
type IntoIter = IterHelper<'a, T>;
// note that into_iter() is consuming self
fn into_iter(self) -> Self::IntoIter |
}
// implement the Iterator trait for the helper struct, to be used by adapters
impl<'a, T: CoordinateType> Iterator for IterHelper<'a, T> {
type Item = &'a Geometry<T>;
// just return the next geometry reference
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
}
// structure helper for mutable non-consuming iterator
pub struct IterMutHelper<'a, T: CoordinateType> {
iter: ::std::slice::IterMut<'a, Geometry<T>>,
}
// implement the IntoIterator trait for a mutable non-consuming iterator. Iteration will
// mutably borrow the GeometryCollection
impl<'a, T: CoordinateType> IntoIterator for &'a mut GeometryCollection<T> {
type Item = &'a mut Geometry<T>;
type IntoIter = IterMutHelper<'a, T>;
// note that into_iter() is consuming self
fn into_iter(self) -> Self::IntoIter {
IterMutHelper {
iter: self.0.iter_mut(),
}
}
}
// implement the Iterator trait for the helper struct, to be used by adapters
impl<'a, T: CoordinateType> Iterator for IterMutHelper<'a, T> {
type Item = &'a mut Geometry<T>;
// just return the next geometry reference
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
}
impl<'a, T: CoordinateType> GeometryCollection<T> {
pub fn iter(&'a self) -> IterHelper<'a, T> {
self.into_iter()
}
pub fn iter_mut(&'a mut self) -> IterMutHelper<'a, T> {
self.into_iter()
}
}
| {
IterHelper {
iter: self.0.iter(),
}
} |
configuration_collection.js | 'use strict'
const mongodb = require('mongodb')
const utils = require('../utils')
const { RecordNotFoundError, RuntimeError } = require('../errors')
const collectionName = 'configuration'
/** @type {(hex: string) => mongodb.ObjectId | null} */
const makeObjectID = hex => {
try {
return new mongodb.ObjectId(hex)
} catch (e) {
return null
}
}
class ConfigurationNotFoundError extends RecordNotFoundError {
constructor(message = 'Configuration not found') {
super(message)
}
}
class | {
/**
* @param {mongodb.Db} db Connection to the database.
*/
constructor(db) {
/** @private */
this._db = db
}
/**
* Configuration collection instance.
* @returns {mongodb.Collection<Model.Configuration>}
*/
collection() {
return this._db.collection(collectionName)
}
/**
* Returns last inserted configuration object.
* @returns {Promise<Configuration.Runtime>}
* @throws {RuntimeError} If `configuration` collection is empty.
*/
async currentConfiguration() {
try {
const document = await this.findLast()
if (!utils.isEmpty(document.data)) {
return document.data
}
throw new ConfigurationNotFoundError()
} catch (error) {
throw new RuntimeError('Runtime configuration is not set in the database')
}
}
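// Illustrative usage sketch (assumes `db` is an already-connected mongodb.Db handle):
//   const configurations = make(db)
//   const runtime = await configurations.currentConfiguration()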
/**
* Returns last inserted document.
* @returns {Promise<Model.Configuration>}
* @throws {RecordNotFoundError} If `configuration` collection is empty.
*/
async findLast() {
const document = await this.collection().findOne(
{},
{ sort: { $natural: -1 } }
)
if (document) {
return document
}
throw new ConfigurationNotFoundError()
}
/**
* Returns the document with provided ID.
* @param {string} hexString Document ID as hexadecimal string.
* @returns {Promise<Model.Configuration>}
* @throws {RecordNotFoundError} If `hexString` not valid ObjectID or document with this id not found.
*/
async findOne(hexString) {
const id = makeObjectID(hexString)
const document = id ? await this.collection().findOne({ _id: id }) : null
if (document) {
return document
}
throw new ConfigurationNotFoundError()
}
/**
* Returns iterator over documents in capped collection.
* @returns {mongodb.FindCursor<Model.Configuration>}
*/
history() {
return this.collection().find(
{},
{ projection: { author: 1, user: 1 }, sort: { $natural: -1 } }
)
}
/**
* Adds document to the config capped collection.
* @param {Omit<Model.Configuration, '_id'>} document Configuration object.
* @returns {Promise<mongodb.ObjectId>} Inserted document id.
* @throws {RuntimeError} If `mongodb` fail to insert document and didn't throw an error.
*/
async insertOne(document) {
const { insertedId } = await this.collection().insertOne(document)
if (insertedId) {
return insertedId
}
// XXX Is it even possible for `mongo` to fail to insert a document without throwing an error?
throw new RuntimeError('Fail to write configuration data to the database')
}
}
/**
* @param {mongodb.Db} db Connection to the database.
* @returns {ConfigurationCollection}
*/
function make(db) {
return new ConfigurationCollection(db)
}
module.exports = make
module.exports.ConfigurationNotFoundError = ConfigurationNotFoundError
module.exports.collectionName = collectionName
| ConfigurationCollection |
main.py |
module1 = Module1()
do_add = module1.add(1,1)
module2 = Module2()
do_minus = module2.minus(1,1) | from .sub_folder1.module1 import Module1
from .sub_folder1.module2 import Module2 |
|
comm_rank.rs | use rustc_serialize::json;
use libc;
use comm_request::CommRequest;
use comm_request::CommRequestType;
use comm_request::ControlTy;
use std::io::prelude::*;
use std::net::TcpStream;
use utils;
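/// Asks the local coordinator for this process's rank: a `GetMyRank` control request is
/// JSON-encoded and sent over TCP to 127.0.0.1:31337, and the reply is parsed as a base-10 rank.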
pub fn mpi_comm_rank() -> usize | {
let pid = utils::pid();
let tag: u64 = u64::max_value();
let mut rank: Option<usize> = None;
let commreq = CommRequest::<u32>::new(None,
None,
tag,
None,
CommRequestType::Control(ControlTy::GetMyRank),
pid);
let commreq_json = json::encode(&commreq).unwrap();
let mut stream = TcpStream::connect("127.0.0.1:31337").unwrap();
let _ = stream.write(&commreq_json.as_bytes());
let str_in = utils::read_stream(&mut stream);
if !str_in.is_empty() {
rank = usize::from_str_radix(&str_in, 10).ok();
}
rank.expect("Rank fetching failed")
} |
|
ra.rs | #[doc = "Register `RA` reader"]
pub struct R(crate::R<RA_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<RA_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::convert::From<crate::R<RA_SPEC>> for R {
fn from(reader: crate::R<RA_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `RA` writer"]
pub struct W(crate::W<RA_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<RA_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl core::convert::From<crate::W<RA_SPEC>> for W {
fn from(writer: crate::W<RA_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `RAD` reader - Range Slave Address"]
pub struct RAD_R(crate::FieldReader<u8, u8>);
impl RAD_R {
pub(crate) fn new(bits: u8) -> Self {
RAD_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for RAD_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `RAD` writer - Range Slave Address"]
pub struct RAD_W<'a> {
w: &'a mut W,
}
impl<'a> RAD_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
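// Clear bits 7:1, then place the 7-bit slave address into that field (bit 0 is left untouched).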
self.w.bits = (self.w.bits & !(0x7f << 1)) | ((value as u8 & 0x7f) << 1);
self.w
}
}
impl R {
#[doc = "Bits 1:7 - Range Slave Address"]
#[inline(always)]
pub fn rad(&self) -> RAD_R |
}
impl W {
#[doc = "Bits 1:7 - Range Slave Address"]
#[inline(always)]
pub fn rad(&mut self) -> RAD_W {
RAD_W { w: self }
}
#[doc = "Writes raw bits to the register."]
pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "I2C Range Address register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ra](index.html) module"]
pub struct RA_SPEC;
impl crate::RegisterSpec for RA_SPEC {
type Ux = u8;
}
#[doc = "`read()` method returns [ra::R](R) reader structure"]
impl crate::Readable for RA_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [ra::W](W) writer structure"]
impl crate::Writable for RA_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets RA to value 0"]
impl crate::Resettable for RA_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| {
RAD_R::new(((self.bits >> 1) & 0x7f) as u8)
} |
mod.rs | use crate::borrow_check::nll::constraints::OutlivesConstraint;
use crate::borrow_check::nll::region_infer::RegionInferenceContext;
use crate::borrow_check::nll::type_check::Locations;
use crate::borrow_check::nll::universal_regions::DefiningTy;
use crate::borrow_check::nll::ConstraintDescription;
use crate::util::borrowck_errors::{BorrowckErrors, Origin};
use rustc::hir::def_id::DefId;
use rustc::infer::error_reporting::nice_region_error::NiceRegionError;
use rustc::infer::InferCtxt;
use rustc::infer::NLLRegionVariableOrigin;
use rustc::mir::{ConstraintCategory, Location, Mir};
use rustc::ty::{self, RegionVid};
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_errors::{Diagnostic, DiagnosticBuilder};
use std::collections::VecDeque;
use syntax::errors::Applicability;
use syntax::symbol::keywords;
use syntax_pos::Span;
mod region_name;
mod var_name;
crate use self::region_name::{RegionName, RegionNameSource};
impl ConstraintDescription for ConstraintCategory {
fn description(&self) -> &'static str {
// Must end with a space. Allows for empty names to be provided.
match self {
ConstraintCategory::Assignment => "assignment ",
ConstraintCategory::Return => "returning this value ",
ConstraintCategory::Yield => "yielding this value ",
ConstraintCategory::UseAsConst => "using this value as a constant ",
ConstraintCategory::UseAsStatic => "using this value as a static ",
ConstraintCategory::Cast => "cast ",
ConstraintCategory::CallArgument => "argument ",
ConstraintCategory::TypeAnnotation => "type annotation ",
ConstraintCategory::ClosureBounds => "closure body ",
ConstraintCategory::SizedBound => "proving this value is `Sized` ",
ConstraintCategory::CopyBound => "copying this value ",
ConstraintCategory::OpaqueType => "opaque type ",
ConstraintCategory::Boring
| ConstraintCategory::BoringNoLocation
| ConstraintCategory::Internal => "",
}
}
}
#[derive(Copy, Clone, PartialEq, Eq)]
enum Trace {
StartRegion,
FromOutlivesConstraint(OutlivesConstraint),
NotVisited,
}
impl<'tcx> RegionInferenceContext<'tcx> {
/// Tries to find the best constraint to blame for the fact that
/// `R: from_region`, where `R` is some region that meets
/// `target_test`. This works by following the constraint graph,
/// creating a constraint path that forces `R` to outlive
/// `from_region`, and then finding the best choices within that
/// path to blame.
fn best_blame_constraint(
&self,
mir: &Mir<'tcx>,
from_region: RegionVid,
target_test: impl Fn(RegionVid) -> bool,
) -> (ConstraintCategory, bool, Span) {
debug!("best_blame_constraint(from_region={:?})", from_region);
// Find all paths
let (path, target_region) =
self.find_constraint_paths_between_regions(from_region, target_test)
.unwrap();
debug!(
"best_blame_constraint: path={:#?}",
path.iter()
.map(|&c| format!(
"{:?} ({:?}: {:?})",
c,
self.constraint_sccs.scc(c.sup),
self.constraint_sccs.scc(c.sub),
))
.collect::<Vec<_>>()
);
// Classify each of the constraints along the path.
let mut categorized_path: Vec<(ConstraintCategory, bool, Span)> = path.iter()
.map(|constraint| {
if constraint.category == ConstraintCategory::ClosureBounds {
self.retrieve_closure_constraint_info(mir, &constraint)
} else {
(constraint.category, false, constraint.locations.span(mir))
}
})
.collect();
debug!(
"best_blame_constraint: categorized_path={:#?}",
categorized_path
);
// To find the best span to cite, we first try to look for the
// final constraint that is interesting and where the `sup` is
// not unified with the ultimate target region. The reason
// for this is that we have a chain of constraints that lead
// from the source to the target region, something like:
//
// '0: '1 ('0 is the source)
// '1: '2
// '2: '3
// '3: '4
// '4: '5
// '5: '6 ('6 is the target)
//
// Some of those regions are unified with `'6` (in the same
// SCC). We want to screen those out. After that point, the
// "closest" constraint we have to the end is going to be the
// most likely to be the point where the value escapes -- but
// we still want to screen for an "interesting" point to
// highlight (e.g., a call site or something).
let target_scc = self.constraint_sccs.scc(target_region);
let best_choice = (0..path.len()).rev().find(|&i| {
let constraint = path[i];
let constraint_sup_scc = self.constraint_sccs.scc(constraint.sup);
match categorized_path[i].0 {
ConstraintCategory::OpaqueType | ConstraintCategory::Boring |
ConstraintCategory::BoringNoLocation | ConstraintCategory::Internal => false,
ConstraintCategory::TypeAnnotation | ConstraintCategory::Return |
ConstraintCategory::Yield => true,
_ => constraint_sup_scc != target_scc,
}
});
if let Some(i) = best_choice {
return categorized_path[i];
}
// If that search fails, that is.. unusual. Maybe everything
// is in the same SCC or something. In that case, find what
// appears to be the most interesting point to report to the
// user via an even more ad-hoc guess.
categorized_path.sort_by(|p0, p1| p0.0.cmp(&p1.0));
debug!("best_blame_constraint: sorted_path={:#?}", categorized_path);
*categorized_path.first().unwrap()
}
/// Walks the graph of constraints (where `'a: 'b` is considered
/// an edge `'a -> 'b`) to find all paths from `from_region` to
/// `to_region`. The paths are accumulated into the vector
/// `results`. The paths are stored as a series of
/// `ConstraintIndex` values -- in other words, a list of *edges*.
///
/// Returns: a series of constraints as well as the region `R`
/// that passed the target test.
fn find_constraint_paths_between_regions(
&self,
from_region: RegionVid,
target_test: impl Fn(RegionVid) -> bool,
) -> Option<(Vec<OutlivesConstraint>, RegionVid)> {
let mut context = IndexVec::from_elem(Trace::NotVisited, &self.definitions);
context[from_region] = Trace::StartRegion;
// Use a deque so that we do a breadth-first search. We will
// stop at the first match, which ought to be the shortest
// path (fewest constraints).
let mut deque = VecDeque::new();
deque.push_back(from_region);
while let Some(r) = deque.pop_front() {
debug!(
"find_constraint_paths_between_regions: from_region={:?} r={:?} value={}",
from_region,
r,
self.region_value_str(r),
);
// Check if we reached the region we were looking for. If so,
// we can reconstruct the path that led to it and return it.
if target_test(r) {
let mut result = vec![];
let mut p = r;
loop {
match context[p] {
Trace::NotVisited => {
bug!("found unvisited region {:?} on path to {:?}", p, r)
}
Trace::FromOutlivesConstraint(c) => {
result.push(c);
p = c.sup;
}
Trace::StartRegion => {
result.reverse();
return Some((result, r));
}
}
}
}
// Otherwise, walk over the outgoing constraints and
// enqueue any regions we find, keeping track of how we
// reached them.
let fr_static = self.universal_regions.fr_static;
for constraint in self.constraint_graph
.outgoing_edges(r, &self.constraints, fr_static)
{
debug_assert_eq!(constraint.sup, r);
let sub_region = constraint.sub;
if let Trace::NotVisited = context[sub_region] {
context[sub_region] = Trace::FromOutlivesConstraint(constraint);
deque.push_back(sub_region);
}
}
}
None
}
/// Report an error because the universal region `fr` was required to outlive
/// `outlived_fr` but it is not known to do so. For example:
///
/// ```
/// fn foo<'a, 'b>(x: &'a u32) -> &'b u32 { x }
/// ```
///
/// Here we would be invoked with `fr = 'a` and `outlived_fr = `'b`.
pub(super) fn report_error(
&self,
mir: &Mir<'tcx>,
infcx: &InferCtxt<'_, '_, 'tcx>,
mir_def_id: DefId,
fr: RegionVid,
outlived_fr: RegionVid,
errors_buffer: &mut Vec<Diagnostic>,
) {
debug!("report_error(fr={:?}, outlived_fr={:?})", fr, outlived_fr);
let (category, _, span) = self.best_blame_constraint(mir, fr, |r| {
self.provides_universal_region(r, fr, outlived_fr)
});
// Check if we can use one of the "nice region errors".
if let (Some(f), Some(o)) = (self.to_error_region(fr), self.to_error_region(outlived_fr)) {
let tables = infcx.tcx.typeck_tables_of(mir_def_id);
let nice = NiceRegionError::new_from_span(infcx, span, o, f, Some(tables));
if let Some(_error_reported) = nice.try_report_from_nll() {
return;
}
}
let (fr_is_local, outlived_fr_is_local): (bool, bool) = (
self.universal_regions.is_local_free_region(fr),
self.universal_regions.is_local_free_region(outlived_fr),
);
debug!(
"report_error: fr_is_local={:?} outlived_fr_is_local={:?} category={:?}",
fr_is_local, outlived_fr_is_local, category
);
match (category, fr_is_local, outlived_fr_is_local) {
(ConstraintCategory::Return, true, false) if self.is_closure_fn_mut(infcx, fr) => {
self.report_fnmut_error(
mir,
infcx,
mir_def_id,
fr,
outlived_fr,
span,
errors_buffer,
)
}
(ConstraintCategory::Assignment, true, false)
| (ConstraintCategory::CallArgument, true, false) => self.report_escaping_data_error(
mir,
infcx,
mir_def_id,
fr,
outlived_fr,
category,
span,
errors_buffer,
),
_ => self.report_general_error(
mir,
infcx,
mir_def_id,
fr,
fr_is_local,
outlived_fr,
outlived_fr_is_local,
category,
span,
errors_buffer,
),
};
}
/// We have a constraint `fr1: fr2` that is not satisfied, where
/// `fr2` represents some universal region. Here, `r` is some
/// region where we know that `fr1: r` and this function has the
/// job of determining whether `r` is "to blame" for the fact that
/// `fr1: fr2` is required.
///
/// This is true under two conditions:
///
/// - `r == fr2`
/// - `fr2` is `'static` and `r` is some placeholder in a universe
/// that cannot be named by `fr1`; in that case, we will require
/// that `fr1: 'static` because it is the only way to `fr1: r` to
/// be satisfied. (See `add_incompatible_universe`.)
fn provides_universal_region(&self, r: RegionVid, fr1: RegionVid, fr2: RegionVid) -> bool {
debug!(
"provides_universal_region(r={:?}, fr1={:?}, fr2={:?})",
r, fr1, fr2
);
let result = {
r == fr2 || {
fr2 == self.universal_regions.fr_static && self.cannot_name_placeholder(fr1, r)
}
};
debug!("provides_universal_region: result = {:?}", result);
result
}
/// Report a specialized error when `FnMut` closures return a reference to a captured variable.
/// This function expects `fr` to be local and `outlived_fr` to not be local.
///
/// ```text
/// error: captured variable cannot escape `FnMut` closure body
/// --> $DIR/issue-53040.rs:15:8
/// |
/// LL | || &mut v;
/// | -- ^^^^^^ creates a reference to a captured variable which escapes the closure body
/// | |
/// | inferred to be a `FnMut` closure
/// |
/// = note: `FnMut` closures only have access to their captured variables while they are
/// executing...
/// = note: ...therefore, returned references to captured variables will escape the closure
/// ```
fn report_fnmut_error(
&self,
mir: &Mir<'tcx>,
infcx: &InferCtxt<'_, '_, 'tcx>,
mir_def_id: DefId,
_fr: RegionVid,
outlived_fr: RegionVid,
span: Span,
errors_buffer: &mut Vec<Diagnostic>,
) {
let mut diag = infcx
.tcx
.sess
.struct_span_err(span, "captured variable cannot escape `FnMut` closure body");
// We should check if the return type of this closure is in fact a closure - in that
// case, we can special case the error further.
let return_type_is_closure = self.universal_regions.unnormalized_output_ty.is_closure();
let message = if return_type_is_closure {
"returns a closure that contains a reference to a captured variable, which then \
escapes the closure body"
} else {
"returns a reference to a captured variable which escapes the closure body"
};
diag.span_label(span, message);
match self.give_region_a_name(infcx, mir, mir_def_id, outlived_fr, &mut 1).unwrap().source {
RegionNameSource::NamedEarlyBoundRegion(fr_span)
| RegionNameSource::NamedFreeRegion(fr_span)
| RegionNameSource::SynthesizedFreeEnvRegion(fr_span, _)
| RegionNameSource::CannotMatchHirTy(fr_span, _)
| RegionNameSource::MatchedHirTy(fr_span)
| RegionNameSource::MatchedAdtAndSegment(fr_span)
| RegionNameSource::AnonRegionFromUpvar(fr_span, _)
| RegionNameSource::AnonRegionFromOutput(fr_span, _, _) => {
diag.span_label(fr_span, "inferred to be a `FnMut` closure");
}
_ => {}
}
diag.note(
"`FnMut` closures only have access to their captured variables while they are \
executing...",
);
diag.note("...therefore, they cannot allow references to captured variables to escape");
diag.buffer(errors_buffer);
}
/// Reports an error specifically for when data is escaping a closure.
///
/// ```text
/// error: borrowed data escapes outside of function
/// --> $DIR/lifetime-bound-will-change-warning.rs:44:5
/// |
/// LL | fn test2<'a>(x: &'a Box<Fn()+'a>) {
/// | - `x` is a reference that is only valid in the function body
/// LL | // but ref_obj will not, so warn.
/// LL | ref_obj(x)
/// | ^^^^^^^^^^ `x` escapes the function body here
/// ```
fn report_escaping_data_error(
&self,
mir: &Mir<'tcx>,
infcx: &InferCtxt<'_, '_, 'tcx>,
mir_def_id: DefId,
fr: RegionVid,
outlived_fr: RegionVid,
category: ConstraintCategory,
span: Span,
errors_buffer: &mut Vec<Diagnostic>,
) {
let fr_name_and_span = self.get_var_name_and_span_for_region(infcx.tcx, mir, fr);
let outlived_fr_name_and_span =
self.get_var_name_and_span_for_region(infcx.tcx, mir, outlived_fr);
let escapes_from = match self.universal_regions.defining_ty {
DefiningTy::Closure(..) => "closure",
DefiningTy::Generator(..) => "generator",
DefiningTy::FnDef(..) => "function",
DefiningTy::Const(..) => "const",
};
// Revert to the normal error in these cases.
// Assignments aren't "escapes" in function items.
if (fr_name_and_span.is_none() && outlived_fr_name_and_span.is_none())
|| (category == ConstraintCategory::Assignment && escapes_from == "function")
|| escapes_from == "const"
{
return self.report_general_error(
mir,
infcx,
mir_def_id,
fr,
true,
outlived_fr,
false,
category,
span,
errors_buffer,
); | }
let mut diag = infcx
.tcx
.borrowed_data_escapes_closure(span, escapes_from, Origin::Mir);
if let Some((Some(outlived_fr_name), outlived_fr_span)) = outlived_fr_name_and_span {
diag.span_label(
outlived_fr_span,
format!(
"`{}` is declared here, outside of the {} body",
outlived_fr_name, escapes_from
),
);
}
if let Some((Some(fr_name), fr_span)) = fr_name_and_span {
diag.span_label(
fr_span,
format!(
"`{}` is a reference that is only valid in the {} body",
fr_name, escapes_from
),
);
diag.span_label(
span,
format!("`{}` escapes the {} body here", fr_name, escapes_from),
);
}
diag.buffer(errors_buffer);
}
/// Reports a region inference error for the general case with named/synthesized lifetimes to
/// explain what is happening.
///
/// ```text
/// error: unsatisfied lifetime constraints
/// --> $DIR/regions-creating-enums3.rs:17:5
/// |
/// LL | fn mk_add_bad1<'a,'b>(x: &'a ast<'a>, y: &'b ast<'b>) -> ast<'a> {
/// | -- -- lifetime `'b` defined here
/// | |
/// | lifetime `'a` defined here
/// LL | ast::add(x, y)
/// | ^^^^^^^^^^^^^^ function was supposed to return data with lifetime `'a` but it
/// | is returning data with lifetime `'b`
/// ```
fn report_general_error(
&self,
mir: &Mir<'tcx>,
infcx: &InferCtxt<'_, '_, 'tcx>,
mir_def_id: DefId,
fr: RegionVid,
fr_is_local: bool,
outlived_fr: RegionVid,
outlived_fr_is_local: bool,
category: ConstraintCategory,
span: Span,
errors_buffer: &mut Vec<Diagnostic>,
) {
let mut diag = infcx.tcx.sess.struct_span_err(
span,
"lifetime may not live long enough"
);
let counter = &mut 1;
let fr_name = self.give_region_a_name(infcx, mir, mir_def_id, fr, counter).unwrap();
fr_name.highlight_region_name(&mut diag);
let outlived_fr_name =
self.give_region_a_name(infcx, mir, mir_def_id, outlived_fr, counter).unwrap();
outlived_fr_name.highlight_region_name(&mut diag);
let mir_def_name = if infcx.tcx.is_closure(mir_def_id) {
"closure"
} else {
"function"
};
match (category, outlived_fr_is_local, fr_is_local) {
(ConstraintCategory::Return, true, _) => {
diag.span_label(
span,
format!(
"{} was supposed to return data with lifetime `{}` but it is returning \
data with lifetime `{}`",
mir_def_name, outlived_fr_name, fr_name
),
);
}
_ => {
diag.span_label(
span,
format!(
"{}requires that `{}` must outlive `{}`",
category.description(),
fr_name,
outlived_fr_name,
),
);
}
}
self.add_static_impl_trait_suggestion(infcx, &mut diag, fr, fr_name, outlived_fr);
diag.buffer(errors_buffer);
}
/// Adds a suggestion to errors where an `impl Trait` is returned.
///
/// ```text
/// help: to allow this impl Trait to capture borrowed data with lifetime `'1`, add `'_` as
/// a constraint
/// |
/// LL | fn iter_values_anon(&self) -> impl Iterator<Item=u32> + 'a {
/// | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
/// ```
fn add_static_impl_trait_suggestion(
&self,
infcx: &InferCtxt<'_, '_, 'tcx>,
diag: &mut DiagnosticBuilder<'_>,
fr: RegionVid,
// We need to pass `fr_name` - computing it again will label it twice.
fr_name: RegionName,
outlived_fr: RegionVid,
) {
if let (Some(f), Some(ty::RegionKind::ReStatic)) =
(self.to_error_region(fr), self.to_error_region(outlived_fr))
{
if let Some(ty::TyS {
sty: ty::TyKind::Opaque(did, substs),
..
}) = infcx
.tcx
.is_suitable_region(f)
.map(|r| r.def_id)
.map(|id| infcx.tcx.return_type_impl_trait(id))
.unwrap_or(None)
{
// Check whether or not the impl trait return type is intended to capture
// data with the static lifetime.
//
// e.g., check for `impl Trait + 'static` instead of `impl Trait`.
let has_static_predicate = {
let predicates_of = infcx.tcx.predicates_of(*did);
let bounds = predicates_of.instantiate(infcx.tcx, substs);
let mut found = false;
for predicate in bounds.predicates {
if let ty::Predicate::TypeOutlives(binder) = predicate {
if let ty::OutlivesPredicate(_, ty::RegionKind::ReStatic) =
binder.skip_binder()
{
found = true;
break;
}
}
}
found
};
debug!(
"add_static_impl_trait_suggestion: has_static_predicate={:?}",
has_static_predicate
);
let static_str = keywords::StaticLifetime.name();
// If there is a static predicate, then the only sensible suggestion is to replace
// fr with `'static`.
if has_static_predicate {
diag.help(&format!(
"consider replacing `{}` with `{}`",
fr_name, static_str,
));
} else {
// Otherwise, we should suggest adding a constraint on the return type.
let span = infcx.tcx.def_span(*did);
if let Ok(snippet) = infcx.tcx.sess.source_map().span_to_snippet(span) {
let suggestable_fr_name = if fr_name.was_named() {
fr_name.to_string()
} else {
"'_".to_string()
};
diag.span_suggestion(
span,
&format!(
"to allow this impl Trait to capture borrowed data with lifetime \
`{}`, add `{}` as a constraint",
fr_name, suggestable_fr_name,
),
format!("{} + {}", snippet, suggestable_fr_name),
Applicability::MachineApplicable,
);
}
}
}
}
}
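/// Summarizes why `borrow_region` is required to outlive `outlived_region`:
/// picks the best blame constraint between the two regions and returns its
/// category, whether it was propagated from a closure, the span to report,
/// and a name for the outlived region (if one can be synthesized).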
crate fn free_region_constraint_info(
&self,
mir: &Mir<'tcx>,
mir_def_id: DefId,
infcx: &InferCtxt<'_, '_, 'tcx>,
borrow_region: RegionVid,
outlived_region: RegionVid,
) -> (ConstraintCategory, bool, Span, Option<RegionName>) {
let (category, from_closure, span) =
self.best_blame_constraint(mir, borrow_region, |r| r == outlived_region);
let outlived_fr_name =
self.give_region_a_name(infcx, mir, mir_def_id, outlived_region, &mut 1);
(category, from_closure, span, outlived_fr_name)
}
// Finds some region R such that `fr1: R` and `R` is live at
// `elem`.
crate fn find_sub_region_live_at(&self, fr1: RegionVid, elem: Location) -> RegionVid {
debug!("find_sub_region_live_at(fr1={:?}, elem={:?})", fr1, elem);
self.find_constraint_paths_between_regions(fr1, |r| {
// First look for some `r` such that `fr1: r` and `r` is live at `elem`
debug!(
"find_sub_region_live_at: liveness_constraints for {:?} are {:?}",
r,
self.liveness_constraints.region_value_str(r),
);
self.liveness_constraints.contains(r, elem)
}).or_else(|| {
// If we fail to find that, we may find some `r` such that
// `fr1: r` and `r` is a placeholder from some universe
// `fr1` cannot name. This would force `fr1` to be
// `'static`.
self.find_constraint_paths_between_regions(fr1, |r| {
self.cannot_name_placeholder(fr1, r)
})
})
.or_else(|| {
// If we fail to find THAT, it may be that `fr1` is a
// placeholder that cannot "fit" into its SCC. In that
// case, there should be some `r` where `fr1: r`, both
// `fr1` and `r` are in the same SCC, and `fr1` is a
// placeholder that `r` cannot name. We can blame that
// edge.
self.find_constraint_paths_between_regions(fr1, |r| {
self.constraint_sccs.scc(fr1) == self.constraint_sccs.scc(r)
&& self.cannot_name_placeholder(r, fr1)
})
})
.map(|(_path, r)| r)
.unwrap()
}
// Finds a good span to blame for the fact that `fr1` outlives `fr2`.
crate fn find_outlives_blame_span(
&self,
mir: &Mir<'tcx>,
fr1: RegionVid,
fr2: RegionVid,
) -> (ConstraintCategory, Span) {
let (category, _, span) =
self.best_blame_constraint(mir, fr1, |r| self.provides_universal_region(r, fr1, fr2));
(category, span)
}
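/// Maps a constraint back to something reportable: for constraints recorded
/// at a single location inside a closure, looks up the category and span in
/// `closure_bounds_mapping`; otherwise falls back to the constraint's own
/// category and the span of its source location.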
fn retrieve_closure_constraint_info(
&self,
mir: &Mir<'tcx>,
constraint: &OutlivesConstraint,
) -> (ConstraintCategory, bool, Span) {
let loc = match constraint.locations {
Locations::All(span) => return (constraint.category, false, span),
Locations::Single(loc) => loc,
};
let opt_span_category =
self.closure_bounds_mapping[&loc].get(&(constraint.sup, constraint.sub));
opt_span_category
.map(|&(category, span)| (category, true, span))
.unwrap_or((constraint.category, false, mir.source_info(loc).span))
}
/// Returns `true` if a closure is inferred to be an `FnMut` closure.
crate fn is_closure_fn_mut(&self, infcx: &InferCtxt<'_, '_, 'tcx>, fr: RegionVid) -> bool {
if let Some(ty::ReFree(free_region)) = self.to_error_region(fr) {
if let ty::BoundRegion::BrEnv = free_region.bound_region {
if let DefiningTy::Closure(def_id, substs) = self.universal_regions.defining_ty {
let closure_kind_ty = substs.closure_kind_ty(def_id, infcx.tcx);
return Some(ty::ClosureKind::FnMut) == closure_kind_ty.to_opt_closure_kind();
}
}
}
false
}
/// If `r2` represents a placeholder region, then this returns
/// `true` if `r1` cannot name that placeholder in its
/// value; otherwise, returns `false`.
fn cannot_name_placeholder(&self, r1: RegionVid, r2: RegionVid) -> bool {
debug!("cannot_name_value_of(r1={:?}, r2={:?})", r1, r2);
match self.definitions[r2].origin {
NLLRegionVariableOrigin::Placeholder(placeholder) => {
let universe1 = self.definitions[r1].universe;
debug!(
"cannot_name_value_of: universe1={:?} placeholder={:?}",
universe1, placeholder
);
universe1.cannot_name(placeholder.universe)
}
NLLRegionVariableOrigin::FreeRegion | NLLRegionVariableOrigin::Existential => false,
}
}
} | |
init.go | // Configuration priority: command-line flags > config file > defaults
package config
import (
"io"
"log"
"oneTiny/core/util"
"os"
"path/filepath"
"runtime"
"github.com/fatih/color"
"github.com/spf13/viper"
)
const (
VERSION string = "v0.3.0"
ROOT string = "/"
SEPARATORS string = "/"
)
var (
Output io.Writer = os.Stderr
SessionVal string = util.RandString(64)
Goos string = runtime.GOOS // Operating system the program is running on; default: linux
IP string = ip // LAN IP address of this machine
wd, _ = os.Getwd()
ip, _ = util.GetIP()
userCfgDir, _ = os.UserConfigDir()
cfgDir string = filepath.Join(userCfgDir, "tiny")
cfgFile string = filepath.Join(cfgDir, "config.yml")
// Factory defaults for each parameter
MaxLevel uint8 = 0 // Maximum directory depth that may be accessed; default 0
Port int = 9090 // Port the service listens on; default 9090
IsAllowUpload bool = false // Whether uploads are allowed; default: no
IsSecure bool = false // Whether access requires login; default: no
RootPath string = wd // Root path of the shared directory; default: current directory
Username string = "admin" // Username for access login
Password string = "admin" // Password for access login
)
func init() {
loadUserConfig()
MaxLevel = uint8(viper.GetInt("server.max_level"))
Port = viper.GetInt("server.port")
IsAllowUpload = viper.GetBool("server.allow_upload")
IsSecure = viper.GetBool("account.secure")
if Goos == "windows" {
Output = color.Output
}
}
// loadUserConfig loads the user's config file, creating it with default values if it does not exist
func loadUserConfig() {
viper.AddConfigPath(cfgDir)
viper.SetConfigName("config")
viper.SetConfigType("yml")
read:
if err := viper.ReadInConfig(); err != nil {
switch err.(type) {
case viper.ConfigFileNotFoundError:
log.Println(color.YellowString("未找到「自定义配置文件」, 正在创建中..."))
createCfgFile()
goto read
case viper.ConfigParseError:
log.Println(color.RedString("已找到「自定义配置文件」,但是解析失败!"))
case viper.ConfigMarshalError:
log.Println(color.RedString("已找到「自定义配置文件」,但是读取失败!"))
}
}
}
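// createCfgFile creates the config directory and file if needed, then writes
// the factory defaults into it via setDefault.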
func createCfgFile() {
_, err := os.Stat(cfgDir)
if os.IsNotExist(err) {
os.MkdirAll(cfgDir, os.ModePerm)
}
_, err = os.Create(cfgFile)
if err != nil {
log.Println(color.RedString("创建自定义配置文件失败!"))
}
if err := setDefault(); err == nil {
log.Println(color.GreenString("创建成功,配置文件位于: %s", cfgFile))
}
}
func setDefault() error {
viper.Set("server.port", 9090)
viper.Set("server.allow_upload", false)
viper.Set("server.max_level", 0)
return viper.WriteConfig()
}
get_contract_counter.rs | use serde::Deserialize;
use types::ImplicitAddress;
use crypto::ToBase58Check;
use crate::api::{
GetContractCounter, GetContractCounterResult,
TransportError, GetContractCounterError, GetContractCounterErrorKind,
};
use crate::http_api::HttpApi;
fn get_contract_counter_url(base_url: &str, addr: &ImplicitAddress) -> String {
format!(
"{}/chains/main/blocks/head/context/contracts/{}/counter",
base_url,
addr.to_base58check(),
)
}
impl From<ureq::Error> for GetContractCounterErrorKind {
fn from(error: ureq::Error) -> Self {
match error {
ureq::Error::Transport(error) => {
Self::Transport(TransportError(Box::new(error)))
}
ureq::Error::Status(code, resp) => {
let status_text = resp.status_text().to_string();
Self::Unknown(format!(
"Http status: ({}, {}){}",
code,
status_text,
match resp.into_string() {
Ok(s) => format!(", message: {}", s),
Err(_) => "".to_string(),
},
))
}
}
}
}
impl From<std::io::Error> for GetContractCounterErrorKind {
fn from(error: std::io::Error) -> Self {
Self::Transport(TransportError(Box::new(error)))
}
}
#[derive(Deserialize)]
#[serde(transparent)]
struct ContractCounter {
#[serde(with = "utils::serde_str")]
current: u64,
}
impl Into<u64> for ContractCounter {
fn into(self) -> u64 {
self.current
}
}
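/// Wraps any error kind convertible into `GetContractCounterErrorKind` into a
/// `GetContractCounterError` tagged with the queried address.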
#[inline]
fn build_error<E>(address: &ImplicitAddress, kind: E) -> GetContractCounterError
where E: Into<GetContractCounterErrorKind>,
{
GetContractCounterError {
address: address.clone(),
kind: kind.into(),
}
}
impl GetContractCounter for HttpApi {
fn get_contract_counter(&self, addr: &ImplicitAddress) -> GetContractCounterResult {
let value: String = get_contract_counter_url(&self.base_url, addr);
println!("{}", value);
Ok(self.client.get(&value)
.call()
.map_err(|err| build_error(addr, err))?
.into_json::<ContractCounter>()
.map_err(|err| build_error(addr, err))?
.into())
}
}
factory.go | // Copyright OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package azuremonitorexporter
import (
"context"
"errors"
"time"
"github.com/microsoft/ApplicationInsights-Go/appinsights"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/config/configmodels"
"go.opentelemetry.io/collector/exporter/exporterhelper"
"go.uber.org/zap"
)
const (
// The value of "type" key in configuration.
typeStr = "azuremonitor"
defaultEndpoint = "https://dc.services.visualstudio.com/v2/track"
)
var (
errUnexpectedConfigurationType = errors.New("failed to cast configuration to Azure Monitor Config")
)
// NewFactory returns a factory for Azure Monitor exporter.
func NewFactory() component.ExporterFactory {
f := &factory{}
return exporterhelper.NewFactory(
typeStr,
createDefaultConfig,
exporterhelper.WithTraces(f.createTraceExporter))
}
// Implements the interface from go.opentelemetry.io/collector/exporter/factory.go
type factory struct {
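// tChannel is created lazily and cached so every exporter built by this factory shares one transport channel.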
tChannel transportChannel
}
func createDefaultConfig() configmodels.Exporter {
return &Config{
ExporterSettings: configmodels.ExporterSettings{
TypeVal: configmodels.Type(typeStr),
NameVal: typeStr,
},
Endpoint: defaultEndpoint,
MaxBatchSize: 1024,
MaxBatchInterval: 10 * time.Second,
}
}
func (f *factory) createTraceExporter(
ctx context.Context,
params component.ExporterCreateParams,
cfg configmodels.Exporter,
) (component.TraceExporter, error) {
exporterConfig, ok := cfg.(*Config)
if !ok {
return nil, errUnexpectedConfigurationType
}
tc := f.getTransportChannel(exporterConfig, params.Logger)
return newTraceExporter(exporterConfig, tc, params.Logger)
}
// Configures the transport channel.
// This method is not thread-safe
func (f *factory) getTransportChannel(exporterConfig *Config, logger *zap.Logger) transportChannel {
// The default transport channel uses the default send mechanism from the AppInsights telemetry client.
// This default channel handles batching, appropriate retries, and is backed by memory.
if f.tChannel == nil {
telemetryConfiguration := appinsights.NewTelemetryConfiguration(exporterConfig.InstrumentationKey)
telemetryConfiguration.EndpointUrl = exporterConfig.Endpoint
telemetryConfiguration.MaxBatchSize = exporterConfig.MaxBatchSize
telemetryConfiguration.MaxBatchInterval = exporterConfig.MaxBatchInterval
telemetryClient := appinsights.NewTelemetryClientFromConfig(telemetryConfiguration)
f.tChannel = telemetryClient.Channel()
// Don't even bother enabling the AppInsights diagnostics listener unless debug logging is enabled
if checkedEntry := logger.Check(zap.DebugLevel, ""); checkedEntry != nil {
appinsights.NewDiagnosticsMessageListener(func(msg string) error {
logger.Debug(msg)
return nil
})
}
}
return f.tChannel
}
test_grammars.py | # This Python file uses the following encoding: utf-8
# The above line is required for the MultiLingualTests class
import copy
import tempfile
import unittest
from jsgf import *
from jsgf.ext import Dictation
class BasicGrammarCase(unittest.TestCase):
def setUp(self):
rule2 = PrivateRule("greetWord", AlternativeSet("hello", "hi"))
rule3 = PrivateRule("name", AlternativeSet(
"peter", "john", "mary", "anna"))
rule1 = PublicRule("greet", RequiredGrouping(
RuleRef(rule2), RuleRef(rule3)))
self.grammar = Grammar("test")
self.grammar.add_rules(rule1, rule2, rule3)
self.rule1 = rule1
self.rule2 = rule2
self.rule3 = rule3
def test_compile(self):
expected = "#JSGF V1.0;\n" \
"grammar test;\n" \
"public <greet> = (<greetWord> <name>);\n" \
"<greetWord> = (hello|hi);\n" \
"<name> = (peter|john|mary|anna);\n"
compiled = self.grammar.compile()
self.assertEqual(expected, compiled)
def test_compile_to_file(self):
expected = "#JSGF V1.0;\n" \
"grammar test;\n" \
"public <greet> = (<greetWord> <name>);\n" \
"<greetWord> = (hello|hi);\n" \
"<name> = (peter|john|mary|anna);\n"
# Create a temporary testing file.
tf = tempfile.NamedTemporaryFile()
self.grammar.compile_to_file(tf.name)
# Check the file contents after writing to it.
with open(tf.name) as f:
content = f.read()
try:
self.assertEqual(expected, content)
finally:
# Always close and remove the temp file, even if the assertion fails.
tf.close()
def test_remove_dependent_rule(self):
self.assertRaises(GrammarError, self.grammar.remove_rule, "greetWord")
self.assertRaises(GrammarError, self.grammar.remove_rule, "name")
# Test again with the actual rule objects
self.assertRaises(GrammarError, self.grammar.remove_rule, self.rule2)
self.assertRaises(GrammarError, self.grammar.remove_rule, self.rule3)
self.grammar.remove_rule("greet")
self.assertListEqual([self.rule2, self.rule3], self.grammar.rules)
# Add it again to test removing the rule using the object
self.grammar.add_rule(self.rule1)
self.assertListEqual([self.rule2, self.rule3, self.rule1],
self.grammar.rules)
self.grammar.remove_rule(self.rule1)
self.assertListEqual([self.rule2, self.rule3], self.grammar.rules)
# Test that removing rule2 works using ignore_dependent=True
self.grammar.add_rule(self.rule1) # add rule1 again
self.assertIsNone(self.grammar.remove_rule(self.rule2,
ignore_dependent=True))
def test_add_rules_with_taken_names(self):
self.assertRaises(GrammarError, self.grammar.add_rule,
PublicRule("name", "bob"))
self.assertRaises(GrammarError, self.grammar.add_rule,
PrivateRule("name", "bob"))
rules_to_add = [PrivateRule("name", "bob"),
PublicRule("name", "bob")]
self.assertRaises(GrammarError, self.grammar.add_rules, *rules_to_add)
def test_enable_disable_rule(self):
self.grammar.disable_rule(self.rule1)
self.assertFalse(self.rule1.active)
self.grammar.enable_rule(self.rule1)
self.assertTrue(self.rule1.active)
def test_enable_disable_using_name(self):
self.grammar.disable_rule("greetWord")
self.assertFalse(self.rule2.active)
self.grammar.enable_rule("greetWord")
self.assertTrue(self.rule2.active)
def test_enable_disable_non_existent(self):
self.assertRaises(GrammarError, self.grammar.disable_rule, "hello")
self.assertRaises(GrammarError, self.grammar.enable_rule, "hello")
r = PublicRule("test", "hello")
self.assertRaises(GrammarError, self.grammar.disable_rule, r)
self.assertRaises(GrammarError, self.grammar.enable_rule, r)
def test_enable_disable_using_dup_rule(self):
"""
Test that a copy of a rule in the grammar can be used to disable or enable
the equivalent rule in the grammar as well as the rule object passed.
"""
r = PrivateRule("greetWord", AlternativeSet("hello", "hi"))
self.grammar.disable_rule(r)
self.assertFalse(r.active, "duplicate rule should be disabled")
self.assertFalse(self.rule2.active, "rule in grammar should be disabled")
# Test enabling it again
self.grammar.enable_rule(r)
self.assertTrue(r.active, "duplicate rule should be enabled again")
self.assertTrue(self.rule2.active, "rule in grammar should be enabled")
def test_enable_disable_compile_output(self):
enabled_output = "#JSGF V1.0;\n" \
"grammar test;\n" \
"public <greet> = (<greetWord> <name>);\n" \
"<greetWord> = (hello|hi);\n" \
"<name> = (peter|john|mary|anna);\n"
self.assertEqual(self.grammar.compile(), enabled_output)
self.grammar.disable_rule(self.rule1)
self.assertFalse(self.rule1.active)
self.assertEqual(
self.grammar.compile(),
"#JSGF V1.0;\n"
"grammar test;\n"
"<greetWord> = (hello|hi);\n"
"<name> = (peter|john|mary|anna);\n",
"disabled output shouldn't have the public 'greet' rule"
)
self.grammar.enable_rule(self.rule1)
self.assertTrue(self.rule1.active)
self.assertEqual(self.grammar.compile(), enabled_output)
def test_comparisons(self):
self.assertEqual(Grammar(), Grammar())
self.assertNotEqual(Grammar(name="test"), Grammar(name="test2"),
"grammars with different names should not be equal")
g1 = Grammar(name="test")
g1.add_import(Import("test2.*"))
self.assertNotEqual(g1, Grammar(name="test"),
"grammars with different imports should not be equal")
g2 = Grammar()
g2.add_rule(PublicRule("r1", "hello"))
g3 = Grammar()
self.assertNotEqual(g1, g2,
"grammars with different names, rules and imports "
"should not be equal")
self.assertEqual(g2, g2, "the same grammar should be equal with itself")
self.assertEqual(g2, copy.copy(g2),
"grammars with the same rules should be equal")
self.assertNotEqual(g2, g3, "grammars with only different rules should not "
"be equal")
# Assert that any difference in the JSGF header makes Grammar objects not
# equal
default = Grammar()
def check():
self.assertNotEqual(g3, default, "grammars with only different JSGF "
"headers should not be equal")
g3.language_name = "ru"
check()
g3.jsgf_version = "2.0"
check()
g3.charset_name = "utf-16"
check()
self.assertEqual(RootGrammar(name="test"), Grammar(name="test"),
"grammars with only different types should be equal")
# Check case-sensitive vs case-insensitive grammars.
self.assertNotEqual(
Grammar(case_sensitive=False), Grammar(case_sensitive=True),
"grammars with different case sensitivity should not be equal")
def test_jsgf_header(self):
""" JSGF header uses grammar header attributes correctly. """
grammar = Grammar()
self.assertEqual(grammar.jsgf_header, "#JSGF V1.0;\n")
grammar.charset_name = "utf-8"
self.assertEqual(grammar.jsgf_header, "#JSGF V1.0 utf-8;\n")
grammar.charset_name = ""
grammar.language_name = "en"
self.assertEqual(grammar.jsgf_header, "#JSGF V1.0 en;\n")
grammar.charset_name = "utf-8"
self.assertEqual(grammar.jsgf_header, "#JSGF V1.0 utf-8 en;\n")
def test_links(self):
"""Expansion.rule and Rule.grammar attributes work correctly."""
hello = Literal("hello")
self.assertIsNone(hello.rule, "no rule will use the expansion yet")
r = PublicRule("test", hello)
self.assertEqual(hello.rule, r, "rule 'test' should use the expansion")
r.expansion = "hi"
self.assertIsNone(hello.rule, "setting r.expansion should reset "
"hello.rule")
# Test Rule.grammar
g = Grammar(name="g")
self.assertIsNone(r.grammar, "no grammar will be using the rule yet")
g.add_rule(r)
self.assertEqual(r.grammar, g, "adding r to a grammar should set r.grammar")
g.remove_rule(r)
self.assertIsNone(r.grammar, "remove r from its grammar should reset "
"r.grammar")
def test_case_sensitivity(self):
"""JSGF Grammars support configurable case-sensitivity."""
grammar = Grammar("test")
direction = Rule("direction", False, AlternativeSet(
"Up", "Down", "Left", "Right"
))
n = Rule("n", False, AlternativeSet("One", "Two", "Three"))
cmd_rule = Rule("cmd", True, Sequence(
NamedRuleRef("direction"), NamedRuleRef("n")
))
grammar.add_rules(direction, n, cmd_rule)
expected_sensitive = "#JSGF V1.0;\n" \
"grammar test;\n" \
"<direction> = (Up|Down|Left|Right);\n" \
"<n> = (One|Two|Three);\n" \
"public <cmd> = <direction> <n>;\n"
expected_insensitive = "#JSGF V1.0;\n" \
"grammar test;\n" \
"<direction> = (up|down|left|right);\n" \
"<n> = (one|two|three);\n" \
"public <cmd> = <direction> <n>;\n"
# Test that default is case-insensitive.
self.assertFalse(grammar.case_sensitive)
self.assertEqual(grammar.compile(), expected_insensitive)
# Test that setting grammar.case_sensitive overrides the values for each
# grammar rule.
grammar.case_sensitive = True
self.assertTrue(grammar.case_sensitive)
for rule in grammar.rules:
self.assertTrue(rule.case_sensitive)
# Test case-sensitive compilation and matching.
self.assertEqual(grammar.compile(), expected_sensitive)
self.assertSequenceEqual(grammar.find_matching_rules("Up Two"), [cmd_rule])
self.assertSequenceEqual(grammar.find_matching_rules("up two"), [])
# Switch back to case-insensitive to test that the casing of rule literals is
# never lost.
grammar.case_sensitive = False
self.assertFalse(grammar.case_sensitive)
self.assertEqual(grammar.compile(), expected_insensitive)
self.assertSequenceEqual(grammar.find_matching_rules("Up Two"), [cmd_rule])
self.assertSequenceEqual(grammar.find_matching_rules("up two"), [cmd_rule])
def test_add_import(self):
""" Import objects can be added and used by grammars. """
grammar = Grammar("test")
X = "com.example.grammar.X"
Y = "com.example.grammar.Y"
Z = "com.example.grammar.Z"
grammar.add_import(Import(X))
grammar.add_imports(Import(Y), Import(Z))
self.assertEqual(grammar.compile(),
"#JSGF V1.0;\n"
"grammar test;\n"
"import <com.example.grammar.X>;\n"
"import <com.example.grammar.Y>;\n"
"import <com.example.grammar.Z>;\n")
self.assertEqual(grammar.imports, [Import(i) for i in (X, Y, Z)])
self.assertEqual(grammar.import_names, [X, Y, Z])
def test_add_import_optimal(self):
""" Import objects added to grammars multiple times are only added once. """
grammar = Grammar("test")
import_name = "com.example.grammar.X"
for i in range(2):
grammar.add_import(Import(import_name))
self.assertEqual(grammar.compile(),
"#JSGF V1.0;\n"
"grammar test;\n"
"import <com.example.grammar.X>;\n")
self.assertEqual(grammar.imports, [Import(import_name)])
self.assertEqual(grammar.import_names, [import_name])
def test_add_import_type(self):
""" Grammar.add_import only accepts Import objects. """
grammar = Grammar("test")
grammar.add_import(Import("com.example.grammar.X"))
self.assertRaises(TypeError, grammar.add_import, "com.example.grammar.Y")
self.assertRaises(TypeError, grammar.add_imports, "com.example.grammar.Y")
def test_remove_import(self):
""" Import objects can be properly removed from grammars. """
grammar = Grammar("test")
expected = "#JSGF V1.0;\ngrammar test;\n"
import_name = "com.example.grammar.X"
import_ = Import(import_name)
# Both identical and equivalent Import objects should work.
for remove_item in (import_, Import(import_name)):
grammar.add_import(import_)
grammar.remove_import(remove_item)
self.assertEqual(grammar.compile(), expected)
self.assertEqual(grammar.imports, [])
self.assertEqual(grammar.import_names, [])
def test_remove_import_type(self):
""" Grammar.remove_import only accepts Import objects. """
grammar = Grammar("test")
grammar.add_import(Import("com.example.grammar.X"))
self.assertRaises(TypeError, grammar.remove_import, "com.example.grammar.X")
self.assertRaises(TypeError, grammar.remove_imports, "com.example.grammar.X")
def test_remove_import_unknown(self):
""" Removing an Import object that isn't in a grammar raises an error. """
grammar = Grammar("test")
self.assertRaises(GrammarError, grammar.remove_import,
Import("com.example.grammar.X"))
self.assertRaises(GrammarError, grammar.remove_imports,
Import("com.example.grammar.X"),
Import("com.example.grammar.Y"))
class TagTests(unittest.TestCase):
"""
Test the Grammar.find_tagged_rules method.
"""
def test_simple(self):
g = Grammar()
r = Rule("r", True, "test")
r.expansion.tag = "tag"
g.add_rule(r)
self.assertListEqual(g.find_tagged_rules("tag"), [r])
def test_hidden_rule(self):
g = Grammar()
r = Rule("r", False, "test")
r.expansion.tag = "tag"
g.add_rule(r)
self.assertListEqual(g.find_tagged_rules("tag"), [])
self.assertListEqual(g.find_tagged_rules("tag", include_hidden=True), [r])
def test_no_tag(self):
g = Grammar()
r = PublicRule("hello", "hello world")
self.assertListEqual(g.find_tagged_rules(""), [])
r.expansion.tag = ""
self.assertListEqual(g.find_tagged_rules(""), [])
self.assertListEqual(g.find_tagged_rules(" "), [])
r.expansion.tag = " "
self.assertListEqual(g.find_tagged_rules(" "), [])
def test_whitespace(self):
# Leading or trailing whitespace should be ignored by find_tagged_rules.
g = Grammar()
r = PublicRule("r", "test")
r.expansion.tag = " tag "
g.add_rule(r)
self.assertEqual(r.expansion.tag, "tag")
self.assertListEqual(g.find_tagged_rules("tag"), [r])
self.assertListEqual(g.find_tagged_rules(" tag "), [r])
def test_get_rules_from_names(self):
g = Grammar()
x = PublicRule("X", "x")
y = PrivateRule("Y", "y")
z = PublicRule("Z", "z")
g.add_rules(x, y, z)
# Test that rules are retrievable with both methods.
self.assertEqual(g.get_rules_from_names("X", "Y"), [x, y])
self.assertEqual(g.get_rules("X", "Y"), [x, y])
# Test that a GrammarError is raised if any name is invalid.
self.assertRaises(GrammarError, g.get_rules_from_names, "W")
self.assertRaises(GrammarError, g.get_rules_from_names, "X", "W")
self.assertRaises(GrammarError, g.get_rules, "W")
self.assertRaises(GrammarError, g.get_rules, "X", "W")
class SpeechMatchCase(unittest.TestCase):
def assert_matches(self, speech, rule):
self.assertTrue(rule.matches(speech))
def assert_no_match(self, speech, rule):
self.assertFalse(rule.matches(speech))
def test_single_rule_match(self):
grammar = Grammar("test")
rule = PrivateRule("greet", Sequence(
AlternativeSet("hello", "hi"), "world"
))
grammar.add_rules(rule)
self.assert_matches("hello world", rule)
self.assert_matches("hello world".swapcase(), rule)
self.assert_matches("hi world", rule)
self.assert_no_match("hey world", rule)
self.assert_no_match("hello", rule)
self.assert_no_match("world", rule)
self.assert_no_match("", rule)
def test_multi_rule_match(self):
grammar = Grammar("test")
rule2 = PrivateRule("greetWord", AlternativeSet("hello", "hi"))
rule3 = PrivateRule("name", AlternativeSet("peter", "john",
"mary", "anna"))
rule1 = PublicRule("greet",
RequiredGrouping(
RuleRef(rule2),
RuleRef(rule3))
)
grammar.add_rules(rule1, rule2, rule3)
# Rule 1
self.assert_matches("hello john", rule1)
self.assert_matches("hello john".swapcase(), rule1)
self.assert_no_match("hello", rule1)
self.assert_no_match("john", rule1)
self.assert_no_match("", rule1)
# Rule 2
self.assert_matches("hello", rule2)
self.assert_matches("HELLO", rule2)
self.assert_matches("hi", rule2)
self.assert_matches("HI", rule2)
self.assert_no_match("", rule2)
# Rule 3
self.assert_matches("john", rule3)
self.assert_no_match("", rule3)
class MultiLingualTests(unittest.TestCase):
"""
Test that Unicode characters can be used in rule, import and grammar names
as well as in literals and that the text can be matched.
Cyrillic characters are used to test this functionality. There are various
Unicode character sets, each containing an enormous number of characters, so
it is hardly feasible to test everything. Plus, this library simply uses
Python's Unicode support.
"""
def test_names(self):
"""Unicode strings can be used in names and literals and can be matched."""
grammar = Grammar(name=u"грамматика")
self.assertEqual(grammar.name, u"грамматика")
rule = PublicRule(u"русский", AlternativeSet(
u"привет", u"здравствуйте", u"пожалуйста"))
import_ = Import(u"грамматика2.*")
self.assertEqual(import_.name, u"грамматика2.*")
# Test matching the rule
self.assertTrue(rule.matches(u"здравствуйте"))
# Test matching using the grammar
grammar.add_rule(rule)
self.assertListEqual(grammar.find_matching_rules(u"пожалуйста"), [rule])
def test_dictation(self):
"""Dictation Expansions match Unicode strings."""
self.assertTrue(PublicRule(u"всё", Dictation().matches(u"это кофе")))
class VisibleRulesCase(unittest.TestCase):
"""
Test the 'visible_rules' property of the Grammar class.
"""
def setUp(self):
grammar1 = Grammar("test")
self.rule1 = PrivateRule("rule1", "Hello")
self.rule2 = PrivateRule("rule2", "Hey")
self.rule3 = PrivateRule("rule3", "Hi")
grammar1.add_rules(self.rule1, self.rule2, self.rule3)
self.grammar1 = grammar1
grammar2 = Grammar("test2")
self.rule4 = PublicRule("rule4", "Hello")
self.rule5 = PublicRule("rule5", "Hey")
self.rule6 = PrivateRule("rule6", "Hi")
grammar2.add_rules(self.rule4, self.rule5, self.rule6)
self.grammar2 = grammar2
def test_none(self):
self.assertListEqual(self.grammar1.visible_rules, [])
def test_many(self):
self.assertListEqual(self.grammar2.visible_rules, [self.rule4, self.rule5])
class RootGrammarCase(unittest.TestCase):
def setUp(self):
self.grammar = RootGrammar(name="root")
self.rule2 = PrivateRule("greetWord", AlternativeSet("hello", "hi"))
self.rule3 = PrivateRule("name", AlternativeSet(
"peter", "john", "mary", "anna"))
self.rule1 = PublicRule("greet", RequiredGrouping(
RuleRef(self.rule2), RuleRef(self.rule3)))
self.grammar.add_rules(self.rule1, self.rule2, self.rule3)
self.rule5 = PrivateRule("greetWord", AlternativeSet("hello", "hi"))
self.rule4 = PublicRule("greet", Sequence(RuleRef(self.rule5), "there"))
self.rule6 = PublicRule("partingPhrase", AlternativeSet(
"goodbye", "see you"))
def test_compile(self):
root = self.grammar
expected = "#JSGF V1.0;\n" \
"grammar root;\n" \
"public <root> = (<greet>);\n" \
"<greet> = (<greetWord> <name>);\n" \
"<greetWord> = (hello|hi);\n" \
"<name> = (peter|john|mary|anna);\n"
self.assertEqual(root.compile(), expected)
def test_compile_to_file(self):
root = self.grammar
expected = "#JSGF V1.0;\n" \
"grammar root;\n" \
"public <root> = (<greet>);\n" \
"<greet> = (<greetWord> <name>);\n" \
"<greetWord> = (hello|hi);\n" \
"<name> = (peter|john|mary|anna);\n"
| tf = tempfile.NamedTemporaryFile()
root.compile_to_file(tf.name)
# Check the file contents after writing to it.
with open(tf.name) as f:
content = f.read()
try:
self.assertEqual(expected, content)
finally:
# Always close and remove the temp file, even if the assertion fails.
tf.close()
def test_compile_add_remove_rule(self):
root = RootGrammar(rules=[self.rule5, self.rule4], name="root")
expected_without = "#JSGF V1.0;\n" \
"grammar root;\n" \
"public <root> = (<greet>);\n" \
"<greetWord> = (hello|hi);\n" \
"<greet> = <greetWord> there;\n"
expected_with = "#JSGF V1.0;\n" \
"grammar root;\n" \
"public <root> = (<greet>|<partingPhrase>);\n" \
"<greetWord> = (hello|hi);\n" \
"<greet> = <greetWord> there;\n" \
"<partingPhrase> = (goodbye|see you);\n"
self.assertEqual(root.compile(), expected_without)
root.add_rule(self.rule6)
self.assertEqual(root.compile(), expected_with)
# Test removing the partingPhrase rule using the name
root.remove_rule("partingPhrase")
self.assertEqual(root.compile(), expected_without)
# Add the rule and test removing it using the rule object
root.add_rule(self.rule6)
self.assertEqual(root.compile(), expected_with)
root.remove_rule(self.rule6)
self.assertEqual(root.compile(), expected_without)
def test_match(self):
# Only rule1 should match
root = self.grammar
self.assertListEqual(root.find_matching_rules("Hello John"), [self.rule1])
self.assertListEqual(root.find_matching_rules("HELLO mary"), [self.rule1])
self.assertListEqual(root.find_matching_rules("hello ANNA"), [self.rule1])
def test_match_add_remove(self):
root = RootGrammar(rules=[self.rule5, self.rule4], name="root")
self.assertListEqual(root.find_matching_rules("Hello there"), [self.rule4])
self.assertListEqual(root.find_matching_rules("Hi there"), [self.rule4])
# Add a rule
root.add_rule(self.rule6)
self.assertListEqual(root.find_matching_rules("Goodbye"), [self.rule6])
self.assertListEqual(root.find_matching_rules("See you"), [self.rule6])
# Remove it and test again
root.remove_rule("partingPhrase")
self.assertListEqual(root.find_matching_rules("Goodbye"), [])
self.assertListEqual(root.find_matching_rules("See you"), [])
# Test again using the remove_rule(rule object) instead
root.add_rule(self.rule6)
self.assertListEqual(root.find_matching_rules("Goodbye"), [self.rule6])
self.assertListEqual(root.find_matching_rules("See you"), [self.rule6])
root.remove_rule(self.rule6)
self.assertListEqual(root.find_matching_rules("Goodbye"), [])
self.assertListEqual(root.find_matching_rules("See you"), [])
def test_add_rules_with_taken_names(self):
root = self.grammar
self.assertRaises(GrammarError, root.add_rule,
PublicRule("name", "bob"))
self.assertRaises(GrammarError, root.add_rule,
PrivateRule("name", "bob"))
rules_to_add = [PrivateRule("name", "bob"),
PublicRule("name", "bob")]
self.assertRaises(GrammarError, root.add_rules,
*rules_to_add)
# Test if adding a rule with the name 'root' raises an error
self.assertRaises(GrammarError, root.add_rule, PublicRule("root", "test"))
def test_create_grammar_with_rule_name_conflicts(self):
# Try with duplicate rules (should fail silently)
g = RootGrammar()
r = PublicRule("test", "test")
g.add_rule(r)
self.assertListEqual(g.rules, [r])
g.add_rule(PublicRule("test", "test"))
self.assertListEqual(g.rules, [r])
# Try with slightly different rules
self.assertRaises(GrammarError, RootGrammar,
[PublicRule("test", "testing"),
PublicRule("test", "test")])
self.assertRaises(GrammarError, RootGrammar,
[PublicRule("test", "test"),
PrivateRule("test", "test")])
self.assertRaises(GrammarError, RootGrammar,
[PublicRule("test", "testing"),
PrivateRule("test", "test")])
def test_enable_disable_rule(self):
self.grammar.disable_rule(self.rule1)
self.assertFalse(self.rule1.active)
self.grammar.enable_rule(self.rule1)
self.assertTrue(self.rule1.active)
def test_enable_disable_using_name(self):
self.grammar.disable_rule("greetWord")
self.assertFalse(self.rule2.active)
self.grammar.enable_rule("greetWord")
self.assertTrue(self.rule2.active)
def test_enable_disable_non_existent(self):
self.assertRaises(GrammarError, self.grammar.disable_rule, "hello")
self.assertRaises(GrammarError, self.grammar.enable_rule, "hello")
r = PublicRule("test", "hello")
self.assertRaises(GrammarError, self.grammar.disable_rule, r)
self.assertRaises(GrammarError, self.grammar.enable_rule, r)
def test_enable_disable_using_dup_rule(self):
"""
Test that a copy of a rule in the grammar can be used to disable or enable
the equivalent rule in the grammar as well as the rule object passed.
"""
r = PrivateRule("greetWord", AlternativeSet("hello", "hi"))
self.assertTrue(self.rule2.active)
self.grammar.disable_rule(r)
self.assertFalse(r.active, "duplicate rule should be disabled")
self.assertFalse(self.rule2.active, "original rule should be disabled")
# Test enabling it again
self.grammar.enable_rule(r)
self.assertTrue(r.active, "duplicate rule should be enabled again")
self.assertTrue(self.rule2.active, "original rule should be enabled")
def test_enable_disable_compile_output(self):
enabled_output = "#JSGF V1.0;\n" \
"grammar root;\n" \
"public <root> = (<greet>);\n" \
"<greet> = (<greetWord> <name>);\n" \
"<greetWord> = (hello|hi);\n" \
"<name> = (peter|john|mary|anna);\n"
self.assertEqual(self.grammar.compile(), enabled_output)
self.grammar.disable_rule(self.rule1)
self.assertFalse(self.rule1.active)
self.assertEqual(
self.grammar.compile(),
"#JSGF V1.0;\n"
"grammar root;\n",
"disabled output shouldn't have the originally public 'greet' rule"
)
self.grammar.enable_rule(self.rule1)
self.assertTrue(self.rule1.active)
self.assertEqual(self.grammar.compile(), enabled_output)
# Add another public rule and test again
self.grammar.add_rule(PublicRule("test", "testing"))
self.grammar.disable_rule(self.rule1)
self.assertFalse(self.rule1.active)
self.assertEqual(
self.grammar.compile(),
"#JSGF V1.0;\n"
"grammar root;\n"
"public <root> = (<test>);\n"
"<greetWord> = (hello|hi);\n"
"<name> = (peter|john|mary|anna);\n"
"<test> = testing;\n",
"disabled output should have the originally public 'test' rule"
)
if __name__ == '__main__':
unittest.main()
main.rs | use std::error::Error;
use logwatcher::{LogWatcher, LogWatcherAction};
use serenity::{
async_trait,
http::Http,
model::{channel::Message, gateway::Ready, id::ChannelId},
prelude::*,
};
use tokio::fs;
use tokio::sync::mpsc;
struct Handler {
listen_channel_id: u64,
rcon_connection: Mutex<rcon::Connection>,
}
#[async_trait]
impl EventHandler for Handler {
async fn message(&self, _ctx: Context, msg: Message) {
if msg.channel_id == self.listen_channel_id && !msg.author.bot {
// TODO handle empty messages with embeds, attachments, etc
let message_text = format!("{}: {}", msg.author.name, msg.content);
let message_text = message_text.replace('\\', "\\\\");
let message_text = message_text.replace('\'', "\\'");
if let Err(e) = self
.rcon_connection
.lock()
.await
.cmd(&format!(
"/silent-command game.print('[Discord] {}')",
message_text
))
.await
{
println!("Couldn't send message to rcon: {:?}", e);
}
}
}
async fn ready(&self, _ctx: Context, _ready: Ready) {
println!("Discord event handler ready");
}
}
impl Handler {
fn new(listen_channel_id: u64, rcon_connection: rcon::Connection) -> Handler {
Handler {
listen_channel_id,
rcon_connection: Mutex::new(rcon_connection),
}
}
}
#[derive(serde::Deserialize, Clone)]
struct Config {
channel_id: u64,
discord_token: String,
log_file_path: String,
rcon_address: String,
rcon_password: String,
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
println!("reading config");
let config_str = fs::read_to_string("config.toml").await?;
let config: Config = toml::from_str(&config_str)?;
println!("setting up rcon client");
let rcon = rcon::Connection::builder()
.enable_factorio_quirks(true)
.connect(config.rcon_address.clone(), &config.rcon_password)
.await?;
let (tx, mut rx) = mpsc::unbounded_channel();
println!("setting up logwatcher");
let config_clone = config.clone();
tokio::task::spawn_blocking(move || {
let mut logwatcher = LogWatcher::register(config_clone.log_file_path)
.expect("could not register logwatcher");
logwatcher.watch(&mut move |line| {
if let Some(msg) = try_get_log_chat_message(line) {
tx.send(msg).expect("couldn't send line to mpsc channel");
}
LogWatcherAction::None
});
println!("logwatcher task exiting");
});
println!("setting up discord writer");
let config_clone = config.clone();
tokio::spawn(async move {
let http = Http::new_with_token(&config_clone.discord_token);
let channel = ChannelId(config_clone.channel_id);
while let Some(line) = rx.recv().await {
channel
.say(&http, line)
.await
.expect("couldn't send message to discord");
}
});
println!("setting up discord client");
let mut discord_client = Client::builder(&config.discord_token)
.event_handler(Handler::new(config.channel_id, rcon))
.await?;
println!("starting discord client");
discord_client.start().await?;
unreachable!()
}
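// Extracts the chat text from a Factorio log line containing " [CHAT] ", skipping
// messages that were relayed from Discord so they are not echoed back.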
fn try_get_log_chat_message(mut line: String) -> Option<String> {
if let Some(offset) = line.find(" [CHAT] ") {
line.replace_range(..offset + 8, "");
if !line.starts_with("[Discord]") {
return Some(line);
}
}
None
}
words.py | from fastapi import APIRouter, Depends
from app.db.crud import get_video_captions
from app.db.session import get_db
from app.model import tf_idf
words_router = r = APIRouter()
@r.get("/topics/{topic}/{number_of_words}")
def by_topic(topic: str, number_of_words: int):
most_important_words = tf_idf.run(topic, number_of_words)
return {"topic": topic, "words": most_important_words}
@r.get("/topic/videos/")
def get_videos_with_words(db=Depends(get_db)):
topic = "business"
words = {
"company",
"want",
"business",
"world",
"actually",
}
return {"value": get_video_captions(db, limit=1)}
| by_topic |
idl_namespace.py | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
IDLNamespace for PPAPI
This file defines the behavior of the AST namespace which allows for resolving
a symbol as one or more AST nodes given a release or range of releases.
"""
import sys
from idl_option import GetOption, Option, ParseOptions
from idl_log import ErrOut, InfoOut, WarnOut
from idl_release import IDLRelease, IDLReleaseList
Option('label', 'Use the specified label blocks.', default='Chrome')
Option('namespace_debug', 'Print additional namespace debugging information.')
#
# IDLNamespace
#
# IDLNamespace provides a mapping between a symbol name and an IDLReleaseList
# which contains IDLRelease objects. It provides an interface for fetching
# one or more IDLNodes based on a release or range of releases.
#
class IDLNamespace(object):
def __init__(self, parent):
self.namespace = {}
self.parent = parent
def Dump(self):
for name in self.namespace:
InfoOut.Log('NAME=%s' % name)
for cver in self.namespace[name].nodes:
InfoOut.Log(' %s' % cver)
InfoOut.Log('')
def FindRelease(self, name, release):
verlist = self.namespace.get(name, None)
if verlist == None:
if self.parent:
return self.parent.FindRelease(name, release)
else:
return None
return verlist.FindRelease(release)
def FindRange(self, name, rmin, rmax):
verlist = self.namespace.get(name, None)
if verlist == None:
if self.parent:
return self.parent.FindRange(name, rmin, rmax)
else:
return []
return verlist.FindRange(rmin, rmax)
def FindList(self, name):
verlist = self.namespace.get(name, None)
if verlist == None:
if self.parent:
return self.parent.FindList(name)
return verlist
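# Adds |node| under its name, creating the IDLReleaseList on first use; the
# value returned by IDLReleaseList.AddNode indicates whether the add succeeded.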
def AddNode(self, node):
name = node.GetName()
verlist = self.namespace.setdefault(name,IDLReleaseList())
if GetOption('namespace_debug'):
print "Adding to namespace: %s" % node
return verlist.AddNode(node)
#
# Testing Code
#
#
# MockNode
#
# Mocks the IDLNode to support error, warning handling, and string functions.
#
class MockNode(IDLRelease):
def __init__(self, name, rmin, rmax):
self.name = name
self.rmin = rmin
self.rmax = rmax
self.errors = []
self.warns = []
self.properties = {
'NAME': name,
'release': rmin,
'deprecate' : rmax
}
def __str__(self):
return '%s (%s : %s)' % (self.name, self.rmin, self.rmax)
def GetName(self):
return self.name
def Error(self, msg):
if GetOption('release_debug'): print 'Error: %s' % msg
self.errors.append(msg)
def Warn(self, msg):
if GetOption('release_debug'): print 'Warn: %s' % msg
self.warns.append(msg)
def GetProperty(self, name):
return self.properties.get(name, None)
errors = 0
#
# DumpFailure
#
# Dumps all the information relevant to an add failure.
def DumpFailure(namespace, node, msg):
global errors
print '\n******************************'
print 'Failure: %s %s' % (node, msg)
for warn in node.warns:
print ' WARN: %s' % warn
for err in node.errors:
print ' ERROR: %s' % err
print '\n'
namespace.Dump()
print '******************************\n'
errors += 1
# Add expecting no errors or warnings
def AddOkay(namespace, node):
okay = namespace.AddNode(node)
if not okay or node.errors or node.warns:
DumpFailure(namespace, node, 'Expected success')
# Add expecting a specific warning
def AddWarn(namespace, node, msg):
okay = namespace.AddNode(node)
if not okay or node.errors or not node.warns:
DumpFailure(namespace, node, 'Expected warnings')
if msg not in node.warns:
DumpFailure(namespace, node, 'Expected warning: %s' % msg)
# Add expecting a specific error and any number of warnings
def AddError(namespace, node, msg):
okay = namespace.AddNode(node)
if okay or not node.errors:
DumpFailure(namespace, node, 'Expected errors')
if msg not in node.errors:
DumpFailure(namespace, node, 'Expected error: %s' % msg)
print ">>%s<<\n>>%s<<\n" % (node.errors[0], msg)
# Verify that a FindRelease call on the namespace returns the expected node.
def VerifyFindOne(namespace, name, release, node):
global errors
if (namespace.FindRelease(name, release) != node):
print "Failed to find %s as release %f of %s" % (node, release, name)
namespace.Dump()
print "\n"
errors += 1
# Verify that a FindRange call on the namespace returns a set of expected nodes.
def VerifyFindAll(namespace, name, rmin, rmax, nodes):
global errors
out = namespace.FindRange(name, rmin, rmax)
if (out != nodes):
print "Found [%s] instead of[%s] for releases %f to %f of %s" % (
' '.join([str(x) for x in out]),
' '.join([str(x) for x in nodes]),
rmin,
rmax,
name)
namespace.Dump()
print "\n"
errors += 1
def Main(args):
global errors
ParseOptions(args)
InfoOut.SetConsole(True)
namespace = IDLNamespace(None)
FooXX = MockNode('foo', None, None)
Foo1X = MockNode('foo', 1.0, None)
Foo2X = MockNode('foo', 2.0, None)
Foo3X = MockNode('foo', 3.0, None)
# Verify we succeed with undeprecated adds
AddOkay(namespace, FooXX)
AddOkay(namespace, Foo1X)
AddOkay(namespace, Foo3X)
# Verify we fail to add a node between undeprecated releases
AddError(namespace, Foo2X,
'Overlap in releases: 3.0 vs 2.0 when adding foo (2.0 : None)')
BarXX = MockNode('bar', None, None)
Bar12 = MockNode('bar', 1.0, 2.0)
Bar23 = MockNode('bar', 2.0, 3.0)
Bar34 = MockNode('bar', 3.0, 4.0)
# Verify we succeed with fully qualified releases
namespace = IDLNamespace(namespace)
AddOkay(namespace, BarXX)
AddOkay(namespace, Bar12)
# Verify we warn when detecting a gap
AddWarn(namespace, Bar34, 'Gap in release numbers.')
# Verify we fail when inserting into this gap
# (NOTE: while this could be legal, it is sloppy so we disallow it)
AddError(namespace, Bar23, 'Declarations out of order.')
# Verify local namespace
VerifyFindOne(namespace, 'bar', 0.0, BarXX)
VerifyFindAll(namespace, 'bar', 0.5, 1.5, [BarXX, Bar12])
# Verify the correct release of the object is found recursively
VerifyFindOne(namespace, 'foo', 0.0, FooXX)
VerifyFindOne(namespace, 'foo', 0.5, FooXX)
VerifyFindOne(namespace, 'foo', 1.0, Foo1X)
VerifyFindOne(namespace, 'foo', 1.5, Foo1X)
VerifyFindOne(namespace, 'foo', 3.0, Foo3X)
VerifyFindOne(namespace, 'foo', 100.0, Foo3X)
# Verify the correct range of objects is found
VerifyFindAll(namespace, 'foo', 0.0, 1.0, [FooXX])
VerifyFindAll(namespace, 'foo', 0.5, 1.0, [FooXX])
VerifyFindAll(namespace, 'foo', 1.0, 1.1, [Foo1X])
VerifyFindAll(namespace, 'foo', 0.5, 1.5, [FooXX, Foo1X])
VerifyFindAll(namespace, 'foo', 0.0, 3.0, [FooXX, Foo1X])
VerifyFindAll(namespace, 'foo', 3.0, 100.0, [Foo3X])
FooBar = MockNode('foobar', 1.0, 2.0)
namespace = IDLNamespace(namespace)
AddOkay(namespace, FooBar)
if errors:
print 'Test failed with %d errors.' % errors
else:
print 'Passed.'
return errors
if __name__ == '__main__':
sys.exit(Main(sys.argv[1:]))
model_helm_repos_delete_response.go | /*
* Pipeline API
*
* Pipeline is a feature rich application platform, built for containers on top of Kubernetes to automate the DevOps experience, continuous application development and the lifecycle of deployments.
*
* API version: latest
* Contact: [email protected]
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package pipeline
// HelmReposDeleteResponse struct for HelmReposDeleteResponse
type HelmReposDeleteResponse struct {
Status int32 `json:"status,omitempty"`
Message string `json:"message,omitempty"`
Name string `json:"name,omitempty"`
}
group_config.py | # Copyright 2019 The ROBEL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Configuration for a tracker component group."""
from typing import Iterable, Optional
import numpy as np
from transforms3d.euler import euler2mat, quat2euler
from transforms3d.quaternions import quat2mat
from robel.simulation.sim_scene import SimScene
class TrackerGroupConfig:
"""Group configuration for a TrackerComponent."""
def __init__(self,
sim_scene: SimScene,
element_name: Optional[str] = None,
element_type: Optional[str] = None,
qpos_indices: Optional[Iterable[int]] = None,
qvel_indices: Optional[Iterable[int]] = None,
sim_observation_noise: Optional[float] = None):
"""Initializes a group configuration for a TrackerComponent.
Args:
sim_scene: The simulation, used for validation purposes.
element_name: The name of the element to use for tracking in
simulation.
element_type: The type of the element as defined in the XML.
Should be one of `site`, `body`, `geom`, or `joint`. If this is
`joint`, `qpos_indices` and `qvel_indices` should be
provided.
qpos_indices: The indices into `MjData.qpos` to read for the
joint element position and rotation.
qvel_indices: The indices into `MjData.qvel` to read for the joint
element velocity. This defaults to `qpos_indices`.
sim_observation_noise: The range of the observation noise (in
meters) to apply to the state in simulation.
"""
self.element_type = element_type
if self.element_type not in ['site', 'body', 'geom', 'joint']:
raise ValueError('Unknown element type %s' % self.element_type)
self.element_name = element_name
self.element_id = None
self.element_attr = None
self.qpos_indices = None
self.qvel_indices = None
self._is_euler = False
if self.element_type == 'joint':
if qpos_indices is None:
raise ValueError('Must provide qpos_indices for joints.')
# Ensure that the qpos indices are valid.
nq = sim_scene.model.nq
assert all(-nq <= i < nq for i in qpos_indices), \
'All qpos indices must be in [-{}, {}]'.format(nq, nq - 1)
self.qpos_indices = np.array(qpos_indices, dtype=int)
if len(self.qpos_indices) == 6:
self._is_euler = True
elif len(self.qpos_indices) != 7:
raise ValueError('qpos_indices must be 6 or 7 elements.')
if qvel_indices is None:
if not self._is_euler:
raise ValueError(
'qvel_indices must be provided for free joints.')
qvel_indices = qpos_indices
# Ensure that the qvel indices are valid.
nv = sim_scene.model.nv
assert all(-nv <= i < nv for i in qvel_indices), \
'All qvel indices must be in [-{}, {}]'.format(nv, nv - 1)
self.qvel_indices = np.array(qvel_indices, dtype=int)
else:
self.element_attr = (lambda obj, attr_name: getattr(
obj, self.element_type + '_' + attr_name))
self.element_id = self.element_attr(sim_scene.model, 'name2id')(
element_name)
self.sim_observation_noise = sim_observation_noise
def get_pos(self, sim_scene: SimScene) -> np.ndarray:
"""Returns the cartesian position of the element."""
if self.qpos_indices is not None:
return sim_scene.data.qpos[self.qpos_indices[:3]]
return self.element_attr(sim_scene.data, 'xpos')[self.element_id, :]
def get_rot(self, sim_scene: SimScene) -> np.ndarray:
"""Returns the (3x3) rotation matrix of the element."""
if self.qpos_indices is not None:
qpos = sim_scene.data.qpos[self.qpos_indices[3:]]
if self._is_euler:
return euler2mat(*qpos, axes='rxyz')
return quat2mat(qpos)
return self.element_attr(sim_scene.data,
'xmat')[self.element_id].reshape((3, 3))
def get_vel(self, sim_scene: SimScene) -> np.ndarray:
"""Returns the cartesian velocity of the element."""
if self.qvel_indices is not None:
return sim_scene.data.qvel[self.qvel_indices[:3]]
raise NotImplementedError('Cartesian velocity is not supported for ' +
self.element_type)
def get_angular_vel(self, sim_scene: SimScene) -> np.ndarray:
"""Returns the angular velocity (x, y, z) of the element."""
if self.qvel_indices is not None:
return sim_scene.data.qvel[self.qvel_indices[3:]]
raise NotImplementedError('Angular velocity is not supported for ' +
self.element_type)
def set_pos(self, sim_scene: SimScene, pos: np.ndarray):
"""Sets the cartesian position of the element."""
if self.qpos_indices is not None:
sim_scene.data.qpos[self.qpos_indices[:len(pos)]] = pos
return
self.element_attr(sim_scene.model,
'pos')[self.element_id, :len(pos)] = pos
def set_rot_quat(self, sim_scene: SimScene, quat: np.ndarray):
"""Sets the cartesian position of the element."""
if self.qpos_indices is not None:
qpos = quat
if self._is_euler:
qpos = quat2euler(quat, axes='rxyz')
sim_scene.data.qpos[self.qpos_indices[3:]] = qpos
return
self.element_attr(sim_scene.model, 'quat')[self.element_id, :] = quat
|
data_preprocessing.py | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import json
import os
import pickle
import random
import re
import string
from collections import Counter
import numpy as np
import torch
from tqdm.auto import tqdm
from nemo.utils import logging
from nemo.utils.env_var_parsing import get_envint
__all__ = [
'DataProcessor',
'get_label_stats',
'partition_data',
'write_files',
'write_data',
'create_dataset',
'read_csv',
'get_dataset',
'partition',
'map_entities',
'get_entities',
'get_data',
'reverse_dict',
'get_intent_labels',
'get_stats',
'DATABASE_EXISTS_TMP',
'MODE_EXISTS_TMP',
'is_whitespace',
'write_vocab',
'if_exist',
'remove_punctuation_from_sentence',
'dataset_to_ids',
'get_freq_weights',
'fill_class_weights',
'normalize_answer',
'get_labels_to_labels_id_mapping',
'get_vocab',
'find_newlines',
'load_data_indices',
'chinese_punctuation',
'check_chinese_char',
'normalize_chinese_answer',
]
DATABASE_EXISTS_TMP = '{} dataset has already been processed and stored at {}'
MODE_EXISTS_TMP = '{} mode of {} dataset has already been processed and stored at {}'
class DataProcessor(object):
"""Base class for data converters for sequence classification data sets."""
def get_train_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the train set."""
raise NotImplementedError()
def get_dev_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the dev set."""
raise NotImplementedError()
def get_labels(self):
"""Gets the list of labels for this data set."""
raise NotImplementedError()
@classmethod
def _read_tsv(cls, input_file, quotechar=None):
"""Reads a tab separated value file."""
with open(input_file, "r", encoding="utf-8-sig") as f:
reader = csv.reader(f, delimiter="\t", quotechar=quotechar)
lines = []
for line in reader:
# if sys.version_info[0] == 2:
# line = list(unicode(cell, 'utf-8') for cell in line)
lines.append(line)
return lines
chinese_punctuation = {
'——',
'‘',
'’',
'“',
'”',
'…',
'、',
'。',
'〈',
'〉',
'《',
'》',
'「',
'」',
'『',
'』',
'【',
'】',
'〔',
'〕',
'!',
'(',
')',
',',
'.',
':',
';',
'?',
}
def check_chinese_char(ch):
"""Check if a character is in Chinese."""
if u'\u4e00' <= ch <= u'\u9fff' or ch in chinese_punctuation:
return True
else:
return False
def normalize_chinese_answer(text):
"""Remove the Chinese punctuation and separate Chinese answers to char-level"""
def remove_punc(text):
exclude = chinese_punctuation
return ''.join(ch for ch in text if ch not in exclude)
def separate_char(text):
ch_list = []
for ch in text:
ch_list.append(ch)
return ch_list
return separate_char(remove_punc(text))
def normalize_answer(s):
"""Lower text and remove punctuation, articles and extra whitespace."""
def remove_articles(text):
return re.sub(r'\b(a|an|the)\b', ' ', text)
def white_space_fix(text):
return ' '.join(text.split())
def remove_punc(text):
exclude = set(string.punctuation)
return ''.join(ch for ch in text if ch not in exclude)
def lower(text):
return text.lower()
return white_space_fix(remove_articles(remove_punc(lower(s))))
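# Illustrative example (not part of the original NeMo source): the SQuAD-style
# normalization above lowercases, strips punctuation and the articles a/an/the,
# and collapses whitespace.
def _example_normalize_answer():
    assert normalize_answer("The  quick, brown fox!") == "quick brown fox"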
def get_label_stats(labels, outfile='stats.tsv', verbose=True):
'''
Args:
labels: list of all labels
outfile: path to the file where to save label stats
Returns:
total (int): total number of labels
freq_dict (dict): mapping from each label to its frequency
max_id (int): the largest label value
'''
labels = Counter(labels)
total = sum(labels.values())
out = open(outfile, 'w')
i = 0
freq_dict = {}
label_frequencies = labels.most_common()
for k, v in label_frequencies:
out.write(f'{k}\t\t{round(v/total,5)}\t\t{v}\n')
if verbose and i < 3:
logging.info(f'label: {k}, {v} out of {total} ({(v / total)*100.0:.2f}%).')
i += 1
freq_dict[k] = v
out.close()
return total, freq_dict, max(labels.keys())
def partition_data(intent_queries, slot_tags, split=0.1):
n = len(intent_queries)
n_dev = int(n * split)
dev_idx = set(random.sample(range(n), n_dev))
dev_intents, dev_slots, train_intents, train_slots = [], [], [], []
dev_intents.append('sentence\tlabel\n')
train_intents.append('sentence\tlabel\n')
for i, item in enumerate(intent_queries):
if i in dev_idx:
dev_intents.append(item)
dev_slots.append(slot_tags[i])
else:
train_intents.append(item)
train_slots.append(slot_tags[i])
return train_intents, train_slots, dev_intents, dev_slots
def write_files(data, outfile):
with open(outfile, 'w') as f:
for item in data:
item = f'{item.strip()}\n'
f.write(item)
def write_data(data, slot_dict, intent_dict, outfold, mode, uncased):
intent_file = open(f'{outfold}/{mode}.tsv', 'w')
intent_file.write('sentence\tlabel\n')
slot_file = open(f'{outfold}/{mode}_slots.tsv', 'w')
for tokens, slots, intent in data:
text = ' '.join(tokens)
if uncased:
text = text.lower()
intent_file.write(f'{text}\t{intent_dict[intent]}\n')
slots = [str(slot_dict[slot]) for slot in slots]
slot_file.write(' '.join(slots) + '\n')
intent_file.close()
slot_file.close()
def create_dataset(train, dev, slots, intents, uncased, outfold):
os.makedirs(outfold, exist_ok=True)
if 'O' in slots:
slots.remove('O')
slots = sorted(list(slots)) + ['O']
intents = sorted(list(intents))
slots = write_vocab(slots, f'{outfold}/dict.slots.csv')
intents = write_vocab(intents, f'{outfold}/dict.intents.csv')
write_data(train, slots, intents, outfold, 'train', uncased)
write_data(dev, slots, intents, outfold, 'test', uncased)
def read_csv(file_path):
rows = []
with open(file_path, 'r') as csvfile:
| ue, value2entity = get_entities(files)
data, slots, intents = get_data(files)
if len(data) == 1:
train, dev = partition(data[0], split=dev_split)
else:
train, dev = data[0], data[1]
return train, dev, slots, intents
def partition(data, split=0.1):
n = len(data)
n_dev = int(n * split)
dev_idx = set(random.sample(range(n), n_dev))
dev, train = [], []
for i, item in enumerate(data):
if i in dev_idx:
dev.append(item)
else:
train.append(item)
return train, dev
def map_entities(entity2value, entities):
for key in entities:
if 'data' in entities[key]:
if key not in entity2value:
entity2value[key] = set([])
values = []
for value in entities[key]['data']:
values.append(value['value'])
values.extend(value['synonyms'])
entity2value[key] = entity2value[key] | set(values)
return entity2value
def get_entities(files):
entity2value = {}
for file in files:
with open(file, 'r') as json_file:
data = json.load(json_file)
entity2value = map_entities(entity2value, data['entities'])
value2entity = reverse_dict(entity2value)
return entity2value, value2entity
def get_data(files):
all_data, all_slots, all_intents = [], set(['O']), set()
for file in files:
file_data = []
with open(file, 'r') as json_file:
data = json.load(json_file)
for intent in data['intents']:
all_intents.add(intent)
utterances = data['intents'][intent]['utterances']
for utterance in utterances:
tokens, slots = [], []
for frag in utterance['data']:
frag_tokens = frag['text'].strip().split()
tokens.extend(frag_tokens)
if 'slot_name' not in frag:
slot = 'O'
else:
slot = frag['slot_name']
all_slots.add(slot)
slots.extend([slot] * len(frag_tokens))
file_data.append((tokens, slots, intent))
all_data.append(file_data)
return all_data, all_slots, all_intents
def reverse_dict(entity2value):
value2entity = {}
for entity in entity2value:
for value in entity2value[entity]:
value2entity[value] = entity
return value2entity
def get_intent_labels(intent_file):
labels = {}
label = 0
with open(intent_file, 'r') as f:
for line in f:
intent = line.strip()
labels[intent] = label
label += 1
return labels
def get_stats(lengths):
logging.info('Some stats of the lengths of the sequences:')
lengths = np.asarray(lengths)
logging.info(
f'Min: {np.min(lengths)} | \
Max: {np.max(lengths)} | \
Mean: {np.mean(lengths)} | \
Median: {np.median(lengths)}'
)
logging.info(f'75 percentile: {np.percentile(lengths, 75):.2f}')
logging.info(f'99 percentile: {np.percentile(lengths, 99):.2f}')
def is_whitespace(c):
if c == " " or c == "\t" or c == "\r" or c == "\n" or ord(c) == 0x202F:
return True
return False
def write_vocab(items, outfile):
vocab = {}
idx = 0
with open(outfile, 'w') as f:
for item in items:
f.write(item + '\n')
vocab[item] = idx
idx += 1
return vocab
def get_labels_to_labels_id_mapping(file):
'''
Reads labels from the file and returns labels to id mapping dictionary
Args:
file: path to file
Returns:
labels to id mapping dictionary
'''
lines = open(file, 'r').readlines()
lines = [line.strip() for line in lines if line.strip()]
label_ids = {lines[i]: i for i in range(len(lines))}
return label_ids
def if_exist(outfold, files):
if not os.path.exists(outfold):
return False
for file in files:
if not os.path.exists(f'{outfold}/{file}'):
return False
return True
def remove_punctuation_from_sentence(sentence):
sentence = re.sub('[' + string.punctuation + ']', '', sentence)
sentence = sentence.lower()
return sentence
def dataset_to_ids(dataset, tokenizer, cache_ids=False, add_bos_eos=True, cache_data_per_node=False, use_cache=False):
"""
Reads dataset from file line by line, tokenizes each line with tokenizer,
and returns list of lists which corresponds to ids of tokenized strings.
Args:
dataset (str): path to dataset
tokenizer: tokenizer to convert text into ids
cache_ids (bool): if True, ids are saved to disk as pickle file
with similar name (e.g., data.txt --> data.txt.pkl)
add_bos_eos (bool): whether to add <s> and </s> symbols (e.g., for NMT)
cache_data_per_node (bool): Cache data on local_rank 0. Use when there is not a shared-filesystem.
use_cache (bool): Use cached ids if they exist.
Returns:
ids: list of ids which correspond to tokenized strings of the dataset
"""
cached_ids_dataset = dataset + str(".pkl")
if use_cache and os.path.isfile(cached_ids_dataset):
logging.info("Loading cached tokenized dataset ...")
ids = pickle.load(open(cached_ids_dataset, "rb"))
else:
logging.info(f"Tokenizing dataset {dataset}...")
data = open(dataset, "rb").readlines()
ids = []
for sentence in tqdm(data, desc='Tokenizing sentence'):
sent_ids = tokenizer.text_to_ids(sentence.decode("utf-8"))
if add_bos_eos:
sent_ids = [tokenizer.bos_id] + sent_ids + [tokenizer.eos_id]
ids.append(sent_ids)
if cache_ids and (
not torch.distributed.is_initialized() or (cache_data_per_node and get_envint("LOCAL_RANK", 0) == 0)
):
logging.info("Caching tokenized dataset ...")
pickle.dump(ids, open(cached_ids_dataset, "wb"))
return ids
def get_freq_weights(label_freq):
"""
The goal is to give more weight to the classes with fewer samples so that they
can balance the classes with higher frequencies. The weight of each label is
computed as total_size / (num_labels * label_frequency).
"""
total_size = 0
for lf in label_freq.values():
total_size += lf
weighted_slots = {label: (total_size / (len(label_freq) * freq)) for label, freq in label_freq.items()}
return weighted_slots
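# Illustrative example (not part of the original NeMo source): with 50 samples
# split 40/10 across two labels, the rarer label receives the larger weight,
# since 50 / (2 * 40) = 0.625 and 50 / (2 * 10) = 2.5.
def _example_get_freq_weights():
    assert get_freq_weights({'O': 40, 'B-LOC': 10}) == {'O': 0.625, 'B-LOC': 2.5}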
def fill_class_weights(weights, max_id=-1):
"""
Takes a dictionary of label weights and creates a list of length max_id + 1
filled with those weights. Labels missing from the dictionary get weight 1.0.
Args:
weights: dictionary of weights for labels, labels as keys and weights are their values
max_id: the largest label id in the dataset, default=-1 would consider the largest label in the weights dictionary as max_id
Returns:
weights_list: list of weights for labels
"""
if max_id < 0:
max_id = 0
for l in weights.keys():
max_id = max(max_id, l)
all_weights = [1.0] * (max_id + 1)
for i in range(len(all_weights)):
if i in weights:
all_weights[i] = weights[i]
return all_weights
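# Illustrative example (not part of the original NeMo source): labels missing
# from the weight dictionary default to 1.0 and the list spans ids 0..max_id.
def _example_fill_class_weights():
    assert fill_class_weights({0: 2.5, 2: 0.5}, max_id=3) == [2.5, 1.0, 0.5, 1.0]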
def get_vocab(file):
lines = open(file, 'r').readlines()
lines = [line.strip() for line in lines if line.strip()]
labels = {i: lines[i] for i in range(len(lines))}
return labels
def find_newlines(contents):
"""
Yields the byte offsets at which non-empty lines start in a text file.
"""
start = 0
while True:
try:
# index and split are much faster than Python for loops
new_start = contents.index(b"\n", start)
line = (
contents[start:new_start]
.replace(b"\xc2\x99", b" ")
.replace(b"\xc2\xa0", b" ")
.decode("utf-8", errors="ignore")
)
if len(line.split()) > 0:
yield start
start = new_start + 1
except ValueError:
break
def load_data_indices(idx_file: str, data_file: str, savename: str):
"""
Loads the dataset index file if it exists.
"""
data_dir = data_file[: data_file.rfind('/')]
mode = data_file[data_file.rfind('/') + 1 : data_file.rfind('.')]
idx_file = f"{data_dir}/{mode}_{savename}.pkl"
if os.path.isfile(idx_file):
# If the sentence indices file already exists, load from it
with open(idx_file, "rb") as f:
indices = pickle.load(f)
return indices, idx_file, data_dir
return None, idx_file, data_dir
| read_csv = csv.reader(csvfile, delimiter=',')
for row in read_csv:
rows.append(row)
return rows
def get_dataset(files, dev_split=0.1):
# entity2val |
setup.py | #!/usr/bin/env python
import codecs
import os
import re
import sys
from setuptools import setup
DESCRIPTION = 'UI-level acceptance test framework'
def load_requirements(*requirements_paths):
"""
Load all requirements from the specified requirements files.
Requirements will include any constraints from files specified
with -c in the requirements files.
Returns a list of requirement strings.
"""
# UPDATED VIA SEMGREP - if you need to remove/modify this method remove this line and add a comment specifying why.
# minor update to allow brackets in library names
requirements = {}
constraint_files = set()
# groups "my-package-name<=x.y.z,..." into ("my-package-name", "<=x.y.z,...")
requirement_line_regex = re.compile(r"([a-zA-Z0-9-_.\[\]]+)([<>=][^#\s]+)?")
def add_version_constraint_or_raise(current_line, current_requirements, add_if_not_present):
regex_match = requirement_line_regex.match(current_line)
if regex_match:
package = regex_match.group(1)
version_constraints = regex_match.group(2)
existing_version_constraints = current_requirements.get(package, None)
# it's fine to add constraints to an unconstrained package, but raise an error if there are already
# constraints in place
if existing_version_constraints and existing_version_constraints != version_constraints:
raise BaseException(f'Multiple constraint definitions found for {package}:'
f' "{existing_version_constraints}" and "{version_constraints}".'
f' Combine constraints into one location with {package}'
f'{existing_version_constraints},{version_constraints}.')
if add_if_not_present or package in current_requirements:
current_requirements[package] = version_constraints
# process .in files and store the path to any constraint files that are pulled in
for path in requirements_paths:
with open(path) as reqs:
for line in reqs:
if is_requirement(line):
add_version_constraint_or_raise(line, requirements, True)
if line and line.startswith('-c') and not line.startswith('-c http'):
constraint_files.add(os.path.dirname(path) + '/' + line.split('#')[0].replace('-c', '').strip())
# process constraint files and add any new constraints found to existing requirements
for constraint_file in constraint_files:
with open(constraint_file) as reader:
for line in reader:
if is_requirement(line):
add_version_constraint_or_raise(line, requirements, False)
# process back into list of pkg><=constraints strings
constrained_requirements = [f'{pkg}{version or ""}' for (pkg, version) in sorted(requirements.items())]
return constrained_requirements
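# Illustrative note (not part of the original setup script): the requirement
# regex above splits a line such as "my-package[extra]<=1.2,>=1.0" into the
# package name "my-package[extra]" and the constraint string "<=1.2,>=1.0";
# a bare line like "my-package" matches with an empty constraint group.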
def is_requirement(line):
"""
Return True if the requirement line is a package requirement.
Returns:
bool: True if the line is not blank, a comment,
a URL, or an included file
"""
# UPDATED VIA SEMGREP - if you need to remove/modify this method remove this line and add a comment specifying why
| print("Tagging the version on github:")
os.system("git tag -a v%s -m 'v%s'" % (VERSION, VERSION))
os.system("git push --tags")
sys.exit()
with codecs.open('README.rst', 'r', 'utf-8') as f:
LONG_DESCRIPTION = f.read()
def get_version(*file_paths):
"""
Extract the version string from the file at the given relative path fragments.
"""
filename = os.path.join(os.path.dirname(__file__), *file_paths)
with open(filename, encoding='utf-8') as opened_file:
version_file = opened_file.read()
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.')
VERSION = get_version("bok_choy", "__init__.py")
setup(
name='bok_choy',
version=VERSION,
author='edX',
author_email='[email protected]',
url='http://github.com/edx/bok-choy',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
license='Apache 2.0',
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance'],
packages=['bok_choy', 'bok_choy/a11y'],
package_data={'bok_choy': ['vendor/google/*.*', 'vendor/axe-core/*.*']},
install_requires=load_requirements('requirements/base.in'),
extras_require={
'visual_diff': ['needle']
}
) | return line and line.strip() and not line.startswith(('-r', '#', '-e', 'git+', '-c'))
if sys.argv[-1] == 'tag': |
__init__.py | """
gof.py
gof stands for Graph Optimization Framework.
The gof submodule of theano implements a framework
for manipulating programs described as graphs. The
gof module defines basic theano graph concepts:
-Apply nodes, which represent the application
of an Op to Variables. Together these make up a
graph.
-The Type, needed for Variables to make sense.
-The FunctionGraph, which defines how a subgraph
should be interpreted to implement a function.
-The Thunk, a callable object that becomes part
of the executable emitted by theano.
-Linkers/VMs, the objects that call Thunks in
sequence in order to execute a theano program.
Conceptually, gof is intended to be sufficiently abstract | numerical computation, created by implementing
tensor Variables and Ops that perform mathematical functions.
A different kind of domain-specific language could be
made by using gof with different Variables and Ops.
In practice, gof and the rest of theano are somewhat more
tightly intertwined.
Currently, gof also contains much of the C compilation
functionality. Ideally this should be refactored into
a different submodule.
For more details and discussion, see the theano-dev
e-mail thread "What is gof?".
"""
from __future__ import absolute_import, print_function, division
from theano.gof.cc import \
CLinker, OpWiseCLinker, DualLinker, HideC
from theano.gof.fg import \
CachedConstantError, InconsistencyError, MissingInputError, FunctionGraph
from theano.gof.destroyhandler import \
DestroyHandler
from theano.gof.graph import \
Apply, Variable, Constant, view_roots
from theano.gof.link import \
Container, Linker, LocalLinker, PerformLinker, WrapLinker, WrapLinkerMany
from theano.gof.op import \
Op, OpenMPOp, PureOp, COp, ops_with_inner_function
from theano.gof.type import EnumType, EnumList, CEnumType
from theano.gof.opt import (
Optimizer,
optimizer, inplace_optimizer,
SeqOptimizer,
MergeOptimizer,
LocalOptimizer, local_optimizer, LocalOptGroup,
OpSub, OpRemove, PatternSub,
NavigatorOptimizer, TopoOptimizer, EquilibriumOptimizer,
OpKeyOptimizer, CheckStackTraceOptimization)
from theano.gof.optdb import \
DB, LocalGroupDB, Query, \
EquilibriumDB, SequenceDB, ProxyDB
from theano.gof.toolbox import \
Feature, \
Bookkeeper, History, Validator, ReplaceValidate, NodeFinder,\
PrintListener, ReplacementDidntRemovedError, NoOutputFromInplace
from theano.gof.type import \
Type, Generic, generic
from theano.gof.utils import \
hashtype, object2, MethodNotDefined
from theano.gof.params_type import ParamsType, Params
import theano
if theano.config.cmodule.preload_cache:
cc.get_module_cache() | that it could be used to implement a language other than
theano. ie, theano is a domain-specific language for |
handlers.go | package oas
import (
"fmt"
"net/http"
"github.com/ghodss/yaml"
"github.com/go-openapi/spec"
)
// SpecHandlerType represents spec handler type.
type SpecHandlerType int
const (
// SpecHandlerTypeDynamic represents dynamic spec handler.
SpecHandlerTypeDynamic SpecHandlerType = iota + 1
// SpecHandlerTypeStatic represents static spec handler.
SpecHandlerTypeStatic
)
// DynamicSpecHandler returns HTTP handler for OpenAPI spec that
// changes its host and schemes dynamically based on incoming request.
func DynamicSpecHandler(s *spec.Swagger) http.Handler |
type dynamicSpecHandler struct {
s *spec.Swagger
}
func (h *dynamicSpecHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
host := req.Header.Get("X-Forwarded-Host")
if host == "" {
host = req.Host
}
scheme := req.Header.Get("X-Forwarded-Proto")
if scheme == "" {
scheme = req.Header.Get("X-Scheme")
if scheme == "" {
scheme = "http"
}
}
specShallowCopy := &spec.Swagger{
VendorExtensible: h.s.VendorExtensible,
SwaggerProps: h.s.SwaggerProps,
}
specShallowCopy.Host = host
specShallowCopy.Schemes = []string{scheme}
writeSpec(w, specShallowCopy)
}
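// exampleServeSpec is an illustrative sketch (not part of the original
// package): it mounts the dynamic spec handler on a mux so the served
// document picks up the Host and X-Forwarded-* headers of each request.
// The path and listen address are hypothetical.
func exampleServeSpec(s *spec.Swagger) error {
	mux := http.NewServeMux()
	mux.Handle("/openapi.yaml", DynamicSpecHandler(s))
	return http.ListenAndServe(":8080", mux)
}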
// StaticSpecHandler returns HTTP handler for static OpenAPI spec.
func StaticSpecHandler(s *spec.Swagger) http.Handler {
return &staticSpecHandler{s: s}
}
type staticSpecHandler struct {
s *spec.Swagger
}
func (h *staticSpecHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
writeSpec(w, h.s)
}
func writeSpec(w http.ResponseWriter, s *spec.Swagger) {
b, err := yaml.Marshal(s)
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/x-yaml")
w.Header().Set("Content-Length", fmt.Sprintf("%d", len(b)))
w.Write(b) // nolint
}
| {
return &dynamicSpecHandler{s: s}
} |
debug.rs | //! Debug utilities.
use std::borrow::Cow;
use std::fmt;
use std::str::FromStr;
use conv::errors::NoError;
/// A dummy type whose `Debug` impl simply writes out the wrapped `str`.
///
/// This is useful when implementing `Debug` for other types using the standard
/// helpers like `debug_struct`.
pub struct ExplicitDebug<'d>(Cow<'d, str>);
impl<'d> ExplicitDebug<'d> {
#[inline]
pub fn new(s: Cow<'d, str>) -> Self {
ExplicitDebug(s)
}
}
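// Illustrative usage (not part of the original module): embedding an already
// rendered expression in a `debug_struct` field so it is printed verbatim
// rather than as a quoted string, e.g.
//
//     f.debug_struct("Foo")
//         .field("bar", &ExplicitDebug::from("1 + 2"))
//         .finish() // renders as `Foo { bar: 1 + 2 }`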
// Conversions from string (borrow/owned).
impl<'d> From<Cow<'d, str>> for ExplicitDebug<'d> {
fn from(s: Cow<'d, str>) -> Self {
ExplicitDebug(s) | ExplicitDebug(s.into())
}
}
impl<'d> From<String> for ExplicitDebug<'d> {
fn from(s: String) -> Self {
ExplicitDebug(s.into())
}
}
impl<'d> FromStr for ExplicitDebug<'d> {
type Err = NoError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(ExplicitDebug(s.to_owned().into()))
}
}
impl<'d> fmt::Debug for ExplicitDebug<'d> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{}", self.0)
}
} | }
}
impl<'d> From<&'d str> for ExplicitDebug<'d> {
fn from(s: &'d str) -> Self { |
0015_attachment.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
from django.conf import settings
class | (migrations.Migration):
dependencies = [
('zerver', '0014_realm_emoji_url_length'),
]
operations = [
migrations.CreateModel(
name='Attachment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('file_name', models.CharField(max_length=100, db_index=True)),
('path_id', models.TextField(db_index=True)),
('create_time', models.DateTimeField(default=django.utils.timezone.now, db_index=True)),
('messages', models.ManyToManyField(to='zerver.Message')),
('owner', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
]
| Migration |
kubernetes.go | // Copyright 2020 The PipeCD Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package kubernetes
import (
"context"
"encoding/json"
"fmt"
"io/ioutil"
"strings"
"time"
"go.uber.org/zap"
provider "github.com/pipe-cd/pipe/pkg/app/piped/cloudprovider/kubernetes"
"github.com/pipe-cd/pipe/pkg/app/piped/cloudprovider/kubernetes/resource"
"github.com/pipe-cd/pipe/pkg/app/piped/deploysource"
"github.com/pipe-cd/pipe/pkg/app/piped/diff"
"github.com/pipe-cd/pipe/pkg/app/piped/planner"
"github.com/pipe-cd/pipe/pkg/model"
)
const (
versionUnknown = "unknown"
)
// Planner plans the deployment pipeline for kubernetes application.
type Planner struct {
}
type registerer interface {
Register(k model.ApplicationKind, p planner.Planner) error
}
// Register registers this planner into the given registerer.
func Register(r registerer) {
r.Register(model.ApplicationKind_KUBERNETES, &Planner{})
}
// Plan decides which pipeline should be used for the given input.
func (p *Planner) Plan(ctx context.Context, in planner.Input) (out planner.Output, err error) {
ds, err := in.TargetDSP.Get(ctx, ioutil.Discard)
if err != nil {
err = fmt.Errorf("error while preparing deploy source data (%v)", err)
return
}
cfg := ds.DeploymentConfig.KubernetesDeploymentSpec
if cfg == nil {
err = fmt.Errorf("missing KubernetesDeploymentSpec in deployment configuration")
return
}
manifestCache := provider.AppManifestsCache{
AppID: in.Deployment.ApplicationId,
Cache: in.AppManifestsCache,
Logger: in.Logger,
}
// Load previous deployed manifests and new manifests to compare.
newManifests, ok := manifestCache.Get(in.Deployment.Trigger.Commit.Hash)
if !ok {
// When the manifests were not in the cache we have to load them.
loader := provider.NewManifestLoader(in.Deployment.ApplicationName, ds.AppDir, ds.RepoDir, in.Deployment.GitPath.ConfigFilename, cfg.Input, in.Logger)
newManifests, err = loader.LoadManifests(ctx)
if err != nil {
return
}
manifestCache.Put(in.Deployment.Trigger.Commit.Hash, newManifests)
}
// Determine application version from the manifests.
if version, e := determineVersion(newManifests); e != nil {
in.Logger.Error("unable to determine version", zap.Error(e))
out.Version = versionUnknown
} else {
out.Version = version
}
// If the deployment was triggered by forcing via web UI,
// we rely on the user's decision.
switch in.Deployment.Trigger.SyncStrategy {
case model.SyncStrategy_QUICK_SYNC:
out.Stages = buildQuickSyncPipeline(cfg.Input.AutoRollback, time.Now())
out.Summary = "Quick sync by applying all manifests (forced via web)"
return
case model.SyncStrategy_PIPELINE:
if cfg.Pipeline == nil {
err = fmt.Errorf("unable to force sync with pipeline because no pipeline was specified")
return
}
out.Stages = buildProgressivePipeline(cfg.Pipeline, cfg.Input.AutoRollback, time.Now())
out.Summary = "Sync with the specified pipeline (forced via web)"
return
}
// If the progressive pipeline was not configured
// we have only one choice, which is applying all manifests.
if cfg.Pipeline == nil || len(cfg.Pipeline.Stages) == 0 {
out.Stages = buildQuickSyncPipeline(cfg.Input.AutoRollback, time.Now())
out.Summary = "Quick sync by applying all manifests (no pipeline was configured)"
return
}
// This deployment is triggered by a commit with the intent to perform pipeline.
// Commit Matcher will be ignored when triggered by a command.
if p := cfg.CommitMatcher.Pipeline; p != "" && in.Deployment.Trigger.Commander == "" {
pipelineRegex, err := in.RegexPool.Get(p)
if err != nil {
err = fmt.Errorf("failed to compile commitMatcher.pipeline(%s): %w", p, err)
return out, err
}
if pipelineRegex.MatchString(in.Deployment.Trigger.Commit.Message) {
out.Stages = buildProgressivePipeline(cfg.Pipeline, cfg.Input.AutoRollback, time.Now())
out.Summary = fmt.Sprintf("Sync progressively because the commit message was matching %q", p)
return out, err
}
}
// This deployment is triggered by a commit with the intent to synchronize.
// Commit Matcher will be ignored when triggered by a command.
if s := cfg.CommitMatcher.QuickSync; s != "" && in.Deployment.Trigger.Commander == "" {
syncRegex, err := in.RegexPool.Get(s)
if err != nil {
err = fmt.Errorf("failed to compile commitMatcher.sync(%s): %w", s, err)
return out, err
}
if syncRegex.MatchString(in.Deployment.Trigger.Commit.Message) {
out.Stages = buildQuickSyncPipeline(cfg.Input.AutoRollback, time.Now())
out.Summary = fmt.Sprintf("Quick sync by applying all manifests because the commit message was matching %q", s)
return out, err
}
}
// This is the first time to deploy this application
// or it was unable to retrieve that value.
// We just apply all manifests.
if in.MostRecentSuccessfulCommitHash == "" {
out.Stages = buildQuickSyncPipeline(cfg.Input.AutoRollback, time.Now())
out.Summary = "Quick sync by applying all manifests because it seems this is the first deployment"
return
}
// Load manifests of the previously applied commit.
oldManifests, ok := manifestCache.Get(in.MostRecentSuccessfulCommitHash)
if !ok {
// When the manifests were not in the cache we have to load them.
var runningDs *deploysource.DeploySource
runningDs, err = in.RunningDSP.Get(ctx, ioutil.Discard)
if err != nil {
err = fmt.Errorf("failed to prepare the running deploy source data (%v)", err)
return
}
loader := provider.NewManifestLoader(in.Deployment.ApplicationName, runningDs.AppDir, runningDs.RepoDir, in.Deployment.GitPath.ConfigFilename, cfg.Input, in.Logger)
oldManifests, err = loader.LoadManifests(ctx)
if err != nil {
err = fmt.Errorf("failed to load previously deployed manifests: %w", err)
return
}
manifestCache.Put(in.MostRecentSuccessfulCommitHash, oldManifests)
}
progressive, desc := decideStrategy(oldManifests, newManifests)
out.Summary = desc
if progressive {
out.Stages = buildProgressivePipeline(cfg.Pipeline, cfg.Input.AutoRollback, time.Now())
return
}
out.Stages = buildQuickSyncPipeline(cfg.Input.AutoRollback, time.Now())
return
}
// First up, checks to see if the workload's `spec.template` has been changed,
// and then checks whether the configmap/secret data has been changed.
func decideStrategy(olds, news []provider.Manifest) (progressive bool, desc string) |
// The assumption that an application has only one workload.
func findWorkload(manifests []provider.Manifest) (provider.Manifest, bool) {
for _, m := range manifests {
if !m.Key.IsDeployment() {
continue
}
return m, true
}
return provider.Manifest{}, false
}
func findConfigs(manifests []provider.Manifest) map[provider.ResourceKey]provider.Manifest {
configs := make(map[provider.ResourceKey]provider.Manifest)
for _, m := range manifests {
if m.Key.IsConfigMap() {
configs[m.Key] = m
}
if m.Key.IsSecret() {
configs[m.Key] = m
}
}
return configs
}
func checkImageChange(ns diff.Nodes) (string, bool) {
const containerImageQuery = `^spec\.template\.spec\.containers\.\d+.image$`
nodes, _ := ns.Find(containerImageQuery)
if len(nodes) == 0 {
return "", false
}
images := make([]string, 0, len(nodes))
for _, n := range nodes {
beforeName, beforeTag := parseContainerImage(n.StringX())
afterName, afterTag := parseContainerImage(n.StringY())
if beforeName == afterName {
images = append(images, fmt.Sprintf("image %s from %s to %s", beforeName, beforeTag, afterTag))
} else {
images = append(images, fmt.Sprintf("image %s:%s to %s:%s", beforeName, beforeTag, afterName, afterTag))
}
}
desc := fmt.Sprintf("Sync progressively because of updating %s", strings.Join(images, ", "))
return desc, true
}
func checkReplicasChange(ns diff.Nodes) (string, bool) {
const replicasQuery = `^spec\.replicas$`
node, err := ns.FindOne(replicasQuery)
if err != nil {
return "", false
}
desc := fmt.Sprintf("Scale workload from %s to %s.", node.StringX(), node.StringY())
return desc, true
}
func parseContainerImage(image string) (name, tag string) {
parts := strings.Split(image, ":")
if len(parts) == 2 {
tag = parts[1]
}
paths := strings.Split(parts[0], "/")
name = paths[len(paths)-1]
return
}
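// exampleParseContainerImage is an illustrative sketch (not part of the
// original planner): parseContainerImage keeps only the last path segment as
// the name and the part after ":" as the tag, e.g.
// "gcr.io/my-project/helloworld:v1.2.0" -> ("helloworld", "v1.2.0").
func exampleParseContainerImage() (string, string) {
	return parseContainerImage("gcr.io/my-project/helloworld:v1.2.0")
}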
// TODO: Add ability to configure how to determine application version.
func determineVersion(manifests []provider.Manifest) (string, error) {
for _, m := range manifests {
if !m.Key.IsDeployment() {
continue
}
data, err := m.MarshalJSON()
if err != nil {
return "", err
}
var d resource.Deployment
if err := json.Unmarshal(data, &d); err != nil {
return "", err
}
containers := d.Spec.Template.Spec.Containers
if len(containers) == 0 {
return versionUnknown, nil
}
_, tag := parseContainerImage(containers[0].Image)
return tag, nil
}
return versionUnknown, nil
}
| {
oldWorkload, ok := findWorkload(olds)
if !ok {
desc = "Quick sync by applying all manifests because it was unable to find the currently running workloads"
return
}
newWorkload, ok := findWorkload(news)
if !ok {
desc = "Quick sync by applying all manifests because it was unable to find workloads in the new manifests"
return
}
// If the workload's pod template was touched
// do progressive deployment with the specified pipeline.
diffResult, err := provider.Diff(oldWorkload, newWorkload)
if err != nil {
progressive = true
desc = fmt.Sprintf("Sync progressively due to an error while calculating the diff (%v)", err)
return
}
diffNodes := diffResult.Nodes()
templateDiffs := diffNodes.FindByPrefix("spec.template")
if len(templateDiffs) > 0 {
progressive = true
if msg, changed := checkImageChange(templateDiffs); changed {
desc = msg
return
}
desc = fmt.Sprintf("Sync progressively because pod template of workload %s was changed", newWorkload.Key.Name)
return
}
// If the config/secret was touched, we also need to do progressive
// deployment to check run with the new config/secret content.
oldConfigs := findConfigs(olds)
newConfigs := findConfigs(news)
if len(oldConfigs) > len(newConfigs) {
progressive = true
desc = fmt.Sprintf("Sync progressively because %d configmap/secret deleted", len(oldConfigs)-len(newConfigs))
return
}
if len(oldConfigs) < len(newConfigs) {
progressive = true
desc = fmt.Sprintf("Sync progressively because new %d configmap/secret added", len(newConfigs)-len(oldConfigs))
return
}
for k, oc := range oldConfigs {
nc, ok := newConfigs[k]
if !ok {
progressive = true
desc = fmt.Sprintf("Sync progressively because %s %s was deleted", oc.Key.Kind, oc.Key.Name)
return
}
result, err := provider.Diff(oc, nc)
if err != nil {
progressive = true
desc = fmt.Sprintf("Sync progressively due to an error while calculating the diff (%v)", err)
return
}
if result.HasDiff() {
progressive = true
desc = fmt.Sprintf("Sync progressively because %s %s was updated", oc.Key.Kind, oc.Key.Name)
return
}
}
// Check if this is a scaling commit.
if msg, changed := checkReplicasChange(diffNodes); changed {
desc = msg
return
}
desc = "Quick sync by applying all manifests"
return
} |
trades_service.go | package api
import (
"github.com/HydroProtocol/hydro-scaffold-dex/backend/models"
"github.com/shopspring/decimal"
"sort"
"time"
)
const MaxBarsCount = 200
func GetAllTrades(p Param) (interface{}, error) {
req := p.(*QueryTradeReq)
count, trades := models.TradeDao.FindAllTrades(req.MarketID)
resp := QueryTradeResp{
Count: count,
Trades: trades,
}
return &resp, nil
}
func GetAccountTrades(p Param) (interface{}, error) {
req := p.(*QueryTradeReq)
if req.PerPage <= 0 {
req.PerPage = 20
}
if req.Page <= 0 {
req.Page = 1
}
offset := req.PerPage * (req.Page - 1)
limit := req.PerPage
count, trades := models.TradeDao.FindAccountMarketTrades(req.Address, req.MarketID, req.Status, limit, offset)
return &QueryTradeResp{
Count: count,
Trades: trades,
}, nil
}
func GetTradingView(p Param) (interface{}, error) {
params := p.(*CandlesReq)
pair := params.MarketID
from := params.From
to := params.To
granularity := params.Granularity
if (to - granularity*MaxBarsCount) > from { | from = to - granularity*MaxBarsCount
}
trades := models.TradeDao.FindTradesByMarket(pair, time.Unix(from, 0), time.Unix(to, 0))
if len(trades) == 0 {
return map[string]interface{}{
"candles": []*Bar{},
}, nil
}
return map[string]interface{}{
"candles": BuildTradingViewByTrades(trades, granularity),
}, nil
}
func BuildTradingViewByTrades(trades []*models.Trade, granularity int64) []*Bar {
var bars []*Bar
var currentIndex int64
var currentBar *Bar
sort.Slice(trades, func(i, j int) bool {
return trades[i].ExecutedAt.Unix() < trades[j].ExecutedAt.Unix()
})
for _, trade := range trades {
tIndex := trade.ExecutedAt.Unix() / granularity
if currentBar == nil || currentBar.Volume.IsZero() {
currentIndex = tIndex
currentBar = newBar(trade, currentIndex, granularity)
continue
}
if tIndex < currentIndex+1 {
currentBar.High = decimal.Max(currentBar.High, trade.Price)
currentBar.Low = decimal.Min(currentBar.Low, trade.Price)
currentBar.Volume = currentBar.Volume.Add(trade.Amount)
currentBar.Close = trade.Price
} else {
currentIndex = tIndex
if currentBar.Volume.IsZero() {
continue
}
bars = pushBar(bars, currentBar)
currentBar = newBar(trade, currentIndex, granularity)
}
}
bars = pushBar(bars, currentBar)
return bars
}
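// Illustrative note (not part of the original service): trades are bucketed by
// integer division of their execution time, so with granularity = 60 two trades
// executed at unix seconds 1600000020 and 1600000050 share the same index
// (1600000020/60 == 1600000050/60) and are merged into one bar whose Time is
// index*60 = 1600000020; the first trade sets Open, the last sets Close, and
// High/Low/Volume aggregate across the bucket.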
func pushBar(bars []*Bar, bar *Bar) []*Bar {
newBar := &Bar{
Time: bar.Time,
Open: bar.Open,
Close: bar.Close,
Low: bar.Low,
High: bar.High,
Volume: bar.Volume,
}
bars = append(bars, newBar)
return bars
}
func newBar(trade *models.Trade, currentIndex int64, granularity int64) *Bar {
bar := &Bar{
Time: currentIndex * granularity,
Volume: trade.Amount,
Open: trade.Price,
Close: trade.Price,
High: trade.Price,
Low: trade.Price,
}
return bar
}
type Bar struct {
Time int64 `json:"time"`
Open decimal.Decimal `json:"open"`
Close decimal.Decimal `json:"close"`
Low decimal.Decimal `json:"low"`
High decimal.Decimal `json:"high"`
Volume decimal.Decimal `json:"volume"`
} | |
technical_ratio_test.py | # -*- coding: utf-8 -*-
# MooQuant
#
# Copyright 2011-2015 Gabriel Martin Becedillas Ruiz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. moduleauthor:: Gabriel Martin Becedillas Ruiz <[email protected]>
"""
from mooquant import dataseries
from mooquant.technical import ratio
from . import common
class TestCase(common.TestCase):
| def __buildRatio(self, values, ratioMaxLen=None):
seqDS = dataseries.SequenceDataSeries()
ret = ratio.Ratio(seqDS, ratioMaxLen)
for value in values:
seqDS.append(value)
return ret
def testSimple(self):
ratio = self.__buildRatio([1, 2, 1])
self.assertEqual(ratio[0], None)
self.assertEqual(ratio[1], 1)
self.assertEqual(ratio[2], -0.5)
self.assertEqual(ratio[-1], -0.5)
with self.assertRaises(IndexError):
ratio[3]
self.assertEqual(ratio[-2], ratio[1])
self.assertEqual(ratio[-1], ratio[2])
self.assertEqual(len(ratio.getDateTimes()), 3)
for i in range(len(ratio)):
self.assertEqual(ratio.getDateTimes()[i], None)
def testNegativeValues(self):
ratio = self.__buildRatio([-1, -2, -1])
self.assertEqual(ratio[0], None)
self.assertEqual(ratio[1], -1)
self.assertEqual(ratio[2], 0.5)
self.assertEqual(ratio[-1], 0.5)
with self.assertRaises(IndexError):
ratio[3]
self.assertEqual(ratio[-2], ratio[1])
self.assertEqual(ratio[-1], ratio[2])
self.assertEqual(len(ratio.getDateTimes()), 3)
for i in range(len(ratio)):
self.assertEqual(ratio.getDateTimes()[i], None)
def testBounded(self):
ratio = self.__buildRatio([-1, -2, -1], 2)
self.assertEqual(ratio[0], -1)
self.assertEqual(ratio[1], 0.5)
self.assertEqual(len(ratio), 2) |
|
secret_acl_test.go | package acceptance
import (
"errors"
"fmt"
"os"
"testing"
. "github.com/databrickslabs/databricks-terraform/access"
"github.com/databrickslabs/databricks-terraform/common"
"github.com/databrickslabs/databricks-terraform/internal/acceptance"
"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/terraform"
"github.com/stretchr/testify/assert"
)
func TestAccSecretAclResource(t *testing.T) {
// TODO: refactor for common instance pool & AZ CLI
if _, ok := os.LookupEnv("CLOUD_ENV"); !ok {
t.Skip("Acceptance tests skipped unless env 'CLOUD_ENV' is set")
}
//var secretScope Secre
var secretACL ACLItem
// generate a random name for each tokenInfo test run, to avoid
// collisions from multiple concurrent tests.
// the acctest package includes many helpers such as RandStringFromCharSet
// See https://godoc.org/github.com/hashicorp/terraform-plugin-sdk/helper/acctest
//scope := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum)
scope := "terraform_acc_test_acl"
principal := "users"
permission := "READ"
acceptance.AccTest(t, resource.TestCase{
CheckDestroy: testSecretACLResourceDestroy,
Steps: []resource.TestStep{
{
// use a dynamic configuration with the random name from above
Config: testSecretACLResource(scope, principal, permission),
// compose a basic test, checking both remote and local values
Check: resource.ComposeTestCheckFunc(
// query the API to retrieve the tokenInfo object
testSecretACLResourceExists("databricks_secret_acl.my_secret_acl", &secretACL, t),
// verify remote values
testSecretACLValues(t, &secretACL, permission, principal),
// verify local values
resource.TestCheckResourceAttr("databricks_secret_acl.my_secret_acl", "scope", scope),
resource.TestCheckResourceAttr("databricks_secret_acl.my_secret_acl", "principal", principal),
resource.TestCheckResourceAttr("databricks_secret_acl.my_secret_acl", "permission", permission),
),
},
{
PreConfig: func() {
client := common.CommonEnvironmentClient()
err := NewSecretAclsAPI(client).Delete(scope, principal)
assert.NoError(t, err, err)
},
// use a dynamic configuration with the random name from above
Config: testSecretACLResource(scope, principal, permission),
// compose a basic test, checking both remote and local values
Check: resource.ComposeTestCheckFunc(
// query the API to retrieve the tokenInfo object
testSecretACLResourceExists("databricks_secret_acl.my_secret_acl", &secretACL, t),
// verify remote values
testSecretACLValues(t, &secretACL, permission, principal),
// verify local values
resource.TestCheckResourceAttr("databricks_secret_acl.my_secret_acl", "scope", scope),
resource.TestCheckResourceAttr("databricks_secret_acl.my_secret_acl", "principal", principal),
resource.TestCheckResourceAttr("databricks_secret_acl.my_secret_acl", "permission", permission),
),
},
},
})
}
func testSecretACLResourceDestroy(s *terraform.State) error {
client := common.CommonEnvironmentClient()
for _, rs := range s.RootModule().Resources {
if rs.Type != "databricks_secret" && rs.Type != "databricks_secret_scope" {
continue
}
_, err := NewSecretAclsAPI(client).Read(rs.Primary.Attributes["scope"], rs.Primary.Attributes["principal"])
if err == nil {
return errors.New("resource secret acl is not cleaned up")
}
_, err = NewSecretScopesAPI(client).Read(rs.Primary.Attributes["scope"])
if err == nil |
}
return nil
}
func testSecretACLValues(t *testing.T, acl *ACLItem, permission, principal string) resource.TestCheckFunc {
return func(s *terraform.State) error {
assert.True(t, acl.Permission == ACLPermissionRead)
assert.True(t, acl.Principal == principal)
return nil
}
}
// testSecretACLResourceExists queries the API and retrieves the matching secret ACL.
func testSecretACLResourceExists(n string, aclItem *ACLItem, t *testing.T) resource.TestCheckFunc {
return func(s *terraform.State) error {
// find the corresponding state object
rs, ok := s.RootModule().Resources[n]
if !ok {
return fmt.Errorf("Not found: %s", n)
}
// retrieve the configured client from the test setup
conn := common.CommonEnvironmentClient()
resp, err := NewSecretAclsAPI(conn).Read(rs.Primary.Attributes["scope"], rs.Primary.Attributes["principal"])
//t.Log(resp)
if err != nil {
return err
}
// If no error, assign the response Widget attribute to the widget pointer
*aclItem = resp
return nil
//return fmt.Errorf("Token (%s) not found", rs.Primary.ID)
}
}
// testSecretACLResource returns a configuration for a secret ACL resource with the provided scope, principal and permission
func testSecretACLResource(scopeName, principal, permission string) string {
return fmt.Sprintf(`
resource "databricks_secret_scope" "my_scope" {
name = "%s"
}
resource "databricks_secret_acl" "my_secret_acl" {
principal = "%s"
permission = "%s"
scope = databricks_secret_scope.my_scope.name
}
`, scopeName, principal, permission)
}
| {
return errors.New("resource secret is not cleaned up")
} |
iterate_drug.py | from torchvision import datasets, transforms
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
from argparse import ArgumentParser
from tqdm import tqdm
import time
import numpy as np
###########
# file imports / path issues
import os
import sys
from pathlib import Path
path = Path(os.path.abspath(__file__)).parents[3]
os.chdir(path)
sys.path.append('./BivariateShapley')
from utils_shapley import *
from shapley_kernel import Bivariate_KernelExplainer
import pickle
import os
import shap
############################################
# Define Test Parameters
############################################
parser = ArgumentParser(description='get phi plus matrices')
parser.add_argument('--dataset_min_index', type = int,default=0,
help='iterate over dataset starting from min_index')
parser.add_argument('--dataset_samples', type = int,default=500,
help='number of samples, starting from min_index')
parser.add_argument('--verbose', action='store_true', default=False,
help='boolean, use tqdm')
args = parser.parse_args()
min_index = args.dataset_min_index
max_index = min_index + args.dataset_samples
baseline = 'excess'
save_path = './Files/results_attribution/drug_%s' % (baseline)
make_dir(save_path)
model_path = './Files/trained_bb_models/model_drug.pkl'
data_path = './Files/Data/drug.h5'
from shapley_value_functions import *
# load model
import pickle
with open(model_path, 'rb') as fid:
model = pickle.load(fid)
model_eval = eval_RF_binary(model)
# Data Sample
from shapley_datasets import drug
dataset = drug(data_path = data_path, train = False)
dataloader = DataLoader(dataset, batch_size = 1, shuffle = False, num_workers = 0)
dataset_train = drug(data_path = data_path, train = True)
dataloader_train = DataLoader(dataset_train, batch_size = 10, shuffle = True, num_workers = 0)
data_iterator = iter(dataloader_train)
#######################
# Explainer
#######################
# initialize variables
| x_list = []
label_list = []
unary_list = []
matrix_list = []
time_list = []
db_ind = {}
time1 = time.time()
if args.verbose:
batch_iterator = tqdm(enumerate(dataloader), total = max_index)
else:
batch_iterator = enumerate(dataloader)
for idx, (x, label) in batch_iterator:
# advance batch iterator
if idx < min_index:
continue
elif idx == max_index:
break
time_start = time.time()
label = label[0].item()
#######################################
# Calculate Shapley
#######################################
baseline_value = 0
########################################
x = tensor2numpy(x)
x_train = np.zeros_like(x)
n_feat = x.reshape(-1).shape[0]
matrix = np.zeros((n_feat, n_feat))
model_eval.init_baseline(x, baseline_value = baseline_value)
explainer = shap.KernelExplainer(model_eval, x_train)
shapley_values = explainer.shap_values(x, silent = True, l1_reg = False)
for i in range(n_feat):
for j in range(i+1, n_feat):
model_eval.init_baseline(x, j = j, i = i, baseline_value = baseline_value)
x_ = np_collapse(x, index = j) # remove column j from x
explainer = shap.KernelExplainer(model_eval, np.zeros_like(x_)+baseline_value)
shapley_coalition = explainer.shap_values(x_, silent = True, l1_reg = False)
shapley_coalition = np_insert(shapley_coalition, np.zeros((x.shape[0], 1)), index = j)
matrix[i, j] = 0.5 * (shapley_coalition[0,i] - shapley_values[0,i] - shapley_values[0,j])
matrix[j, i] = matrix[i,j]
#######################################
# save individual shapley
time_list.append(time.time() - time_start)
x_list.append(x)
label_list.append(label)
unary_list.append(shapley_values)
matrix_list.append(matrix)
if idx % 5 == 0:
if not args.verbose:
print('=====================')
print('samples:' + str(idx+1))
print('time per sample: ' + str(np.array(time_list).mean()))
'''
db_ind['x_list'] = x_list
db_ind['label_list'] = label_list
db_ind['unary_list'] = unary_list
db_ind['matrix_list'] = matrix_list
db_ind['time'] = time_list
save_dict(db_ind, os.path.join(save_path, '%s-%s_checkpoint.pkl' % (str(min_index), str(max_index-1))))
'''
db_ind['x_list'] = x_list
db_ind['label_list'] = label_list
db_ind['unary_list'] = unary_list
db_ind['matrix_list'] = matrix_list
db_ind['time_list'] = time_list
save_dict(db_ind, os.path.join(save_path, '%s-%s.pkl' % (str(min_index), str(max_index-1))))
#os.remove(os.path.join(save_path, '%s-%s_checkpoint.pkl' % (str(min_index), str(max_index-1))))
print('done!') | |
libsyn.py | # coding: utf-8
from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..utils import (
parse_duration,
unified_strdate,
)
class | (InfoExtractor):
_VALID_URL = r'(?P<mainurl>https?://html5-player\.libsyn\.com/embed/episode/id/(?P<id>[0-9]+))'
_TESTS = [{
'url': 'http://html5-player.libsyn.com/embed/episode/id/6385796/',
'md5': '2a55e75496c790cdeb058e7e6c087746',
'info_dict': {
'id': '6385796',
'ext': 'mp3',
'title': "Champion Minded - Developing a Growth Mindset",
'description': 'In this episode, Allistair talks about the importance of developing a growth mindset, not only in sports, but in life too.',
'upload_date': '20180320',
'thumbnail': 're:^https?://.*',
},
}, {
'url': 'https://html5-player.libsyn.com/embed/episode/id/3727166/height/75/width/200/theme/standard/direction/no/autoplay/no/autonext/no/thumbnail/no/preload/no/no_addthis/no/',
'md5': '6c5cb21acd622d754d3b1a92b582ce42',
'info_dict': {
'id': '3727166',
'ext': 'mp3',
'title': 'Clients From Hell Podcast - How a Sex Toy Company Kickstarted my Freelance Career',
'upload_date': '20150818',
'thumbnail': 're:^https?://.*',
}
}]
def _real_extract(self, url):
m = re.match(self._VALID_URL, url)
video_id = m.group('id')
url = m.group('mainurl')
webpage = self._download_webpage(url, video_id)
podcast_title = self._search_regex(
r'<h3>([^<]+)</h3>', webpage, 'podcast title', default=None)
if podcast_title:
podcast_title = podcast_title.strip()
episode_title = self._search_regex(
r'(?:<div class="episode-title">|<h4>)([^<]+)</', webpage, 'episode title')
if episode_title:
episode_title = episode_title.strip()
title = '%s - %s' % (podcast_title, episode_title) if podcast_title else episode_title
description = self._html_search_regex(
r'<p\s+id="info_text_body">(.+?)</p>', webpage,
'description', default=None)
if description:
# Strip non-breaking and normal spaces
description = description.replace('\u00A0', ' ').strip()
release_date = unified_strdate(self._search_regex(
r'<div class="release_date">Released: ([^<]+)<', webpage, 'release date', fatal=False))
data_json = self._search_regex(r'var\s+playlistItem\s*=\s*(\{.*?\});\n', webpage, 'JSON data block')
data = json.loads(data_json)
formats = [{
'url': data['media_url'],
'format_id': 'main',
}, {
'url': data['media_url_libsyn'],
'format_id': 'libsyn',
}]
thumbnail = data.get('thumbnail_url')
duration = parse_duration(data.get('duration'))
return {
'id': video_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'upload_date': release_date,
'duration': duration,
'formats': formats,
}
| LibsynIE |
billing_account_sink_types.go | /*
Copyright AppsCode Inc. and Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by Kubeform. DO NOT EDIT.
package v1alpha1
import (
base "kubeform.dev/apimachinery/api/v1alpha1"
core "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
kmapi "kmodules.xyz/client-go/api/v1"
"sigs.k8s.io/cli-utils/pkg/kstatus/status"
)
// +genclient
// +k8s:openapi-gen=true
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +kubebuilder:object:root=true
// +kubebuilder:subresource:status
// +kubebuilder:printcolumn:name="Phase",type=string,JSONPath=`.status.phase`
type BillingAccountSink struct {
metav1.TypeMeta `json:",inline,omitempty"`
metav1.ObjectMeta `json:"metadata,omitempty"`
Spec BillingAccountSinkSpec `json:"spec,omitempty"`
Status BillingAccountSinkStatus `json:"status,omitempty"`
}
type BillingAccountSinkSpecBigqueryOptions struct {
// Whether to use BigQuery's partition tables. By default, Logging creates dated tables based on the log entries' timestamps, e.g. syslog_20170523. With partitioned tables the date suffix is no longer present and special query syntax has to be used instead. In both cases, tables are sharded based on UTC timezone.
UsePartitionedTables *bool `json:"usePartitionedTables" tf:"use_partitioned_tables"`
}
type BillingAccountSinkSpecExclusions struct {
// A description of this exclusion.
// +optional
Description *string `json:"description,omitempty" tf:"description"`
// If set to True, then this exclusion is disabled and it does not exclude any log entries
// +optional
Disabled *bool `json:"disabled,omitempty" tf:"disabled"`
// An advanced logs filter that matches the log entries to be excluded. By using the sample function, you can exclude less than 100% of the matching log entries
Filter *string `json:"filter" tf:"filter"`
// A client-assigned identifier, such as "load-balancer-exclusion". Identifiers are limited to 100 characters and can include only letters, digits, underscores, hyphens, and periods. First character has to be alphanumeric.
Name *string `json:"name" tf:"name"`
}
type BillingAccountSinkSpec struct {
State *BillingAccountSinkSpecResource `json:"state,omitempty" tf:"-"`
Resource BillingAccountSinkSpecResource `json:"resource" tf:"resource"`
UpdatePolicy base.UpdatePolicy `json:"updatePolicy,omitempty" tf:"-"`
TerminationPolicy base.TerminationPolicy `json:"terminationPolicy,omitempty" tf:"-"`
ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`
BackendRef *core.LocalObjectReference `json:"backendRef,omitempty" tf:"-"`
}
type BillingAccountSinkSpecResource struct {
ID string `json:"id,omitempty" tf:"id,omitempty"`
// Options that affect sinks exporting data to BigQuery.
// +optional
BigqueryOptions *BillingAccountSinkSpecBigqueryOptions `json:"bigqueryOptions,omitempty" tf:"bigquery_options"`
// The billing account exported to the sink.
BillingAccount *string `json:"billingAccount" tf:"billing_account"`
// A description of this sink. The maximum length of the description is 8000 characters.
// +optional
Description *string `json:"description,omitempty" tf:"description"`
// The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, or a BigQuery dataset. Examples: "storage.googleapis.com/[GCS_BUCKET]" "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]" "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" The writer associated with the sink must have access to write to the above resource.
Destination *string `json:"destination" tf:"destination"`
// If set to True, then this sink is disabled and it does not export any log entries.
// +optional
Disabled *bool `json:"disabled,omitempty" tf:"disabled"`
// Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusion_filters it will not be exported.
// +optional
Exclusions []BillingAccountSinkSpecExclusions `json:"exclusions,omitempty" tf:"exclusions"`
// The filter to apply when exporting logs. Only log entries that match the filter are exported.
// +optional
Filter *string `json:"filter,omitempty" tf:"filter"`
// The name of the logging sink. | // The identity associated with this sink. This identity must be granted write access to the configured destination.
// +optional
WriterIdentity *string `json:"writerIdentity,omitempty" tf:"writer_identity"`
}
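// A minimal sketch of populating the resource portion of the spec from Go code
// (purely illustrative; the billing account ID, dataset path, and filter below are
// hypothetical values, not taken from this file):
//
//	billing := "012345-567890-ABCDEF"
//	dest := "bigquery.googleapis.com/projects/my-project/datasets/billing_logs"
//	filter := "severity >= ERROR"
//	res := BillingAccountSinkSpecResource{
//		BillingAccount: &billing,
//		Destination:    &dest,
//		Filter:         &filter,
//	}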
type BillingAccountSinkStatus struct {
// Resource generation, which is updated on mutation by the API Server.
// +optional
ObservedGeneration int64 `json:"observedGeneration,omitempty"`
// +optional
Phase status.Status `json:"phase,omitempty"`
// +optional
Conditions []kmapi.Condition `json:"conditions,omitempty"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +kubebuilder:object:root=true
// BillingAccountSinkList is a list of BillingAccountSinks
type BillingAccountSinkList struct {
metav1.TypeMeta `json:",inline"`
metav1.ListMeta `json:"metadata,omitempty"`
// Items is a list of BillingAccountSink CRD objects
Items []BillingAccountSink `json:"items,omitempty"`
} | Name *string `json:"name" tf:"name"` |
sqlite3.go | package dialect
import (
"fmt"
"reflect"
"time"
)
type sqlite3 struct {
}
func init() |
func (s *sqlite3) DataTypeOf(typ reflect.Value) string {
switch typ.Kind() {
case reflect.Bool:
return "bool"
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uintptr:
return "integer"
case reflect.Int64, reflect.Uint64:
return "bigint"
case reflect.Float32, reflect.Float64:
return "real"
case reflect.String:
return "text"
case reflect.Array, reflect.Slice:
return "blob"
case reflect.Struct:
if _, ok := typ.Interface().(time.Time); ok {
return "datetime"
}
}
panic(fmt.Sprintf("invalid sql type %s (%s)", typ.Type().Name(), typ.Kind()))
}
func (s *sqlite3) TableExistSQL(tableName string) (string, []interface{}) {
args := []interface{}{tableName}
return "SELECT name FROM sqlite_master WHERE type='table' and name=?", args
}
| {
RegisterDialect("sqlite3", &sqlite3{})
} |
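// A minimal usage sketch for this dialect (illustrative only; it assumes the
// package also exposes a GetDialect lookup that returns dialects registered via
// RegisterDialect):
//
//	d, ok := GetDialect("sqlite3")
//	if ok {
//		_ = d.DataTypeOf(reflect.ValueOf("hello")) // "text"
//		sqlStr, args := d.TableExistSQL("users")
//		_, _ = sqlStr, args
//	}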
model.py | """
model.py
--------
This module provides a class and methods for building and managing a model with tensorflow.
By: Sebastian D. Goodfellow, Ph.D., 2018
"""
# Compatibility imports
from __future__ import absolute_import, division, print_function
# 3rd party imports
import os
import sys
import json
import pickle
import tensorflow as tf
# Local imports
from deepecg.training.model.disc.graph import Graph
from deepecg.training.networks.deep_ecg_v1 import DeepECGV1
from deepecg.training.networks.deep_ecg_v2 import DeepECGV2
from deepecg.training.networks.deep_ecg_v3 import DeepECGV3
from deepecg.training.networks.deep_ecg_v4 import DeepECGV4
from deepecg.training.networks.deep_ecg_v5 import DeepECGV5
from deepecg.training.networks.deep_ecg_v6 import DeepECGV6
from deepecg.training.networks.deep_ecg_v7 import DeepECGV7
class Model(object):
"""A class for managing a model through training."""
def __init__(self, model_name, network_name, network_parameters, save_path, data_path, max_to_keep):
# Set input parameters
self.model_name = model_name
self.network_name = network_name
self.network_parameters = network_parameters
self.save_path = os.path.join(save_path, self.model_name)
self.data_path = data_path
self.max_to_keep = max_to_keep
# Set attributes
self.sess = None
self.graph = None
self.network = None
# Create project file structure
self._create_folder_structure()
# Save parameters
self._save_parameters()
# Initialize graph
self.initialize_graph()
def initialize_graph(self):
# Get neural network
self.network = self._get_neural_network()
# Save network object
self._pickle_network()
# Build computational graph
self.graph = Graph(network=self.network, save_path=self.save_path, data_path=self.data_path,
max_to_keep=self.max_to_keep)
# Start session
self.sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True))
# Initialize global variables
self.sess.run(self.graph.init_global)
@classmethod
def build_training_graph(cls, save_path):
"""Build training graph."""
# Import model parameters
model_parameters = cls._import_model_parameters(save_path=save_path)
# Import network parameters
network_parameters = cls._import_network_parameters(save_path=save_path)
# Initialize Model
return cls(model_name=model_parameters['model_name'], network_name=model_parameters['network_name'],
network_parameters=network_parameters, save_path=os.path.dirname(save_path),
data_path=model_parameters['data_path'], max_to_keep=model_parameters['max_to_keep'])
def restore(self, global_step):
"""Restore model from checkpoint."""
# Initialize graph
if self.sess._closed:
self.initialize_graph()
# Restore checkpoint
self.graph.saver.restore(sess=self.sess, save_path=os.path.join(self.save_path, 'checkpoints', global_step))
def close_session(self):
"""Close any active sessions."""
try:
self.sess.close()
except AttributeError:
print('No active Tensorflow session.')
def _save_parameters(self):
"""Save model and network parameters to JSON."""
# Save model parameters
self._save_model_parameters()
# Save network parameters
self._save_network_parameters()
def | (self):
"""Save model parameters to JSON."""
# Get model parameters
model_parameters = dict(model_name=self.model_name, network_name=self.network_name, save_path=self.save_path,
data_path=self.data_path, max_to_keep=self.max_to_keep)
# Save model parameters to JSON
if not os.path.exists(os.path.join(self.save_path, 'parameters', 'model_parameters.json')):
with open(os.path.join(self.save_path, 'parameters', 'model_parameters.json'), 'w') as file:
json.dump(model_parameters, file)
def _save_network_parameters(self):
"""Save network parameters to JSON."""
if not os.path.exists(os.path.join(self.save_path, 'parameters', 'network_parameters.json')):
with open(os.path.join(self.save_path, 'parameters', 'network_parameters.json'), 'w') as file:
json.dump(self.network_parameters, file)
def _get_neural_network(self):
"""Instantiate neural network."""
# Convert string to class
network = getattr(sys.modules[__name__], self.network_name)
# Instantiate network class with network parameters
network = network(**self.network_parameters)
return network
def _create_folder_structure(self):
# Set list of folders
folders = ['train', 'val', 'checkpoints', 'network', 'graph', 'logs', 'parameters']
# Main project directory
if not os.path.exists(self.save_path):
os.makedirs(self.save_path)
# Loop through and create project folders
for folder in folders:
self._create_folder(folder=folder)
def _create_folder(self, folder):
"""Create folder."""
if not os.path.exists(os.path.join(self.save_path, folder)):
os.makedirs(os.path.join(self.save_path, folder))
def _pickle_network(self):
"""Pickle graph."""
with open(os.path.join(self.save_path, 'network', 'network.obj'), 'wb') as file:
pickle.dump(obj=self.network, file=file)
@staticmethod
def _import_model_parameters(save_path):
"""Import model parameters."""
with open(os.path.join(save_path, 'parameters', 'model_parameters.json')) as file:
return json.load(file)
@staticmethod
def _import_network_parameters(save_path):
"""Import network parameters."""
with open(os.path.join(save_path, 'parameters', 'network_parameters.json')) as file:
return json.load(file)
| _save_model_parameters |
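# A minimal usage sketch (illustrative only; the model name, network parameters,
# and paths below are hypothetical, not taken from this project):
#
#   model = Model(model_name='deepecg_run1', network_name='DeepECGV1',
#                 network_parameters=dict(), save_path='./experiments',
#                 data_path='./data', max_to_keep=5)
#   # ... train using the graph/session owned by the model ...
#   model.close_session()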
teams.go | package teams
import (
"crypto/rand"
"crypto/sha256"
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"sort"
"time"
"github.com/keybase/go-codec/codec"
"golang.org/x/crypto/nacl/secretbox"
"golang.org/x/net/context"
"github.com/keybase/client/go/libkb"
"github.com/keybase/client/go/protocol/gregor1"
"github.com/keybase/client/go/protocol/keybase1"
"github.com/keybase/client/go/sig3"
hidden "github.com/keybase/client/go/teams/hidden"
jsonw "github.com/keybase/go-jsonw"
)
// Teamer is an interface that can fit a materialized Team (just below) or intermediary temporary products
// that are available during the team load process. It has access to both the main and hidden chain data
// so that we can ask questions like "what is the maximal on-chain PTK generation."
type Teamer interface {
MainChain() *keybase1.TeamData
HiddenChain() *keybase1.HiddenTeamChain
}
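// A minimal sketch of a helper written against Teamer rather than a concrete
// *Team (illustrative only; it relies on nothing beyond the two interface methods):
//
//	func hasHiddenChain(t Teamer) bool {
//		return t.HiddenChain() != nil
//	}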
// A snapshot of a team's state.
// Not threadsafe.
type Team struct {
libkb.Contextified
ID keybase1.TeamID
Data *keybase1.TeamData
Hidden *keybase1.HiddenTeamChain
keyManager *TeamKeyManager
// rotated is set by rotateBoxes after rotating team key.
rotated bool
}
// Used to order multiple signatures to post
type teamSectionWithLinkType struct {
linkType libkb.LinkType
section SCTeamSection
}
func (t *Team) MainChain() *keybase1.TeamData { return t.Data }
func (t *Team) HiddenChain() *keybase1.HiddenTeamChain { return t.Hidden }
var _ Teamer = (*Team)(nil)
func NewTeam(ctx context.Context, g *libkb.GlobalContext, teamData *keybase1.TeamData, hidden *keybase1.HiddenTeamChain) *Team {
return &Team{
Contextified: libkb.NewContextified(g),
ID: teamData.ID(),
Data: teamData,
Hidden: hidden,
}
}
func (t *Team) CanSkipKeyRotation() bool {
// Only applies for >=200 member teams.
const MinTeamSize = 200
// Aim for one rotation every 24h.
const KeyRotateInterval = time.Duration(24) * time.Hour
if t.IsImplicit() {
// Do not do this optimization for implicit teams.
return false
}
if t.IsOpen() {
// Skip all rotations in open teams.
return true
}
// If cannot decide because of an error, return default false.
members, err := t.UsersWithRoleOrAbove(keybase1.TeamRole_BOT)
if err != nil {
return false
}
if len(members) < MinTeamSize {
// Not a big team
return false
}
now := t.G().Clock().Now()
duration := now.Sub(time.Unix(int64(t.chain().GetLatestPerTeamKeyCTime()), 0))
if duration > KeyRotateInterval { //nolint
// Last key rotation was more than the predefined interval ago, so do not skip.
return false
}
// Team is big and key was rotated recently - can skip rotation.
return true
}
func (t *Team) chain() *TeamSigChainState {
return &TeamSigChainState{inner: t.Data.Chain, hidden: t.Hidden}
}
func (t *Team) Name() keybase1.TeamName {
return t.Data.Name
}
func (t *Team) Generation() keybase1.PerTeamKeyGeneration {
return t.chain().GetLatestGeneration()
}
func (t *Team) IsPublic() bool {
return t.chain().IsPublic()
}
func (t *Team) IsImplicit() bool {
return t.chain().IsImplicit()
}
func (t *Team) IsSubteam() bool {
return t.chain().IsSubteam()
}
func (t *Team) IsOpen() bool {
return t.chain().IsOpen()
}
func (t *Team) OpenTeamJoinAs() keybase1.TeamRole {
return t.chain().inner.OpenTeamJoinAs
}
func (t *Team) KBFSTLFIDs() []keybase1.TLFID {
return t.chain().inner.TlfIDs
}
func (t *Team) LatestKBFSTLFID() (res keybase1.TLFID) {
ids := t.KBFSTLFIDs()
if len(ids) > 0 {
res = ids[len(ids)-1]
}
return res
}
func (t *Team) KBFSCryptKeys(ctx context.Context, appType keybase1.TeamApplication) []keybase1.CryptKey {
return t.Data.TlfCryptKeys[appType]
}
func (t *Team) getKeyManager(ctx context.Context) (km *TeamKeyManager, err error) {
if t.keyManager == nil {
gen := t.chain().GetLatestGeneration()
item, err := GetAndVerifyPerTeamKey(t.MetaContext(ctx), t, gen)
if err != nil {
return nil, err
}
t.keyManager, err = NewTeamKeyManagerWithSeedItem(t.ID, item)
if err != nil {
return nil, err
}
}
return t.keyManager, nil
}
func (t *Team) SharedSecret(ctx context.Context) (ret keybase1.PerTeamKeySeed, err error) {
defer t.G().CTrace(ctx, "Team#SharedSecret", func() error { return err })()
km, err := t.getKeyManager(ctx)
if err != nil {
return ret, err
}
return km.SharedSecret(), nil
}
func (t *Team) KBFSKey(ctx context.Context) (keybase1.TeamApplicationKey, error) {
return t.ApplicationKey(ctx, keybase1.TeamApplication_KBFS)
}
func (t *Team) ChatKey(ctx context.Context) (keybase1.TeamApplicationKey, error) {
return t.ApplicationKey(ctx, keybase1.TeamApplication_CHAT)
}
func (t *Team) GitMetadataKey(ctx context.Context) (keybase1.TeamApplicationKey, error) {
return t.ApplicationKey(ctx, keybase1.TeamApplication_GIT_METADATA)
}
func (t *Team) SeitanInviteTokenKeyLatest(ctx context.Context) (keybase1.TeamApplicationKey, error) {
return t.ApplicationKey(ctx, keybase1.TeamApplication_SEITAN_INVITE_TOKEN)
}
func (t *Team) SaltpackEncryptionKeyLatest(ctx context.Context) (keybase1.TeamApplicationKey, error) {
return t.ApplicationKey(ctx, keybase1.TeamApplication_SALTPACK)
}
func (t *Team) ChatKeyAtGeneration(ctx context.Context, generation keybase1.PerTeamKeyGeneration) (keybase1.TeamApplicationKey, error) {
return t.ApplicationKeyAtGeneration(ctx, keybase1.TeamApplication_CHAT, generation)
}
func (t *Team) SaltpackEncryptionKeyAtGeneration(ctx context.Context, generation keybase1.PerTeamKeyGeneration) (keybase1.TeamApplicationKey, error) {
return t.ApplicationKeyAtGeneration(ctx, keybase1.TeamApplication_SALTPACK, generation)
}
func (t *Team) SeitanInviteTokenKeyAtGeneration(ctx context.Context, generation keybase1.PerTeamKeyGeneration) (keybase1.TeamApplicationKey, error) {
return t.ApplicationKeyAtGeneration(ctx, keybase1.TeamApplication_SEITAN_INVITE_TOKEN, generation)
}
func (t *Team) SigningKID(ctx context.Context) (kid keybase1.KID, err error) {
gen := t.chain().GetLatestGeneration()
chainKey, err := newTeamSigChainState(t).GetPerTeamKeyAtGeneration(gen)
if err != nil {
return kid, err
}
return chainKey.SigKID, nil
}
func (t *Team) SigningKey(ctx context.Context) (key libkb.NaclSigningKeyPair, err error) {
km, err := t.getKeyManager(ctx)
if err != nil {
return key, err
}
return km.SigningKey()
}
func (t *Team) EncryptionKey(ctx context.Context) (key libkb.NaclDHKeyPair, err error) {
km, err := t.getKeyManager(ctx)
if err != nil {
return key, err
}
return km.EncryptionKey()
}
func (t *Team) encryptionKeyAtGen(ctx context.Context, gen keybase1.PerTeamKeyGeneration) (key libkb.NaclDHKeyPair, err error) {
item, err := GetAndVerifyPerTeamKey(libkb.NewMetaContext(ctx, t.G()), t, gen)
if err != nil {
return key, err
}
keyManager, err := NewTeamKeyManagerWithSeedItem(t.ID, item)
if err != nil {
return key, err
}
return keyManager.EncryptionKey()
}
func (t *Team) IsMember(ctx context.Context, uv keybase1.UserVersion) bool {
role, err := t.MemberRole(ctx, uv)
if err != nil {
t.G().Log.CDebugf(ctx, "error getting user role: %s", err)
return false
}
return role != keybase1.TeamRole_NONE
}
func (t *Team) MemberCtime(ctx context.Context, uv keybase1.UserVersion) *keybase1.Time {
return t.chain().MemberCtime(uv)
}
func (t *Team) MemberRole(ctx context.Context, uv keybase1.UserVersion) (keybase1.TeamRole, error) {
return t.chain().GetUserRole(uv)
}
func (t *Team) myRole(ctx context.Context) (keybase1.TeamRole, error) {
uv, err := t.currentUserUV(ctx)
if err != nil {
return keybase1.TeamRole_NONE, err
}
role, err := t.MemberRole(ctx, uv)
return role, err
}
func (t *Team) UserVersionByUID(ctx context.Context, uid keybase1.UID) (keybase1.UserVersion, error) {
return t.chain().GetLatestUVWithUID(uid)
}
func (t *Team) AllUserVersionsByUID(ctx context.Context, uid keybase1.UID) []keybase1.UserVersion {
return t.chain().GetAllUVsWithUID(uid)
}
func (t *Team) AllUserVersions(ctx context.Context) []keybase1.UserVersion {
return t.chain().GetAllUVs()
}
func (t *Team) UsersWithRole(role keybase1.TeamRole) ([]keybase1.UserVersion, error) {
return t.chain().GetUsersWithRole(role)
}
func (t *Team) UsersWithRoleOrAbove(role keybase1.TeamRole) ([]keybase1.UserVersion, error) {
return t.chain().GetUsersWithRoleOrAbove(role)
}
func (t *Team) Members() (keybase1.TeamMembers, error) {
var members keybase1.TeamMembers
x, err := t.UsersWithRole(keybase1.TeamRole_OWNER)
if err != nil {
return keybase1.TeamMembers{}, err
}
members.Owners = x
x, err = t.UsersWithRole(keybase1.TeamRole_ADMIN)
if err != nil {
return keybase1.TeamMembers{}, err
}
members.Admins = x
x, err = t.UsersWithRole(keybase1.TeamRole_WRITER)
if err != nil {
return keybase1.TeamMembers{}, err
}
members.Writers = x
x, err = t.UsersWithRole(keybase1.TeamRole_READER)
if err != nil {
return keybase1.TeamMembers{}, err
}
members.Readers = x
x, err = t.UsersWithRole(keybase1.TeamRole_BOT)
if err != nil {
return keybase1.TeamMembers{}, err
}
members.Bots = x
x, err = t.UsersWithRole(keybase1.TeamRole_RESTRICTEDBOT)
if err != nil {
return keybase1.TeamMembers{}, err
}
members.RestrictedBots = x
return members, nil
}
func (t *Team) ImplicitTeamDisplayName(ctx context.Context) (res keybase1.ImplicitTeamDisplayName, err error) {
return t.implicitTeamDisplayName(ctx, false)
}
func (t *Team) ImplicitTeamDisplayNameNoConflicts(ctx context.Context) (res keybase1.ImplicitTeamDisplayName, err error) {
return t.implicitTeamDisplayName(ctx, true)
}
func (t *Team) implicitTeamDisplayName(ctx context.Context, skipConflicts bool) (res keybase1.ImplicitTeamDisplayName, err error) {
defer t.G().CTrace(ctx, "Team.ImplicitTeamDisplayName", func() error { return err })()
impName := keybase1.ImplicitTeamDisplayName{
IsPublic: t.IsPublic(),
ConflictInfo: nil, // TODO should we know this here?
}
seenKBUsers := make(map[string]bool)
members, err := t.Members()
if err != nil {
return res, err
}
// Add the keybase owners
for _, member := range members.Owners {
name, err := t.G().GetUPAKLoader().LookupUsername(ctx, member.Uid)
if err != nil {
return res, err
}
impName.Writers.KeybaseUsers = append(impName.Writers.KeybaseUsers, name.String())
}
// Add the keybase readers
for _, member := range members.Readers {
name, err := t.G().GetUPAKLoader().LookupUsername(ctx, member.Uid)
if err != nil {
return res, err
}
impName.Readers.KeybaseUsers = append(impName.Readers.KeybaseUsers, name.String())
}
// Mark all the usernames we know about
for _, name := range append(impName.Writers.KeybaseUsers, impName.Readers.KeybaseUsers...) {
seenKBUsers[name] = true
}
// Add the invites
chainInvites := t.chain().inner.ActiveInvites
inviteMap, err := AnnotateInvites(ctx, t.G(), t)
if err != nil {
return res, err
}
isFullyResolved := true
for inviteID := range chainInvites {
invite, ok := inviteMap[inviteID]
if !ok {
// this should never happen
return res, fmt.Errorf("missing invite: %v", inviteID)
}
invtyp, err := invite.Type.C()
if err != nil {
continue
}
switch invtyp {
case keybase1.TeamInviteCategory_SBS:
sa := keybase1.SocialAssertion{
User: string(invite.Name),
Service: keybase1.SocialAssertionService(string(invite.Type.Sbs())),
}
switch invite.Role {
case keybase1.TeamRole_OWNER:
impName.Writers.UnresolvedUsers = append(impName.Writers.UnresolvedUsers, sa)
case keybase1.TeamRole_READER:
impName.Readers.UnresolvedUsers = append(impName.Readers.UnresolvedUsers, sa)
default:
return res, fmt.Errorf("implicit team contains invite to role: %v (%v)", invite.Role, invite.Id)
}
isFullyResolved = false
case keybase1.TeamInviteCategory_KEYBASE:
// Check to make sure we don't already have the user in the name
iname := string(invite.Name)
if seenKBUsers[iname] {
continue
}
seenKBUsers[iname] = true
// invite.Name is the username of the invited user, which AnnotateInvites has resolved.
switch invite.Role {
case keybase1.TeamRole_OWNER:
impName.Writers.KeybaseUsers = append(impName.Writers.KeybaseUsers, iname)
case keybase1.TeamRole_READER:
impName.Readers.KeybaseUsers = append(impName.Readers.KeybaseUsers, iname)
default:
return res, fmt.Errorf("implicit team contains invite to role: %v (%v)", invite.Role,
invite.Id)
}
case keybase1.TeamInviteCategory_PHONE, keybase1.TeamInviteCategory_EMAIL:
typ, err := invite.Type.String()
if err != nil {
return res, fmt.Errorf("Failed to handle invite type %v: %s", invtyp, err)
}
sa := keybase1.SocialAssertion{
User: string(invite.Name),
Service: keybase1.SocialAssertionService(typ),
}
switch invite.Role {
case keybase1.TeamRole_OWNER:
impName.Writers.UnresolvedUsers = append(impName.Writers.UnresolvedUsers, sa)
case keybase1.TeamRole_READER:
impName.Readers.UnresolvedUsers = append(impName.Readers.UnresolvedUsers, sa)
default:
return res, fmt.Errorf("implicit team contains invite to role: %v (%v)", invite.Role, invite.Id)
}
isFullyResolved = false
case keybase1.TeamInviteCategory_UNKNOWN:
return res, fmt.Errorf("unknown invite type in implicit team: %q", invite.Type.Unknown())
default:
return res, fmt.Errorf("unrecognized invite type in implicit team: %v", invtyp)
}
}
if !skipConflicts {
impName, err = GetConflictInfo(ctx, t.G(), t.ID, isFullyResolved, impName)
if err != nil {
return res, err
}
}
return impName, nil
}
func (t *Team) ImplicitTeamDisplayNameString(ctx context.Context) (string, error) {
impName, err := t.ImplicitTeamDisplayName(ctx)
if err != nil {
return "", err
}
return FormatImplicitTeamDisplayName(ctx, t.G(), impName)
}
func (t *Team) NextSeqno() keybase1.Seqno {
return t.CurrentSeqno() + 1
}
func (t *Team) CurrentSeqno() keybase1.Seqno {
return t.chain().GetLatestSeqno()
}
func (t *Team) AllApplicationKeys(ctx context.Context, application keybase1.TeamApplication) (res []keybase1.TeamApplicationKey, err error) {
return AllApplicationKeys(t.MetaContext(ctx), t, application, t.chain().GetLatestGeneration())
}
func (t *Team) AllApplicationKeysWithKBFS(ctx context.Context, application keybase1.TeamApplication) (res []keybase1.TeamApplicationKey, err error) {
return AllApplicationKeysWithKBFS(t.MetaContext(ctx), t, application,
t.chain().GetLatestGeneration())
}
// ApplicationKey returns the most recent key for an application.
func (t *Team) ApplicationKey(ctx context.Context, application keybase1.TeamApplication) (keybase1.TeamApplicationKey, error) {
latestGen := t.chain().GetLatestGeneration()
return t.ApplicationKeyAtGeneration(ctx, application, latestGen)
}
func (t *Team) ApplicationKeyAtGeneration(ctx context.Context,
application keybase1.TeamApplication, generation keybase1.PerTeamKeyGeneration) (res keybase1.TeamApplicationKey, err error) {
return ApplicationKeyAtGeneration(t.MetaContext(ctx), t, application, generation)
}
func (t *Team) ApplicationKeyAtGenerationWithKBFS(ctx context.Context,
application keybase1.TeamApplication, generation keybase1.PerTeamKeyGeneration) (res keybase1.TeamApplicationKey, err error) {
return ApplicationKeyAtGenerationWithKBFS(t.MetaContext(ctx), t, application, generation)
}
func (t *Team) TeamBotSettings() (map[keybase1.UserVersion]keybase1.TeamBotSettings, error) {
botSettings := t.chain().TeamBotSettings()
// It's possible that we added a RESTRICTEDBOT member without posting any
// settings for them. Fill in default values (no access) for those members
restrictedBots, err := t.UsersWithRole(keybase1.TeamRole_RESTRICTEDBOT)
if err != nil {
return nil, err
}
for _, uv := range restrictedBots {
if _, ok := botSettings[uv]; !ok {
botSettings[uv] = keybase1.TeamBotSettings{}
}
}
return botSettings, nil
}
func addSummaryHash(section *SCTeamSection, boxes *PerTeamSharedSecretBoxes) error {
if boxes == nil {
return nil
}
bps := boxes.boxPublicSummary
if bps == nil || bps.IsEmpty() {
return nil
}
bsh := SCTeamBoxSummaryHash(bps.HashHexEncoded())
section.BoxSummaryHash = &bsh
return nil
}
func (t *Team) Rotate(ctx context.Context, rt keybase1.RotationType) (err error) {
return t.rotate(ctx, rt)
}
func (t *Team) rotate(ctx context.Context, rt keybase1.RotationType) (err error) {
mctx := t.MetaContext(ctx).WithLogTag("ROT")
defer mctx.Trace(fmt.Sprintf("Team#rotate(%s,%s)", t.ID, rt), func() error { return err })()
rt, err = hidden.CheckFeatureGateForSupportWithRotationType(mctx, t.ID, true /* isWrite */, rt)
if err != nil {
return err
}
// initialize key manager
if _, err := t.SharedSecret(mctx.Ctx()); err != nil {
return err
}
mr, err := t.G().MerkleClient.FetchRootFromServer(mctx, libkb.TeamMerkleFreshnessForAdmin)
if err != nil {
return err
}
// load an empty member set (no membership changes)
memSet := newMemberSet()
// Try to get the admin perms if they are available, if not, proceed anyway
var admin *SCTeamAdmin
admin, err = t.getAdminPermission(mctx.Ctx())
if err != nil {
mctx.Debug("Rotate: unable to get admin permission: %v, attempting without admin section", err)
admin = nil
}
if err := t.ForceMerkleRootUpdate(mctx.Ctx()); err != nil {
return err
}
section := SCTeamSection{
ID: SCTeamID(t.ID),
Admin: admin,
Implicit: t.IsImplicit(),
Public: t.IsPublic(),
}
// create the team section of the signature
section.Members, err = memSet.Section()
if err != nil {
return err
}
// rotate the team key for all current members
secretBoxes, perTeamKeySection, teamEKPayload, err := t.rotateBoxes(mctx.Ctx(), memSet)
if err != nil {
return err
}
section.PerTeamKey = perTeamKeySection
err = addSummaryHash(§ion, secretBoxes)
if err != nil {
return err
}
// post the change to the server
payloadArgs := sigPayloadArgs{
secretBoxes: secretBoxes,
teamEKPayload: teamEKPayload,
}
if rt == keybase1.RotationType_VISIBLE {
err = t.rotatePostVisible(mctx.Ctx(), section, mr, payloadArgs)
} else {
err = t.rotatePostHidden(mctx.Ctx(), section, mr, payloadArgs)
}
if err != nil {
return err
}
t.storeTeamEKPayload(mctx.Ctx(), teamEKPayload)
createTeambotKeys(t.G(), t.ID, memSet.restrictedBotRecipientUids())
return nil
}
func (t *Team) rotatePostVisible(ctx context.Context, section SCTeamSection, mr *libkb.MerkleRoot, payloadArgs sigPayloadArgs) error {
ratchet, err := t.makeRatchet(ctx)
if err != nil {
return err
}
section.Ratchets = ratchet.ToTeamSection()
payloadArgs.ratchetBlindingKeys = ratchet.ToSigPayload()
latestSeqno, err := t.postChangeItem(ctx, section, libkb.LinkTypeRotateKey, mr, payloadArgs)
if err != nil {
return err
}
return t.notify(ctx, keybase1.TeamChangeSet{KeyRotated: true}, latestSeqno)
}
func (t *Team) rotatePostHidden(ctx context.Context, section SCTeamSection, mr *libkb.MerkleRoot, payloadArgs sigPayloadArgs) error {
mctx := libkb.NewMetaContext(ctx, t.G())
// Generate a "sig multi item" that we POST up to the API endpoint
smi, ratchet, err := t.rotateHiddenGenerateSigMultiItem(mctx, section, mr)
if err != nil {
return err
}
links := []libkb.SigMultiItem{*smi}
err = t.precheckLinksToPost(ctx, links)
if err != nil {
return err
}
// Combine the "sig multi item" above with the various off-chain items, like boxes.
payload := t.sigPayload(links, payloadArgs)
// Post the changes up to the server
err = t.postMulti(mctx, payload)
if err != nil {
return err
}
// Inform local caching that we've ratcheted forward the hidden chain with a change
// that we made.
tmp := mctx.G().GetHiddenTeamChainManager().Ratchet(mctx, t.ID, *ratchet)
if tmp != nil {
mctx.Warning("Failed to ratchet forward team chain: %s", tmp.Error())
}
// We rotated the key but didn't change the visible chain
return t.notifyNoChainChange(ctx, keybase1.TeamChangeSet{KeyRotated: true})
}
func teamAdminToSig3ChainLocation(admin *SCTeamAdmin) (*sig3.ChainLocation, error) {
if admin == nil {
return nil, nil
}
id, err := admin.TeamID.ToTeamID()
if err != nil {
return nil, err
}
s3id, err := sig3.ImportTeamID(id)
if err != nil {
return nil, err
}
return &sig3.ChainLocation{
TeamID: *s3id,
Seqno: admin.Seqno,
ChainType: admin.SeqType,
}, nil
}
func (t *Team) rotateHiddenGenerateSigMultiItem(mctx libkb.MetaContext, section SCTeamSection, mr *libkb.MerkleRoot) (ret *libkb.SigMultiItem, ratchets *keybase1.HiddenTeamChainRatchetSet, err error) {
currentSeqno := t.CurrentSeqno()
lastLinkID := t.chain().GetLatestLinkID()
mainChainPrev := keybase1.LinkTriple{
Seqno: currentSeqno,
SeqType: keybase1.SeqType_SEMIPRIVATE,
LinkID: lastLinkID,
}
me, err := loadMeForSignatures(mctx.Ctx(), mctx.G())
if err != nil {
return nil, nil, err
}
deviceSigningKey, err := t.G().ActiveDevice.SigningKey()
if err != nil {
return nil, nil, err
}
hiddenPrev, err := t.G().GetHiddenTeamChainManager().Tail(mctx, t.ID)
if err != nil {
return nil, nil, err
}
sk, err := t.keyManager.SigningKey()
if err != nil {
return nil, nil, err
}
ek, err := t.keyManager.EncryptionKey()
if err != nil {
return nil, nil, err
}
admin, err := teamAdminToSig3ChainLocation(section.Admin)
if err != nil {
return nil, nil, err
}
ret, ratchets, err = hidden.GenerateKeyRotation(mctx, hidden.GenerateKeyRotationParams{
TeamID: t.ID,
IsPublic: t.IsPublic(),
IsImplicit: t.IsImplicit(),
MerkleRoot: mr,
Me: me,
SigningKey: deviceSigningKey,
MainPrev: mainChainPrev,
HiddenPrev: hiddenPrev,
Gen: t.keyManager.Generation(),
NewSigningKey: sk,
NewEncryptionKey: ek,
Check: t.keyManager.Check(),
Admin: admin,
})
return ret, ratchets, err
}
func (t *Team) isAdminOrOwner(m keybase1.UserVersion) (res bool, err error) {
role, err := t.chain().GetUserRole(m)
if err != nil {
return false, err
}
if role.IsAdminOrAbove() {
res = true
}
return res, nil
}
func (t *Team) getDowngradedUsers(ctx context.Context, ms *memberSet) (uids []keybase1.UID, err error) {
for _, member := range ms.None {
// Load member first to check if their eldest_seqno has not changed.
// If it did, the member was nuked and we do not need to lease.
_, _, err := loadMember(ctx, t.G(), member.version, true)
if err != nil {
if _, reset := err.(libkb.AccountResetError); reset {
continue
} else {
return nil, err
}
}
uids = append(uids, member.version.Uid)
}
for _, member := range ms.nonAdmins() {
admin, err := t.isAdminOrOwner(member.version)
if err != nil {
return nil, err
}
if admin {
uids = append(uids, member.version.Uid)
}
}
return uids, nil
}
type ChangeMembershipOptions struct {
// Pass "permanent" flag, user will not be able to request access
// to the team again, admin will have to add them back.
Permanent bool
// Do not rotate team key, even on member removals. Server will
// queue CLKR if client sends removals without rotation.
SkipKeyRotation bool
}
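// A minimal caller sketch (illustrative only; the request contents are hypothetical):
//
//	err := team.ChangeMembershipWithOptions(ctx, req, ChangeMembershipOptions{
//		Permanent:       true, // removed users cannot request access again
//		SkipKeyRotation: true, // server will queue a CLKR for the removals
//	})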
func (t *Team) ChangeMembershipWithOptions(ctx context.Context, req keybase1.TeamChangeReq, opts ChangeMembershipOptions) (err error) {
defer t.G().CTrace(ctx, "Team.ChangeMembershipWithOptions", func() error { return err })()
if t.IsSubteam() && len(req.Owners) > 0 {
return NewSubteamOwnersError()
}
// create the change membership section + secretBoxes
section, secretBoxes, implicitAdminBoxes, teamEKPayload, memberSet, ratchet, err := t.changeMembershipSection(ctx, req, opts.SkipKeyRotation)
if err != nil {
return err
}
if err := t.ForceMerkleRootUpdate(ctx); err != nil {
return err
}
var merkleRoot *libkb.MerkleRoot
var lease *libkb.Lease
downgrades, err := t.getDowngradedUsers(ctx, memberSet)
if err != nil {
return err
}
if len(downgrades) != 0 {
lease, merkleRoot, err = libkb.RequestDowngradeLeaseByTeam(ctx, t.G(), t.ID, downgrades)
if err != nil {
return err
}
defer func() {
// We must cancel in the case of an error in postMulti, but it's safe to cancel
// if everything worked. So we always cancel the lease on the way out of this function.
// See CORE-6473 for a case in which this was needed. And also the test
// `TestOnlyOwnerLeaveThenUpgradeFriend`.
err := libkb.CancelDowngradeLease(ctx, t.G(), lease.LeaseID)
if err != nil {
t.G().Log.CWarningf(ctx, "Failed to cancel downgrade lease: %s", err.Error())
}
}()
}
// post the change to the server
sigPayloadArgs := sigPayloadArgs{
secretBoxes: secretBoxes,
implicitAdminBoxes: implicitAdminBoxes,
lease: lease,
teamEKPayload: teamEKPayload,
ratchetBlindingKeys: ratchet.ToSigPayload(),
}
if opts.Permanent {
sigPayloadArgs.prePayload = libkb.JSONPayload{"permanent": true}
}
// Add a ChangeMembership section and possibly a BotSettings section.
sections := []teamSectionWithLinkType{
{
linkType: libkb.LinkTypeChangeMembership,
section: section,
},
}
// If we are adding any restricted bots add a bot_settings link
if len(req.RestrictedBots) > 0 {
section, _, err := t.botSettingsSection(ctx, req.RestrictedBots, ratchet, merkleRoot)
if err != nil {
return err
}
sections = append(sections, teamSectionWithLinkType{
linkType: libkb.LinkTypeTeamBotSettings,
section: section,
})
}
payload, latestSeqno, err := t.changeItemsPayload(ctx, sections, merkleRoot, sigPayloadArgs)
if err != nil {
return err
}
var recipients, botRecipients []keybase1.UserVersion
for uv := range memberSet.recipients {
recipients = append(recipients, uv)
}
for uv := range memberSet.restrictedBotRecipients {
botRecipients = append(botRecipients, uv)
}
newMemSet := newMemberSet()
_, err = newMemSet.loadGroup(ctx, t.G(), recipients, storeMemberKindRecipient, true)
if err != nil {
return err
}
_, err = newMemSet.loadGroup(ctx, t.G(), botRecipients, storeMemberKindRestrictedBotRecipient, true)
if err != nil {
return err
}
if !memberSet.recipients.Eq(newMemSet.recipients) {
return BoxRaceError{inner: fmt.Errorf("team box summary changed during sig creation; retry required")}
}
err = t.postMulti(libkb.NewMetaContext(ctx, t.G()), payload)
if err != nil {
return err
}
err = t.notify(ctx, keybase1.TeamChangeSet{MembershipChanged: true}, latestSeqno)
if err != nil {
return err
}
t.storeTeamEKPayload(ctx, teamEKPayload)
createTeambotKeys(t.G(), t.ID, memberSet.restrictedBotRecipientUids())
return nil
}
func (t *Team) ChangeMembership(ctx context.Context, req keybase1.TeamChangeReq) error {
return t.ChangeMembershipWithOptions(ctx, req, ChangeMembershipOptions{})
}
func (t *Team) downgradeIfOwnerOrAdmin(ctx context.Context) (needsReload bool, err error) {
defer t.G().CTrace(ctx, "Team#downgradeIfOwnerOrAdmin", func() error { return err })()
uv, err := t.currentUserUV(ctx)
if err != nil {
return false, err
}
role, err := t.MemberRole(ctx, uv)
if err != nil {
return false, err
}
if role.IsAdminOrAbove() {
reqs := keybase1.TeamChangeReq{Writers: []keybase1.UserVersion{uv}}
if err := t.ChangeMembership(ctx, reqs); err != nil {
return false, err
}
return true, nil
}
return false, nil
}
func (t *Team) makeRatchet(ctx context.Context) (ret *hidden.Ratchet, err error) {
return t.chain().makeHiddenRatchet(libkb.NewMetaContext(ctx, t.G()))
}
func (t *Team) Leave(ctx context.Context, permanent bool) error {
// If we are owner or admin, we have to downgrade ourselves first.
needsReload, err := t.downgradeIfOwnerOrAdmin(ctx)
if err != nil {
return err
}
if needsReload {
t, err = Load(ctx, t.G(), keybase1.LoadTeamArg{
ID: t.ID,
Public: t.IsPublic(),
ForceRepoll: true,
})
if err != nil {
return err
}
}
// Check if we are an implicit admin with no explicit membership
// in order to give a nice error.
role, err := t.myRole(ctx)
if err != nil {
role = keybase1.TeamRole_NONE
}
if role == keybase1.TeamRole_NONE {
_, err := t.getAdminPermission(ctx)
switch err.(type) {
case nil, AdminPermissionRequiredError:
return NewImplicitAdminCannotLeaveError()
}
}
ratchet, err := t.makeRatchet(ctx)
if err != nil {
return err
}
section := SCTeamSection{
ID: SCTeamID(t.ID),
Implicit: t.IsImplicit(),
Public: t.IsPublic(),
Ratchets: ratchet.ToTeamSection(),
}
sigPayloadArgs := sigPayloadArgs{
prePayload: libkb.JSONPayload{"permanent": permanent},
ratchetBlindingKeys: ratchet.ToSigPayload(),
}
latestSeqno, err := t.postChangeItem(ctx, section, libkb.LinkTypeLeave, nil, sigPayloadArgs)
if err != nil {
return err
}
return t.notify(ctx, keybase1.TeamChangeSet{MembershipChanged: true}, latestSeqno)
}
func (t *Team) deleteRoot(ctx context.Context, ui keybase1.TeamsUiInterface) error {
m := t.MetaContext(ctx)
uv, err := t.currentUserUV(ctx)
if err != nil {
return err
}
role, err := t.MemberRole(ctx, uv)
if err != nil {
return err
}
if role != keybase1.TeamRole_OWNER {
return libkb.AppStatusError{
Code: int(keybase1.StatusCode_SCTeamSelfNotOwner),
Name: "SELF_NOT_OWNER",
Desc: "You must be an owner to delete a team",
}
}
confirmed, err := ui.ConfirmRootTeamDelete(ctx, keybase1.ConfirmRootTeamDeleteArg{TeamName: t.Name().String()})
if err != nil {
return err
}
if !confirmed {
return errors.New("team delete not confirmed")
}
ratchet, err := t.makeRatchet(ctx)
if err != nil {
return err
}
teamSection := SCTeamSection{
ID: SCTeamID(t.ID),
Implicit: t.IsImplicit(),
Public: t.IsPublic(),
Ratchets: ratchet.ToTeamSection(),
}
mr, err := t.G().MerkleClient.FetchRootFromServer(t.MetaContext(ctx), libkb.TeamMerkleFreshnessForAdmin)
if err != nil {
return err
}
if mr == nil {
return errors.New("No merkle root available for team delete root")
}
sigMultiItem, latestSeqno, err := t.sigTeamItem(ctx, teamSection, libkb.LinkTypeDeleteRoot, mr)
if err != nil {
return err
}
sigPayloadArgs := sigPayloadArgs{
ratchetBlindingKeys: ratchet.ToSigPayload(),
}
payload := t.sigPayload([]libkb.SigMultiItem{sigMultiItem}, sigPayloadArgs)
err = t.postMulti(m, payload)
if err != nil {
return err
}
return t.HintLatestSeqno(m, latestSeqno)
}
func (t *Team) deleteSubteam(ctx context.Context, ui keybase1.TeamsUiInterface) error {
m := t.MetaContext(ctx)
// subteam delete consists of two links:
// 1. delete_subteam in parent chain
// 2. delete_up_pointer in subteam chain
if t.IsImplicit() {
return fmt.Errorf("unsupported delete of implicit subteam")
}
parentID := t.chain().GetParentID()
parentTeam, err := Load(ctx, t.G(), keybase1.LoadTeamArg{
ID: *parentID,
Public: t.IsPublic(),
ForceRepoll: true,
})
if err != nil {
return err
}
admin, err := parentTeam.getAdminPermission(ctx)
if err != nil {
return err
}
confirmed, err := ui.ConfirmSubteamDelete(ctx, keybase1.ConfirmSubteamDeleteArg{TeamName: t.Name().String()})
if err != nil {
return err
}
if !confirmed {
return errors.New("team delete not confirmed")
}
subteamName := SCTeamName(t.Data.Name.String())
entropy, err := makeSCTeamEntropy()
if err != nil {
return err
}
ratchet, err := t.makeRatchet(ctx)
if err != nil {
return err
}
parentSection := SCTeamSection{
ID: SCTeamID(parentTeam.ID),
Subteam: &SCSubteam{
ID: SCTeamID(t.ID),
Name: subteamName, // weird this is required
},
Admin: admin,
Public: t.IsPublic(),
Entropy: entropy,
Ratchets: ratchet.ToTeamSection(),
}
mr, err := t.G().MerkleClient.FetchRootFromServer(t.MetaContext(ctx), libkb.TeamMerkleFreshnessForAdmin)
if err != nil {
return err
}
if mr == nil {
return errors.New("No merkle root available for team delete subteam")
}
sigParent, _, err := parentTeam.sigTeamItem(ctx, parentSection, libkb.LinkTypeDeleteSubteam, mr)
if err != nil {
return err
}
subSection := SCTeamSection{
ID: SCTeamID(t.ID),
Name: &subteamName, // weird this is required
Parent: &SCTeamParent{
ID: SCTeamID(parentTeam.ID),
Seqno: parentTeam.chain().GetLatestSeqno() + 1, // the seqno of the *new* parent link
SeqType: seqTypeForTeamPublicness(parentTeam.IsPublic()),
},
Public: t.IsPublic(),
Admin: admin,
}
sigSub, latestSeqno, err := t.sigTeamItem(ctx, subSection, libkb.LinkTypeDeleteUpPointer, mr)
if err != nil {
return err
}
payload := make(libkb.JSONPayload)
payload["sigs"] = []interface{}{sigParent, sigSub}
ratchet.AddToJSONPayload(payload)
err = t.postMulti(m, payload)
if err != nil {
return err
}
return t.HintLatestSeqno(m, latestSeqno)
}
func (t *Team) NumActiveInvites() int {
return t.chain().NumActiveInvites()
}
func (t *Team) HasActiveInvite(mctx libkb.MetaContext, name keybase1.TeamInviteName, typ string) (bool, error) {
it, err := TeamInviteTypeFromString(mctx, typ)
if err != nil {
return false, err
}
return t.chain().HasActiveInvite(name, it)
}
func (t *Team) FindActiveKeybaseInvite(uid keybase1.UID) (keybase1.TeamInvite, keybase1.UserVersion, bool) {
return t.chain().FindActiveKeybaseInvite(uid)
}
func (t *Team) GetActiveAndObsoleteInvites() (ret map[keybase1.TeamInviteID]keybase1.TeamInvite) {
ret = make(map[keybase1.TeamInviteID]keybase1.TeamInvite)
for id, invite := range t.chain().inner.ActiveInvites {
ret[id] = invite
}
for id, invite := range t.chain().inner.ObsoleteInvites {
ret[id] = invite
}
return ret
}
// If uv.Uid is set, then username is ignored.
// Otherwise resolvedUsername and uv are ignored.
func (t *Team) InviteMember(ctx context.Context, username string, role keybase1.TeamRole, resolvedUsername libkb.NormalizedUsername, uv keybase1.UserVersion) (keybase1.TeamAddMemberResult, error) {
// if a user version was previously loaded, then there is a keybase user for username, but
// without a PUK or without any keys.
if uv.Uid.Exists() {
return t.inviteKeybaseMember(libkb.NewMetaContext(ctx, t.G()), uv, role, resolvedUsername)
}
// For a social, email, or other type of invite, assert it's not an owner invite.
if role.IsOrAbove(keybase1.TeamRole_OWNER) {
return keybase1.TeamAddMemberResult{}, errors.New("You cannot invite an owner to a team.")
}
return t.inviteSBSMember(ctx, username, role)
}
func (t *Team) InviteEmailMember(ctx context.Context, email string, role keybase1.TeamRole) error {
t.G().Log.CDebugf(ctx, "team %s invite email member %s", t.Name(), email)
if role == keybase1.TeamRole_OWNER {
return errors.New("You cannot invite an owner to a team over email.")
}
invite := SCTeamInvite{
Type: "email",
Name: keybase1.TeamInviteName(email),
ID: NewInviteID(),
}
return t.postInvite(ctx, invite, role)
}
func (t *Team) inviteKeybaseMember(mctx libkb.MetaContext, uv keybase1.UserVersion, role keybase1.TeamRole, resolvedUsername libkb.NormalizedUsername) (res keybase1.TeamAddMemberResult, err error) {
mctx.Debug("team %s invite keybase member %s", t.Name(), uv)
invite := SCTeamInvite{
Type: "keybase",
Name: uv.TeamInviteName(),
ID: NewInviteID(),
}
existing, err := t.HasActiveInvite(mctx, invite.Name, invite.Type)
if err != nil {
return res, err
}
if existing {
return res, libkb.ExistsError{Msg: "An invite for this user already exists."}
}
if t.IsSubteam() && role == keybase1.TeamRole_OWNER {
return res, NewSubteamOwnersError()
}
invList := []SCTeamInvite{invite}
cancelList := []SCTeamInviteID{}
var invites SCTeamInvites
switch role {
case keybase1.TeamRole_ADMIN:
invites.Admins = &invList
case keybase1.TeamRole_WRITER:
invites.Writers = &invList
case keybase1.TeamRole_READER:
invites.Readers = &invList
case keybase1.TeamRole_OWNER:
invites.Owners = &invList
}
// Inviting a PUK-less Keybase member requires removing old invites for that
// uid first, or the call will bounce off the server with an error. There is
// no hard limit in the team player disallowing multiple keybase invites
// for the same UID, but there is a soft serverside check when the
// signature is posted.
for inviteID, existingInvite := range t.GetActiveAndObsoleteInvites() {
// KeybaseUserVersion checks whether the invite is KEYBASE and errors
// if not, so we can blindly call it for all invites and continue
// to the next one if we get an error.
existingUV, err := existingInvite.KeybaseUserVersion()
if err != nil {
continue
}
if existingUV.Uid != uv.Uid {
continue
}
if uv.EldestSeqno != 0 && existingUV.EldestSeqno > uv.EldestSeqno {
// We probably know invitee by their outdated EldestSeqno. There
// is also a server check for this case.
return res, libkb.ExistsError{
Msg: fmt.Sprintf("An invite for this user already exists, with higher EldestSeqno (%d > %d)", existingUV.EldestSeqno, uv.EldestSeqno),
}
}
mctx.Debug("Canceling old Keybase invite: %+v", existingInvite)
cancelList = append(cancelList, SCTeamInviteID(inviteID))
}
if len(cancelList) != 0 {
mctx.Debug("Total %d old invites will be canceled.", len(cancelList))
invites.Cancel = &cancelList
}
mctx.Debug("Adding invite: %+v", invite)
if err := t.postTeamInvites(mctx.Ctx(), invites); err != nil {
return res, err
}
return keybase1.TeamAddMemberResult{Invited: true, User: &keybase1.User{Uid: uv.Uid, Username: resolvedUsername.String()}}, nil
}
func (t *Team) inviteSBSMember(ctx context.Context, username string, role keybase1.TeamRole) (keybase1.TeamAddMemberResult, error) {
// parse username to get social
typ, name, err := parseSocialAssertion(libkb.NewMetaContext(ctx, t.G()), username)
if err != nil {
return keybase1.TeamAddMemberResult{}, err
}
t.G().Log.CDebugf(ctx, "team %s invite sbs member %s/%s", t.Name(), typ, name)
invite := SCTeamInvite{
Type: typ,
Name: keybase1.TeamInviteName(name),
ID: NewInviteID(),
}
if err := t.postInvite(ctx, invite, role); err != nil {
return keybase1.TeamAddMemberResult{}, err
}
return keybase1.TeamAddMemberResult{Invited: true}, nil
}
func (t *Team) InviteSeitan(ctx context.Context, role keybase1.TeamRole, label keybase1.SeitanKeyLabel) (ikey SeitanIKey, err error) {
defer t.G().CTraceTimed(ctx, fmt.Sprintf("InviteSeitan: team: %v, role: %v", t.Name(), role), func() error { return err })()
ikey, err = GenerateIKey()
if err != nil {
return ikey, err
}
sikey, err := ikey.GenerateSIKey()
if err != nil {
return ikey, err
}
inviteID, err := sikey.GenerateTeamInviteID()
if err != nil {
return ikey, err
}
_, encoded, err := ikey.GeneratePackedEncryptedKey(ctx, t, label)
if err != nil {
return ikey, err
}
invite := SCTeamInvite{
Type: "seitan_invite_token",
Name: keybase1.TeamInviteName(encoded),
ID: inviteID,
}
if err := t.postInvite(ctx, invite, role); err != nil {
return ikey, err
}
return ikey, err
}
func (t *Team) InviteSeitanV2(ctx context.Context, role keybase1.TeamRole, label keybase1.SeitanKeyLabel) (ikey SeitanIKeyV2, err error) {
defer t.G().CTraceTimed(ctx, fmt.Sprintf("InviteSeitanV2: team: %v, role: %v", t.Name(), role), func() error { return err })()
ikey, err = GenerateIKeyV2()
if err != nil {
return ikey, err
}
sikey, err := ikey.GenerateSIKey()
if err != nil {
return ikey, err
}
inviteID, err := sikey.GenerateTeamInviteID()
if err != nil {
return ikey, err
}
_, encoded, err := sikey.GeneratePackedEncryptedKey(ctx, t, label)
if err != nil {
return ikey, err
}
invite := SCTeamInvite{
Type: "seitan_invite_token",
Name: keybase1.TeamInviteName(encoded),
ID: inviteID,
}
if err := t.postInvite(ctx, invite, role); err != nil {
return ikey, err
}
return ikey, err
}
func (t *Team) postInvite(ctx context.Context, invite SCTeamInvite, role keybase1.TeamRole) error {
existing, err := t.HasActiveInvite(t.MetaContext(ctx), invite.Name, invite.Type)
if err != nil {
return err
}
if existing {
return libkb.ExistsError{Msg: "An invite for this user already exists."}
}
if t.IsSubteam() && role == keybase1.TeamRole_OWNER {
return NewSubteamOwnersError()
}
invList := []SCTeamInvite{invite}
var invites SCTeamInvites
switch role {
case keybase1.TeamRole_RESTRICTEDBOT, keybase1.TeamRole_BOT:
return fmt.Errorf("bot roles disallowed for invites")
case keybase1.TeamRole_READER:
invites.Readers = &invList
case keybase1.TeamRole_WRITER:
invites.Writers = &invList
case keybase1.TeamRole_ADMIN:
invites.Admins = &invList
case keybase1.TeamRole_OWNER:
invites.Owners = &invList
}
if invites.Len() == 0 {
return fmt.Errorf("invalid invite, 0 members invited")
}
return t.postTeamInvites(ctx, invites)
}
func (t *Team) postTeamInvites(ctx context.Context, invites SCTeamInvites) error {
m := t.MetaContext(ctx)
admin, err := t.getAdminPermission(ctx)
if err != nil {
return err
}
if t.IsSubteam() && invites.Owners != nil && len(*invites.Owners) > 0 {
return NewSubteamOwnersError()
}
entropy, err := makeSCTeamEntropy()
if err != nil {
return err
}
ratchet, err := t.makeRatchet(ctx)
if err != nil {
return err
}
teamSection := SCTeamSection{
ID: SCTeamID(t.ID),
Admin: admin,
Invites: &invites,
Implicit: t.IsImplicit(),
Public: t.IsPublic(),
Entropy: entropy,
Ratchets: ratchet.ToTeamSection(),
}
mr, err := t.G().MerkleClient.FetchRootFromServer(t.MetaContext(ctx), libkb.TeamMerkleFreshnessForAdmin)
if err != nil {
return err
}
if mr == nil {
return errors.New("No merkle root available for team invite")
}
sigMultiItem, latestSeqno, err := t.sigTeamItem(ctx, teamSection, libkb.LinkTypeInvite, mr)
if err != nil {
return err
}
sigMulti := []libkb.SigMultiItem{sigMultiItem}
err = t.precheckLinksToPost(ctx, sigMulti)
if err != nil {
return err
}
sigPayloadArgs := sigPayloadArgs{
ratchetBlindingKeys: ratchet.ToSigPayload(),
}
payload := t.sigPayload(sigMulti, sigPayloadArgs)
err = t.postMulti(m, payload)
if err != nil {
return err
}
return t.notify(ctx, keybase1.TeamChangeSet{MembershipChanged: true}, latestSeqno)
}
// NOTE: since this function uses `Load` and not `load2`, readSubteamID cannot
// be passed through, so this call will fail if the user is not a member of the
// parent team (or a child of the parent team) that the validator validates
func (t *Team) traverseUpUntil(ctx context.Context, validator func(t *Team) bool) (targetTeam *Team, err error) {
targetTeam = t
for {
if validator(targetTeam) {
return targetTeam, nil
}
parentID := targetTeam.chain().GetParentID()
if parentID == nil {
return nil, nil
}
targetTeam, err = Load(ctx, t.G(), keybase1.LoadTeamArg{
ID: *parentID,
Public: parentID.IsPublic(),
// This is in a cold path anyway, so might as well trade reliability
// at the expense of speed.
ForceRepoll: true,
})
if err != nil {
return nil, err
}
}
}
func (t *Team) getAdminPermission(ctx context.Context) (admin *SCTeamAdmin, err error) {
uv, err := t.currentUserUV(ctx)
if err != nil {
return nil, err
}
targetTeam, err := t.traverseUpUntil(ctx, func(s *Team) bool {
return s.chain().GetAdminUserLogPoint(uv) != nil
})
if err != nil {
return nil, err
}
if targetTeam == nil {
return nil, NewAdminPermissionRequiredError()
}
logPoint := targetTeam.chain().GetAdminUserLogPoint(uv)
ret := SCTeamAdmin{
TeamID: SCTeamID(targetTeam.ID),
Seqno: logPoint.SigMeta.SigChainLocation.Seqno,
SeqType: logPoint.SigMeta.SigChainLocation.SeqType,
}
return &ret, nil
}
func (t *Team) changeMembershipSection(ctx context.Context, req keybase1.TeamChangeReq, skipKeyRotation bool) (SCTeamSection, *PerTeamSharedSecretBoxes, map[keybase1.TeamID]*PerTeamSharedSecretBoxes, *teamEKPayload, *memberSet, *hidden.Ratchet, error) {
// initialize key manager
if _, err := t.SharedSecret(ctx); err != nil {
return SCTeamSection{}, nil, nil, nil, nil, nil, err
}
admin, err := t.getAdminPermission(ctx)
if err != nil {
return SCTeamSection{}, nil, nil, nil, nil, nil, err
}
if t.IsSubteam() && len(req.Owners) > 0 {
return SCTeamSection{}, nil, nil, nil, nil, nil, NewSubteamOwnersError()
}
// load the member set specified in req
memSet, err := newMemberSetChange(ctx, t.G(), req)
if err != nil {
return SCTeamSection{}, nil, nil, nil, nil, nil, err
}
ratchet, err := t.makeRatchet(ctx)
if err != nil {
return SCTeamSection{}, nil, nil, nil, nil, nil, err
}
section := SCTeamSection{
ID: SCTeamID(t.ID),
Admin: admin,
Implicit: t.IsImplicit(),
Public: t.IsPublic(),
Ratchets: ratchet.ToTeamSection(),
}
section.Members, err = memSet.Section()
if err != nil {
return SCTeamSection{}, nil, nil, nil, nil, nil, err
}
// create secret boxes for recipients, possibly rotating the key
secretBoxes, implicitAdminBoxes, perTeamKeySection, teamEKPayload, err := t.recipientBoxes(ctx, memSet, skipKeyRotation)
if err != nil {
return SCTeamSection{}, nil, nil, nil, nil, nil, err
}
section.PerTeamKey = perTeamKeySection
err = addSummaryHash(§ion, secretBoxes)
if err != nil {
return SCTeamSection{}, nil, nil, nil, nil, nil, err
}
section.CompletedInvites = req.CompletedInvites
section.Implicit = t.IsImplicit()
section.Public = t.IsPublic()
if len(section.CompletedInvites) > 0 && section.Members == nil {
// Just mooted invites is fine - if TeamChangeReq is empty,
// changeMembershipSection returned nil members. But we need
// empty Members in order to have a valid link.
section.Members = &SCTeamMembers{}
}
return section, secretBoxes, implicitAdminBoxes, teamEKPayload, memSet, ratchet, nil
}
func (t *Team) changeItemsPayload(ctx context.Context, sections []teamSectionWithLinkType,
merkleRoot *libkb.MerkleRoot, sigPayloadArgs sigPayloadArgs) (libkb.JSONPayload, keybase1.Seqno, error) {
var readySigs []libkb.SigMultiItem
nextSeqno := t.NextSeqno()
latestLinkID := t.chain().GetLatestLinkID()
for _, section := range sections {
sigMultiItem, linkID, err := t.sigTeamItemRaw(ctx, section.section,
section.linkType, nextSeqno, latestLinkID, merkleRoot)
if err != nil {
return nil, keybase1.Seqno(0), err
}
nextSeqno++
latestLinkID = linkID
readySigs = append(readySigs, sigMultiItem)
}
if err := t.precheckLinksToPost(ctx, readySigs); err != nil {
return nil, keybase1.Seqno(0), err
}
payload := t.sigPayload(readySigs, sigPayloadArgs)
return payload, nextSeqno - 1, nil
}
func (t *Team) changeItemPayload(ctx context.Context, section SCTeamSection, linkType libkb.LinkType,
merkleRoot *libkb.MerkleRoot, sigPayloadArgs sigPayloadArgs) (libkb.JSONPayload, keybase1.Seqno, error) {
// create the change item
sigMultiItem, latestSeqno, err := t.sigTeamItem(ctx, section, linkType, merkleRoot)
if err != nil {
return nil, keybase1.Seqno(0), err
}
sigMulti := []libkb.SigMultiItem{sigMultiItem}
err = t.precheckLinksToPost(ctx, sigMulti)
if err != nil {
return nil, keybase1.Seqno(0), err
}
// make the payload
payload := t.sigPayload(sigMulti, sigPayloadArgs)
return payload, latestSeqno, nil
}
func (t *Team) postChangeItem(ctx context.Context, section SCTeamSection, linkType libkb.LinkType, merkleRoot *libkb.MerkleRoot, sigPayloadArgs sigPayloadArgs) (keybase1.Seqno, error) {
payload, latestSeqno, err := t.changeItemPayload(ctx, section, linkType, merkleRoot, sigPayloadArgs)
if err != nil {
return keybase1.Seqno(0), err
}
// send it to the server
err = t.postMulti(libkb.NewMetaContext(ctx, t.G()), payload)
if err != nil {
return keybase1.Seqno(0), err
}
return latestSeqno, nil
}
func (t *Team) currentUserUV(ctx context.Context) (keybase1.UserVersion, error) {
return t.G().GetMeUV(ctx)
}
func loadMeForSignatures(ctx context.Context, g *libkb.GlobalContext) (libkb.UserForSignatures, error) {
return libkb.LoadSelfForTeamSignatures(ctx, g)
}
func usesPerTeamKeys(linkType libkb.LinkType) bool {
switch linkType {
case libkb.LinkTypeLeave:
return false
case libkb.LinkTypeInvite:
return false
case libkb.LinkTypeDeleteRoot:
return false
case libkb.LinkTypeDeleteSubteam:
return false
case libkb.LinkTypeDeleteUpPointer:
return false
case libkb.LinkTypeKBFSSettings:
return false
}
return true
}
func (t *Team) sigTeamItem(ctx context.Context, section SCTeamSection, linkType libkb.LinkType, merkleRoot *libkb.MerkleRoot) (libkb.SigMultiItem, keybase1.Seqno, error) {
nextSeqno := t.NextSeqno()
lastLinkID := t.chain().GetLatestLinkID()
sig, _, err := t.sigTeamItemRaw(ctx, section, linkType, nextSeqno, lastLinkID, merkleRoot)
return sig, nextSeqno, err
}
func (t *Team) sigTeamItemRaw(ctx context.Context, section SCTeamSection, linkType libkb.LinkType, nextSeqno keybase1.Seqno, lastLinkID keybase1.LinkID, merkleRoot *libkb.MerkleRoot) (libkb.SigMultiItem, keybase1.LinkID, error) {
me, err := loadMeForSignatures(ctx, t.G())
if err != nil {
return libkb.SigMultiItem{}, "", err
}
deviceSigningKey, err := t.G().ActiveDevice.SigningKey()
if err != nil {
return libkb.SigMultiItem{}, "", err
}
latestLinkID, err := libkb.ImportLinkID(lastLinkID)
if err != nil {
return libkb.SigMultiItem{}, "", err
}
sig, err := ChangeSig(t.G(), me, latestLinkID, nextSeqno, deviceSigningKey, section, linkType, merkleRoot)
if err != nil {
return libkb.SigMultiItem{}, "", err
}
var signingKey libkb.NaclSigningKeyPair
var encryptionKey libkb.NaclDHKeyPair
if usesPerTeamKeys(linkType) {
signingKey, err = t.keyManager.SigningKey()
if err != nil {
return libkb.SigMultiItem{}, "", err
}
encryptionKey, err = t.keyManager.EncryptionKey()
if err != nil {
return libkb.SigMultiItem{}, "", err
}
if section.PerTeamKey != nil {
// need a reverse sig
// set a nil value (not empty) for reverse_sig (fails without this)
err := sig.SetValueAtPath("body.team.per_team_key.reverse_sig", jsonw.NewNil())
if err != nil {
return libkb.SigMultiItem{}, "", err
}
reverseSig, _, _, err := libkb.SignJSON(sig, signingKey)
if err != nil {
return libkb.SigMultiItem{}, "", err
}
err = sig.SetValueAtPath("body.team.per_team_key.reverse_sig", jsonw.NewString(reverseSig))
if err != nil {
return libkb.SigMultiItem{}, "", err
}
}
}
seqType := seqTypeForTeamPublicness(t.IsPublic())
sigJSON, err := sig.Marshal()
if err != nil {
return libkb.SigMultiItem{}, "", err
}
v2Sig, _, newLinkID, err := libkb.MakeSigchainV2OuterSig(
t.MetaContext(ctx),
deviceSigningKey,
linkType,
nextSeqno,
sigJSON,
latestLinkID,
libkb.SigHasRevokes(false),
seqType,
libkb.SigIgnoreIfUnsupported(false),
nil,
)
if err != nil {
return libkb.SigMultiItem{}, "", err
}
sigMultiItem := libkb.SigMultiItem{
Sig: v2Sig,
SigningKID: deviceSigningKey.GetKID(),
Type: string(linkType),
SeqType: seqType,
SigInner: string(sigJSON),
TeamID: t.ID,
}
if usesPerTeamKeys(linkType) {
sigMultiItem.PublicKeys = &libkb.SigMultiItemPublicKeys{
Encryption: encryptionKey.GetKID(),
Signing: signingKey.GetKID(),
}
}
return sigMultiItem, keybase1.LinkID(newLinkID.String()), nil
}
func (t *Team) recipientBoxes(ctx context.Context, memSet *memberSet, skipKeyRotation bool) (
*PerTeamSharedSecretBoxes, map[keybase1.TeamID]*PerTeamSharedSecretBoxes,
*SCPerTeamKey, *teamEKPayload, error) {
// get device key
deviceEncryptionKey, err := t.G().ActiveDevice.EncryptionKey()
if err != nil {
return nil, nil, nil, nil, err
}
// First create all the subteam per-team-key boxes for new implicit admins.
// We'll return these whether or not we're doing a rotation below.
// TODO: Should we no-op this if the admins+owners aren't actually new?
var implicitAdminBoxes map[keybase1.TeamID]*PerTeamSharedSecretBoxes
adminAndOwnerRecipients := memSet.adminAndOwnerRecipients()
if len(adminAndOwnerRecipients) > 0 {
implicitAdminBoxes = map[keybase1.TeamID]*PerTeamSharedSecretBoxes{}
subteams, err := t.loadAllTransitiveSubteams(ctx, true /*forceRepoll*/)
if err != nil {
return nil, nil, nil, nil, err
}
for _, subteam := range subteams {
subteamBoxes, err := subteam.keyManager.SharedSecretBoxes(t.MetaContext(ctx), deviceEncryptionKey, adminAndOwnerRecipients)
if err != nil {
return nil, nil, nil, nil, err
}
implicitAdminBoxes[subteam.ID] = subteamBoxes
}
}
// If there are any removals happening, we need to rotate the
// team key, and the recipients will be all the users in the team
// after the removal.
if memSet.HasRemoval() {
if !skipKeyRotation {
// key is rotating, so recipients needs to be all the remaining members
// of the team after the removal (and including any new members in this
// change)
t.G().Log.CDebugf(ctx, "recipientBoxes: Team change request contains removal, rotating team key")
boxes, perTeamKey, teamEKPayload, err := t.rotateBoxes(ctx, memSet)
return boxes, implicitAdminBoxes, perTeamKey, teamEKPayload, err
}
// If we don't rotate key, continue with the usual boxing.
t.G().Log.CDebugf(ctx, "recipientBoxes: Skipping key rotation")
}
// don't need keys for existing or restricted bot members, so remove them from the set
memSet.removeExistingMembers(ctx, t)
t.G().Log.CDebugf(ctx, "team change request: %d new members", len(memSet.recipients))
if len(memSet.recipients) == 0 {
return nil, implicitAdminBoxes, nil, nil, nil
}
boxes, err := t.keyManager.SharedSecretBoxes(t.MetaContext(ctx), deviceEncryptionKey, memSet.recipients)
if err != nil {
return nil, nil, nil, nil, err
}
// No SCPerTeamKey section or teamEKPayload when the key isn't rotated
return boxes, implicitAdminBoxes, nil, nil, err
}
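// rotateBoxes rotates the per-team key, boxing the new shared secret for all
// remaining members (and, for subteams, the implicit admins above), and prepares
// the payload for a new teamEK keyed to the same recipients.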
func (t *Team) rotateBoxes(ctx context.Context, memSet *memberSet) (*PerTeamSharedSecretBoxes, *SCPerTeamKey, *teamEKPayload, error) {
// get device key
deviceEncryptionKey, err := t.G().ActiveDevice.EncryptionKey()
if err != nil {
return nil, nil, nil, err
}
// rotate the team key for all current members except restricted bots.
existing, err := t.Members()
if err != nil {
return nil, nil, nil, err
}
if err := memSet.AddRemainingRecipients(ctx, t.G(), existing); err != nil {
return nil, nil, nil, err
}
// Without adding extra admins, get the recipients for the new teamEK
recipients := memSet.recipientUids()
if t.IsSubteam() {
// rotate needs to be keyed for all admins above it
allParentAdmins, err := t.G().GetTeamLoader().ImplicitAdmins(ctx, t.ID)
if err != nil {
return nil, nil, nil, err
}
_, err = memSet.loadGroup(ctx, t.G(), allParentAdmins, storeMemberKindRecipient, true)
if err != nil {
return nil, nil, nil, err
}
}
t.rotated = true
boxes, key, err := t.keyManager.RotateSharedSecretBoxes(t.MetaContext(ctx), deviceEncryptionKey, memSet.recipients)
if err != nil {
return nil, nil, nil, err
}
// Once we have the new PTK, let's make the new teamEK
teamEKPayload, err := t.teamEKPayload(ctx, recipients)
return boxes, key, teamEKPayload, err
}
type teamEKPayload struct {
sig string
boxes *[]keybase1.TeamEkBoxMetadata
metadata keybase1.TeamEkMetadata
box *keybase1.TeamEkBoxed
}
func (t *Team) teamEKPayload(ctx context.Context, recipients []keybase1.UID) (*teamEKPayload, error) {
ekLib := t.G().GetEKLib()
if ekLib == nil || len(recipients) == 0 {
return nil, nil
}
sigKey, err := t.SigningKey(ctx)
if err != nil {
return nil, err
}
mctx := libkb.NewMetaContext(ctx, t.G())
sig, boxes, metadata, box, err := ekLib.PrepareNewTeamEK(mctx, t.ID, sigKey, recipients)
if err != nil {
return nil, err
}
return &teamEKPayload{
sig: sig,
boxes: boxes,
metadata: metadata,
box: box,
}, nil
}
func (t *Team) storeTeamEKPayload(ctx context.Context, teamEKPayload *teamEKPayload) {
// Add the new teamEK box to local storage, if it was created above.
if teamEKPayload != nil && teamEKPayload.box != nil {
mctx := libkb.NewMetaContext(ctx, t.G())
boxed := keybase1.NewTeamEphemeralKeyBoxedWithTeam(*teamEKPayload.box)
if err := t.G().GetTeamEKBoxStorage().Put(mctx, t.ID, teamEKPayload.metadata.Generation, boxed); err != nil {
t.G().Log.CErrorf(ctx, "error while saving teamEK box: %s", err)
}
}
}
// createTeambotKeys generates teambotKeys and teambotEKs for the given bot
// member list. Runs in the background on member addition or team rotation.
func createTeambotKeys(g *libkb.GlobalContext, teamID keybase1.TeamID, bots []keybase1.UID) {
mctx := libkb.NewMetaContextBackground(g)
go func() {
var err error
defer mctx.TraceTimed(fmt.Sprintf("createTeambotKeys: %d bot members", len(bots)), func() error { return err })()
if len(bots) == 0 {
return
}
// Load the team in case we need to grab the latest PTK generation after a rotation.
team, err := Load(mctx.Ctx(), g, keybase1.LoadTeamArg{
ID: teamID,
})
if err != nil {
return
}
ekLib := mctx.G().GetEKLib()
keyer := mctx.G().GetTeambotMemberKeyer()
appKey, err := team.ChatKey(mctx.Ctx())
if err != nil {
mctx.Debug("unable to get teamApplication key %v, aborting TeambotKey creation", err)
keyer = nil
}
for _, uid := range bots {
guid := gregor1.UID(uid.ToBytes())
if ekLib != nil {
if teambotEK, created, err := ekLib.GetOrCreateLatestTeambotEK(mctx, teamID, guid); err != nil {
mctx.Debug("unable to GetOrCreateLatestTeambotEK for %v, %v", guid, err)
} else {
mctx.Debug("published TeambotEK generation %d for %v, newly created: %v", teambotEK.Generation(), uid, created)
}
}
if keyer != nil {
if teambotKey, created, err := keyer.GetOrCreateTeambotKey(mctx, teamID, guid, appKey); err != nil {
mctx.Debug("unable to GetOrCreateTeambotKey for %v, %v", guid, err)
} else {
mctx.Debug("published TeambotKey generation %d for %v, newly created: %v", teambotKey.Generation(), uid, created)
}
}
}
}()
}
type sigPayloadArgs struct {
secretBoxes *PerTeamSharedSecretBoxes
implicitAdminBoxes map[keybase1.TeamID]*PerTeamSharedSecretBoxes
lease *libkb.Lease
prePayload libkb.JSONPayload
legacyTLFUpgrade *keybase1.TeamGetLegacyTLFUpgrade
teamEKBoxes *[]keybase1.TeamEkBoxMetadata
teamEKPayload *teamEKPayload
ratchetBlindingKeys hidden.EncodedRatchetBlindingKeySet
}
func (t *Team) sigPayload(sigMulti []libkb.SigMultiItem, args sigPayloadArgs) libkb.JSONPayload {
payload := libkb.JSONPayload{}
// copy the prepayload so we don't mutate it
for k, v := range args.prePayload {
payload[k] = v
}
payload["sigs"] = sigMulti
if args.secretBoxes != nil {
payload["per_team_key"] = args.secretBoxes
}
if args.implicitAdminBoxes != nil {
payload["implicit_team_keys"] = args.implicitAdminBoxes
}
if args.lease != nil {
payload["downgrade_lease_id"] = args.lease.LeaseID
}
if args.legacyTLFUpgrade != nil {
payload["legacy_tlf_upgrade"] = args.legacyTLFUpgrade
}
args.ratchetBlindingKeys.AddToJSONPayload(payload)
if args.teamEKBoxes != nil && len(*args.teamEKBoxes) > 0 {
payload["team_ek_rebox"] = libkb.JSONPayload{
"boxes": args.teamEKBoxes,
"team_id": t.ID,
}
} else if args.teamEKPayload != nil {
if args.teamEKPayload.boxes != nil && len(*args.teamEKPayload.boxes) > 0 {
payload["team_ek"] = libkb.JSONPayload{
"sig": args.teamEKPayload.sig,
"boxes": args.teamEKPayload.boxes,
"team_id": t.ID,
}
}
}
if t.G().VDL.DumpPayload() {
pretty, err := json.MarshalIndent(payload, "", "\t")
if err != nil {
t.G().Log.Info("json marshal error: %s", err)
} else {
t.G().Log.Info("payload: %s", pretty)
}
}
return payload
}
func (t *Team) postMulti(mctx libkb.MetaContext, payload libkb.JSONPayload) error {
_, err := t.G().API.PostJSON(mctx, libkb.APIArg{
Endpoint: "sig/multi",
SessionType: libkb.APISessionTypeREQUIRED,
JSONPayload: payload,
})
return err
}
// ForceMerkleRootUpdate will call LookupTeam on MerkleClient to
// update the cached merkle root so that it includes the latest team sigs. Needed if
// the client wants to create a signature that refers to an adminship: the
// signature's merkle_root has to be fresher than the adminship's.
func (t *Team) ForceMerkleRootUpdate(ctx context.Context) error {
return ForceMerkleRootUpdateByTeamID(t.MetaContext(ctx), t.ID)
}
func ForceMerkleRootUpdateByTeamID(mctx libkb.MetaContext, teamID keybase1.TeamID) error {
_, err := mctx.G().GetMerkleClient().LookupTeam(mctx, teamID)
return err
}
// All admins, owners, and implicit admins of this team.
func (t *Team) AllAdmins(ctx context.Context) ([]keybase1.UserVersion, error) {
set := make(map[keybase1.UserVersion]bool)
owners, err := t.UsersWithRole(keybase1.TeamRole_OWNER)
if err != nil {
return nil, err
}
for _, m := range owners {
set[m] = true
}
admins, err := t.UsersWithRole(keybase1.TeamRole_ADMIN)
if err != nil {
return nil, err
}
for _, m := range admins {
set[m] = true
}
if t.IsSubteam() {
imp, err := t.G().GetTeamLoader().ImplicitAdmins(ctx, t.ID)
if err != nil {
return nil, err
}
for _, m := range imp {
set[m] = true
}
}
var all []keybase1.UserVersion
for uv := range set {
all = append(all, uv)
}
return all, nil
}
// Restriction inherited from ListSubteams:
// Only call this on a Team that has been loaded with NeedAdmin.
// Otherwise, you might get incoherent answers due to links that
// were stubbed over the life of the cached object.
func (t *Team) loadAllTransitiveSubteams(ctx context.Context, forceRepoll bool) ([]*Team, error) {
subteams := []*Team{}
for _, idAndName := range t.chain().ListSubteams() {
// Load each subteam...
subteam, err := Load(ctx, t.G(), keybase1.LoadTeamArg{
ID: idAndName.Id,
Public: t.IsPublic(),
NeedAdmin: true,
ForceRepoll: true,
})
if err != nil {
return nil, err
}
// Force loading the key manager.
// TODO: Should this be the default, so that we don't need to do it here?
_, err = subteam.SharedSecret(ctx)
if err != nil {
return nil, err
}
subteams = append(subteams, subteam)
// ...and then recursively load each subteam's children.
recursiveSubteams, err := subteam.loadAllTransitiveSubteams(ctx, forceRepoll)
if err != nil {
return nil, err
}
subteams = append(subteams, recursiveSubteams...)
}
return subteams, nil
}
func (t *Team) PostTeamSettings(ctx context.Context, settings keybase1.TeamSettings, rotate bool) error {
if _, err := t.SharedSecret(ctx); err != nil {
return err
}
admin, err := t.getAdminPermission(ctx)
if err != nil {
return err
}
mr, err := t.G().MerkleClient.FetchRootFromServer(t.MetaContext(ctx), libkb.TeamMerkleFreshnessForAdmin)
if err != nil {
return err
}
scSettings, err := CreateTeamSettings(settings.Open, settings.JoinAs)
if err != nil {
return err
}
ratchet, err := t.makeRatchet(ctx)
if err != nil {
return err
}
section := SCTeamSection{
ID: SCTeamID(t.ID),
Admin: admin,
Settings: &scSettings,
Implicit: t.IsImplicit(),
Public: t.IsPublic(),
Ratchets: ratchet.ToTeamSection(),
}
payloadArgs := sigPayloadArgs{
ratchetBlindingKeys: ratchet.ToSigPayload(),
}
var maybeEKPayload *teamEKPayload
var botMembers []keybase1.UID
if rotate {
// Create empty Members section. We are not changing memberships, but
// it's needed for key rotation.
memSet := newMemberSet()
section.Members, err = memSet.Section()
if err != nil {
return err
}
secretBoxes, perTeamKeySection, teamEKPayload, err := t.rotateBoxes(ctx, memSet)
if err != nil {
return err
}
section.PerTeamKey = perTeamKeySection
payloadArgs.secretBoxes = secretBoxes
payloadArgs.teamEKPayload = teamEKPayload
maybeEKPayload = teamEKPayload // for storeTeamEKPayload, after post succeeds
botMembers = memSet.restrictedBotRecipientUids()
}
latestSeqno, err := t.postChangeItem(ctx, section, libkb.LinkTypeSettings, mr, payloadArgs)
if err != nil {
return err
}
if rotate {
err := t.notify(ctx, keybase1.TeamChangeSet{KeyRotated: true, Misc: true}, latestSeqno)
if err != nil {
return err
}
t.storeTeamEKPayload(ctx, maybeEKPayload)
createTeambotKeys(t.G(), t.ID, botMembers)
} else {
err := t.notify(ctx, keybase1.TeamChangeSet{Misc: true}, latestSeqno)
if err != nil {
return err
}
}
return nil
}
func (t *Team) botSettingsSection(ctx context.Context, bots map[keybase1.UserVersion]keybase1.TeamBotSettings,
ratchet *hidden.Ratchet, merkleRoot *libkb.MerkleRoot) (SCTeamSection, *hidden.Ratchet, error) {
if _, err := t.SharedSecret(ctx); err != nil {
return SCTeamSection{}, nil, err
}
admin, err := t.getAdminPermission(ctx)
if err != nil {
return SCTeamSection{}, nil, err
}
scBotSettings, err := CreateTeamBotSettings(bots)
if err != nil {
return SCTeamSection{}, nil, err
}
if ratchet == nil {
ratchet, err = t.makeRatchet(ctx)
if err != nil {
return SCTeamSection{}, nil, err
}
}
section := SCTeamSection{
ID: SCTeamID(t.ID),
Implicit: t.IsImplicit(),
Public: t.IsPublic(),
Admin: admin,
BotSettings: &scBotSettings,
Ratchets: ratchet.ToTeamSection(),
}
return section, ratchet, nil
}
func (t *Team) PostTeamBotSettings(ctx context.Context, bots map[keybase1.UserVersion]keybase1.TeamBotSettings) error {
mr, err := t.G().MerkleClient.FetchRootFromServer(t.MetaContext(ctx), libkb.TeamMerkleFreshnessForAdmin)
if err != nil {
return err
}
section, ratchet, err := t.botSettingsSection(ctx, bots, nil, mr)
if err != nil {
return err
}
payloadArgs := sigPayloadArgs{
ratchetBlindingKeys: ratchet.ToSigPayload(),
}
_, err = t.postChangeItem(ctx, section, libkb.LinkTypeTeamBotSettings, mr, payloadArgs)
return err
}
func (t *Team) precheckLinksToPost(ctx context.Context, sigMultiItems []libkb.SigMultiItem) (err error) {
uv, err := t.currentUserUV(ctx)
if err != nil {
return err
}
return precheckLinksToPost(ctx, t.G(), sigMultiItems, t.chain(), uv)
}
// Try to run `post` (expected to post new team sigchain links).
// Retry it several times if it fails due to being behind the latest team sigchain state or due to other retryable errors.
// Passes the attempt number (initially 0) to `post`.
func RetryIfPossible(ctx context.Context, g *libkb.GlobalContext, post func(ctx context.Context, attempt int) error) (err error) {
mctx := libkb.NewMetaContext(ctx, g)
defer mctx.TraceTimed("RetryIfPossible", func() error { return err })()
const nRetries = 3
for i := 0; i < nRetries; i++ {
mctx.Debug("| RetryIfPossible(%v)", i)
err = post(mctx.Ctx(), i)
switch {
case isSigOldSeqnoError(err):
mctx.Debug("| retrying due to SigOldSeqnoError %d", i)
case isStaleBoxError(err):
mctx.Debug("| retrying due to StaleBoxError %d", i)
case isSigBadTotalOrder(err):
mctx.Debug("| retrying since update would violate total ordering for team %d", i)
case isSigMissingRatchet(err):
mctx.Debug("| retrying since the server wanted a ratchet and we didn't provide one %d", i)
case isHiddenAppendPrecheckError(err):
mctx.Debug("| retrying since we hit a hidden append precheck error")
case libkb.IsEphemeralRetryableError(err):
mctx.Debug("| retrying since we hit a retryable ephemeral error %v, attempt %d", err, i)
default:
return err
}
}
mctx.Debug("| RetryIfPossible exhausted attempts")
if err == nil {
// Should never happen
return fmt.Errorf("failed retryable team operation")
}
// Return the error from the final round
return err
}
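// A minimal, hypothetical usage sketch (illustrative only; teamID and payload are
// assumed to already be in scope and are not defined in this file):
//
//	err := RetryIfPossible(ctx, g, func(ctx context.Context, attempt int) error {
//		// reload the team on each attempt so retries see the freshest sigchain state
//		team, err := Load(ctx, g, keybase1.LoadTeamArg{ID: teamID, ForceRepoll: attempt > 0})
//		if err != nil {
//			return err
//		}
//		return team.postMulti(libkb.NewMetaContext(ctx, g), payload)
//	})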
func isHiddenAppendPrecheckError(err error) bool {
perr, ok := err.(PrecheckAppendError)
if !ok {
return false
}
_, ok = perr.Inner.(hidden.LoaderError)
return ok
}
func isSigOldSeqnoError(err error) bool {
return libkb.IsAppStatusCode(err, keybase1.StatusCode_SCSigOldSeqno)
}
func isSigBadTotalOrder(err error) bool {
return libkb.IsAppStatusCode(err, keybase1.StatusCode_SCSigBadTotalOrder)
}
func isSigMissingRatchet(err error) bool {
return libkb.IsAppStatusCode(err, keybase1.StatusCode_SCSigMissingRatchet)
}
func (t *Team) marshal(incoming interface{}) ([]byte, error) {
var data []byte
mh := codec.MsgpackHandle{WriteExt: true}
enc := codec.NewEncoderBytes(&data, &mh)
if err := enc.Encode(incoming); err != nil {
return nil, err
}
return data, nil
}
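// boxKBFSCryptKeys encrypts the legacy KBFS crypt keys with the given team
// application key inside a NaCl secretbox and returns the base64-encoded payload
// together with the SHA-256 hash used as the keyset hash.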
func (t *Team) boxKBFSCryptKeys(ctx context.Context, key keybase1.TeamApplicationKey,
kbfsKeys []keybase1.CryptKey) (string, keybase1.TeamEncryptedKBFSKeysetHash, error) {
marshaledKeys, err := t.marshal(kbfsKeys)
if err != nil {
return "", "", err
}
var nonce [libkb.NaclDHNonceSize]byte
if _, err := rand.Read(nonce[:]); err != nil {
return "", "", err
}
var encKey [libkb.NaclSecretBoxKeySize]byte = key.Material()
sealed := secretbox.Seal(nil, marshaledKeys, &nonce, &encKey)
dat := keybase1.TeamEncryptedKBFSKeyset{
V: 1,
N: nonce[:],
E: sealed,
}
marshaledSealedDat, err := t.marshal(dat)
if err != nil {
return "", "", err
}
encStr := base64.StdEncoding.EncodeToString(marshaledSealedDat)
sbytes := sha256.Sum256([]byte(encStr))
return encStr, keybase1.TeamEncryptedKBFSKeysetHashFromBytes(sbytes[:]), nil
}
func (t *Team) AssociateWithTLFKeyset(ctx context.Context, tlfID keybase1.TLFID,
cryptKeys []keybase1.CryptKey, appType keybase1.TeamApplication) (err error) {
m := t.MetaContext(ctx)
defer m.Trace("Team.AssociateWithTLFKeyset", func() error { return err })()
// If we get no crypt keys, just associate TLF ID and bail
if len(cryptKeys) == 0 {
m.Debug("AssociateWithTLFKeyset: no crypt keys given, aborting")
return nil
}
// Sort crypt keys by generation (just in case they aren't naturally)
sort.Slice(cryptKeys, func(i, j int) bool {
return cryptKeys[i].KeyGeneration < cryptKeys[j].KeyGeneration
})
latestKey, err := t.ApplicationKey(ctx, appType)
if err != nil {
return err
}
encStr, hash, err := t.boxKBFSCryptKeys(ctx, latestKey, cryptKeys)
if err != nil {
return err
}
ratchet, err := t.makeRatchet(ctx)
if err != nil {
return err
}
upgrade := SCTeamKBFSLegacyUpgrade{
AppType: appType,
KeysetHash: hash,
LegacyGeneration: cryptKeys[len(cryptKeys)-1].Generation(),
TeamGeneration: latestKey.KeyGeneration,
}
teamSection := SCTeamSection{
ID: SCTeamID(t.ID),
Implicit: t.IsImplicit(),
Public: t.IsPublic(),
KBFS: &SCTeamKBFS{
Keyset: &upgrade,
},
Ratchets: ratchet.ToTeamSection(),
}
mr, err := m.G().MerkleClient.FetchRootFromServer(m, libkb.TeamMerkleFreshnessForAdmin)
if err != nil {
return err
}
if mr == nil {
return errors.New("No merkle root available for KBFS settings update")
}
sigMultiItem, latestSeqno, err := t.sigTeamItem(ctx, teamSection, libkb.LinkTypeKBFSSettings, mr)
if err != nil {
return err
}
payload := t.sigPayload([]libkb.SigMultiItem{sigMultiItem}, sigPayloadArgs{
legacyTLFUpgrade: &keybase1.TeamGetLegacyTLFUpgrade{
EncryptedKeyset: encStr,
LegacyGeneration: cryptKeys[len(cryptKeys)-1].Generation(),
TeamGeneration: latestKey.KeyGeneration,
AppType: appType,
},
ratchetBlindingKeys: ratchet.ToSigPayload(),
})
err = t.postMulti(m, payload)
if err != nil {
return err
}
return t.HintLatestSeqno(m, latestSeqno)
}
func (t *Team) AssociateWithTLFID(ctx context.Context, tlfID keybase1.TLFID) (err error) {
m := t.MetaContext(ctx)
defer m.Trace("Team.AssociateWithTLFID", func() error { return err })()
if tlfID.Eq(t.LatestKBFSTLFID()) {
m.Debug("No updated needed, TLFID already set to %s", tlfID)
return nil
}
ratchet, err := t.makeRatchet(ctx)
if err != nil {
return err
}
teamSection := SCTeamSection{
ID: SCTeamID(t.ID),
Implicit: t.IsImplicit(),
Public: t.IsPublic(),
KBFS: &SCTeamKBFS{
TLF: &SCTeamKBFSTLF{
ID: tlfID,
},
},
Ratchets: ratchet.ToTeamSection(),
}
mr, err := m.G().MerkleClient.FetchRootFromServer(m, libkb.TeamMerkleFreshnessForAdmin)
if err != nil {
return err
}
if mr == nil |
sigMultiItem, latestSeqno, err := t.sigTeamItem(ctx, teamSection, libkb.LinkTypeKBFSSettings, mr)
if err != nil {
return err
}
sigPayloadArgs := sigPayloadArgs{
ratchetBlindingKeys: ratchet.ToSigPayload(),
}
payload := t.sigPayload([]libkb.SigMultiItem{sigMultiItem}, sigPayloadArgs)
err = t.postMulti(libkb.NewMetaContext(ctx, t.G()), payload)
if err != nil {
return err
}
return t.HintLatestSeqno(m, latestSeqno)
}
func (t *Team) notifyNoChainChange(ctx context.Context, changes keybase1.TeamChangeSet) error {
return t.notify(ctx, changes, keybase1.Seqno(0))
}
// Send notifyrouter messages.
// Modifies `changes`
// Update to the latest seqno that we're passing through; don't make any assumptions about the number of sigs.
// Note that we're probably going to be getting this same notification a second time, since it will
// bounce off a gregor and back to us. But they are idempotent, so it should be fine to be double-notified.
func (t *Team) notify(ctx context.Context, changes keybase1.TeamChangeSet, latestSeqno keybase1.Seqno) error {
changes.KeyRotated = changes.KeyRotated || t.rotated
m := libkb.NewMetaContext(ctx, t.G())
var err error
if latestSeqno > 0 {
err = HintLatestSeqno(m, t.ID, latestSeqno)
}
t.G().NotifyRouter.HandleTeamChangedByBothKeys(ctx, t.ID, t.Name().String(), t.NextSeqno(), t.IsImplicit(), changes, keybase1.Seqno(0))
return err
}
func (t *Team) HintLatestSeqno(m libkb.MetaContext, n keybase1.Seqno) error {
return HintLatestSeqno(m, t.ID, n)
}
func HintLatestSeqno(m libkb.MetaContext, id keybase1.TeamID, n keybase1.Seqno) error {
err := m.G().GetTeamLoader().HintLatestSeqno(m.Ctx(), id, n)
if err != nil {
m.Warning("error in TeamLoader#HintLatestSeqno: %v", err)
}
e2 := m.G().GetFastTeamLoader().HintLatestSeqno(m, id, n)
if e2 != nil {
m.Warning("error in FastTeamLoader#HintLatestSeqno: %v", err)
}
if err != nil {
return err
}
return e2
}
func HintLatestHiddenSeqno(m libkb.MetaContext, id keybase1.TeamID, n keybase1.Seqno) error {
err := m.G().GetHiddenTeamChainManager().HintLatestSeqno(m, id, n)
if err != nil {
m.Warning("error in HintLatestHiddenSeqno: %v", err)
}
return err
}
func (t *Team) refreshUIDMapper(ctx context.Context, g *libkb.GlobalContext) {
for uv := range t.chain().inner.UserLog {
_, err := g.UIDMapper.InformOfEldestSeqno(ctx, g, uv)
if err != nil {
g.Log.CDebugf(ctx, "Error informing eldest seqno: %+v", err.Error())
}
}
for id, invite := range t.chain().inner.ActiveInvites {
invtype, err := invite.Type.C()
if err != nil {
g.Log.CDebugf(ctx, "Error in invite %s: %s", id, err.Error())
continue
}
if invtype == keybase1.TeamInviteCategory_KEYBASE {
uv, err := invite.KeybaseUserVersion()
if err != nil {
g.Log.CDebugf(ctx, "Error in parsing invite %s: %s", id, err.Error())
// skip this invite rather than informing the mapper with a zero-value user version
continue
}
_, err = g.UIDMapper.InformOfEldestSeqno(ctx, g, uv)
if err != nil {
g.Log.CDebugf(ctx, "Error informing eldest seqno: %+v", err.Error())
}
}
}
}
func UpgradeTLFIDToImpteam(ctx context.Context, g *libkb.GlobalContext, tlfName string, tlfID keybase1.TLFID,
public bool, appType keybase1.TeamApplication, cryptKeys []keybase1.CryptKey) (err error) {
defer g.CTrace(ctx, fmt.Sprintf("UpgradeTLFIDToImpteam(%s)", tlfID), func() error { return err })()
var team *Team
if team, _, _, err = LookupOrCreateImplicitTeam(ctx, g, tlfName, public); err != nil {
return err
}
// Associate the imp team with the TLF ID
if team.LatestKBFSTLFID().IsNil() {
if err = team.AssociateWithTLFID(ctx, tlfID); err != nil {
return err
}
} else {
if team.LatestKBFSTLFID().String() != tlfID.String() {
return fmt.Errorf("implicit team already associated with different TLF ID: teamID: %s tlfID: %s",
team.ID, tlfID)
}
}
// Reload the team
if team, err = Load(ctx, g, keybase1.LoadTeamArg{
ID: team.ID,
Public: public,
ForceRepoll: true,
}); err != nil {
return err
}
// Post the crypt keys
return team.AssociateWithTLFKeyset(ctx, tlfID, cryptKeys, appType)
}
func TeamInviteTypeFromString(mctx libkb.MetaContext, inviteTypeStr string) (keybase1.TeamInviteType, error) {
switch inviteTypeStr {
case "keybase":
return keybase1.NewTeamInviteTypeDefault(keybase1.TeamInviteCategory_KEYBASE), nil
case "email":
return keybase1.NewTeamInviteTypeDefault(keybase1.TeamInviteCategory_EMAIL), nil
case "seitan_invite_token":
return keybase1.NewTeamInviteTypeDefault(keybase1.TeamInviteCategory_SEITAN), nil
case "phone":
return keybase1.NewTeamInviteTypeDefault(keybase1.TeamInviteCategory_PHONE), nil
case "twitter", "github", "facebook", "reddit", "hackernews", "pgp", "http", "https", "dns":
return keybase1.NewTeamInviteTypeWithSbs(keybase1.TeamInviteSocialNetwork(inviteTypeStr)), nil
default:
if mctx.G().GetProofServices().GetServiceType(mctx.Ctx(), inviteTypeStr) != nil {
return keybase1.NewTeamInviteTypeWithSbs(keybase1.TeamInviteSocialNetwork(inviteTypeStr)), nil
}
isDev := mctx.G().Env.GetRunMode() == libkb.DevelRunMode
if isDev && inviteTypeStr == "rooter" {
return keybase1.NewTeamInviteTypeWithSbs(keybase1.TeamInviteSocialNetwork(inviteTypeStr)), nil
}
// Don't want to break existing clients if we see an unknown invite type.
return keybase1.NewTeamInviteTypeWithUnknown(inviteTypeStr), nil
}
}
func FreezeTeam(mctx libkb.MetaContext, teamID keybase1.TeamID) error {
err3 := mctx.G().GetHiddenTeamChainManager().Freeze(mctx, teamID)
if err3 != nil {
mctx.Debug("error freezing in hidden team chain manager: %v", err3)
}
err1 := mctx.G().GetTeamLoader().Freeze(mctx.Ctx(), teamID)
if err1 != nil {
mctx.Debug("error freezing in team cache: %v", err1)
}
err2 := mctx.G().GetFastTeamLoader().Freeze(mctx, teamID)
if err2 != nil {
mctx.Debug("error freezing in fast team cache: %v", err2)
}
return libkb.CombineErrors(err1, err2, err3)
}
func TombstoneTeam(mctx libkb.MetaContext, teamID keybase1.TeamID) error {
err3 := mctx.G().GetHiddenTeamChainManager().Tombstone(mctx, teamID)
if err3 != nil {
mctx.Debug("error tombstoning in hidden team chain manager: %v", err3)
}
err1 := mctx.G().GetTeamLoader().Tombstone(mctx.Ctx(), teamID)
if err1 != nil {
mctx.Debug("error tombstoning in team cache: %v", err1)
}
err2 := mctx.G().GetFastTeamLoader().Tombstone(mctx, teamID)
if err2 != nil {
mctx.Debug("error tombstoning in fast team cache: %v", err2)
}
return libkb.CombineErrors(err1, err2, err3)
}
type TeamShim struct {
Data *keybase1.TeamData
Hidden *keybase1.HiddenTeamChain
}
func (t *TeamShim) MainChain() *keybase1.TeamData { return t.Data }
func (t *TeamShim) HiddenChain() *keybase1.HiddenTeamChain { return t.Hidden }
var _ Teamer = (*TeamShim)(nil)
func KeySummary(t Teamer) string {
if t == nil {
return "Ø"
}
return fmt.Sprintf("{main:%s, hidden:%s}", t.MainChain().KeySummary(), t.HiddenChain().KeySummary())
}
| {
return errors.New("No merkle root available for KBFS settings update")
} |
token.py | """Token validation utilities"""
from mytoyota.const import TOKEN_LENGTH
from mytoyota.exceptions import ToyotaInvalidToken
def is_valid_token(token: str) -> bool:
"""Checks if token is the correct length"""
if token and len(token) == TOKEN_LENGTH and token.endswith("..*"): | return True
raise ToyotaInvalidToken(
f"Token must end with '..*' and be {TOKEN_LENGTH} characters long."
) | |
utils.py | import os
import numpy as np
import numba as nb
def create_folder(storage_path):
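"""Create the storage folder and its info/hist/soln/figs (crop, raw) subdirectories if they do not already exist."""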
if not os.path.isdir(storage_path):
os.makedirs(storage_path,exist_ok=True)
lsdir = os.listdir(storage_path)
for item in ["info","hist","soln","figs"]:
if item not in lsdir:
os.makedirs(storage_path+item+"/",exist_ok=True)
if item == "figs":
lsdir_figs = os.listdir(storage_path+item+"/")
for item1 in ["crop","raw"]:
if item1 not in lsdir_figs:
os.makedirs(storage_path+item+"/"+item1+"/",exist_ok=True) | def time_to_string(runtime):
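"""Format a runtime given in seconds as an HH:MM:SS.ss string."""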
seconds = runtime%60
runmins = (runtime-seconds)/60
mins = int(runmins%60)
runhrs = (runmins-mins)/60
hrs = int(runhrs)
return "%.2d:%.2d:%05.2f"%(hrs,mins,seconds)
def multivariate_laplace(n,d,rng=None, random_state=None):
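"""Draw n samples from a d-dimensional multivariate Laplace distribution by scaling
standard normal draws with the square root of an Exponential(1) mixing variable."""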
rng = rng if rng is not None else np.random.RandomState(random_state)
X = rng.randn(n,d)
Z = rng.exponential(size=(n,1))
return X*np.sqrt(Z)
@nb.njit(cache=True)
def np_apply_along_axis(func1d, axis, arr):
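"""Numba-compatible stand-in for np.apply_along_axis, limited to 2-D arrays and scalar-returning func1d."""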
assert arr.ndim == 2
assert axis in [0, 1]
if axis == 0:
result = np.empty(arr.shape[1])
for i in range(len(result)):
result[i] = func1d(arr[:, i])
else:
result = np.empty(arr.shape[0])
for i in range(len(result)):
result[i] = func1d(arr[i, :])
return result
@nb.njit(cache=True)
def np_apply_along_axis_kd(funckd, axis, arr, k = -1):
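"""Variant of np_apply_along_axis for slice functions that return a length-k vector; k defaults to the slice length."""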
assert arr.ndim == 2
assert axis in [0, 1]
if axis == 0:
k = k if k > 0 else arr.shape[0]
result = np.empty((k,arr.shape[1]))
for i in range(arr.shape[1]):
result[:, i] = funckd(arr[:, i])
else:
k = k if k > 0 else arr.shape[1]
result = np.empty((arr.shape[0],k))
for i in range(arr.shape[0]):
result[i, :] = funckd(arr[i, :])
return result
@nb.njit(cache=True)
def split(n, B):
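"""Return cumulative end indices that partition n items into B contiguous, nearly equal chunks (the first n % B chunks get one extra item)."""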
sep = n//B
rem = n%B
indices = []
last = 0
cur = 0
for i in range(B):
cur = last + sep + (i < rem)
indices.append(cur)
last = cur
return indices | |
splitter-accessibility.directive.ts | import { isPlatformBrowser } from '@angular/common';
import { AfterViewInit, ContentChildren, Directive, ElementRef, EventEmitter, HostListener, Inject, OnDestroy, Output, PLATFORM_ID, QueryList, Renderer2 } from '@angular/core';
import { SplitAreaDirective, SplitComponent } from 'angular-split';
import { takeUntil } from 'rxjs/operators';
import { Subject } from 'rxjs/Subject';
@Directive({
selector: 'split'
})
export class SplitterAccessibilityDirective implements AfterViewInit, OnDestroy {
/** Emit an event whenever the gutter is moved using the keyboard */
@Output() gutterKeydown = new EventEmitter<KeyboardEvent>();
/** Find all the split areas */
@ContentChildren(SplitAreaDirective) areas: QueryList<SplitAreaDirective>;
/** Store all the gutter elements */
private _gutters: HTMLElement[] = [];
/** Watch for gutters being added or removed */
private _observer: MutationObserver;
/** Teardown our observables on destroy */
private _onDestroy = new Subject<void>();
constructor(
private _elementRef: ElementRef,
private _renderer: Renderer2,
@Inject(PLATFORM_ID) private _platform: string,
private _splitter: SplitComponent
) {
// update aria values when a gutter is dragged
_splitter.dragProgress
.pipe(takeUntil(this._onDestroy))
.subscribe(() => this.updateGutterAttributes());
}
/** Once initialised make the gutters accessible */
ngAfterViewInit(): void {
// find the gutters
this.onGutterChange();
// if the number of split areas change then update the gutters and apply aria properties
this.areas.changes.pipe(takeUntil(this._onDestroy)).subscribe(() => this.onGutterChange());
// we can't know when additional split-gutters appear using ContentChildren as the directive class is not exported and selector doesn't work - use mutation observer instead
if (isPlatformBrowser(this._platform)) {
// create the mutation observer
this._observer = new MutationObserver(() => this.onGutterChange());
// begin observing the child nodes
this._observer.observe(this._elementRef.nativeElement, { childList: true });
}
}
/** Destroy all observables and observers */
ngOnDestroy(): void {
if (this._observer) {
this._observer.disconnect();
}
this._onDestroy.next();
this._onDestroy.complete();
}
/** We should focus the gutter when it is clicked */
@HostListener('click', ['$event'])
onClick(event: MouseEvent): void {
if (this.isSplitterGutter(event.target as HTMLElement)) {
(event.target as HTMLElement).focus();
}
}
/** Find all the gutters and set their attributes */
private onGutterChange(): void {
this._gutters = this.getGutters();
this.setGutterAttributes();
}
/** Get all the gutter elements */
private getGutters(): HTMLElement[] {
// This function uses DOM accessing properties - which won't work if server side rendered
if (isPlatformBrowser(this._platform)) {
const gutters: HTMLElement[] = [];
for (let idx = 0; idx < this._elementRef.nativeElement.children.length; idx++) {
const node = this._elementRef.nativeElement.children.item(idx);
if (this.isSplitterGutter(node as HTMLElement)) {
gutters.push(node as HTMLElement);
}
}
return gutters;
}
return [];
}
/** Set the appropriate attributes on the gutter elements */
private setGutterAttributes(): void {
// apply attribute to every gutter
this._gutters.forEach(gutter => {
// apply the separator role
this._renderer.setAttribute(gutter, 'role', 'separator');
// make the gutters tabbable
this._renderer.setAttribute(gutter, 'tabindex', '0');
// set the value now aria property
this.updateGutterAttributes();
});
}
/** Apply the aria attribute values */
private updateGutterAttributes(): void {
// update the value now properties of each gutter
this._gutters.forEach((gutter, idx) => {
this.setGutterValueNow(gutter, idx);
this.setGutterValueMin(gutter, idx);
this.setGutterValueMax(gutter, idx);
});
}
/** Apply the value now aria attribute */
private setGutterValueNow(gutter: HTMLElement, index: number): void {
// get the matching split area
const area = this._splitter.displayedAreas[index];
// indicate the size
this._renderer.setAttribute(gutter, 'aria-valuenow', `${Math.round(area.size * 100)}`);
}
/** Apply the value min aria attribute */
private setGutterValueMin(gutter: HTMLElement, index: number): void {
// get the matching split area
const area = this.areas.toArray()[index];
// indicate the minimum size
this._renderer.setAttribute(gutter, 'aria-valuemin', `${Math.round(area.minSize * 100)}`);
}
/** Apply the value max aria attribute */
private setGutterValueMax(gutter: HTMLElement, index: number): void {
// get every other splitter area
const availableSize = this.areas
.filter((_area, idx) => index !== idx)
.reduce<number>((total, area) => total + area.minSize, 0); |
@HostListener('keydown', ['$event'])
onKeydown(event: KeyboardEvent): void {
if (this.isSplitterGutter(event.target as HTMLElement)) {
this.gutterKeydown.emit(event);
}
}
@HostListener('keydown.ArrowDown', ['$event'])
@HostListener('keydown.ArrowRight', ['$event'])
onIncreaseKey(event: KeyboardEvent): void {
// only perform a move if a gutter is focused
if (this.isSplitterGutter(event.target as HTMLElement)) {
this.setGutterPosition(event.target as HTMLElement, -0.01);
// stop the browser from scrolling
event.preventDefault();
}
}
@HostListener('keydown.ArrowUp', ['$event'])
@HostListener('keydown.ArrowLeft', ['$event'])
onDecreaseKey(event: KeyboardEvent): void {
// only perform a move if a gutter is focused
if (this.isSplitterGutter(event.target as HTMLElement)) {
this.setGutterPosition(event.target as HTMLElement, 0.01);
// stop the browser from scrolling
event.preventDefault();
}
}
@HostListener('keydown.Home', ['$event'])
onHomeKey(event: KeyboardEvent): void {
if (this.isSplitterGutter(event.target as HTMLElement)) {
// get the affected panels
const areas = this.getAreasFromGutter(event.target as HTMLElement);
// set the previous area to its minimum size
const delta = areas.previous.size - areas.previous.comp.minSize;
// update the sizes accordingly
this.setGutterPosition(event.target as HTMLElement, delta);
// stop the browser from scrolling
event.preventDefault();
}
}
@HostListener('keydown.End', ['$event'])
onEndKey(event: KeyboardEvent): void {
if (this.isSplitterGutter(event.target as HTMLElement)) {
// get the affected panels
const areas = this.getAreasFromGutter(event.target as HTMLElement);
// set the next area to its minimum size
const delta = areas.next.size - areas.next.comp.minSize;
// update the sizes accordingly
this.setGutterPosition(event.target as HTMLElement, -delta);
// stop the browser from scrolling
event.preventDefault();
}
}
/** Determine if an element is a gutter */
private isSplitterGutter(element: HTMLElement): boolean {
return element.tagName === 'SPLIT-GUTTER';
}
/** Update the gutter position */
private setGutterPosition(gutter: HTMLElement, delta: number): void {
// get the affected panels
const areas = this.getAreasFromGutter(gutter);
// ensure we can perform the resize
if (areas.previous.size - delta < areas.previous.comp.minSize || areas.next.size + delta < areas.next.comp.minSize) {
return;
}
// perform the resize
areas.previous.size -= delta;
areas.next.size += delta;
// update the splitter - this is a private method but we need to call it
(this._splitter as any).refreshStyleSizes();
// update the gutter aria values
this.updateGutterAttributes();
}
/** Get the split areas associated with a given gutter */
private getAreasFromGutter(gutter: HTMLElement) {
const index = this._gutters.indexOf(gutter);
return {
previous: this._splitter.displayedAreas[index],
next: this._splitter.displayedAreas[index + 1]
};
}
} |
// indicate the minimum size
this._renderer.setAttribute(gutter, 'aria-valuemax', `${100 - Math.round(availableSize * 100)}`);
} |
media_player.py | """Play media via gstreamer."""
import logging
import voluptuous as vol
from homeassistant.components.media_player import (
MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.components.media_player.const import (
MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE,
SUPPORT_PLAY, SUPPORT_PLAY_MEDIA, SUPPORT_VOLUME_SET)
from homeassistant.const import CONF_NAME, EVENT_HOMEASSISTANT_STOP, STATE_IDLE
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_PIPELINE = 'pipeline'
DOMAIN = 'gstreamer'
SUPPORT_GSTREAMER = SUPPORT_VOLUME_SET | SUPPORT_PLAY | SUPPORT_PAUSE |\
SUPPORT_PLAY_MEDIA | SUPPORT_NEXT_TRACK
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_PIPELINE): cv.string,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Gstreamer platform."""
from gsp import GstreamerPlayer
name = config.get(CONF_NAME)
pipeline = config.get(CONF_PIPELINE)
player = GstreamerPlayer(pipeline)
def _shutdown(call):
"""Quit the player on shutdown."""
player.quit()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)
add_entities([GstreamerDevice(player, name)])
class GstreamerDevice(MediaPlayerDevice):
"""Representation of a Gstreamer device."""
def __init__(self, player, name):
"""Initialize the Gstreamer device."""
self._player = player
self._name = name or DOMAIN
self._state = STATE_IDLE
self._volume = None
self._duration = None
self._uri = None
self._title = None
self._artist = None
self._album = None
def update(self):
"""Update properties."""
self._state = self._player.state
self._volume = self._player.volume
self._duration = self._player.duration
self._uri = self._player.uri
self._title = self._player.title
self._album = self._player.album
self._artist = self._player.artist
def set_volume_level(self, volume):
"""Set the volume level."""
self._player.volume = volume
def play_media(self, media_type, media_id, **kwargs):
"""Play media."""
if media_type != MEDIA_TYPE_MUSIC:
_LOGGER.error('invalid media type')
return
self._player.queue(media_id)
def media_play(self):
"""Play."""
self._player.play()
def media_pause(self):
"""Pause."""
self._player.pause()
def media_next_track(self):
"""Next track."""
self._player.next()
@property
def media_content_id(self):
"""Content ID of currently playing media."""
return self._uri
@property
def | (self):
"""Content type of currently playing media."""
return MEDIA_TYPE_MUSIC
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def volume_level(self):
"""Return the volume level."""
return self._volume
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_GSTREAMER
@property
def state(self):
"""Return the state of the player."""
return self._state
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return self._duration
@property
def media_title(self):
"""Media title."""
return self._title
@property
def media_artist(self):
"""Media artist."""
return self._artist
@property
def media_album_name(self):
"""Media album."""
return self._album
| content_type |
SEXTANTsolver.py | from pextant.lib.geoshapely import GeoPolygon, LONG_LAT
import numpy as np
import csv
class SEXTANTSolver(object):
def __init__(self, environmental_model, cost_function, viz):
self.env_model = environmental_model
self.cost_function = cost_function
self.viz = viz
self.searches = []
def solve(self, start_point, end_point):
pass
def solvemultipoint(self, waypoints):
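"""Solve each consecutive leg between waypoints and return the aggregated search list along with its raw points and expanded items."""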
search_list = sextantSearchList(waypoints)
for i in range(len(waypoints) - 1):
search_result = self.solve(waypoints[i], waypoints[i + 1])
search_list.append(search_result)
return search_list, search_list.raw(), search_list.itemssrchd()
class sextantSearchList(object):
def __init__(self, points):
self.startpoint = points[0]
self.endpoint = points[-1]
self.waypoints = points
self.list = []
self.rawpoints = []
def addresult(self, raw, nodes, coordinates, expanded_items):
self.list.append(sextantSearch(raw, nodes, coordinates, expanded_items))
def append(self, sextantsearch):
self.list.append(sextantsearch)
def raw(self):
result = []
for search in self.list:
if search == False:
return None
result += search.raw
return np.array(result)
def coordinates(self):
result = []
for search in self.list:
if type(search) == bool:
return None
result += search.coordinates.to(LONG_LAT).transpose().tolist()
return GeoPolygon(LONG_LAT, *np.array(result).transpose())
def itemssrchd(self):
result = []
for search in self.list:
if type(search) == bool:
return None
result += search.expanded_items
return np.array(result)
def tojson(self, save=False):
return [elt.tojson() for elt in self.list]
def tocsv(self, filepath=None):
csvlist = [elt.tocsv() for elt in self.list]
rows = [['isStation', 'x', 'y', 'z', 'distanceMeters', 'energyJoules', 'timeSeconds']]
for row in csvlist:
rows += row
if filepath:
with open(filepath, 'wb') as csvfile:
writer = csv.writer(csvfile)
for row in rows:
writer.writerow(row)
return csvlist
class sextantSearch(object):
def __init__(self, raw, nodes, coordinates, expanded_items):
self.namemap = {
'time': ['timeList','totalTime'],
'pathlength': ['distanceList','totalDistance'],
'energy': ['energyList','totalEnergy']
}
#self.searches = []
self.nodes = nodes
self.raw = raw
self.npraw = np.array(raw).transpose()
self.coordinates = coordinates
self.expanded_items = expanded_items
def tojson(self):
|
def tocsv(self, coordstype=LONG_LAT):
sequence = []
coords = self.coordinates.to(coordstype).transpose().tolist()
for i, mesh_srch_elt in enumerate(self.nodes):
if i != 0:
row_entry = [i==1 or i==len(coords)-1] #True if it's the first or last entry
row_entry += coords[i] + [mesh_srch_elt.mesh_element.z]
derived = mesh_srch_elt.derived
row_entry += [derived['pathlength'], derived['energy'], derived['time']]  # order matches the distanceMeters/energyJoules/timeSeconds header
sequence += [row_entry]
return sequence
| out = {}
coordinates = self.coordinates.to(LONG_LAT).transpose().tolist()
out["geometry"] = {
'type': 'LineString',
'coordinates': coordinates
}
results = {}
for k, v in self.namemap.items():
results.update({v[0]:[],v[1]:0})
for i, mesh_srch_elt in enumerate(self.nodes):
derived = mesh_srch_elt.derived
for k, v in derived.items():
results[self.namemap[k][0]].append(v)
for k, v in self.namemap.items():
results[v[1]] = sum(results[v[0]])
out["derivedInfo"] = results
return out |
hasil.module.ts | import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { HasilRoutingModule } from './hasil-routing.module';
import { HasilComponent } from './hasil.component';
import { PageHeaderModule } from './../../shared';
import { HttpClientModule } from '@angular/common/http';
import { LayoutModule } from '../layout.module';
@NgModule({
imports: [
CommonModule,
HasilRoutingModule,
PageHeaderModule,
HttpClientModule,
LayoutModule
],
declarations: [
HasilComponent,
]
})
export class | {
}
| HasilModule |
theme-clouds_midnight.js | ace.define("ace/theme/clouds_midnight",[], function(require, exports, module) {
exports.isDark = true;
exports.cssClass = "ace-clouds-midnight";
exports.cssText = ".ace-clouds-midnight .ace_gutter {\
background: #232323;\
color: #929292\
}\
.ace-clouds-midnight .ace_print-margin {\
width: 1px;\
background: #232323\
}\
.ace-clouds-midnight {\
background-color: #191919;\
color: #929292\
}\
.ace-clouds-midnight .ace_cursor {\
color: #7DA5DC\
}\
.ace-clouds-midnight .ace_marker-layer .ace_selection {\
background: #000000\
}\
.ace-clouds-midnight.ace_multiselect .ace_selection.ace_start {\
box-shadow: 0 0 3px 0px #191919;\
}\
.ace-clouds-midnight .ace_marker-layer .ace_step {\
background: rgb(102, 82, 0)\ | }\
.ace-clouds-midnight .ace_marker-layer .ace_active-line {\
background: rgba(215, 215, 215, 0.031)\
}\
.ace-clouds-midnight .ace_gutter-active-line {\
background-color: rgba(215, 215, 215, 0.031)\
}\
.ace-clouds-midnight .ace_marker-layer .ace_selected-word {\
border: 1px solid #000000\
}\
.ace-clouds-midnight .ace_invisible {\
color: #666\
}\
.ace-clouds-midnight .ace_keyword,\
.ace-clouds-midnight .ace_meta,\
.ace-clouds-midnight .ace_support.ace_constant.ace_property-value {\
color: #927C5D\
}\
.ace-clouds-midnight .ace_keyword.ace_operator {\
color: #4B4B4B\
}\
.ace-clouds-midnight .ace_keyword.ace_other.ace_unit {\
color: #366F1A\
}\
.ace-clouds-midnight .ace_constant.ace_language {\
color: #39946A\
}\
.ace-clouds-midnight .ace_constant.ace_numeric {\
color: #46A609\
}\
.ace-clouds-midnight .ace_constant.ace_character.ace_entity {\
color: #A165AC\
}\
.ace-clouds-midnight .ace_invalid {\
color: #FFFFFF;\
background-color: #E92E2E\
}\
.ace-clouds-midnight .ace_fold {\
background-color: #927C5D;\
border-color: #929292\
}\
.ace-clouds-midnight .ace_storage,\
.ace-clouds-midnight .ace_support.ace_class,\
.ace-clouds-midnight .ace_support.ace_function,\
.ace-clouds-midnight .ace_support.ace_other,\
.ace-clouds-midnight .ace_support.ace_type {\
color: #E92E2E\
}\
.ace-clouds-midnight .ace_string {\
color: #5D90CD\
}\
.ace-clouds-midnight .ace_comment {\
color: #3C403B\
}\
.ace-clouds-midnight .ace_entity.ace_name.ace_tag,\
.ace-clouds-midnight .ace_entity.ace_other.ace_attribute-name {\
color: #606060\
}\
.ace-clouds-midnight .ace_indent-guide {\
background: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAEklEQVQImWNgYGBgYHB3d/8PAAOIAdULw8qMAAAAAElFTkSuQmCC) right repeat-y\
}";
var dom = require("../lib/dom");
dom.importCssString(exports.cssText, exports.cssClass, false);
}); (function() {
ace.require(["ace/theme/clouds_midnight"], function(m) {
if (typeof module == "object" && typeof exports == "object" && module) {
module.exports = m;
}
});
})(); | }\
.ace-clouds-midnight .ace_marker-layer .ace_bracket {\
margin: -1px 0 0 -1px;\
border: 1px solid #BFBFBF\ |